Diffstat (limited to 'lib')
-rw-r--r--  lib/appmon/src/appmon.erl | 15
-rw-r--r--  lib/appmon/src/appmon_info.erl | 21
-rw-r--r--  lib/appmon/src/appmon_place.erl | 14
-rw-r--r--  lib/appmon/vsn.mk | 2
-rw-r--r--  lib/asn1/vsn.mk | 11
-rw-r--r--  lib/compiler/doc/src/compile.xml | 61
-rw-r--r--  lib/compiler/src/beam_asm.erl | 3
-rw-r--r--  lib/compiler/src/beam_block.erl | 1
-rw-r--r--  lib/compiler/src/beam_bool.erl | 21
-rw-r--r--  lib/compiler/src/beam_dead.erl | 58
-rw-r--r--  lib/compiler/src/beam_disasm.erl | 15
-rw-r--r--  lib/compiler/src/beam_peep.erl | 58
-rw-r--r--  lib/compiler/src/beam_type.erl | 4
-rw-r--r--  lib/compiler/src/beam_utils.erl | 16
-rw-r--r--  lib/compiler/src/beam_validator.erl | 5
-rw-r--r--  lib/compiler/src/cerl.erl | 13
-rw-r--r--  lib/compiler/src/cerl_inline.erl | 7
-rw-r--r--  lib/compiler/src/cerl_trees.erl | 16
-rw-r--r--  lib/compiler/src/compile.erl | 25
-rw-r--r--  lib/compiler/src/erl_bifs.erl | 12
-rw-r--r--  lib/compiler/src/genop.tab | 3
-rw-r--r--  lib/compiler/src/rec_env.erl | 14
-rw-r--r--  lib/compiler/src/sys_core_fold.erl | 45
-rw-r--r--  lib/compiler/src/sys_pre_expand.erl | 28
-rw-r--r--  lib/compiler/src/v3_core.erl | 47
-rw-r--r--  lib/compiler/src/v3_life.erl | 2
-rw-r--r--  lib/compiler/test/andor_SUITE.erl | 24
-rw-r--r--  lib/compiler/test/bs_match_SUITE.erl | 24
-rw-r--r--  lib/compiler/test/compiler.cover | 2
-rw-r--r--  lib/compiler/test/core_SUITE_data/.gitignore | 1
-rw-r--r--  lib/compiler/test/error_SUITE.erl | 166
-rw-r--r--  lib/compiler/test/float_SUITE.erl | 24
-rw-r--r--  lib/compiler/test/guard_SUITE.erl | 146
-rw-r--r--  lib/compiler/test/match_SUITE.erl | 15
-rw-r--r--  lib/compiler/test/misc_SUITE.erl | 61
-rw-r--r--  lib/compiler/test/test_lib.erl | 8
-rw-r--r--  lib/compiler/vsn.mk | 2
-rw-r--r--  lib/crypto/c_src/crypto.c | 76
-rw-r--r--  lib/crypto/doc/src/crypto.xml | 25
-rw-r--r--  lib/crypto/src/crypto.app.src | 12
-rw-r--r--  lib/crypto/src/crypto.erl | 110
-rw-r--r--  lib/crypto/test/crypto_SUITE.erl | 42
-rw-r--r--  lib/debugger/src/dbg_iload.erl | 28
-rw-r--r--  lib/debugger/src/dbg_ui_trace_win.erl | 32
-rw-r--r--  lib/debugger/src/dbg_ui_win.erl | 15
-rwxr-xr-x  lib/debugger/src/dbg_wx_trace_win.erl | 10
-rw-r--r--  lib/debugger/vsn.mk | 2
-rw-r--r--  lib/dialyzer/RELEASE_NOTES | 13
-rw-r--r--  lib/dialyzer/src/dialyzer.erl | 36
-rw-r--r--  lib/dialyzer/src/dialyzer_analysis_callgraph.erl | 79
-rw-r--r--  lib/dialyzer/src/dialyzer_behaviours.erl | 76
-rw-r--r--  lib/dialyzer/src/dialyzer_callgraph.erl | 2
-rw-r--r--  lib/dialyzer/src/dialyzer_cl.erl | 41
-rw-r--r--  lib/dialyzer/src/dialyzer_codeserver.erl | 78
-rw-r--r--  lib/dialyzer/src/dialyzer_contracts.erl | 104
-rw-r--r--  lib/dialyzer/src/dialyzer_dataflow.erl | 465
-rw-r--r--  lib/dialyzer/src/dialyzer_options.erl | 2
-rw-r--r--  lib/dialyzer/src/dialyzer_plt.erl | 117
-rw-r--r--  lib/dialyzer/src/dialyzer_races.erl | 64
-rw-r--r--  lib/dialyzer/src/dialyzer_succ_typings.erl | 2
-rw-r--r--  lib/dialyzer/src/dialyzer_typesig.erl | 393
-rw-r--r--  lib/dialyzer/src/dialyzer_utils.erl | 67
-rw-r--r--  lib/dialyzer/vsn.mk | 2
-rw-r--r--  lib/erl_interface/doc/src/ei.xml | 2
-rw-r--r--  lib/erl_interface/include/ei.h | 11
-rw-r--r--  lib/erl_interface/src/connect/ei_connect.c | 18
-rw-r--r--  lib/erl_interface/src/connect/ei_connect_int.h | 11
-rw-r--r--  lib/erl_interface/src/decode/decode_double.c | 30
-rw-r--r--  lib/erl_interface/src/decode/decode_skip.c | 11
-rw-r--r--  lib/erl_interface/src/encode/encode_double.c | 20
-rw-r--r--  lib/erl_interface/src/legacy/decode_term.c | 11
-rw-r--r--  lib/erl_interface/src/legacy/erl_marshal.c | 39
-rw-r--r--  lib/erl_interface/src/misc/ei_decode_term.c | 23
-rw-r--r--  lib/erl_interface/src/misc/ei_printterm.c | 11
-rw-r--r--  lib/erl_interface/src/misc/get_type.c | 17
-rw-r--r--  lib/erl_interface/src/misc/putget.h | 38
-rw-r--r--  lib/erl_interface/src/misc/show_msg.c | 11
-rw-r--r--  lib/erl_interface/test/ei_decode_SUITE.erl | 29
-rw-r--r--  lib/erl_interface/test/ei_decode_SUITE_data/ei_decode_test.c | 19
-rw-r--r--  lib/erl_interface/test/ei_encode_SUITE.erl | 30
-rw-r--r--  lib/erl_interface/test/ei_encode_SUITE_data/ei_encode_test.c | 14
-rw-r--r--  lib/erl_interface/test/ei_tmo_SUITE.erl | 14
-rw-r--r--  lib/erl_interface/vsn.mk | 2
-rw-r--r--  lib/gs/contribs/bonk/bonk.erl | 26
-rw-r--r--  lib/gs/contribs/othello/othello_adt.erl | 36
-rw-r--r--  lib/gs/src/tool_utils.erl | 23
-rw-r--r--  lib/gs/vsn.mk | 2
-rw-r--r--  lib/hipe/cerl/cerl_closurean.erl | 14
-rw-r--r--  lib/hipe/cerl/cerl_messagean.erl | 16
-rw-r--r--  lib/hipe/cerl/erl_bif_types.erl | 220
-rw-r--r--  lib/hipe/cerl/erl_types.erl | 345
-rw-r--r--  lib/hipe/flow/hipe_dominators.erl | 12
-rw-r--r--  lib/hipe/icode/hipe_beam_to_icode.erl | 12
-rw-r--r--  lib/hipe/util/hipe_digraph.erl | 12
-rw-r--r--  lib/hipe/vsn.mk | 2
-rw-r--r--  lib/ic/doc/src/Makefile | 16
-rw-r--r--  lib/ic/doc/src/notes.xml | 22
-rw-r--r--  lib/ic/vsn.mk | 6
-rw-r--r--  lib/inets/doc/src/http_server.xml | 13
-rw-r--r--  lib/inets/doc/src/httpc.xml | 19
-rw-r--r--  lib/inets/doc/src/httpd.xml | 66
-rw-r--r--  lib/inets/doc/src/mod_esi.xml | 3
-rw-r--r--  lib/inets/doc/src/notes.xml | 117
-rw-r--r--  lib/inets/examples/Makefile | 194
-rw-r--r--  lib/inets/examples/httpd_load_test/Makefile | 123
-rw-r--r--  lib/inets/examples/httpd_load_test/hdlt.config.skel | 20
-rw-r--r--  lib/inets/examples/httpd_load_test/hdlt.erl | 74
-rw-r--r--  lib/inets/examples/httpd_load_test/hdlt.sh.skel | 44
-rw-r--r--  lib/inets/examples/httpd_load_test/hdlt_client.erl | 370
-rw-r--r--  lib/inets/examples/httpd_load_test/hdlt_ctrl.erl | 1530
-rw-r--r--  lib/inets/examples/httpd_load_test/hdlt_logger.erl | 138
-rw-r--r--  lib/inets/examples/httpd_load_test/hdlt_logger.hrl | 33
-rw-r--r--  lib/inets/examples/httpd_load_test/hdlt_random_html.erl | 59
-rw-r--r--  lib/inets/examples/httpd_load_test/hdlt_server.erl | 163
-rw-r--r--  lib/inets/examples/httpd_load_test/hdlt_slave.erl | 291
l---------  lib/inets/examples/httpd_load_test/hdlt_ssl_client_cert.pem | 1
l---------  lib/inets/examples/httpd_load_test/hdlt_ssl_server_cert.pem | 1
-rw-r--r--  lib/inets/examples/httpd_load_test/modules.mk | 44
-rw-r--r--  lib/inets/examples/server_root/Makefile | 209
-rw-r--r--  lib/inets/examples/subdirs.mk | 3
-rw-r--r--  lib/inets/src/ftp/Makefile | 28
-rw-r--r--  lib/inets/src/ftp/ftp.erl | 58
-rw-r--r--  lib/inets/src/ftp/ftp_internal.hrl | 13
-rw-r--r--  lib/inets/src/http_client/Makefile | 26
-rw-r--r--  lib/inets/src/http_client/http.erl | 35
-rw-r--r--  lib/inets/src/http_client/httpc.erl | 36
-rw-r--r--  lib/inets/src/http_client/httpc_cookie.erl | 12
-rw-r--r--  lib/inets/src/http_client/httpc_handler.erl | 163
-rw-r--r--  lib/inets/src/http_client/httpc_internal.hrl | 14
-rw-r--r--  lib/inets/src/http_client/httpc_manager.erl | 16
-rw-r--r--  lib/inets/src/http_client/httpc_request.erl | 15
-rw-r--r--  lib/inets/src/http_client/httpc_response.erl | 2
-rw-r--r--  lib/inets/src/http_lib/Makefile | 27
-rw-r--r--  lib/inets/src/http_lib/http_internal.hrl | 27
-rw-r--r--  lib/inets/src/http_lib/http_transport.erl | 219
-rw-r--r--  lib/inets/src/http_lib/http_util.erl | 106
-rw-r--r--  lib/inets/src/http_server/Makefile | 26
-rw-r--r--  lib/inets/src/http_server/httpd.erl | 279
-rw-r--r--  lib/inets/src/http_server/httpd_acceptor.erl | 16
-rw-r--r--  lib/inets/src/http_server/httpd_cgi.erl | 13
-rw-r--r--  lib/inets/src/http_server/httpd_conf.erl | 98
-rw-r--r--  lib/inets/src/http_server/httpd_esi.erl | 13
-rw-r--r--  lib/inets/src/http_server/httpd_instance_sup.erl | 6
-rw-r--r--  lib/inets/src/http_server/httpd_internal.hrl | 13
-rw-r--r--  lib/inets/src/http_server/httpd_manager.erl | 38
-rw-r--r--  lib/inets/src/http_server/httpd_request.erl | 66
-rw-r--r--  lib/inets/src/http_server/httpd_request_handler.erl | 104
-rw-r--r--  lib/inets/src/http_server/httpd_sup.erl | 10
-rw-r--r--  lib/inets/src/http_server/httpd_util.erl | 21
-rw-r--r--  lib/inets/src/http_server/mod_alias.erl | 73
-rw-r--r--  lib/inets/src/http_server/mod_esi.erl | 43
-rw-r--r--  lib/inets/src/inets_app/Makefile | 18
-rw-r--r--  lib/inets/src/inets_app/inets.app.src | 1
-rw-r--r--  lib/inets/src/inets_app/inets.appup.src | 38
-rw-r--r--  lib/inets/src/inets_app/inets.erl | 2
-rw-r--r--  lib/inets/src/inets_app/inets.mk | 45
-rw-r--r--  lib/inets/src/inets_app/inets_service.erl | 12
-rw-r--r--  lib/inets/src/tftp/Makefile | 22
-rw-r--r--  lib/inets/test/Makefile | 9
-rw-r--r--  lib/inets/test/ftp_suite_lib.erl | 88
-rw-r--r--  lib/inets/test/http_format_SUITE.erl | 16
-rw-r--r--  lib/inets/test/httpc_SUITE.erl | 515
-rw-r--r--  lib/inets/test/httpd_SUITE.erl | 1655
-rw-r--r--  lib/inets/test/httpd_SUITE_data/server_root/Makefile | 209
-rw-r--r--  lib/inets/test/httpd_block.erl | 101
-rw-r--r--  lib/inets/test/httpd_mod.erl | 136
-rw-r--r--  lib/inets/test/httpd_poll.erl | 66
-rw-r--r--  lib/inets/test/httpd_test_data/server_root/Makefile | 209
-rw-r--r--  lib/inets/test/httpd_test_lib.erl | 43
-rw-r--r--  lib/inets/test/httpd_time_test.erl | 65
-rw-r--r--  lib/inets/test/inets_sup_SUITE.erl | 4
-rw-r--r--  lib/inets/test/inets_test_lib.erl | 218
-rw-r--r--  lib/inets/test/inets_test_lib.hrl | 15
-rw-r--r--  lib/inets/vsn.mk | 22
-rw-r--r--  lib/kernel/doc/src/file.xml | 59
-rw-r--r--  lib/kernel/src/code.erl | 2
-rw-r--r--  lib/kernel/src/dist_util.erl | 15
-rw-r--r--  lib/kernel/src/file.erl | 45
-rw-r--r--  lib/kernel/src/file_io_server.erl | 16
-rw-r--r--  lib/kernel/src/group.erl | 48
-rw-r--r--  lib/kernel/src/inet.erl | 2
-rw-r--r--  lib/kernel/src/inet_dns.erl | 44
-rw-r--r--  lib/kernel/src/inet_res.erl | 11
-rw-r--r--  lib/kernel/src/net_kernel.erl | 201
-rw-r--r--  lib/kernel/src/os.erl | 31
-rw-r--r--  lib/kernel/src/pg2.erl | 4
-rw-r--r--  lib/kernel/src/ram_file.erl | 48
-rw-r--r--  lib/kernel/test/file_SUITE.erl | 207
-rw-r--r--  lib/kernel/test/os_SUITE.erl | 7
-rw-r--r--  lib/kernel/test/pg2_SUITE.erl | 135
-rw-r--r--  lib/kernel/test/prim_file_SUITE.erl | 115
-rw-r--r--  lib/megaco/doc/src/Makefile | 18
-rw-r--r--  lib/megaco/doc/src/files.mk | 14
-rw-r--r--  lib/megaco/doc/src/megaco.xml | 16
-rw-r--r--  lib/megaco/doc/src/megaco_performance.xml | 122
-rw-r--r--  lib/megaco/doc/src/mstone1-s8flex.log | 234
-rw-r--r--  lib/megaco/doc/src/mstone1.gif | bin 30004 -> 0 bytes
-rw-r--r--  lib/megaco/doc/src/mstone1.jpg | bin 17845 -> 16746 bytes
-rw-r--r--  lib/megaco/doc/src/mstone1.png | bin 53099 -> 0 bytes
-rw-r--r--  lib/megaco/doc/src/mstone1.ps | 1959
-rw-r--r--  lib/megaco/doc/src/notes.xml | 506
-rw-r--r--  lib/megaco/doc/src/notes_history.xml | 413
-rw-r--r--  lib/megaco/src/app/megaco.appup.src | 21
-rw-r--r--  lib/megaco/src/app/megaco_internal.hrl | 26
-rw-r--r--  lib/megaco/src/engine/megaco_config.erl | 26
-rw-r--r--  lib/megaco/src/engine/megaco_messenger.erl | 239
-rw-r--r--  lib/megaco/src/engine/megaco_monitor.erl | 33
-rw-r--r--  lib/megaco/vsn.mk | 6
-rw-r--r--  lib/mnesia/doc/src/notes.xml | 16
-rw-r--r--  lib/mnesia/examples/mnesia_meter.erl | 12
-rw-r--r--  lib/mnesia/src/mnesia.appup.src | 64
-rw-r--r--  lib/mnesia/src/mnesia_controller.erl | 62
-rw-r--r--  lib/mnesia/src/mnesia_lib.erl | 40
-rw-r--r--  lib/mnesia/src/mnesia_recover.erl | 41
-rw-r--r--  lib/mnesia/src/mnesia_schema.erl | 33
-rw-r--r--  lib/mnesia/test/Makefile | 118
-rw-r--r--  lib/mnesia/test/README | 107
-rw-r--r--  lib/mnesia/test/mnesia.spec | 23
-rw-r--r--  lib/mnesia/test/mnesia.spec.vxworks | 362
-rw-r--r--  lib/mnesia/test/mnesia_SUITE.erl | 203
-rw-r--r--  lib/mnesia/test/mnesia_atomicity_test.erl | 839
-rw-r--r--  lib/mnesia/test/mnesia_config_backup.erl | 105
-rw-r--r--  lib/mnesia/test/mnesia_config_event.erl | 74
-rw-r--r--  lib/mnesia/test/mnesia_config_test.erl | 1466
-rw-r--r--  lib/mnesia/test/mnesia_consistency_test.erl | 1612
-rw-r--r--  lib/mnesia/test/mnesia_cost.erl | 222
-rw-r--r--  lib/mnesia/test/mnesia_dbn_meters.erl | 242
-rw-r--r--  lib/mnesia/test/mnesia_dirty_access_test.erl | 927
-rw-r--r--  lib/mnesia/test/mnesia_durability_test.erl | 1470
-rw-r--r--  lib/mnesia/test/mnesia_evil_backup.erl | 750
-rw-r--r--  lib/mnesia/test/mnesia_evil_coverage_test.erl | 2401
-rw-r--r--  lib/mnesia/test/mnesia_examples_test.erl | 160
-rw-r--r--  lib/mnesia/test/mnesia_frag_test.erl | 875
-rw-r--r--  lib/mnesia/test/mnesia_inconsistent_database_test.erl | 74
-rw-r--r--  lib/mnesia/test/mnesia_install_test.erl | 342
-rw-r--r--  lib/mnesia/test/mnesia_isolation_test.erl | 2419
-rw-r--r--  lib/mnesia/test/mnesia_measure_test.erl | 203
-rw-r--r--  lib/mnesia/test/mnesia_meter.erl | 465
-rw-r--r--  lib/mnesia/test/mnesia_nice_coverage_test.erl | 227
-rw-r--r--  lib/mnesia/test/mnesia_qlc_test.erl | 475
-rw-r--r--  lib/mnesia/test/mnesia_recovery_test.erl | 1701
-rw-r--r--  lib/mnesia/test/mnesia_registry_test.erl | 137
-rw-r--r--  lib/mnesia/test/mnesia_schema_recovery_test.erl | 787
-rw-r--r--  lib/mnesia/test/mnesia_test_lib.erl | 1058
-rw-r--r--  lib/mnesia/test/mnesia_test_lib.hrl | 132
-rw-r--r--  lib/mnesia/test/mnesia_tpcb.erl | 1268
-rw-r--r--  lib/mnesia/test/mnesia_trans_access_test.erl | 1254
-rwxr-xr-x  lib/mnesia/test/mt | 60
-rw-r--r--  lib/mnesia/test/mt.erl | 262
-rw-r--r--  lib/mnesia/vsn.mk | 5
-rw-r--r--  lib/observer/vsn.mk | 2
-rw-r--r--  lib/odbc/test/Makefile | 114
-rw-r--r--  lib/odbc/test/README | 86
-rw-r--r--  lib/odbc/test/odbc.dynspec | 31
-rw-r--r--  lib/odbc/test/odbc.spec | 9
-rw-r--r--  lib/odbc/test/odbc.spec.win | 5
-rw-r--r--  lib/odbc/test/odbc_connect_SUITE.erl | 816
-rw-r--r--  lib/odbc/test/odbc_data_type_SUITE.erl | 1498
-rw-r--r--  lib/odbc/test/odbc_query_SUITE.erl | 1453
-rw-r--r--  lib/odbc/test/odbc_start_SUITE.erl | 147
-rw-r--r--  lib/odbc/test/odbc_test.hrl | 37
-rw-r--r--  lib/odbc/test/odbc_test_lib.erl | 77
-rw-r--r--  lib/odbc/test/oracle.erl | 246
-rw-r--r--  lib/odbc/test/postgres.erl | 294
-rw-r--r--  lib/odbc/test/sqlserver.erl | 298
-rw-r--r--  lib/parsetools/src/leex.erl | 53
-rw-r--r--  lib/parsetools/vsn.mk | 2
-rw-r--r--  lib/public_key/asn1/OTP-PKIX.asn1 | 2
-rw-r--r--  lib/public_key/doc/src/notes.xml | 29
-rw-r--r--  lib/public_key/src/pubkey_cert.erl | 19
-rw-r--r--  lib/public_key/src/pubkey_cert_records.erl | 398
-rw-r--r--  lib/public_key/src/pubkey_crypto.erl | 11
-rw-r--r--  lib/public_key/src/pubkey_pem.erl | 16
-rw-r--r--  lib/public_key/src/public_key.appup.src | 22
-rw-r--r--  lib/public_key/src/public_key.erl | 8
-rw-r--r--  lib/public_key/test/Makefile | 6
-rw-r--r--  lib/public_key/test/pkey_test.erl | 412
-rw-r--r--  lib/public_key/test/public_key.cover | 2
-rw-r--r--  lib/public_key/test/public_key_SUITE.erl | 236
-rw-r--r--  lib/public_key/vsn.mk | 2
-rw-r--r--  lib/reltool/vsn.mk | 2
-rw-r--r--  lib/runtime_tools/vsn.mk | 2
-rw-r--r--  lib/snmp/doc/src/notes.xml | 70
-rw-r--r--  lib/snmp/doc/src/snmp_app.xml | 2
-rw-r--r--  lib/snmp/doc/src/snmp_config.xml | 2
-rw-r--r--  lib/snmp/doc/src/snmpa.xml | 36
-rw-r--r--  lib/snmp/doc/src/snmpa_mpd.xml | 42
-rw-r--r--  lib/snmp/src/agent/snmpa.erl | 23
-rw-r--r--  lib/snmp/src/agent/snmpa_agent.erl | 230
-rw-r--r--  lib/snmp/src/agent/snmpa_internal.hrl | 12
-rw-r--r--  lib/snmp/src/agent/snmpa_mib.erl | 12
-rw-r--r--  lib/snmp/src/agent/snmpa_mpd.erl | 226
-rw-r--r--  lib/snmp/src/agent/snmpa_trap.erl | 95
-rw-r--r--  lib/snmp/src/agent/snmpa_usm.erl | 74
-rw-r--r--  lib/snmp/src/app/snmp.appup.src | 188
-rw-r--r--  lib/snmp/src/manager/snmpm_mpd.erl | 24
-rw-r--r--  lib/snmp/src/manager/snmpm_server.erl | 26
-rw-r--r--  lib/snmp/test/snmp_agent_test.erl | 49
-rw-r--r--  lib/snmp/test/snmp_agent_test_lib.erl | 12
-rw-r--r--  lib/snmp/test/snmp_manager_test.erl | 29
-rw-r--r--  lib/snmp/test/snmp_manager_user_test.erl | 43
-rw-r--r--  lib/snmp/test/snmp_test_lib.erl | 22
-rw-r--r--  lib/snmp/vsn.mk | 171
-rw-r--r--  lib/ssh/doc/src/notes.xml | 12
-rw-r--r--  lib/ssh/src/ssh_acceptor.erl | 4
-rw-r--r--  lib/ssh/src/ssh_cli.erl | 6
-rwxr-xr-x  lib/ssh/src/ssh_connect.hrl | 3
-rw-r--r--  lib/ssh/src/ssh_connection.erl | 20
-rw-r--r--  lib/ssh/src/ssh_connection_controler.erl | 4
-rw-r--r--  lib/ssh/src/ssh_connection_handler.erl | 7
-rw-r--r--  lib/ssh/src/ssh_connection_manager.erl | 7
-rw-r--r--  lib/ssh/src/ssh_sftpd.erl | 9
-rw-r--r--  lib/ssh/vsn.mk | 4
-rw-r--r--  lib/ssl/doc/src/new_ssl.xml | 33
-rw-r--r--  lib/ssl/doc/src/notes.xml | 61
-rw-r--r--  lib/ssl/src/ssl.appup.src | 2
-rw-r--r--  lib/ssl/src/ssl.erl | 5
-rw-r--r--  lib/ssl/src/ssl_certificate.erl | 42
-rw-r--r--  lib/ssl/src/ssl_cipher.erl | 563
-rw-r--r--  lib/ssl/src/ssl_cipher.hrl | 59
-rw-r--r--  lib/ssl/src/ssl_connection.erl | 349
-rw-r--r--  lib/ssl/src/ssl_handshake.erl | 181
-rw-r--r--  lib/ssl/src/ssl_manager.erl | 37
-rw-r--r--  lib/ssl/src/ssl_record.erl | 82
-rw-r--r--  lib/ssl/src/ssl_session_cache.erl | 16
-rw-r--r--  lib/ssl/src/ssl_session_cache_api.erl | 12
-rw-r--r--  lib/ssl/src/ssl_ssl3.erl | 80
-rw-r--r--  lib/ssl/src/ssl_sup.erl | 33
-rw-r--r--  lib/ssl/src/ssl_tls1.erl | 76
-rw-r--r--  lib/ssl/test/Makefile | 13
-rw-r--r--  lib/ssl/test/erl_make_certs.erl | 412
-rw-r--r--  lib/ssl/test/ssl_basic_SUITE.erl | 800
-rw-r--r--  lib/ssl/test/ssl_test_lib.erl | 68
-rw-r--r--  lib/ssl/test/ssl_to_openssl_SUITE.erl | 265
-rw-r--r--  lib/ssl/vsn.mk | 6
-rw-r--r--  lib/stdlib/doc/src/Makefile | 11
-rw-r--r--  lib/stdlib/doc/src/binary.xml | 729
-rw-r--r--  lib/stdlib/doc/src/gen_event.xml | 64
-rw-r--r--  lib/stdlib/doc/src/gen_fsm.xml | 64
-rw-r--r--  lib/stdlib/doc/src/gen_server.xml | 61
-rw-r--r--  lib/stdlib/doc/src/re.xml | 6
-rw-r--r--  lib/stdlib/doc/src/ref_man.xml | 7
-rw-r--r--  lib/stdlib/src/Makefile | 1
-rw-r--r--  lib/stdlib/src/beam_lib.erl | 2
-rw-r--r--  lib/stdlib/src/binary.erl | 177
-rw-r--r--  lib/stdlib/src/dets.erl | 11
-rw-r--r--  lib/stdlib/src/digraph.erl | 12
-rw-r--r--  lib/stdlib/src/edlin.erl | 5
-rw-r--r--  lib/stdlib/src/epp.erl | 55
-rw-r--r--  lib/stdlib/src/erl_compile.erl | 12
-rw-r--r--  lib/stdlib/src/erl_expand_records.erl | 137
-rw-r--r--  lib/stdlib/src/erl_internal.erl | 151
-rw-r--r--  lib/stdlib/src/erl_lint.erl | 456
-rw-r--r--  lib/stdlib/src/erl_parse.yrl | 49
-rw-r--r--  lib/stdlib/src/erl_pp.erl | 35
-rw-r--r--  lib/stdlib/src/erl_scan.erl | 47
-rw-r--r--  lib/stdlib/src/ets.erl | 9
-rw-r--r--  lib/stdlib/src/file_sorter.erl | 9
-rw-r--r--  lib/stdlib/src/gen_event.erl | 29
-rw-r--r--  lib/stdlib/src/gen_fsm.erl | 38
-rw-r--r--  lib/stdlib/src/gen_server.erl | 38
-rw-r--r--  lib/stdlib/src/io.erl | 11
-rw-r--r--  lib/stdlib/src/io_lib.erl | 2
-rw-r--r--  lib/stdlib/src/io_lib_fread.erl | 12
-rw-r--r--  lib/stdlib/src/lists.erl | 3
-rw-r--r--  lib/stdlib/src/proc_lib.erl | 12
-rw-r--r--  lib/stdlib/src/proplists.erl | 12
-rw-r--r--  lib/stdlib/src/stdlib.app.src | 11
-rw-r--r--  lib/stdlib/src/supervisor.erl | 172
-rw-r--r--  lib/stdlib/test/Makefile | 2
-rw-r--r--  lib/stdlib/test/binary_module_SUITE.erl | 1323
-rw-r--r--  lib/stdlib/test/binref.erl | 588
-rw-r--r--  lib/stdlib/test/dummy1_h.erl | 15
-rw-r--r--  lib/stdlib/test/epp_SUITE.erl | 62
-rw-r--r--  lib/stdlib/test/erl_lint_SUITE.erl | 235
-rw-r--r--  lib/stdlib/test/erl_pp_SUITE.erl | 36
-rw-r--r--  lib/stdlib/test/erl_scan_SUITE.erl | 5
-rw-r--r--  lib/stdlib/test/gen_event_SUITE.erl | 59
-rw-r--r--  lib/stdlib/test/gen_fsm_SUITE.erl | 67
-rw-r--r--  lib/stdlib/test/gen_server_SUITE.erl | 64
-rw-r--r--  lib/stdlib/test/qlc_SUITE.erl | 4
-rw-r--r--  lib/syntax_tools/src/erl_comment_scan.erl | 1
-rw-r--r--  lib/syntax_tools/src/erl_recomment.erl | 9
-rw-r--r--  lib/syntax_tools/src/erl_syntax.erl | 1
-rw-r--r--  lib/syntax_tools/src/erl_syntax_lib.erl | 7
-rw-r--r--  lib/syntax_tools/src/prettypr.erl | 2
-rw-r--r--  lib/syntax_tools/vsn.mk | 2
-rw-r--r--  lib/tools/emacs/Makefile | 1
-rw-r--r--  lib/tools/emacs/README | 9
-rw-r--r--  lib/tools/emacs/erlang-eunit.el | 355
-rw-r--r--  lib/tools/emacs/erlang-flymake.el | 102
-rw-r--r--  lib/tools/emacs/erlang-start.el | 5
-rw-r--r--  lib/tools/emacs/erlang.el | 253
-rw-r--r--  lib/tools/emacs/test.erl.indented | 79
-rw-r--r--  lib/tools/emacs/test.erl.orig | 83
-rw-r--r--  lib/tools/src/xref_base.erl | 174
-rw-r--r--  lib/tools/src/xref_compiler.erl | 133
-rw-r--r--  lib/tools/src/xref_reader.erl | 52
-rw-r--r--  lib/tools/test/xref_SUITE.erl | 334
-rw-r--r--  lib/tools/test/xref_SUITE_data/read/read.erl | 12
-rw-r--r--  lib/tools/vsn.mk | 12
-rw-r--r--  lib/tv/src/tv_io_lib_format.erl | 17
-rw-r--r--  lib/tv/src/tv_pb.erl | 37
-rw-r--r--  lib/tv/src/tv_pg_gridfcns.erl | 59
-rw-r--r--  lib/tv/vsn.mk | 12
-rw-r--r--  lib/wx/doc/src/notes.xml | 17
-rw-r--r--  lib/wx/src/wx_object.erl | 71
-rw-r--r--  lib/wx/vsn.mk | 6
-rw-r--r--  lib/xmerl/doc/src/notes.xml | 8
-rw-r--r--  lib/xmerl/src/xmerl_xsd.erl | 19
-rw-r--r--  lib/xmerl/vsn.mk | 3
411 files changed, 48634 insertions, 9509 deletions
diff --git a/lib/appmon/src/appmon.erl b/lib/appmon/src/appmon.erl
index 6f5d2824d2..2b982cddf0 100644
--- a/lib/appmon/src/appmon.erl
+++ b/lib/appmon/src/appmon.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
-module(appmon).
-behaviour(gen_server).
@@ -838,7 +838,7 @@ draw_apps(GUI, [App | Apps], X, Lx0, N, GSObjs) ->
%% Some necessary data
{_Pid, AppName, _Descr} = App,
Text = atom_to_list(AppName),
- Width = max(8*length(Text)+10, ?wBTN),
+ Width = erlang:max(8*length(Text)+10, ?wBTN),
%% Connect the application to the node label with a line
%% Lx0 = leftmost X coordinate (above previous application button)
@@ -1009,9 +1009,6 @@ bcast(MNodes, Msg) ->
end,
MNodes).
-max(X, Y) when X>Y -> X;
-max(_, Y) -> Y.
-
%% parse_nodes(MNodes) -> NodeApps
%% MNodes -> [#mnode{}]
%% NodeApps -> [{Node, Status, Apps}]
diff --git a/lib/appmon/src/appmon_info.erl b/lib/appmon/src/appmon_info.erl
index 4e36d3a13f..332140f69d 100644
--- a/lib/appmon/src/appmon_info.erl
+++ b/lib/appmon/src/appmon_info.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%----------------------------------------------------------------------
@@ -807,24 +807,21 @@ load(Opts) ->
case get_opt(load_scale, Opts) of
linear ->
- min(trunc(load_range()*(Td/Tot+Q/6)),
+ erlang:min(trunc(load_range()*(Td/Tot+Q/6)),
load_range());
prog ->
- min(trunc(load_range()*prog(Td/Tot+Q/6)),
+ erlang:min(trunc(load_range()*prog(Td/Tot+Q/6)),
load_range())
end;
queue ->
case get_opt(load_scale, Opts) of
linear ->
- min(trunc(load_range()*Q/6), load_range());
+ erlang:min(trunc(load_range()*Q/6), load_range());
prog ->
- min(trunc(load_range()*prog(Q/6)), load_range())
+ erlang:min(trunc(load_range()*prog(Q/6)), load_range())
end
end.
-min(X,Y) when X<Y -> X;
-min(_,Y)->Y.
-
%%
%% T shall be within 0 and 0.9 for this to work correctly
diff --git a/lib/appmon/src/appmon_place.erl b/lib/appmon/src/appmon_place.erl
index 5a6ae6aa48..fe1e909d7c 100644
--- a/lib/appmon/src/appmon_place.erl
+++ b/lib/appmon/src/appmon_place.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%------------------------------------------------------------
%%
@@ -155,10 +155,8 @@ move2(DG, V, LastX, DeltaX) ->
ChLX = foldl(fun(C, LX) -> move2(DG, C, LX, DeltaX) end,
tll(LastX),
appmon_dg:get(out, DG, V)),
- [max(NewX+appmon_dg:get(w, DG, V), hdd(LastX)) | ChLX].
+ [erlang:max(NewX+appmon_dg:get(w, DG, V), hdd(LastX)) | ChLX].
-max(A, B) when A>B -> A;
-max(_, B) -> B.
%%------------------------------------------------------------
%%
diff --git a/lib/appmon/vsn.mk b/lib/appmon/vsn.mk
index 78b95e5688..cfcb5d3eb6 100644
--- a/lib/appmon/vsn.mk
+++ b/lib/appmon/vsn.mk
@@ -16,4 +16,4 @@
#
# %CopyrightEnd%
-APPMON_VSN = 2.1.11
+APPMON_VSN = 2.1.12
diff --git a/lib/asn1/vsn.mk b/lib/asn1/vsn.mk
index 32151a0cac..b7e91e42a0 100644
--- a/lib/asn1/vsn.mk
+++ b/lib/asn1/vsn.mk
@@ -1,7 +1,12 @@
-#next version number to use is 1.6.14 | 1.7 | 2.0
-ASN1_VSN = 1.6.13
+#next version number to use is 1.6.15 | 1.7 | 2.0
+ASN1_VSN = 1.6.14
-TICKETS = OTP-8463
+TICKETS = OTP-8565 \
+ OTP-8516
+
+TICKETS_1.6.14 = \
+ OTP-8565 \
+ OTP-8516
TICKETS_1.6.13 = \
OTP-8463
diff --git a/lib/compiler/doc/src/compile.xml b/lib/compiler/doc/src/compile.xml
index bbd3f1043d..e1f24b602d 100644
--- a/lib/compiler/doc/src/compile.xml
+++ b/lib/compiler/doc/src/compile.xml
@@ -310,6 +310,23 @@
(there will not even be a warning if there is a mismatch).</p>
</item>
+ <tag><c>{no_auto_import,[F/A, ...]}</c></tag>
+ <item>
+      <p>Makes the function <c>F/A</c> no longer be
+ auto-imported from the module <c>erlang</c>, which resolves
+ BIF name clashes. This option has to be used to resolve name
+ clashes with BIFs auto-imported before R14A, if one wants to
+ call the local function with the same name as an
+ auto-imported BIF without module prefix.</p>
+ <note>
+ <p>From R14A and forward, the compiler resolves calls
+ without module prefix to local or imported functions before
+ trying auto-imported BIFs. If the BIF is to be
+ called, use the <c>erlang</c> module prefix in the call, not
+        <c>{no_auto_import,[F/A, ...]}</c>.</p>
+ </note>
+ </item>
+
</taglist>
<p>If warnings are turned on (the <c>report_warnings</c> option
@@ -338,31 +355,35 @@
<tag><c>nowarn_bif_clash</c></tag>
<item>
- <p>By default, there will be a compilation error if a
- module contains an exported function with the same name
- as an auto-imported BIF (such as <c>size/1</c>) AND
- there is a call to it without a qualifying module name.
- The reason is that the BIF will be called, not
- the function in the same module. The recommended way to
- eliminate that warning is to use a call with a module
- name - either <c>erlang</c> to call the BIF or
- <c>?MODULE</c> to call the function in the same module.
- The warning can also be turned off using this option,
- but that is not recommended.</p>
+	  <p>This option has been removed; it will generate a fatal error if used.</p>
+
+ <warning>
+ <p>Beginning with R14A, the compiler no longer calls the
+ auto-imported BIF if the name clashes with a local or
+ explicitly imported function and a call without explicit
+ module name is issued. Instead the local or imported
+	      function is called. Still accepting <c>nowarn_bif_clash</c> would make a
+	      module calling functions that clash with auto-imported BIFs
+	      compile with both the old and new compilers, but with
+	      completely different semantics, which is why the option was removed.</p>
- <p><em>The use of this option is strongly discouraged,
- as code that uses it will probably break in a future
- major release (R14 or R15).</em></p>
+ <p>The use of this option has always been strongly discouraged.
+ From OTP R14A and forward it's an error to use it.</p>
+ <p>To resolve BIF clashes, use explicit module names or the
+ <c>{no_auto_import,[F/A]}</c> compiler directive.</p>
+ </warning>
</item>
<tag><c>{nowarn_bif_clash, FAs}</c></tag>
<item>
- <p>Turns off warnings as <c>nowarn_bif_clash</c> but only
- for the mentioned local functions. <c>FAs</c> is a tuple
- <c>{Name,Arity}</c> or a list of such tuples.</p>
- <p><em>The use of this option is strongly discouraged,
- as code that uses it will probably break in a future
- major release (R14 or R15).</em></p>
+	  <p>This option has been removed; it will generate a fatal error if used.</p>
+
+ <warning>
+ <p>The use of this option has always been strongly discouraged.
+ From OTP R14A and forward it's an error to use it.</p>
+ <p>To resolve BIF clashes, use explicit module names or the
+ <c>{no_auto_import,[F/A]}</c> compiler directive.</p>
+ </warning>
</item>
<tag><c>warn_export_all</c></tag>
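Illustrative sketch (not part of the patch): the hypothetical module below shows the {no_auto_import,[F/A]} directive documented above, under the R14A resolution rules. With auto-import disabled for size/1, the unqualified call reaches the local function, while the BIF remains reachable through an explicit erlang: prefix.

-module(no_auto_import_example).            %% hypothetical example module
-compile({no_auto_import,[size/1]}).
-export([size/1,describe/1]).

%% Local function shadowing the pre-R14A auto-imported BIF size/1.
size(T) when is_tuple(T) -> tuple_size(T);
size(B) when is_binary(B) -> byte_size(B).

describe(Term) ->
    %% Unqualified call -> local size/1; erlang:size/1 still calls the BIF.
    {local,size(Term),bif,erlang:size(Term)}.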
diff --git a/lib/compiler/src/beam_asm.erl b/lib/compiler/src/beam_asm.erl
index 115c228b0a..89d64834cf 100644
--- a/lib/compiler/src/beam_asm.erl
+++ b/lib/compiler/src/beam_asm.erl
@@ -261,7 +261,8 @@ make_op({gc_bif,Bif,Fail,Live,Args,Dest}, Dict) ->
Arity = length(Args),
BifOp = case Arity of
1 -> gc_bif1;
- 2 -> gc_bif2
+ 2 -> gc_bif2;
+ 3 -> gc_bif3
end,
encode_op(BifOp, [Fail,Live,{extfunc,erlang,Bif,Arity}|Args++[Dest]],Dict);
make_op({bs_add=Op,Fail,[Src1,Src2,Unit],Dest}, Dict) ->
diff --git a/lib/compiler/src/beam_block.erl b/lib/compiler/src/beam_block.erl
index 32703b4dd1..9c6f835ab0 100644
--- a/lib/compiler/src/beam_block.erl
+++ b/lib/compiler/src/beam_block.erl
@@ -201,7 +201,6 @@ move_allocates_2(Alloc, [], Acc) ->
alloc_may_pass({set,_,_,{alloc,_,_}}) -> false;
alloc_may_pass({set,_,_,{set_tuple_element,_}}) -> false;
alloc_may_pass({set,_,_,put_list}) -> false;
-alloc_may_pass({set,_,_,{put_tuple,_}}) -> false;
alloc_may_pass({set,_,_,put}) -> false;
alloc_may_pass({set,_,_,_}) -> true.
diff --git a/lib/compiler/src/beam_bool.erl b/lib/compiler/src/beam_bool.erl
index dcc6ad4c7c..d9ea6f5a70 100644
--- a/lib/compiler/src/beam_bool.erl
+++ b/lib/compiler/src/beam_bool.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2004-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2004-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%% Purpose: Optimizes booleans in guards.
@@ -631,10 +631,10 @@ fetch_reg(V, [{I,V}|_]) -> {x,I};
fetch_reg(V, [_|SRs]) -> fetch_reg(V, SRs).
live_regs(Regs) ->
- foldl(fun ({I,_}, _) -> I;
- ([], Max) -> Max end,
- -1, Regs)+1.
-
+ foldl(fun ({I,_}, _) ->
+ I
+ end, -1, Regs)+1.
+
%%%
%%% Convert a block to Static Single Assignment (SSA) form.
@@ -748,8 +748,7 @@ initialized_regs([{bs_context_to_binary,Src}|Is], Regs) ->
initialized_regs([{label,_},{func_info,_,_,Arity}|_], Regs) ->
InitRegs = free_vars_regs(Arity),
add_init_regs(InitRegs, Regs);
-initialized_regs([_|_], Regs) -> Regs;
-initialized_regs([], Regs) -> Regs.
+initialized_regs([_|_], Regs) -> Regs.
add_init_regs([{x,_}=X|T], Regs) ->
add_init_regs(T, ordsets:add_element(X, Regs));
diff --git a/lib/compiler/src/beam_dead.erl b/lib/compiler/src/beam_dead.erl
index 7b4cd814a2..bb93110176 100644
--- a/lib/compiler/src/beam_dead.erl
+++ b/lib/compiler/src/beam_dead.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2002-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2002-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -281,12 +281,12 @@ forward([{test,is_eq_exact,_,[Dst,Src]}=I,{move,Src,Dst}|Is], D, Lc, Acc) ->
forward([I|Is], D, Lc, Acc);
forward([{test,is_nil,_,[Dst]}=I,{move,nil,Dst}|Is], D, Lc, Acc) ->
forward([I|Is], D, Lc, Acc);
-forward([{test,is_eq_exact,_,[_,{atom,_}]}=I|Is], D, Lc, [{label,_}|_]=Acc) ->
+forward([{test,is_eq_exact,_,_}=I|Is], D, Lc, Acc) ->
case Is of
[{label,_}|_] -> forward(Is, D, Lc, [I|Acc]);
_ -> forward(Is, D, Lc+1, [{label,Lc},I|Acc])
end;
-forward([{test,is_ne_exact,_,[_,{atom,_}]}=I|Is], D, Lc, [{label,_}|_]=Acc) ->
+forward([{test,is_ne_exact,_,_}=I|Is], D, Lc, Acc) ->
case Is of
[{label,_}|_] -> forward(Is, D, Lc, [I|Acc]);
_ -> forward(Is, D, Lc+1, [{label,Lc},I|Acc])
@@ -371,10 +371,10 @@ backward([{test,bs_start_match2,{f,To0},Live,[Src|_]=Info,Dst}|Is], D, Acc) ->
To = shortcut_bs_start_match(To0, Src, D),
I = {test,bs_start_match2,{f,To},Live,Info,Dst},
backward(Is, D, [I|Acc]);
-backward([{test,is_eq_exact=Op,{f,To0},[Reg,{atom,Val}]=Ops}|Is], D, Acc) ->
+backward([{test,is_eq_exact,{f,To0},[Reg,{atom,Val}]=Ops}|Is], D, Acc) ->
To1 = shortcut_bs_test(To0, Is, D),
To = shortcut_fail_label(To1, Reg, Val, D),
- I = {test,Op,{f,To},Ops},
+ I = combine_eqs(To, Ops, D, Acc),
backward(Is, D, [I|Acc]);
backward([{test,Op,{f,To0},Ops0}|Is], D, Acc) ->
To1 = shortcut_bs_test(To0, Is, D),
@@ -394,7 +394,10 @@ backward([{test,Op,{f,To0},Ops0}|Is], D, Acc) ->
_Code ->
To2
end,
- I = {test,Op,{f,To},Ops0},
+ I = case Op of
+ is_eq_exact -> combine_eqs(To, Ops0, D, Acc);
+ _ -> {test,Op,{f,To},Ops0}
+ end,
backward(Is, D, [I|Acc]);
backward([{test,Op,{f,To0},Live,Ops0,Dst}|Is], D, Acc) ->
To1 = shortcut_bs_test(To0, Is, D),
@@ -519,6 +522,41 @@ bif_to_test(Name, Args, Fail) ->
not_possible() -> throw(not_possible).
+%% combine_eqs(To, Operands, Acc) -> Instruction.
+%% Combine two is_eq_exact instructions or (an is_eq_exact
+%% instruction and a select_val instruction) to a select_val
+%% instruction if possible.
+%%
+%% Example:
+%%
+%% is_eq_exact F1 Reg Lit1 select_val Reg F2 [ Lit1 L1
+%% L1: . Lit2 L2 ]
+%% .
+%% . ==>
+%% .
+%% F1: is_eq_exact F2 Reg Lit2 F1: is_eq_exact F2 Reg Lit2
+%% L2: .... L2:
+%%
+combine_eqs(To, [Reg,{Type,_}=Lit1]=Ops, D, [{label,L1}|_])
+ when Type =:= atom; Type =:= integer ->
+ case beam_utils:code_at(To, D) of
+ [{test,is_eq_exact,{f,F2},[Reg,{Type,_}=Lit2]},
+ {label,L2}|_] when Lit1 =/= Lit2 ->
+ {select_val,Reg,{f,F2},{list,[Lit1,{f,L1},Lit2,{f,L2}]}};
+ [{select_val,Reg,{f,F2},{list,[{Type,_}|_]=List0}}|_] ->
+ List = remove_from_list(Lit1, List0),
+ {select_val,Reg,{f,F2},{list,[Lit1,{f,L1}|List]}};
+ _Is ->
+ {test,is_eq_exact,{f,To},Ops}
+ end;
+combine_eqs(To, Ops, _D, _Acc) ->
+ {test,is_eq_exact,{f,To},Ops}.
+
+remove_from_list(Lit, [Lit,{f,_}|T]) ->
+ T;
+remove_from_list(Lit, [Val,{f,_}=Fail|T]) ->
+ [Val,Fail|remove_from_list(Lit, T)];
+remove_from_list(_, []) -> [].
%% shortcut_bs_test(TargetLabel, [Instruction], D) -> TargetLabel'
%% Try to shortcut the failure label for a bit syntax matching.
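Illustrative sketch (not part of the patch): combine_eqs above merges chains of is_eq_exact tests on the same register into a single select_val dispatch. Guard sequences like the hypothetical fragment below can leave such chains behind, with each clause testing the same argument against a distinct literal and the next clause as its failure label.

-module(eq_chain_example).                  %% hypothetical example module
-export([classify/1]).

%% Each guard compiles to an is_eq_exact test against an integer literal;
%% beam_dead can now combine the resulting chain into one select_val.
classify(X) when X =:= 1 -> one;
classify(X) when X =:= 2 -> two;
classify(X) when X =:= 3 -> three;
classify(_) -> other.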
diff --git a/lib/compiler/src/beam_disasm.erl b/lib/compiler/src/beam_disasm.erl
index 9571f817e3..017ca129b0 100644
--- a/lib/compiler/src/beam_disasm.erl
+++ b/lib/compiler/src/beam_disasm.erl
@@ -621,8 +621,7 @@ resolve_names(Fun, Imports, Str, Lbls, Lambdas, Literals, M) ->
%%
%% New make_fun2/4 instruction added in August 2001 (R8).
-%% New put_literal/2 instruction added in Feb 2006 R11B-4.
-%% We handle them specially here to avoid adding an argument to
+%% We handle it specially here to avoid adding an argument to
%% the clause for every instruction.
%%
@@ -631,8 +630,6 @@ resolve_inst({make_fun2,Args}, _, _, _, Lambdas, _, M) ->
{OldIndex,{F,A,_Lbl,_Index,NumFree,OldUniq}} =
lists:keyfind(OldIndex, 1, Lambdas),
{make_fun2,{M,F,A},OldIndex,OldUniq,NumFree};
-resolve_inst({put_literal,[{u,Index},Dst]},_,_,_,_,Literals,_) ->
- {put_literal,{literal,gb_trees:get(Index, Literals)},Dst};
resolve_inst(Instr, Imports, Str, Lbls, _Lambdas, _Literals, _M) ->
%% io:format(?MODULE_STRING":resolve_inst ~p.~n", [Instr]),
resolve_inst(Instr, Imports, Str, Lbls).
@@ -1004,13 +1001,17 @@ resolve_inst({gc_bif2,Args},Imports,_,_) ->
[F,Live,Bif,A1,A2,Reg] = resolve_args(Args),
{extfunc,_Mod,BifName,_Arity} = lookup(Bif+1,Imports),
{gc_bif,BifName,F,Live,[A1,A2],Reg};
+%%
+%% New instruction in R14, gc_bif with 3 arguments
+%%
+resolve_inst({gc_bif3,Args},Imports,_,_) ->
+ [F,Live,Bif,A1,A2,A3,Reg] = resolve_args(Args),
+ {extfunc,_Mod,BifName,_Arity} = lookup(Bif+1,Imports),
+ {gc_bif,BifName,F,Live,[A1,A2,A3],Reg};
%%
%% New instructions for creating non-byte aligned binaries.
%%
-resolve_inst({bs_bits_to_bytes2,[_Arg2,_Arg3]=Args},_,_,_) ->
- [A2,A3] = resolve_args(Args),
- {bs_bits_to_bytes2,A2,A3};
resolve_inst({bs_final2,[X,Y]},_,_,_) ->
{bs_final2,X,Y};
diff --git a/lib/compiler/src/beam_peep.erl b/lib/compiler/src/beam_peep.erl
index d03ac4b1f4..f39fc50b95 100644
--- a/lib/compiler/src/beam_peep.erl
+++ b/lib/compiler/src/beam_peep.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2008-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -64,22 +64,7 @@ function({function,Name,Arity,CLabel,Is0}) ->
%% InEncoding =:= latin1, OutEncoding =:= unicode;
%% InEncoding =:= latin1, OutEncoding =:= utf8 ->
%%
-%% (2) Code like
-%%
-%% is_ne_exact Fail Reg Literal1
-%% is_ne_exact Fail Reg Literal2
-%% is_ne_exact Fail Reg Literal3
-%% is_eq_exact UltimateFail Reg Literal4
-%% Fail: ....
-%%
-%% can be rewritten to
-%%
-%% select_val Reg UltimateFail [ Literal1 Fail
-%% Literal2 Fail
-%% Literal3 Fail
-%% Literal4 Fail ]
-%%
-%% (3) A select_val/4 instruction that only verifies that
+%% (2) A select_val/4 instruction that only verifies that
%% its argument is either 'true' or 'false' can be
%% be replaced with an is_boolean/2 instruction. That is:
%%
@@ -132,7 +117,7 @@ peep([{test,Op,_,Ops}=I|Is], SeenTests0, Acc) ->
false ->
%% Remember that we have seen this test.
SeenTests = gb_sets:insert(Test, SeenTests0),
- make_select_val(I, Is, SeenTests, Acc)
+ peep(Is, SeenTests, [I|Acc])
end
end;
peep([{select_val,Src,Fail,
@@ -151,33 +136,6 @@ peep([I|Is], _, Acc) ->
peep(Is, gb_sets:empty(), [I|Acc]);
peep([], _, Acc) -> reverse(Acc).
-make_select_val({test,is_ne_exact,{f,Fail},[Val,Lit]}=I0,
- Is0, SeenTests, Acc) ->
- try
- Type = case Lit of
- {atom,_} -> atom;
- {integer,_} -> integer;
- _ -> throw(impossible)
- end,
- {I,Is} = make_select_val_1(Is0, Fail, Val, Type, [Lit,{f,Fail}]),
- peep([I|Is], SeenTests, Acc)
- catch
- impossible ->
- peep(Is0, SeenTests, [I0|Acc])
- end;
-make_select_val(I, Is, SeenTests, Acc) ->
- peep(Is, SeenTests, [I|Acc]).
-
-make_select_val_1([{test,is_ne_exact,{f,Fail},[Val,{Type,_}=Lit]}|Is],
- Fail, Val, Type, Acc) ->
- make_select_val_1(Is, Fail, Val, Type, [Lit,{f,Fail}|Acc]);
-make_select_val_1([{test,is_eq_exact,{f,UltimateFail},[Val,{Type,_}=Lit]} |
- [{label,Fail}|_]=Is], Fail, Val, Type, Acc) ->
- Choices = [Lit,{f,Fail}|Acc],
- I = {select_val,Val,{f,UltimateFail},{list,Choices}},
- {I,Is};
-make_select_val_1(_Is, _Fail, _Val, _Type, _Acc) -> throw(impossible).
-
kill_seen(Dst, Seen0) ->
gb_sets:from_ordset(kill_seen_1(gb_sets:to_list(Seen0), Dst)).
@@ -187,5 +145,3 @@ kill_seen_1([{_,Ops}=Test|T], Dst) ->
false -> [Test|kill_seen_1(T, Dst)]
end;
kill_seen_1([], _) -> [].
-
-
diff --git a/lib/compiler/src/beam_type.erl b/lib/compiler/src/beam_type.erl
index 3729ccb0da..f83f73b224 100644
--- a/lib/compiler/src/beam_type.erl
+++ b/lib/compiler/src/beam_type.erl
@@ -76,9 +76,6 @@ simplify_basic_1([{set,[D],[{integer,Index},Reg],{bif,element,_}}=I0|Is], Ts0, A
end,
Ts = update(I, Ts0),
simplify_basic_1(Is, Ts, [I|Acc]);
-simplify_basic_1([{set,[_],[_],{bif,_,{f,0}}}=I|Is], Ts0, Acc) ->
- Ts = update(I, Ts0),
- simplify_basic_1(Is, Ts, [I|Acc]);
simplify_basic_1([{set,[D],[TupleReg],{get_tuple_element,0}}=I|Is0], Ts0, Acc) ->
case tdb_find(TupleReg, Ts0) of
{tuple,_,[Contents]} ->
@@ -118,7 +115,6 @@ simplify_basic_1([{test,is_record,_,[R,{atom,_}=Tag,{integer,Arity}]}=I|Is], Ts0
Ts = update(I, Ts0),
simplify_basic_1(Is, Ts, [I|Acc])
end;
-
simplify_basic_1([I|Is], Ts0, Acc) ->
Ts = update(I, Ts0),
simplify_basic_1(Is, Ts, [I|Acc]);
diff --git a/lib/compiler/src/beam_utils.erl b/lib/compiler/src/beam_utils.erl
index ac249e6672..761d4ffec0 100644
--- a/lib/compiler/src/beam_utils.erl
+++ b/lib/compiler/src/beam_utils.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2007-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%% Purpose : Common utilities used by several optimization passes.
@@ -424,12 +424,6 @@ check_liveness(R, [{bs_add,{f,0},Ss,D}|Is], St) ->
false when R =:= D -> {killed,St};
false -> check_liveness(R, Is, St)
end;
-check_liveness(R, [{bs_bits_to_bytes2,Src,Dst}|Is], St) ->
- case R of
- Src -> {used,St};
- Dst -> {killed,St};
- _ -> check_liveness(R, Is, St)
- end;
check_liveness(R, [{bs_put_binary,{f,0},Sz,_,_,Src}|Is], St) ->
case member(R, [Sz,Src]) of
true -> {used,St};
diff --git a/lib/compiler/src/beam_validator.erl b/lib/compiler/src/beam_validator.erl
index 34065cfdce..f3a2b01e04 100644
--- a/lib/compiler/src/beam_validator.erl
+++ b/lib/compiler/src/beam_validator.erl
@@ -18,6 +18,8 @@
-module(beam_validator).
+-compile({no_auto_import,[min/2]}).
+
-export([file/1, files/1]).
%% Interface for compiler.
@@ -757,9 +759,6 @@ valfun_4({bs_utf8_size,{f,Fail},A,Dst}, Vst) ->
valfun_4({bs_utf16_size,{f,Fail},A,Dst}, Vst) ->
assert_term(A, Vst),
set_type_reg({integer,[]}, Dst, branch_state(Fail, Vst));
-valfun_4({bs_bits_to_bytes2,Src,Dst}, Vst) ->
- assert_term(Src, Vst),
- set_type_reg({integer,[]}, Dst, Vst);
valfun_4({bs_bits_to_bytes,{f,Fail},Src,Dst}, Vst) ->
assert_term(Src, Vst),
set_type_reg({integer,[]}, Dst, branch_state(Fail, Vst));
diff --git a/lib/compiler/src/cerl.erl b/lib/compiler/src/cerl.erl
index 74fc0878cf..d1fd9d40e2 100644
--- a/lib/compiler/src/cerl.erl
+++ b/lib/compiler/src/cerl.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2001-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2001-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%% =====================================================================
@@ -122,6 +122,9 @@
bitstr_bitsize/1, bitstr_unit/1, bitstr_type/1,
bitstr_flags/1]).
+-export_type([c_binary/0, c_call/0, c_clause/0, c_cons/0, c_fun/0, c_literal/0,
+ c_module/0, c_tuple/0, c_values/0, c_var/0, cerl/0, var_name/0]).
+
%%
%% needed by the include file below -- do not move
%%
diff --git a/lib/compiler/src/cerl_inline.erl b/lib/compiler/src/cerl_inline.erl
index 6d7eca0113..c15103999f 100644
--- a/lib/compiler/src/cerl_inline.erl
+++ b/lib/compiler/src/cerl_inline.erl
@@ -65,7 +65,6 @@
try_evars/1, try_handler/1, tuple_es/1, tuple_arity/1,
type/1, values_es/1, var_name/1]).
--import(erlang, [max/2]).
-import(lists, [foldl/3, foldr/3, mapfoldl/3, reverse/1]).
%%
@@ -201,9 +200,9 @@ start(Reply, Tree, Ctxt, Opts) ->
false ->
ok
end,
- Size = max(1, proplists:get_value(inline_size, Opts)),
- Effort = max(1, proplists:get_value(inline_effort, Opts)),
- Unroll = max(1, proplists:get_value(inline_unroll, Opts)),
+ Size = erlang:max(1, proplists:get_value(inline_size, Opts)),
+ Effort = erlang:max(1, proplists:get_value(inline_effort, Opts)),
+ Unroll = erlang:max(1, proplists:get_value(inline_unroll, Opts)),
case proplists:get_bool(verbose, Opts) of
true ->
io:fwrite("Inlining: inline_size=~w inline_effort=~w\n",
diff --git a/lib/compiler/src/cerl_trees.erl b/lib/compiler/src/cerl_trees.erl
index 7a2057713e..1e3755025f 100644
--- a/lib/compiler/src/cerl_trees.erl
+++ b/lib/compiler/src/cerl_trees.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2001-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2001-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%% @doc Basic functions on Core Erlang abstract syntax trees.
@@ -73,14 +73,12 @@ depth(T) ->
[] ->
0;
Gs ->
- 1 + lists:foldl(fun (G, A) -> max(depth_1(G), A) end, 0, Gs)
+ 1 + lists:foldl(fun (G, A) -> erlang:max(depth_1(G), A) end, 0, Gs)
end.
depth_1(Ts) ->
- lists:foldl(fun (T, A) -> max(depth(T), A) end, 0, Ts).
+ lists:foldl(fun (T, A) -> erlang:max(depth(T), A) end, 0, Ts).
-max(X, Y) when X > Y -> X;
-max(_, Y) -> Y.
%% @spec size(Tree::cerl()) -> integer()
diff --git a/lib/compiler/src/compile.erl b/lib/compiler/src/compile.erl
index 3f250a6d5a..4642fb68b3 100644
--- a/lib/compiler/src/compile.erl
+++ b/lib/compiler/src/compile.erl
@@ -29,6 +29,8 @@
%% Erlc interface.
-export([compile/3,compile_beam/3,compile_asm/3,compile_core/3]).
+-export_type([option/0]).
+
-include("erl_compile.hrl").
-include("core_parse.hrl").
@@ -294,15 +296,6 @@ fold_comp([{Name,Pass}|Ps], Run, St0) ->
end;
fold_comp([], _Run, St) -> {ok,St}.
-os_process_size() ->
- case os:type() of
- {unix, sunos} ->
- Size = os:cmd("ps -o vsz -p " ++ os:getpid() ++ " | tail -1"),
- list_to_integer(lib:nonl(Size));
- _ ->
- 0
- end.
-
run_tc({Name,Fun}, St) ->
Before0 = statistics(runtime),
Val = (catch Fun(St)),
@@ -311,9 +304,8 @@ run_tc({Name,Fun}, St) ->
{After_c, _} = After0,
Mem0 = erts_debug:flat_size(Val)*erlang:system_info(wordsize),
Mem = lists:flatten(io_lib:format("~.1f kB", [Mem0/1024])),
- Sz = lists:flatten(io_lib:format("~.1f MB", [os_process_size()/1024])),
- io:format(" ~-30s: ~10.2f s ~12s ~10s\n",
- [Name,(After_c-Before_c) / 1000,Mem,Sz]),
+ io:format(" ~-30s: ~10.2f s ~12s\n",
+ [Name,(After_c-Before_c) / 1000,Mem]),
Val.
comp_ret_ok(#compile{code=Code,warnings=Warn0,module=Mod,options=Opts}=St) ->
@@ -910,13 +902,8 @@ expand_module(#compile{code=Code,options=Opts0}=St0) ->
{ok,St0#compile{module=Mod,options=Opts,code={Mod,Exp,Forms}}}.
core_module(#compile{code=Code0,options=Opts}=St) ->
- case v3_core:module(Code0, Opts) of
- {ok,Code,Ws} ->
- {ok,St#compile{code=Code,warnings=St#compile.warnings ++ Ws}};
- {error,Es,Ws} ->
- {error,St#compile{warnings=St#compile.warnings ++ Ws,
- errors=St#compile.errors ++ Es}}
- end.
+ {ok,Code,Ws} = v3_core:module(Code0, Opts),
+ {ok,St#compile{code=Code,warnings=St#compile.warnings ++ Ws}}.
core_fold_module(#compile{code=Code0,options=Opts,warnings=Warns}=St) ->
{ok,Code,Ws} = sys_core_fold:module(Code0, Opts),
diff --git a/lib/compiler/src/erl_bifs.erl b/lib/compiler/src/erl_bifs.erl
index e87bb276de..f8128702dd 100644
--- a/lib/compiler/src/erl_bifs.erl
+++ b/lib/compiler/src/erl_bifs.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2001-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2001-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%% Purpose: Information about the Erlang built-in functions.
@@ -65,6 +65,8 @@ is_pure(erlang, 'xor', 2) -> true;
is_pure(erlang, abs, 1) -> true;
is_pure(erlang, atom_to_binary, 2) -> true;
is_pure(erlang, atom_to_list, 1) -> true;
+is_pure(erlang, binary_part, 2) -> true;
+is_pure(erlang, binary_part, 3) -> true;
is_pure(erlang, binary_to_atom, 2) -> true;
is_pure(erlang, binary_to_list, 1) -> true;
is_pure(erlang, binary_to_list, 3) -> true;
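Illustrative sketch (not part of the patch): erl_bifs:is_pure/3 is consulted when deciding whether a call with only literal arguments may be evaluated at compile time, so marking binary_part/2,3 as pure allows constant folding of calls such as the hypothetical one below.

-module(pure_bif_example).                  %% hypothetical example module
-export([literal_part/0]).

%% All arguments are literals, so the call is a candidate for compile-time
%% evaluation; the body may be folded to the literal <<"lo">>.
literal_part() ->
    binary_part(<<"hello">>, 3, 2).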
diff --git a/lib/compiler/src/genop.tab b/lib/compiler/src/genop.tab
index b57508ea8e..63527bda8f 100644
--- a/lib/compiler/src/genop.tab
+++ b/lib/compiler/src/genop.tab
@@ -208,7 +208,7 @@ BEAM_FORMAT_NUMBER=0
# New instructions in R10B.
109: bs_init2/6
-110: bs_bits_to_bytes/3
+110: -bs_bits_to_bytes/3
111: bs_add/5
112: apply/1
113: apply_last/2
@@ -279,3 +279,4 @@ BEAM_FORMAT_NUMBER=0
150: recv_mark/1
151: recv_set/1
+152: gc_bif3/7
diff --git a/lib/compiler/src/rec_env.erl b/lib/compiler/src/rec_env.erl
index 9b73e08ad8..31a1f8b0b7 100644
--- a/lib/compiler/src/rec_env.erl
+++ b/lib/compiler/src/rec_env.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2001-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2001-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%% @author Richard Carlsson <[email protected]>
@@ -32,7 +32,7 @@
get/2, is_defined/2, is_empty/1, keys/1, lookup/2, new_key/1,
new_key/2, new_keys/2, new_keys/3, size/1, to_list/1]).
--import(erlang, [max/2]).
+-export_type([environment/0]).
-ifdef(DEBUG).
-export([test/1, test_custom/1, test_custom/2]).
@@ -586,7 +586,7 @@ new_key(N, R, _T, F, Env) ->
new_key(generate(N, R1), R1, 0, F, Env).
start_range(Env) ->
- max(env_size(Env) * ?START_RANGE_FACTOR, ?MINIMUM_RANGE).
+ erlang:max(env_size(Env) * ?START_RANGE_FACTOR, ?MINIMUM_RANGE).
%% The previous key might or might not be used to compute the next key
%% to be tried. It is currently not used.
diff --git a/lib/compiler/src/sys_core_fold.erl b/lib/compiler/src/sys_core_fold.erl
index 6202f07479..96015fbe58 100644
--- a/lib/compiler/src/sys_core_fold.erl
+++ b/lib/compiler/src/sys_core_fold.erl
@@ -1038,6 +1038,8 @@ fold_non_lit_args(Call, lists, append, [Arg1,Arg2], _) ->
eval_append(Call, Arg1, Arg2);
fold_non_lit_args(Call, erlang, setelement, [Arg1,Arg2,Arg3], _) ->
eval_setelement(Call, Arg1, Arg2, Arg3);
+fold_non_lit_args(Call, erlang, is_record, [Arg1,Arg2,Arg3], Sub) ->
+ eval_is_record(Call, Arg1, Arg2, Arg3, Sub);
fold_non_lit_args(Call, erlang, N, Args, Sub) ->
NumArgs = length(Args),
case erl_internal:comp_op(N, NumArgs) of
@@ -1194,19 +1196,22 @@ eval_element(Call, #c_literal{val=Pos}, #c_tuple{es=Es}, _Types) when is_integer
true ->
eval_failure(Call, badarg)
end;
-%% eval_element(Call, #c_literal{val=Pos}, #c_var{name=V}, Types)
-%% when is_integer(Pos) ->
-%% case orddict:find(V, Types#sub.t) of
-%% {ok,#c_tuple{es=Elements}} ->
-%% if
-%% 1 =< Pos, Pos =< length(Elements) ->
-%% lists:nth(Pos, Elements);
-%% true ->
-%% eval_failure(Call, badarg)
-%% end;
-%% error ->
-%% Call
-%% end;
+eval_element(Call, #c_literal{val=Pos}, #c_var{name=V}, Types)
+ when is_integer(Pos) ->
+ case orddict:find(V, Types#sub.t) of
+ {ok,#c_tuple{es=Elements}} ->
+ if
+ 1 =< Pos, Pos =< length(Elements) ->
+ case lists:nth(Pos, Elements) of
+ #c_alias{var=Alias} -> Alias;
+ Res -> Res
+ end;
+ true ->
+ eval_failure(Call, badarg)
+ end;
+ error ->
+ Call
+ end;
eval_element(Call, Pos, Tuple, _Types) ->
case is_not_integer(Pos) orelse is_not_tuple(Tuple) of
true ->
@@ -1215,6 +1220,20 @@ eval_element(Call, Pos, Tuple, _Types) ->
Call
end.
+%% eval_is_record(Call, Var, Tag, Size, Types) -> Val.
+%% Evaluates is_record/3 using type information.
+%%
+eval_is_record(Call, #c_var{name=V}, #c_literal{val=NeededTag}=Lit,
+ #c_literal{val=Size}, Types) ->
+ case orddict:find(V, Types#sub.t) of
+ {ok,#c_tuple{es=[#c_literal{val=Tag}|_]=Es}} ->
+ Lit#c_literal{val=Tag =:= NeededTag andalso
+ length(Es) =:= Size};
+ _ ->
+ Call
+ end;
+eval_is_record(Call, _, _, _, _) -> Call.
+
%% is_not_integer(Core) -> true | false.
%% Returns true if Core is definitely not an integer.
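Illustrative sketch (not part of the patch): eval_element/4 and eval_is_record/5 above both use the tuple types that sys_core_fold records for bound variables. In the hypothetical fragment below, Shape is known inside the first clause to be a 3-tuple tagged 'rect', so both calls are candidates for folding to literals even though W and H are unknown.

-module(record_fold_example).               %% hypothetical example module
-export([area/1]).

area(Shape) ->
    case Shape of
        {rect,W,H} ->
            %% Shape's tuple shape is known here, so is_record/3 can fold
            %% to 'true' and element/2 to the tag literal 'rect'.
            true = is_record(Shape, rect, 3),
            rect = element(1, Shape),
            W*H;
        _ ->
            0
    end.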
diff --git a/lib/compiler/src/sys_pre_expand.erl b/lib/compiler/src/sys_pre_expand.erl
index f80d03dfac..480954adac 100644
--- a/lib/compiler/src/sys_pre_expand.erl
+++ b/lib/compiler/src/sys_pre_expand.erl
@@ -403,16 +403,21 @@ expr({'fun',Line,Body}, St) ->
expr({call,Line,{atom,La,N}=Atom,As0}, St0) ->
{As,St1} = expr_list(As0, St0),
Ar = length(As),
- case erl_internal:bif(N, Ar) of
- true ->
- {{call,Line,{remote,La,{atom,La,erlang},Atom},As},St1};
- false ->
- case imported(N, Ar, St1) of
- {yes,Mod} ->
- {{call,Line,{remote,La,{atom,La,Mod},Atom},As},St1};
- no ->
- {{call,Line,Atom,As},St1}
- end
+ case defined(N,Ar,St1) of
+ true ->
+ {{call,Line,Atom,As},St1};
+ _ ->
+ case imported(N, Ar, St1) of
+ {yes,Mod} ->
+ {{call,Line,{remote,La,{atom,La,Mod},Atom},As},St1};
+ no ->
+ case erl_internal:bif(N, Ar) of
+ true ->
+ {{call,Line,{remote,La,{atom,La,erlang},Atom},As},St1};
+ false -> %% This should have been handled by erl_lint
+ {{call,Line,Atom,As},St1}
+ end
+ end
end;
expr({call,Line,{record_field,_,_,_}=M,As0}, St0) ->
expr({call,Line,expand_package(M, St0),As0}, St0);
@@ -685,3 +690,6 @@ imported(F, A, St) ->
{ok,Mod} -> {yes,Mod};
error -> no
end.
+
+defined(F, A, St) ->
+ ordsets:is_element({F,A}, St#expand.defined).
diff --git a/lib/compiler/src/v3_core.erl b/lib/compiler/src/v3_core.erl
index b2f0ac75c7..f6bb45787c 100644
--- a/lib/compiler/src/v3_core.erl
+++ b/lib/compiler/src/v3_core.erl
@@ -122,7 +122,6 @@
| iclause() | ifun() | iletrec() | imatch() | iprimop()
| iprotect() | ireceive1() | ireceive2() | iset() | itry().
--type error() :: {file:filename(), [{integer(), module(), term()}]}.
-type warning() :: {file:filename(), [{integer(), module(), term()}]}.
-record(core, {vcount=0 :: non_neg_integer(), %Variable counter
@@ -130,7 +129,6 @@
in_guard=false :: boolean(), %In guard or not.
wanted=true :: boolean(), %Result wanted or not.
opts :: [compile:option()], %Options.
- es=[] :: [error()], %Errors.
ws=[] :: [warning()], %Warnings.
file=[{file,""}]}). %File
@@ -141,46 +139,41 @@
| {attribute, integer(), attribute(), _}.
-spec module({module(), [fa()], [form()]}, [compile:option()]) ->
- {'ok',cerl:c_module(),[warning()]} | {'error',[error()],[warning()]}.
+ {'ok',cerl:c_module(),[warning()]}.
module({Mod,Exp,Forms}, Opts) ->
Cexp = map(fun ({_N,_A} = NA) -> #c_var{name=NA} end, Exp),
- {Kfs0,As0,Es,Ws,_File} = foldl(fun (F, Acc) ->
- form(F, Acc, Opts)
- end, {[],[],[],[],[]}, Forms),
+ {Kfs0,As0,Ws,_File} = foldl(fun (F, Acc) ->
+ form(F, Acc, Opts)
+ end, {[],[],[],[]}, Forms),
Kfs = reverse(Kfs0),
As = reverse(As0),
- case Es of
- [] ->
- {ok,#c_module{name=#c_literal{val=Mod},exports=Cexp,attrs=As,defs=Kfs},Ws};
- _ ->
- {error,Es,Ws}
- end.
+ {ok,#c_module{name=#c_literal{val=Mod},exports=Cexp,attrs=As,defs=Kfs},Ws}.
-form({function,_,_,_,_}=F0, {Fs,As,Es0,Ws0,File}, Opts) ->
- {F,Es,Ws} = function(F0, Es0, Ws0, File, Opts),
- {[F|Fs],As,Es,Ws,File};
-form({attribute,_,file,{File,_Line}}, {Fs,As,Es,Ws,_}, _Opts) ->
- {Fs,As,Es,Ws,File};
-form({attribute,_,_,_}=F, {Fs,As,Es,Ws,File}, _Opts) ->
- {Fs,[attribute(F)|As],Es,Ws,File}.
+form({function,_,_,_,_}=F0, {Fs,As,Ws0,File}, Opts) ->
+ {F,Ws} = function(F0, Ws0, File, Opts),
+ {[F|Fs],As,Ws,File};
+form({attribute,_,file,{File,_Line}}, {Fs,As,Ws,_}, _Opts) ->
+ {Fs,As,Ws,File};
+form({attribute,_,_,_}=F, {Fs,As,Ws,File}, _Opts) ->
+ {Fs,[attribute(F)|As],Ws,File}.
attribute({attribute,Line,Name,Val}) ->
{#c_literal{val=Name, anno=[Line]}, #c_literal{val=Val, anno=[Line]}}.
-function({function,_,Name,Arity,Cs0}, Es0, Ws0, File, Opts) ->
+function({function,_,Name,Arity,Cs0}, Ws0, File, Opts) ->
%%ok = io:fwrite("~p - ", [{Name,Arity}]),
- St0 = #core{vcount=0,opts=Opts,es=Es0,ws=Ws0,file=[{file,File}]},
+ St0 = #core{vcount=0,opts=Opts,ws=Ws0,file=[{file,File}]},
{B0,St1} = body(Cs0, Name, Arity, St0),
%%ok = io:fwrite("1", []),
%%ok = io:fwrite("~w:~p~n", [?LINE,B0]),
{B1,St2} = ubody(B0, St1),
%%ok = io:fwrite("2", []),
%%ok = io:fwrite("~w:~p~n", [?LINE,B1]),
- {B2,#core{es=Es,ws=Ws}} = cbody(B1, St2),
+ {B2,#core{ws=Ws}} = cbody(B1, St2),
%%ok = io:fwrite("3~n", []),
%%ok = io:fwrite("~w:~p~n", [?LINE,B2]),
- {{#c_var{name={Name,Arity}},B2},Es,Ws}.
+ {{#c_var{name={Name,Arity}},B2},Ws}.
body(Cs0, Name, Arity, St0) ->
Anno = lineno_anno(element(2, hd(Cs0)), St0),
@@ -2096,20 +2089,12 @@ is_simple(#c_literal{}) -> true;
is_simple(#c_cons{hd=H,tl=T}) ->
is_simple(H) andalso is_simple(T);
is_simple(#c_tuple{es=Es}) -> is_simple_list(Es);
-is_simple(#c_binary{segments=Es}) -> is_simp_bin(Es);
is_simple(_) -> false.
-spec is_simple_list([cerl:cerl()]) -> boolean().
is_simple_list(Es) -> lists:all(fun is_simple/1, Es).
--spec is_simp_bin([cerl:cerl()]) -> boolean().
-
-is_simp_bin(Es) ->
- lists:all(fun (#c_bitstr{val=E,size=S}) ->
- is_simple(E) andalso is_simple(S)
- end, Es).
-
%%%
%%% Handling of warnings.
%%%
diff --git a/lib/compiler/src/v3_life.erl b/lib/compiler/src/v3_life.erl
index 9fda37530b..a7a4d4dc91 100644
--- a/lib/compiler/src/v3_life.erl
+++ b/lib/compiler/src/v3_life.erl
@@ -361,8 +361,6 @@ match_fail(#k_literal{anno=Anno,val={Atom,Val}}, I, A) when is_atom(Atom) ->
match_fail(#k_tuple{anno=Anno,es=[#k_atom{val=Atom},#k_literal{val=Val}]}, I, A);
match_fail(#k_literal{anno=Anno,val={Atom}}, I, A) when is_atom(Atom) ->
match_fail(#k_tuple{anno=Anno,es=[#k_atom{val=Atom}]}, I, A);
-match_fail(#k_literal{anno=Anno,val=Atom}, I, A) when is_atom(Atom) ->
- match_fail(#k_atom{anno=Anno,val=Atom}, I, A);
match_fail(#k_tuple{es=[#k_atom{val=function_clause}|As]}, I, A) ->
#l{ke={match_fail,{function_clause,literal_list(As, [])}},i=I,a=A};
match_fail(#k_tuple{es=[#k_atom{val=badmatch},Val]}, I, A) ->
diff --git a/lib/compiler/test/andor_SUITE.erl b/lib/compiler/test/andor_SUITE.erl
index a460d54239..84cfd16e60 100644
--- a/lib/compiler/test/andor_SUITE.erl
+++ b/lib/compiler/test/andor_SUITE.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2001-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2001-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(andor_SUITE).
@@ -141,6 +141,10 @@ t_and_or(Config) when is_list(Config) ->
ok.
+-define(GUARD(E), if E -> true;
+ true -> false
+ end).
+
t_andalso(Config) when is_list(Config) ->
Bs = [true,false],
Ps = [{X,Y} || X <- Bs, Y <- Bs],
@@ -151,6 +155,11 @@ t_andalso(Config) when is_list(Config) ->
?line false = false andalso true,
?line false = false andalso false,
+ ?line true = ?GUARD(true andalso true),
+ ?line false = ?GUARD(true andalso false),
+ ?line false = ?GUARD(false andalso true),
+ ?line false = ?GUARD(false andalso false),
+
?line false = false andalso glurf,
?line false = false andalso exit(exit_now),
@@ -176,6 +185,11 @@ t_orelse(Config) when is_list(Config) ->
?line true = false orelse true,
?line false = false orelse false,
+ ?line true = ?GUARD(true orelse true),
+ ?line true = ?GUARD(true orelse false),
+ ?line true = ?GUARD(false orelse true),
+ ?line false = ?GUARD(false orelse false),
+
?line true = true orelse glurf,
?line true = true orelse exit(exit_now),
diff --git a/lib/compiler/test/bs_match_SUITE.erl b/lib/compiler/test/bs_match_SUITE.erl
index 75b6f801e7..caaa587006 100644
--- a/lib/compiler/test/bs_match_SUITE.erl
+++ b/lib/compiler/test/bs_match_SUITE.erl
@@ -30,7 +30,8 @@
multiple_uses/1,zero_label/1,followed_by_catch/1,
matching_meets_construction/1,simon/1,matching_and_andalso/1,
otp_7188/1,otp_7233/1,otp_7240/1,otp_7498/1,
- match_string/1,zero_width/1,bad_size/1,haystack/1]).
+ match_string/1,zero_width/1,bad_size/1,haystack/1,
+ cover_beam_bool/1]).
-export([coverage_id/1]).
@@ -45,7 +46,7 @@ all(suite) ->
wfbm,degenerated_match,bs_sum,coverage,multiple_uses,zero_label,
followed_by_catch,matching_meets_construction,simon,matching_and_andalso,
otp_7188,otp_7233,otp_7240,otp_7498,match_string,zero_width,bad_size,
- haystack].
+ haystack,cover_beam_bool].
init_per_testcase(Case, Config) when is_atom(Case), is_list(Config) ->
Dog = test_server:timetrap(?t:minutes(1)),
@@ -985,6 +986,25 @@ fc(_, Args, {'EXIT',{{case_clause,ActualArgs},_}})
when ?MODULE =:= bs_match_inline_SUITE ->
Args = tuple_to_list(ActualArgs).
+%% Cover the clause handling bs_context_to_binary in
+%% beam_block:initialized_regs/2.
+cover_beam_bool(Config) when is_list(Config) ->
+ ?line ok = do_cover_beam_bool(<<>>, 3),
+ ?line <<19>> = do_cover_beam_bool(<<19>>, 2),
+ ?line <<42>> = do_cover_beam_bool(<<42>>, 1),
+ ?line <<17>> = do_cover_beam_bool(<<13,17>>, 0),
+ ok.
+
+do_cover_beam_bool(Bin, X) when X > 0 ->
+ if
+ X =:= 1; X =:= 2 ->
+ Bin;
+ true ->
+ ok
+ end;
+do_cover_beam_bool(<<_,Bin/binary>>, X) ->
+ do_cover_beam_bool(Bin, X+1).
+
check(F, R) ->
R = F().
diff --git a/lib/compiler/test/compiler.cover b/lib/compiler/test/compiler.cover
index 5ec2408a35..69d284ea6c 100644
--- a/lib/compiler/test/compiler.cover
+++ b/lib/compiler/test/compiler.cover
@@ -1,3 +1,3 @@
%% -*- erlang -*-
-{exclude,[sys_pre_attributes,core_parse]}.
+{exclude,[sys_pre_attributes,core_scan,core_parse]}.
diff --git a/lib/compiler/test/core_SUITE_data/.gitignore b/lib/compiler/test/core_SUITE_data/.gitignore
new file mode 100644
index 0000000000..d11d93d37f
--- /dev/null
+++ b/lib/compiler/test/core_SUITE_data/.gitignore
@@ -0,0 +1 @@
+!*.core
diff --git a/lib/compiler/test/error_SUITE.erl b/lib/compiler/test/error_SUITE.erl
index 4530313bb0..ec58a0761e 100644
--- a/lib/compiler/test/error_SUITE.erl
+++ b/lib/compiler/test/error_SUITE.erl
@@ -21,11 +21,133 @@
-include("test_server.hrl").
-export([all/1,
- head_mismatch_line/1,warnings_as_errors/1]).
+ head_mismatch_line/1,warnings_as_errors/1, bif_clashes/1]).
all(suite) ->
test_lib:recompile(?MODULE),
- [head_mismatch_line,warnings_as_errors].
+ [head_mismatch_line,warnings_as_errors,bif_clashes].
+
+
+bif_clashes(Config) when is_list(Config) ->
+ Ts = [{bif_clashes1,
+ <<"
+ -export([t/0]).
+ t() ->
+ length([a,b,c]).
+
+ length(X) ->
+ erlang:length(X).
+ ">>,
+ [return_warnings],
+ {error,
+ [{4, erl_lint,{call_to_redefined_old_bif,{length,1}}}], []} }],
+ ?line [] = run(Config, Ts),
+ Ts1 = [{bif_clashes2,
+ <<"
+ -export([t/0]).
+ -import(x,[length/1]).
+ t() ->
+ length([a,b,c]).
+ ">>,
+ [return_warnings],
+ {error,
+ [{3, erl_lint,{redefine_old_bif_import,{length,1}}}], []} }],
+ ?line [] = run(Config, Ts1),
+ Ts00 = [{bif_clashes3,
+ <<"
+ -export([t/0]).
+ -compile({no_auto_import,[length/1]}).
+ t() ->
+ length([a,b,c]).
+
+ length(X) ->
+ erlang:length(X).
+ ">>,
+ [return_warnings],
+ []}],
+ ?line [] = run(Config, Ts00),
+ Ts11 = [{bif_clashes4,
+ <<"
+ -export([t/0]).
+ -compile({no_auto_import,[length/1]}).
+ -import(x,[length/1]).
+ t() ->
+ length([a,b,c]).
+ ">>,
+ [return_warnings],
+ []}],
+ ?line [] = run(Config, Ts11),
+ Ts000 = [{bif_clashes5,
+ <<"
+ -export([t/0]).
+ t() ->
+ binary_part(<<1,2,3,4>>,1,2).
+
+ binary_part(X,Y,Z) ->
+ erlang:binary_part(X,Y,Z).
+ ">>,
+ [return_warnings],
+ {warning,
+ [{4, erl_lint,{call_to_redefined_bif,{binary_part,3}}}]} }],
+ ?line [] = run(Config, Ts000),
+ Ts111 = [{bif_clashes6,
+ <<"
+ -export([t/0]).
+ -import(x,[binary_part/3]).
+ t() ->
+ binary_part(<<1,2,3,4>>,1,2).
+ ">>,
+ [return_warnings],
+ {warning,
+ [{3, erl_lint,{redefine_bif_import,{binary_part,3}}}]} }],
+ ?line [] = run(Config, Ts111),
+ Ts2 = [{bif_clashes7,
+ <<"
+ -export([t/0]).
+ -compile({no_auto_import,[length/1]}).
+ -import(x,[length/1]).
+ t() ->
+ length([a,b,c]).
+ length(X) ->
+ erlang:length(X).
+ ">>,
+ [],
+ {error,
+ [{7,erl_lint,{define_import,{length,1}}}],
+ []} }],
+ ?line [] = run2(Config, Ts2),
+ Ts3 = [{bif_clashes8,
+ <<"
+ -export([t/1]).
+ -compile({no_auto_import,[length/1]}).
+ t(X) when length(X) > 3 ->
+ length([a,b,c]).
+ length(X) ->
+ erlang:length(X).
+ ">>,
+ [],
+ {error,
+ [{4,erl_lint,{illegal_guard_local_call,{length,1}}}],
+ []} }],
+ ?line [] = run2(Config, Ts3),
+ Ts4 = [{bif_clashes9,
+ <<"
+ -export([t/1]).
+ -compile({no_auto_import,[length/1]}).
+ -import(x,[length/1]).
+ t(X) when length(X) > 3 ->
+ length([a,b,c]).
+ ">>,
+ [],
+ {error,
+ [{5,erl_lint,{illegal_guard_local_call,{length,1}}}],
+ []} }],
+ ?line [] = run2(Config, Ts4),
+
+ ok.
+
+
+
%% Tests that a head mismatch is reported on the correct line (OTP-2125).
head_mismatch_line(Config) when is_list(Config) ->
@@ -49,7 +171,7 @@ warnings_as_errors(Config) when is_list(Config) ->
A = unused,
ok.
">>,
- [warnings_as_errors],
+ [export_all,warnings_as_errors],
{error,
[],
[{3,erl_lint,{unused_var,'A'}}]} }],
@@ -70,6 +192,24 @@ run(Config, Tests) ->
end,
lists:foldl(F, [], Tests).
+run2(Config, Tests) ->
+ F = fun({N,P,Ws,E}, BadL) ->
+ case catch filter(run_test(Config, P, Ws)) of
+ E ->
+ BadL;
+ Bad ->
+ ?t:format("~nTest ~p failed. Expected~n ~p~n"
+ "but got~n ~p~n", [N, E, Bad]),
+ fail()
+ end
+ end,
+ lists:foldl(F, [], Tests).
+
+filter({error,Es,_Ws}) ->
+ {error,Es,[]};
+filter(X) ->
+ X.
+
%% Compiles a test module and returns the list of errors and warnings.
@@ -78,17 +218,29 @@ run_test(Conf, Test0, Warnings) ->
?line DataDir = ?config(priv_dir, Conf),
?line Test = ["-module(errors_test). ", Test0],
?line File = filename:join(DataDir, Filename),
- ?line Opts = [binary,export_all,return|Warnings],
+ ?line Opts = [binary,return_errors|Warnings],
?line ok = file:write_file(File, Test),
%% Compile once just to print all errors and warnings.
- ?line compile:file(File, [binary,export_all,report|Warnings]),
+ ?line compile:file(File, [binary,report|Warnings]),
%% Test result of compilation.
?line Res = case compile:file(File, Opts) of
- {error,[{_File,Es}],Ws} ->
+ {ok,errors_test,_,[{_File,Ws}]} ->
+ %io:format("compile:file(~s,~p) ->~n~p~n",
+ % [File,Opts,Ws]),
+ {warning,Ws};
+ {ok,errors_test,_,[]} ->
+ %io:format("compile:file(~s,~p) ->~n~p~n",
+ % [File,Opts,Ws]),
+ [];
+ {error,[{XFile,Es}],Ws} = _ZZ when is_list(XFile) ->
+ %io:format("compile:file(~s,~p) ->~n~p~n",
+ % [File,Opts,_ZZ]),
{error,Es,Ws};
- {error,Es,[{_File,Ws}]} ->
+ {error,Es,[{_File,Ws}]} = _ZZ->
+ %io:format("compile:file(~s,~p) ->~n~p~n",
+ % [File,Opts,_ZZ]),
{error,Es,Ws}
end,
file:delete(File),
diff --git a/lib/compiler/test/float_SUITE.erl b/lib/compiler/test/float_SUITE.erl
index 07779ddd5a..b48b1daa32 100644
--- a/lib/compiler/test/float_SUITE.erl
+++ b/lib/compiler/test/float_SUITE.erl
@@ -82,6 +82,14 @@ bad_negate(X, Y) when is_float(X) ->
Y1 = -Y, %BIF call.
{X2, Y1}.
+%% Some math functions are not implemented on all platforms.
+-define(OPTIONAL(Expected, Expr),
+ try
+ Expected = Expr
+ catch
+ error:undef -> ok
+ end).
+
math_functions(Config) when is_list(Config) ->
%% Mostly silly coverage.
?line 0.0 = math:tan(0),
@@ -93,6 +101,14 @@ math_functions(Config) when is_list(Config) ->
?line -1.0 = math:cos(math:pi()),
?line 1.0 = math:exp(0),
?line 1.0 = math:pow(math:pi(), 0),
+ ?line 0.0 = math:log(1),
+ ?line 0.0 = math:asin(0),
+ ?line 0.0 = math:acos(1),
+ ?line ?OPTIONAL(0.0, math:asinh(0)),
+ ?line ?OPTIONAL(0.0, math:acosh(1)),
+ ?line ?OPTIONAL(0.0, math:atanh(0)),
+ ?line ?OPTIONAL(0.0, math:erf(0)),
+ ?line ?OPTIONAL(1.0, math:erfc(0)),
?line 0.0 = math:tan(id(0)),
?line 0.0 = math:atan2(id(0), 1),
@@ -101,6 +117,14 @@ math_functions(Config) when is_list(Config) ->
?line 0.0 = math:tanh(id(0)),
?line 1.0 = math:log10(id(10)),
?line 1.0 = math:exp(id(0)),
+ ?line 0.0 = math:log(id(1)),
+ ?line 0.0 = math:asin(id(0)),
+ ?line 0.0 = math:acos(id(1)),
+ ?line ?OPTIONAL(0.0, math:asinh(id(0))),
+ ?line ?OPTIONAL(0.0, math:acosh(id(1))),
+ ?line ?OPTIONAL(0.0, math:atanh(id(0))),
+ ?line ?OPTIONAL(0.0, math:erf(id(0))),
+ ?line ?OPTIONAL(1.0, math:erfc(id(0))),
%% Only for coverage (of beam_type.erl).
?line {'EXIT',{undef,_}} = (catch math:fnurfla(0)),
diff --git a/lib/compiler/test/guard_SUITE.erl b/lib/compiler/test/guard_SUITE.erl
index f3960b28c3..8f23bd2e5a 100644
--- a/lib/compiler/test/guard_SUITE.erl
+++ b/lib/compiler/test/guard_SUITE.erl
@@ -31,7 +31,7 @@
t_is_boolean/1,is_function_2/1,
tricky/1,rel_ops/1,literal_type_tests/1,
basic_andalso_orelse/1,traverse_dcd/1,
- check_qlc_hrl/1,andalso_semi/1,tuple_size/1]).
+ check_qlc_hrl/1,andalso_semi/1,t_tuple_size/1,binary_part/1]).
all(suite) ->
test_lib:recompile(?MODULE),
@@ -43,7 +43,7 @@ all(suite) ->
build_in_guard,old_guard_tests,gbif,
t_is_boolean,is_function_2,tricky,rel_ops,literal_type_tests,
basic_andalso_orelse,traverse_dcd,check_qlc_hrl,andalso_semi,
- tuple_size].
+ t_tuple_size,binary_part].
misc(Config) when is_list(Config) ->
?line 42 = case id(42) of
@@ -1330,7 +1330,7 @@ andalso_semi_bar(Bar) when is_list(Bar) andalso length(Bar) =:= 3; Bar =:= 1 ->
ok.
-tuple_size(Config) when is_list(Config) ->
+t_tuple_size(Config) when is_list(Config) ->
?line 10 = do_tuple_size({1,2,3,4}),
?line fc(catch do_tuple_size({1,2,3})),
?line fc(catch do_tuple_size(42)),
@@ -1362,6 +1362,146 @@ ludicrous_tuple_size(T)
when tuple_size(T) =:= 16#FFFFFFFFFFFFFFFF -> ok;
ludicrous_tuple_size(_) -> error.
+%%
+%% The binary_part/2,3 guard BIFs
+%%
+-define(MASK_ERROR(EXPR),mask_error((catch (EXPR)))).
+mask_error({'EXIT',{Err,_}}) ->
+ Err;
+mask_error(Else) ->
+ Else.
+
+binary_part(doc) ->
+    ["Tests the binary_part/2,3 guard (GC) BIFs"];
+binary_part(Config) when is_list(Config) ->
+    %% This is more or less a copy of what the guard_SUITE in the
+    %% emulator test suite does to cover the guard BIFs.
+ ?line 1 = bptest(<<1,2,3>>),
+ ?line 2 = bptest(<<2,1,3>>),
+ ?line error = bptest(<<1>>),
+ ?line error = bptest(<<>>),
+ ?line error = bptest(apa),
+ ?line 3 = bptest(<<2,3,3>>),
+ % With one variable (pos)
+ ?line 1 = bptest(<<1,2,3>>,1),
+ ?line 2 = bptest(<<2,1,3>>,1),
+ ?line error = bptest(<<1>>,1),
+ ?line error = bptest(<<>>,1),
+ ?line error = bptest(apa,1),
+ ?line 3 = bptest(<<2,3,3>>,1),
+ % With one variable (length)
+ ?line 1 = bptesty(<<1,2,3>>,1),
+ ?line 2 = bptesty(<<2,1,3>>,1),
+ ?line error = bptesty(<<1>>,1),
+ ?line error = bptesty(<<>>,1),
+ ?line error = bptesty(apa,1),
+ ?line 3 = bptesty(<<2,3,3>>,2),
+ % With one variable (whole tuple)
+ ?line 1 = bptestx(<<1,2,3>>,{1,1}),
+ ?line 2 = bptestx(<<2,1,3>>,{1,1}),
+ ?line error = bptestx(<<1>>,{1,1}),
+ ?line error = bptestx(<<>>,{1,1}),
+ ?line error = bptestx(apa,{1,1}),
+ ?line 3 = bptestx(<<2,3,3>>,{1,2}),
+ % With two variables
+ ?line 1 = bptest(<<1,2,3>>,1,1),
+ ?line 2 = bptest(<<2,1,3>>,1,1),
+ ?line error = bptest(<<1>>,1,1),
+ ?line error = bptest(<<>>,1,1),
+ ?line error = bptest(apa,1,1),
+ ?line 3 = bptest(<<2,3,3>>,1,2),
+ % Direct (autoimported) call, these will be evaluated by the compiler...
+ ?line <<2>> = binary_part(<<1,2,3>>,1,1),
+ ?line <<1>> = binary_part(<<2,1,3>>,1,1),
+ % Compiler warnings due to constant evaluation expected (3)
+ ?line badarg = ?MASK_ERROR(binary_part(<<1>>,1,1)),
+ ?line badarg = ?MASK_ERROR(binary_part(<<>>,1,1)),
+ ?line badarg = ?MASK_ERROR(binary_part(apa,1,1)),
+ ?line <<3,3>> = binary_part(<<2,3,3>>,1,2),
+ % Direct call through apply
+ ?line <<2>> = apply(erlang,binary_part,[<<1,2,3>>,1,1]),
+ ?line <<1>> = apply(erlang,binary_part,[<<2,1,3>>,1,1]),
+ % Compiler warnings due to constant evaluation expected (3)
+ ?line badarg = ?MASK_ERROR(apply(erlang,binary_part,[<<1>>,1,1])),
+ ?line badarg = ?MASK_ERROR(apply(erlang,binary_part,[<<>>,1,1])),
+ ?line badarg = ?MASK_ERROR(apply(erlang,binary_part,[apa,1,1])),
+ ?line <<3,3>> = apply(erlang,binary_part,[<<2,3,3>>,1,2]),
+ % Constant propagation
+ ?line Bin = <<1,2,3>>,
+ ?line ok = if
+ binary_part(Bin,1,1) =:= <<2>> ->
+ ok;
+ %% Compiler warning, clause cannot match (expected)
+ true ->
+ error
+ end,
+ ?line ok = if
+ binary_part(Bin,{1,1}) =:= <<2>> ->
+ ok;
+ %% Compiler warning, clause cannot match (expected)
+ true ->
+ error
+ end,
+ ok.
+
+
+bptest(B) when length(B) =:= 1337 ->
+ 1;
+bptest(B) when binary_part(B,{1,1}) =:= <<2>> ->
+ 1;
+bptest(B) when erlang:binary_part(B,1,1) =:= <<1>> ->
+ 2;
+bptest(B) when erlang:binary_part(B,{1,2}) =:= <<3,3>> ->
+ 3;
+bptest(_) ->
+ error.
+
+bptest(B,A) when length(B) =:= A ->
+ 1;
+bptest(B,A) when binary_part(B,{A,1}) =:= <<2>> ->
+ 1;
+bptest(B,A) when erlang:binary_part(B,A,1) =:= <<1>> ->
+ 2;
+bptest(B,A) when erlang:binary_part(B,{A,2}) =:= <<3,3>> ->
+ 3;
+bptest(_,_) ->
+ error.
+
+bptestx(B,A) when length(B) =:= A ->
+ 1;
+bptestx(B,A) when binary_part(B,A) =:= <<2>> ->
+ 1;
+bptestx(B,A) when erlang:binary_part(B,A) =:= <<1>> ->
+ 2;
+bptestx(B,A) when erlang:binary_part(B,A) =:= <<3,3>> ->
+ 3;
+bptestx(_,_) ->
+ error.
+
+bptesty(B,A) when length(B) =:= A ->
+ 1;
+bptesty(B,A) when binary_part(B,{1,A}) =:= <<2>> ->
+ 1;
+bptesty(B,A) when erlang:binary_part(B,1,A) =:= <<1>> ->
+ 2;
+bptesty(B,A) when erlang:binary_part(B,{1,A}) =:= <<3,3>> ->
+ 3;
+bptesty(_,_) ->
+ error.
+
+bptest(B,A,_C) when length(B) =:= A ->
+ 1;
+bptest(B,A,C) when binary_part(B,{A,C}) =:= <<2>> ->
+ 1;
+bptest(B,A,C) when erlang:binary_part(B,A,C) =:= <<1>> ->
+ 2;
+bptest(B,A,C) when erlang:binary_part(B,{A,C}) =:= <<3,3>> ->
+ 3;
+bptest(_,_,_) ->
+ error.
+
+
+
%% Call this function to turn off constant propagation.
id(I) -> I.
diff --git a/lib/compiler/test/match_SUITE.erl b/lib/compiler/test/match_SUITE.erl
index 9c4687efa1..fd51b777ac 100644
--- a/lib/compiler/test/match_SUITE.erl
+++ b/lib/compiler/test/match_SUITE.erl
@@ -112,6 +112,12 @@ aliases(Config) when is_list(Config) ->
?line {42,42,42,42} = multiple_aliases_1(42),
?line {7,7,7} = multiple_aliases_2(7),
?line {{a,b},{a,b},{a,b}} = multiple_aliases_3({a,b}),
+
+ %% Lists/literals.
+ ?line {a,b} = list_alias1([a,b]),
+ ?line {a,b} = list_alias2([a,b]),
+ ?line {a,b} = list_alias3([a,b]),
+
ok.
str_alias(V) ->
@@ -206,6 +212,15 @@ multiple_aliases_2((A=B)=(A=C)) ->
multiple_aliases_3((A={_,_}=B)={_,_}=C) ->
{A,B,C}.
+list_alias1([a,b]=[X,Y]) ->
+ {X,Y}.
+
+list_alias2([X,Y]=[a,b]) ->
+ {X,Y}.
+
+list_alias3([X,b]=[a,Y]) ->
+ {X,Y}.
+
%% OTP-7018.
match_in_call(Config) when is_list(Config) ->
diff --git a/lib/compiler/test/misc_SUITE.erl b/lib/compiler/test/misc_SUITE.erl
index 793c53ac31..450a4e279d 100644
--- a/lib/compiler/test/misc_SUITE.erl
+++ b/lib/compiler/test/misc_SUITE.erl
@@ -20,10 +20,23 @@
-export([all/1,init_per_testcase/2,fin_per_testcase/2,
tobias/1,empty_string/1,md5/1,silly_coverage/1,
- confused_literals/1,integer_encoding/1]).
+ confused_literals/1,integer_encoding/1,override_bif/1]).
-include("test_server.hrl").
+%% For the override_bif testcase.
+%% NB: no other test cases in this test suite can use these without an erlang: prefix!
+-compile({no_auto_import,[abs/1]}).
+-compile({no_auto_import,[binary_part/3]}).
+-compile({no_auto_import,[binary_part/2]}).
+-import(test_lib,[binary_part/2]).
+
+%% This should do no harm (except for fun byte_size/1, which by design does not work with import).
+-compile({no_auto_import,[byte_size/1]}).
+-import(erlang,[byte_size/1]).
+
+
+
%% Include an opaque declaration to cover the stripping of
%% opaque types from attributes in v3_kernel.
-opaque misc_SUITE_test_cases() :: [atom()].
@@ -42,7 +55,39 @@ fin_per_testcase(Case, Config) when is_atom(Case), is_list(Config) ->
all(suite) ->
test_lib:recompile(?MODULE),
[tobias,empty_string,md5,silly_coverage,confused_literals,
- integer_encoding].
+ integer_encoding, override_bif].
+
+
+%%
+%% Functions that override new and old BIFs
+%%
+abs(_N) ->
+ dummy_abs.
+
+binary_part(_,_,_) ->
+ dummy_bp.
+
+%% Make sure that auto-imported BIFs are overridden correctly.
+
+override_bif(suite) ->
+ [];
+override_bif(doc) ->
+    ["Test that local functions and imports override auto-imported BIFs."];
+override_bif(Config) when is_list(Config) ->
+ ?line dummy_abs = abs(1),
+ ?line dummy_bp = binary_part(<<"hello">>,1,1),
+ ?line dummy = binary_part(<<"hello">>,{1,1}),
+ ?line 1 = erlang:abs(1),
+ ?line <<"e">> = erlang:binary_part(<<"hello">>,1,1),
+ ?line <<"e">> = erlang:binary_part(<<"hello">>,{1,1}),
+ F = fun(X) when byte_size(X) =:= 4 ->
+ four;
+ (X) ->
+ byte_size(X)
+ end,
+ ?line four = F(<<1,2,3,4>>),
+ ?line 5 = F(<<1,2,3,4,5>>),
+ ok.
%% A bug reported by Tobias Lindahl for a development version of R11B.
@@ -104,7 +149,17 @@ silly_coverage(Config) when is_list(Config) ->
?line expect_error(fun() -> sys_core_dsetel:module(BadCoreErlang, []) end),
?line expect_error(fun() -> v3_kernel:module(BadCoreErlang, []) end),
- %% v3_codgen
+ %% v3_life
+ BadKernel = {k_mdef,[],?MODULE,
+ [{foo,0}],
+ [],
+ [{k_fdef,
+ {k,[],[],[]},
+ f,0,[],
+ seriously_bad_body}]},
+ ?line expect_error(fun() -> v3_life:module(BadKernel, []) end),
+
+ %% v3_codegen
CodegenInput = {?MODULE,[{foo,0}],[],[{function,foo,0,[a|b],a,b}]},
?line expect_error(fun() -> v3_codegen:module(CodegenInput, []) end),
diff --git a/lib/compiler/test/test_lib.erl b/lib/compiler/test/test_lib.erl
index 05236ee010..d8799952a9 100644
--- a/lib/compiler/test/test_lib.erl
+++ b/lib/compiler/test/test_lib.erl
@@ -19,8 +19,8 @@
-module(test_lib).
-include("test_server.hrl").
-
--export([recompile/1,opt_opts/1,get_data_dir/1,smoke_disasm/1,p_run/2]).
+-compile({no_auto_import,[binary_part/2]}).
+-export([recompile/1,opt_opts/1,get_data_dir/1,smoke_disasm/1,p_run/2,binary_part/2]).
recompile(Mod) when is_atom(Mod) ->
case whereis(cover_server) of
@@ -104,3 +104,7 @@ p_run_loop(Test, List, N, Refs0, Errors0, Ws0) ->
Refs = Refs0 -- [Ref],
p_run_loop(Test, List, N, Refs, Errors, Ws)
end.
+
+%% This is for the misc_SUITE:override_bif testcase
+binary_part(_A,_B) ->
+ dummy.
diff --git a/lib/compiler/vsn.mk b/lib/compiler/vsn.mk
index a5e6de7b5f..47feab5fe1 100644
--- a/lib/compiler/vsn.mk
+++ b/lib/compiler/vsn.mk
@@ -1 +1 @@
-COMPILER_VSN = 4.6.5
+COMPILER_VSN = 4.7
diff --git a/lib/crypto/c_src/crypto.c b/lib/crypto/c_src/crypto.c
index a71df1d7fd..68079f06c7 100644
--- a/lib/crypto/c_src/crypto.c
+++ b/lib/crypto/c_src/crypto.c
@@ -198,7 +198,7 @@ static ErlNifFunc nif_funcs[] = {
{"rand_bytes", 3, rand_bytes_3},
{"rand_uniform_nif", 2, rand_uniform_nif},
{"mod_exp_nif", 3, mod_exp_nif},
- {"dss_verify", 3, dss_verify},
+ {"dss_verify", 4, dss_verify},
{"rsa_verify", 4, rsa_verify},
{"aes_cbc_crypt", 4, aes_cbc_crypt},
{"exor", 2, exor},
@@ -207,7 +207,7 @@ static ErlNifFunc nif_funcs[] = {
{"rc4_encrypt_with_state", 2, rc4_encrypt_with_state},
{"rc2_40_cbc_crypt", 4, rc2_40_cbc_crypt},
{"rsa_sign_nif", 3, rsa_sign_nif},
- {"dss_sign_nif", 2, dss_sign_nif},
+ {"dss_sign_nif", 3, dss_sign_nif},
{"rsa_public_crypt", 4, rsa_public_crypt},
{"rsa_private_crypt", 4, rsa_private_crypt},
{"dh_generate_parameters_nif", 2, dh_generate_parameters_nif},
@@ -255,6 +255,7 @@ static ERL_NIF_TERM atom_unable_to_check_generator;
static ERL_NIF_TERM atom_not_suitable_generator;
static ERL_NIF_TERM atom_check_failed;
static ERL_NIF_TERM atom_unknown;
+static ERL_NIF_TERM atom_none;
static int is_ok_load_info(ErlNifEnv* env, ERL_NIF_TERM load_info)
@@ -264,15 +265,15 @@ static int is_ok_load_info(ErlNifEnv* env, ERL_NIF_TERM load_info)
}
static void* crypto_alloc(size_t size)
{
- return enif_alloc(NULL, size);
+ return enif_alloc(size);
}
static void* crypto_realloc(void* ptr, size_t size)
{
- return enif_realloc(NULL, ptr, size);
+ return enif_realloc(ptr, size);
}
static void crypto_free(void* ptr)
{
- enif_free(NULL, ptr);
+ enif_free(ptr);
}
static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
@@ -289,7 +290,7 @@ static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
if (sys_info.scheduler_threads > 1) {
int i;
- lock_vec = enif_alloc(env,CRYPTO_num_locks()*sizeof(*lock_vec));
+ lock_vec = enif_alloc(CRYPTO_num_locks()*sizeof(*lock_vec));
if (lock_vec==NULL) return -1;
memset(lock_vec,0,CRYPTO_num_locks()*sizeof(*lock_vec));
@@ -322,6 +323,7 @@ static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
atom_not_suitable_generator = enif_make_atom(env,"not_suitable_generator");
atom_check_failed = enif_make_atom(env,"check_failed");
atom_unknown = enif_make_atom(env,"unknown");
+ atom_none = enif_make_atom(env,"none");
*priv_data = NULL;
library_refc++;
@@ -371,7 +373,7 @@ static void unload(ErlNifEnv* env, void* priv_data)
enif_rwlock_destroy(lock_vec[i]);
}
}
- enif_free(env,lock_vec);
+ enif_free(lock_vec);
}
}
/*else NIF library still used by other (new) module code */
@@ -766,7 +768,7 @@ static int inspect_mpint(ErlNifEnv* env, ERL_NIF_TERM term, ErlNifBinary* bin)
}
static ERL_NIF_TERM dss_verify(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{/* (Data,Signature,Key=[P, Q, G, Y]) */
+{/* (DigestType,Data,Signature,Key=[P, Q, G, Y]) */
ErlNifBinary data_bin, sign_bin;
BIGNUM *dsa_p, *dsa_q, *dsa_g, *dsa_y;
unsigned char hmacbuf[SHA_DIGEST_LENGTH];
@@ -774,9 +776,8 @@ static ERL_NIF_TERM dss_verify(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv
DSA *dsa;
int i;
- if (!inspect_mpint(env,argv[0],&data_bin)
- || !inspect_mpint(env,argv[1],&sign_bin)
- || !enif_get_list_cell(env, argv[2], &head, &tail)
+ if (!inspect_mpint(env, argv[2], &sign_bin)
+ || !enif_get_list_cell(env, argv[3], &head, &tail)
|| !get_bn_from_mpint(env, head, &dsa_p)
|| !enif_get_list_cell(env, tail, &head, &tail)
|| !get_bn_from_mpint(env, head, &dsa_q)
@@ -785,10 +786,18 @@ static ERL_NIF_TERM dss_verify(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv
|| !enif_get_list_cell(env, tail, &head, &tail)
|| !get_bn_from_mpint(env, head, &dsa_y)
|| !enif_is_empty_list(env,tail)) {
-
return enif_make_badarg(env);
}
- SHA1(data_bin.data+4, data_bin.size-4, hmacbuf);
+ if (argv[0] == atom_sha && inspect_mpint(env, argv[1], &data_bin)) {
+ SHA1(data_bin.data+4, data_bin.size-4, hmacbuf);
+ }
+ else if (argv[0] == atom_none && enif_inspect_binary(env, argv[1], &data_bin)
+ && data_bin.size == SHA_DIGEST_LENGTH) {
+ memcpy(hmacbuf, data_bin.data, SHA_DIGEST_LENGTH);
+ }
+ else {
+ return enif_make_badarg(env);
+ }
dsa = DSA_new();
dsa->p = dsa_p;
@@ -994,7 +1003,7 @@ static ERL_NIF_TERM rsa_sign_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM ar
RSA_free(rsa);
return enif_make_badarg(env);
}
- enif_alloc_binary(env, RSA_size(rsa), &ret_bin);
+ enif_alloc_binary(RSA_size(rsa), &ret_bin);
if (is_sha) {
SHA1(data_bin.data+4, data_bin.size-4, hmacbuf);
ERL_VALGRIND_ASSERT_MEM_DEFINED(hmacbuf, SHA_DIGEST_LENGTH);
@@ -1011,19 +1020,19 @@ static ERL_NIF_TERM rsa_sign_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM ar
if (i) {
ERL_VALGRIND_MAKE_MEM_DEFINED(ret_bin.data, rsa_s_len);
if (rsa_s_len != data_bin.size) {
- enif_realloc_binary(env, &ret_bin, rsa_s_len);
+ enif_realloc_binary(&ret_bin, rsa_s_len);
ERL_VALGRIND_ASSERT_MEM_DEFINED(ret_bin.data, rsa_s_len);
}
return enif_make_binary(env,&ret_bin);
}
else {
- enif_release_binary(env, &ret_bin);
+ enif_release_binary(&ret_bin);
return atom_error;
}
}
static ERL_NIF_TERM dss_sign_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{/* (Data,Key=[P,Q,G,PrivKey]) */
+{/* (DigestType, Data, Key=[P,Q,G,PrivKey]) */
ErlNifBinary data_bin, ret_bin;
ERL_NIF_TERM head, tail;
unsigned char hmacbuf[SHA_DIGEST_LENGTH];
@@ -1032,8 +1041,7 @@ static ERL_NIF_TERM dss_sign_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM ar
int i;
dsa->pub_key = NULL;
- if (!inspect_mpint(env, argv[0], &data_bin)
- || !enif_get_list_cell(env, argv[1], &head, &tail)
+ if (!enif_get_list_cell(env, argv[2], &head, &tail)
|| !get_bn_from_mpint(env, head, &dsa->p)
|| !enif_get_list_cell(env, tail, &head, &tail)
|| !get_bn_from_mpint(env, head, &dsa->q)
@@ -1042,20 +1050,28 @@ static ERL_NIF_TERM dss_sign_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM ar
|| !enif_get_list_cell(env, tail, &head, &tail)
|| !get_bn_from_mpint(env, head, &dsa->priv_key)
|| !enif_is_empty_list(env,tail)) {
-
+ goto badarg;
+ }
+ if (argv[0] == atom_sha && inspect_mpint(env, argv[1], &data_bin)) {
+ SHA1(data_bin.data+4, data_bin.size-4, hmacbuf);
+ }
+ else if (argv[0] == atom_none && enif_inspect_binary(env,argv[1],&data_bin)
+ && data_bin.size == SHA_DIGEST_LENGTH) {
+ memcpy(hmacbuf, data_bin.data, SHA_DIGEST_LENGTH);
+ }
+ else {
+ badarg:
DSA_free(dsa);
return enif_make_badarg(env);
}
- SHA1(data_bin.data+4, data_bin.size-4, hmacbuf);
-
- enif_alloc_binary(env, DSA_size(dsa), &ret_bin);
+ enif_alloc_binary(DSA_size(dsa), &ret_bin);
i = DSA_sign(NID_sha1, hmacbuf, SHA_DIGEST_LENGTH,
ret_bin.data, &dsa_s_len, dsa);
DSA_free(dsa);
if (i) {
if (dsa_s_len != ret_bin.size) {
- enif_realloc_binary(env, &ret_bin, dsa_s_len);
+ enif_realloc_binary(&ret_bin, dsa_s_len);
}
return enif_make_binary(env, &ret_bin);
}
@@ -1100,7 +1116,7 @@ static ERL_NIF_TERM rsa_public_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TER
return enif_make_badarg(env);
}
- enif_alloc_binary(env, RSA_size(rsa), &ret_bin);
+ enif_alloc_binary(RSA_size(rsa), &ret_bin);
if (argv[3] == atom_true) {
ERL_VALGRIND_ASSERT_MEM_DEFINED(buf+i,data_len);
@@ -1115,7 +1131,7 @@ static ERL_NIF_TERM rsa_public_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TER
ret_bin.data, rsa, padding);
if (i > 0) {
ERL_VALGRIND_MAKE_MEM_DEFINED(ret_bin.data, i);
- enif_realloc_binary(env, &ret_bin, i);
+ enif_realloc_binary(&ret_bin, i);
}
}
RSA_free(rsa);
@@ -1148,7 +1164,7 @@ static ERL_NIF_TERM rsa_private_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TE
return enif_make_badarg(env);
}
- enif_alloc_binary(env, RSA_size(rsa), &ret_bin);
+ enif_alloc_binary(RSA_size(rsa), &ret_bin);
if (argv[3] == atom_true) {
ERL_VALGRIND_ASSERT_MEM_DEFINED(buf+i,data_len);
@@ -1163,7 +1179,7 @@ static ERL_NIF_TERM rsa_private_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TE
ret_bin.data, rsa, padding);
if (i > 0) {
ERL_VALGRIND_MAKE_MEM_DEFINED(ret_bin.data, i);
- enif_realloc_binary(env, &ret_bin, i);
+ enif_realloc_binary(&ret_bin, i);
}
}
RSA_free(rsa);
@@ -1293,11 +1309,11 @@ static ERL_NIF_TERM dh_compute_key_nif(ErlNifEnv* env, int argc, const ERL_NIF_T
ret = enif_make_badarg(env);
}
else {
- enif_alloc_binary(env, DH_size(dh_params), &ret_bin);
+ enif_alloc_binary(DH_size(dh_params), &ret_bin);
i = DH_compute_key(ret_bin.data, pubkey, dh_params);
if (i > 0) {
if (i != ret_bin.size) {
- enif_realloc_binary(env, &ret_bin, i);
+ enif_realloc_binary(&ret_bin, i);
}
ret = enif_make_binary(env, &ret_bin);
}
diff --git a/lib/crypto/doc/src/crypto.xml b/lib/crypto/doc/src/crypto.xml
index 256eab3e3c..e1431cfd81 100644
--- a/lib/crypto/doc/src/crypto.xml
+++ b/lib/crypto/doc/src/crypto.xml
@@ -755,39 +755,44 @@ Mpint() = <![CDATA[<<ByteLen:32/integer-big, Bytes:ByteLen/binary>>]]>
<func>
<name>dss_sign(Data, Key) -> Signature</name>
+ <name>dss_sign(DigestType, Data, Key) -> Signature</name>
<fsummary>Sign the data using dsa with given private key.</fsummary>
<type>
- <v>Digest = Mpint</v>
+ <v>DigestType = sha | none (default is sha)</v>
+ <v>Data = Mpint | ShaDigest</v>
<v>Key = [P, Q, G, X]</v>
<v>P, Q, G, X = Mpint</v>
<d> Where <c>P</c>, <c>Q</c> and <c>G</c> are the dss
parameters and <c>X</c> is the private key.</d>
- <v>Mpint = binary()</v>
+ <v>ShaDigest = binary() with length 20 bytes</v>
<v>Signature = binary()</v>
</type>
<desc>
- <p>Calculates the sha digest of the <c>Data</c>
- and creates a DSS signature with the private key <c>Key</c>
- of the digest.</p>
+      <p>Creates a DSS signature of a digest using the private key <c>Key</c>.
+ If <c>DigestType</c> is 'sha', the digest is calculated as SHA1 of <c>Data</c>.
+ If <c>DigestType</c> is 'none', <c>Data</c> is the precalculated SHA1 digest.</p>
</desc>
</func>
<func>
<name>dss_verify(Data, Signature, Key) -> Verified</name>
+ <name>dss_verify(DigestType, Data, Signature, Key) -> Verified</name>
<fsummary>Verify the data and signature using dsa with given public key.</fsummary>
<type>
<v>Verified = boolean()</v>
- <v>Digest, Signature = Mpint</v>
+ <v>DigestType = sha | none</v>
+ <v>Data = Mpint | ShaDigest</v>
+ <v>Signature = Mpint</v>
<v>Key = [P, Q, G, Y]</v>
<v>P, Q, G, Y = Mpint</v>
<d> Where <c>P</c>, <c>Q</c> and <c>G</c> are the dss
parameters and <c>Y</c> is the public key.</d>
- <v>Mpint = binary()</v>
+ <v>ShaDigest = binary() with length 20 bytes</v>
</type>
<desc>
- <p>Calculates the sha digest of the <c>Data</c> and verifies that the
- digest matches the DSS signature using the public key <c>Key</c>.
- </p>
+ <p>Verifies that a digest matches the DSS signature using the public key <c>Key</c>.
+ If <c>DigestType</c> is 'sha', the digest is calculated as SHA1 of <c>Data</c>.
+ If <c>DigestType</c> is 'none', <c>Data</c> is the precalculated SHA1 digest.</p>
</desc>
</func>
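
To make the two DigestType variants documented above concrete, here is a hedged usage sketch in the style of the crypto_SUITE helpers further down. Params, PrivKey and PubKey stand for the mpint()-encoded DSS parameters and keys, and SizedMsg for the Mpint() form of the message; none of these names appear in the patch itself:

    %% DigestType 'sha': the SHA1 digest is computed from Data internally.
    Sig = crypto:dss_sign(sha, SizedMsg, Params ++ [PrivKey]),
    SizedSig = <<(byte_size(Sig)):32, Sig/binary>>,   %% Mpint() form of Sig
    true = crypto:dss_verify(sha, SizedMsg, SizedSig, Params ++ [PubKey]),

    %% DigestType 'none': Data is a precalculated 20-byte SHA1 digest.
    <<_Len:32, Raw/binary>> = SizedMsg,
    Sig2 = crypto:dss_sign(none, crypto:sha(Raw), Params ++ [PrivKey]),
    true = crypto:dss_verify(none, crypto:sha(Raw), SizedSig, Params ++ [PubKey]).
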
diff --git a/lib/crypto/src/crypto.app.src b/lib/crypto/src/crypto.app.src
index a24760a781..5548b6a1b5 100644
--- a/lib/crypto/src/crypto.app.src
+++ b/lib/crypto/src/crypto.app.src
@@ -1,23 +1,23 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1999-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1999-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
{application, crypto,
- [{description, "CRYPTO version 1"},
+ [{description, "CRYPTO version 2"},
{vsn, "%VSN%"},
{modules, [crypto,
crypto_app,
diff --git a/lib/crypto/src/crypto.erl b/lib/crypto/src/crypto.erl
index 5b1ce96caf..39512d27e1 100644
--- a/lib/crypto/src/crypto.erl
+++ b/lib/crypto/src/crypto.erl
@@ -40,8 +40,8 @@
-export([exor/2]).
-export([rc4_encrypt/2, rc4_set_key/1, rc4_encrypt_with_state/2]).
-export([rc2_40_cbc_encrypt/3, rc2_40_cbc_decrypt/3]).
--export([dss_verify/3, rsa_verify/3, rsa_verify/4]).
--export([dss_sign/2, rsa_sign/2, rsa_sign/3]).
+-export([dss_verify/3, dss_verify/4, rsa_verify/3, rsa_verify/4]).
+-export([dss_sign/2, dss_sign/3, rsa_sign/2, rsa_sign/3]).
-export([rsa_public_encrypt/3, rsa_private_decrypt/3]).
-export([rsa_private_encrypt/3, rsa_public_decrypt/3]).
-export([dh_generate_key/1, dh_generate_key/2, dh_compute_key/3]).
@@ -82,6 +82,10 @@
aes_cbc_256_encrypt, aes_cbc_256_decrypt,
info_lib]).
+-type rsa_digest_type() :: 'md5' | 'sha'.
+-type dss_digest_type() :: 'none' | 'sha'.
+-type crypto_integer() :: binary() | integer().
+
-define(nif_stub,nif_stub_error(?LINE)).
-on_load(on_load/0).
@@ -118,7 +122,7 @@ on_load() ->
nif_stub_error(Line) ->
- erlang:error({nif_not_loaded,module,?MODULE,line,Line}).
+ erlang:nif_error({nif_not_loaded,module,?MODULE,line,Line}).
start() ->
application:start(crypto).
@@ -146,6 +150,12 @@ version() -> ?CRYPTO_VSN.
%%
%% MD5
%%
+
+-spec md5(iodata()) -> binary().
+-spec md5_init() -> binary().
+-spec md5_update(binary(), iodata()) -> binary().
+-spec md5_final(binary()) -> binary().
+
md5(_Data) -> ?nif_stub.
md5_init() -> ?nif_stub.
md5_update(_Context, _Data) -> ?nif_stub.
@@ -154,6 +164,11 @@ md5_final(_Context) -> ?nif_stub.
%%
%% MD4
%%
+-spec md4(iodata()) -> binary().
+-spec md4_init() -> binary().
+-spec md4_update(binary(), iodata()) -> binary().
+-spec md4_final(binary()) -> binary().
+
md4(_Data) -> ?nif_stub.
md4_init() -> ?nif_stub.
md4_update(_Context, _Data) -> ?nif_stub.
@@ -162,6 +177,11 @@ md4_final(_Context) -> ?nif_stub.
%%
%% SHA
%%
+-spec sha(iodata()) -> binary().
+-spec sha_init() -> binary().
+-spec sha_update(binary(), iodata()) -> binary().
+-spec sha_final(binary()) -> binary().
+
sha(_Data) -> ?nif_stub.
sha_init() -> ?nif_stub.
sha_update(_Context, _Data) -> ?nif_stub.
@@ -175,6 +195,9 @@ sha_final(_Context) -> ?nif_stub.
%%
%% MD5_MAC
%%
+-spec md5_mac(iodata(), iodata()) -> binary().
+-spec md5_mac_96(iodata(), iodata()) -> binary().
+
md5_mac(Key, Data) ->
md5_mac_n(Key,Data,16).
@@ -186,6 +209,9 @@ md5_mac_n(_Key,_Data,_MacSz) -> ?nif_stub.
%%
%% SHA_MAC
%%
+-spec sha_mac(iodata(), iodata()) -> binary().
+-spec sha_mac_96(iodata(), iodata()) -> binary().
+
sha_mac(Key, Data) ->
sha_mac_n(Key,Data,20).
@@ -201,6 +227,9 @@ sha_mac_n(_Key,_Data,_MacSz) -> ?nif_stub.
%%
%% DES - in cipher block chaining mode (CBC)
%%
+-spec des_cbc_encrypt(iodata(), binary(), iodata()) -> binary().
+-spec des_cbc_decrypt(iodata(), binary(), iodata()) -> binary().
+
des_cbc_encrypt(Key, IVec, Data) ->
des_cbc_crypt(Key, IVec, Data, true).
@@ -215,6 +244,8 @@ des_cbc_crypt(_Key, _IVec, _Data, _IsEncrypt) -> ?nif_stub.
%% Returns the IVec to be used in the next iteration of
%% des_cbc_[encrypt|decrypt].
%%
+-spec des_cbc_ivec(iodata()) -> binary().
+
des_cbc_ivec(Data) when is_binary(Data) ->
{_, IVec} = split_binary(Data, size(Data) - 8),
IVec;
@@ -224,6 +255,9 @@ des_cbc_ivec(Data) when is_list(Data) ->
%%
%% DES - in electronic codebook mode (ECB)
%%
+-spec des_ecb_encrypt(iodata(), iodata()) -> binary().
+-spec des_ecb_decrypt(iodata(), iodata()) -> binary().
+
des_ecb_encrypt(Key, Data) ->
des_ecb_crypt(Key, Data, true).
des_ecb_decrypt(Key, Data) ->
@@ -233,6 +267,11 @@ des_ecb_crypt(_Key, _Data, _IsEncrypt) -> ?nif_stub.
%%
%% DES3 - in cipher block chaining mode (CBC)
%%
+-spec des3_cbc_encrypt(iodata(), iodata(), iodata(), binary(), iodata()) ->
+ binary().
+-spec des3_cbc_decrypt(iodata(), iodata(), iodata(), binary(), iodata()) ->
+ binary().
+
des3_cbc_encrypt(Key1, Key2, Key3, IVec, Data) ->
des_ede3_cbc_encrypt(Key1, Key2, Key3, IVec, Data).
des_ede3_cbc_encrypt(Key1, Key2, Key3, IVec, Data) ->
@@ -248,6 +287,14 @@ des_ede3_cbc_crypt(_Key1, _Key2, _Key3, _IVec, _Data, _IsEncrypt) -> ?nif_stub.
%%
%% Blowfish
%%
+-spec blowfish_ecb_encrypt(iodata(), iodata()) -> binary().
+-spec blowfish_ecb_decrypt(iodata(), iodata()) -> binary().
+-spec blowfish_cbc_encrypt(iodata(), binary(), iodata()) -> binary().
+-spec blowfish_cbc_decrypt(iodata(), binary(), iodata()) -> binary().
+-spec blowfish_cfb64_encrypt(iodata(), binary(), iodata()) -> binary().
+-spec blowfish_cfb64_decrypt(iodata(), binary(), iodata()) -> binary().
+-spec blowfish_ofb64_encrypt(iodata(), binary(), iodata()) -> binary().
+
blowfish_ecb_encrypt(Key, Data) ->
bf_ecb_crypt(Key,Data, true).
@@ -277,6 +324,9 @@ blowfish_ofb64_encrypt(_Key, _IVec, _Data) -> ?nif_stub.
%%
%% AES in cipher feedback mode (CFB)
%%
+-spec aes_cfb_128_encrypt(iodata(), binary(), iodata()) -> binary().
+-spec aes_cfb_128_decrypt(iodata(), binary(), iodata()) -> binary().
+
aes_cfb_128_encrypt(Key, IVec, Data) ->
aes_cfb_128_crypt(Key, IVec, Data, true).
@@ -289,6 +339,10 @@ aes_cfb_128_crypt(_Key, _IVec, _Data, _IsEncrypt) -> ?nif_stub.
%%
%% RAND - pseudo random numbers using RN_ functions in crypto lib
%%
+-spec rand_bytes(non_neg_integer()) -> binary().
+-spec rand_uniform(crypto_integer(), crypto_integer()) ->
+ crypto_integer().
+
rand_bytes(_Bytes) -> ?nif_stub.
rand_bytes(_Bytes, _Topmask, _Bottommask) -> ?nif_stub.
@@ -331,9 +385,16 @@ mod_exp_nif(_Base,_Exp,_Mod) -> ?nif_stub.
%%
%% DSS, RSA - verify
%%
+-spec dss_verify(binary(), binary(), [binary()]) -> boolean().
+-spec dss_verify(dss_digest_type(), binary(), binary(), [binary()]) -> boolean().
+-spec rsa_verify(binary(), binary(), [binary()]) -> boolean().
+-spec rsa_verify(rsa_digest_type(), binary(), binary(), [binary()]) ->
+ boolean().
%% Key = [P,Q,G,Y] P,Q,G=DSSParams Y=PublicKey
-dss_verify(_Data,_Signature,_Key) -> ?nif_stub.
+dss_verify(Data,Signature,Key) ->
+ dss_verify(sha, Data, Signature, Key).
+dss_verify(_Type,_Data,_Signature,_Key) -> ?nif_stub.
% Key = [E,N] E=PublicExponent N=PublicModulus
rsa_verify(Data,Signature,Key) ->
@@ -345,13 +406,20 @@ rsa_verify(_Type,_Data,_Signature,_Key) -> ?nif_stub.
%% DSS, RSA - sign
%%
%% Key = [P,Q,G,X] P,Q,G=DSSParams X=PrivateKey
-dss_sign(Data, Key) ->
- case dss_sign_nif(Data,Key) of
+-spec dss_sign(binary(), [binary()]) -> binary().
+-spec dss_sign(dss_digest_type(), binary(), [binary()]) -> binary().
+-spec rsa_sign(binary(), [binary()]) -> binary().
+-spec rsa_sign(rsa_digest_type(), binary(), [binary()]) -> binary().
+
+dss_sign(Data,Key) ->
+ dss_sign(sha,Data,Key).
+dss_sign(Type, Data, Key) ->
+ case dss_sign_nif(Type,Data,Key) of
error -> erlang:error(badkey, [Data, Key]);
Sign -> Sign
end.
-dss_sign_nif(_Data,_Key) -> ?nif_stub.
+dss_sign_nif(_Type,_Data,_Key) -> ?nif_stub.
%% Key = [E,N,D] E=PublicExponent N=PublicModulus D=PrivateExponent
rsa_sign(Data,Key) ->
@@ -368,6 +436,16 @@ rsa_sign_nif(_Type,_Data,_Key) -> ?nif_stub.
%%
%% rsa_public_encrypt
%% rsa_private_decrypt
+-type rsa_padding() :: 'rsa_pkcs1_padding' | 'rsa_pkcs1_oaep_padding' | 'rsa_no_padding'.
+
+-spec rsa_public_encrypt(binary(), [binary()], rsa_padding()) ->
+ binary().
+-spec rsa_public_decrypt(binary(), [binary()], rsa_padding()) ->
+ binary().
+-spec rsa_private_encrypt(binary(), [binary()], rsa_padding()) ->
+ binary().
+-spec rsa_private_decrypt(binary(), [binary()], rsa_padding()) ->
+ binary().
%% Binary, Key = [E,N]
rsa_public_encrypt(BinMesg, Key, Padding) ->
@@ -409,6 +487,14 @@ rsa_public_decrypt(BinMesg, Key, Padding) ->
%%
%% AES - with 128 or 256 bit key in cipher block chaining mode (CBC)
%%
+-spec aes_cbc_128_encrypt(iodata(), binary(), iodata()) ->
+ binary().
+-spec aes_cbc_128_decrypt(iodata(), binary(), iodata()) ->
+ binary().
+-spec aes_cbc_256_encrypt(iodata(), binary(), iodata()) ->
+ binary().
+-spec aes_cbc_256_decrypt(iodata(), binary(), iodata()) ->
+ binary().
aes_cbc_128_encrypt(Key, IVec, Data) ->
aes_cbc_crypt(Key, IVec, Data, true).
@@ -443,11 +529,15 @@ aes_cbc_ivec(Data) when is_list(Data) ->
%% NB doesn't check that they are the same size, just concatenates
%% them and sends them to the driver
%%
+-spec exor(iodata(), iodata()) -> binary().
+
exor(_A, _B) -> ?nif_stub.
%%
%% RC4 - symmetric stream cipher
%%
+-spec rc4_encrypt(iodata(), iodata()) -> binary().
+
rc4_encrypt(_Key, _Data) -> ?nif_stub.
rc4_set_key(_Key) -> ?nif_stub.
rc4_encrypt_with_state(_State, _Data) -> ?nif_stub.
@@ -490,6 +580,10 @@ dh_check([_Prime,_Gen]) -> ?nif_stub.
%% DHParameters = [P (Prime)= mpint(), G(Generator) = mpint()]
%% PrivKey = mpint()
+-spec dh_generate_key([binary()]) -> {binary(),binary()}.
+-spec dh_generate_key(binary()|undefined, [binary()]) ->
+ {binary(),binary()}.
+
dh_generate_key(DHParameters) ->
dh_generate_key(undefined, DHParameters).
dh_generate_key(PrivateKey, DHParameters) ->
@@ -502,6 +596,8 @@ dh_generate_key_nif(_PrivateKey, _DHParameters) -> ?nif_stub.
%% DHParameters = [P (Prime)= mpint(), G(Generator) = mpint()]
%% MyPrivKey, OthersPublicKey = mpint()
+-spec dh_compute_key(binary(), binary(), [binary()]) -> binary().
+
dh_compute_key(OthersPublicKey, MyPrivateKey, DHParameters) ->
case dh_compute_key_nif(OthersPublicKey,MyPrivateKey,DHParameters) of
error -> erlang:error(computation_failed, [OthersPublicKey,MyPrivateKey,DHParameters]);
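
The comments and specs above describe the mpint()-based Diffie-Hellman API. A small hedged sketch of a complete exchange, assuming the documented {PublicKey, PrivateKey} return order of dh_generate_key/1, with DHParams standing for [P, G] in mpint() form (a placeholder, not a name from the patch):

    {PubA, PrivA} = crypto:dh_generate_key(DHParams),
    {PubB, PrivB} = crypto:dh_generate_key(DHParams),

    %% Both sides derive the same shared secret from the other's public key.
    Secret = crypto:dh_compute_key(PubB, PrivA, DHParams),
    Secret = crypto:dh_compute_key(PubA, PrivB, DHParams).
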
diff --git a/lib/crypto/test/crypto_SUITE.erl b/lib/crypto/test/crypto_SUITE.erl
index 08d7a0ce99..576949d38d 100644
--- a/lib/crypto/test/crypto_SUITE.erl
+++ b/lib/crypto/test/crypto_SUITE.erl
@@ -770,18 +770,18 @@ dsa_verify_test(Config) when is_list(Config) ->
crypto:mpint(Key)
],
- ?line m(crypto:dss_verify(sized_binary(Msg), sized_binary(SigBlob),
+ ?line m(my_dss_verify(sized_binary(Msg), sized_binary(SigBlob),
ValidKey), true),
BadMsg = one_bit_wrong(Msg),
- ?line m(crypto:dss_verify(sized_binary(BadMsg), sized_binary(SigBlob),
+ ?line m(my_dss_verify(sized_binary(BadMsg), sized_binary(SigBlob),
ValidKey), false),
BadSig = one_bit_wrong(SigBlob),
- ?line m(crypto:dss_verify(sized_binary(Msg), sized_binary(BadSig),
+ ?line m(my_dss_verify(sized_binary(Msg), sized_binary(BadSig),
ValidKey), false),
SizeErr = size(SigBlob) - 13,
- BadArg = (catch crypto:dss_verify(sized_binary(Msg), <<SizeErr:32, SigBlob/binary>>,
+ BadArg = (catch my_dss_verify(sized_binary(Msg), <<SizeErr:32, SigBlob/binary>>,
ValidKey)),
?line m(element(1,element(2,BadArg)), badarg),
@@ -791,9 +791,12 @@ dsa_verify_test(Config) when is_list(Config) ->
crypto:mpint(Key+17)
],
- ?line m(crypto:dss_verify(sized_binary(Msg), sized_binary(SigBlob),
+ ?line m(my_dss_verify(sized_binary(Msg), sized_binary(SigBlob),
InValidKey), false).
+
+one_bit_wrong(List) when is_list(List) ->
+ lists:map(fun(Bin) -> one_bit_wrong(Bin) end, List);
one_bit_wrong(Bin) ->
Half = size(Bin) div 2,
<<First:Half/binary, Byte:8, Last/binary>> = Bin,
@@ -843,15 +846,15 @@ dsa_sign_test(Config) when is_list(Config) ->
ParamG = 18320614775012672475365915366944922415598782131828709277168615511695849821411624805195787607930033958243224786899641459701930253094446221381818858674389863050420226114787005820357372837321561754462061849169568607689530279303056075793886577588606958623645901271866346406773590024901668622321064384483571751669,
Params = [crypto:mpint(ParamP), crypto:mpint(ParamQ), crypto:mpint(ParamG)],
- ?line Sig1 = crypto:dss_sign(sized_binary(Msg), Params ++ [crypto:mpint(PrivKey)]),
+ ?line Sig1 = my_dss_sign(sized_binary(Msg), Params ++ [crypto:mpint(PrivKey)]),
- ?line m(crypto:dss_verify(sized_binary(Msg), sized_binary(Sig1),
+ ?line m(my_dss_verify(sized_binary(Msg), Sig1,
Params ++ [crypto:mpint(PubKey)]), true),
- ?line m(crypto:dss_verify(sized_binary(one_bit_wrong(Msg)), sized_binary(Sig1),
+ ?line m(my_dss_verify(sized_binary(one_bit_wrong(Msg)), Sig1,
Params ++ [crypto:mpint(PubKey)]), false),
- ?line m(crypto:dss_verify(sized_binary(Msg), sized_binary(one_bit_wrong(Sig1)),
+ ?line m(my_dss_verify(sized_binary(Msg), one_bit_wrong(Sig1),
Params ++ [crypto:mpint(PubKey)]), false),
%%?line Bad = crypto:dss_sign(sized_binary(Msg), [Params, crypto:mpint(PubKey)]),
@@ -1132,3 +1135,24 @@ zero_bin(N) when is_integer(N) ->
<<0:N8/integer>>;
zero_bin(B) when is_binary(B) ->
zero_bin(size(B)).
+
+my_dss_verify(Data,[Sign|Tail],Key) ->
+ Res = my_dss_verify(Data,sized_binary(Sign),Key),
+ case Tail of
+ [] -> Res;
+ _ -> ?line Res = my_dss_verify(Data,Tail,Key)
+ end;
+my_dss_verify(Data,Sign,Key) ->
+ ?line Res = crypto:dss_verify(Data, Sign, Key),
+ ?line Res = crypto:dss_verify(sha, Data, Sign, Key),
+ ?line <<_:32,Raw/binary>> = Data,
+ ?line Res = crypto:dss_verify(none, crypto:sha(Raw), Sign, Key),
+ Res.
+
+my_dss_sign(Data,Key) ->
+ ?line S1 = crypto:dss_sign(Data, Key),
+ ?line S2 = crypto:dss_sign(sha, Data, Key),
+ ?line <<_:32,Raw/binary>> = Data,
+ ?line S3 = crypto:dss_sign(none, crypto:sha(Raw), Key),
+ [S1,S2,S3].
+
diff --git a/lib/debugger/src/dbg_iload.erl b/lib/debugger/src/dbg_iload.erl
index 1216338006..ec54c646c8 100644
--- a/lib/debugger/src/dbg_iload.erl
+++ b/lib/debugger/src/dbg_iload.erl
@@ -1,29 +1,25 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1998-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1998-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(dbg_iload).
-%% External exports
-export([load_mod/4]).
-%% Internal exports
--export([load_mod1/4]).
-
%%====================================================================
%% External exports
%%====================================================================
@@ -36,29 +32,29 @@
%% Db = ETS identifier
%% Load a new module into the database.
%%
-%% We want the loading of a module to be syncronous so no other
+%% We want the loading of a module to be synchronous so that no other
%% process tries to interpret code in a module not being completely
%% loaded. This is achieved as this function is called from
%% dbg_iserver. We are suspended until the module has been loaded.
%%--------------------------------------------------------------------
+-spec load_mod(Mod, file:filename(), binary(), ets:tid()) ->
+ {'ok', Mod} when is_subtype(Mod, atom()).
+
load_mod(Mod, File, Binary, Db) ->
Flag = process_flag(trap_exit, true),
- Pid = spawn_link(?MODULE, load_mod1, [Mod, File, Binary, Db]),
+ Pid = spawn_link(fun () -> load_mod1(Mod, File, Binary, Db) end),
receive
{'EXIT', Pid, What} ->
process_flag(trap_exit, Flag),
What
end.
-%%====================================================================
-%% Internal exports
-%%====================================================================
+-spec load_mod1(atom(), file:filename(), binary(), ets:tid()) -> no_return().
load_mod1(Mod, File, Binary, Db) ->
store_module(Mod, File, Binary, Db),
exit({ok, Mod}).
-
%%====================================================================
%% Internal functions
%%====================================================================
@@ -84,7 +80,7 @@ store_module(Mod, File, Binary, Db) ->
Attr = store_forms(Forms, Mod, Db, Exp, []),
erase(mod_md5),
erase(current_function),
-% store_funs(Db, Mod),
+ %% store_funs(Db, Mod),
erase(vcount),
erase(funs),
erase(fun_count),
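
The load_mod/4 change above replaces the exported load_mod1/4 spawn with an anonymous fun, but the synchronous-load pattern stays the same: trap exits, spawn a linked worker, and read the result out of the worker's exit reason. A minimal standalone sketch of that pattern (illustrative only, not code from the patch):

    sync_call(Fun) ->
        Flag = process_flag(trap_exit, true),
        Pid = spawn_link(fun() -> exit(Fun()) end),
        receive
            {'EXIT', Pid, Result} ->
                process_flag(trap_exit, Flag),
                Result
        end.
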
diff --git a/lib/debugger/src/dbg_ui_trace_win.erl b/lib/debugger/src/dbg_ui_trace_win.erl
index dbf93c7f45..c6f041a63d 100644
--- a/lib/debugger/src/dbg_ui_trace_win.erl
+++ b/lib/debugger/src/dbg_ui_trace_win.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(dbg_ui_trace_win).
@@ -106,7 +106,7 @@ create_win(GS, Title, TraceWin, Menus) ->
gs:read('CodeArea', height) +
gs:read('RB1', height) +
gs:read('ButtonArea', height) +
- max(gs:read('EvalArea', height),
+ erlang:max(gs:read('EvalArea', height),
gs:read('BindArea', height)) +
gs:read('RB2', height) +
gs:read('TraceArea', height)}),
@@ -1032,7 +1032,7 @@ config_v() ->
gs:config('RB3', {y,Y3}),
gs:config('BindArea', {y,Y3}),
- Y4 = Y3 + max(gs:read('EvalArea', height),
+ Y4 = Y3 + erlang:max(gs:read('EvalArea', height),
gs:read('BindArea', height)),
gs:config('RB2', {y,Y4}),
@@ -1061,7 +1061,7 @@ configure(WinInfo, NewW, NewH) ->
OldH = 25+gs:read('CodeArea', height)+
gs:read('RB1', height)+
gs:read('ButtonArea', height)+
- max(gs:read('EvalArea', height), gs:read('BindArea', height))+
+ erlang:max(gs:read('EvalArea', height), gs:read('BindArea', height))+
gs:read('RB2', height)+
gs:read('TraceArea', height),
@@ -1112,7 +1112,7 @@ configure_widths(OldW, NewW, Flags) ->
{_Bu,Ev,Bi,_Tr} = Flags,
%% Difference between old and new width, considering min window width
- Diff = abs(max(OldW,330)-max(NewW,330)),
+ Diff = abs(erlang:max(OldW,330)-erlang:max(NewW,330)),
%% Check how much the frames can be resized in reality
Limits = if
@@ -1166,7 +1166,7 @@ configure_heights(OldH, NewH, Flags) ->
%% Difference between old and new height, considering min win height
MinH = min_height(Flags),
- Diff = abs(max(OldH,MinH)-max(NewH,MinH)),
+ Diff = abs(erlang:max(OldH,MinH)-erlang:max(NewH,MinH)),
%% Check how much the frames can be resized in reality
{T,Sf,Ff} = if
@@ -1392,7 +1392,7 @@ rblimits('RB1',_W,H) ->
H-112;
_ ->
Y = gs:read('RB2',y),
- max(Min,Y-140)
+ erlang:max(Min,Y-140)
end,
{Min,Max};
@@ -1403,7 +1403,7 @@ rblimits('RB2',_W,H) ->
%% Min is decided by a minimum distance to 'RB1'
Y = gs:read('RB1',y),
- Min = min(Max,Y+140),
+ Min = erlang:min(Max,Y+140),
{Min,Max};
@@ -1412,13 +1412,7 @@ rblimits('RB3',W,_H) ->
%% Neither CodeArea nor BindArea should occupy
%% less than 1/3 of the total window width and EvalFrame should
%% be at least 289 pixels wide
- {max(round(W/3),289),round(2*W/3)}.
-
-max(A, B) when A>B -> A;
-max(_A, B) -> B.
-
-min(A, B) when A<B -> A;
-min(_A, B) -> B.
+ {erlang:max(round(W/3),289),round(2*W/3)}.
%%====================================================================
diff --git a/lib/debugger/src/dbg_ui_win.erl b/lib/debugger/src/dbg_ui_win.erl
index 9840aa54da..74ff2503ab 100644
--- a/lib/debugger/src/dbg_ui_win.erl
+++ b/lib/debugger/src/dbg_ui_win.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2002-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2002-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(dbg_ui_win).
@@ -76,13 +76,10 @@ min_size(Font, Strings, MinW, MinH) ->
min_size(GS, Font, [String|Strings], MinW, MinH) ->
{W, H} = gs:read(GS, {font_wh, {Font, String}}),
- min_size(GS, Font, Strings, max(MinW, W), max(MinH, H));
+ min_size(GS, Font, Strings, erlang:max(MinW, W), erlang:max(MinH, H));
min_size(_GS, _Font, [], W, H) ->
{W, H}.
-max(X, Y) when X>Y -> X;
-max(_X, Y) -> Y.
-
%%--------------------------------------------------------------------
%% create_menus(MenuBar, [Menu])
%% MenuBar = gsobj()
diff --git a/lib/debugger/src/dbg_wx_trace_win.erl b/lib/debugger/src/dbg_wx_trace_win.erl
index 3799acdc1b..2b4a1164ad 100755
--- a/lib/debugger/src/dbg_wx_trace_win.erl
+++ b/lib/debugger/src/dbg_wx_trace_win.erl
@@ -632,7 +632,7 @@ handle_event(#wx{id=?SASH_CODE, event=#wxSash{dragRect={_X,_Y,_W,H}}}, Wi) ->
Change = CH - H,
ChangeH = fun(Item) ->
{ItemW, ItemH} = wxSizerItem:getMinSize(Item),
- wxSizerItem:setInitSize(Item, ItemW, max(ItemH+Change,-1))
+ wxSizerItem:setInitSize(Item, ItemW, erlang:max(ItemH+Change,-1))
end,
if Enable ->
{IW, IH} = wxSizer:getMinSize(InfoSzr),
@@ -694,7 +694,7 @@ handle_event(#wx{id=?SASH_TRACE, event=#wxSash{dragRect={_X,_Y,_W,H}}}, Wi) ->
true -> %% Change the Eval and Bindings area
ChangeH = fun(Item) ->
{ItemW, ItemH} = wxSizerItem:getMinSize(Item),
- wxSizerItem:setInitSize(Item, ItemW, max(ItemH+Change,-1))
+ wxSizerItem:setInitSize(Item, ItemW, erlang:max(ItemH+Change,-1))
end,
{IW, IH} = wxSizer:getMinSize(InfoSzr),
[ChangeH(Child) || Child <- wxSizer:getChildren(InfoSzr)],
@@ -1021,9 +1021,3 @@ helpwin(Type, WinInfo = #winInfo{sg=Sg =#sub{in=Sa}}) ->
search -> wxWindow:setFocus(Sa#sa.search)
end,
Wi.
-
-max(X,Y) when X > Y -> X;
-max(_,Y) -> Y.
-
-
-
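
dbg_ui_trace_win, dbg_ui_win and dbg_wx_trace_win above all drop their hand-written max/min helpers in favour of explicit erlang:max/2 and erlang:min/2 calls, presumably because max/2 and min/2 are auto-imported in R14 and unqualified calls would otherwise clash with the local definitions. A hedged sketch of the alternative, had the local helpers been kept (hypothetical module, not part of the patch):

    -module(minmax_demo).
    -export([clamp/3]).
    %% Silence the clash with the auto-imported max/2 and min/2.
    -compile({no_auto_import,[max/2,min/2]}).

    max(A, B) when A > B -> A;
    max(_, B) -> B.

    min(A, B) when A < B -> A;
    min(_, B) -> B.

    clamp(X, Lo, Hi) ->
        %% Unqualified calls reach the local helpers; the BIFs remain
        %% available through the erlang: prefix.
        min(Hi, max(Lo, X)).
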
diff --git a/lib/debugger/vsn.mk b/lib/debugger/vsn.mk
index 5ce37a6bde..f33d66b5cf 100644
--- a/lib/debugger/vsn.mk
+++ b/lib/debugger/vsn.mk
@@ -1 +1 @@
-DEBUGGER_VSN = 3.2.2
+DEBUGGER_VSN = 3.2.3
diff --git a/lib/dialyzer/RELEASE_NOTES b/lib/dialyzer/RELEASE_NOTES
index b668142327..62b0c92f97 100644
--- a/lib/dialyzer/RELEASE_NOTES
+++ b/lib/dialyzer/RELEASE_NOTES
@@ -3,6 +3,19 @@
(in reversed chronological order)
==============================================================================
+Version 2.3.0 (in Erlang/OTP R14)
+---------------------------------
+ - Dialyzer properly supports the new attribute -export_type and checks
+ that remote types only refer to exported types. A warning is produced
+ if some files/applications refer to types defined in modules which are
+ neither in the PLT nor in the analyzed applications.
+ - Support for detecting data races involving whereis/1 and unregister/1.
+ - More precise identification of the reason(s) why a record construction
+ violates the types declared for its fields.
+ - Fixed bug in the handling of the 'or' guard.
+ - Better handling of the erlang:element/2 BIF.
+ - Complete handling of Erlang BIFs.
+
Version 2.2.0 (in Erlang/OTP R13B04)
------------------------------------
- Much better support for opaque types (thanks to Manouk Manoukian).
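The -export_type attribute mentioned in the 2.3.0 notes above lets a module declare which of its types may be referenced remotely as Mod:Type(). A minimal sketch, with made-up module and type names:

    -module(queue_wrapper).
    -export([new/0]).
    -export_type([t/0]).   %% only t/0 is visible as queue_wrapper:t() to other modules

    -opaque t() :: {queue_wrapper, [term()]}.

    -spec new() -> t().
    new() -> {queue_wrapper, []}.

If another analyzed module referred to queue_wrapper:t() while queue_wrapper was neither in the PLT nor among the analyzed files, the warning described in the notes above would be expected to flag it as an unknown type.
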
diff --git a/lib/dialyzer/src/dialyzer.erl b/lib/dialyzer/src/dialyzer.erl
index 3b7b68e8c4..d8fd073ca6 100644
--- a/lib/dialyzer/src/dialyzer.erl
+++ b/lib/dialyzer/src/dialyzer.erl
@@ -33,8 +33,8 @@
%% NOTE: Only functions exported by this module are available to
%% other applications.
%%--------------------------------------------------------------------
--export([plain_cl/0,
- run/1,
+-export([plain_cl/0,
+ run/1,
gui/0,
gui/1,
plt_info/1,
@@ -55,7 +55,7 @@
plain_cl() ->
case dialyzer_cl_parse:start() of
- {check_init, Opts} ->
+ {check_init, Opts} ->
cl_halt(cl_check_init(Opts), Opts);
{plt_info, Opts} ->
cl_halt(cl_print_plt_info(Opts), Opts);
@@ -72,7 +72,7 @@ plain_cl() ->
false ->
gui_halt(internal_gui(Type, Opts), Opts)
end;
- {cl, Opts} ->
+ {cl, Opts} ->
case Opts#options.check_plt of
true ->
case cl_check_init(Opts#options{get_warnings = false}) of
@@ -82,7 +82,7 @@ plain_cl() ->
false ->
cl_halt(cl(Opts), Opts)
end;
- {error, Msg} ->
+ {error, Msg} ->
cl_error(Msg)
end.
@@ -146,7 +146,7 @@ cl(Opts) ->
-spec run(dial_options()) -> [dial_warning()].
run(Opts) ->
- try dialyzer_options:build([{report_mode, quiet},
+ try dialyzer_options:build([{report_mode, quiet},
{erlang_mode, true}|Opts]) of
{error, Msg} ->
throw({dialyzer_error, Msg});
@@ -161,7 +161,7 @@ run(Opts) ->
throw({dialyzer_error, ErrorMsg1})
end
catch
- throw:{dialyzer_error, ErrorMsg} ->
+ throw:{dialyzer_error, ErrorMsg} ->
erlang:error({dialyzer_error, lists:flatten(ErrorMsg)})
end.
@@ -226,7 +226,7 @@ plt_info(Plt) ->
%%-----------
doit(F) ->
- try
+ try
{ok, F()}
catch
throw:{dialyzer_error, Msg} ->
@@ -241,9 +241,9 @@ gui_halt(R, Opts) ->
-spec cl_halt({'ok',dial_ret()} | {'error',string()}, #options{}) -> no_return().
-cl_halt({ok, R = ?RET_NOTHING_SUSPICIOUS}, #options{report_mode = quiet}) ->
+cl_halt({ok, R = ?RET_NOTHING_SUSPICIOUS}, #options{report_mode = quiet}) ->
halt(R);
-cl_halt({ok, R = ?RET_DISCREPANCIES}, #options{report_mode = quiet}) ->
+cl_halt({ok, R = ?RET_DISCREPANCIES}, #options{report_mode = quiet}) ->
halt(R);
cl_halt({ok, R = ?RET_NOTHING_SUSPICIOUS}, #options{}) ->
io:put_chars("done (passed successfully)\n"),
@@ -267,7 +267,7 @@ cl_check_log(Output) ->
-spec format_warning(dial_warning()) -> string().
-format_warning({_Tag, {File, Line}, Msg}) when is_list(File),
+format_warning({_Tag, {File, Line}, Msg}) when is_list(File),
is_integer(Line) ->
BaseName = filename:basename(File),
String = lists:flatten(message_to_string(Msg)),
@@ -290,7 +290,7 @@ message_to_string({app_call, [M, F, Args, Culprit, ExpectedType, FoundType]}) ->
message_to_string({bin_construction, [Culprit, Size, Seg, Type]}) ->
io_lib:format("Binary construction will fail since the ~s field ~s in"
" segment ~s has type ~s\n", [Culprit, Size, Seg, Type]);
-message_to_string({call, [M, F, Args, ArgNs, FailReason,
+message_to_string({call, [M, F, Args, ArgNs, FailReason,
SigArgs, SigRet, Contract]}) ->
io_lib:format("The call ~w:~w~s ", [M, F, Args]) ++
call_or_apply_to_string(ArgNs, FailReason, SigArgs, SigRet, Contract);
@@ -329,9 +329,9 @@ message_to_string({no_return, [Type|Name]}) ->
only_normal -> NameString ++ "has no local return\n";
both -> NameString ++ "has no local return\n"
end;
-message_to_string({record_constr, [Types, Name]}) ->
+message_to_string({record_constr, [RecConstr, FieldDiffs]}) ->
io_lib:format("Record construction ~s violates the"
- " declared type for #~w{}\n", [Types, Name]);
+ " declared type of field ~s\n", [RecConstr, FieldDiffs]);
message_to_string({record_constr, [Name, Field, Type]}) ->
io_lib:format("Record construction violates the declared type for #~w{}"
" since ~s cannot be of type ~s\n", [Name, Field, Type]);
@@ -358,7 +358,7 @@ message_to_string({contract_diff, [M, F, _A, Contract, Sig]}) ->
[M, F, Contract, M, F, Sig]);
message_to_string({contract_subtype, [M, F, _A, Contract, Sig]}) ->
io_lib:format("Type specification ~w:~w~s"
- " is a subtype of the success typing: ~w:~w~s\n",
+ " is a subtype of the success typing: ~w:~w~s\n",
[M, F, Contract, M, F, Sig]);
message_to_string({contract_supertype, [M, F, _A, Contract, Sig]}) ->
io_lib:format("Type specification ~w:~w~s"
@@ -427,7 +427,7 @@ message_to_string({spec_missing, [B, F, A]}) ->
%% Auxiliary functions below
%%-----------------------------------------------------------------------------
-call_or_apply_to_string(ArgNs, FailReason, SigArgs, SigRet,
+call_or_apply_to_string(ArgNs, FailReason, SigArgs, SigRet,
{IsOverloaded, Contract}) ->
PositionString = form_position_string(ArgNs),
case FailReason of
@@ -442,7 +442,7 @@ call_or_apply_to_string(ArgNs, FailReason, SigArgs, SigRet,
" from the success typing arguments: ~s\n",
[PositionString, SigArgs])
end;
- only_contract ->
+ only_contract ->
case (ArgNs =:= []) orelse IsOverloaded of
true ->
%% We do not know which arguments caused the failure
@@ -494,7 +494,7 @@ form_position_string(ArgNs) ->
case ArgNs of
[] -> "";
[N1] -> ordinal(N1);
- [_,_|_] ->
+ [_,_|_] ->
[Last|Prevs] = lists:reverse(ArgNs),
", " ++ Head = lists:flatten([io_lib:format(", ~s",[ordinal(N)]) ||
N <- lists:reverse(Prevs)]),
diff --git a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
index ab1bbe5ade..3438cc8c7e 100644
--- a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
+++ b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
@@ -96,6 +96,9 @@ loop(#server_state{parent = Parent, legal_warnings = LegalWarnings} = State,
end;
{AnalPid, ext_calls, NewExtCalls} ->
loop(State, Analysis, NewExtCalls);
+ {AnalPid, ext_types, ExtTypes} ->
+ send_ext_types(Parent, ExtTypes),
+ loop(State, Analysis, ExtCalls);
{AnalPid, unknown_behaviours, UnknownBehaviour} ->
send_unknown_behaviours(Parent, UnknownBehaviour),
loop(State, Analysis, ExtCalls);
@@ -123,8 +126,7 @@ analysis_start(Parent, Analysis) ->
parent = Parent,
start_from = Analysis#analysis.start_from,
use_contracts = Analysis#analysis.use_contracts,
- behaviours = {Analysis#analysis.behaviours_chk,
- []}
+ behaviours = {Analysis#analysis.behaviours_chk, []}
},
Files = ordsets:from_list(Analysis#analysis.files),
{Callgraph, NoWarn, TmpCServer0} = compile_and_store(Files, State),
@@ -132,22 +134,36 @@ analysis_start(Parent, Analysis) ->
NewCServer =
try
NewRecords = dialyzer_codeserver:get_temp_records(TmpCServer0),
- OldRecords = dialyzer_plt:get_types(State#analysis_state.plt),
+ NewExpTypes = dialyzer_codeserver:get_temp_exported_types(TmpCServer0),
+ OldRecords = dialyzer_plt:get_types(Plt),
+ OldExpTypes0 = dialyzer_plt:get_exported_types(Plt),
MergedRecords = dialyzer_utils:merge_records(NewRecords, OldRecords),
+ RemMods =
+ [case Analysis#analysis.start_from of
+ byte_code -> list_to_atom(filename:basename(F, ".beam"));
+ src_code -> list_to_atom(filename:basename(F, ".erl"))
+ end || F <- Files],
+ OldExpTypes1 = dialyzer_utils:sets_filter(RemMods, OldExpTypes0),
+ MergedExpTypes = sets:union(NewExpTypes, OldExpTypes1),
TmpCServer1 = dialyzer_codeserver:set_temp_records(MergedRecords, TmpCServer0),
- TmpCServer2 = dialyzer_utils:process_record_remote_types(TmpCServer1),
- dialyzer_contracts:process_contract_remote_types(TmpCServer2)
+ TmpCServer2 =
+ dialyzer_codeserver:insert_temp_exported_types(MergedExpTypes,
+ TmpCServer1),
+ TmpCServer3 = dialyzer_utils:process_record_remote_types(TmpCServer2),
+ dialyzer_contracts:process_contract_remote_types(TmpCServer3)
catch
throw:{error, _ErrorMsg} = Error -> exit(Error)
end,
- NewPlt = dialyzer_plt:insert_types(Plt, dialyzer_codeserver:get_records(NewCServer)),
- State0 = State#analysis_state{plt = NewPlt},
+ NewPlt0 = dialyzer_plt:insert_types(Plt, dialyzer_codeserver:get_records(NewCServer)),
+ ExpTypes = dialyzer_codeserver:get_exported_types(NewCServer),
+ NewPlt1 = dialyzer_plt:insert_exported_types(NewPlt0, ExpTypes),
+ State0 = State#analysis_state{plt = NewPlt1},
dump_callgraph(Callgraph, State0, Analysis),
State1 = State0#analysis_state{codeserver = NewCServer},
State2 = State1#analysis_state{no_warn_unused = NoWarn},
%% Remove all old versions of the files being analyzed
AllNodes = dialyzer_callgraph:all_nodes(Callgraph),
- Plt1 = dialyzer_plt:delete_list(NewPlt, AllNodes),
+ Plt1 = dialyzer_plt:delete_list(NewPlt1, AllNodes),
Exports = dialyzer_codeserver:get_exports(NewCServer),
NewCallgraph =
case Analysis#analysis.race_detection of
@@ -155,6 +171,7 @@ analysis_start(Parent, Analysis) ->
false -> Callgraph
end,
State3 = analyze_callgraph(NewCallgraph, State2#analysis_state{plt = Plt1}),
+ rcv_and_send_ext_types(Parent),
NonExports = sets:subtract(sets:from_list(AllNodes), Exports),
NonExportsList = sets:to_list(NonExports),
Plt3 = dialyzer_plt:delete_list(State3#analysis_state.plt, NonExportsList),
@@ -371,14 +388,28 @@ compile_byte(File, Callgraph, CServer, UseContracts) ->
store_core(Mod, Core, NoWarn, Callgraph, CServer) ->
Exp = get_exports_from_core(Core),
+ OldExpTypes = dialyzer_codeserver:get_temp_exported_types(CServer),
+ NewExpTypes = get_exported_types_from_core(Core),
+ MergedExpTypes = sets:union(NewExpTypes, OldExpTypes),
CServer1 = dialyzer_codeserver:insert_exports(Exp, CServer),
- {LabeledCore, CServer2} = label_core(Core, CServer1),
- store_code_and_build_callgraph(Mod, LabeledCore, Callgraph, CServer2, NoWarn).
+ CServer2 = dialyzer_codeserver:insert_temp_exported_types(MergedExpTypes,
+ CServer1),
+ {LabeledCore, CServer3} = label_core(Core, CServer2),
+ store_code_and_build_callgraph(Mod, LabeledCore, Callgraph, CServer3, NoWarn).
abs_get_nowarn(Abs, M) ->
- [{M, F, A}
+ [{M, F, A}
|| {attribute, _, compile, {nowarn_unused_function, {F, A}}} <- Abs].
+get_exported_types_from_core(Core) ->
+ Attrs = cerl:module_attrs(Core),
+ ExpTypes1 = [cerl:concrete(L2) || {L1, L2} <- Attrs, cerl:is_literal(L1),
+ cerl:is_literal(L2),
+ cerl:concrete(L1) =:= 'export_type'],
+ ExpTypes2 = lists:flatten(ExpTypes1),
+ M = cerl:atom_val(cerl:module_name(Core)),
+ sets:from_list([{M, F, A} || {F, A} <- ExpTypes2]).
+
get_exports_from_core(Core) ->
Tree = cerl:from_records(Core),
Exports1 = cerl:module_exports(Tree),
@@ -390,7 +421,7 @@ label_core(Core, CServer) ->
NextLabel = dialyzer_codeserver:get_next_core_label(CServer),
CoreTree = cerl:from_records(Core),
{LabeledTree, NewNextLabel} = cerl_trees:label(CoreTree, NextLabel),
- {cerl:to_records(LabeledTree),
+ {cerl:to_records(LabeledTree),
dialyzer_codeserver:set_next_core_label(NewNextLabel, CServer)}.
store_code_and_build_callgraph(Mod, Core, Callgraph, CServer, NoWarn) ->
@@ -454,6 +485,20 @@ default_includes(Dir) ->
%% Handle Messages
%%-------------------------------------------------------------------
+rcv_and_send_ext_types(Parent) ->
+ Self = self(),
+ Self ! {Self, done},
+ ExtTypes = rcv_ext_types(Self, []),
+ Parent ! {Self, ext_types, ExtTypes},
+ ok.
+
+rcv_ext_types(Self, ExtTypes) ->
+ receive
+ {Self, ext_types, ExtType} ->
+ rcv_ext_types(Self, [ExtType|ExtTypes]);
+ {Self, done} -> lists:usort(ExtTypes)
+ end.
+
send_log(Parent, Msg) ->
Parent ! {self(), log, Msg},
ok.
@@ -471,11 +516,15 @@ filter_warnings(LegalWarnings, Warnings) ->
send_analysis_done(Parent, Plt, DocPlt) ->
Parent ! {self(), done, Plt, DocPlt},
ok.
-
+
send_ext_calls(Parent, ExtCalls) ->
Parent ! {self(), ext_calls, ExtCalls},
ok.
+send_ext_types(Parent, ExtTypes) ->
+ Parent ! {self(), ext_types, ExtTypes},
+ ok.
+
send_unknown_behaviours(Parent, UnknownBehaviours) ->
Parent ! {self(), unknown_behaviours, UnknownBehaviours},
ok.
@@ -491,7 +540,7 @@ send_mod_deps(Parent, ModuleDeps) ->
Parent ! {self(), mod_deps, ModuleDeps},
ok.
-format_bad_calls([{{_, _, _}, {_, module_info, A}}|Left], CodeServer, Acc)
+format_bad_calls([{{_, _, _}, {_, module_info, A}}|Left], CodeServer, Acc)
when A =:= 0; A =:= 1 ->
format_bad_calls(Left, CodeServer, Acc);
format_bad_calls([{FromMFA, {M, F, A} = To}|Left], CodeServer, Acc) ->
@@ -504,7 +553,7 @@ format_bad_calls([], _CodeServer, Acc) ->
Acc.
find_call_file_and_line(Tree, MFA) ->
- Fun =
+ Fun =
fun(SubTree, Acc) ->
case cerl:is_c_call(SubTree) of
true ->
diff --git a/lib/dialyzer/src/dialyzer_behaviours.erl b/lib/dialyzer/src/dialyzer_behaviours.erl
index 4e8dceaa8e..47ce9ba6eb 100644
--- a/lib/dialyzer/src/dialyzer_behaviours.erl
+++ b/lib/dialyzer/src/dialyzer_behaviours.erl
@@ -34,19 +34,25 @@
translate_behaviour_api_call/5, translatable_behaviours/1,
translate_callgraph/3]).
+-export_type([behaviour/0, behaviour_api_dict/0]).
+
%%--------------------------------------------------------------------
-include("dialyzer.hrl").
%%--------------------------------------------------------------------
+-type behaviour() :: atom().
+
-record(state, {plt :: dialyzer_plt:plt(),
codeserver :: dialyzer_codeserver:codeserver(),
- filename :: string(),
- behlines :: [{atom(), number()}]}).
+ filename :: file:filename(),
+ behlines :: [{behaviour(), non_neg_integer()}]}).
+
+%%--------------------------------------------------------------------
-spec get_behaviours([module()], dialyzer_codeserver:codeserver()) ->
- {[atom()], [atom()]}.
+ {[behaviour()], [behaviour()]}.
get_behaviours(Modules, Codeserver) ->
get_behaviours(Modules, Codeserver, [], []).
@@ -59,29 +65,37 @@ check_callbacks(Module, Attrs, Plt, Codeserver) ->
{Behaviours, BehLines} = get_behaviours(Attrs),
case Behaviours of
[] -> [];
- _ -> {_Var,Code} =
- dialyzer_codeserver:lookup_mfa_code({Module,module_info,0},
- Codeserver),
- File = get_file(cerl:get_ann(Code)),
- State = #state{plt = Plt, codeserver = Codeserver, filename = File,
- behlines = BehLines},
- Warnings = get_warnings(Module, Behaviours, State),
- [add_tag_file_line(Module, W, State) || W <- Warnings]
+ _ ->
+ MFA = {Module,module_info,0},
+ {_Var,Code} = dialyzer_codeserver:lookup_mfa_code(MFA, Codeserver),
+ File = get_file(cerl:get_ann(Code)),
+ State = #state{plt = Plt, codeserver = Codeserver, filename = File,
+ behlines = BehLines},
+ Warnings = get_warnings(Module, Behaviours, State),
+ [add_tag_file_line(Module, W, State) || W <- Warnings]
end.
--spec translatable_behaviours(cerl:c_module()) -> [{atom(),[_]}].
+-spec translatable_behaviours(cerl:c_module()) -> behaviour_api_dict().
translatable_behaviours(Tree) ->
Attrs = cerl:module_attrs(Tree),
{Behaviours, _BehLines} = get_behaviours(Attrs),
[{B, Calls} || B <- Behaviours, (Calls = behaviour_api_calls(B)) =/= []].
--spec get_behaviour_apis([atom()]) -> [mfa()].
+-spec get_behaviour_apis([behaviour()]) -> [mfa()].
get_behaviour_apis(Behaviours) ->
get_behaviour_apis(Behaviours, []).
--spec translate_behaviour_api_call(_, _, _, _, _) -> _.
+-spec translate_behaviour_api_call(dialyzer_races:mfa_or_funlbl(),
+ [erl_types:erl_type()],
+ [dialyzer_races:core_vars()],
+ module(),
+ behaviour_api_dict()) ->
+ {dialyzer_races:mfa_or_funlbl(),
+ [erl_types:erl_type()],
+ [dialyzer_races:core_vars()]}
+ | 'plain_call'.
translate_behaviour_api_call(_Fun, _ArgTypes, _Args, _Module, []) ->
plain_call;
@@ -101,8 +115,9 @@ translate_behaviour_api_call({Module, Fun, Arity}, ArgTypes, Args,
translate_behaviour_api_call(_Fun, _ArgTypes, _Args, _Module, _BehApiInfo) ->
plain_call.
--spec translate_callgraph([{atom(), _}], atom(), dialyzer_callgraph:callgraph())
- -> dialyzer_callgraph:callgraph().
+-spec translate_callgraph(behaviour_api_dict(), atom(),
+ dialyzer_callgraph:callgraph()) ->
+ dialyzer_callgraph:callgraph().
translate_callgraph([{Behaviour,_}|Behaviours], Module, Callgraph) ->
UsedCalls = [Call || {_From, {M, _F, _A}} = Call <-
@@ -156,9 +171,11 @@ check_all_callbacks(Module, Behaviour, Callbacks, State) ->
check_all_callbacks(_Module, _Behaviour, [], _State, Acc) ->
Acc;
-check_all_callbacks(Module, Behaviour, [{Fun, Arity, Spec}|Rest], State, Acc) ->
- Records = dialyzer_codeserver:get_records(State#state.codeserver),
- case parse_spec(Spec, Records) of
+check_all_callbacks(Module, Behaviour, [{Fun, Arity, Spec}|Rest],
+ #state{codeserver = CServer} = State, Acc) ->
+ Records = dialyzer_codeserver:get_records(CServer),
+ ExpTypes = dialyzer_codeserver:get_exported_types(CServer),
+ case parse_spec(Spec, ExpTypes, Records) of
{ok, Fun, Type} ->
RetType = erl_types:t_fun_range(Type),
ArgTypes = erl_types:t_fun_args(Type),
@@ -172,7 +189,7 @@ check_all_callbacks(Module, Behaviour, [{Fun, Arity}|Rest], State, Acc) ->
Warns = {spec_missing, [Behaviour, Fun, Arity]},
check_all_callbacks(Module, Behaviour, Rest, State, [Warns|Acc]).
-parse_spec(String, Records) ->
+parse_spec(String, ExpTypes, Records) ->
case erl_scan:string(String) of
{ok, Tokens, _} ->
case erl_parse:parse(Tokens) of
@@ -181,7 +198,8 @@ parse_spec(String, Records) ->
{attribute, _, 'spec', {{Fun, _}, [TypeForm|_Constraint]}} ->
MaybeRemoteType = erl_types:t_from_form(TypeForm),
try
- Type = erl_types:t_solve_remote(MaybeRemoteType, Records),
+ Type = erl_types:t_solve_remote(MaybeRemoteType, ExpTypes,
+ Records),
{ok, Fun, Type}
catch
throw:{error,Msg} -> {spec_remote_error, Msg}
@@ -260,7 +278,7 @@ get_line([]) -> -1.
get_file([{file, File}|_]) -> File;
get_file([_|Tail]) -> get_file(Tail).
-%%------------------------------------------------------------------------------
+%%-----------------------------------------------------------------------------
get_behaviours([], _Codeserver, KnownAcc, UnknownAcc) ->
{KnownAcc, UnknownAcc};
@@ -289,7 +307,7 @@ call_behaviours([Behaviour|Rest], KnownAcc, UnknownAcc) ->
_:_ -> call_behaviours(Rest, KnownAcc, [Behaviour | UnknownAcc])
end.
-%-------------------------------------------------------------------------------
+%------------------------------------------------------------------------------
get_behaviour_apis([], Acc) ->
Acc;
@@ -298,14 +316,22 @@ get_behaviour_apis([Behaviour | Rest], Acc) ->
{{Fun, Arity}, _} <- behaviour_api_calls(Behaviour)],
get_behaviour_apis(Rest, MFAs ++ Acc).
-%-------------------------------------------------------------------------------
+%------------------------------------------------------------------------------
nth_or_0(0, _List, Zero) ->
Zero;
nth_or_0(N, List, _Zero) ->
lists:nth(N, List).
-%-------------------------------------------------------------------------------
+%------------------------------------------------------------------------------
+
+-type behaviour_api_dict()::[{behaviour(), behaviour_api_info()}].
+-type behaviour_api_info()::[{original_fun(), replacement_fun()}].
+-type original_fun()::{atom(), arity()}.
+-type replacement_fun()::{atom(), arity(), arg_list()}.
+-type arg_list()::[byte()].
+
+-spec behaviour_api_calls(behaviour()) -> behaviour_api_info().
behaviour_api_calls(gen_server) ->
[{{start_link, 3}, {init, 1, [2]}},
diff --git a/lib/dialyzer/src/dialyzer_callgraph.erl b/lib/dialyzer/src/dialyzer_callgraph.erl
index f932f43548..d3de5aaf45 100644
--- a/lib/dialyzer/src/dialyzer_callgraph.erl
+++ b/lib/dialyzer/src/dialyzer_callgraph.erl
@@ -59,6 +59,8 @@
put_named_tables/2, put_public_tables/2, put_behaviour_api_calls/2,
get_behaviour_api_calls/1]).
+-export_type([callgraph/0]).
+
-include("dialyzer.hrl").
%%----------------------------------------------------------------------
diff --git a/lib/dialyzer/src/dialyzer_cl.erl b/lib/dialyzer/src/dialyzer_cl.erl
index d533e734db..57f0d6e736 100644
--- a/lib/dialyzer/src/dialyzer_cl.erl
+++ b/lib/dialyzer/src/dialyzer_cl.erl
@@ -38,6 +38,7 @@
{backend_pid :: pid(),
erlang_mode = false :: boolean(),
external_calls = [] :: [mfa()],
+ external_types = [] :: [mfa()],
legal_warnings = ordsets:new() :: [dial_warn_tag()],
mod_deps = dict:new() :: dict(),
output = standard_io :: io:device(),
@@ -47,7 +48,7 @@
report_mode = normal :: rep_mode(),
return_status= ?RET_NOTHING_SUSPICIOUS :: dial_ret(),
stored_warnings = [] :: [dial_warning()],
- unknown_behaviours = [] :: [atom()]
+ unknown_behaviours = [] :: [dialyzer_behaviours:behaviour()]
}).
%%--------------------------------------------------------------------
@@ -538,6 +539,8 @@ cl_loop(State, LogCache) ->
return_value(State, NewPlt);
{BackendPid, ext_calls, ExtCalls} ->
cl_loop(State#cl_state{external_calls = ExtCalls}, LogCache);
+ {BackendPid, ext_types, ExtTypes} ->
+ cl_loop(State#cl_state{external_types = ExtTypes}, LogCache);
{BackendPid, mod_deps, ModDeps} ->
NewState = State#cl_state{mod_deps = ModDeps},
cl_loop(NewState, LogCache);
@@ -574,7 +577,7 @@ format_log_cache(LogCache) ->
store_warnings(#cl_state{stored_warnings = StoredWarnings} = St, Warnings) ->
St#cl_state{stored_warnings = StoredWarnings ++ Warnings}.
--spec store_unknown_behaviours(#cl_state{}, [_]) -> #cl_state{}.
+-spec store_unknown_behaviours(#cl_state{}, [dialyzer_behaviours:behaviour()]) -> #cl_state{}.
store_unknown_behaviours(#cl_state{unknown_behaviours = Behs} = St, Beh) ->
St#cl_state{unknown_behaviours = Beh ++ Behs}.
@@ -613,6 +616,7 @@ return_value(State = #cl_state{erlang_mode = ErlangMode,
false ->
print_warnings(State),
print_ext_calls(State),
+ print_ext_types(State),
print_unknown_behaviours(State),
maybe_close_output_file(State),
{RetValue, []};
@@ -649,10 +653,41 @@ do_print_ext_calls(Output, [{M,F,A}|T], Before) ->
do_print_ext_calls(_, [], _) ->
ok.
+print_ext_types(#cl_state{report_mode = quiet}) ->
+ ok;
+print_ext_types(#cl_state{output = Output,
+ external_calls = Calls,
+ external_types = Types,
+ stored_warnings = Warnings,
+ output_format = Format}) ->
+ case Types =:= [] of
+ true -> ok;
+ false ->
+ case Warnings =:= [] andalso Calls =:= [] of
+ true -> io:nl(Output); %% Need to do a newline first
+ false -> ok
+ end,
+ case Format of
+ formatted ->
+ io:put_chars(Output, "Unknown types:\n"),
+ do_print_ext_types(Output, Types, " ");
+ raw ->
+ io:put_chars(Output, "%% Unknown types:\n"),
+ do_print_ext_types(Output, Types, "%% ")
+ end
+ end.
+
+do_print_ext_types(Output, [{M,F,A}|T], Before) ->
+ io:format(Output, "~s~p:~p/~p\n", [Before,M,F,A]),
+ do_print_ext_types(Output, T, Before);
+do_print_ext_types(_, [], _) ->
+ ok.
+
%%print_unknown_behaviours(#cl_state{report_mode = quiet}) ->
%% ok;
print_unknown_behaviours(#cl_state{output = Output,
external_calls = Calls,
+ external_types = Types,
stored_warnings = Warnings,
unknown_behaviours = DupBehaviours,
legal_warnings = LegalWarnings,
@@ -662,7 +697,7 @@ print_unknown_behaviours(#cl_state{output = Output,
false -> ok;
true ->
Behaviours = lists:usort(DupBehaviours),
- case Warnings =:= [] andalso Calls =:= [] of
+ case Warnings =:= [] andalso Calls =:= [] andalso Types =:= [] of
true -> io:nl(Output); %% Need to do a newline first
false -> ok
end,
diff --git a/lib/dialyzer/src/dialyzer_codeserver.erl b/lib/dialyzer/src/dialyzer_codeserver.erl
index 3bc5fadc21..b2097f7e53 100644
--- a/lib/dialyzer/src/dialyzer_codeserver.erl
+++ b/lib/dialyzer/src/dialyzer_codeserver.erl
@@ -21,7 +21,7 @@
%%%-------------------------------------------------------------------
%%% File : dialyzer_codeserver.erl
%%% Author : Tobias Lindahl <[email protected]>
-%%% Description :
+%%% Description :
%%%
%%% Created : 4 Apr 2005 by Tobias Lindahl <[email protected]>
%%%-------------------------------------------------------------------
@@ -29,15 +29,19 @@
-export([delete/1,
finalize_contracts/2,
+ finalize_exported_types/2,
finalize_records/2,
get_contracts/1,
- get_exports/1,
+ get_exported_types/1,
+ get_exports/1,
get_records/1,
get_next_core_label/1,
get_temp_contracts/1,
+ get_temp_exported_types/1,
get_temp_records/1,
- insert/3,
- insert_exports/2,
+ insert/3,
+ insert_exports/2,
+ insert_temp_exported_types/2,
is_exported/2,
lookup_mod_code/2,
lookup_mfa_code/2,
@@ -52,17 +56,21 @@
store_contracts/3,
store_temp_contracts/3]).
+-export_type([codeserver/0]).
+
-include("dialyzer.hrl").
%%--------------------------------------------------------------------
--record(codeserver, {table_pid :: pid(),
- exports = sets:new() :: set(), % set(mfa())
- next_core_label = 0 :: label(),
- records = dict:new() :: dict(),
- temp_records = dict:new() :: dict(),
- contracts = dict:new() :: dict(),
- temp_contracts = dict:new() :: dict()}).
+-record(codeserver, {table_pid :: pid(),
+ exported_types = sets:new() :: set(), % set(mfa())
+ temp_exported_types = sets:new() :: set(), % set(mfa())
+ exports = sets:new() :: set(), % set(mfa())
+ next_core_label = 0 :: label(),
+ records = dict:new() :: dict(),
+ temp_records = dict:new() :: dict(),
+ contracts = dict:new() :: dict(),
+ temp_contracts = dict:new() :: dict()}).
-opaque codeserver() :: #codeserver{}.
@@ -78,12 +86,17 @@ new() ->
delete(#codeserver{table_pid = TablePid}) ->
table__delete(TablePid).
--spec insert(module(), cerl:c_module(), codeserver()) -> codeserver().
+-spec insert(atom(), cerl:c_module(), codeserver()) -> codeserver().
insert(Mod, ModCode, CS) ->
NewTablePid = table__insert(CS#codeserver.table_pid, Mod, ModCode),
CS#codeserver{table_pid = NewTablePid}.
+-spec insert_temp_exported_types(set(), codeserver()) -> codeserver().
+
+insert_temp_exported_types(Set, CS) ->
+ CS#codeserver{temp_exported_types = Set}.
+
-spec insert_exports([mfa()], codeserver()) -> codeserver().
insert_exports(List, #codeserver{exports = Exports} = CS) ->
@@ -96,12 +109,27 @@ insert_exports(List, #codeserver{exports = Exports} = CS) ->
is_exported(MFA, #codeserver{exports = Exports}) ->
sets:is_element(MFA, Exports).
+-spec get_exported_types(codeserver()) -> set(). % set(mfa())
+
+get_exported_types(#codeserver{exported_types = ExpTypes}) ->
+ ExpTypes.
+
+-spec get_temp_exported_types(codeserver()) -> set().
+
+get_temp_exported_types(#codeserver{temp_exported_types = TempExpTypes}) ->
+ TempExpTypes.
+
-spec get_exports(codeserver()) -> set(). % set(mfa())
get_exports(#codeserver{exports = Exports}) ->
Exports.
--spec lookup_mod_code(module(), codeserver()) -> cerl:c_module().
+-spec finalize_exported_types(set(), codeserver()) -> codeserver().
+
+finalize_exported_types(Set, CS) ->
+ CS#codeserver{exported_types = Set, temp_exported_types = sets:new()}.
+
+-spec lookup_mod_code(atom(), codeserver()) -> cerl:c_module().
lookup_mod_code(Mod, CS) when is_atom(Mod) ->
table__lookup(CS#codeserver.table_pid, Mod).
@@ -121,7 +149,7 @@ get_next_core_label(#codeserver{next_core_label = NCL}) ->
set_next_core_label(NCL, CS) ->
CS#codeserver{next_core_label = NCL}.
--spec store_records(module(), dict(), codeserver()) -> codeserver().
+-spec store_records(atom(), dict(), codeserver()) -> codeserver().
store_records(Mod, Dict, #codeserver{records = RecDict} = CS)
when is_atom(Mod) ->
@@ -130,7 +158,7 @@ store_records(Mod, Dict, #codeserver{records = RecDict} = CS)
false -> CS#codeserver{records = dict:store(Mod, Dict, RecDict)}
end.
--spec lookup_mod_records(module(), codeserver()) -> dict().
+-spec lookup_mod_records(atom(), codeserver()) -> dict().
lookup_mod_records(Mod, #codeserver{records = RecDict})
when is_atom(Mod) ->
@@ -139,12 +167,12 @@ lookup_mod_records(Mod, #codeserver{records = RecDict})
{ok, Dict} -> Dict
end.
--spec get_records(codeserver()) -> dict().
+-spec get_records(codeserver()) -> dict().
get_records(#codeserver{records = RecDict}) ->
RecDict.
--spec store_temp_records(module(), dict(), codeserver()) -> codeserver().
+-spec store_temp_records(atom(), dict(), codeserver()) -> codeserver().
store_temp_records(Mod, Dict, #codeserver{temp_records = TempRecDict} = CS)
when is_atom(Mod) ->
@@ -153,7 +181,7 @@ store_temp_records(Mod, Dict, #codeserver{temp_records = TempRecDict} = CS)
false -> CS#codeserver{temp_records = dict:store(Mod, Dict, TempRecDict)}
end.
--spec get_temp_records(codeserver()) -> dict().
+-spec get_temp_records(codeserver()) -> dict().
get_temp_records(#codeserver{temp_records = TempRecDict}) ->
TempRecDict.
@@ -163,12 +191,12 @@ get_temp_records(#codeserver{temp_records = TempRecDict}) ->
set_temp_records(Dict, CS) ->
CS#codeserver{temp_records = Dict}.
--spec finalize_records(dict(), codeserver()) -> codeserver().
+-spec finalize_records(dict(), codeserver()) -> codeserver().
finalize_records(Dict, CS) ->
CS#codeserver{records = Dict, temp_records = dict:new()}.
--spec store_contracts(module(), dict(), codeserver()) -> codeserver().
+-spec store_contracts(atom(), dict(), codeserver()) -> codeserver().
store_contracts(Mod, Dict, #codeserver{contracts = C} = CS) when is_atom(Mod) ->
case dict:size(Dict) =:= 0 of
@@ -176,7 +204,7 @@ store_contracts(Mod, Dict, #codeserver{contracts = C} = CS) when is_atom(Mod) ->
false -> CS#codeserver{contracts = dict:store(Mod, Dict, C)}
end.
--spec lookup_mod_contracts(module(), codeserver()) -> dict().
+-spec lookup_mod_contracts(atom(), codeserver()) -> dict().
lookup_mod_contracts(Mod, #codeserver{contracts = ContDict})
when is_atom(Mod) ->
@@ -185,7 +213,7 @@ lookup_mod_contracts(Mod, #codeserver{contracts = ContDict})
{ok, Dict} -> Dict
end.
--spec lookup_mfa_contract(mfa(), codeserver()) ->
+-spec lookup_mfa_contract(mfa(), codeserver()) ->
'error' | {'ok', dialyzer_contracts:file_contract()}.
lookup_mfa_contract({M,_F,_A} = MFA, #codeserver{contracts = ContDict}) ->
@@ -194,12 +222,12 @@ lookup_mfa_contract({M,_F,_A} = MFA, #codeserver{contracts = ContDict}) ->
{ok, Dict} -> dict:find(MFA, Dict)
end.
--spec get_contracts(codeserver()) -> dict().
+-spec get_contracts(codeserver()) -> dict().
get_contracts(#codeserver{contracts = ContDict}) ->
ContDict.
--spec store_temp_contracts(module(), dict(), codeserver()) -> codeserver().
+-spec store_temp_contracts(atom(), dict(), codeserver()) -> codeserver().
store_temp_contracts(Mod, Dict, #codeserver{temp_contracts = C} = CS)
when is_atom(Mod) ->
@@ -263,7 +291,7 @@ table__loop(Cached, Map) ->
Pid ! {self(), Mod, Ans},
table__loop({Mod, Ans}, Map);
{insert, List} ->
- NewMap = lists:foldl(fun({Key, Val}, AccMap) ->
+ NewMap = lists:foldl(fun({Key, Val}, AccMap) ->
dict:store(Key, Val, AccMap)
end, Map, List),
table__loop(Cached, NewMap)
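The new exported_types/temp_exported_types fields follow the same temp-then-finalize pattern as the records and contracts fields. A hedged sketch of how a caller might use the added API (the my_mod:t/0 entry and the function name are illustrative only):

    %% Sketch: thread exported types through the code server.
    example(CServer0) ->
        Tmp0 = dialyzer_codeserver:get_temp_exported_types(CServer0),
        Tmp1 = sets:union(sets:from_list([{my_mod, t, 0}]), Tmp0),
        CServer1 = dialyzer_codeserver:insert_temp_exported_types(Tmp1, CServer0),
        %% once all modules have been processed, freeze the accumulated set:
        CServer2 = dialyzer_codeserver:finalize_exported_types(Tmp1, CServer1),
        dialyzer_codeserver:get_exported_types(CServer2).
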
diff --git a/lib/dialyzer/src/dialyzer_contracts.erl b/lib/dialyzer/src/dialyzer_contracts.erl
index 3486c72748..bf80c6f470 100644
--- a/lib/dialyzer/src/dialyzer_contracts.erl
+++ b/lib/dialyzer/src/dialyzer_contracts.erl
@@ -21,7 +21,7 @@
-module(dialyzer_contracts).
-export([check_contract/2,
- check_contracts/3,
+ check_contracts/3,
contracts_without_fun/3,
contract_to_string/1,
get_invalid_contract_warnings/3,
@@ -33,6 +33,8 @@
process_contract_remote_types/1,
store_tmp_contract/5]).
+-export_type([file_contract/0, plt_contracts/0]).
+
%%-----------------------------------------------------------------------
-include("dialyzer.hrl").
@@ -50,7 +52,7 @@
%% to expand records and/or remote types that they might contain.
%%-----------------------------------------------------------------------
--type tmp_contract_fun() :: fun((dict()) -> contract_pair()).
+-type tmp_contract_fun() :: fun((set(), dict()) -> contract_pair()).
-record(tmp_contract, {contract_funs = [] :: [tmp_contract_fun()],
forms = [] :: [{_, _}]}).
@@ -104,13 +106,13 @@ contract_to_string(#contract{forms = Forms}) ->
contract_to_string_1([{Contract, []}]) ->
strip_fun(erl_types:t_form_to_string(Contract));
contract_to_string_1([{Contract, []}|Rest]) ->
- strip_fun(erl_types:t_form_to_string(Contract)) ++ "\n ; "
+ strip_fun(erl_types:t_form_to_string(Contract)) ++ "\n ; "
++ contract_to_string_1(Rest);
contract_to_string_1([{Contract, Constraints}]) ->
- strip_fun(erl_types:t_form_to_string(Contract)) ++ " when "
+ strip_fun(erl_types:t_form_to_string(Contract)) ++ " when "
++ constraints_to_string(Constraints);
contract_to_string_1([{Contract, Constraints}|Rest]) ->
- strip_fun(erl_types:t_form_to_string(Contract)) ++ " when "
+ strip_fun(erl_types:t_form_to_string(Contract)) ++ " when "
++ constraints_to_string(Constraints) ++ ";" ++
contract_to_string_1(Rest).
@@ -128,7 +130,7 @@ constraints_to_string([{type, _, constraint, [{atom, _, What}, Types]}]) ->
sequence([erl_types:t_form_to_string(T) || T <- Types], ",") ++ ")";
constraints_to_string([{type, _, constraint, [{atom, _, What}, Types]}|Rest]) ->
atom_to_list(What) ++ "(" ++
- sequence([erl_types:t_form_to_string(T) || T <- Types], ",")
+ sequence([erl_types:t_form_to_string(T) || T <- Types], ",")
++ "), " ++ constraints_to_string(Rest).
sequence([], _Delimiter) -> "";
@@ -140,10 +142,11 @@ sequence([H|T], Delimiter) -> H ++ Delimiter ++ sequence(T, Delimiter).
process_contract_remote_types(CodeServer) ->
TmpContractDict = dialyzer_codeserver:get_temp_contracts(CodeServer),
+ ExpTypes = dialyzer_codeserver:get_exported_types(CodeServer),
RecordDict = dialyzer_codeserver:get_records(CodeServer),
ContractFun =
fun({_M, _F, _A}, {File, #tmp_contract{contract_funs = CFuns, forms = Forms}}) ->
- NewCs = [CFun(RecordDict) || CFun <- CFuns],
+ NewCs = [CFun(ExpTypes, RecordDict) || CFun <- CFuns],
Args = general_domain(NewCs),
{File, #contract{contracts = NewCs, args = Args, forms = Forms}}
end,
@@ -153,21 +156,21 @@ process_contract_remote_types(CodeServer) ->
end,
NewContractDict = dict:map(ModuleFun, TmpContractDict),
dialyzer_codeserver:finalize_contracts(NewContractDict, CodeServer).
-
+
-spec check_contracts([{mfa(), file_contract()}],
dialyzer_callgraph:callgraph(), dict()) -> plt_contracts().
check_contracts(Contracts, Callgraph, FunTypes) ->
FoldFun =
- fun(Label, Type, NewContracts) ->
+ fun(Label, Type, NewContracts) ->
{ok, {M,F,A} = MFA} = dialyzer_callgraph:lookup_name(Label, Callgraph),
case orddict:find(MFA, Contracts) of
- {ok, {_FileLine, Contract}} ->
+ {ok, {_FileLine, Contract}} ->
case check_contract(Contract, Type) of
ok ->
case erl_bif_types:is_known(M, F, A) of
true ->
- %% Disregard the contracts since
+ %% Disregard the contracts since
%% this is a known function.
NewContracts;
false ->
@@ -184,8 +187,8 @@ check_contracts(Contracts, Callgraph, FunTypes) ->
-spec check_contract(#contract{}, erl_types:erl_type()) -> 'ok' | {'error', term()}.
check_contract(#contract{contracts = Contracts}, SuccType) ->
- try
- Contracts1 = [{Contract, insert_constraints(Constraints, dict:new())}
+ try
+ Contracts1 = [{Contract, insert_constraints(Constraints, dict:new())}
|| {Contract, Constraints} <- Contracts],
Contracts2 = [erl_types:t_subst(Contract, Dict)
|| {Contract, Dict} <- Contracts1],
@@ -194,7 +197,7 @@ check_contract(#contract{contracts = Contracts}, SuccType) ->
error ->
{error, {overlapping_contract, []}};
ok ->
- InfList = [erl_types:t_inf(Contract, SuccType, opaque)
+ InfList = [erl_types:t_inf(Contract, SuccType, opaque)
|| Contract <- Contracts2],
case check_contract_inf_list(InfList, SuccType) of
{error, _} = Invalid -> Invalid;
@@ -226,7 +229,7 @@ check_contract_inf_list([FunType|Left], SuccType) ->
STRange = erl_types:t_fun_range(SuccType),
case erl_types:t_is_none_or_unit(STRange) of
true -> ok;
- false ->
+ false ->
Range = erl_types:t_fun_range(FunType),
case erl_types:t_is_none(erl_types:t_inf(STRange, Range, opaque)) of
true -> check_contract_inf_list(Left, SuccType);
@@ -258,9 +261,9 @@ check_extraneous_1(Contract, SuccType) ->
process_contracts(OverContracts, Args) ->
process_contracts(OverContracts, Args, erl_types:t_none()).
-
+
process_contracts([OverContract|Left], Args, AccRange) ->
- NewAccRange =
+ NewAccRange =
case process_contract(OverContract, Args) of
error -> AccRange;
{ok, Range} -> erl_types:t_sup(AccRange, Range)
@@ -273,12 +276,12 @@ process_contracts([], _Args, AccRange) ->
process_contract({Contract, Constraints}, CallTypes0) ->
CallTypesFun = erl_types:t_fun(CallTypes0, erl_types:t_any()),
- ContArgsFun = erl_types:t_fun(erl_types:t_fun_args(Contract),
+ ContArgsFun = erl_types:t_fun(erl_types:t_fun_args(Contract),
erl_types:t_any()),
?debug("Instance: Contract: ~s\n Arguments: ~s\n",
- [erl_types:t_to_string(ContArgsFun),
+ [erl_types:t_to_string(ContArgsFun),
erl_types:t_to_string(CallTypesFun)]),
- case solve_constraints(ContArgsFun, CallTypesFun, Constraints) of
+ case solve_constraints(ContArgsFun, CallTypesFun, Constraints) of
{ok, VarDict} ->
{ok, erl_types:t_subst(erl_types:t_fun_range(Contract), VarDict)};
error -> error
@@ -288,7 +291,7 @@ solve_constraints(Contract, Call, Constraints) ->
%% First make sure the call follows the constraints
CDict = insert_constraints(Constraints, dict:new()),
Contract1 = erl_types:t_subst(Contract, CDict),
- %% Just a safe over-approximation.
+ %% Just a safe over-approximation.
%% TODO: Find the types for type variables properly
ContrArgs = erl_types:t_fun_args(Contract1),
CallArgs = erl_types:t_fun_args(Call),
@@ -309,7 +312,7 @@ solve_constraints(Contract, Call, Constraints) ->
-spec contracts_without_fun(dict(), [_], dialyzer_callgraph:callgraph()) -> [dial_warning()].
contracts_without_fun(Contracts, AllFuns0, Callgraph) ->
- AllFuns1 = [{dialyzer_callgraph:lookup_name(Label, Callgraph), Arity}
+ AllFuns1 = [{dialyzer_callgraph:lookup_name(Label, Callgraph), Arity}
|| {Label, Arity} <- AllFuns0],
AllFuns2 = [{M, F, A} || {{ok, {M, F, _}}, A} <- AllFuns1],
AllContractMFAs = dict:fetch_keys(Contracts),
@@ -351,46 +354,49 @@ contract_from_form(Forms, RecDict) ->
{CFuns, Forms1} = contract_from_form(Forms, RecDict, [], []),
#tmp_contract{contract_funs = CFuns, forms = Forms1}.
-contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], RecDict,
+contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], RecDict,
TypeAcc, FormAcc) ->
- TypeFun =
- fun(AllRecords) ->
+ TypeFun =
+ fun(ExpTypes, AllRecords) ->
Type = erl_types:t_from_form(Form, RecDict),
- NewType = erl_types:t_solve_remote(Type, AllRecords),
+ NewType = erl_types:t_solve_remote(Type, ExpTypes, AllRecords),
{NewType, []}
end,
NewTypeAcc = [TypeFun | TypeAcc],
NewFormAcc = [{Form, []} | FormAcc],
contract_from_form(Left, RecDict, NewTypeAcc, NewFormAcc);
-contract_from_form([{type, _L1, bounded_fun,
+contract_from_form([{type, _L1, bounded_fun,
[{type, _L2, 'fun', [_, _]} = Form, Constr]}| Left],
RecDict, TypeAcc, FormAcc) ->
- TypeFun =
- fun(AllRecords) ->
- Constr1 = [constraint_from_form(C, RecDict, AllRecords) || C <- Constr],
+ TypeFun =
+ fun(ExpTypes, AllRecords) ->
+ Constr1 = [constraint_from_form(C, RecDict, ExpTypes, AllRecords)
+ || C <- Constr],
VarDict = insert_constraints(Constr1, dict:new()),
Type = erl_types:t_from_form(Form, RecDict, VarDict),
- NewType = erl_types:t_solve_remote(Type, AllRecords),
+ NewType = erl_types:t_solve_remote(Type, ExpTypes, AllRecords),
{NewType, Constr1}
- end,
+ end,
NewTypeAcc = [TypeFun | TypeAcc],
NewFormAcc = [{Form, Constr} | FormAcc],
contract_from_form(Left, RecDict, NewTypeAcc, NewFormAcc);
-contract_from_form([], _RecDict, TypeAcc, FormAcc) ->
+contract_from_form([], _RecDict, TypeAcc, FormAcc) ->
{lists:reverse(TypeAcc), lists:reverse(FormAcc)}.
-constraint_from_form({type, _, constraint, [{atom, _, is_subtype},
- [Type1, Type2]]}, RecDict, AllRecords) ->
+constraint_from_form({type, _, constraint, [{atom, _, is_subtype},
+ [Type1, Type2]]}, RecDict,
+ ExpTypes, AllRecords) ->
T1 = erl_types:t_from_form(Type1, RecDict),
T2 = erl_types:t_from_form(Type2, RecDict),
- T3 = erl_types:t_solve_remote(T1, AllRecords),
- T4 = erl_types:t_solve_remote(T2, AllRecords),
+ T3 = erl_types:t_solve_remote(T1, ExpTypes, AllRecords),
+ T4 = erl_types:t_solve_remote(T2, ExpTypes, AllRecords),
{subtype, T3, T4};
-constraint_from_form({type, _, constraint, [{atom,_,Name}, List]}, _RecDict, _) ->
+constraint_from_form({type, _, constraint, [{atom,_,Name}, List]}, _RecDict,
+ _ExpTypes, _AllRecords) ->
N = length(List),
throw({error, io_lib:format("Unsupported type guard ~w/~w\n", [Name, N])}).
-%% Gets the most general domain of a list of domains of all
+%% Gets the most general domain of a list of domains of all
%% the overloaded contracts
general_domain(List) ->
@@ -419,7 +425,7 @@ get_invalid_contract_warnings_modules([Mod|Mods], CodeServer, Plt, Acc) ->
get_invalid_contract_warnings_modules([], _CodeServer, _Plt, Acc) ->
Acc.
-get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract}}|Left],
+get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract}}|Left],
Plt, RecDict, Acc) ->
case dialyzer_plt:lookup(Plt, MFA) of
none ->
@@ -447,15 +453,15 @@ get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract}}|Left],
BifRet = erl_bif_types:type(M, F, A),
BifSig = erl_types:t_fun(BifArgs, BifRet),
case check_contract(Contract, BifSig) of
- {error, _} ->
+ {error, _} ->
[invalid_contract_warning(MFA, FileLine, BifSig, RecDict)
|Acc];
ok ->
- picky_contract_check(CSig, BifSig, MFA, FileLine,
+ picky_contract_check(CSig, BifSig, MFA, FileLine,
Contract, RecDict, Acc)
end;
false ->
- picky_contract_check(CSig, Sig, MFA, FileLine, Contract,
+ picky_contract_check(CSig, Sig, MFA, FileLine, Contract,
RecDict, Acc)
end
end,
@@ -479,12 +485,12 @@ picky_contract_check(CSig0, Sig0, MFA, FileLine, Contract, RecDict, Acc) ->
Sig = erl_types:t_abstract_records(Sig0, RecDict),
case erl_types:t_is_equal(CSig, Sig) of
true -> Acc;
- false ->
+ false ->
case (erl_types:t_is_none(erl_types:t_fun_range(Sig)) andalso
erl_types:t_is_unit(erl_types:t_fun_range(CSig))) of
true -> Acc;
false ->
- case extra_contract_warning(MFA, FileLine, Contract,
+ case extra_contract_warning(MFA, FileLine, Contract,
CSig, Sig, RecDict) of
no_warning -> Acc;
{warning, Warning} -> [Warning|Acc]
@@ -503,16 +509,16 @@ extra_contract_warning({M, F, A}, FileLine, Contract, CSig, Sig, RecDict) ->
ContractString = contract_to_string(Contract),
{Tag, Msg} =
case erl_types:t_is_subtype(CSig, Sig) of
- true ->
- {?WARN_CONTRACT_SUBTYPE,
+ true ->
+ {?WARN_CONTRACT_SUBTYPE,
{contract_subtype, [M, F, A, ContractString, SigString]}};
false ->
case erl_types:t_is_subtype(Sig, CSig) of
true ->
- {?WARN_CONTRACT_SUPERTYPE,
+ {?WARN_CONTRACT_SUPERTYPE,
{contract_supertype, [M, F, A, ContractString, SigString]}};
false ->
- {?WARN_CONTRACT_NOT_EQUAL,
+ {?WARN_CONTRACT_NOT_EQUAL,
{contract_diff, [M, F, A, ContractString, SigString]}}
end
end,
diff --git a/lib/dialyzer/src/dialyzer_dataflow.erl b/lib/dialyzer/src/dialyzer_dataflow.erl
index 1ccfaaa52f..b80c7efc1a 100644
--- a/lib/dialyzer/src/dialyzer_dataflow.erl
+++ b/lib/dialyzer/src/dialyzer_dataflow.erl
@@ -21,7 +21,7 @@
%%%-------------------------------------------------------------------
%%% File : dialyzer_dataflow.erl
%%% Author : Tobias Lindahl <[email protected]>
-%%% Description :
+%%% Description :
%%%
%%% Created : 19 Apr 2005 by Tobias Lindahl <[email protected]>
%%%-------------------------------------------------------------------
@@ -30,6 +30,7 @@
-export([get_fun_types/4, get_warnings/5, format_args/3]).
+%% Data structure interfaces.
-export([state__add_warning/2, state__cleanup/1,
state__get_callgraph/1, state__get_races/1,
state__get_records/1, state__put_callgraph/2,
@@ -38,9 +39,11 @@
%% Debug and test interfaces.
-export([get_top_level_signatures/2, pp/1]).
+-export_type([state/0]).
+
-include("dialyzer.hrl").
--import(erl_types,
+-import(erl_types,
[any_none/1, t_any/0, t_atom/0, t_atom/1, t_atom_vals/1,
t_binary/0, t_boolean/0,
t_bitstr/0, t_bitstr/2, t_bitstr_concat/1, t_bitstr_match/2,
@@ -88,14 +91,15 @@
fun_tab :: dict(),
plt :: dialyzer_plt:plt(),
opaques :: [erl_types:erl_type()],
- races :: dialyzer_races:races(),
- records :: dict(),
+ races = dialyzer_races:new() :: dialyzer_races:races(),
+ records = dict:new() :: dict(),
tree_map :: dict(),
warning_mode = false :: boolean(),
warnings = [] :: [dial_warning()],
work :: {[_], [_], set()},
module :: module(),
- behaviour_api_info = [] :: [{atom(),[_]}]}).
+ behaviour_api_dict = [] ::
+ dialyzer_behaviours:behaviour_api_dict()}).
%% Exported Types
@@ -163,20 +167,20 @@ get_top_level_signatures(Code, Records) ->
error ->
Arity = cerl:fname_arity(V),
Type = t_fun(lists:duplicate(Arity,
- t_none()),
+ t_none()),
t_none()),
dict:store(Label, Type, Acc);
{ok, _} -> Acc
end
end, FunTypes, cerl:module_defs(Tree)),
dialyzer_callgraph:delete(Callgraph),
- Sigs = [{{cerl:fname_id(V), cerl:fname_arity(V)},
- dict:fetch(get_label(F), FunTypes1)}
+ Sigs = [{{cerl:fname_id(V), cerl:fname_arity(V)},
+ dict:fetch(get_label(F), FunTypes1)}
|| {V, F} <- cerl:module_defs(Tree)],
ordsets:from_list(Sigs).
get_def_plt() ->
- try
+ try
dialyzer_plt:from_file(dialyzer_plt:get_default_plt())
catch
throw:{dialyzer_error, _} -> dialyzer_plt:new()
@@ -202,7 +206,7 @@ annotate_module(Code, Plt) ->
annotate(Tree, State) ->
case cerl:subtrees(Tree) of
[] -> set_type(Tree, State);
- List ->
+ List ->
NewSubTrees = [[annotate(Subtree, State) || Subtree <- Group]
|| Group <- List],
NewTree = cerl:update_tree(Tree, NewSubTrees),
@@ -214,9 +218,9 @@ set_type(Tree, State) ->
'fun' ->
Type = state__fun_type(Tree, State),
case t_is_any(Type) of
- true ->
+ true ->
cerl:set_ann(Tree, delete_ann(typesig, cerl:get_ann(Tree)));
- false ->
+ false ->
cerl:set_ann(Tree, append_ann(typesig, Type, cerl:get_ann(Tree)))
end;
apply ->
@@ -224,10 +228,10 @@ set_type(Tree, State) ->
unknown -> Tree;
ReturnType ->
case t_is_any(ReturnType) of
- true ->
+ true ->
cerl:set_ann(Tree, delete_ann(type, cerl:get_ann(Tree)));
- false ->
- cerl:set_ann(Tree, append_ann(type, ReturnType,
+ false ->
+ cerl:set_ann(Tree, append_ann(type, ReturnType,
cerl:get_ann(Tree)))
end
end;
@@ -236,7 +240,7 @@ set_type(Tree, State) ->
end.
append_ann(Tag, Val, [X | Xs]) ->
- if tuple_size(X) >= 1, element(1, X) =:= Tag ->
+ if tuple_size(X) >= 1, element(1, X) =:= Tag ->
append_ann(Tag, Val, Xs);
true ->
[X | append_ann(Tag, Val, Xs)]
@@ -245,7 +249,7 @@ append_ann(Tag, Val, []) ->
[{Tag, Val}].
delete_ann(Tag, [X | Xs]) ->
- if tuple_size(X) >= 1, element(1, X) =:= Tag ->
+ if tuple_size(X) >= 1, element(1, X) =:= Tag ->
delete_ann(Tag, Xs);
true ->
[X | delete_ann(Tag, Xs)]
@@ -314,21 +318,21 @@ analyze_loop(#state{callgraph = Callgraph, races = Races} = State) ->
{Fun, NewState} ->
ArgTypes = state__get_args(Fun, NewState),
case any_none(ArgTypes) of
- true ->
- ?debug("Not handling1 ~w: ~s\n",
- [state__lookup_name(get_label(Fun), State),
+ true ->
+ ?debug("Not handling1 ~w: ~s\n",
+ [state__lookup_name(get_label(Fun), State),
t_to_string(t_product(ArgTypes))]),
analyze_loop(NewState);
- false ->
+ false ->
case state__fun_env(Fun, NewState) of
- none ->
- ?debug("Not handling2 ~w: ~s\n",
- [state__lookup_name(get_label(Fun), State),
+ none ->
+ ?debug("Not handling2 ~w: ~s\n",
+ [state__lookup_name(get_label(Fun), State),
t_to_string(t_product(ArgTypes))]),
analyze_loop(NewState);
Map ->
- ?debug("Handling fun ~p: ~s\n",
- [state__lookup_name(get_label(Fun), State),
+ ?debug("Handling fun ~p: ~s\n",
+ [state__lookup_name(get_label(Fun), State),
t_to_string(state__fun_type(Fun, NewState))]),
NewState1 = state__mark_fun_as_handled(NewState, Fun),
Vars = cerl:fun_vars(Fun),
@@ -339,19 +343,19 @@ analyze_loop(#state{callgraph = Callgraph, races = Races} = State) ->
RaceAnalysis = dialyzer_races:get_race_analysis(Races),
NewState3 =
case RaceDetection andalso RaceAnalysis of
- true ->
+ true ->
NewState2 = state__renew_curr_fun(
state__lookup_name(FunLabel, NewState1), FunLabel,
NewState1),
state__renew_race_list([], 0, NewState2);
false -> NewState1
end,
- {NewState4, _Map2, BodyType} =
+ {NewState4, _Map2, BodyType} =
traverse(Body, Map1, NewState3),
- ?debug("Done analyzing: ~w:~s\n",
+ ?debug("Done analyzing: ~w:~s\n",
[state__lookup_name(get_label(Fun), State),
t_to_string(t_fun(ArgTypes, BodyType))]),
- NewState5 =
+ NewState5 =
case RaceDetection andalso RaceAnalysis of
true ->
Races1 = NewState4#state.races,
@@ -382,7 +386,7 @@ traverse(Tree, Map, State) ->
%% This only happens when checking for illegal record patterns
%% so the handling is a bit rudimentary.
traverse(cerl:alias_pat(Tree), Map, State);
- apply ->
+ apply ->
handle_apply(Tree, Map, State);
binary ->
Segs = cerl:binary_segments(Tree),
@@ -416,7 +420,7 @@ traverse(Tree, Map, State) ->
%% By not including the variables in scope we can assure that we
%% will get the current function type when using the variables.
FoldFun = fun({Var, Fun}, {AccState, AccMap}) ->
- {NewAccState, NewAccMap0, FunType} =
+ {NewAccState, NewAccMap0, FunType} =
traverse(Fun, AccMap, AccState),
NewAccMap = enter_type(Var, FunType, NewAccMap0),
{NewAccState, NewAccMap}
@@ -428,7 +432,7 @@ traverse(Tree, Map, State) ->
case cerl:unfold_literal(Tree) of
Tree ->
Type = literal_type(Tree),
- NewType =
+ NewType =
case erl_types:t_opaque_match_atom(Type, State#state.opaques) of
[Opaque] -> Opaque;
_ -> Type
@@ -446,8 +450,8 @@ traverse(Tree, Map, State) ->
bs_init_writable -> t_from_term(<<>>);
Other -> erlang:error({'Unsupported primop', Other})
end,
- {State, Map, Type};
- 'receive' ->
+ {State, Map, Type};
+ 'receive' ->
handle_receive(Tree, Map, State);
seq ->
Arg = cerl:seq_arg(Tree),
@@ -457,13 +461,13 @@ traverse(Tree, Map, State) ->
true ->
SMA;
false ->
- State2 =
+ State2 =
case (t_is_any(ArgType) orelse t_is_simple(ArgType)
orelse is_call_to_send(Arg)) of
true -> % do not warn in these cases
State1;
false ->
- state__add_warning(State1, ?WARN_UNMATCHED_RETURN, Arg,
+ state__add_warning(State1, ?WARN_UNMATCHED_RETURN, Arg,
{unmatched_return,
[format_type(ArgType, State1)]})
end,
@@ -481,12 +485,12 @@ traverse(Tree, Map, State) ->
var ->
?debug("Looking up unknown variable: ~p\n", [Tree]),
case state__lookup_type_for_rec_var(Tree, State) of
- error ->
+ error ->
LType = lookup_type(Tree, Map),
Opaques = State#state.opaques,
case t_opaque_match_record(LType, Opaques) of
[Opaque] -> {State, Map, Opaque};
- _ ->
+ _ ->
case t_opaque_match_atom(LType, Opaques) of
[Opaque] -> {State, Map, Opaque};
_ -> {State, Map, LType}
@@ -506,7 +510,7 @@ traverse_list([Tree|Tail], Map, State, Acc) ->
traverse_list(Tail, Map1, State1, [Type|Acc]);
traverse_list([], Map, State, Acc) ->
{State, Map, lists:reverse(Acc)}.
-
+
%%________________________________________
%%
%% Special instructions
@@ -518,7 +522,7 @@ handle_apply(Tree, Map, State) ->
{State1, Map1, ArgTypes} = traverse_list(Args, Map, State),
{State2, Map2, OpType} = traverse(Op, Map1, State1),
case any_none(ArgTypes) of
- true ->
+ true ->
{State2, Map2, t_none()};
false ->
{CallSitesKnown, FunList} =
@@ -533,7 +537,7 @@ handle_apply(Tree, Map, State) ->
OpType1 = t_inf(OpType, t_fun(Arity, t_any())),
case t_is_none(OpType1) of
true ->
- Msg = {fun_app_no_fun,
+ Msg = {fun_app_no_fun,
[format_cerl(Op), format_type(OpType, State2), Arity]},
State3 = state__add_warning(State2, ?WARN_FAILING_CALL,
Tree, Msg),
@@ -541,7 +545,7 @@ handle_apply(Tree, Map, State) ->
false ->
NewArgs = t_inf_lists(ArgTypes, t_fun_args(OpType1)),
case any_none(NewArgs) of
- true ->
+ true ->
Msg = {fun_app_args,
[format_args(Args, ArgTypes, State),
format_type(OpType, State)]},
@@ -554,7 +558,7 @@ handle_apply(Tree, Map, State) ->
end
end;
true ->
- FunInfoList = [{local, state__fun_info(Fun, State)}
+ FunInfoList = [{local, state__fun_info(Fun, State)}
|| Fun <- FunList],
handle_apply_or_call(FunInfoList, Args, ArgTypes, Map2, Tree, State1)
end
@@ -562,7 +566,7 @@ handle_apply(Tree, Map, State) ->
handle_apply_or_call(FunInfoList, Args, ArgTypes, Map, Tree, State) ->
None = t_none(),
- handle_apply_or_call(FunInfoList, Args, ArgTypes, Map, Tree, State,
+ handle_apply_or_call(FunInfoList, Args, ArgTypes, Map, Tree, State,
[None || _ <- ArgTypes], None).
handle_apply_or_call([{local, external}|Left], Args, ArgTypes, Map, Tree, State,
@@ -577,7 +581,7 @@ handle_apply_or_call([{TypeOfApply, {Fun, Sig, Contr, LocalRet}}|Left],
Any = t_any(),
AnyArgs = [Any || _ <- Args],
GenSig = {AnyArgs, fun(_) -> t_any() end},
- {CArgs, CRange} =
+ {CArgs, CRange} =
case Contr of
{value, #contract{args = As} = C} ->
{As, fun(FunArgs) ->
@@ -627,9 +631,9 @@ handle_apply_or_call([{TypeOfApply, {Fun, Sig, Contr, LocalRet}}|Left],
end
end,
ArgModeMask = [case lists:member(Arg, Opaques) of
- true -> opaque;
- false -> structured
- end || Arg <- ArgTypes],
+ true -> opaque;
+ false -> structured
+ end || Arg <- ArgTypes],
NewArgsSig = t_inf_lists_masked(SigArgs, ArgTypes, ArgModeMask),
NewArgsContract = t_inf_lists_masked(CArgs, ArgTypes, ArgModeMask),
NewArgsBif = t_inf_lists_masked(BifArgs, ArgTypes, ArgModeMask),
@@ -637,7 +641,7 @@ handle_apply_or_call([{TypeOfApply, {Fun, Sig, Contr, LocalRet}}|Left],
NewArgTypes = t_inf_lists_masked(NewArgTypes0, NewArgsBif, ArgModeMask),
BifRet = BifRange(NewArgTypes),
{TmpArgTypes, TmpArgsContract} =
- case (TypeOfApply == remote) andalso (not IsBIF) of
+ case (TypeOfApply =:= remote) andalso (not IsBIF) of
true ->
List1 = lists:zip(CArgs, NewArgTypes),
List2 = lists:zip(CArgs, NewArgsContract),
@@ -648,16 +652,17 @@ handle_apply_or_call([{TypeOfApply, {Fun, Sig, Contr, LocalRet}}|Left],
false -> {NewArgTypes, NewArgsContract}
end,
ContrRet = CRange(TmpArgTypes),
- RetMode = case t_contains_opaque(ContrRet) orelse t_contains_opaque(BifRet) of
- true -> opaque;
- false -> structured
- end,
+ RetMode =
+ case t_contains_opaque(ContrRet) orelse t_contains_opaque(BifRet) of
+ true -> opaque;
+ false -> structured
+ end,
RetWithoutLocal = t_inf(t_inf(ContrRet, BifRet, RetMode), SigRange, RetMode),
?debug("--------------------------------------------------------\n", []),
?debug("Fun: ~p\n", [Fun]),
?debug("Args: ~s\n", [erl_types:t_to_string(t_product(ArgTypes))]),
?debug("NewArgsSig: ~s\n", [erl_types:t_to_string(t_product(NewArgsSig))]),
- ?debug("NewArgsContract: ~s\n",
+ ?debug("NewArgsContract: ~s\n",
[erl_types:t_to_string(t_product(NewArgsContract))]),
?debug("NewArgsBif: ~s\n", [erl_types:t_to_string(t_product(NewArgsBif))]),
?debug("NewArgTypes: ~s\n", [erl_types:t_to_string(t_product(NewArgTypes))]),
@@ -677,11 +682,11 @@ handle_apply_or_call([{TypeOfApply, {Fun, Sig, Contr, LocalRet}}|Left],
%% respective callback module's function.
Module = State#state.module,
- BehApiInfo = State#state.behaviour_api_info,
+ BehApiDict = State#state.behaviour_api_dict,
{RealFun, RealArgTypes, RealArgs} =
case dialyzer_behaviours:translate_behaviour_api_call(Fun, ArgTypes,
Args, Module,
- BehApiInfo) of
+ BehApiDict) of
plain_call -> {Fun, ArgTypes, Args};
BehaviourAPI -> BehaviourAPI
end,
@@ -698,10 +703,10 @@ handle_apply_or_call([{TypeOfApply, {Fun, Sig, Contr, LocalRet}}|Left],
FailedSig = any_none(NewArgsSig),
FailedContract = any_none([CRange(TmpArgsContract)|NewArgsContract]),
FailedBif = any_none([BifRange(NewArgsBif)|NewArgsBif]),
- InfSig = t_inf(t_fun(SigArgs, SigRange),
+ InfSig = t_inf(t_fun(SigArgs, SigRange),
t_fun(BifArgs, BifRange(BifArgs))),
FailReason = apply_fail_reason(FailedSig, FailedBif, FailedContract),
- Msg = get_apply_fail_msg(Fun, Args, ArgTypes, NewArgTypes, InfSig,
+ Msg = get_apply_fail_msg(Fun, Args, ArgTypes, NewArgTypes, InfSig,
Contr, CArgs, State1, FailReason),
WarnType = case Msg of
{call, _} -> ?WARN_FAILING_CALL;
@@ -725,15 +730,15 @@ handle_apply_or_call([{TypeOfApply, {Fun, Sig, Contr, LocalRet}}|Left],
remote ->
add_bif_warnings(Fun, NewArgTypes, Tree, State2)
end,
- NewAccArgTypes =
+ NewAccArgTypes =
case FailedConj of
true -> AccArgTypes;
false -> [t_sup(X, Y) || {X, Y} <- lists:zip(NewArgTypes, AccArgTypes)]
end,
NewAccRet = t_sup(AccRet, t_inf(RetWithoutLocal, LocalRet, opaque)),
- handle_apply_or_call(Left, Args, ArgTypes, Map, Tree,
+ handle_apply_or_call(Left, Args, ArgTypes, Map, Tree,
State3, NewAccArgTypes, NewAccRet);
-handle_apply_or_call([], Args, _ArgTypes, Map, _Tree, State,
+handle_apply_or_call([], Args, _ArgTypes, Map, _Tree, State,
AccArgTypes, AccRet) ->
NewMap = enter_type_lists(Args, AccArgTypes, Map),
{State, NewMap, AccRet}.
@@ -745,13 +750,13 @@ apply_fail_reason(FailedSig, FailedBif, FailedContract) ->
true -> both
end.
-get_apply_fail_msg(Fun, Args, ArgTypes, NewArgTypes,
+get_apply_fail_msg(Fun, Args, ArgTypes, NewArgTypes,
Sig, Contract, ContrArgs, State, FailReason) ->
ArgStrings = format_args(Args, ArgTypes, State),
ContractInfo =
case Contract of
{value, #contract{} = C} ->
- {dialyzer_contracts:is_overloaded(C),
+ {dialyzer_contracts:is_overloaded(C),
dialyzer_contracts:contract_to_string(C)};
none -> {false, none}
end,
@@ -765,7 +770,7 @@ get_apply_fail_msg(Fun, Args, ArgTypes, NewArgTypes,
{M, F, _A} ->
case is_opaque_type_test_problem(Fun, NewArgTypes, State) of
true ->
- [Opaque] = NewArgTypes,
+ [Opaque] = NewArgTypes,
{opaque_type_test, [atom_to_list(F), erl_types:t_to_string(Opaque)]};
false ->
SigArgs = t_fun_args(Sig),
@@ -789,7 +794,7 @@ get_apply_fail_msg(Fun, Args, ArgTypes, NewArgTypes,
{call_without_opaque, [M, F, ArgStrings, ExpectedTriples]};
false -> %% there is a structured term clash in some argument
{call, [M, F, ArgStrings,
- ArgNs, FailReason,
+ ArgNs, FailReason,
format_sig_args(Sig, State),
format_type(t_fun_range(Sig), State),
ContractInfo]}
@@ -797,8 +802,8 @@ get_apply_fail_msg(Fun, Args, ArgTypes, NewArgTypes,
end
end;
Label when is_integer(Label) ->
- {apply, [ArgStrings,
- ArgNs, FailReason,
+ {apply, [ArgStrings,
+ ArgNs, FailReason,
format_sig_args(Sig, State),
format_type(t_fun_range(Sig), State),
ContractInfo]}
@@ -826,7 +831,7 @@ is_opaque_type_test_problem(Fun, ArgTypes, State) ->
FN =:= is_number; FN =:= is_pid; FN =:= is_port;
FN =:= is_reference; FN =:= is_tuple ->
[Type] = ArgTypes,
- erl_types:t_is_opaque(Type) andalso
+ erl_types:t_is_opaque(Type) andalso
not lists:member(Type, State#state.opaques);
_ -> false
end.
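
For reference, the situation detected above arises when a type-test BIF is applied to a term whose type is opaque outside its defining module. A hypothetical example (the module and type names are made up):

-module(counter).
-export([new/0]).
-export_type([counter/0]).
-opaque counter() :: {counter, non_neg_integer()}.

new() -> {counter, 0}.

%% Elsewhere, a guard such as
%%
%%   check(C) when is_tuple(C) -> ok   %% C :: counter:counter()
%%
%% peeks inside the opaque type and would typically be reported as an
%% opaque type test rather than as an ordinary failing call.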
@@ -1043,7 +1048,7 @@ handle_cons(Tree, Map, State) ->
Tl = cerl:cons_tl(Tree),
{State1, Map1, HdType} = traverse(Hd, Map, State),
{State2, Map2, TlType} = traverse(Tl, Map1, State1),
- State3 =
+ State3 =
case t_is_none(t_inf(TlType, t_list())) of
true ->
Msg = {improper_list_constr, [format_type(TlType, State2)]},
@@ -1088,7 +1093,7 @@ handle_let(Tree, Map, #state{callgraph = Callgraph, races = Races} = State) ->
case cerl:is_literal(Mod) andalso
cerl:concrete(Mod) =:= ets andalso
cerl:is_literal(Name) andalso
- cerl:concrete(Name) =:= new of
+ cerl:concrete(Name) =:= new of
true ->
NewTable = dialyzer_races:get_new_table(State1#state.races),
renew_public_tables(Vars, NewTable,
@@ -1112,7 +1117,7 @@ handle_module(Tree, Map, State) ->
%% By not including the variables in scope we can ensure that we
%% get the current function type when using the variables.
Defs = cerl:module_defs(Tree),
- PartFun = fun({_Var, Fun}) ->
+ PartFun = fun({_Var, Fun}) ->
state__is_escaping(get_label(Fun), State)
end,
{Defs1, Defs2} = lists:partition(PartFun, Defs),
@@ -1143,12 +1148,12 @@ handle_receive(Tree, Map,
RaceListSize + 1, State);
false -> State
end,
- {MapList, State2, ReceiveType} =
+ {MapList, State2, ReceiveType} =
handle_clauses(Clauses, ?no_arg, t_any(), t_any(), State1, [], Map,
[], []),
Map1 = join_maps(MapList, Map),
{State3, Map2, TimeoutType} = traverse(Timeout, Map1, State2),
- case (t_is_atom(TimeoutType) andalso
+ case (t_is_atom(TimeoutType) andalso
(t_atom_vals(TimeoutType) =:= ['infinity'])) of
true ->
{State3, Map2, ReceiveType};
@@ -1168,17 +1173,17 @@ handle_try(Tree, Map, State) ->
Vars = cerl:try_vars(Tree),
Body = cerl:try_body(Tree),
Handler = cerl:try_handler(Tree),
- {State1, Map1, ArgType} = traverse(Arg, Map, State),
+ {State1, Map1, ArgType} = traverse(Arg, Map, State),
Map2 = mark_as_fresh(Vars, Map1),
{SuccState, SuccMap, SuccType} =
case bind_pat_vars(Vars, t_to_tlist(ArgType), [], Map2, State1) of
{error, _, _, _, _} ->
{State1, map__new(), t_none()};
{SuccMap1, VarTypes} ->
- %% Try to bind the argument. Will only succeed if
+ %% Try to bind the argument. Will only succeed if
%% it is a simple structured term.
SuccMap2 =
- case bind_pat_vars_reverse([Arg], [t_product(VarTypes)], [],
+ case bind_pat_vars_reverse([Arg], [t_product(VarTypes)], [],
SuccMap1, State1) of
{error, _, _, _, _} -> SuccMap1;
{SM, _} -> SM
@@ -1212,10 +1217,10 @@ handle_tuple(Tree, Map, State) ->
RecFields = t_tuple_args(RecStruct),
case bind_pat_vars(Elements, RecFields, [], Map1, State1) of
{error, _, ErrorPat, ErrorType, _} ->
- Msg = {record_constr,
+ Msg = {record_constr,
[TagVal, format_patterns(ErrorPat),
format_type(ErrorType, State1)]},
- State2 = state__add_warning(State1, ?WARN_MATCHING,
+ State2 = state__add_warning(State1, ?WARN_MATCHING,
Tree, Msg),
{State2, Map1, t_none()};
{Map2, _ETypes} ->
@@ -1224,26 +1229,24 @@ handle_tuple(Tree, Map, State) ->
_ ->
case state__lookup_record(TagVal, length(Left), State1) of
error -> {State1, Map1, TupleType};
- {ok, Prototype} ->
- %% io:format("In handle_tuple:\n Prototype = ~p\n", [Prototype]),
- InfTupleType = t_inf(Prototype, TupleType),
- %% io:format(" TupleType = ~p,\n Inf = ~p\n", [TupleType, InfTupleType]),
+ {ok, RecType} ->
+ InfTupleType = t_inf(RecType, TupleType),
case t_is_none(InfTupleType) of
true ->
- Msg = {record_constr,
- [format_type(TupleType, State1), TagVal]},
- State2 = state__add_warning(State1, ?WARN_MATCHING,
+ RecC = format_type(TupleType, State1),
+ FieldDiffs = format_field_diffs(TupleType, State1),
+ Msg = {record_constr, [RecC, FieldDiffs]},
+ State2 = state__add_warning(State1, ?WARN_MATCHING,
Tree, Msg),
{State2, Map1, t_none()};
false ->
- case bind_pat_vars(Elements, t_tuple_args(Prototype),
+ case bind_pat_vars(Elements, t_tuple_args(RecType),
[], Map1, State1) of
{error, bind, ErrorPat, ErrorType, _} ->
- %% io:format("error\n", []),
- Msg = {record_constr,
+ Msg = {record_constr,
[TagVal, format_patterns(ErrorPat),
format_type(ErrorType, State1)]},
- State2 = state__add_warning(State1, ?WARN_MATCHING,
+ State2 = state__add_warning(State1, ?WARN_MATCHING,
Tree, Msg),
{State2, Map1, t_none()};
{Map2, ETypes} ->
@@ -1305,7 +1308,7 @@ handle_clauses([C|Left], Arg, ArgType, OrigArgType,
handle_clauses([], _Arg, _ArgType, _OrigArgType,
#state{callgraph = Callgraph, races = Races} = State,
CaseTypes, _MapIn, Acc, ClauseAcc) ->
- State1 =
+ State1 =
case dialyzer_callgraph:get_race_detection(Callgraph) andalso
dialyzer_races:get_race_analysis(Races) of
true ->
@@ -1313,7 +1316,7 @@ handle_clauses([], _Arg, _ArgType, _OrigArgType,
[dialyzer_races:end_case_new(ClauseAcc)|
dialyzer_races:get_race_list(Races)],
dialyzer_races:get_race_list_size(Races) + 1, State);
- false -> State
+ false -> State
end,
{lists:reverse(Acc), State1, t_sup(CaseTypes)}.
@@ -1324,7 +1327,7 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map,
Body = cerl:clause_body(C),
RaceDetection = dialyzer_callgraph:get_race_detection(Callgraph),
RaceAnalysis = dialyzer_races:get_race_analysis(Races),
- State1 =
+ State1 =
case RaceDetection andalso RaceAnalysis of
true ->
state__renew_fun_args(Pats, State);
@@ -1339,7 +1342,7 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map,
true ->
{error, bind, Pats, ArgType0, ArgType0};
false ->
- ArgTypes =
+ ArgTypes =
case t_is_any(ArgType0) of
true -> [ArgType0 || _ <- Pats];
false -> t_to_tlist(ArgType0)
@@ -1348,7 +1351,7 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map,
end,
case BindRes of
{error, BindOrOpaque, NewPats, Type, OpaqueTerm} ->
- ?debug("Failed binding pattern: ~s\nto ~s\n",
+ ?debug("Failed binding pattern: ~s\nto ~s\n",
[cerl_prettypr:format(C), format_type(ArgType0, State1)]),
case state__warning_mode(State1) of
false ->
@@ -1359,7 +1362,7 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map,
bind -> format_patterns(Pats);
opaque -> format_patterns(NewPats)
end,
- {Msg, Force} =
+ {Msg, Force} =
case t_is_none(ArgType0) of
true ->
PatTypes = [PatString, format_type(OrigArgType, State1)],
@@ -1375,13 +1378,13 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map,
{_, _} -> {{pattern_match_cov, PatTypes}, false}
end;
false ->
- %% Try to find out if this is a default clause in a list
+ %% Try to find out if this is a default clause in a list
%% comprehension and suppress this. A real Hack(tm)
Force0 =
case is_compiler_generated(cerl:get_ann(C)) of
true ->
case Pats of
- [Pat] ->
+ [Pat] ->
case cerl:is_c_cons(Pat) of
true ->
not (cerl:is_c_var(cerl:cons_hd(Pat)) andalso
@@ -1398,9 +1401,9 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map,
end,
PatTypes = case BindOrOpaque of
bind -> [PatString, format_type(ArgType0, State1)];
- opaque -> [PatString, format_type(Type, State1),
+ opaque -> [PatString, format_type(Type, State1),
format_type(OpaqueTerm, State1)]
- end,
+ end,
FailedMsg = case BindOrOpaque of
bind -> {pattern_match, PatTypes};
opaque -> {opaque_match, PatTypes}
@@ -1420,9 +1423,9 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map,
case Arg =:= ?no_arg of
true -> Map2;
false ->
- %% Try to bind the argument. Will only succeed if
+ %% Try to bind the argument. Will only succeed if
%% it is a simple structured term.
- case bind_pat_vars_reverse([Arg], [t_product(PatTypes)],
+ case bind_pat_vars_reverse([Arg], [t_product(PatTypes)],
[], Map2, State1) of
{error, _, _, _, _} -> Map2;
{NewMap, _} -> NewMap
@@ -1436,11 +1439,11 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map,
t_subtract(t_product(t_to_tlist(ArgType0)), GenType)
end,
case bind_guard(Guard, Map3, State1) of
- {error, Reason} ->
- ?debug("Failed guard: ~s\n",
+ {error, Reason} ->
+ ?debug("Failed guard: ~s\n",
[cerl_prettypr:format(C, [{hook, cerl_typean:pp_hook()}])]),
PatString = format_patterns(Pats),
- DefaultMsg =
+ DefaultMsg =
case Pats =:= [] of
true -> {guard_fail, []};
false ->
@@ -1470,7 +1473,7 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map,
bind_subst(Arg, Pats, Map) ->
case cerl:type(Arg) of
- values ->
+ values ->
bind_subst_list(cerl:values_es(Arg), Pats, Map);
var ->
[Pat] = Pats,
@@ -1499,16 +1502,16 @@ bind_subst_list([], [], Map) ->
%%
bind_pat_vars(Pats, Types, Acc, Map, State) ->
- try
+ try
bind_pat_vars(Pats, Types, Acc, Map, State, false)
- catch
+ catch
throw:Error -> Error % Error = {error, bind | opaque, ErrorPats, ErrorType, OpaqueTerm}
end.
bind_pat_vars_reverse(Pats, Types, Acc, Map, State) ->
- try
+ try
bind_pat_vars(Pats, Types, Acc, Map, State, true)
- catch
+ catch
throw:Error -> Error % Error = {error, bind | opaque, ErrorPats, ErrorType, OpaqueTerm}
end.
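
Callers of these wrappers discriminate on the shape of the result: a successful bind returns the updated map together with the bound types, while a failed bind returns the thrown five-element error tuple. A minimal sketch, using an illustrative helper name:

bind_or_keep(Pats, Types, Map, State) ->
    case bind_pat_vars(Pats, Types, [], Map, State) of
        {error, _BindOrOpaque, _ErrorPats, _ErrorType, _OpaqueTerm} ->
            %% binding failed; keep the old map
            Map;
        {NewMap, _BoundTypes} ->
            NewMap
    end.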
@@ -1520,7 +1523,7 @@ bind_pat_vars([Pat|PatLeft], [Type|TypeLeft], Acc, Map, State, Rev) ->
AliasPat = cerl:alias_pat(Pat),
Var = cerl:alias_var(Pat),
Map1 = enter_subst(Var, AliasPat, Map),
- {Map2, [PatType]} = bind_pat_vars([AliasPat], [Type], [],
+ {Map2, [PatType]} = bind_pat_vars([AliasPat], [Type], [],
Map1, State, Rev),
{enter_type(Var, PatType, Map2), PatType};
binary ->
@@ -1541,18 +1544,18 @@ bind_pat_vars([Pat|PatLeft], [Type|TypeLeft], Acc, Map, State, Rev) ->
cons ->
Cons = t_inf(Type, t_cons()),
case t_is_none(Cons) of
- true ->
+ true ->
bind_opaque_pats(t_cons(), Type, Pat, Map, State, Rev);
false ->
- {Map1, [HdType, TlType]} =
+ {Map1, [HdType, TlType]} =
bind_pat_vars([cerl:cons_hd(Pat), cerl:cons_tl(Pat)],
- [t_cons_hd(Cons), t_cons_tl(Cons)],
+ [t_cons_hd(Cons), t_cons_tl(Cons)],
[], Map, State, Rev),
{Map1, t_cons(HdType, TlType)}
end;
literal ->
Literal = literal_type(Pat),
- LiteralOrOpaque =
+ LiteralOrOpaque =
case t_opaque_match_atom(Literal, State#state.opaques) of
[Opaque] -> Opaque;
_ -> Literal
@@ -1564,7 +1567,7 @@ bind_pat_vars([Pat|PatLeft], [Type|TypeLeft], Acc, Map, State, Rev) ->
end;
tuple ->
Es = cerl:tuple_es(Pat),
- Prototype =
+ Prototype =
case Es of
[] -> t_tuple([]);
[Tag|Left] ->
@@ -1585,10 +1588,10 @@ bind_pat_vars([Pat|PatLeft], [Type|TypeLeft], Acc, Map, State, Rev) ->
false ->
SubTuples = t_tuple_subtypes(Tuple),
%% Need to call the top function to get the try-catch wrapper
- Results =
+ Results =
case Rev of
true ->
- [bind_pat_vars_reverse(Es, t_tuple_args(SubTuple), [],
+ [bind_pat_vars_reverse(Es, t_tuple_args(SubTuple), [],
Map, State)
|| SubTuple <- SubTuples];
false ->
@@ -1632,12 +1635,12 @@ bind_pat_vars([Pat|PatLeft], [Type|TypeLeft], Acc, Map, State, Rev) ->
end,
%% Must do inf when binding args to pats. Vars in pats are fresh.
VarType2 = t_inf(VarType1, Type),
- VarType3 =
+ VarType3 =
case Opaques =/= [] of
true ->
case t_opaque_match_record(VarType2, Opaques) of
[OpaqueRec] -> OpaqueRec;
- _ ->
+ _ ->
case t_opaque_match_atom(VarType2, Opaques) of
[OpaqueAtom] -> OpaqueAtom;
_ -> VarType2
@@ -1648,9 +1651,9 @@ bind_pat_vars([Pat|PatLeft], [Type|TypeLeft], Acc, Map, State, Rev) ->
case t_is_none(VarType3) of
true ->
case t_find_opaque_mismatch(VarType1, Type) of
- {ok, T1, T2} ->
+ {ok, T1, T2} ->
bind_error([Pat], T1, T2, opaque);
- error ->
+ error ->
bind_error([Pat], Type, t_none(), bind)
end;
false ->
@@ -1752,10 +1755,10 @@ bind_guard(Guard, Map, State) ->
end.
bind_guard(Guard, Map, Env, Eval, State) ->
- ?debug("Handling ~w guard: ~s\n",
+ ?debug("Handling ~w guard: ~s\n",
[Eval, cerl_prettypr:format(Guard, [{noann, true}])]),
case cerl:type(Guard) of
- binary ->
+ binary ->
{Map, t_binary()};
'case' ->
Arg = cerl:case_arg(Guard),
@@ -1793,10 +1796,10 @@ bind_guard(Guard, Map, Env, Eval, State) ->
var ->
?debug("Looking for var(~w)...", [cerl_trees:get_label(Guard)]),
case dict:find(get_label(Guard), Env) of
- error ->
+ error ->
?debug("Did not find it\n", []),
Type = lookup_type(Guard, Map),
- Constr =
+ Constr =
case Eval of
pos -> t_atom(true);
neg -> t_atom(false);
@@ -1804,7 +1807,7 @@ bind_guard(Guard, Map, Env, Eval, State) ->
end,
Inf = t_inf(Constr, Type),
{enter_type(Guard, Inf, Map), Inf};
- {ok, Tree} ->
+ {ok, Tree} ->
?debug("Found it\n", []),
{Map1, Type} = bind_guard(Tree, Map, Env, Eval, State),
{enter_type(Guard, Type, Map1), Type}
@@ -1827,7 +1830,7 @@ handle_guard_call(Guard, Map, Env, Eval, State) ->
handle_guard_type_test(Guard, F, Map, Env, Eval, State);
{erlang, is_function, 2} ->
handle_guard_is_function(Guard, Map, Env, Eval, State);
- MFA when (MFA =:= {erlang, internal_is_record, 3}) or
+ MFA when (MFA =:= {erlang, internal_is_record, 3}) or
(MFA =:= {erlang, is_record, 3}) ->
handle_guard_is_record(Guard, Map, Env, Eval, State);
{erlang, '=:=', 2} ->
@@ -1840,7 +1843,7 @@ handle_guard_call(Guard, Map, Env, Eval, State) ->
handle_guard_or(Guard, Map, Env, Eval, State);
{erlang, 'not', 1} ->
handle_guard_not(Guard, Map, Env, Eval, State);
- {erlang, Comp, 2} when Comp =:= '<'; Comp =:= '=<';
+ {erlang, Comp, 2} when Comp =:= '<'; Comp =:= '=<';
Comp =:= '>'; Comp =:= '>=' ->
handle_guard_comp(Guard, Comp, Map, Env, Eval, State);
_ ->
@@ -1875,7 +1878,7 @@ handle_guard_gen_fun({M, F, A}, Guard, Map, Env, Eval, State) ->
List -> List
end,
Map2 = enter_type_lists(Args, t_inf_lists(BifArgs, As0, Mode), Map1),
- Ret =
+ Ret =
case Eval of
pos -> t_inf(t_atom(true), BifRet);
neg -> t_inf(t_atom(false), BifRet);
@@ -1892,14 +1895,14 @@ handle_guard_gen_fun({M, F, A}, Guard, Map, Env, Eval, State) ->
end.
handle_guard_type_test(Guard, F, Map, Env, Eval, State) ->
- [Arg] = cerl:call_args(Guard),
+ [Arg] = cerl:call_args(Guard),
{Map1, ArgType} = bind_guard(Arg, Map, Env, dont_know, State),
case bind_type_test(Eval, F, ArgType, State) of
- error ->
+ error ->
?debug("Type test: ~w failed\n", [F]),
signal_guard_fail(Guard, [ArgType], State);
- {ok, NewArgType, Ret} ->
- ?debug("Type test: ~w succeeded, NewType: ~s, Ret: ~s\n",
+ {ok, NewArgType, Ret} ->
+ ?debug("Type test: ~w succeeded, NewType: ~s, Ret: ~s\n",
[F, t_to_string(NewArgType), t_to_string(Ret)]),
{enter_type(Arg, NewArgType, Map1), Ret}
end.
@@ -1930,13 +1933,13 @@ bind_type_test(Eval, TypeTest, ArgType, State) ->
end;
neg ->
case Mode of
- opaque ->
+ opaque ->
Struct = erl_types:t_opaque_structure(ArgType),
case t_is_none(t_subtract(Struct, Type)) of
true -> error;
false -> {ok, ArgType, t_atom(false)}
end;
- structured ->
+ structured ->
Sub = t_subtract(ArgType, Type),
case t_is_none(Sub) of
true -> error;
@@ -1974,7 +1977,7 @@ handle_guard_comp(Guard, Comp, Map, Env, Eval, State) ->
error -> signal_guard_fail(Guard, ArgTypes, State);
{ok, NewMap} -> {NewMap, t_atom(true)}
end;
- {_, _} ->
+ {_, _} ->
handle_guard_gen_fun({erlang, Comp, 2}, Guard, Map, Env, Eval, State)
end.
@@ -2021,7 +2024,7 @@ handle_guard_is_function(Guard, Map, Env, Eval, State) ->
end,
FunType = t_inf(FunType0, FunTypeConstr),
case t_is_none(FunType) of
- true ->
+ true ->
case Eval of
pos -> signal_guard_fail(Guard, ArgTypes0, State);
neg -> {Map1, t_atom(false)};
@@ -2057,16 +2060,16 @@ handle_guard_is_record(Guard, Map, Env, Eval, State) ->
end,
Type = t_inf(NewTupleType, RecType, Mode),
case t_is_none(Type) of
- true ->
+ true ->
case Eval of
- pos -> signal_guard_fail(Guard,
- [RecType, t_from_term(Tag),
+ pos -> signal_guard_fail(Guard,
+ [RecType, t_from_term(Tag),
t_from_term(Arity)],
State);
neg -> {Map1, t_atom(false)};
dont_know -> {Map1, t_atom(false)}
end;
- false ->
+ false ->
case Eval of
pos -> {enter_type(Rec, Type, Map1), t_atom(true)};
neg -> {Map1, t_atom(false)};
@@ -2079,17 +2082,17 @@ handle_guard_eq(Guard, Map, Env, Eval, State) ->
case {cerl:type(Arg1), cerl:type(Arg2)} of
{literal, literal} ->
case cerl:concrete(Arg1) =:= cerl:concrete(Arg2) of
- true ->
- if
+ true ->
+ if
Eval =:= pos -> {Map, t_atom(true)};
Eval =:= neg -> throw({fail, none});
Eval =:= dont_know -> {Map, t_atom(true)}
end;
false ->
- if
+ if
Eval =:= neg -> {Map, t_atom(false)};
Eval =:= dont_know -> {Map, t_atom(false)};
- Eval =:= pos ->
+ Eval =:= pos ->
ArgTypes = [t_from_term(cerl:concrete(Arg1)),
t_from_term(cerl:concrete(Arg2))],
signal_guard_fail(Guard, ArgTypes, State)
@@ -2144,7 +2147,7 @@ handle_guard_eqeq(Guard, Map, Env, Eval, State) ->
false ->
if Eval =:= neg -> {Map, t_atom(false)};
Eval =:= dont_know -> {Map, t_atom(false)};
- Eval =:= pos ->
+ Eval =:= pos ->
ArgTypes = [t_from_term(cerl:concrete(Arg1)),
t_from_term(cerl:concrete(Arg2))],
signal_guard_fail(Guard, ArgTypes, State)
@@ -2161,7 +2164,7 @@ handle_guard_eqeq(Guard, Map, Env, Eval, State) ->
bind_eqeq_guard(Guard, Arg1, Arg2, Map, Env, Eval, State) ->
{Map1, Type1} = bind_guard(Arg1, Map, Env, dont_know, State),
{Map2, Type2} = bind_guard(Arg2, Map1, Env, dont_know, State),
- ?debug("Types are:~s =:= ~s\n", [t_to_string(Type1),
+ ?debug("Types are:~s =:= ~s\n", [t_to_string(Type1),
t_to_string(Type2)]),
Inf = t_inf(Type1, Type2),
case t_is_none(Inf) of
@@ -2202,15 +2205,15 @@ bind_eqeq_guard_lit_other(Guard, Arg1, Arg2, Map, Env, State) ->
{_, Type} = MT = bind_guard(Arg2, Map, Env, pos, State),
case t_is_atom(true, Type) of
true -> MT;
- false ->
+ false ->
{_, Type0} = bind_guard(Arg2, Map, Env, dont_know, State),
signal_guard_fail(Guard, [Type0, t_atom(true)], State)
end;
- false ->
+ false ->
{Map1, Type} = bind_guard(Arg2, Map, Env, neg, State),
case t_is_atom(false, Type) of
true -> {Map1, t_atom(true)};
- false ->
+ false ->
{_, Type0} = bind_guard(Arg2, Map, Env, dont_know, State),
signal_guard_fail(Guard, [Type0, t_atom(true)], State)
end;
@@ -2242,11 +2245,11 @@ handle_guard_and(Guard, Map, Env, Eval, State) ->
end
end;
neg ->
- {Map1, Type1} =
+ {Map1, Type1} =
try bind_guard(Arg1, Map, Env, neg, State)
catch throw:{fail, _} -> bind_guard(Arg2, Map, Env, pos, State)
end,
- {Map2, Type2} =
+ {Map2, Type2} =
try bind_guard(Arg1, Map, Env, neg, State)
catch throw:{fail, _} -> bind_guard(Arg2, Map, Env, pos, State)
end,
@@ -2272,18 +2275,18 @@ handle_guard_or(Guard, Map, Env, Eval, State) ->
[Arg1, Arg2] = cerl:call_args(Guard),
case Eval of
pos ->
- {Map1, Bool1} =
+ {Map1, Bool1} =
try bind_guard(Arg1, Map, Env, pos, State)
- catch
+ catch
throw:{fail,_} -> bind_guard(Arg1, Map, Env, dont_know, State)
end,
- {Map2, Bool2} =
+ {Map2, Bool2} =
try bind_guard(Arg2, Map, Env, pos, State)
- catch
+ catch
throw:{fail,_} -> bind_guard(Arg2, Map, Env, dont_know, State)
end,
case ((t_is_atom(true, Bool1) andalso t_is_boolean(Bool2))
- orelse
+ orelse
(t_is_atom(true, Bool2) andalso t_is_boolean(Bool1))) of
true -> {join_maps([Map1, Map2], Map), t_atom(true)};
false -> throw({fail, none})
@@ -2311,19 +2314,19 @@ handle_guard_or(Guard, Map, Env, Eval, State) ->
handle_guard_not(Guard, Map, Env, Eval, State) ->
[Arg] = cerl:call_args(Guard),
case Eval of
- neg ->
+ neg ->
{Map1, Type} = bind_guard(Arg, Map, Env, pos, State),
case t_is_atom(true, Type) of
true -> {Map1, t_atom(false)};
false -> throw({fail, none})
end;
- pos ->
+ pos ->
{Map1, Type} = bind_guard(Arg, Map, Env, neg, State),
case t_is_atom(false, Type) of
true -> {Map1, t_atom(true)};
false -> throw({fail, none})
end;
- dont_know ->
+ dont_know ->
{Map1, Type} = bind_guard(Arg, Map, Env, dont_know, State),
Bool = t_inf(Type, t_boolean()),
case t_is_none(Bool) of
@@ -2355,10 +2358,10 @@ signal_guard_fail(Guard, ArgTypes, State) ->
MFA = {cerl:atom_val(cerl:call_module(Guard)), F, length(Args)},
Msg =
case is_infix_op(MFA) of
- true ->
+ true ->
[ArgType1, ArgType2] = ArgTypes,
[Arg1, Arg2] = Args,
- {guard_fail, [format_args_1([Arg1], [ArgType1], State),
+ {guard_fail, [format_args_1([Arg1], [ArgType1], State),
atom_to_list(F),
format_args_1([Arg2], [ArgType2], State)]};
false ->
@@ -2381,7 +2384,7 @@ is_infix_op({M, F, A}) when is_atom(M), is_atom(F),
no_return().
signal_guard_fatal_fail(Guard, ArgTypes, State) ->
- Args = cerl:call_args(Guard),
+ Args = cerl:call_args(Guard),
F = cerl:atom_val(cerl:call_name(Guard)),
Msg = mk_guard_msg(F, Args, ArgTypes, State),
throw({fatal_fail, {Guard, Msg}}).
@@ -2392,11 +2395,11 @@ mk_guard_msg(F, Args, ArgTypes, State) ->
true -> {opaque_guard, FArgs};
false -> {guard_fail, FArgs}
end.
-
+
bind_guard_case_clauses(Arg, Clauses, Map, Env, Eval, State) ->
Clauses1 = filter_fail_clauses(Clauses),
{GenMap, GenArgType} = bind_guard(Arg, Map, Env, dont_know, State),
- bind_guard_case_clauses(GenArgType, GenMap, Arg, Clauses1, Map, Env, Eval,
+ bind_guard_case_clauses(GenArgType, GenMap, Arg, Clauses1, Map, Env, Eval,
t_none(), [], State).
filter_fail_clauses([Clause|Left]) ->
@@ -2413,7 +2416,7 @@ filter_fail_clauses([Clause|Left]) ->
filter_fail_clauses([]) ->
[].
-bind_guard_case_clauses(GenArgType, GenMap, ArgExpr, [Clause|Left],
+bind_guard_case_clauses(GenArgType, GenMap, ArgExpr, [Clause|Left],
Map, Env, Eval, AccType, AccMaps, State) ->
Pats = cerl:clause_pats(Clause),
{NewMap0, ArgType} =
@@ -2427,7 +2430,7 @@ bind_guard_case_clauses(GenArgType, GenMap, ArgExpr, [Clause|Left],
false -> bind_guard(ArgExpr, Map, Env, neg, State);
_ -> {GenMap, GenArgType}
end
- catch
+ catch
throw:{fail, _} -> {none, GenArgType}
end;
false ->
@@ -2457,7 +2460,7 @@ bind_guard_case_clauses(GenArgType, GenMap, ArgExpr, [Clause|Left],
NewGenArgType = t_subtract(GenArgType, GenPatType),
case (NewMap1 =:= none) orelse t_is_none(GenArgType) of
true ->
- bind_guard_case_clauses(NewGenArgType, GenMap, ArgExpr, Left, Map, Env,
+ bind_guard_case_clauses(NewGenArgType, GenMap, ArgExpr, Left, Map, Env,
Eval, AccType, AccMaps, State);
false ->
{NewAccType, NewAccMaps} =
@@ -2467,15 +2470,15 @@ bind_guard_case_clauses(GenArgType, GenMap, ArgExpr, [Clause|Left],
true -> throw({fail, none});
false -> ok
end,
- {NewMap3, CType} = bind_guard(cerl:clause_body(Clause), NewMap2,
+ {NewMap3, CType} = bind_guard(cerl:clause_body(Clause), NewMap2,
Env, Eval, State),
case Eval of
- pos ->
+ pos ->
case t_is_atom(true, CType) of
true -> ok;
false -> throw({fail, none})
end;
- neg ->
+ neg ->
case t_is_atom(false, CType) of
true -> ok;
false -> throw({fail, none})
@@ -2487,10 +2490,10 @@ bind_guard_case_clauses(GenArgType, GenMap, ArgExpr, [Clause|Left],
catch
throw:{fail, _What} -> {AccType, AccMaps}
end,
- bind_guard_case_clauses(NewGenArgType, GenMap, ArgExpr, Left, Map, Env,
+ bind_guard_case_clauses(NewGenArgType, GenMap, ArgExpr, Left, Map, Env,
Eval, NewAccType, NewAccMaps, State)
end;
-bind_guard_case_clauses(_GenArgType, _GenMap, _ArgExpr, [], Map, _Env, _Eval,
+bind_guard_case_clauses(_GenArgType, _GenMap, _ArgExpr, [], Map, _Env, _Eval,
AccType, AccMaps, _State) ->
case t_is_none(AccType) of
true -> throw({fail, none});
@@ -2576,7 +2579,7 @@ enter_type(Key, Val, {Map, Subst} = MS) ->
enter_subst(Key, Val, {Map, Subst} = MS) ->
KeyLabel = get_label(Key),
case cerl:is_literal(Val) of
- true ->
+ true ->
NewMap = dict:store(KeyLabel, literal_type(Val), Map),
{NewMap, Subst};
false ->
@@ -2598,13 +2601,13 @@ enter_subst(Key, Val, {Map, Subst} = MS) ->
end
end.
-lookup_type(Key, {Map, Subst}) ->
+lookup_type(Key, {Map, Subst}) ->
lookup(Key, Map, Subst, t_none()).
lookup(Key, Map, Subst, AnyNone) ->
case cerl:is_literal(Key) of
true -> literal_type(Key);
- false ->
+ false ->
Label = get_label(Key),
case dict:find(Label, Subst) of
{ok, NewKey} -> lookup(NewKey, Map, Subst, AnyNone);
@@ -2669,7 +2672,7 @@ get_label(T) ->
t_is_simple(ArgType) ->
t_is_atom(ArgType) orelse t_is_number(ArgType) orelse t_is_port(ArgType)
- orelse t_is_pid(ArgType) orelse t_is_reference(ArgType)
+ orelse t_is_pid(ArgType) orelse t_is_reference(ArgType)
orelse t_is_nil(ArgType).
%% t_is_structured(ArgType) ->
@@ -2687,8 +2690,8 @@ is_call_to_send(Tree) ->
Mod = cerl:call_module(Tree),
Name = cerl:call_name(Tree),
Arity = cerl:call_arity(Tree),
- cerl:is_c_atom(Mod)
- andalso cerl:is_c_atom(Name)
+ cerl:is_c_atom(Mod)
+ andalso cerl:is_c_atom(Name)
andalso (cerl:atom_val(Name) =:= '!')
andalso (cerl:atom_val(Mod) =:= erlang)
andalso (Arity =:= 2)
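
For context, a send expression in Erlang source is represented in Core Erlang as a remote call to erlang:'!'/2, which is exactly the shape the test above looks for (module erlang, name '!', arity 2). For example:

notify(Pid, Event) ->
    Pid ! {event, Event}.   %% in Core Erlang: call 'erlang':'!'(Pid, {'event', Event})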
@@ -2714,7 +2717,7 @@ filter_match_fail([Clause] = Cls) ->
filter_match_fail([H|T]) ->
[H|filter_match_fail(T)];
filter_match_fail([]) ->
- %% This can actually happen, for example in
+ %% This can actually happen, for example in
%% receive after 1 -> ok end
[].
@@ -2731,9 +2734,11 @@ determine_mode(Type, Opaques) ->
%%% ===========================================================================
state__new(Callgraph, Tree, Plt, Module, Records, BehaviourTranslations) ->
+ Opaques = erl_types:module_builtin_opaques(Module) ++
+ erl_types:t_opaque_from_records(Records),
TreeMap = build_tree_map(Tree),
Funs = dict:fetch_keys(TreeMap),
- FunTab = init_fun_tab(Funs, dict:new(), TreeMap, Callgraph, Plt),
+ FunTab = init_fun_tab(Funs, dict:new(), TreeMap, Callgraph, Plt, Opaques),
Work = init_work([get_label(Tree)]),
Env = dict:store(top, map__new(), dict:new()),
Opaques = erl_types:module_builtin_opaques(Module) ++
@@ -2741,12 +2746,12 @@ state__new(Callgraph, Tree, Plt, Module, Records, BehaviourTranslations) ->
#state{callgraph = Callgraph, envs = Env, fun_tab = FunTab, opaques = Opaques,
plt = Plt, races = dialyzer_races:new(), records = Records,
warning_mode = false, warnings = [], work = Work, tree_map = TreeMap,
- module = Module, behaviour_api_info = BehaviourTranslations}.
+ module = Module, behaviour_api_dict = BehaviourTranslations}.
state__mark_fun_as_handled(#state{fun_tab = FunTab} = State, Fun0) ->
Fun = get_label(Fun0),
case dict:find(Fun, FunTab) of
- {ok, {not_handled, Entry}} ->
+ {ok, {not_handled, Entry}} ->
State#state{fun_tab = dict:store(Fun, Entry, FunTab)};
{ok, {_, _}} ->
State
@@ -2800,7 +2805,7 @@ state__add_warning(State, Tag, Tree, Msg) ->
state__add_warning(#state{warning_mode = false} = State, _, _, _, _) ->
State;
-state__add_warning(#state{warnings = Warnings, warning_mode = true} = State,
+state__add_warning(#state{warnings = Warnings, warning_mode = true} = State,
Tag, Tree, Msg, Force) ->
Ann = cerl:get_ann(Tree),
case Force of
@@ -2848,7 +2853,7 @@ state__get_warnings(#state{tree_map = TreeMap, fun_tab = FunTab,
{Name, Contract} =
case dialyzer_callgraph:lookup_name(FunLbl, Callgraph) of
error -> {[], none};
- {ok, {_M, F, A} = MFA} ->
+ {ok, {_M, F, A} = MFA} ->
{[F, A], dialyzer_plt:lookup_contract(Plt, MFA)}
end,
case t_is_none(Ret) of
@@ -2866,19 +2871,19 @@ state__get_warnings(#state{tree_map = TreeMap, fun_tab = FunTab,
case classify_returns(Fun) of
no_match ->
Msg = {no_return, [no_match|Name]},
- state__add_warning(AccState, ?WARN_RETURN_NO_RETURN,
+ state__add_warning(AccState, ?WARN_RETURN_NO_RETURN,
Fun, Msg);
only_explicit ->
Msg = {no_return, [only_explicit|Name]},
- state__add_warning(AccState, ?WARN_RETURN_ONLY_EXIT,
+ state__add_warning(AccState, ?WARN_RETURN_ONLY_EXIT,
Fun, Msg);
only_normal ->
Msg = {no_return, [only_normal|Name]},
- state__add_warning(AccState, ?WARN_RETURN_NO_RETURN,
+ state__add_warning(AccState, ?WARN_RETURN_NO_RETURN,
Fun, Msg);
both ->
Msg = {no_return, [both|Name]},
- state__add_warning(AccState, ?WARN_RETURN_NO_RETURN,
+ state__add_warning(AccState, ?WARN_RETURN_NO_RETURN,
Fun, Msg)
end;
false ->
@@ -2916,10 +2921,10 @@ state__lookup_name(Fun, #state{callgraph = Callgraph}) ->
state__lookup_record(Tag, Arity, #state{records = Records}) ->
case erl_types:lookup_record(Tag, Arity, Records) of
- {ok, Fields} ->
+ {ok, Fields} ->
{ok, t_tuple([t_atom(Tag)|
[FieldType || {_FieldName, FieldType} <- Fields]])};
- error ->
+ error ->
error
end.
@@ -2942,15 +2947,15 @@ build_tree_map(Tree) ->
end,
cerl_trees:fold(Fun, dict:new(), Tree).
-init_fun_tab([top|Left], Dict, TreeMap, Callgraph, Plt) ->
+init_fun_tab([top|Left], Dict, TreeMap, Callgraph, Plt, Opaques) ->
NewDict = dict:store(top, {not_handled, {[], t_none()}}, Dict),
- init_fun_tab(Left, NewDict, TreeMap, Callgraph, Plt);
-init_fun_tab([Fun|Left], Dict, TreeMap, Callgraph, Plt) ->
+ init_fun_tab(Left, NewDict, TreeMap, Callgraph, Plt, Opaques);
+init_fun_tab([Fun|Left], Dict, TreeMap, Callgraph, Plt, Opaques) ->
Arity = cerl:fun_arity(dict:fetch(Fun, TreeMap)),
FunEntry =
case dialyzer_callgraph:is_escaping(Fun, Callgraph) of
true ->
- Args = lists:duplicate(Arity, t_any()),
+ Args = lists:duplicate(Arity, t_any()),
case lookup_fun_sig(Fun, Callgraph, Plt) of
none -> {Args, t_unit()};
{value, {RetType, _}} ->
@@ -2962,8 +2967,8 @@ init_fun_tab([Fun|Left], Dict, TreeMap, Callgraph, Plt) ->
false -> {lists:duplicate(Arity, t_none()), t_unit()}
end,
NewDict = dict:store(Fun, {not_handled, FunEntry}, Dict),
- init_fun_tab(Left, NewDict, TreeMap, Callgraph, Plt);
-init_fun_tab([], Dict, _TreeMap, _Callgraph, _Plt) ->
+ init_fun_tab(Left, NewDict, TreeMap, Callgraph, Plt, Opaques);
+init_fun_tab([], Dict, _TreeMap, _Callgraph, _Plt, _Opaques) ->
Dict.
state__update_fun_env(Tree, Map, #state{envs = Envs} = State) ->
@@ -2990,7 +2995,7 @@ state__all_fun_types(#state{fun_tab = FunTab}) ->
dict:map(fun(_Fun, {Args, Ret}) -> t_fun(Args, Ret)end, Tab1).
state__fun_type(Fun, #state{fun_tab = FunTab}) ->
- Label =
+ Label =
if is_integer(Fun) -> Fun;
true -> get_label(Fun)
end,
@@ -3001,10 +3006,10 @@ state__fun_type(Fun, #state{fun_tab = FunTab}) ->
t_fun(A, R)
end.
-state__update_fun_entry(Tree, ArgTypes, Out0,
+state__update_fun_entry(Tree, ArgTypes, Out0,
#state{fun_tab=FunTab, callgraph=CG, plt=Plt} = State)->
Fun = get_label(Tree),
- Out1 =
+ Out1 =
if Fun =:= top -> Out0;
true ->
case lookup_fun_sig(Fun, CG, Plt) of
@@ -3016,15 +3021,15 @@ state__update_fun_entry(Tree, ArgTypes, Out0,
case dict:find(Fun, FunTab) of
{ok, {ArgTypes, OldOut}} ->
case t_is_equal(OldOut, Out) of
- true ->
- ?debug("Fixpoint for ~w: ~s\n",
- [state__lookup_name(Fun, State),
+ true ->
+ ?debug("Fixpoint for ~w: ~s\n",
+ [state__lookup_name(Fun, State),
t_to_string(t_fun(ArgTypes, Out))]),
State;
false ->
NewEntry = {ArgTypes, Out},
- ?debug("New Entry for ~w: ~s\n",
- [state__lookup_name(Fun, State),
+ ?debug("New Entry for ~w: ~s\n",
+ [state__lookup_name(Fun, State),
t_to_string(t_fun(ArgTypes, Out))]),
NewFunTab = dict:store(Fun, NewEntry, FunTab),
State1 = State#state{fun_tab = NewFunTab},
@@ -3033,8 +3038,8 @@ state__update_fun_entry(Tree, ArgTypes, Out0,
{ok, {NewArgTypes, _OldOut}} ->
%% Can only happen in self-recursive functions. Only update the out type.
NewEntry = {NewArgTypes, Out},
- ?debug("New Entry for ~w: ~s\n",
- [state__lookup_name(Fun, State),
+ ?debug("New Entry for ~w: ~s\n",
+ [state__lookup_name(Fun, State),
t_to_string(t_fun(NewArgTypes, Out))]),
NewFunTab = dict:store(Fun, NewEntry, FunTab),
State1 = State#state{fun_tab = NewFunTab},
@@ -3053,9 +3058,9 @@ state__add_work_from_fun(Tree, #state{callgraph = Callgraph,
MFAList ->
LabelList = [dialyzer_callgraph:lookup_label(MFA, Callgraph)
|| MFA <- MFAList],
- %% Must filter the result for results in this module.
+ %% Must filter the result for results in this module.
FilteredList = [L || {ok, L} <- LabelList, dict:is_key(L, TreeMap)],
- ?debug("~w: Will try to add:~w\n",
+ ?debug("~w: Will try to add:~w\n",
[state__lookup_name(get_label(Tree), State), MFAList]),
lists:foldl(fun(L, AccState) ->
state__add_work(L, AccState)
@@ -3086,15 +3091,15 @@ state__fun_info(external, #state{}) ->
external;
state__fun_info({_, _, _} = MFA, #state{plt = PLT}) ->
{MFA,
- dialyzer_plt:lookup(PLT, MFA),
+ dialyzer_plt:lookup(PLT, MFA),
dialyzer_plt:lookup_contract(PLT, MFA),
t_any()};
state__fun_info(Fun, #state{callgraph = CG, fun_tab = FunTab, plt = PLT}) ->
{Sig, Contract} =
case dialyzer_callgraph:lookup_name(Fun, CG) of
- error ->
+ error ->
{dialyzer_plt:lookup(PLT, Fun), none};
- {ok, MFA} ->
+ {ok, MFA} ->
{dialyzer_plt:lookup(PLT, MFA), dialyzer_plt:lookup_contract(PLT, MFA)}
end,
LocalRet =
@@ -3122,18 +3127,18 @@ state__find_apply_return(Tree, #state{callgraph = Callgraph} = State) ->
forward_args(Fun, ArgTypes, #state{work = Work, fun_tab = FunTab} = State) ->
{OldArgTypes, OldOut, Fixpoint} =
case dict:find(Fun, FunTab) of
- {ok, {not_handled, {OldArgTypes0, OldOut0}}} ->
+ {ok, {not_handled, {OldArgTypes0, OldOut0}}} ->
{OldArgTypes0, OldOut0, false};
{ok, {OldArgTypes0, OldOut0}} ->
- {OldArgTypes0, OldOut0,
+ {OldArgTypes0, OldOut0,
t_is_subtype(t_product(ArgTypes), t_product(OldArgTypes0))}
end,
case Fixpoint of
true -> State;
- false ->
+ false ->
NewArgTypes = [t_sup(X, Y) || {X, Y} <- lists:zip(ArgTypes, OldArgTypes)],
NewWork = add_work(Fun, Work),
- ?debug("~w: forwarding args ~s\n",
+ ?debug("~w: forwarding args ~s\n",
[state__lookup_name(Fun, State),
t_to_string(t_product(NewArgTypes))]),
NewFunTab = dict:store(Fun, {NewArgTypes, OldOut}, FunTab),
@@ -3248,7 +3253,7 @@ get_file([_|Tail]) -> get_file(Tail).
is_compiler_generated(Ann) ->
lists:member(compiler_generated, Ann) orelse (get_line(Ann) < 1).
--spec format_args([term()], [erl_types:erl_type()], state()) ->
+-spec format_args([cerl:cerl()], [erl_types:erl_type()], state()) ->
nonempty_string().
format_args([], [], _State) ->
@@ -3256,9 +3261,6 @@ format_args([], [], _State) ->
format_args(ArgList, TypeList, State) ->
"(" ++ format_args_1(ArgList, TypeList, State) ++ ")".
--spec format_args_1([term(),...], [erl_types:erl_type(),...], state()) ->
- string().
-
format_args_1([Arg], [Type], State) ->
format_arg(Arg) ++ format_type(Type, State);
format_args_1([Arg|Args], [Type|Types], State) ->
@@ -3291,6 +3293,11 @@ format_arg(Arg) ->
format_type(Type, #state{records = R}) ->
t_to_string(Type, R).
+-spec format_field_diffs(erl_types:erl_type(), state()) -> string().
+
+format_field_diffs(RecConstruction, #state{records = R}) ->
+ erl_types:record_field_diffs_to_string(RecConstruction, R).
+
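
A small sketch of how this helper is combined with format_type/2 when the record constructor warning is built in handle_tuple above; TupleType and State stand for the values available at that point:

record_constr_msg(TupleType, State) ->
    RecC = format_type(TupleType, State),
    FieldDiffs = format_field_diffs(TupleType, State),
    {record_constr, [RecC, FieldDiffs]}.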
-spec format_sig_args(erl_types:erl_type(), state()) -> string().
format_sig_args(Type, #state{records = R}) ->
@@ -3298,12 +3305,12 @@ format_sig_args(Type, #state{records = R}) ->
case SigArgs of
[] -> "()";
[SArg|SArgs] ->
- lists:flatten("(" ++ t_to_string(SArg, R)
+ lists:flatten("(" ++ t_to_string(SArg, R)
++ ["," ++ t_to_string(T, R) || T <- SArgs] ++ ")")
end.
format_cerl(Tree) ->
- cerl_prettypr:format(cerl:set_ann(Tree, []),
+ cerl_prettypr:format(cerl:set_ann(Tree, []),
[{hook, dialyzer_utils:pp_hook()},
{noann, true},
{paper, 100000}, %% These guys strip
@@ -3366,7 +3373,7 @@ find_terminals(Tree) ->
true ->
M = cerl:concrete(M0),
F = cerl:concrete(F0),
- case (erl_bif_types:is_known(M, F, A)
+ case (erl_bif_types:is_known(M, F, A)
andalso t_is_none(erl_bif_types:type(M, F, A))) of
true -> {true, false};
false -> {false, true}
@@ -3381,12 +3388,12 @@ find_terminals(Tree) ->
letrec -> find_terminals(cerl:letrec_body(Tree));
literal -> {false, true};
primop -> {false, false}; %% match_fail, etc. are not explicit exits.
- 'receive' ->
+ 'receive' ->
Timeout = cerl:receive_timeout(Tree),
Clauses = cerl:receive_clauses(Tree),
case (cerl:is_literal(Timeout) andalso
(cerl:concrete(Timeout) =:= infinity)) of
- true ->
+ true ->
if Clauses =:= [] -> {false, true}; %% A never ending receive.
true -> find_terminals_list(Clauses)
end;
@@ -3454,11 +3461,11 @@ find_rec_warnings_tuple(Tree, State) ->
TagVal = cerl:atom_val(Tag),
case state__lookup_record(TagVal, length(Left), State) of
error -> State;
- {ok, Prototype} ->
+ {ok, Prototype} ->
InfTupleType = t_inf(Prototype, TupleType),
case t_is_none(InfTupleType) of
true ->
- Msg = {record_matching,
+ Msg = {record_matching,
[format_patterns([Tree]), TagVal]},
state__add_warning(State, ?WARN_MATCHING, Tree, Msg);
false ->
@@ -3476,7 +3483,7 @@ find_rec_warnings_tuple(Tree, State) ->
%%----------------------------------------------------------------------------
-ifdef(DEBUG_PP).
-debug_pp(Tree, true) ->
+debug_pp(Tree, true) ->
io:put_chars(cerl_prettypr:format(Tree, [{hook, cerl_typean:pp_hook()}])),
io:nl(),
ok;
diff --git a/lib/dialyzer/src/dialyzer_options.erl b/lib/dialyzer/src/dialyzer_options.erl
index da0e1f9aaf..010625b7bd 100644
--- a/lib/dialyzer/src/dialyzer_options.erl
+++ b/lib/dialyzer/src/dialyzer_options.erl
@@ -184,7 +184,7 @@ build_options([], Options) ->
assert_filenames(Term, [FileName|Left]) when length(FileName) >= 0 ->
case filelib:is_file(FileName) orelse filelib:is_dir(FileName) of
true -> ok;
- false -> bad_option("No such file or directory", FileName)
+ false -> bad_option("No such file, directory or application", FileName)
end,
assert_filenames(Term, Left);
assert_filenames(_Term, []) ->
diff --git a/lib/dialyzer/src/dialyzer_plt.erl b/lib/dialyzer/src/dialyzer_plt.erl
index e387077a46..268ec4a5f0 100644
--- a/lib/dialyzer/src/dialyzer_plt.erl
+++ b/lib/dialyzer/src/dialyzer_plt.erl
@@ -21,7 +21,7 @@
%%%-------------------------------------------------------------------
%%% File : dialyzer_plt.erl
%%% Author : Tobias Lindahl <[email protected]>
-%%% Description : Interface to display information in the persistent
+%%% Description : Interface to display information in the persistent
%%% lookup tables.
%%%
%%% Created : 23 Jul 2004 by Tobias Lindahl <[email protected]>
@@ -39,10 +39,12 @@
from_file/1,
get_default_plt/0,
get_types/1,
+ get_exported_types/1,
%% insert/3,
insert_list/2,
insert_contract_list/2,
insert_types/2,
+ insert_exported_types/2,
lookup/2,
lookup_contract/2,
lookup_module/2,
@@ -57,6 +59,8 @@
%% Debug utilities
-export([pp_non_returning/0, pp_mod/1]).
+-export_type([plt/0, plt_info/0]).
+
%%----------------------------------------------------------------------
-type mod_deps() :: dict().
@@ -70,9 +74,10 @@
%%----------------------------------------------------------------------
--record(plt, {info = table_new() :: dict(),
- types = table_new() :: dict(),
- contracts = table_new() :: dict()}).
+-record(plt, {info = table_new() :: dict(),
+ types = table_new() :: dict(),
+ contracts = table_new() :: dict(),
+ exported_types = sets:new() :: set()}).
-opaque plt() :: #plt{}.
-include("dialyzer.hrl").
@@ -80,13 +85,14 @@
-type file_md5() :: {file:filename(), binary()}.
-type plt_info() :: {[file_md5()], dict()}.
--record(file_plt, {version = "" :: string(),
- file_md5_list = [] :: [file_md5()],
- info = dict:new() :: dict(),
- contracts = dict:new() :: dict(),
- types = dict:new() :: dict(),
- mod_deps :: mod_deps(),
- implementation_md5 = [] :: [file_md5()]}).
+-record(file_plt, {version = "" :: string(),
+ file_md5_list = [] :: [file_md5()],
+ info = dict:new() :: dict(),
+ contracts = dict:new() :: dict(),
+ types = dict:new() :: dict(),
+ exported_types = sets:new() :: set(),
+ mod_deps :: mod_deps(),
+ implementation_md5 = [] :: [file_md5()]}).
%%----------------------------------------------------------------------
@@ -95,19 +101,23 @@
new() ->
#plt{}.
--spec delete_module(plt(), module()) -> plt().
+-spec delete_module(plt(), atom()) -> plt().
-delete_module(#plt{info = Info, types = Types, contracts = Contracts}, Mod) ->
+delete_module(#plt{info = Info, types = Types, contracts = Contracts,
+ exported_types = ExpTypes}, Mod) ->
#plt{info = table_delete_module(Info, Mod),
types = table_delete_module2(Types, Mod),
- contracts = table_delete_module(Contracts, Mod)}.
+ contracts = table_delete_module(Contracts, Mod),
+ exported_types = table_delete_module1(ExpTypes, Mod)}.
-spec delete_list(plt(), [mfa() | integer()]) -> plt().
-delete_list(#plt{info = Info, types = Types, contracts = Contracts}, List) ->
+delete_list(#plt{info = Info, types = Types, contracts = Contracts,
+ exported_types = ExpTypes}, List) ->
#plt{info = table_delete_list(Info, List),
types = Types,
- contracts = table_delete_list(Contracts, List)}.
+ contracts = table_delete_list(Contracts, List),
+ exported_types = ExpTypes}.
-spec insert_contract_list(plt(), dialyzer_contracts:plt_contracts()) -> plt().
@@ -126,7 +136,7 @@ delete_contract_list(#plt{contracts = Contracts} = PLT, List) ->
PLT#plt{contracts = table_delete_list(Contracts, List)}.
%% -spec insert(plt(), mfa() | integer(), {_, _}) -> plt().
-%%
+%%
%% insert(#plt{info = Info} = PLT, Id, Types) ->
%% PLT#plt{info = table_insert(Info, Id, Types)}.
@@ -150,19 +160,29 @@ lookup(#plt{info = Info}, Label) when is_integer(Label) ->
insert_types(PLT, Rec) ->
PLT#plt{types = Rec}.
+-spec insert_exported_types(plt(), set()) -> plt().
+
+insert_exported_types(PLT, Set) ->
+ PLT#plt{exported_types = Set}.
+
-spec get_types(plt()) -> dict().
get_types(#plt{types = Types}) ->
Types.
+-spec get_exported_types(plt()) -> set().
+
+get_exported_types(#plt{exported_types = ExpTypes}) ->
+ ExpTypes.
+
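
A minimal usage sketch for the new field; the {Module, TypeName, Arity} element shape is an assumption read off the filter in table_delete_module1 further down, and my_mod/my_type are made-up names:

exported_types_example() ->
    Plt0 = dialyzer_plt:new(),
    ExpTypes = sets:from_list([{my_mod, my_type, 0}]),
    Plt1 = dialyzer_plt:insert_exported_types(Plt0, ExpTypes),
    sets:to_list(dialyzer_plt:get_exported_types(Plt1)).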
-type mfa_types() :: {mfa(), erl_types:erl_type(), [erl_types:erl_type()]}.
--spec lookup_module(plt(), module()) -> 'none' | {'value', [mfa_types()]}.
+-spec lookup_module(plt(), atom()) -> 'none' | {'value', [mfa_types()]}.
lookup_module(#plt{info = Info}, M) when is_atom(M) ->
table_lookup_module(Info, M).
--spec contains_module(plt(), module()) -> boolean().
+-spec contains_module(plt(), atom()) -> boolean().
contains_module(#plt{info = Info, contracts = Cs}, M) when is_atom(M) ->
table_contains_module(Info, M) orelse table_contains_module(Cs, M).
@@ -170,7 +190,7 @@ contains_module(#plt{info = Info, contracts = Cs}, M) when is_atom(M) ->
-spec contains_mfa(plt(), mfa()) -> boolean().
contains_mfa(#plt{info = Info, contracts = Contracts}, MFA) ->
- (table_lookup(Info, MFA) =/= none)
+ (table_lookup(Info, MFA) =/= none)
orelse (table_lookup(Contracts, MFA) =/= none).
-spec get_default_plt() -> file:filename().
@@ -201,13 +221,14 @@ from_file(FileName, ReturnInfo) ->
case get_record_from_file(FileName) of
{ok, Rec} ->
case check_version(Rec) of
- error ->
+ error ->
Msg = io_lib:format("Old PLT file ~s\n", [FileName]),
error(Msg);
- ok ->
+ ok ->
Plt = #plt{info = Rec#file_plt.info,
types = Rec#file_plt.types,
- contracts = Rec#file_plt.contracts},
+ contracts = Rec#file_plt.contracts,
+ exported_types = Rec#file_plt.exported_types},
case ReturnInfo of
false -> Plt;
true ->
@@ -217,12 +238,12 @@ from_file(FileName, ReturnInfo) ->
end
end;
{error, Reason} ->
- error(io_lib:format("Could not read PLT file ~s: ~p\n",
+ error(io_lib:format("Could not read PLT file ~s: ~p\n",
[FileName, Reason]))
end.
-type inc_file_err_rsn() :: 'no_such_file' | 'read_error'.
--spec included_files(file:filename()) -> {'ok', [file:filename()]}
+-spec included_files(file:filename()) -> {'ok', [file:filename()]}
| {'error', inc_file_err_rsn()}.
included_files(FileName) ->
@@ -247,12 +268,12 @@ get_record_from_file(FileName) ->
try binary_to_term(Bin) of
#file_plt{} = FilePLT -> {ok, FilePLT};
_ -> {error, not_valid}
- catch
+ catch
_:_ -> {error, not_valid}
end;
{error, enoent} ->
{error, no_such_file};
- {error, _} ->
+ {error, _} ->
{error, read_error}
end.
@@ -261,19 +282,22 @@ get_record_from_file(FileName) ->
merge_plts(List) ->
InfoList = [Info || #plt{info = Info} <- List],
TypesList = [Types || #plt{types = Types} <- List],
+ ExpTypesList = [ExpTypes || #plt{exported_types = ExpTypes} <- List],
ContractsList = [Contracts || #plt{contracts = Contracts} <- List],
#plt{info = table_merge(InfoList),
types = table_merge(TypesList),
+ exported_types = sets_merge(ExpTypesList),
contracts = table_merge(ContractsList)}.
-spec to_file(file:filename(), plt(), mod_deps(), {[file_md5()], mod_deps()}) -> 'ok'.
to_file(FileName,
- #plt{info = Info, types = Types, contracts = Contracts},
+ #plt{info = Info, types = Types, contracts = Contracts,
+ exported_types = ExpTypes},
ModDeps, {MD5, OldModDeps}) ->
- NewModDeps = dict:merge(fun(_Key, OldVal, NewVal) ->
+ NewModDeps = dict:merge(fun(_Key, OldVal, NewVal) ->
ordsets:union(OldVal, NewVal)
- end,
+ end,
OldModDeps, ModDeps),
ImplMd5 = compute_implementation_md5(),
Record = #file_plt{version = ?VSN,
@@ -281,13 +305,14 @@ to_file(FileName,
info = Info,
contracts = Contracts,
types = Types,
+ exported_types = ExpTypes,
mod_deps = NewModDeps,
implementation_md5 = ImplMd5},
Bin = term_to_binary(Record, [compressed]),
case file:write_file(FileName, Bin) of
ok -> ok;
{error, Reason} ->
- Msg = io_lib:format("Could not write PLT file ~s: ~w\n",
+ Msg = io_lib:format("Could not write PLT file ~s: ~w\n",
[FileName, Reason]),
throw({dialyzer_error, Msg})
end.
@@ -295,8 +320,8 @@ to_file(FileName,
-type md5_diff() :: [{'differ', atom()} | {'removed', atom()}].
-type check_error() :: 'not_valid' | 'no_such_file' | 'read_error'
| {'no_file_to_remove', file:filename()}.
-
--spec check_plt(file:filename(), [file:filename()], [file:filename()]) ->
+
+-spec check_plt(file:filename(), [file:filename()], [file:filename()]) ->
'ok'
| {'error', check_error()}
| {'differ', [file_md5()], md5_diff(), mod_deps()}
@@ -306,7 +331,7 @@ check_plt(FileName, RemoveFiles, AddFiles) ->
case get_record_from_file(FileName) of
{ok, #file_plt{file_md5_list = Md5, mod_deps = ModDeps} = Rec} ->
case check_version(Rec) of
- ok ->
+ ok ->
case compute_new_md5(Md5, RemoveFiles, AddFiles) of
ok -> ok;
{differ, NewMd5, DiffMd5} -> {differ, NewMd5, DiffMd5, ModDeps};
@@ -363,7 +388,7 @@ compute_md5_from_files(Files) ->
compute_md5_from_file(File) ->
case filelib:is_regular(File) of
- false ->
+ false ->
Msg = io_lib:format("Not a regular file: ~s\n", [File]),
throw({dialyzer_error, Msg});
true ->
@@ -394,7 +419,7 @@ init_md5_list_1([{File, _Md5}|Md5Left], [{remove, File}|DiffLeft], Acc) ->
init_md5_list_1(Md5Left, DiffLeft, Acc);
init_md5_list_1([{File, _Md5} = Entry|Md5Left], [{add, File}|DiffLeft], Acc) ->
init_md5_list_1(Md5Left, DiffLeft, [Entry|Acc]);
-init_md5_list_1([{File1, _Md5} = Entry|Md5Left] = Md5List,
+init_md5_list_1([{File1, _Md5} = Entry|Md5Left] = Md5List,
[{Tag, File2}|DiffLeft] = DiffList, Acc) ->
case File1 < File2 of
true -> init_md5_list_1(Md5Left, DiffList, [Entry|Acc]);
@@ -425,7 +450,7 @@ get_specs(#plt{info = Info}) ->
beam_file_to_module(Filename) ->
list_to_atom(filename:basename(Filename, ".beam")).
--spec get_specs(plt(), module(), atom(), arity_patt()) -> 'none' | string().
+-spec get_specs(plt(), atom(), atom(), arity_patt()) -> 'none' | string().
get_specs(#plt{info = Info}, M, F, A) when is_atom(M), is_atom(F) ->
MFA = {M, F, A},
@@ -435,7 +460,7 @@ get_specs(#plt{info = Info}, M, F, A) when is_atom(M), is_atom(F) ->
end.
create_specs([{{M, F, _A}, {Ret, Args}}|Left], M) ->
- [io_lib:format("-spec ~w(~s) -> ~s\n",
+ [io_lib:format("-spec ~w(~s) -> ~s\n",
[F, expand_args(Args), erl_types:t_to_string(Ret)])
| create_specs(Left, M)];
create_specs(List = [{{M, _F, _A}, {_Ret, _Args}}| _], _M) ->
@@ -475,6 +500,9 @@ table_delete_module(Plt, Mod) ->
(_, _) -> true
end, Plt).
+table_delete_module1(Plt, Mod) ->
+ sets:filter(fun({M, _F, _A}) -> M =/= Mod end, Plt).
+
table_delete_module2(Plt, Mod) ->
dict:filter(fun(M, _Val) -> M =/= Mod end, Plt).
@@ -488,7 +516,7 @@ table_insert_list(Plt, [{Key, Val}|Left]) ->
table_insert_list(Plt, []) ->
Plt.
-table_insert(Plt, Key, {_Ret, _Arg} = Obj) ->
+table_insert(Plt, Key, {_Ret, _Arg} = Obj) ->
dict:store(Key, Obj, Plt);
table_insert(Plt, Key, #contract{} = C) ->
dict:store(Key, C, Plt).
@@ -526,6 +554,15 @@ table_merge([Plt|Plts], Acc) ->
NewAcc = dict:merge(fun(_Key, Val, Val) -> Val end, Plt, Acc),
table_merge(Plts, NewAcc).
+sets_merge([H|T]) ->
+ sets_merge(T, H).
+
+sets_merge([], Acc) ->
+ Acc;
+sets_merge([Plt|Plts], Acc) ->
+ NewAcc = sets:union(Plt, Acc),
+ sets_merge(Plts, NewAcc).
+
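
sets_merge/1 is a plain fold of sets:union/2 over a non-empty list of sets; lists:foldl(fun sets:union/2, H, T) on [H|T] computes the same result. For example, merging the exported-type sets of two PLTs:

merged_exported_types(PltA, PltB) ->
    sets:union(dialyzer_plt:get_exported_types(PltA),
               dialyzer_plt:get_exported_types(PltB)).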
%%---------------------------------------------------------------------------
%% Debug utilities.
@@ -555,7 +592,7 @@ pp_non_returning() ->
[M, F, dialyzer_utils:format_sig(Type)])
end, lists:sort(None)).
--spec pp_mod(module()) -> 'ok'.
+-spec pp_mod(atom()) -> 'ok'.
pp_mod(Mod) when is_atom(Mod) ->
PltFile = get_default_plt(),
diff --git a/lib/dialyzer/src/dialyzer_races.erl b/lib/dialyzer/src/dialyzer_races.erl
index 4972967960..ec8d613b96 100644
--- a/lib/dialyzer/src/dialyzer_races.erl
+++ b/lib/dialyzer/src/dialyzer_races.erl
@@ -21,7 +21,7 @@
%%%----------------------------------------------------------------------
%%% File : dialyzer_races.erl
%%% Author : Maria Christakis <[email protected]>
-%%% Description : Utility functions for race condition detection
+%%% Description : Utility functions for race condition detection
%%%
%%% Created : 21 Nov 2008 by Maria Christakis <[email protected]>
%%%----------------------------------------------------------------------
@@ -39,6 +39,8 @@
let_tag_new/2, new/0, put_curr_fun/3, put_fun_args/2,
put_race_analysis/2, put_race_list/3]).
+-export_type([races/0, mfa_or_funlbl/0, core_vars/0]).
+
-include("dialyzer.hrl").
%%% ===========================================================================
@@ -80,7 +82,7 @@
-type call() :: 'whereis' | 'register' | 'unregister' | 'ets_new'
| 'ets_lookup' | 'ets_insert' | 'mnesia_dirty_read1'
| 'mnesia_dirty_read2' | 'mnesia_dirty_write1'
- | 'mnesia_dirty_write2' | 'function_call'.
+ | 'mnesia_dirty_write2' | 'function_call'.
-type race_tag() :: 'whereis_register' | 'whereis_unregister'
| 'ets_lookup_insert' | 'mnesia_dirty_read_write'.
@@ -157,7 +159,7 @@
%%% ===========================================================================
-spec store_race_call(mfa_or_funlbl(), [erl_types:erl_type()], [core_vars()],
- file_line(), dialyzer_dataflow:state()) ->
+ file_line(), dialyzer_dataflow:state()) ->
dialyzer_dataflow:state().
store_race_call(Fun, ArgTypes, Args, FileLine, State) ->
@@ -166,7 +168,7 @@ store_race_call(Fun, ArgTypes, Args, FileLine, State) ->
CurrFunLabel = Races#races.curr_fun_label,
RaceTags = Races#races.race_tags,
CleanState = dialyzer_dataflow:state__records_only(State),
- {NewRaceList, NewRaceListSize, NewRaceTags, NewTable} =
+ {NewRaceList, NewRaceListSize, NewRaceTags, NewTable} =
case CurrFun of
{_Module, module_info, A} when A =:= 0 orelse A =:= 1 ->
{[], 0, RaceTags, no_t};
@@ -422,7 +424,7 @@ fixup_race_forward_pullout(CurrFun, CurrFunLabel, Calls, Code, RaceList,
Races = dialyzer_dataflow:state__get_races(State),
{RetCurrFun, RetCurrFunLabel, RetCalls, RetCode,
RetRaceList, RetRaceVarMap, RetFunDefVars, RetFunCallVars,
- RetFunArgTypes, RetNestingLevel} =
+ RetFunArgTypes, RetNestingLevel} =
fixup_race_forward_helper(NewCurrFun,
NewCurrFunLabel, Fun, Int, NewCalls, NewCalls,
[#curr_fun{status = out, mfa = NewCurrFun,
@@ -576,7 +578,7 @@ fixup_race_forward(CurrFun, CurrFunLabel, Calls, Code, RaceList,
RaceTag ->
PublicTables = dialyzer_callgraph:get_public_tables(Callgraph),
NamedTables = dialyzer_callgraph:get_named_tables(Callgraph),
- WarnVarArgs1 =
+ WarnVarArgs1 =
var_type_analysis(FunDefVars, FunArgTypes, WarnVarArgs,
RaceWarnTag, RaceVarMap,
dialyzer_dataflow:state__records_only(State)),
@@ -598,7 +600,7 @@ fixup_race_forward(CurrFun, CurrFunLabel, Calls, Code, RaceList,
[#warn_call{call_name = ets_insert, args = WarnVarArgs,
var_map = RaceVarMap}],
[Tab, Names, _, _] = WarnVarArgs,
- case IsPublic orelse
+ case IsPublic orelse
compare_var_list(Tab, PublicTables, RaceVarMap)
orelse
length(Names -- NamedTables) < length(Names) of
@@ -636,7 +638,7 @@ fixup_race_forward(CurrFun, CurrFunLabel, Calls, Code, RaceList,
#curr_fun{mfa = CurrFun2, label = CurrFunLabel2,
var_map = RaceVarMap2, def_vars = FunDefVars2,
call_vars = FunCallVars2, arg_types = FunArgTypes2},
- Code2, NestingLevel2} =
+ Code2, NestingLevel2} =
remove_clause(NewRL,
#curr_fun{mfa = CurrFun, label = CurrFunLabel,
var_map = RaceVarMap1,
@@ -648,7 +650,7 @@ fixup_race_forward(CurrFun, CurrFunLabel, Calls, Code, RaceList,
RaceVarMap2, FunDefVars2, FunCallVars2, FunArgTypes2,
NestingLevel2, false};
false ->
- {CurrFun, CurrFunLabel, Tail, NewRL, RaceVarMap1,
+ {CurrFun, CurrFunLabel, Tail, NewRL, RaceVarMap1,
FunDefVars, FunCallVars, FunArgTypes, NewNL, false}
end;
#end_clause{arg = Arg, pats = Pats, guard = Guard} ->
@@ -893,7 +895,7 @@ do_clause(RaceList, WarnVarArgs, RaceWarnTag, RaceVarMap, CurrLevel,
PublicTables, NamedTables) ->
{DepList, IsPublic, Continue} =
get_deplist_paths(fixup_case_path(RaceList, 0), WarnVarArgs,
- RaceWarnTag, RaceVarMap, CurrLevel,
+ RaceWarnTag, RaceVarMap, CurrLevel,
PublicTables, NamedTables),
{fixup_case_rest_paths(RaceList, 0), DepList, IsPublic, Continue}.
@@ -963,7 +965,7 @@ fixup_race_forward_helper(CurrFun, CurrFunLabel, Fun, FunLabel,
#curr_fun{mfa = NewCurrFun, label = NewCurrFunLabel,
var_map = NewRaceVarMap, def_vars = NewFunDefVars,
call_vars = NewFunCallVars, arg_types = NewFunArgTypes},
- NewCode, NewNestingLevel} =
+ NewCode, NewNestingLevel} =
remove_clause(RaceList,
#curr_fun{mfa = CurrFun, label = CurrFunLabel, var_map = RaceVarMap,
def_vars = FunDefVars, call_vars = FunCallVars,
@@ -995,7 +997,7 @@ fixup_race_forward_helper(CurrFun, CurrFunLabel, Fun, FunLabel,
arg_types = NewFunTypes}],
[#curr_fun{status = in, mfa = Fun,
label = FunLabel, var_map = NewRaceVarMap,
- def_vars = Args, call_vars = NewFunArgs,
+ def_vars = Args, call_vars = NewFunArgs,
arg_types = NewFunTypes}|
lists:reverse(StateRaceList)] ++
RetC, NewRaceVarMap),
@@ -1060,7 +1062,7 @@ fixup_race_backward(CurrFun, Calls, CallsToAnalyze, Parents, Height) ->
case Height =:= 0 of
true -> Parents;
false ->
- case Calls of
+ case Calls of
[] ->
case is_integer(CurrFun) orelse lists:member(CurrFun, Parents) of
true -> Parents;
@@ -1219,7 +1221,7 @@ are_bound_vars(Vars1, Vars2, RaceVarMap) ->
callgraph__renew_tables(Table, Callgraph) ->
case Table of
{named, NameLabel, Names} ->
- PTablesToAdd =
+ PTablesToAdd =
case NameLabel of
?no_label -> [];
_Other -> [NameLabel]
@@ -1438,7 +1440,7 @@ lists_key_members_lists_helper(Elem, List, N) when is_integer(Elem) ->
end;
lists_key_members_lists_helper(_Elem, _List, _N) ->
[0].
-
+
lists_key_replace(N, List, NewMember) ->
{Before, [_|After]} = lists:split(N - 1, List),
Before ++ [NewMember|After].
@@ -1488,7 +1490,7 @@ refine_race_helper(RaceCall, VarArgs, WarnVarArgs, RaceWarnTag, DependencyList,
false -> DependencyList
end.
-remove_clause(RaceList, CurrTuple, Code, NestingLevel) ->
+remove_clause(RaceList, CurrTuple, Code, NestingLevel) ->
NewRaceList = fixup_case_rest_paths(RaceList, 0),
{NewCurrTuple, NewCode} =
cleanup_clause_code(CurrTuple, Code, 0, NestingLevel),
@@ -1621,7 +1623,7 @@ compare_ets_insert(OldWarnVarArgs, NewWarnVarArgs, RaceVarMap) ->
end
end,
case Bool of
- true ->
+ true ->
case any_args(Old4) of
true ->
case compare_list_vars(Old3, ets_list_args(New3), [], RaceVarMap) of
@@ -1690,7 +1692,6 @@ compare_types(VarArgs, WarnVarArgs, RaceWarnTag, RaceVarMap) ->
false ->
compare_var_list(VA1, WVA1, RaceVarMap) orelse
compare_argtypes(VA2, WVA2)
-
end
end;
?WARN_WHEREIS_UNREGISTER ->
@@ -1704,7 +1705,6 @@ compare_types(VarArgs, WarnVarArgs, RaceWarnTag, RaceVarMap) ->
false ->
compare_var_list(VA1, WVA1, RaceVarMap) orelse
compare_argtypes(VA2, WVA2)
-
end
end;
?WARN_ETS_LOOKUP_INSERT ->
@@ -1716,12 +1716,12 @@ compare_types(VarArgs, WarnVarArgs, RaceWarnTag, RaceVarMap) ->
false ->
case any_args(WVA2) of
true -> compare_var_list(VA1, WVA1, RaceVarMap);
- false ->
+ false ->
compare_var_list(VA1, WVA1, RaceVarMap) orelse
compare_argtypes(VA2, WVA2)
end
end,
- Bool andalso
+ Bool andalso
(case any_args(VA4) of
true ->
compare_var_list(VA3, WVA3, RaceVarMap);
@@ -2158,7 +2158,7 @@ race_var_map_guard_helper1(Arg, Pats, RaceVarMap, Op) ->
_Else -> {RaceVarMap, false}
end;
false -> {RaceVarMap, false}
- end;
+ end;
_Other -> {RaceVarMap, false}
end;
_Other -> {RaceVarMap, false}
@@ -2241,7 +2241,7 @@ var_analysis(FunDefArgs, FunCallArgs, WarnVarArgs, RaceWarnTag) ->
[WVA1, WVA2|T] = WarnVarArgs,
ArgNos = lists_key_members_lists(WVA1, FunDefArgs),
[[lists_get(N, FunCallArgs) || N <- ArgNos], WVA2|T]
- end.
+ end.
var_type_analysis(FunDefArgs, FunCallTypes, WarnVarArgs, RaceWarnTag,
RaceVarMap, CleanState) ->
@@ -2286,7 +2286,7 @@ var_type_analysis(FunDefArgs, FunCallTypes, WarnVarArgs, RaceWarnTag,
ets_tuple_argtypes1(lists:nth(N2 + 1, FunVarArgs), [], [], 0),
[]),
FirstVarArg ++ [Vars2, NewWVA4]
-
+
end;
?WARN_MNESIA_DIRTY_READ_WRITE ->
[WVA1, WVA2|T] = WarnVarArgs,
@@ -2330,7 +2330,7 @@ get_race_warn(Fun, Args, ArgTypes, DepList, State) ->
-spec get_race_warnings(races(), dialyzer_dataflow:state()) ->
{races(), dialyzer_dataflow:state()}.
-
+
get_race_warnings(#races{race_warnings = RaceWarnings}, State) ->
get_race_warnings_helper(RaceWarnings, State).
@@ -2430,12 +2430,12 @@ end_clause_new(Arg, Pats, Guard) ->
#end_clause{arg = Arg, pats = Pats, guard = Guard}.
-spec get_curr_fun(races()) -> mfa_or_funlbl().
-
+
get_curr_fun(#races{curr_fun = CurrFun}) ->
CurrFun.
-spec get_curr_fun_args(races()) -> core_args().
-
+
get_curr_fun_args(#races{curr_fun_args = CurrFunArgs}) ->
CurrFunArgs.
@@ -2445,17 +2445,17 @@ get_new_table(#races{new_table = Table}) ->
Table.
-spec get_race_analysis(races()) -> boolean().
-
+
get_race_analysis(#races{race_analysis = RaceAnalysis}) ->
RaceAnalysis.
-spec get_race_list(races()) -> code().
-
+
get_race_list(#races{race_list = RaceList}) ->
RaceList.
-spec get_race_list_size(races()) -> non_neg_integer().
-
+
get_race_list_size(#races{race_list_size = RaceListSize}) ->
RaceListSize.
@@ -2483,10 +2483,10 @@ put_fun_args(Args, #races{curr_fun_args = CurrFunArgs} = Races) ->
empty -> Races#races{curr_fun_args = Args};
_Other -> Races
end.
-
+
-spec put_race_analysis(boolean(), races()) ->
races().
-
+
put_race_analysis(Analysis, Races) ->
Races#races{race_analysis = Analysis}.
diff --git a/lib/dialyzer/src/dialyzer_succ_typings.erl b/lib/dialyzer/src/dialyzer_succ_typings.erl
index 1ff4783852..8bfc66fc39 100644
--- a/lib/dialyzer/src/dialyzer_succ_typings.erl
+++ b/lib/dialyzer/src/dialyzer_succ_typings.erl
@@ -435,7 +435,7 @@ format_scc(SCC) ->
%%
%% ============================================================================
--spec doit(module() | string()) -> 'ok'.
+-spec doit(atom() | file:filename()) -> 'ok'.
doit(Module) ->
{ok, AbstrCode} = dialyzer_utils:get_abstract_code_from_src(Module),
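
The widened spec above makes explicit that doit/1, a small debug entry point, accepts either a module name or a path to an Erlang source file. A minimal usage sketch (the file name is made up):

    %% Illustration only: run the debug entry point on one source file.
    ok = dialyzer_succ_typings:doit("src/my_module.erl").
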
diff --git a/lib/dialyzer/src/dialyzer_typesig.erl b/lib/dialyzer/src/dialyzer_typesig.erl
index 35b283a00a..3effb1c2e6 100644
--- a/lib/dialyzer/src/dialyzer_typesig.erl
+++ b/lib/dialyzer/src/dialyzer_typesig.erl
@@ -21,7 +21,7 @@
%%%-------------------------------------------------------------------
%%% File : dialyzer_typesig.erl
%%% Author : Tobias Lindahl <[email protected]>
-%%% Description :
+%%% Description :
%%%
%%% Created : 25 Apr 2005 by Tobias Lindahl <[email protected]>
%%%-------------------------------------------------------------------
@@ -31,12 +31,12 @@
-export([analyze_scc/5]).
-export([get_safe_underapprox/2]).
--import(erl_types,
+-import(erl_types,
[t_any/0, t_atom/0, t_atom_vals/1,
t_binary/0, t_bitstr/0, t_bitstr/2, t_bitstr_concat/1, t_boolean/0,
t_collect_vars/1, t_cons/2, t_cons_hd/1, t_cons_tl/1,
t_float/0, t_from_range/2, t_from_term/1,
- t_fun/0, t_fun/2, t_fun_args/1, t_fun_range/1,
+ t_fun/0, t_fun/2, t_fun_args/1, t_fun_range/1,
t_has_var/1,
t_inf/2, t_inf/3, t_integer/0,
t_is_any/1, t_is_atom/1, t_is_atom/2, t_is_cons/1, t_is_equal/2,
@@ -44,7 +44,7 @@
t_is_integer/1, t_non_neg_integer/0,
t_is_list/1, t_is_nil/1, t_is_none/1, t_is_number/1,
- t_is_subtype/2, t_limit/2, t_list/0, t_list/1,
+ t_is_subtype/2, t_limit/2, t_list/0, t_list/1,
t_list_elements/1, t_nonempty_list/1, t_maybe_improper_list/0,
t_module/0, t_number/0, t_number_vals/1,
t_opaque_match_record/2, t_opaque_matching_structure/2,
@@ -101,7 +101,7 @@
name_map = dict:new() :: dict(),
next_label :: label(),
non_self_recs = [] :: [label()],
- plt :: dialyzer_plt:plt(),
+ plt :: dialyzer_plt:plt(),
prop_types = dict:new() :: dict(),
records = dict:new() :: dict(),
opaques = [] :: [erl_types:erl_type()],
@@ -140,8 +140,8 @@
%% where Def = {Var, Fun} as in the Core Erlang module definitions.
%% Records = dict(RecName, {Arity, [{FieldName, FieldType}]})
%% NextLabel - An integer that is higher than any label in the code.
-%% CallGraph - A callgraph as produced by dialyzer_callgraph.erl
-%% Note: The callgraph must have been built with all the
+%% CallGraph - A callgraph as produced by dialyzer_callgraph.erl
+%% Note: The callgraph must have been built with all the
%% code that the SCC is a part of.
%% PLT - A dialyzer PLT. This PLT should contain available information
%% about functions that can be called by this SCC.
@@ -150,7 +150,7 @@
%%-----------------------------------------------------------------------------
-spec analyze_scc(typesig_scc(), label(),
- dialyzer_callgraph:callgraph(),
+ dialyzer_callgraph:callgraph(),
dialyzer_plt:plt(), dict()) -> dict().
analyze_scc(SCC, NextLabel, CallGraph, Plt, PropTypes) ->
@@ -202,7 +202,7 @@ traverse(Tree, DefinedVars, State) ->
{State1, OpType} = traverse(Op, DefinedVars, State0),
{State2, FunType} = state__get_fun_prototype(OpType, Arity, State1),
State3 = state__store_conj(FunType, eq, OpType, State2),
- State4 = state__store_conj(mk_var(Tree), sub, t_fun_range(FunType),
+ State4 = state__store_conj(mk_var(Tree), sub, t_fun_range(FunType),
State3),
State5 = state__store_conj_lists(ArgTypes, sub, t_fun_args(FunType),
State4),
@@ -216,7 +216,7 @@ traverse(Tree, DefinedVars, State) ->
end
end;
binary ->
- {State1, SegTypes} = traverse_list(cerl:binary_segments(Tree),
+ {State1, SegTypes} = traverse_list(cerl:binary_segments(Tree),
DefinedVars, State),
Type = mk_fun_var(fun(Map) ->
TmpSegTypes = lookup_type_list(SegTypes, Map),
@@ -227,7 +227,7 @@ traverse(Tree, DefinedVars, State) ->
Size = cerl:bitstr_size(Tree),
UnitVal = cerl:int_val(cerl:bitstr_unit(Tree)),
Val = cerl:bitstr_val(Tree),
- {State1, [SizeType, ValType]} =
+ {State1, [SizeType, ValType]} =
traverse_list([Size, Val], DefinedVars, State),
{State2, TypeConstr} =
case cerl:bitstr_bitsize(Tree) of
@@ -250,7 +250,7 @@ traverse(Tree, DefinedVars, State) ->
case state__is_in_match(State1) of
true ->
Flags = cerl:concrete(cerl:bitstr_flags(Tree)),
- mk_fun_var(bitstr_val_constr(SizeType, UnitVal, Flags),
+ mk_fun_var(bitstr_val_constr(SizeType, UnitVal, Flags),
[SizeType]);
false -> t_integer()
end;
@@ -282,7 +282,7 @@ traverse(Tree, DefinedVars, State) ->
false ->
ConsVar = mk_var(Tree),
ConsType = mk_fun_var(fun(Map) ->
- t_cons(lookup_type(HdVar, Map),
+ t_cons(lookup_type(HdVar, Map),
lookup_type(TlVar, Map))
end, [HdVar, TlVar]),
HdType = mk_fun_var(fun(Map) ->
@@ -299,8 +299,8 @@ traverse(Tree, DefinedVars, State) ->
true -> t_cons_tl(Cons)
end
end, [ConsVar]),
- State2 = state__store_conj_lists([HdVar, TlVar, ConsVar], sub,
- [HdType, TlType, ConsType],
+ State2 = state__store_conj_lists([HdVar, TlVar, ConsVar], sub,
+ [HdType, TlType, ConsType],
State1),
{State2, ConsVar}
end;
@@ -314,14 +314,14 @@ traverse(Tree, DefinedVars, State) ->
error -> t_fun(length(Vars), t_none());
{ok, Dom} -> t_fun(Dom, t_none())
end,
- State2 =
+ State2 =
try
State1 = case state__add_prop_constrs(Tree, State0) of
not_called -> State0;
PropState -> PropState
end,
{BodyState, BodyVar} = traverse(Body, DefinedVars1, State1),
- state__store_conj(mk_var(Tree), eq,
+ state__store_conj(mk_var(Tree), eq,
t_fun(mk_var_list(Vars), BodyVar), BodyState)
catch
throw:error ->
@@ -340,7 +340,7 @@ traverse(Tree, DefinedVars, State) ->
Arg = cerl:let_arg(Tree),
Body = cerl:let_body(Tree),
{State1, ArgVars} = traverse(Arg, DefinedVars, State),
- State2 = state__store_conj(t_product(mk_var_list(Vars)), eq,
+ State2 = state__store_conj(t_product(mk_var_list(Vars)), eq,
ArgVars, State1),
DefinedVars1 = add_def_list(Vars, DefinedVars),
traverse(Body, DefinedVars1, State2);
@@ -353,12 +353,12 @@ traverse(Tree, DefinedVars, State) ->
DefinedVars1 = add_def_list(Vars, DefinedVars),
{State2, _} = traverse_list(Funs, DefinedVars1, State1),
traverse(Body, DefinedVars1, State2);
- literal ->
+ literal ->
%% This is needed for finding records
case cerl:unfold_literal(Tree) of
- Tree ->
+ Tree ->
Type = t_from_term(cerl:concrete(Tree)),
- NewType =
+ NewType =
case erl_types:t_opaque_match_atom(Type, State#state.opaques) of
[Opaque] -> Opaque;
_ -> Type
@@ -370,7 +370,7 @@ traverse(Tree, DefinedVars, State) ->
Defs = cerl:module_defs(Tree),
Funs = [Fun || {_Var, Fun} <- Defs],
Vars = [Var || {Var, _Fun} <- Defs],
- DefinedVars1 = add_def_list(Vars, DefinedVars),
+ DefinedVars1 = add_def_list(Vars, DefinedVars),
State1 = state__store_funs(Vars, Funs, State),
FoldFun = fun(Fun, AccState) ->
{S, _} = traverse(Fun, DefinedVars1,
@@ -388,7 +388,7 @@ traverse(Tree, DefinedVars, State) ->
'receive' ->
Clauses = filter_match_fail(cerl:receive_clauses(Tree)),
Timeout = cerl:receive_timeout(Tree),
- case (cerl:is_c_atom(Timeout) andalso
+ case (cerl:is_c_atom(Timeout) andalso
(cerl:atom_val(Timeout) =:= infinity)) of
true ->
handle_clauses(Clauses, mk_var(Tree), [], DefinedVars, State);
@@ -421,7 +421,7 @@ traverse(Tree, DefinedVars, State) ->
case t_has_var(Var) of
true ->
{AccState1, NewVar} = state__mk_var(AccState),
- {NewVar,
+ {NewVar,
state__store_conj(Var, eq, NewVar, AccState1)};
false ->
{Var, AccState}
@@ -431,7 +431,7 @@ traverse(Tree, DefinedVars, State) ->
{TmpState, t_tuple(NewEvars)}
end,
case Elements of
- [Tag|Fields] ->
+ [Tag|Fields] ->
case cerl:is_c_atom(Tag) of
true ->
%% Check if an opaque term is constructed.
@@ -534,7 +534,7 @@ handle_try(Tree, DefinedVars, State) ->
Handler = cerl:try_handler(Tree),
State1 = state__new_constraint_context(State),
{ArgBodyState, BodyVar} =
- try
+ try
{State2, ArgVar} = traverse(Arg, DefinedVars, State1),
DefinedVars1 = add_def_list(Vars, DefinedVars),
{State3, BodyVar1} = traverse(Body, DefinedVars1, State2),
@@ -542,17 +542,17 @@ handle_try(Tree, DefinedVars, State) ->
State3),
{State4, BodyVar1}
catch
- throw:error ->
+ throw:error ->
{State1, t_none()}
end,
State6 = state__new_constraint_context(ArgBodyState),
{HandlerState, HandlerVar} =
try
- DefinedVars2 = add_def_list([X || X <- EVars, cerl:is_c_var(X)],
+ DefinedVars2 = add_def_list([X || X <- EVars, cerl:is_c_var(X)],
DefinedVars),
traverse(Handler, DefinedVars2, State6)
catch
- throw:error ->
+ throw:error ->
{State6, t_none()}
end,
ArgBodyCs = state__cs(ArgBodyState),
@@ -561,7 +561,7 @@ handle_try(Tree, DefinedVars, State) ->
OldCs = state__cs(State),
case state__is_in_guard(State) of
true ->
- Conj1 = mk_conj_constraint_list([ArgBodyCs,
+ Conj1 = mk_conj_constraint_list([ArgBodyCs,
mk_constraint(BodyVar, eq, TreeVar)]),
Disj = mk_disj_constraint_list([Conj1,
mk_constraint(HandlerVar, eq, TreeVar)]),
@@ -573,10 +573,10 @@ handle_try(Tree, DefinedVars, State) ->
{NewCs, ReturnVar} =
case {t_is_none(BodyVar), t_is_none(HandlerVar)} of
{false, false} ->
- Conj1 =
+ Conj1 =
mk_conj_constraint_list([ArgBodyCs,
mk_constraint(TreeVar, eq, BodyVar)]),
- Conj2 =
+ Conj2 =
mk_conj_constraint_list([HandlerCs,
mk_constraint(TreeVar, eq, HandlerVar)]),
Disj = mk_disj_constraint_list([Conj1, Conj2]),
@@ -603,7 +603,7 @@ handle_try(Tree, DefinedVars, State) ->
%% Call
%%
-handle_call(Call, DefinedVars, State) ->
+handle_call(Call, DefinedVars, State) ->
Args = cerl:call_args(Call),
Mod = cerl:call_module(Call),
Fun = cerl:call_name(Call),
@@ -618,7 +618,7 @@ handle_call(Call, DefinedVars, State) ->
case state__lookup_rec_var_in_scope(MFA, State) of
error ->
case get_bif_constr(MFA, Dst, ArgVars, State1) of
- none ->
+ none ->
{get_plt_constr(MFA, Dst, ArgVars, State1), Dst};
C ->
{state__store_conj(C, State1), Dst}
@@ -647,7 +647,7 @@ get_plt_constr(MFA, Dst, ArgVars, State) ->
case PltRes of
none -> State;
{value, {PltRetType, PltArgTypes}} ->
- state__store_conj_lists([Dst|ArgVars], sub,
+ state__store_conj_lists([Dst|ArgVars], sub,
[PltRetType|PltArgTypes], State)
end;
{value, #contract{args = GenArgs} = C} ->
@@ -655,7 +655,7 @@ get_plt_constr(MFA, Dst, ArgVars, State) ->
case PltRes of
none ->
{mk_fun_var(fun(Map) ->
- ArgTypes = lookup_type_list(ArgVars, Map),
+ ArgTypes = lookup_type_list(ArgVars, Map),
dialyzer_contracts:get_contract_return(C, ArgTypes)
end, ArgVars), GenArgs};
{value, {PltRetType, PltArgTypes}} ->
@@ -692,7 +692,7 @@ filter_match_fail([Clause] = Cls) ->
filter_match_fail([H|T]) ->
[H|filter_match_fail(T)];
filter_match_fail([]) ->
- %% This can actually happen, for example in
+ %% This can actually happen, for example in
%% receive after 1 -> ok end
[].
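
The comment in this hunk refers to a receive expression that has a timeout but no clauses at all, so clause filtering leaves nothing behind. A minimal example of such code:

    %% Sketch of the construct mentioned above: sleep for one millisecond.
    wait_a_tick() ->
        receive after 1 -> ok end.
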
@@ -714,16 +714,16 @@ handle_clauses(Clauses, TopVar, Arg, Action, DefinedVars, State) ->
if length(Clauses) > ?MAX_NOF_CLAUSES -> overflow;
true -> []
end,
- {State1, CList} = handle_clauses_1(Clauses, TopVar, Arg, DefinedVars,
+ {State1, CList} = handle_clauses_1(Clauses, TopVar, Arg, DefinedVars,
State, SubtrTypeList, []),
{NewCs, NewState} =
case Action of
- none ->
+ none ->
if CList =:= [] -> throw(error);
true -> {CList, State1}
end;
- _ ->
- try
+ _ ->
+ try
{State2, ActionVar} = traverse(Action, DefinedVars, State1),
TmpC = mk_constraint(TopVar, eq, ActionVar),
ActionCs = mk_conj_constraint_list([state__cs(State2),TmpC]),
@@ -740,7 +740,7 @@ handle_clauses(Clauses, TopVar, Arg, Action, DefinedVars, State) ->
FinalState = state__new_constraint_context(NewState),
{state__store_conj_list([OldCs, NewCList], FinalState), TopVar}.
-handle_clauses_1([Clause|Tail], TopVar, Arg, DefinedVars,
+handle_clauses_1([Clause|Tail], TopVar, Arg, DefinedVars,
State, SubtrTypes, Acc) ->
State0 = state__new_constraint_context(State),
Pats = cerl:clause_pats(Clause),
@@ -749,22 +749,22 @@ handle_clauses_1([Clause|Tail], TopVar, Arg, DefinedVars,
NewSubtrTypes =
case SubtrTypes =:= overflow of
true -> overflow;
- false ->
+ false ->
ordsets:add_element(get_safe_underapprox(Pats, Guard), SubtrTypes)
end,
- try
+ try
DefinedVars1 = add_def_from_tree_list(Pats, DefinedVars),
State1 = state__set_in_match(State0, true),
{State2, PatVars} = traverse_list(Pats, DefinedVars1, State1),
State3 =
case Arg =:= [] of
true -> State2;
- false ->
+ false ->
S = state__store_conj(Arg, eq, t_product(PatVars), State2),
case SubtrTypes =:= overflow of
true -> S;
false ->
- SubtrPatVar = mk_fun_var(fun(Map) ->
+ SubtrPatVar = mk_fun_var(fun(Map) ->
TmpType = lookup_type(Arg, Map),
t_subtract_list(TmpType, SubtrTypes)
end, [Arg]),
@@ -772,15 +772,15 @@ handle_clauses_1([Clause|Tail], TopVar, Arg, DefinedVars,
end
end,
State4 = handle_guard(Guard, DefinedVars1, State3),
- {State5, BodyVar} = traverse(Body, DefinedVars1,
+ {State5, BodyVar} = traverse(Body, DefinedVars1,
state__set_in_match(State4, false)),
State6 = state__store_conj(TopVar, eq, BodyVar, State5),
Cs = state__cs(State6),
- handle_clauses_1(Tail, TopVar, Arg, DefinedVars, State6,
+ handle_clauses_1(Tail, TopVar, Arg, DefinedVars, State6,
NewSubtrTypes, [Cs|Acc])
catch
- throw:error ->
- handle_clauses_1(Tail, TopVar, Arg, DefinedVars,
+ throw:error ->
+ handle_clauses_1(Tail, TopVar, Arg, DefinedVars,
State, NewSubtrTypes, Acc)
end;
handle_clauses_1([], _TopVar, _Arg, _DefinedVars, State, _SubtrType, Acc) ->
@@ -792,7 +792,7 @@ get_safe_underapprox(Pats, Guard) ->
try
Map1 = cerl_trees:fold(fun(X, Acc) ->
case cerl:is_c_var(X) of
- true ->
+ true ->
dict:store(cerl_trees:get_label(X), t_any(),
Acc);
false -> Acc
@@ -804,8 +804,8 @@ get_safe_underapprox(Pats, Guard) ->
false ->
case cerl:is_c_var(Guard) of
false -> Map2;
- true ->
- dict:store(cerl_trees:get_label(Guard),
+ true ->
+ dict:store(cerl_trees:get_label(Guard),
t_from_term(true), Map2)
end
end,
@@ -819,8 +819,8 @@ get_underapprox_from_guard(Tree, Map) ->
True = t_from_term(true),
case cerl:type(Tree) of
call ->
- case {cerl:concrete(cerl:call_module(Tree)),
- cerl:concrete(cerl:call_name(Tree)),
+ case {cerl:concrete(cerl:call_module(Tree)),
+ cerl:concrete(cerl:call_name(Tree)),
length(cerl:call_args(Tree))} of
{erlang, is_function, 2} ->
[Fun, Arity] = cerl:call_args(Tree),
@@ -856,15 +856,15 @@ get_underapprox_from_guard(Tree, Map) ->
{erlang, '==', 2} -> throw(dont_know);
{erlang, 'and', 2} ->
[Arg1, Arg2] = cerl:call_args(Tree),
- case ((cerl:is_c_var(Arg1) orelse cerl:is_literal(Arg1))
+ case ((cerl:is_c_var(Arg1) orelse cerl:is_literal(Arg1))
andalso
(cerl:is_c_var(Arg2) orelse cerl:is_literal(Arg2))) of
true ->
{Arg1Type, _} = get_underapprox_from_guard(Arg1, Map),
{Arg2Type, _} = get_underapprox_from_guard(Arg2, Map),
- case (t_is_equal(True, Arg1Type) andalso
+ case (t_is_equal(True, Arg1Type) andalso
t_is_equal(True, Arg2Type)) of
- true -> {True, Map};
+ true -> {True, Map};
false -> throw(dont_know)
end;
false ->
@@ -876,7 +876,7 @@ get_underapprox_from_guard(Tree, Map) ->
end
end;
var ->
- Type =
+ Type =
case dict:find(cerl_trees:get_label(Tree), Map) of
error -> throw(dont_know);
{ok, T} -> T
@@ -931,7 +931,7 @@ bitstr_constr(SizeType, UnitVal) ->
MinSize = erl_types:number_min(TmpSizeType),
t_bitstr(UnitVal, MinSize * UnitVal)
end;
- false ->
+ false ->
t_bitstr(UnitVal, 0)
end
end.
@@ -975,9 +975,9 @@ get_safe_underapprox_1([Pat|Left], Acc, Map) ->
end;
binary ->
%% TODO: Can maybe do something here
- throw(dont_know);
+ throw(dont_know);
cons ->
- {[Hd, Tl], Map1} =
+ {[Hd, Tl], Map1} =
get_safe_underapprox_1([cerl:cons_hd(Pat), cerl:cons_tl(Pat)], [], Map),
case t_is_any(Tl) of
true -> get_safe_underapprox_1(Left, [t_nonempty_list(Hd)|Acc], Map1);
@@ -1020,7 +1020,7 @@ get_safe_underapprox_1([], Acc, Map) ->
%% Guards
%%
-handle_guard(Guard, DefinedVars, State) ->
+handle_guard(Guard, DefinedVars, State) ->
True = t_from_term(true),
State1 = state__set_in_guard(State, true),
State2 = state__new_constraint_context(State1),
@@ -1039,7 +1039,7 @@ handle_guard(Guard, DefinedVars, State) ->
%%
%%=============================================================================
-get_bif_constr({erlang, Op, 2}, Dst, Args = [Arg1, Arg2], _State)
+get_bif_constr({erlang, Op, 2}, Dst, Args = [Arg1, Arg2], _State)
when Op =:= '+'; Op =:= '-'; Op =:= '*' ->
ReturnType = mk_fun_var(fun(Map) ->
TmpArgTypes = lookup_type_list(Args, Map),
@@ -1047,14 +1047,14 @@ get_bif_constr({erlang, Op, 2}, Dst, Args = [Arg1, Arg2], _State)
end, Args),
ArgFun =
fun(A, Pos) ->
- F =
+ F =
fun(Map) ->
DstType = lookup_type(Dst, Map),
AType = lookup_type(A, Map),
case t_is_integer(DstType) of
true ->
case t_is_integer(AType) of
- true ->
+ true ->
eval_inv_arith(Op, Pos, DstType, AType);
false ->
%% This must be temporary.
@@ -1062,7 +1062,7 @@ get_bif_constr({erlang, Op, 2}, Dst, Args = [Arg1, Arg2], _State)
end;
false ->
case t_is_float(DstType) of
- true ->
+ true ->
case t_is_integer(AType) of
true -> t_float();
false -> t_number()
@@ -1079,9 +1079,9 @@ get_bif_constr({erlang, Op, 2}, Dst, Args = [Arg1, Arg2], _State)
mk_conj_constraint_list([mk_constraint(Dst, sub, ReturnType),
mk_constraint(Arg1, sub, Arg1FunVar),
mk_constraint(Arg2, sub, Arg2FunVar)]);
-get_bif_constr({erlang, Op, 2}, Dst, [Arg1, Arg2] = Args, _State)
+get_bif_constr({erlang, Op, 2}, Dst, [Arg1, Arg2] = Args, _State)
when Op =:= '<'; Op =:= '=<'; Op =:= '>'; Op =:= '>=' ->
- ArgFun =
+ ArgFun =
fun(LocalArg1, LocalArg2, LocalOp) ->
fun(Map) ->
DstType = lookup_type(Dst, Map),
@@ -1098,19 +1098,19 @@ get_bif_constr({erlang, Op, 2}, Dst, [Arg1, Arg2] = Args, _State)
Max2 = erl_types:number_max(Arg2Type),
Min2 = erl_types:number_min(Arg2Type),
case LocalOp of
- '=<' ->
+ '=<' ->
if IsTrue -> t_from_range(Min1, Max2);
IsFalse -> t_from_range(range_inc(Min2), Max1)
end;
- '<' ->
+ '<' ->
if IsTrue -> t_from_range(Min1, range_dec(Max2));
IsFalse -> t_from_range(Min2, Max1)
end;
- '>=' ->
+ '>=' ->
if IsTrue -> t_from_range(Min2, Max1);
IsFalse -> t_from_range(Min1, range_dec(Max2))
end;
- '>' ->
+ '>' ->
if IsTrue -> t_from_range(range_inc(Min2), Max1);
IsFalse -> t_from_range(Min1, Max2)
end
@@ -1131,7 +1131,7 @@ get_bif_constr({erlang, Op, 2}, Dst, [Arg1, Arg2] = Args, _State)
DstArgs = [Dst, Arg1, Arg2],
Arg1Var = mk_fun_var(Arg1Fun, DstArgs),
Arg2Var = mk_fun_var(Arg2Fun, DstArgs),
- DstVar = mk_fun_var(fun(Map) ->
+ DstVar = mk_fun_var(fun(Map) ->
TmpArgTypes = lookup_type_list(Args, Map),
erl_bif_types:type(erlang, Op, 2, TmpArgTypes)
end, Args),
@@ -1143,9 +1143,9 @@ get_bif_constr({erlang, '++', 2}, Dst, [Hd, Tl] = Args, _State) ->
DstType = lookup_type(Dst, Map),
case t_is_cons(DstType) of
true -> t_list(t_cons_hd(DstType));
- false ->
+ false ->
case t_is_list(DstType) of
- true ->
+ true ->
case t_is_nil(DstType) of
true -> DstType;
false -> t_list(t_list_elements(DstType))
@@ -1160,7 +1160,7 @@ get_bif_constr({erlang, '++', 2}, Dst, [Hd, Tl] = Args, _State) ->
true -> t_sup(t_cons_tl(DstType), DstType);
false ->
case t_is_list(DstType) of
- true ->
+ true ->
case t_is_nil(DstType) of
true -> DstType;
false -> t_list(t_list_elements(DstType))
@@ -1170,10 +1170,10 @@ get_bif_constr({erlang, '++', 2}, Dst, [Hd, Tl] = Args, _State) ->
end
end,
DstL = [Dst],
- HdVar = mk_fun_var(HdFun, DstL),
+ HdVar = mk_fun_var(HdFun, DstL),
TlVar = mk_fun_var(TlFun, DstL),
ArgTypes = erl_bif_types:arg_types(erlang, '++', 2),
- ReturnType = mk_fun_var(fun(Map) ->
+ ReturnType = mk_fun_var(fun(Map) ->
TmpArgTypes = lookup_type_list(Args, Map),
erl_bif_types:type(erlang, '++', 2, TmpArgTypes)
end, Args),
@@ -1198,7 +1198,7 @@ get_bif_constr({erlang, is_function, 2}, Dst, [Fun, Arity], _State) ->
ArgFun = fun(Map) ->
DstType = lookup_type(Dst, Map),
case t_is_atom(true, DstType) of
- true ->
+ true ->
ArityType = lookup_type(Arity, Map),
case t_number_vals(ArityType) of
unknown -> t_fun();
@@ -1231,7 +1231,7 @@ get_bif_constr({erlang, is_record, 2}, Dst, [Var, Tag] = Args, _State) ->
end
end,
ArgV = mk_fun_var(ArgFun, [Dst]),
- DstFun = fun(Map) ->
+ DstFun = fun(Map) ->
TmpArgTypes = lookup_type_list(Args, Map),
erl_bif_types:type(erlang, is_record, 2, TmpArgTypes)
end,
@@ -1241,7 +1241,7 @@ get_bif_constr({erlang, is_record, 2}, Dst, [Var, Tag] = Args, _State) ->
mk_constraint(Var, sub, ArgV)]);
get_bif_constr({erlang, is_record, 3}, Dst, [Var, Tag, Arity] = Args, State) ->
%% TODO: Revise this to make it precise for Tag and Arity.
- ArgFun =
+ ArgFun =
fun(Map) ->
case t_is_atom(true, lookup_type(Dst, Map)) of
true ->
@@ -1257,14 +1257,14 @@ get_bif_constr({erlang, is_record, 3}, Dst, [Var, Tag, Arity] = Args, State) ->
GenRecord = t_tuple([TagType|AnyElems]),
case t_atom_vals(TagType) of
[TagVal] ->
- case state__lookup_record(State, TagVal,
+ case state__lookup_record(State, TagVal,
ArityVal - 1) of
{ok, Type} ->
AllOpaques = State#state.opaques,
case t_opaque_match_record(Type, AllOpaques) of
[Opaque] -> Opaque;
_ -> Type
- end;
+ end;
error -> GenRecord
end;
_ -> GenRecord
@@ -1279,9 +1279,9 @@ get_bif_constr({erlang, is_record, 3}, Dst, [Var, Tag, Arity] = Args, State) ->
end
end,
ArgV = mk_fun_var(ArgFun, [Tag, Arity, Dst]),
- DstFun = fun(Map) ->
+ DstFun = fun(Map) ->
[TmpVar, TmpTag, TmpArity] = TmpArgTypes = lookup_type_list(Args, Map),
- TmpArgTypes2 =
+ TmpArgTypes2 =
case lists:member(TmpVar, State#state.opaques) of
true ->
case t_is_integer(TmpArity) of
@@ -1293,7 +1293,7 @@ get_bif_constr({erlang, is_record, 3}, Dst, [Var, Tag, Arity] = Args, State) ->
case t_atom_vals(TmpTag) of
[TmpTagVal] ->
case state__lookup_record(State, TmpTagVal, TmpArityVal - 1) of
- {ok, TmpType} ->
+ {ok, TmpType} ->
case t_is_none(t_inf(TmpType, TmpVar, opaque)) of
true -> TmpArgTypes;
false -> [TmpType, TmpTag, TmpArity]
@@ -1312,7 +1312,7 @@ get_bif_constr({erlang, is_record, 3}, Dst, [Var, Tag, Arity] = Args, State) ->
end,
erl_bif_types:type(erlang, is_record, 3, TmpArgTypes2)
end,
- DstV = mk_fun_var(DstFun, Args),
+ DstV = mk_fun_var(DstFun, Args),
mk_conj_constraint_list([mk_constraint(Dst, sub, DstV),
mk_constraint(Arity, sub, t_integer()),
mk_constraint(Tag, sub, t_atom()),
@@ -1334,7 +1334,7 @@ get_bif_constr({erlang, 'and', 2}, Dst, [Arg1, Arg2] = Args, _State) ->
true -> False;
false -> t_boolean()
end;
- false ->
+ false ->
t_boolean()
end
end
@@ -1349,7 +1349,7 @@ get_bif_constr({erlang, 'and', 2}, Dst, [Arg1, Arg2] = Args, _State) ->
case t_is_atom(false, Arg2Type) of
true -> False;
false ->
- case (t_is_atom(true, Arg1Type)
+ case (t_is_atom(true, Arg1Type)
andalso t_is_atom(true, Arg2Type)) of
true -> True;
false -> t_boolean()
@@ -1378,7 +1378,7 @@ get_bif_constr({erlang, 'or', 2}, Dst, [Arg1, Arg2] = Args, _State) ->
true -> True;
false -> t_boolean()
end;
- false ->
+ false ->
t_boolean()
end
end
@@ -1393,7 +1393,7 @@ get_bif_constr({erlang, 'or', 2}, Dst, [Arg1, Arg2] = Args, _State) ->
case t_is_atom(true, Arg2Type) of
true -> True;
false ->
- case (t_is_atom(false, Arg1Type)
+ case (t_is_atom(false, Arg1Type)
andalso t_is_atom(false, Arg2Type)) of
true -> False;
false -> t_boolean()
@@ -1414,7 +1414,7 @@ get_bif_constr({erlang, 'or', 2}, Dst, [Arg1, Arg2] = Args, _State) ->
get_bif_constr({erlang, 'not', 1}, Dst, [Arg] = Args, _State) ->
True = t_from_term(true),
False = t_from_term(false),
- Fun = fun(Var) ->
+ Fun = fun(Var) ->
fun(Map) ->
Type = lookup_type(Var, Map),
case t_is_atom(true, Type) of
@@ -1439,7 +1439,7 @@ get_bif_constr({erlang, '=:=', 2}, Dst, [Arg1, Arg2] = Args, _State) ->
OtherVarType = lookup_type(OtherVar, Map),
case t_is_atom(true, DstType) of
true -> OtherVarType;
- false ->
+ false ->
case t_is_atom(false, DstType) of
true ->
case is_singleton_type(OtherVarType) of
@@ -1492,7 +1492,7 @@ get_bif_constr({erlang, '==', 2}, Dst, [Arg1, Arg2] = Args, _State) ->
true ->
case t_is_number(VarType) of
true -> t_number();
- false ->
+ false ->
case t_is_atom(VarType) of
true -> VarType;
false -> t_any()
@@ -1511,7 +1511,8 @@ get_bif_constr({erlang, '==', 2}, Dst, [Arg1, Arg2] = Args, _State) ->
mk_conj_constraint_list([mk_constraint(Dst, sub, DstV),
mk_constraint(Arg1, sub, ArgV1),
mk_constraint(Arg2, sub, ArgV2)]);
-get_bif_constr({erlang, element, 2} = _BIF, Dst, Args, State) ->
+get_bif_constr({erlang, element, 2} = _BIF, Dst, Args,
+ #state{cs = Constrs} = State) ->
GenType = erl_bif_types:type(erlang, element, 2),
case t_is_none(GenType) of
true -> ?debug("Bif: ~w failed\n", [_BIF]), throw(error);
@@ -1525,9 +1526,14 @@ get_bif_constr({erlang, element, 2} = _BIF, Dst, Args, State) ->
erl_bif_types:type(erlang, element, 2, ATs2)
end,
ReturnType = mk_fun_var(Fun, Args),
- ArgTypes = erl_bif_types:arg_types(erlang, element, 2),
+ ArgTypes = erl_bif_types:arg_types(erlang, element, 2),
Cs = mk_constraints(Args, sub, ArgTypes),
- mk_conj_constraint_list([mk_constraint(Dst, sub, ReturnType)|Cs])
+ NewCs =
+ case find_element(Args, Constrs) of
+ 'unknown' -> Cs;
+ Elem -> [mk_constraint(Dst, eq, Elem)|Cs]
+ end,
+ mk_conj_constraint_list([mk_constraint(Dst, sub, ReturnType)|NewCs])
end;
get_bif_constr({M, F, A} = _BIF, Dst, Args, State) ->
GenType = erl_bif_types:type(M, F, A),
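
The new clause above consults the current constraint store (via find_element/2, added near the end of this file) so that element/2 with a literal position and an already-constrained tuple is tied to the exact element variable instead of only the generic BIF return type. A hypothetical user-level example of code whose success typing benefits (module and function names are made up):

    -module(elem_example).
    -export([second/1]).

    %% With the added 'eq' constraint, the result of element(2, Tuple)
    %% can be related to the type of B rather than falling back to any().
    second({_, B} = Tuple) ->
        element(2, Tuple).
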
@@ -1541,7 +1547,7 @@ get_bif_constr({M, F, A} = _BIF, Dst, Args, State) ->
false -> T
end
end,
- ReturnType = mk_fun_var(fun(Map) ->
+ ReturnType = mk_fun_var(fun(Map) ->
TmpArgTypes0 = lookup_type_list(Args, Map),
TmpArgTypes = [UnopaqueFun(T) || T<- TmpArgTypes0],
erl_bif_types:type(M, F, A, TmpArgTypes)
@@ -1561,12 +1567,12 @@ get_bif_constr({M, F, A} = _BIF, Dst, Args, State) ->
end
end.
-eval_inv_arith('+', _Pos, Dst, Arg) ->
+eval_inv_arith('+', _Pos, Dst, Arg) ->
erl_bif_types:type(erlang, '-', 2, [Dst, Arg]);
-eval_inv_arith('*', _Pos, Dst, Arg) ->
+eval_inv_arith('*', _Pos, Dst, Arg) ->
case t_number_vals(Arg) of
[0] -> t_integer();
- _ ->
+ _ ->
TmpRet = erl_bif_types:type(erlang, 'div', 2, [Dst, Arg]),
Zero = t_from_term(0),
%% If 0 is not part of the result, it cannot be part of the argument.
@@ -1575,9 +1581,9 @@ eval_inv_arith('*', _Pos, Dst, Arg) ->
true -> TmpRet
end
end;
-eval_inv_arith('-', 1, Dst, Arg) ->
+eval_inv_arith('-', 1, Dst, Arg) ->
erl_bif_types:type(erlang, '-', 2, [Arg, Dst]);
-eval_inv_arith('-', 2, Dst, Arg) ->
+eval_inv_arith('-', 2, Dst, Arg) ->
erl_bif_types:type(erlang, '+', 2, [Arg, Dst]).
range_inc(neg_inf) -> neg_inf;
@@ -1614,7 +1620,7 @@ get_bif_test_constr(Dst, Arg, Type, State) ->
end;
false -> t_from_term(false)
end;
- false ->
+ false ->
case t_is_subtype(ArgType, Type) of
true -> t_from_term(true);
false -> t_boolean()
@@ -1632,11 +1638,11 @@ get_bif_test_constr(Dst, Arg, Type, State) ->
%%=============================================================================
solve([Fun], State) ->
- ?debug("============ Analyzing Fun: ~w ===========\n",
+ ?debug("============ Analyzing Fun: ~w ===========\n",
[debug_lookup_name(Fun)]),
solve_fun(Fun, dict:new(), State);
solve([_|_] = SCC, State) ->
- ?debug("============ Analyzing SCC: ~w ===========\n",
+ ?debug("============ Analyzing SCC: ~w ===========\n",
[[debug_lookup_name(F) || F <- SCC]]),
solve_scc(SCC, dict:new(), State, false).
@@ -1655,7 +1661,7 @@ solve_fun(Fun, FunMap, State) ->
solve_scc(SCC, Map, State, TryingUnit) ->
State1 = state__mark_as_non_self_rec(SCC, State),
- Vars0 = [{Fun, state__get_rec_var(Fun, State)} || Fun <- SCC],
+ Vars0 = [{Fun, state__get_rec_var(Fun, State)} || Fun <- SCC],
Vars = [Var || {_, {ok, Var}} <- Vars0],
Funs = [Fun || {Fun, {ok, _}} <- Vars0],
Types = unsafe_lookup_type_list(Funs, Map),
@@ -1682,7 +1688,7 @@ solve_scc(SCC, Map, State, TryingUnit) ->
false ->
Map2
end;
- false ->
+ false ->
?debug("SCC ~w did not reach fixpoint\n", [SCC]),
solve_scc(SCC, Map2, State, TryingUnit)
end.
@@ -1704,29 +1710,29 @@ scc_fold_fun(F, FunMap, State) ->
format_type(NewType)]),
NewFunMap.
-solve_ref_or_list(#constraint_ref{id = Id, deps = Deps},
+solve_ref_or_list(#constraint_ref{id = Id, deps = Deps},
Map, MapDict, State) ->
- {OldLocalMap, Check} =
+ {OldLocalMap, Check} =
case dict:find(Id, MapDict) of
error -> {dict:new(), false};
{ok, M} -> {M, true}
- end,
+ end,
?debug("Checking ref to fun: ~w\n", [debug_lookup_name(Id)]),
CheckDeps = ordsets:del_element(t_var_name(Id), Deps),
case Check andalso maps_are_equal(OldLocalMap, Map, CheckDeps) of
- true ->
+ true ->
?debug("Equal\n", []),
{ok, MapDict, Map};
false ->
?debug("Not equal. Solving\n", []),
Cs = state__get_cs(Id, State),
- Res =
+ Res =
case state__is_self_rec(Id, State) of
true -> solve_self_recursive(Cs, Map, MapDict, Id, t_none(), State);
false -> solve_ref_or_list(Cs, Map, MapDict, State)
end,
case Res of
- {error, NewMapDict} ->
+ {error, NewMapDict} ->
?debug("Error solving for function ~p\n", [debug_lookup_name(Id)]),
Arity = state__fun_arity(Id, State),
FunType =
@@ -1755,17 +1761,17 @@ solve_ref_or_list(#constraint_ref{id = Id, deps = Deps},
end;
solve_ref_or_list(#constraint_list{type=Type, list = Cs, deps = Deps, id = Id},
Map, MapDict, State) ->
- {OldLocalMap, Check} =
+ {OldLocalMap, Check} =
case dict:find(Id, MapDict) of
error -> {dict:new(), false};
{ok, M} -> {M, true}
end,
?debug("Checking ref to list: ~w\n", [Id]),
case Check andalso maps_are_equal(OldLocalMap, Map, Deps) of
- true ->
+ true ->
?debug("~w equal ~w\n", [Type, Id]),
{ok, MapDict, Map};
- false ->
+ false ->
?debug("~w not equal: ~w. Solving\n", [Type, Id]),
solve_clist(Cs, Type, Id, Deps, MapDict, Map, State)
end.
@@ -1793,7 +1799,7 @@ solve_self_recursive(Cs, Map, MapDict, Id, RecType0, State) ->
[[{X, format_type(Y)} || {X, Y} <- dict:to_list(NewMap)]]),
NewRecType = unsafe_lookup_type(Id, NewMap),
case t_is_equal(NewRecType, RecType0) of
- true ->
+ true ->
{ok, NewMapDict, enter_type(RecVar, NewRecType, NewMap)};
false ->
solve_self_recursive(Cs, Map, MapDict, Id, NewRecType, State)
@@ -1801,7 +1807,7 @@ solve_self_recursive(Cs, Map, MapDict, Id, RecType0, State) ->
end.
solve_clist(Cs, conj, Id, Deps, MapDict, Map, State) ->
- case solve_cs(Cs, Map, MapDict, State) of
+ case solve_cs(Cs, Map, MapDict, State) of
{error, _} = Error -> Error;
{ok, NewMapDict, NewMap} = Ret ->
case Cs of
@@ -1821,12 +1827,12 @@ solve_clist(Cs, disj, Id, _Deps, MapDict, Map, State) ->
{ok, NewDict, NewMap} -> {{ok, NewMap}, NewDict};
{error, _NewDict} = Error -> Error
end
- end,
+ end,
{Maps, NewMapDict} = lists:mapfoldl(Fun, MapDict, Cs),
case [X || {ok, X} <- Maps] of
[] -> {error, NewMapDict};
- MapList ->
- NewMap = join_maps(MapList),
+ MapList ->
+ NewMap = join_maps(MapList),
{ok, dict:store(Id, NewMap, NewMapDict), NewMap}
end.
@@ -1844,13 +1850,13 @@ solve_cs([#constraint{} = C|Tail], Map, MapDict, State) ->
case solve_one_c(C, Map, State#state.opaques) of
error ->
?debug("+++++++++++\nFailed: ~s :: ~s ~w ~s :: ~s\n+++++++++++\n",
- [format_type(C#constraint.lhs),
+ [format_type(C#constraint.lhs),
format_type(lookup_type(C#constraint.lhs, Map)),
C#constraint.op,
- format_type(C#constraint.rhs),
+ format_type(C#constraint.rhs),
format_type(lookup_type(C#constraint.rhs, Map))]),
{error, MapDict};
- {ok, NewMap} ->
+ {ok, NewMap} ->
solve_cs(Tail, NewMap, MapDict, State)
end;
solve_cs([], Map, MapDict, _State) ->
@@ -1863,7 +1869,7 @@ solve_one_c(#constraint{lhs = Lhs, rhs = Rhs, op = Op}, Map, Opaques) ->
?debug("Solving: ~s :: ~s ~w ~s :: ~s\n\tInf: ~s\n",
[format_type(Lhs), format_type(LhsType), Op,
format_type(Rhs), format_type(RhsType), format_type(Inf)]),
- case t_is_none(Inf) of
+ case t_is_none(Inf) of
true -> error;
false ->
case Op of
@@ -1887,8 +1893,8 @@ solve_subtype(Type, Inf, Map, Opaques) ->
try t_unify(Type, Inf, Opaques) of
{_, List} -> {ok, enter_type_list(List, Map)}
catch
- throw:{mismatch, _T1, _T2} ->
- ?debug("Mismatch between ~s and ~s\n",
+ throw:{mismatch, _T1, _T2} ->
+ ?debug("Mismatch between ~s and ~s\n",
[format_type(_T1), format_type(_T2)]),
error
end.
@@ -1936,9 +1942,9 @@ maps_are_equal_1(Map1, Map2, [H|Tail]) ->
T2 = lookup_type(H, Map2),
case t_is_equal(T1, T2) of
true -> maps_are_equal_1(Map1, Map2, Tail);
- false ->
+ false ->
?debug("~w: ~s =/= ~s\n", [H, format_type(T1), format_type(T2)]),
- false
+ false
end;
maps_are_equal_1(_Map1, _Map2, []) ->
true.
@@ -1953,7 +1959,7 @@ prune_keys(Map1, Map2, Deps) ->
true ->
Keys1 = dict:fetch_keys(Map1),
case length(Keys1) > NofDeps of
- true ->
+ true ->
Set1 = lists:sort(Keys1),
Set2 = lists:sort(dict:fetch_keys(Map2)),
ordsets:intersection(ordsets:union(Set1, Set2), Deps);
@@ -2035,7 +2041,7 @@ lookup_type(Key, Map) ->
mk_var(Var) ->
case cerl:is_literal(Var) of
true -> Var;
- false ->
+ false ->
case cerl:is_c_values(Var) of
true -> t_product(mk_var_no_lit_list(cerl:values_es(Var)));
false -> t_var(cerl_trees:get_label(Var))
@@ -2076,10 +2082,10 @@ state__set_opaques(#state{records = RecDict} = State, {M, _F, _A}) ->
state__lookup_record(#state{records = Records}, Tag, Arity) ->
case erl_types:lookup_record(Tag, Arity, Records) of
- {ok, Fields} ->
+ {ok, Fields} ->
{ok, t_tuple([t_from_term(Tag)|
[FieldType || {_FieldName, FieldType} <- Fields]])};
- error ->
+ error ->
error
end.
@@ -2098,12 +2104,12 @@ state__is_in_guard(#state{in_guard = Bool}) ->
state__get_fun_prototype(Op, Arity, State) ->
case t_is_fun(Op) of
true -> {State, Op};
- false ->
+ false ->
{State1, [Ret|Args]} = state__mk_vars(Arity+1, State),
Fun = t_fun(Args, Ret),
{State1, Fun}
end.
-
+
state__lookup_rec_var_in_scope(MFA, #state{name_map = NameMap}) ->
dict:find(MFA, NameMap).
@@ -2115,11 +2121,11 @@ state__store_fun_arity(Tree, #state{fun_arities = Map} = State) ->
state__fun_arity(Id, #state{fun_arities = Map}) ->
dict:fetch(Id, Map).
-state__lookup_undef_var(Tree, #state{callgraph = CG, plt = Plt}) ->
+state__lookup_undef_var(Tree, #state{callgraph = CG, plt = Plt}) ->
Label = cerl_trees:get_label(Tree),
case dialyzer_callgraph:lookup_rec_var(Label, CG) of
error -> error;
- {ok, MFA} ->
+ {ok, MFA} ->
case dialyzer_plt:lookup(Plt, MFA) of
none -> error;
{value, {RetType, ArgTypes}} -> {ok, t_fun(ArgTypes, RetType)}
@@ -2179,7 +2185,7 @@ state__add_prop_constrs(Tree, #state{prop_types = PropTypes} = State) ->
case erl_types:any_none(ArgTypes) of
true -> not_called;
false ->
- ?debug("Adding propagated constr: ~s for function ~w\n",
+ ?debug("Adding propagated constr: ~s for function ~w\n",
[format_type(FunType), debug_lookup_name(mk_var(Tree))]),
FunVar = mk_var(Tree),
state__store_conj(FunVar, sub, FunType, State)
@@ -2225,7 +2231,7 @@ state__store_conj_lists_1([], _Op, [], State) ->
state__mk_var(#state{next_label = NL} = State) ->
{State#state{next_label = NL+1}, t_var(NL)}.
-
+
state__mk_vars(N, #state{next_label = NL} = State) ->
NewLabel = NL + N,
Vars = [t_var(X) || X <- lists:seq(NL, NewLabel-1)],
@@ -2235,7 +2241,7 @@ state__store_constrs(Id, Cs, #state{cmap = Dict} = State) ->
NewDict = dict:store(Id, Cs, Dict),
State#state{cmap = NewDict}.
-state__get_cs(Var, #state{cmap = Dict}) ->
+state__get_cs(Var, #state{cmap = Dict}) ->
dict:fetch(Var, Dict).
%% The functions here will not be treated as self recursive.
@@ -2286,7 +2292,7 @@ mk_constraint(Lhs, Op, Rhs) ->
%% This constraint is constant. Solve it immediately.
case solve_one_c(C, dict:new(), []) of
error -> throw(error);
- _ ->
+ _ ->
%% This is always true, keep it anyway for logistic reasons
C
end;
@@ -2335,7 +2341,7 @@ mk_constraint_1(Lhs, eq, Rhs) when Lhs < Rhs ->
mk_constraint_1(Lhs, eq, Rhs) ->
#constraint{lhs = Rhs, op = eq, rhs = Lhs};
mk_constraint_1(Lhs, Op, Rhs) ->
- #constraint{lhs = Lhs, op = Op, rhs = Rhs}.
+ #constraint{lhs = Lhs, op = Op, rhs = Rhs}.
mk_constraints([Lhs|LhsTail], Op, [Rhs|RhsTail]) ->
[mk_constraint(Lhs, Op, Rhs)|mk_constraints(LhsTail, Op, RhsTail)];
@@ -2350,7 +2356,7 @@ mk_constraint_list(Type, List) ->
List2 = ordsets:filter(fun(X) -> get_deps(X) =/= [] end, List1),
Deps = calculate_deps(List2),
case Deps =:= [] of
- true -> #constraint_list{type = conj,
+ true -> #constraint_list{type = conj,
list = [mk_constraint(t_any(), eq, t_any())],
deps = []};
false -> #constraint_list{type = Type, list = List2, deps = Deps}
@@ -2372,11 +2378,11 @@ update_constraint_list(CL, List) ->
%% We expand guard constraints into disjunctive normal form to gain
%% precision in simple guards. However, because of the exponential
%% growth of this expansion in the presence of disjunctions we can even
-%% get into trouble while expanding.
+%% get into trouble while expanding.
%%
%% To limit this we only expand when the number of disjunctions is
%% below a certain limit. This limit is currently set based on the
-%% behaviour of boolean 'or'.
+%% behaviour of boolean 'or'.
%%
%% V1 = V2 or V3
%%
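
Every disjunction in a guard roughly doubles the number of conjunctions in the expanded form, which is why the expansion is capped by ?DISJ_NORM_FORM_LIMIT below. A small, hypothetical guard that already expands to four conjunctions:

    %% (X =:= a orelse X =:= b) andalso (Y =:= 1 orelse Y =:= 2)
    %% expands to the four combinations {a,1}, {a,2}, {b,1} and {b,2}.
    f(X, Y) when (X =:= a orelse X =:= b) andalso
                 (Y =:= 1 orelse Y =:= 2) ->
        {X, Y}.
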
@@ -2395,7 +2401,7 @@ update_constraint_list(CL, List) ->
-define(DISJ_NORM_FORM_LIMIT, 28).
mk_disj_norm_form(#constraint_list{} = CL) ->
- try
+ try
List1 = expand_to_conjunctions(CL),
mk_disj_constraint_list(List1)
catch
@@ -2409,7 +2415,7 @@ expand_to_conjunctions(#constraint_list{type = conj, list = List}) ->
true -> [mk_conj_constraint_list(List1)];
false ->
case List2 of
- [JustOneList] ->
+ [JustOneList] ->
[mk_conj_constraint_list([L|List1]) || L <- JustOneList];
_ ->
combine_conj_lists(List2, List1)
@@ -2422,7 +2428,7 @@ expand_to_conjunctions(#constraint_list{type = disj, list = List}) ->
List1 = [C || C <- List, is_simple_constraint(C)],
%% Just an assert.
[] = [C || #constraint{} = C <- List1],
- Expanded = lists:flatten([expand_to_conjunctions(C)
+ Expanded = lists:flatten([expand_to_conjunctions(C)
|| #constraint_list{} = C <- List]),
ReturnList = Expanded ++ List1,
if length(ReturnList) > ?DISJ_NORM_FORM_LIMIT -> throw(too_many_disj);
@@ -2467,7 +2473,7 @@ wrap_simple_constr(#constraint_list{} = C) -> C;
wrap_simple_constr(#constraint_ref{} = C) -> C.
enumerate_constraints(State) ->
- Cs = [mk_constraint_ref(Id, get_deps(state__get_cs(Id, State)))
+ Cs = [mk_constraint_ref(Id, get_deps(state__get_cs(Id, State)))
|| Id <- state__scc(State)],
{_, _, NewState} = enumerate_constraints(Cs, 0, [], State),
NewState.
@@ -2475,9 +2481,9 @@ enumerate_constraints(State) ->
enumerate_constraints([#constraint_ref{id = Id} = C|Tail], N, Acc, State) ->
Cs = state__get_cs(Id, State),
{[NewCs], NewN, NewState1} = enumerate_constraints([Cs], N, [], State),
- NewState2 = state__store_constrs(Id, NewCs, NewState1),
+ NewState2 = state__store_constrs(Id, NewCs, NewState1),
enumerate_constraints(Tail, NewN+1, [C|Acc], NewState2);
-enumerate_constraints([#constraint_list{type = conj, list = List} = C|Tail],
+enumerate_constraints([#constraint_list{type = conj, list = List} = C|Tail],
N, Acc, State) ->
%% Separate the flat constraints from the deep ones to make a
%% separate fixpoint iteration over the flat ones for speed.
@@ -2496,7 +2502,7 @@ enumerate_constraints([#constraint_list{type = conj, list = List} = C|Tail],
end,
NewAcc = [C#constraint_list{list = NewList, id = {list, N3}}|Acc],
enumerate_constraints(Tail, N3+1, NewAcc, State2);
-enumerate_constraints([#constraint_list{list = List, type = disj} = C|Tail],
+enumerate_constraints([#constraint_list{list = List, type = disj} = C|Tail],
N, Acc, State) ->
{NewList, NewN, NewState} = enumerate_constraints(List, N, [], State),
NewAcc = [C#constraint_list{list = NewList, id = {list, NewN}}|Acc],
@@ -2515,7 +2521,7 @@ group_constraints_in_components(Cs, N) ->
case find_dep_components(DepList, []) of
[_] -> {Cs, N};
[_|_] = Components ->
- ConstrComp = [[C || #constraint{deps = D} = C <- Cs,
+ ConstrComp = [[C || #constraint{deps = D} = C <- Cs,
ordsets:is_subset(D, Comp)]
|| Comp <- Components],
lists:mapfoldl(fun(CComp, TmpN) ->
@@ -2545,7 +2551,7 @@ find_dep_components([], AccSet, Ungrouped) ->
%% Put the fun ref constraints last in any conjunction since we need
%% to separate the environment from the interior of the function.
order_fun_constraints(State) ->
- Cs = [mk_constraint_ref(Id, get_deps(state__get_cs(Id, State)))
+ Cs = [mk_constraint_ref(Id, get_deps(state__get_cs(Id, State)))
|| Id <- state__scc(State)],
order_fun_constraints(Cs, State).
@@ -2565,8 +2571,8 @@ order_fun_constraints([#constraint_list{list = List, type = Type} = C|Tail],
case Type of
conj -> order_fun_constraints(List, [], [], State);
disj ->
- FoldFun = fun(X, AccState) ->
- {[NewX], NewAccState} =
+ FoldFun = fun(X, AccState) ->
+ {[NewX], NewAccState} =
order_fun_constraints([X], [], [], AccState),
{NewX, NewAccState}
end,
@@ -2588,7 +2594,7 @@ order_fun_constraints([], Funs, Acc, State) ->
is_singleton_non_number_type(Type) ->
case t_is_number(Type) of
- true -> false;
+ true -> false;
false -> is_singleton_type(Type)
end.
@@ -2613,6 +2619,41 @@ is_singleton_type(Type) ->
end
end.
+find_element(Args, Cs) ->
+ [Pos, Tuple] = Args,
+ case erl_types:t_is_number(Pos) of
+ true ->
+ case erl_types:t_number_vals(Pos) of
+ 'unknown' -> 'unknown';
+ [I] ->
+ case find_constraint(Tuple, Cs) of
+ 'unknown' -> 'unknown';
+ #constraint{lhs = ExTuple} ->
+ case erl_types:t_is_tuple(ExTuple) of
+ true ->
+ Elems = erl_types:t_tuple_args(ExTuple),
+ Elem = lists:nth(I, Elems),
+ case erl_types:t_is_var(Elem) of
+ true -> Elem;
+ false -> 'unknown'
+ end;
+ false -> 'unknown'
+ end
+ end;
+ _ -> 'unknown'
+ end;
+ false -> 'unknown'
+ end.
+
+find_constraint(_Tuple, []) ->
+ 'unknown';
+find_constraint(Tuple, [#constraint{op = 'eq', rhs = Tuple} = C|_]) ->
+ C;
+find_constraint(Tuple, [#constraint_list{list = List}|Cs]) ->
+ find_constraint(Tuple, List ++ Cs);
+find_constraint(Tuple, [_|Cs]) ->
+ find_constraint(Tuple, Cs).
+
%% ============================================================================
%%
%% Pretty printer and debug facilities.
@@ -2638,7 +2679,7 @@ format_type(Type) ->
-ifdef(DEBUG_NAME_MAP).
debug_make_name_map(Vars, Funs) ->
Map = get(dialyzer_typesig_map),
- NewMap =
+ NewMap =
if Map =:= undefined -> debug_make_name_map(Vars, Funs, dict:new());
true -> debug_make_name_map(Vars, Funs, Map)
end,
@@ -2676,15 +2717,15 @@ pp_constraints(Cs, State) ->
io:nl(),
Res.
-pp_constraints([List|Tail], Separator, Level, MaxDepth,
+pp_constraints([List|Tail], Separator, Level, MaxDepth,
State) when is_list(List) ->
pp_constraints(List++Tail, Separator, Level, MaxDepth, State);
-pp_constraints([#constraint_ref{id = Id}|Left], Separator,
+pp_constraints([#constraint_ref{id = Id}|Left], Separator,
Level, MaxDepth, State) ->
Cs = state__get_cs(Id, State),
io:format("%Ref ~w%", [t_var_name(Id)]),
pp_constraints([Cs|Left], Separator, Level, MaxDepth, State);
-pp_constraints([#constraint{lhs = Lhs, op = Op, rhs = Rhs}], _Separator,
+pp_constraints([#constraint{lhs = Lhs, op = Op, rhs = Rhs}], _Separator,
Level, MaxDepth, _State) ->
io:format("~s ~w ~s", [format_type(Lhs), Op, format_type(Rhs)]),
erlang:max(Level, MaxDepth);
@@ -2721,7 +2762,7 @@ pp_constrs_scc(_SCC, _State) ->
constraints_to_dot_scc(SCC, State) ->
io:format("SCC: ~p\n", [SCC]),
- Name = lists:flatten([io_lib:format("'~w'", [debug_lookup_name(Fun)])
+ Name = lists:flatten([io_lib:format("'~w'", [debug_lookup_name(Fun)])
|| Fun <- SCC]),
Cs = [state__get_cs(Fun, State) || Fun <- SCC],
constraints_to_dot(Cs, Name, State).
@@ -2737,22 +2778,22 @@ constraints_to_dot(Cs0, Name, State) ->
constraints_to_nodes([{Name, #constraint_list{type = Type, list = List, id=Id}}
|Left], N, Level, Graph, Opts, State) ->
- N1 = N + length(List),
+ N1 = N + length(List),
NewList = lists:zip(lists:seq(N, N1 - 1), List),
Names = [SubName || {SubName, _C} <- NewList],
Edges = [{Name, SubName} || SubName <- Names],
- ThisNode = [{Name, Opt} || Opt <- [{label,
+ ThisNode = [{Name, Opt} || Opt <- [{label,
lists:flatten(io_lib:format("~w", [Id]))},
{shape, get_shape(Type)},
{level, Level}]],
- {NewGraph, NewOpts, N2} = constraints_to_nodes(NewList, N1, Level+1,
- [Edges|Graph],
+ {NewGraph, NewOpts, N2} = constraints_to_nodes(NewList, N1, Level+1,
+ [Edges|Graph],
[ThisNode|Opts], State),
constraints_to_nodes(Left, N2, Level, NewGraph, NewOpts, State);
constraints_to_nodes([{Name, #constraint{lhs = Lhs, op = Op, rhs = Rhs}}|Left],
N, Level, Graph, Opts, State) ->
- Label = lists:flatten(io_lib:format("~s ~w ~s",
- [format_type(Lhs), Op,
+ Label = lists:flatten(io_lib:format("~s ~w ~s",
+ [format_type(Lhs), Op,
format_type(Rhs)])),
ThisNode = [{Name, Opt} || Opt <- [{label, Label}, {level, Level}]],
NewOpts = [ThisNode|Opts],
@@ -2761,20 +2802,20 @@ constraints_to_nodes([{Name, #constraint_ref{id = Id0}}|Left],
N, Level, Graph, Opts, State) ->
Id = debug_lookup_name(Id0),
CList = state__get_cs(Id0, State),
- ThisNode = [{Name, Opt} || Opt <- [{label,
+ ThisNode = [{Name, Opt} || Opt <- [{label,
lists:flatten(io_lib:format("~w", [Id]))},
{shape, ellipse},
- {level, Level}]],
- NewList = [{N, CList}],
- {NewGraph, NewOpts, N1} = constraints_to_nodes(NewList, N + 1, Level + 1,
+ {level, Level}]],
+ NewList = [{N, CList}],
+ {NewGraph, NewOpts, N1} = constraints_to_nodes(NewList, N + 1, Level + 1,
[{Name, N}|Graph],
[ThisNode|Opts], State),
constraints_to_nodes(Left, N1, Level, NewGraph, NewOpts, State);
constraints_to_nodes([], N, _Level, Graph, Opts, _State) ->
{lists:flatten(Graph), lists:flatten(Opts), N}.
-
+
get_shape(conj) -> box;
-get_shape(disj) -> diamond.
+get_shape(disj) -> diamond.
-else.
constraints_to_dot_scc(_SCC, _State) ->
diff --git a/lib/dialyzer/src/dialyzer_utils.erl b/lib/dialyzer/src/dialyzer_utils.erl
index 6ea243c26f..a9da229061 100644
--- a/lib/dialyzer/src/dialyzer_utils.erl
+++ b/lib/dialyzer/src/dialyzer_utils.erl
@@ -21,7 +21,7 @@
%%%-------------------------------------------------------------------
%%% File : dialyzer_utils.erl
%%% Author : Tobias Lindahl <[email protected]>
-%%% Description :
+%%% Description :
%%%
%%% Created : 5 Dec 2006 by Tobias Lindahl <[email protected]>
%%%-------------------------------------------------------------------
@@ -42,6 +42,7 @@
merge_records/2,
pp_hook/0,
process_record_remote_types/1,
+ sets_filter/2,
src_compiler_opts/0
]).
@@ -73,12 +74,11 @@ print_types1([{record, _Name} = Key|T], RecDict) ->
-define(debug(D_), ok).
-endif.
-%%
-%% Types that need to be imported from somewhere else
-%%
+%% ----------------------------------------------------------------------------
--type abstract_code() :: [tuple()]. %% XXX: refine
--type comp_options() :: [atom()]. %% XXX: only a resticted set of options used
+-type abstract_code() :: [tuple()]. %% XXX: import from somewhere
+-type comp_options() :: [compile:option()].
+-type mod_or_fname() :: atom() | file:filename().
%% ============================================================================
%%
@@ -86,13 +86,13 @@ print_types1([{record, _Name} = Key|T], RecDict) ->
%%
%% ============================================================================
--spec get_abstract_code_from_src(atom() | file:filename()) ->
+-spec get_abstract_code_from_src(mod_or_fname()) ->
{'ok', abstract_code()} | {'error', [string()]}.
get_abstract_code_from_src(File) ->
get_abstract_code_from_src(File, src_compiler_opts()).
--spec get_abstract_code_from_src(atom() | file:filename(), comp_options()) ->
+-spec get_abstract_code_from_src(mod_or_fname(), comp_options()) ->
{'ok', abstract_code()} | {'error', [string()]}.
get_abstract_code_from_src(File, Opts) ->
@@ -169,13 +169,13 @@ get_record_and_type_info(AbstractCode) ->
Module = get_module(AbstractCode),
get_record_and_type_info(AbstractCode, Module, dict:new()).
--spec get_record_and_type_info(abstract_code(), atom(), dict()) ->
+-spec get_record_and_type_info(abstract_code(), module(), dict()) ->
{'ok', dict()} | {'error', string()}.
get_record_and_type_info(AbstractCode, Module, RecDict) ->
get_record_and_type_info(AbstractCode, Module, [], RecDict).
-get_record_and_type_info([{attribute, _, record, {Name, Fields0}}|Left],
+get_record_and_type_info([{attribute, _, record, {Name, Fields0}}|Left],
Module, Records, RecDict) ->
{ok, Fields} = get_record_fields(Fields0, RecDict),
Arity = length(Fields),
@@ -188,7 +188,7 @@ get_record_and_type_info([{attribute, _, type, {{record, Name}, Fields0, []}}
Arity = length(Fields),
NewRecDict = dict:store({record, Name}, [{Arity, Fields}], RecDict),
get_record_and_type_info(Left, Module, Records, NewRecDict);
-get_record_and_type_info([{attribute, _, Attr, {Name, TypeForm}}|Left],
+get_record_and_type_info([{attribute, _, Attr, {Name, TypeForm}}|Left],
Module, Records, RecDict) when Attr =:= 'type';
Attr =:= 'opaque' ->
try
@@ -197,7 +197,7 @@ get_record_and_type_info([{attribute, _, Attr, {Name, TypeForm}}|Left],
catch
throw:{error, _} = Error -> Error
end;
-get_record_and_type_info([{attribute, _, Attr, {Name, TypeForm, Args}}|Left],
+get_record_and_type_info([{attribute, _, Attr, {Name, TypeForm, Args}}|Left],
Module, Records, RecDict) when Attr =:= 'type';
Attr =:= 'opaque' ->
try
@@ -219,7 +219,7 @@ get_record_and_type_info([], _Module, Records, RecDict) ->
end.
add_new_type(TypeOrOpaque, Name, TypeForm, ArgForms, Module, RecDict) ->
- case erl_types:type_is_defined(TypeOrOpaque, Name, RecDict) of
+ case erl_types:type_is_defined(TypeOrOpaque, Name, RecDict) of
true ->
throw({error, io_lib:format("Type already defined: ~w\n", [Name])});
false ->
@@ -237,7 +237,7 @@ add_new_type(TypeOrOpaque, Name, TypeForm, ArgForms, Module, RecDict) ->
get_record_fields(Fields, RecDict) ->
get_record_fields(Fields, RecDict, []).
-get_record_fields([{typed_record_field, OrdRecField, TypeForm}|Left],
+get_record_fields([{typed_record_field, OrdRecField, TypeForm}|Left],
RecDict, Acc) ->
Name =
case OrdRecField of
@@ -278,13 +278,16 @@ type_record_fields([RecKey|Recs], RecDict) ->
process_record_remote_types(CServer) ->
TempRecords = dialyzer_codeserver:get_temp_records(CServer),
+ TempExpTypes = dialyzer_codeserver:get_temp_exported_types(CServer),
RecordFun =
fun(Key, Value) ->
case Key of
{record, _Name} ->
FieldFun =
fun(_Arity, Fields) ->
- [{Name, erl_types:t_solve_remote(Field, TempRecords)} || {Name, Field} <- Fields]
+ [{Name, erl_types:t_solve_remote(Field, TempExpTypes,
+ TempRecords)}
+ || {Name, Field} <- Fields]
end,
orddict:map(FieldFun, Value);
_Other -> Value
@@ -295,7 +298,8 @@ process_record_remote_types(CServer) ->
dict:map(RecordFun, Record)
end,
NewRecords = dict:map(ModuleFun, TempRecords),
- dialyzer_codeserver:finalize_records(NewRecords, CServer).
+ CServer1 = dialyzer_codeserver:finalize_records(NewRecords, CServer),
+ dialyzer_codeserver:finalize_exported_types(TempExpTypes, CServer1).
-spec merge_records(dict(), dict()) -> dict().
@@ -308,19 +312,19 @@ merge_records(NewRecords, OldRecords) ->
%%
%% ============================================================================
--spec get_spec_info(module(), abstract_code(), dict()) ->
+-spec get_spec_info(atom(), abstract_code(), dict()) ->
{'ok', dict()} | {'error', string()}.
get_spec_info(ModName, AbstractCode, RecordsDict) ->
get_spec_info(AbstractCode, dict:new(), RecordsDict, ModName, "nofile").
-%% TypeSpec is a list of conditional contracts for a function.
+%% TypeSpec is a list of conditional contracts for a function.
%% Each contract is of the form {[Argument], Range, [Constraint]} where
%% - Argument and Range are in erl_types:erl_type() format and
%% - Constraint is of the form {subtype, T1, T2} where T1 and T2
%% are erl_types:erl_type()
-get_spec_info([{attribute, Ln, spec, {Id, TypeSpec}}|Left],
+get_spec_info([{attribute, Ln, spec, {Id, TypeSpec}}|Left],
SpecDict, RecordsDict, ModName, File) when is_list(TypeSpec) ->
MFA = case Id of
{_, _, _} = T -> T;
@@ -335,7 +339,7 @@ get_spec_info([{attribute, Ln, spec, {Id, TypeSpec}}|Left],
{ok, {{OtherFile, L},_C}} ->
{Mod, Fun, Arity} = MFA,
Msg = io_lib:format(" Contract for function ~w:~w/~w "
- "already defined in ~s:~w\n",
+ "already defined in ~s:~w\n",
[Mod, Fun, Arity, OtherFile, L]),
throw({error, Msg})
catch
@@ -353,15 +357,30 @@ get_spec_info([], SpecDict, _RecordsDict, _ModName, _File) ->
%% ============================================================================
%%
+%% Exported types
+%%
+%% ============================================================================
+
+-spec sets_filter([module()], set()) -> set().
+
+sets_filter([], ExpTypes) ->
+ ExpTypes;
+sets_filter([Mod|Mods], ExpTypes) ->
+ NewExpTypes = sets:filter(fun({M, _F, _A}) -> M =/= Mod end, ExpTypes),
+ sets_filter(Mods, NewExpTypes).
+
+%% ============================================================================
+%%
%% Util utils
%%
%% ============================================================================
--spec src_compiler_opts() -> comp_options().
+-spec src_compiler_opts() -> [compile:option(),...].
src_compiler_opts() ->
[no_copt, to_core, binary, return_errors,
- no_inline, strict_record_tests, strict_record_updates].
+ no_inline, strict_record_tests, strict_record_updates,
+ no_is_record_optimization].
-spec get_module(abstract_code()) -> module().
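
The new sets_filter/2 drops every exported type whose module is in the given list; a small usage sketch with made-up entries:

    ExpTypes0 = sets:from_list([{lists, key, 1}, {foo, t, 0}, {bar, t, 2}]),
    ExpTypes1 = dialyzer_utils:sets_filter([foo, bar], ExpTypes0),
    [{lists, key, 1}] = sets:to_list(ExpTypes1).
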
@@ -381,7 +400,7 @@ cleanup_parse_transforms([]) ->
-spec format_errors([{module(), string()}]) -> [string()].
format_errors([{Mod, Errors}|Left]) ->
- FormatedError =
+ FormatedError =
[io_lib:format("~s:~w: ~s\n", [Mod, Line, M:format_error(Desc)])
|| {Line, M, Desc} <- Errors],
[lists:flatten(FormatedError) | format_errors(Left)];
@@ -456,7 +475,7 @@ pp_size(Size, Ctxt, Cont) ->
end.
pp_opts(Type, Flags) ->
- FinalFlags =
+ FinalFlags =
case cerl:atom_val(Type) of
binary -> [];
float -> keep_endian(cerl:concrete(Flags));
diff --git a/lib/dialyzer/vsn.mk b/lib/dialyzer/vsn.mk
index e3e3f6d668..f2daf86def 100644
--- a/lib/dialyzer/vsn.mk
+++ b/lib/dialyzer/vsn.mk
@@ -1 +1 @@
-DIALYZER_VSN = 2.2.0
+DIALYZER_VSN = 2.3.0
diff --git a/lib/erl_interface/doc/src/ei.xml b/lib/erl_interface/doc/src/ei.xml
index 9083ae02b0..d7af7a1b67 100644
--- a/lib/erl_interface/doc/src/ei.xml
+++ b/lib/erl_interface/doc/src/ei.xml
@@ -581,7 +581,7 @@ ei_x_encode_empty_list(&amp;x);
<c><![CDATA[term]]></c> union, it is decoded, and the appropriate field
in <c><![CDATA[term->value]]></c> is set, and <c><![CDATA[*index]]></c> is
incremented by the term size.</p>
- <p>The function returns 0 on successful encoding, -1 on error,
+ <p>The function returns 0 on successful decoding, -1 on error,
and 1 if the term seems alright, but does not fit in the
<c><![CDATA[term]]></c> structure. If it returns 0, the <c><![CDATA[index]]></c>
will be incremented, and the <c><![CDATA[term]]></c> contains the
diff --git a/lib/erl_interface/include/ei.h b/lib/erl_interface/include/ei.h
index d1a697615a..729b9fc367 100644
--- a/lib/erl_interface/include/ei.h
+++ b/lib/erl_interface/include/ei.h
@@ -1,19 +1,19 @@
/*
* %CopyrightBegin%
- *
- * Copyright Ericsson AB 1998-2009. All Rights Reserved.
- *
+ *
+ * Copyright Ericsson AB 1998-2010. All Rights Reserved.
+ *
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
* compliance with the License. You should have received a copy of the
* Erlang Public License along with this software. If not, it can be
* retrieved online at http://www.erlang.org/.
- *
+ *
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
* the License for the specific language governing rights and limitations
* under the License.
- *
+ *
* %CopyrightEnd%
*/
#ifndef EI_H
@@ -110,6 +110,7 @@
#define ERL_SMALL_INTEGER_EXT 'a'
#define ERL_INTEGER_EXT 'b'
#define ERL_FLOAT_EXT 'c'
+#define NEW_FLOAT_EXT 'F'
#define ERL_ATOM_EXT 'd'
#define ERL_REFERENCE_EXT 'e'
#define ERL_NEW_REFERENCE_EXT 'r'
diff --git a/lib/erl_interface/src/connect/ei_connect.c b/lib/erl_interface/src/connect/ei_connect.c
index d2d0a7e7c1..b1b79aa0e5 100644
--- a/lib/erl_interface/src/connect/ei_connect.c
+++ b/lib/erl_interface/src/connect/ei_connect.c
@@ -1,19 +1,19 @@
/*
* %CopyrightBegin%
- *
- * Copyright Ericsson AB 2000-2009. All Rights Reserved.
- *
+ *
+ * Copyright Ericsson AB 2000-2010. All Rights Reserved.
+ *
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
* compliance with the License. You should have received a copy of the
* Erlang Public License along with this software. If not, it can be
* retrieved online at http://www.erlang.org/.
- *
+ *
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
* the License for the specific language governing rights and limitations
* under the License.
- *
+ *
* %CopyrightEnd%
*/
/*
@@ -1323,7 +1323,8 @@ static int send_name_or_challenge(int fd, char *nodename,
put32be(s, (DFLAG_EXTENDED_REFERENCES
| DFLAG_EXTENDED_PIDS_PORTS
| DFLAG_FUN_TAGS
- | DFLAG_NEW_FUN_TAGS));
+ | DFLAG_NEW_FUN_TAGS
+ | DFLAG_NEW_FLOATS));
if (f_chall)
put32be(s, challenge);
memcpy(s, nodename, strlen(nodename));
@@ -1393,6 +1394,11 @@ static int recv_challenge(int fd, unsigned *challenge,
goto error;
}
+ if (!(*flags & DFLAG_NEW_FLOATS)) {
+ EI_TRACE_ERR0("recv_challenge","<- RECV_CHALLENGE peer cannot "
+ "handle binary float encoding");
+ goto error;
+ }
if (getpeername(fd, (struct sockaddr *) &sin, &sin_len) < 0) {
EI_TRACE_ERR0("recv_challenge","<- RECV_CHALLENGE can't get peername");
diff --git a/lib/erl_interface/src/connect/ei_connect_int.h b/lib/erl_interface/src/connect/ei_connect_int.h
index 9926f799df..3c42b49b82 100644
--- a/lib/erl_interface/src/connect/ei_connect_int.h
+++ b/lib/erl_interface/src/connect/ei_connect_int.h
@@ -1,19 +1,19 @@
/*
* %CopyrightBegin%
- *
- * Copyright Ericsson AB 2001-2009. All Rights Reserved.
- *
+ *
+ * Copyright Ericsson AB 2001-2010. All Rights Reserved.
+ *
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
* compliance with the License. You should have received a copy of the
* Erlang Public License along with this software. If not, it can be
* retrieved online at http://www.erlang.org/.
- *
+ *
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
* the License for the specific language governing rights and limitations
* under the License.
- *
+ *
* %CopyrightEnd%
*/
/*
@@ -101,6 +101,7 @@ extern int h_errno;
#define DFLAG_FUN_TAGS 16
#define DFLAG_NEW_FUN_TAGS 0x80
#define DFLAG_EXTENDED_PIDS_PORTS 0x100
+#define DFLAG_NEW_FLOATS 0x800
ei_cnode *ei_fd_to_cnode(int fd);
int ei_distversion(int fd);
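The two files above are the handshake side of the change: the local node now advertises DFLAG_NEW_FLOATS in send_name_or_challenge() and recv_challenge() rejects peers that do not offer it. A condensed sketch of that negotiation follows; the DFLAG_* values are copied from ei_connect_int.h purely for illustration (they are internal constants, not part of the public ei API), and peer_flags is a made-up value for a peer that lacks the flag.

    /* Sketch only: mirrors the handshake checks above.  DFLAG_* values are
     * copied from ei_connect_int.h; peer_flags is hypothetical. */
    #include <stdio.h>

    #define DFLAG_EXTENDED_REFERENCES 4
    #define DFLAG_FUN_TAGS            16
    #define DFLAG_NEW_FUN_TAGS        0x80
    #define DFLAG_EXTENDED_PIDS_PORTS 0x100
    #define DFLAG_NEW_FLOATS          0x800

    int main(void)
    {
        unsigned our_flags = DFLAG_EXTENDED_REFERENCES
                           | DFLAG_EXTENDED_PIDS_PORTS
                           | DFLAG_FUN_TAGS
                           | DFLAG_NEW_FUN_TAGS
                           | DFLAG_NEW_FLOATS;     /* advertised in send_name */
        unsigned peer_flags = DFLAG_EXTENDED_REFERENCES
                            | DFLAG_EXTENDED_PIDS_PORTS;

        printf("we offer 0x%x\n", our_flags);
        if (!(peer_flags & DFLAG_NEW_FLOATS)) {
            /* same test as recv_challenge(): refuse peers that cannot
             * handle the binary float encoding */
            fprintf(stderr, "peer cannot handle binary float encoding\n");
            return 1;
        }
        return 0;
    }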
diff --git a/lib/erl_interface/src/decode/decode_double.c b/lib/erl_interface/src/decode/decode_double.c
index 66dbe474ec..ed6e39655e 100644
--- a/lib/erl_interface/src/decode/decode_double.c
+++ b/lib/erl_interface/src/decode/decode_double.c
@@ -1,19 +1,19 @@
/*
* %CopyrightBegin%
- *
- * Copyright Ericsson AB 1998-2009. All Rights Reserved.
- *
+ *
+ * Copyright Ericsson AB 1998-2010. All Rights Reserved.
+ *
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
* compliance with the License. You should have received a copy of the
* Erlang Public License along with this software. If not, it can be
* retrieved online at http://www.erlang.org/.
- *
+ *
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
* the License for the specific language governing rights and limitations
* under the License.
- *
+ *
* %CopyrightEnd%
*/
#include <stdio.h>
@@ -26,14 +26,22 @@ int ei_decode_double(const char *buf, int *index, double *p)
{
const char *s = buf + *index;
const char *s0 = s;
- double f;
+ FloatExt f;
- if (get8(s) != ERL_FLOAT_EXT) return -1;
-
- if (sscanf(s, "%lf", &f) != 1) return -1;
+ switch (get8(s)) {
+ case ERL_FLOAT_EXT:
+ if (sscanf(s, "%lf", &f.d) != 1) return -1;
+ s += 31;
+ break;
+ case NEW_FLOAT_EXT:
+ /* IEEE 754 format */
+ f.val = get64be(s);
+ break;
+ default:
+ return -1;
+ }
- s += 31;
- if (p) *p = f;
+ if (p) *p = f.d;
*index += s-s0;
return 0;
}
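With the hunk above, ei_decode_double accepts both the legacy 31-byte "%.20e" string format (ERL_FLOAT_EXT) and the new 8-byte IEEE 754 format (NEW_FLOAT_EXT). A small usage sketch, assuming the program is linked against erl_interface's libei; the buffer bytes are hand-assembled for this example (131 is the external-format version byte, 70 is 'F', and 0x3FF8000000000000 is 1.5 as a big-endian IEEE 754 double):

    #include <stdio.h>
    #include <ei.h>

    int main(void)
    {
        const char buf[] = {(char)131, 'F',
                            0x3F, (char)0xF8, 0, 0, 0, 0, 0, 0};
        int index = 0, version;
        double d;

        if (ei_decode_version(buf, &index, &version) < 0) return 1;
        /* works for both ERL_FLOAT_EXT and NEW_FLOAT_EXT terms */
        if (ei_decode_double(buf, &index, &d) < 0) return 1;
        printf("decoded %f, index advanced to %d\n", d, index); /* 1.5, 10 */
        return 0;
    }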
diff --git a/lib/erl_interface/src/decode/decode_skip.c b/lib/erl_interface/src/decode/decode_skip.c
index 316b5bee98..f6c5d861ab 100644
--- a/lib/erl_interface/src/decode/decode_skip.c
+++ b/lib/erl_interface/src/decode/decode_skip.c
@@ -1,19 +1,19 @@
/*
* %CopyrightBegin%
- *
- * Copyright Ericsson AB 2002-2009. All Rights Reserved.
- *
+ *
+ * Copyright Ericsson AB 2002-2010. All Rights Reserved.
+ *
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
* compliance with the License. You should have received a copy of the
* Erlang Public License along with this software. If not, it can be
* retrieved online at http://www.erlang.org/.
- *
+ *
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
* the License for the specific language governing rights and limitations
* under the License.
- *
+ *
* %CopyrightEnd%
*/
#include "eidef.h"
@@ -77,6 +77,7 @@ int ei_skip_term(const char* buf, int* index)
if (ei_decode_big(buf, index, NULL) < 0) return -1;
break;
case ERL_FLOAT_EXT:
+ case NEW_FLOAT_EXT:
if (ei_decode_double(buf, index, NULL) < 0) return -1;
break;
case ERL_FUN_EXT:
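ei_skip_term handles the new tag simply by delegating to ei_decode_double, which knows both payload sizes. For reference, a float-only skipping helper would have to distinguish the two encodings like this (skip_float is a hypothetical illustration, not an ei function; the sizes come from the encodings touched in this patch):

    /* Minimal sketch: real code should call ei_skip_term(), which covers
     * every tag, not just floats. */
    static int skip_float(const char *buf, int *index)
    {
        switch ((unsigned char)buf[*index]) {
        case 'c':  /* ERL_FLOAT_EXT: tag + 31-byte "%.20e" text */
            *index += 1 + 31;
            return 0;
        case 'F':  /* NEW_FLOAT_EXT: tag + 8-byte big-endian IEEE 754 value */
            *index += 1 + 8;
            return 0;
        default:
            return -1;
        }
    }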
diff --git a/lib/erl_interface/src/encode/encode_double.c b/lib/erl_interface/src/encode/encode_double.c
index 53f3d52ba6..148a49f73a 100644
--- a/lib/erl_interface/src/encode/encode_double.c
+++ b/lib/erl_interface/src/encode/encode_double.c
@@ -1,19 +1,19 @@
/*
* %CopyrightBegin%
- *
- * Copyright Ericsson AB 1998-2009. All Rights Reserved.
- *
+ *
+ * Copyright Ericsson AB 1998-2010. All Rights Reserved.
+ *
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
* compliance with the License. You should have received a copy of the
* Erlang Public License along with this software. If not, it can be
* retrieved online at http://www.erlang.org/.
- *
+ *
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
* the License for the specific language governing rights and limitations
* under the License.
- *
+ *
* %CopyrightEnd%
*/
#include <stdio.h>
@@ -27,13 +27,13 @@ int ei_encode_double(char *buf, int *index, double p)
char *s = buf + *index;
char *s0 = s;
- if (!buf) s ++;
+ if (!buf)
+ s += 9;
else {
- put8(s,ERL_FLOAT_EXT);
- memset(s, 0, 31);
- sprintf(s, "%.20e", p);
+ /* IEEE 754 format */
+ put8(s, NEW_FLOAT_EXT);
+ put64be(s, ((FloatExt*)&p)->val);
}
- s += 31;
*index += s-s0;
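Note the sizing branch: with a NULL buffer the encoder now reserves 9 bytes (tag plus 8-byte IEEE 754 value) instead of 32. The usual two-pass calling pattern is unaffected; a usage sketch, assuming libei is linked in:

    #include <stdio.h>
    #include <stdlib.h>
    #include <ei.h>

    int main(void)
    {
        int index = 0;
        char *buf;

        /* first pass: NULL buffer only advances the index (size calculation) */
        ei_encode_version(NULL, &index);
        ei_encode_double(NULL, &index, 3.14);
        buf = malloc(index);                  /* index == 1 + 9 == 10 */
        if (buf == NULL) return 1;

        /* second pass: encode for real */
        index = 0;
        ei_encode_version(buf, &index);
        ei_encode_double(buf, &index, 3.14);  /* writes 'F' + 8 IEEE 754 bytes */

        printf("encoded term uses %d bytes\n", index);
        free(buf);
        return 0;
    }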
diff --git a/lib/erl_interface/src/legacy/decode_term.c b/lib/erl_interface/src/legacy/decode_term.c
index ef29d6f57d..796cebdfef 100644
--- a/lib/erl_interface/src/legacy/decode_term.c
+++ b/lib/erl_interface/src/legacy/decode_term.c
@@ -1,19 +1,19 @@
/*
* %CopyrightBegin%
- *
- * Copyright Ericsson AB 1998-2009. All Rights Reserved.
- *
+ *
+ * Copyright Ericsson AB 1998-2010. All Rights Reserved.
+ *
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
* compliance with the License. You should have received a copy of the
* Erlang Public License along with this software. If not, it can be
* retrieved online at http://www.erlang.org/.
- *
+ *
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
* the License for the specific language governing rights and limitations
* under the License.
- *
+ *
* %CopyrightEnd%
*/
#include "eidef.h"
@@ -59,6 +59,7 @@ int ei_decode_term(const char *buf, int *index, void *t)
return ei_decode_long(buf,index,NULL);
case ERL_FLOAT_EXT:
+ case NEW_FLOAT_EXT:
return ei_decode_double(buf,index,NULL);
case ERL_ATOM_EXT:
diff --git a/lib/erl_interface/src/legacy/erl_marshal.c b/lib/erl_interface/src/legacy/erl_marshal.c
index 4b5f28178f..c57c552b90 100644
--- a/lib/erl_interface/src/legacy/erl_marshal.c
+++ b/lib/erl_interface/src/legacy/erl_marshal.c
@@ -1,19 +1,19 @@
/*
* %CopyrightBegin%
- *
- * Copyright Ericsson AB 1996-2009. All Rights Reserved.
- *
+ *
+ * Copyright Ericsson AB 1996-2010. All Rights Reserved.
+ *
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
* compliance with the License. You should have received a copy of the
* Erlang Public License along with this software. If not, it can be
* retrieved online at http://www.erlang.org/.
- *
+ *
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
* the License for the specific language governing rights and limitations
* under the License.
- *
+ *
* %CopyrightEnd%
*/
/*
@@ -102,6 +102,7 @@ void erl_init_marshal(void)
cmp_array[ERL_SMALL_INTEGER_EXT] = 1;
cmp_array[ERL_INTEGER_EXT] = 1;
cmp_array[ERL_FLOAT_EXT] = 1;
+ cmp_array[NEW_FLOAT_EXT] = 1;
cmp_array[ERL_SMALL_BIG_EXT] = 1;
cmp_array[ERL_LARGE_BIG_EXT] = 1;
cmp_array[ERL_ATOM_EXT] = 2;
@@ -124,6 +125,7 @@ void erl_init_marshal(void)
cmp_num_class[ERL_SMALL_INTEGER_EXT] = SMALL;
cmp_num_class[ERL_INTEGER_EXT] = SMALL;
cmp_num_class[ERL_FLOAT_EXT] = FLOAT;
+ cmp_num_class[NEW_FLOAT_EXT] = FLOAT;
cmp_num_class[ERL_SMALL_BIG_EXT] = BIG;
cmp_num_class[ERL_LARGE_BIG_EXT] = BIG;
init_cmp_num_class_p = 0;
@@ -1008,10 +1010,13 @@ static ETERM *erl_decode_it(unsigned char **ext)
return ep;
case ERL_FLOAT_EXT:
+ case NEW_FLOAT_EXT:
ERL_TYPE(ep) = ERL_FLOAT;
- if (sscanf((char *) *ext, "%lf", &ff) != 1)
+ cp = (char *) *ext;
+ i = -1;
+ if (ei_decode_double(cp, &i, &ff) == -1)
goto failure;
- *ext += 31;
+ *ext += i;
ep->uval.fval.f = ff;
return ep;
@@ -1176,6 +1181,7 @@ unsigned char erl_ext_type(unsigned char *ext)
case ERL_LARGE_TUPLE_EXT:
return ERL_TUPLE;
case ERL_FLOAT_EXT:
+ case NEW_FLOAT_EXT:
return ERL_FLOAT;
case ERL_BINARY_EXT:
return ERL_BINARY;
@@ -1218,6 +1224,7 @@ int erl_ext_size(unsigned char *t)
case ERL_BINARY_EXT:
case ERL_STRING_EXT:
case ERL_FLOAT_EXT:
+ case NEW_FLOAT_EXT:
case ERL_SMALL_BIG_EXT:
case ERL_LARGE_BIG_EXT:
return 0;
@@ -1332,6 +1339,9 @@ static int jump(unsigned char **ext)
case ERL_FLOAT_EXT:
*ext += 31;
break;
+ case NEW_FLOAT_EXT:
+ *ext += 8;
+ break;
case ERL_BINARY_EXT:
i = (**ext << 24) | ((*ext)[1] << 16) |((*ext)[2] << 8) | (*ext)[3];
*ext += 4+i;
@@ -1696,12 +1706,15 @@ static int cmp_exe2(unsigned char **e1, unsigned char **e2)
}
return 0;
case ERL_FLOAT_EXT:
- if (sscanf((char *) *e1, "%lf", &ff1) != 1)
- return -1;
- *e1 += 31;
- if (sscanf((char *) *e2, "%lf", &ff2) != 1)
- return -1;
- *e2 += 31;
+ case NEW_FLOAT_EXT:
+ i = -1;
+ if (ei_decode_double((char *) *e1, &i, &ff1) != 0)
+ return -1;
+ *e1 += i;
+ j = -1;
+ if (ei_decode_double((char *) *e2, &j, &ff2) != 0)
+ return -1;
+ *e2 += j;
return cmp_floats(ff1,ff2);
case ERL_BINARY_EXT:
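Routing both operands through ei_decode_double means floats compare correctly even when the two terms were encoded in different formats. A standalone sketch of the same idea using only the public API; the buffers are hand-assembled for illustration (0x4000000000000000 is 2.0 as a big-endian IEEE 754 double):

    #include <stdio.h>
    #include <string.h>
    #include <ei.h>

    int main(void)
    {
        char old_fmt[1 + 31];
        const char new_fmt[] = {'F', 0x40, 0, 0, 0, 0, 0, 0, 0};  /* 2.0 */
        int i1 = 0, i2 = 0;
        double d1, d2;

        /* legacy ERL_FLOAT_EXT: tag 'c' followed by a 31-byte "%.20e" string */
        memset(old_fmt, 0, sizeof(old_fmt));
        old_fmt[0] = 'c';
        sprintf(old_fmt + 1, "%.20e", 2.0);

        if (ei_decode_double(old_fmt, &i1, &d1) < 0) return 1;
        if (ei_decode_double(new_fmt, &i2, &d2) < 0) return 1;
        printf("%s\n", d1 == d2 ? "equal" : "different");  /* prints "equal" */
        return 0;
    }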
diff --git a/lib/erl_interface/src/misc/ei_decode_term.c b/lib/erl_interface/src/misc/ei_decode_term.c
index 7b95ff232f..ddcbfa5a9a 100644
--- a/lib/erl_interface/src/misc/ei_decode_term.c
+++ b/lib/erl_interface/src/misc/ei_decode_term.c
@@ -1,19 +1,19 @@
/*
* %CopyrightBegin%
- *
- * Copyright Ericsson AB 2001-2009. All Rights Reserved.
- *
+ *
+ * Copyright Ericsson AB 2001-2010. All Rights Reserved.
+ *
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
* compliance with the License. You should have received a copy of the
* Erlang Public License along with this software. If not, it can be
* retrieved online at http://www.erlang.org/.
- *
+ *
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
* the License for the specific language governing rights and limitations
* under the License.
- *
+ *
* %CopyrightEnd%
*
@@ -25,9 +25,9 @@
#include "ei_decode_term.h"
#include "putget.h"
-/* Returns 1 if term is decoded, 0 if term is OK, but not decoded here
- and -1 if something is wrong.
- ONLY changes index if term is decoded (return value 1)! */
+/* Returns 0 on successful decoding, -1 on error, and 1 if the term seems
+ alright, but does not fit in the term structure. If it returns 0, the
+ index will be incremented, and the term contains the decoded term. */
int ei_decode_ei_term(const char* buf, int* index, ei_term* term)
{
@@ -46,11 +46,8 @@ int ei_decode_ei_term(const char* buf, int* index, ei_term* term)
term->value.i_val = get32be(s);
break;
case ERL_FLOAT_EXT:
- if (s[30]) return -1;
- if (sscanf(s, "%lf", &f) != 1) return -1;
- s += 31;
- term->value.d_val = f;
- break;
+ case NEW_FLOAT_EXT:
+ return ei_decode_double(buf, index, &term->value.d_val);
case ERL_ATOM_EXT:
len = get16be(s);
memcpy(term->value.atom_name, s, len);
diff --git a/lib/erl_interface/src/misc/ei_printterm.c b/lib/erl_interface/src/misc/ei_printterm.c
index 8d0eef5e79..98473f780e 100644
--- a/lib/erl_interface/src/misc/ei_printterm.c
+++ b/lib/erl_interface/src/misc/ei_printterm.c
@@ -1,19 +1,19 @@
/*
* %CopyrightBegin%
- *
- * Copyright Ericsson AB 2001-2009. All Rights Reserved.
- *
+ *
+ * Copyright Ericsson AB 2001-2010. All Rights Reserved.
+ *
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
* compliance with the License. You should have received a copy of the
* Erlang Public License along with this software. If not, it can be
* retrieved online at http://www.erlang.org/.
- *
+ *
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
* the License for the specific language governing rights and limitations
* under the License.
- *
+ *
* %CopyrightEnd%
*
@@ -272,6 +272,7 @@ static int print_term(FILE* fp, ei_x_buff* x,
break;
case ERL_FLOAT_EXT:
+ case NEW_FLOAT_EXT:
if (ei_decode_double(buf, index, &d) < 0) goto err;
ch_written += xprintf(fp, x, "%f", d);
break;
diff --git a/lib/erl_interface/src/misc/get_type.c b/lib/erl_interface/src/misc/get_type.c
index d67a6a80d3..2a680d0f94 100644
--- a/lib/erl_interface/src/misc/get_type.c
+++ b/lib/erl_interface/src/misc/get_type.c
@@ -1,19 +1,19 @@
/*
* %CopyrightBegin%
- *
- * Copyright Ericsson AB 1998-2009. All Rights Reserved.
- *
+ *
+ * Copyright Ericsson AB 1998-2010. All Rights Reserved.
+ *
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
* compliance with the License. You should have received a copy of the
* Erlang Public License along with this software. If not, it can be
* retrieved online at http://www.erlang.org/.
- *
+ *
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
* the License for the specific language governing rights and limitations
* under the License.
- *
+ *
* %CopyrightEnd%
*
@@ -122,7 +122,12 @@ int ei_get_type_internal(const char *buf, const int *index,
case ERL_STRING_EXT:
*len = get16be(s);
break;
-
+
+ case ERL_FLOAT_EXT:
+ case NEW_FLOAT_EXT:
+ *type = ERL_FLOAT_EXT;
+ break;
+
case ERL_LARGE_TUPLE_EXT:
case ERL_LIST_EXT:
case ERL_BINARY_EXT:
diff --git a/lib/erl_interface/src/misc/putget.h b/lib/erl_interface/src/misc/putget.h
index 98d9ebb64c..7a43de324b 100644
--- a/lib/erl_interface/src/misc/putget.h
+++ b/lib/erl_interface/src/misc/putget.h
@@ -1,19 +1,19 @@
/*
* %CopyrightBegin%
- *
- * Copyright Ericsson AB 1998-2009. All Rights Reserved.
- *
+ *
+ * Copyright Ericsson AB 1998-2010. All Rights Reserved.
+ *
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
* compliance with the License. You should have received a copy of the
* Erlang Public License along with this software. If not, it can be
* retrieved online at http://www.erlang.org/.
- *
+ *
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
* the License for the specific language governing rights and limitations
* under the License.
- *
+ *
* %CopyrightEnd%
*
@@ -54,6 +54,18 @@
(s) += 4; \
} while (0)
+#define put64be(s,n) do { \
+ (s)[0] = ((n) >> 56) & 0xff; \
+ (s)[1] = ((n) >> 48) & 0xff; \
+ (s)[2] = ((n) >> 40) & 0xff; \
+ (s)[3] = ((n) >> 32) & 0xff; \
+ (s)[4] = ((n) >> 24) & 0xff; \
+ (s)[5] = ((n) >> 16) & 0xff; \
+ (s)[6] = ((n) >> 8) & 0xff; \
+ (s)[7] = (n) & 0xff; \
+ (s) += 8; \
+} while (0)
+
#define get8(s) \
((s) += 1, \
((unsigned char *)(s))[-1] & 0xff)
@@ -82,4 +94,20 @@
(((unsigned char *)(s))[-2] << 8) | \
((unsigned char *)(s))[-1]))
+#define get64be(s) \
+ ((s) += 8, \
+ (((EI_ULONGLONG)((unsigned char *)(s))[-8] << 56) | \
+ ((EI_ULONGLONG)((unsigned char *)(s))[-7] << 48) | \
+ ((EI_ULONGLONG)((unsigned char *)(s))[-6] << 40) | \
+ ((EI_ULONGLONG)((unsigned char *)(s))[-5] << 32) | \
+ ((EI_ULONGLONG)((unsigned char *)(s))[-4] << 24) | \
+ ((EI_ULONGLONG)((unsigned char *)(s))[-3] << 16) | \
+ ((EI_ULONGLONG)((unsigned char *)(s))[-2] << 8) | \
+ (EI_ULONGLONG)((unsigned char *)(s))[-1]))
+
+typedef union float_ext {
+ double d;
+ EI_ULONGLONG val;
+} FloatExt;
+
#endif /* _PUTGET_H */
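The new put64be/get64be macros together with the FloatExt union are what turn a double into eight big-endian bytes and back. They live in an internal header, so the sketch below re-expresses the same round trip with plain standalone functions for illustration only:

    #include <stdio.h>
    #include <string.h>

    typedef unsigned long long u64;

    static void put64be(unsigned char *s, u64 n)
    {
        int i;
        for (i = 0; i < 8; i++)
            s[i] = (unsigned char)(n >> (56 - 8 * i));  /* most significant first */
    }

    static u64 get64be(const unsigned char *s)
    {
        u64 n = 0;
        int i;
        for (i = 0; i < 8; i++)
            n = (n << 8) | s[i];
        return n;
    }

    int main(void)
    {
        double in = -123.456, out;
        unsigned char wire[8];
        u64 bits;

        memcpy(&bits, &in, 8);   /* same effect as writing FloatExt.d, reading .val */
        put64be(wire, bits);     /* big-endian on the wire */

        bits = get64be(wire);
        memcpy(&out, &bits, 8);
        printf("%s\n", in == out ? "round-trip ok" : "mismatch");
        return 0;
    }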
diff --git a/lib/erl_interface/src/misc/show_msg.c b/lib/erl_interface/src/misc/show_msg.c
index 25865d6f8e..14bea5e01f 100644
--- a/lib/erl_interface/src/misc/show_msg.c
+++ b/lib/erl_interface/src/misc/show_msg.c
@@ -1,19 +1,19 @@
/*
* %CopyrightBegin%
- *
- * Copyright Ericsson AB 1998-2009. All Rights Reserved.
- *
+ *
+ * Copyright Ericsson AB 1998-2010. All Rights Reserved.
+ *
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
* compliance with the License. You should have received a copy of the
* Erlang Public License along with this software. If not, it can be
* retrieved online at http://www.erlang.org/.
- *
+ *
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
* the License for the specific language governing rights and limitations
* under the License.
- *
+ *
* %CopyrightEnd%
*
@@ -400,6 +400,7 @@ static void show_term(const char *termbuf, int *index, FILE *stream)
break;
case ERL_FLOAT_EXT:
+ case NEW_FLOAT_EXT:
ei_decode_double(termbuf,index,&fnum);
fprintf(stream,"%f",fnum);
break;
diff --git a/lib/erl_interface/test/ei_decode_SUITE.erl b/lib/erl_interface/test/ei_decode_SUITE.erl
index ea528728ab..c6858b45ad 100644
--- a/lib/erl_interface/test/ei_decode_SUITE.erl
+++ b/lib/erl_interface/test/ei_decode_SUITE.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2004-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2004-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -181,22 +181,9 @@ test_ei_decode_misc(suite) -> [];
test_ei_decode_misc(Config) when is_list(Config) ->
?line P = runner:start(?test_ei_decode_misc),
-% ?line <<131>> = get_binaries(P),
-
-% ?line {term,F} = get_term(P),
-% ?line match_float(F, 0.0),
-% ?line {term,F} = get_term(P),
-% ?line match_float(F, 0.0),
-
-% ?line {term,F} = get_term(P),
-% ?line true = match_float(F, -1.0),
-% ?line {term,F} = get_term(P),
-% ?line true = match_float(F, -1.0),
-
-% ?line {term,F} = get_term(P),
-% ?line true = match_float(F, 1.0),
-% ?line {term,F} = get_term(P),
-% ?line true = match_float(F, 1.0),
+ ?line send_term_as_binary(P,0.0),
+ ?line send_term_as_binary(P,-1.0),
+ ?line send_term_as_binary(P,1.0),
?line send_term_as_binary(P,false),
?line send_term_as_binary(P,true),
diff --git a/lib/erl_interface/test/ei_decode_SUITE_data/ei_decode_test.c b/lib/erl_interface/test/ei_decode_SUITE_data/ei_decode_test.c
index d81ea88437..5447e2deb3 100644
--- a/lib/erl_interface/test/ei_decode_SUITE_data/ei_decode_test.c
+++ b/lib/erl_interface/test/ei_decode_SUITE_data/ei_decode_test.c
@@ -1,19 +1,19 @@
/*
* %CopyrightBegin%
- *
- * Copyright Ericsson AB 2004-2009. All Rights Reserved.
- *
+ *
+ * Copyright Ericsson AB 2004-2010. All Rights Reserved.
+ *
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
* compliance with the License. You should have received a copy of the
* Erlang Public License along with this software. If not, it can be
* retrieved online at http://www.erlang.org/.
- *
+ *
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
* the License for the specific language governing rights and limitations
* under the License.
- *
+ *
* %CopyrightEnd%
*/
@@ -515,11 +515,10 @@ TESTCASE(test_ei_decode_misc)
/*
EI_DECODE_0(decode_version);
*/
-/*
- EI_DECODE_2(decode_double, 0.0);
- EI_DECODE_2(decode_double, -1.0);
- EI_DECODE_2(decode_double, 1.0);
-*/
+ EI_DECODE_2(decode_double, 32, double, 0.0);
+ EI_DECODE_2(decode_double, 32, double, -1.0);
+ EI_DECODE_2(decode_double, 32, double, 1.0);
+
EI_DECODE_2(decode_boolean, 8, int, 0);
EI_DECODE_2(decode_boolean, 7, int, 1);
diff --git a/lib/erl_interface/test/ei_encode_SUITE.erl b/lib/erl_interface/test/ei_encode_SUITE.erl
index fb790eb7c3..6b9de4f093 100644
--- a/lib/erl_interface/test/ei_encode_SUITE.erl
+++ b/lib/erl_interface/test/ei_encode_SUITE.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2004-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2004-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -181,20 +181,14 @@ test_ei_encode_misc(Config) when is_list(Config) ->
?line <<131>> = get_binaries(P),
-% ?line {term,F} = get_term(P),
-% ?line match_float(F, 0.0),
-% ?line {term,F} = get_term(P),
-% ?line match_float(F, 0.0),
+ ?line {<<70,_:8/binary>>,F0} = get_buf_and_term(P),
+ ?line true = match_float(F0, 0.0),
-% ?line {term,F} = get_term(P),
-% ?line true = match_float(F, -1.0),
-% ?line {term,F} = get_term(P),
-% ?line true = match_float(F, -1.0),
+ ?line {<<70,_:8/binary>>,Fn1} = get_buf_and_term(P),
+ ?line true = match_float(Fn1, -1.0),
-% ?line {term,F} = get_term(P),
-% ?line true = match_float(F, 1.0),
-% ?line {term,F} = get_term(P),
-% ?line true = match_float(F, 1.0),
+ ?line {<<70,_:8/binary>>,Fp1} = get_buf_and_term(P),
+ ?line true = match_float(Fp1, 1.0),
?line {<<100,0,5,"false">>,false} = get_buf_and_term(P),
?line {<<100,0,4,"true">> ,true} = get_buf_and_term(P),
@@ -310,6 +304,8 @@ get_term(P) ->
%%
+match_float(F, Match) when is_float(F), is_float(Match), F == Match ->
+ true;
match_float(F, Match) when is_float(F), F > Match*0.99, F < Match*1.01 ->
true.
diff --git a/lib/erl_interface/test/ei_encode_SUITE_data/ei_encode_test.c b/lib/erl_interface/test/ei_encode_SUITE_data/ei_encode_test.c
index f8de0b7878..c373658152 100644
--- a/lib/erl_interface/test/ei_encode_SUITE_data/ei_encode_test.c
+++ b/lib/erl_interface/test/ei_encode_SUITE_data/ei_encode_test.c
@@ -1,19 +1,19 @@
/*
* %CopyrightBegin%
- *
- * Copyright Ericsson AB 2004-2009. All Rights Reserved.
- *
+ *
+ * Copyright Ericsson AB 2004-2010. All Rights Reserved.
+ *
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
* compliance with the License. You should have received a copy of the
* Erlang Public License along with this software. If not, it can be
* retrieved online at http://www.erlang.org/.
- *
+ *
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
* the License for the specific language governing rights and limitations
* under the License.
- *
+ *
* %CopyrightEnd%
*/
@@ -350,13 +350,13 @@ TESTCASE(test_ei_encode_char)
TESTCASE(test_ei_encode_misc)
{
EI_ENCODE_0(encode_version);
-/*
+
EI_ENCODE_1(encode_double, 0.0);
EI_ENCODE_1(encode_double, -1.0);
EI_ENCODE_1(encode_double, 1.0);
-*/
+
EI_ENCODE_1(encode_boolean, 0) /* Only case it should be false */;
EI_ENCODE_1(encode_boolean, 1);
diff --git a/lib/erl_interface/test/ei_tmo_SUITE.erl b/lib/erl_interface/test/ei_tmo_SUITE.erl
index 0c211aa148..e7a2465421 100644
--- a/lib/erl_interface/test/ei_tmo_SUITE.erl
+++ b/lib/erl_interface/test/ei_tmo_SUITE.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2003-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2003-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -349,10 +349,12 @@ make_and_check_dummy() ->
-define(DFLAG_ATOM_CACHE,2).
-define(DFLAG_EXTENDED_REFERENCES,4).
-define(DFLAG_EXTENDED_PIDS_PORTS,16#100).
+-define(DFLAG_NEW_FLOATS,16#800).
-define(DFLAG_DIST_MONITOR,8).
%% From R9 and forward extended references is compulsory
--define(COMPULSORY_DFLAGS, (?DFLAG_EXTENDED_REFERENCES bor ?DFLAG_EXTENDED_PIDS_PORTS)).
+%% From R14 and forward new floats are compulsory
+-define(COMPULSORY_DFLAGS, (?DFLAG_EXTENDED_REFERENCES bor ?DFLAG_EXTENDED_PIDS_PORTS bor ?DFLAG_NEW_FLOATS)).
-define(shutdown(X), exit(X)).
-define(int16(X), [((X) bsr 8) band 16#ff, (X) band 16#ff]).
diff --git a/lib/erl_interface/vsn.mk b/lib/erl_interface/vsn.mk
index 589b9e2f9c..672b1be55f 100644
--- a/lib/erl_interface/vsn.mk
+++ b/lib/erl_interface/vsn.mk
@@ -1 +1 @@
-EI_VSN = 3.6.5
+EI_VSN = 3.7
diff --git a/lib/gs/contribs/bonk/bonk.erl b/lib/gs/contribs/bonk/bonk.erl
index 12d94f6c5e..79f01bf659 100644
--- a/lib/gs/contribs/bonk/bonk.erl
+++ b/lib/gs/contribs/bonk/bonk.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -33,10 +33,10 @@ run() ->
run([ColorMode]) -> % This is for the start script...
run(ColorMode);
-run(ColorMode) when atom(ColorMode) ->
+run(ColorMode) when is_atom(ColorMode) ->
GS = gs:start(),
- SoundPid=spawn_link(bonk_sound,start,[]),
- {H,M,S}=time(),
+ SoundPid = spawn_link(bonk_sound,start,[]),
+ {H,M,S} = time(),
random:seed(H*13,M*7,S*3),
{SqrPids, Bmps, Colors} = create_board(GS, ColorMode),
{ScoreL,_File} = get_highscore(),
@@ -96,7 +96,7 @@ init(SoundPid, SqrPids, Bmps, Colors) ->
game(SoundPid, SqrPids, Bmps, Colors, Scores) ->
receive
- {gs, _Square, buttonpress, SqrPid, [1 | _Rest]} when pid(SqrPid) ->
+ {gs, _Square, buttonpress, SqrPid, [1 | _Rest]} when is_pid(SqrPid) ->
SqrPid ! bonk,
game(SoundPid, SqrPids, Bmps, Colors, Scores);
{gs, _Id, buttonpress, _Data, [Butt | _Rest]} when Butt =/= 1 ->
@@ -224,11 +224,9 @@ update_score(SoundPid, SqrPids, Scores) ->
send_to_all([], _Msg) ->
true;
-
-send_to_all([Pid|Rest],Msg) when pid(Pid) ->
+send_to_all([Pid|Rest],Msg) when is_pid(Pid) ->
Pid ! Msg,
send_to_all(Rest,Msg);
-
send_to_all([_Else|Rest],Msg) ->
send_to_all(Rest,Msg).
@@ -460,7 +458,7 @@ update_scorelist(SoundPid, Scores) ->
{ScoreL,FileName} = get_highscore(),
New_scorelist=update_scorelist_2(ScoreL, Score, 0, SoundPid),
display_highscore(New_scorelist),
- case file:open(FileName, write) of
+ case file:open(FileName, [write]) of
{error,_} ->
true;
{ok,FD} ->
@@ -559,7 +557,7 @@ display_about() ->
{activebg, BGColor}]),
gs:create(text, aboutText, aboutCan, [{width, Wid-30}, {coords, [{15, 0}]},
{fg, TextColor}, {justify, center}]),
- case file:open(lists:append(bonk_dir(),"bonk.txt"), read) of
+ case file:open(lists:append(bonk_dir(),"bonk.txt"), [read]) of
{ok, Fd} ->
write_text(Fd, "", io:get_line(Fd, "")),
file:close(Fd);
diff --git a/lib/gs/contribs/othello/othello_adt.erl b/lib/gs/contribs/othello/othello_adt.erl
index d1d3ec950b..fb60c30b89 100644
--- a/lib/gs/contribs/othello/othello_adt.erl
+++ b/lib/gs/contribs/othello/othello_adt.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -375,29 +375,29 @@ is_good(Colour,H,Board) ->
false.
is_good_0(_,_,false,_) -> false;
-is_good_0(_,H,D,_) when integer(H), integer(D), H+D<0 -> false;
-is_good_0(_,H,D,_) when integer(H), integer(D), H+D>63 -> false;
-is_good_0(black,H,D,Board) when integer(H), integer(D) ->
+is_good_0(_,H,D,_) when is_integer(H), is_integer(D), H+D<0 -> false;
+is_good_0(_,H,D,_) when is_integer(H), is_integer(D), H+D>63 -> false;
+is_good_0(black,H,D,Board) when is_integer(H), is_integer(D) ->
case element((H+D)+1,Board) of
white -> is_good_1(black,H+D,dir(H+D,D),Board);
_ -> false
end;
-is_good_0(white,H,D,Board) when integer(H), integer(D) ->
+is_good_0(white,H,D,Board) when is_integer(H), is_integer(D) ->
case element((H+D)+1,Board) of
black -> is_good_1(white,H+D,dir(H+D,D),Board);
_ -> false
end.
is_good_1(_,_,false,_) -> false;
-is_good_1(_,H,D,_) when integer(H), integer(D), H+D<0 -> false;
-is_good_1(_,H,D,_) when integer(H), integer(D), H+D>63 -> false;
-is_good_1(black,H,D,Board) when integer(H), integer(D) ->
+is_good_1(_,H,D,_) when is_integer(H), is_integer(D), H+D<0 -> false;
+is_good_1(_,H,D,_) when is_integer(H), is_integer(D), H+D>63 -> false;
+is_good_1(black,H,D,Board) when is_integer(H), is_integer(D) ->
case element((H+D)+1,Board) of
white -> is_good_1(black,H+D,dir(H+D,D),Board);
black -> throw(true);
_ -> false
end;
-is_good_1(white,H,D,Board) when integer(H), integer(D) ->
+is_good_1(white,H,D,Board) when is_integer(H), is_integer(D) ->
case element((H+D)+1,Board) of
black -> is_good_1(white,H+D,dir(H+D,D),Board);
white -> throw(true);
@@ -429,15 +429,15 @@ turn(Colour,H,D,Board) ->
Board
end.
-turn_0(_,H,D,B) when integer(H), integer(D), H+D<0 -> B;
-turn_0(_,H,D,B) when integer(H), integer(D), H+D>63 -> B;
-turn_0(black,H,D,Board) when integer(H), integer(D) ->
+turn_0(_,H,D,B) when is_integer(H), is_integer(D), H+D<0 -> B;
+turn_0(_,H,D,B) when is_integer(H), is_integer(D), H+D>63 -> B;
+turn_0(black,H,D,Board) when is_integer(H), is_integer(D) ->
E = H+D,
case element(E+1,Board) of
white -> turn_0(black,H+D,D,swap(black,E,Board));
_ -> Board
end;
-turn_0(white,H,D,Board) when integer(H), integer(D) ->
+turn_0(white,H,D,Board) when is_integer(H), is_integer(D) ->
E = H+D,
case element(E+1,Board) of
black -> turn_0(white,H+D,D,swap(white,E,Board));
@@ -450,7 +450,7 @@ turn_0(white,H,D,Board) when integer(H), integer(D) ->
%% Neighbours are not changed !!
%%-------------------------------------------------------
-swap(Colour,Pos,Board) when integer(Pos) ->
+swap(Colour,Pos,Board) when is_integer(Pos) ->
setelement(Pos+1,Board,Colour).
score(Pos) -> score1({col(Pos),row(Pos)}).
diff --git a/lib/gs/src/tool_utils.erl b/lib/gs/src/tool_utils.erl
index 697dd07151..b07e92c4f0 100644
--- a/lib/gs/src/tool_utils.erl
+++ b/lib/gs/src/tool_utils.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -224,11 +224,11 @@ help_win(Type, Parent, Strings) ->
{Wbtn0,Hbtn0} = gs:read(Lbl, {font_wh,{Font,"Cancel"}}),
%% Compute size of the objects and adjust the graphics accordingly
- Wbtn = max(Wbtn0+10, ?Wbtn),
- Hbtn = max(Hbtn0+10, ?Hbtn),
- Hent = max(Hent0+10, ?Hent),
- Wlbl = max(Wlbl0, max(Nbtn*Wbtn+(Nbtn-1)*?PAD, ?Wlbl)),
- Hlbl = max(Hlbl0, ?Hlbl),
+ Wbtn = erlang:max(Wbtn0+10, ?Wbtn),
+ Hbtn = erlang:max(Hbtn0+10, ?Hbtn),
+ Hent = erlang:max(Hent0+10, ?Hent),
+ Wlbl = erlang:max(Wlbl0, erlang:max(Nbtn*Wbtn+(Nbtn-1)*?PAD, ?Wlbl)),
+ Hlbl = erlang:max(Hlbl0, ?Hlbl),
Wwin = ?PAD+Wlbl+?PAD,
@@ -297,9 +297,6 @@ data("Yes") -> {helpwin,yes};
data("No") -> {helpwin,no};
data("Cancel") -> {helpwin,cancel}.
-max(X, Y) when X>Y -> X;
-max(_X, Y) -> Y.
-
get_coords(Parent, W, H) ->
case gs:read(Parent, x) of
X when is_integer(X) ->
diff --git a/lib/gs/vsn.mk b/lib/gs/vsn.mk
index 701d0e178d..35976d556a 100644
--- a/lib/gs/vsn.mk
+++ b/lib/gs/vsn.mk
@@ -1,2 +1,2 @@
-GS_VSN = 1.5.11
+GS_VSN = 1.5.12
diff --git a/lib/hipe/cerl/cerl_closurean.erl b/lib/hipe/cerl/cerl_closurean.erl
index 12771668ac..021acd5b35 100644
--- a/lib/hipe/cerl/cerl_closurean.erl
+++ b/lib/hipe/cerl/cerl_closurean.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2003-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2003-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%% =====================================================================
@@ -808,7 +808,7 @@ take_work({Queue0, Set0}) ->
is_escape_op(match_fail, 1) -> false;
is_escape_op(F, A) when is_atom(F), is_integer(A) -> true.
--spec is_escape_op(module(), atom(), arity()) -> boolean().
+-spec is_escape_op(atom(), atom(), arity()) -> boolean().
is_escape_op(erlang, error, 1) -> false;
is_escape_op(erlang, error, 2) -> false;
@@ -825,7 +825,7 @@ is_escape_op(M, F, A) when is_atom(M), is_atom(F), is_integer(A) -> true.
is_literal_op(match_fail, 1) -> true;
is_literal_op(F, A) when is_atom(F), is_integer(A) -> false.
--spec is_literal_op(module(), atom(), arity()) -> boolean().
+-spec is_literal_op(atom(), atom(), arity()) -> boolean().
is_literal_op(erlang, '+', 2) -> true;
is_literal_op(erlang, '-', 2) -> true;
diff --git a/lib/hipe/cerl/cerl_messagean.erl b/lib/hipe/cerl/cerl_messagean.erl
index 0753376e7d..6dd93adaa3 100644
--- a/lib/hipe/cerl/cerl_messagean.erl
+++ b/lib/hipe/cerl/cerl_messagean.erl
@@ -1,19 +1,19 @@
%% =====================================================================
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2004-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2004-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%% Message analysis of Core Erlang programs.
@@ -1043,7 +1043,7 @@ get_deps(L, Dep) ->
%% is_escape_op(_F, _A) -> [].
--spec is_escape_op(module(), atom(), arity()) -> [arity()].
+-spec is_escape_op(atom(), atom(), arity()) -> [arity()].
is_escape_op(erlang, '!', 2) -> [2];
is_escape_op(erlang, send, 2) -> [2];
@@ -1064,7 +1064,7 @@ is_escape_op(_M, _F, _A) -> [].
is_imm_op(match_fail, 1) -> true;
is_imm_op(_, _) -> false.
--spec is_imm_op(module(), atom(), arity()) -> boolean().
+-spec is_imm_op(atom(), atom(), arity()) -> boolean().
is_imm_op(erlang, self, 0) -> true;
is_imm_op(erlang, '=:=', 2) -> true;
@@ -1102,4 +1102,4 @@ is_imm_op(erlang, throw, 1) -> true;
is_imm_op(erlang, exit, 1) -> true;
is_imm_op(erlang, error, 1) -> true;
is_imm_op(erlang, error, 2) -> true;
-is_imm_op(_, _, _) -> false.
+is_imm_op(_M, _F, _A) -> false.
diff --git a/lib/hipe/cerl/erl_bif_types.erl b/lib/hipe/cerl/erl_bif_types.erl
index be3073c0e6..838f9429f0 100644
--- a/lib/hipe/cerl/erl_bif_types.erl
+++ b/lib/hipe/cerl/erl_bif_types.erl
@@ -143,6 +143,51 @@ type(M, F, A) ->
-spec type(atom(), atom(), arity(), [erl_types:erl_type()]) -> erl_types:erl_type().
+%%-- binary -------------------------------------------------------------------
+type(binary, at, 2, Xs) ->
+ strict(arg_types(binary, at, 2), Xs, fun(_) -> t_integer() end);
+type(binary, bin_to_list, Arity, Xs) when 1 =< Arity, Arity =< 3 ->
+ strict(arg_types(binary, bin_to_list, Arity), Xs,
+ fun(_) -> t_list(t_integer()) end);
+type(binary, compile_pattern, 1, Xs) ->
+ strict(arg_types(binary, compile_pattern, 1), Xs,
+ fun(_) -> t_tuple([t_atom(bm),t_binary()]) end);
+type(binary, copy, Arity, Xs) when Arity =:= 1; Arity =:= 2 ->
+ strict(arg_types(binary, copy, Arity), Xs,
+ fun(_) -> t_binary() end);
+type(binary, decode_unsigned, Arity, Xs) when Arity =:= 1; Arity =:= 2 ->
+ strict(arg_types(binary, decode_unsigned, Arity), Xs,
+ fun(_) -> t_non_neg_integer() end);
+type(binary, encode_unsigned, Arity, Xs) when Arity =:= 1; Arity =:= 2 ->
+ strict(arg_types(binary, encode_unsigned, Arity), Xs,
+ fun(_) -> t_binary() end);
+type(binary, first, 1, Xs) ->
+ strict(arg_types(binary, first, 1), Xs, fun(_) -> t_non_neg_integer() end);
+type(binary, last, 1, Xs) ->
+ strict(arg_types(binary, last, 1), Xs, fun(_) -> t_non_neg_integer() end);
+type(binary, list_to_bin, 1, Xs) ->
+ type(erlang, list_to_binary, 1, Xs);
+type(binary, longest_common_prefix, 1, Xs) ->
+ strict(arg_types(binary, longest_common_prefix, 1), Xs,
+ fun(_) -> t_integer() end);
+type(binary, longest_common_suffix, 1, Xs) ->
+ strict(arg_types(binary, longest_common_suffix, 1), Xs,
+ fun(_) -> t_integer() end);
+type(binary, match, Arity, Xs) when Arity =:= 2; Arity =:= 3 ->
+ strict(arg_types(binary, match, Arity), Xs,
+ fun(_) ->
+ t_sup(t_atom('nomatch'), t_binary_canonical_part())
+ end);
+type(binary, matches, Arity, Xs) when Arity =:= 2; Arity =:= 3 ->
+ strict(arg_types(binary, matches, Arity), Xs,
+ fun(_) -> t_list(t_binary_canonical_part()) end);
+type(binary, part, 2, Xs) ->
+ type(erlang, binary_part, 2, Xs);
+type(binary, part, 3, Xs) ->
+ type(erlang, binary_part, 3, Xs);
+type(binary, referenced_byte_size, 1, Xs) ->
+ strict(arg_types(binary, referenced_byte_size, 1), Xs,
+ fun(_) -> t_non_neg_integer() end);
%%-- code ---------------------------------------------------------------------
type(code, add_path, 1, Xs) ->
strict(arg_types(code, add_path, 1), Xs,
@@ -665,6 +710,14 @@ type(erlang, 'bnot', 1, Xs) ->
%% strict(arg_types(erlang, 'bnot', 1), Xs, fun (_) -> t_integer() end);
type(erlang, abs, 1, Xs) ->
strict(arg_types(erlang, abs, 1), Xs, fun ([X]) -> X end);
+type(erlang, adler32, 1, Xs) ->
+ strict(arg_types(erlang, adler32, 1), Xs, fun (_) -> t_adler32() end);
+type(erlang, adler32, 2, Xs) ->
+ strict(arg_types(erlang, adler32, 2), Xs, fun (_) -> t_adler32() end);
+type(erlang, adler32_combine, 3, Xs) ->
+ strict(arg_types(erlang, adler32_combine, 3), Xs,
+ fun (_) -> t_adler32() end);
+type(erlang, append, 2, Xs) -> type(erlang, '++', 2, Xs); % alias
type(erlang, append_element, 2, Xs) ->
strict(arg_types(erlang, append_element, 2), Xs, fun (_) -> t_tuple() end);
type(erlang, apply, 2, Xs) ->
@@ -683,6 +736,10 @@ type(erlang, atom_to_binary, 2, Xs) ->
strict(arg_types(erlang, atom_to_binary, 2), Xs, fun (_) -> t_binary() end);
type(erlang, atom_to_list, 1, Xs) ->
strict(arg_types(erlang, atom_to_list, 1), Xs, fun (_) -> t_string() end);
+type(erlang, binary_part, 2, Xs) ->
+ strict(arg_types(erlang, binary_part, 2), Xs, fun (_) -> t_binary() end);
+type(erlang, binary_part, 3, Xs) ->
+ strict(arg_types(erlang, binary_part, 3), Xs, fun (_) -> t_binary() end);
type(erlang, binary_to_atom, 2, Xs) ->
strict(arg_types(erlang, binary_to_atom, 2), Xs, fun (_) -> t_atom() end);
type(erlang, binary_to_existing_atom, 2, Xs) ->
@@ -726,11 +783,11 @@ type(erlang, check_process_code, 2, Xs) ->
type(erlang, concat_binary, 1, Xs) ->
strict(arg_types(erlang, concat_binary, 1), Xs, fun (_) -> t_binary() end);
type(erlang, crc32, 1, Xs) ->
- strict(arg_types(erlang, crc32, 1), Xs, fun (_) -> t_integer() end);
+ strict(arg_types(erlang, crc32, 1), Xs, fun (_) -> t_crc32() end);
type(erlang, crc32, 2, Xs) ->
- strict(arg_types(erlang, crc32, 2), Xs, fun (_) -> t_integer() end);
+ strict(arg_types(erlang, crc32, 2), Xs, fun (_) -> t_crc32() end);
type(erlang, crc32_combine, 3, Xs) ->
- strict(arg_types(erlang, crc32_combine, 3), Xs, fun (_) -> t_integer() end);
+ strict(arg_types(erlang, crc32_combine, 3), Xs, fun (_) -> t_crc32() end);
type(erlang, date, 0, _) ->
t_date();
type(erlang, decode_packet, 3, Xs) ->
@@ -752,6 +809,10 @@ type(erlang, demonitor, 2, Xs) ->
type(erlang, disconnect_node, 1, Xs) ->
strict(arg_types(erlang, disconnect_node, 1), Xs, fun (_) -> t_boolean() end);
type(erlang, display, 1, _) -> t_atom('true');
+type(erlang, display_string, 1, Xs) ->
+ strict(arg_types(erlang, display_string, 1), Xs, fun(_) -> t_atom('true') end);
+type(erlang, display_nl, 0, _) ->
+ t_atom('true');
type(erlang, dist_exit, 3, Xs) ->
strict(arg_types(erlang, dist_exit, 3), Xs, fun (_) -> t_atom('true') end);
type(erlang, element, 2, Xs) ->
@@ -802,6 +863,8 @@ type(erlang, fun_to_list, 1, Xs) ->
type(erlang, garbage_collect, 0, _) -> t_atom('true');
type(erlang, garbage_collect, 1, Xs) ->
strict(arg_types(erlang, garbage_collect, 1), Xs, fun (_) -> t_boolean() end);
+type(erlang, garbage_collect_message_area, 0, _) ->
+ t_boolean();
type(erlang, get, 0, _) -> t_list(t_tuple(2));
type(erlang, get, 1, _) -> t_any(); % | t_atom('undefined')
type(erlang, get_cookie, 0, _) -> t_atom(); % | t_atom('nocookie')
@@ -1155,6 +1218,10 @@ type(erlang, monitor_node, 2, Xs) ->
type(erlang, monitor_node, 3, Xs) ->
strict(arg_types(erlang, monitor_node, 3), Xs,
fun (_) -> t_atom('true') end);
+type(erlang, nif_error, 1, _) ->
+ t_any();
+type(erlang, nif_error, 2, Xs) ->
+ strict(arg_types(erlang, nif_error, 2), Xs, fun (_) -> t_any() end);
type(erlang, node, 0, _) -> t_node();
type(erlang, node, 1, Xs) ->
strict(arg_types(erlang, node, 1), Xs, fun (_) -> t_node() end);
@@ -1173,8 +1240,8 @@ type(erlang, phash2, 2, Xs) ->
strict(arg_types(erlang, phash2, 2), Xs, fun (_) -> t_non_neg_integer() end);
type(erlang, pid_to_list, 1, Xs) ->
strict(arg_types(erlang, pid_to_list, 1), Xs, fun (_) -> t_string() end);
-type(erlang, port_call, 3, Xs) ->
- strict(arg_types(erlang, port_call, 3), Xs, fun (_) -> t_any() end);
+type(erlang, port_call, Arity, Xs) when Arity =:= 2; Arity =:= 3 ->
+ strict(arg_types(erlang, port_call, Arity), Xs, fun (_) -> t_any() end);
type(erlang, port_close, 1, Xs) ->
strict(arg_types(erlang, port_close, 1), Xs,
fun (_) -> t_atom('true') end);
@@ -1503,6 +1570,7 @@ type(erlang, statistics, 1, Xs) ->
T_statistics_1
end
end);
+type(erlang, subtract, 2, Xs) -> type(erlang, '--', 2, Xs); % alias
type(erlang, suspend_process, 1, Xs) ->
strict(arg_types(erlang, suspend_process, 1), Xs,
fun (_) -> t_atom('true') end);
@@ -1595,7 +1663,7 @@ type(erlang, system_info, 1, Xs) ->
t_sup([t_atom('false'),
t_list(t_tuple([t_atom(), t_any()]))]);
['endian'] ->
- t_sup([t_atom('big'), t_atom('little')]);
+ t_endian();
['fullsweep_after'] ->
t_tuple([t_atom('fullsweep_after'), t_non_neg_integer()]);
['garbage_collection'] ->
@@ -1607,9 +1675,8 @@ type(erlang, system_info, 1, Xs) ->
['heap_type'] ->
t_sup([t_atom('private'), t_atom('hybrid')]);
['hipe_architecture'] ->
- t_sup([t_atom('amd64'), t_atom('arm'),
- t_atom('powerpc'), t_atom('undefined'),
- t_atom('ultrasparc'), t_atom('x86')]);
+ t_atoms(['amd64', 'arm', 'powerpc', 'ppc64',
+ 'undefined', 'ultrasparc', 'x86']);
['info'] ->
t_binary();
['internal_cpu_topology'] -> %% Undocumented internal feature
@@ -1785,11 +1852,21 @@ type(erts_debug, disassemble, 1, Xs) ->
fun (_) -> t_sup([t_atom('false'),
t_atom('undef'),
t_tuple([t_integer(), t_binary(), t_mfa()])]) end);
+type(erts_debug, display, 1, _) ->
+ t_string();
type(erts_debug, dist_ext_to_term, 2, Xs) ->
strict(arg_types(erts_debug, dist_ext_to_term, 2), Xs,
fun (_) -> t_any() end);
+type(erts_debug, dump_monitors, 1, Xs) ->
+ strict(arg_types(erts_debug, dump_monitors, 1), Xs,
+ fun(_) -> t_atom('true') end);
+type(erts_debug, dump_links, 1, Xs) ->
+ strict(arg_types(erts_debug, dump_links, 1), Xs,
+ fun(_) -> t_atom('true') end);
type(erts_debug, flat_size, 1, Xs) ->
strict(arg_types(erts_debug, flat_size, 1), Xs, fun (_) -> t_integer() end);
+type(erts_debug, get_internal_state, 1, _) ->
+ t_any();
type(erts_debug, lock_counters, 1, Xs) ->
strict(arg_types(erts_debug, lock_counters, 1), Xs,
fun ([Arg]) ->
@@ -1810,6 +1887,8 @@ type(erts_debug, lock_counters, 1, Xs) ->
end);
type(erts_debug, same, 2, Xs) ->
strict(arg_types(erts_debug, same, 2), Xs, fun (_) -> t_boolean() end);
+type(erts_debug, set_internal_state, 2, _) ->
+ t_any();
%%-- ets ----------------------------------------------------------------------
type(ets, all, 0, _) ->
t_list(t_tab());
@@ -2099,7 +2178,7 @@ type(hipe_bifs, set_native_address, 3, Xs) ->
strict(arg_types(hipe_bifs, set_native_address, 3), Xs,
fun (_) -> t_nil() end);
type(hipe_bifs, system_crc, 1, Xs) ->
- strict(arg_types(hipe_bifs, system_crc, 1), Xs, fun (_) -> t_integer() end);
+ strict(arg_types(hipe_bifs, system_crc, 1), Xs, fun (_) -> t_crc32() end);
type(hipe_bifs, term_to_word, 1, Xs) ->
strict(arg_types(hipe_bifs, term_to_word, 1), Xs,
fun (_) -> t_integer() end);
@@ -3194,6 +3273,53 @@ arith(Op, X1, X2) ->
-spec arg_types(atom(), atom(), arity()) -> [erl_types:erl_type()] | 'unknown'.
+%%------- binary --------------------------------------------------------------
+arg_types(binary, at, 2) ->
+ [t_binary(), t_non_neg_integer()];
+arg_types(binary, bin_to_list, 1) ->
+ [t_binary()];
+arg_types(binary, bin_to_list, 2) ->
+ [t_binary(), t_binary_part()];
+arg_types(binary, bin_to_list, 3) ->
+ [t_binary(), t_integer(), t_non_neg_integer()];
+arg_types(binary, compile_pattern, 1) ->
+ [t_sup(t_binary(), t_list(t_binary()))];
+arg_types(binary, copy, 1) ->
+ [t_binary()];
+arg_types(binary, copy, 2) ->
+ [t_binary(), t_non_neg_integer()];
+arg_types(binary, decode_unsigned, 1) ->
+ [t_binary()];
+arg_types(binary, decode_unsigned, 2) ->
+ [t_binary(), t_endian()];
+arg_types(binary, encode_unsigned, 1) ->
+ [t_non_neg_integer()];
+arg_types(binary, encode_unsigned, 2) ->
+ [t_non_neg_integer(), t_endian()];
+arg_types(binary, first, 1) ->
+ [t_binary()];
+arg_types(binary, last, 1) ->
+ [t_binary()];
+arg_types(binary, list_to_bin, 1) ->
+ arg_types(erlang, list_to_binary, 1);
+arg_types(binary, longest_common_prefix, 1) ->
+ [t_list(t_binary())];
+arg_types(binary, longest_common_suffix, 1) ->
+ [t_list(t_binary())];
+arg_types(binary, match, 2) ->
+ [t_binary(), t_binary_pattern()];
+arg_types(binary, match, 3) ->
+ [t_binary(), t_binary_pattern(), t_binary_options()];
+arg_types(binary, matches, 2) ->
+ [t_binary(), t_binary_pattern()];
+arg_types(binary, matches, 3) ->
+ [t_binary(), t_binary_pattern(), t_binary_options()];
+arg_types(binary, part, 2) ->
+ arg_types(erlang, binary_part, 2);
+arg_types(binary, part, 3) ->
+ arg_types(erlang, binary_part, 3);
+arg_types(binary, referenced_byte_size, 1) ->
+ [t_binary()];
%%------- code ----------------------------------------------------------------
arg_types(code, add_path, 1) ->
[t_string()];
@@ -3375,6 +3501,14 @@ arg_types(erlang, 'bnot', 1) ->
[t_integer()];
arg_types(erlang, abs, 1) ->
[t_number()];
+arg_types(erlang, adler32, 1) ->
+ [t_iodata()];
+arg_types(erlang, adler32, 2) ->
+ [t_adler32(), t_iodata()];
+arg_types(erlang, adler32_combine, 3) ->
+ [t_adler32(), t_adler32(), t_non_neg_integer()];
+arg_types(erlang, append, 2) ->
+ arg_types(erlang, '++', 2);
arg_types(erlang, append_element, 2) ->
[t_tuple(), t_any()];
arg_types(erlang, apply, 2) ->
@@ -3388,6 +3522,10 @@ arg_types(erlang, atom_to_binary, 2) ->
[t_atom(), t_encoding_a2b()];
arg_types(erlang, atom_to_list, 1) ->
[t_atom()];
+arg_types(erlang, binary_part, 2) ->
+ [t_binary(), t_tuple([t_integer(),t_integer()])];
+arg_types(erlang, binary_part, 3) ->
+ [t_binary(), t_integer(), t_integer()];
arg_types(erlang, binary_to_atom, 2) ->
[t_binary(), t_encoding_a2b()];
arg_types(erlang, binary_to_existing_atom, 2) ->
@@ -3423,9 +3561,9 @@ arg_types(erlang, concat_binary, 1) ->
arg_types(erlang, crc32, 1) ->
[t_iodata()];
arg_types(erlang, crc32, 2) ->
- [t_integer(), t_iodata()];
+ [t_crc32(), t_iodata()];
arg_types(erlang, crc32_combine, 3) ->
- [t_integer(), t_integer(), t_integer()];
+ [t_crc32(), t_crc32(), t_non_neg_integer()];
arg_types(erlang, date, 0) ->
[];
arg_types(erlang, decode_packet, 3) ->
@@ -3440,6 +3578,10 @@ arg_types(erlang, disconnect_node, 1) ->
[t_node()];
arg_types(erlang, display, 1) ->
[t_any()];
+arg_types(erlang, display_nl, 0) ->
+ [];
+arg_types(erlang, display_string, 1) ->
+ [t_string()];
arg_types(erlang, dist_exit, 3) ->
[t_pid(), t_dist_exit(), t_sup(t_pid(), t_port())];
arg_types(erlang, element, 2) ->
@@ -3476,6 +3618,8 @@ arg_types(erlang, garbage_collect, 0) ->
[];
arg_types(erlang, garbage_collect, 1) ->
[t_pid()];
+arg_types(erlang, garbage_collect_message_area, 0) ->
+ [];
arg_types(erlang, get, 0) ->
[];
arg_types(erlang, get, 1) ->
@@ -3628,6 +3772,10 @@ arg_types(erlang, monitor_node, 2) ->
[t_node(), t_boolean()];
arg_types(erlang, monitor_node, 3) ->
[t_node(), t_boolean(), t_list(t_atom('allow_passive_connect'))];
+arg_types(erlang, nif_error, 1) ->
+ [t_any()];
+arg_types(erlang, nif_error, 2) ->
+ [t_any(), t_list()];
arg_types(erlang, node, 0) ->
[];
arg_types(erlang, node, 1) ->
@@ -3668,6 +3816,8 @@ arg_types(erlang, phash2, 2) ->
[t_any(), t_pos_integer()];
arg_types(erlang, pid_to_list, 1) ->
[t_pid()];
+arg_types(erlang, port_call, 2) ->
+ [t_sup(t_port(), t_atom()), t_any()];
arg_types(erlang, port_call, 3) ->
[t_sup(t_port(), t_atom()), t_integer(), t_any()];
arg_types(erlang, port_close, 1) ->
@@ -3795,6 +3945,8 @@ arg_types(erlang, statistics, 1) ->
t_atom('run_queue'),
t_atom('runtime'),
t_atom('wall_clock')])];
+arg_types(erlang, subtract, 2) ->
+ arg_types(erlang, '--', 2);
arg_types(erlang, suspend_process, 1) ->
[t_pid()];
arg_types(erlang, suspend_process, 2) ->
@@ -3917,10 +4069,18 @@ arg_types(erts_debug, breakpoint, 2) ->
[t_tuple([t_atom(), t_atom(), t_sup(t_integer(), t_atom('_'))]), t_boolean()];
arg_types(erts_debug, disassemble, 1) ->
[t_sup(t_mfa(), t_integer())];
+arg_types(erts_debug, display, 1) ->
+ [t_any()];
arg_types(erts_debug, dist_ext_to_term, 2) ->
[t_tuple(), t_binary()];
+arg_types(erts_debug, dump_monitors, 1) ->
+ [t_sup([t_pid(),t_atom()])];
+arg_types(erts_debug, dump_links, 1) ->
+ [t_sup([t_pid(),t_atom(),t_port()])];
arg_types(erts_debug, flat_size, 1) ->
[t_any()];
+arg_types(erts_debug, get_internal_state, 1) ->
+ [t_any()];
arg_types(erts_debug, lock_counters, 1) ->
[t_sup([t_atom(enabled),
t_atom(info),
@@ -3929,6 +4089,8 @@ arg_types(erts_debug, lock_counters, 1) ->
t_tuple([t_atom(process_locks), t_boolean()])])];
arg_types(erts_debug, same, 2) ->
[t_any(), t_any()];
+arg_types(erts_debug, set_internal_state, 2) ->
+ [t_any(), t_any()];
%%------- ets -----------------------------------------------------------------
arg_types(ets, all, 0) ->
[];
@@ -4109,7 +4271,7 @@ arg_types(hipe_bifs, call_count_off, 1) ->
arg_types(hipe_bifs, call_count_on, 1) ->
[t_mfa()];
arg_types(hipe_bifs, check_crc, 1) ->
- [t_integer()];
+ [t_crc32()];
arg_types(hipe_bifs, enter_code, 2) ->
[t_binary(), t_sup(t_nil(), t_tuple())];
arg_types(hipe_bifs, enter_sdesc, 1) ->
@@ -4153,7 +4315,7 @@ arg_types(hipe_bifs, set_funinfo_native_address, 3) ->
arg_types(hipe_bifs, set_native_address, 3) ->
[t_mfa(), t_integer(), t_boolean()];
arg_types(hipe_bifs, system_crc, 1) ->
- [t_integer()];
+ [t_crc32()];
arg_types(hipe_bifs, term_to_word, 1) ->
[t_any()];
arg_types(hipe_bifs, update_code_size, 3) ->
@@ -4467,6 +4629,30 @@ t_httppacket() ->
t_sup([t_HttpRequest(), t_HttpResponse(),
t_HttpHeader(), t_atom('http_eoh'), t_HttpError()]).
+t_endian() ->
+ t_sup([t_atom('big'), t_atom('little')]).
+
+%% =====================================================================
+%% Types for the binary module
+%% =====================================================================
+
+t_binary_part() ->
+ t_tuple([t_non_neg_integer(),t_integer()]).
+
+t_binary_canonical_part() ->
+ t_tuple([t_non_neg_integer(),t_non_neg_integer()]).
+
+t_binary_pattern() ->
+ t_sup([t_binary(),
+ t_list(t_binary()),
+ t_binary_compiled_pattern()]).
+
+t_binary_compiled_pattern() ->
+ t_tuple([t_atom('cp'),t_binary()]).
+
+t_binary_options() ->
+ t_list(t_tuple([t_atom('scope'),t_binary_part()])).
+
%% =====================================================================
%% HTTP types documented in R12B-4
%% =====================================================================
@@ -4549,6 +4735,12 @@ t_code_loaded_fname_or_status() ->
%% These are used for the built-in functions of 'erlang'
%% =====================================================================
+t_adler32() ->
+ t_non_neg_integer().
+
+t_crc32() ->
+ t_non_neg_integer().
+
t_decode_packet_option() ->
t_sup([t_tuple([t_atom('packet_size'), t_non_neg_integer()]),
t_tuple([t_atom('line_length'), t_non_neg_integer()])]).
diff --git a/lib/hipe/cerl/erl_types.erl b/lib/hipe/cerl/erl_types.erl
index f3b91b3953..9a40be6d14 100644
--- a/lib/hipe/cerl/erl_types.erl
+++ b/lib/hipe/cerl/erl_types.erl
@@ -178,7 +178,7 @@
t_remote/3,
t_string/0,
t_struct_from_opaque/2,
- t_solve_remote/2,
+ t_solve_remote/3,
t_subst/2,
t_subtract/2,
t_subtract_list/2,
@@ -205,11 +205,14 @@
t_var_name/1,
%% t_assign_variables_to_subtype/2,
type_is_defined/3,
+ record_field_diffs_to_string/2,
subst_all_vars_to_any/1,
- lift_list_to_pos_empty/1
+ lift_list_to_pos_empty/1,
+ is_erl_type/1
]).
%%-define(DO_ERL_TYPES_TEST, true).
+-compile({no_auto_import,[min/2,max/2]}).
-ifdef(DO_ERL_TYPES_TEST).
-export([test/0]).
@@ -221,6 +224,8 @@
-export([t_is_identifier/1]).
-endif.
+-export_type([erl_type/0]).
+
%%=============================================================================
%%
%% Definition of the type structure
@@ -299,7 +304,7 @@
%% Auxiliary types and convenient macros
%%
--type parse_form() :: {atom(), _, _} | {atom(), _, _, _}. %% XXX: Temporarily
+-type parse_form() :: {atom(), _, _} | {atom(), _, _, _} | {'op', _, _, _, _}. %% XXX: Temporarily
-type rng_elem() :: 'pos_inf' | 'neg_inf' | integer().
-record(int_set, {set :: [integer()]}).
@@ -398,7 +403,8 @@ t_is_none(_) -> false.
-spec t_opaque(module(), atom(), [_], erl_type()) -> erl_type().
t_opaque(Mod, Name, Args, Struct) ->
- ?opaque(set_singleton(#opaque{mod=Mod, name=Name, args=Args, struct=Struct})).
+ O = #opaque{mod = Mod, name = Name, args = Args, struct = Struct},
+ ?opaque(set_singleton(O)).
-spec t_is_opaque(erl_type()) -> boolean().
@@ -427,7 +433,7 @@ t_opaque_structure(?opaque(Elements)) ->
t_opaque_module(?opaque(Elements)) ->
case ordsets:size(Elements) of
1 ->
- [#opaque{mod=Module}] = ordsets:to_list(Elements),
+ [#opaque{mod = Module}] = ordsets:to_list(Elements),
Module;
_ -> throw({error, "Unexpected multiple opaque types"})
end.
@@ -631,7 +637,7 @@ t_unopaque_on_mismatch(GenType, Type, Opaques) ->
case t_inf(GenType, Type) of
?none ->
Unopaqued = t_unopaque(Type, Opaques),
- %% Unions might be a problem, must investigate.
+ %% XXX: Unions might be a problem, must investigate.
case t_inf(GenType, Unopaqued) of
?none -> Type;
_ -> Unopaqued
@@ -643,12 +649,12 @@ t_unopaque_on_mismatch(GenType, Type, Opaques) ->
module_builtin_opaques(Module) ->
[O || O <- all_opaque_builtins(), t_opaque_module(O) =:= Module].
-
+
%%-----------------------------------------------------------------------------
-%% Remote types
-%% These types are used for preprocessing they should never reach the analysis stage
+%% Remote types: these types are used for preprocessing;
+%% they should never reach the analysis stage.
--spec t_remote(module(), atom(), [_]) -> erl_type().
+-spec t_remote(atom(), atom(), [erl_type()]) -> erl_type().
t_remote(Mod, Name, Args) ->
?remote(set_singleton(#remote{mod = Mod, name = Name, args = Args})).
@@ -658,126 +664,132 @@ t_remote(Mod, Name, Args) ->
t_is_remote(?remote(_)) -> true;
t_is_remote(_) -> false.
--spec t_solve_remote(erl_type(), dict()) -> erl_type().
+-spec t_solve_remote(erl_type(), set(), dict()) -> erl_type().
-t_solve_remote(Type , Records) ->
- {RT, _RR} = t_solve_remote(Type, Records, []),
+t_solve_remote(Type, ExpTypes, Records) ->
+ {RT, _RR} = t_solve_remote(Type, ExpTypes, Records, []),
RT.
-t_solve_remote(?function(Domain, Range), R, C) ->
- {RT1, RR1} = t_solve_remote(Domain, R, C),
- {RT2, RR2} = t_solve_remote(Range, R, C),
+t_solve_remote(?function(Domain, Range), ET, R, C) ->
+ {RT1, RR1} = t_solve_remote(Domain, ET, R, C),
+ {RT2, RR2} = t_solve_remote(Range, ET, R, C),
{?function(RT1, RT2), RR1 ++ RR2};
-t_solve_remote(?list(Types, Term, Size), R, C) ->
- {RT, RR} = t_solve_remote(Types, R, C),
+t_solve_remote(?list(Types, Term, Size), ET, R, C) ->
+ {RT, RR} = t_solve_remote(Types, ET, R, C),
{?list(RT, Term, Size), RR};
-t_solve_remote(?product(Types), R, C) ->
- {RL, RR} = list_solve_remote(Types, R, C),
+t_solve_remote(?product(Types), ET, R, C) ->
+ {RL, RR} = list_solve_remote(Types, ET, R, C),
{?product(RL), RR};
-t_solve_remote(?opaque(Set), R, C) ->
+t_solve_remote(?opaque(Set), ET, R, C) ->
List = ordsets:to_list(Set),
- {NewList, RR} = opaques_solve_remote(List, R, C),
+ {NewList, RR} = opaques_solve_remote(List, ET, R, C),
{?opaque(ordsets:from_list(NewList)), RR};
-t_solve_remote(?tuple(?any, _, _) = T, _R, _C) -> {T, []};
-t_solve_remote(?tuple(Types, Arity, Tag), R, C) ->
- {RL, RR} = list_solve_remote(Types, R, C),
+t_solve_remote(?tuple(?any, _, _) = T, _ET, _R, _C) -> {T, []};
+t_solve_remote(?tuple(Types, Arity, Tag), ET, R, C) ->
+ {RL, RR} = list_solve_remote(Types, ET, R, C),
{?tuple(RL, Arity, Tag), RR};
-t_solve_remote(?tuple_set(Set), R, C) ->
- {NewSet, RR} = tuples_solve_remote(Set, R, C),
+t_solve_remote(?tuple_set(Set), ET, R, C) ->
+ {NewSet, RR} = tuples_solve_remote(Set, ET, R, C),
{?tuple_set(NewSet), RR};
-t_solve_remote(?remote(Set), R, C) ->
+t_solve_remote(?remote(Set), ET, R, C) ->
RemoteList = ordsets:to_list(Set),
- {RL, RR} = list_solve_remote_type(RemoteList, R, C),
+ {RL, RR} = list_solve_remote_type(RemoteList, ET, R, C),
{t_sup(RL), RR};
-t_solve_remote(?union(List), R, C) ->
- {RL, RR} = list_solve_remote(List, R, C),
+t_solve_remote(?union(List), ET, R, C) ->
+ {RL, RR} = list_solve_remote(List, ET, R, C),
{t_sup(RL), RR};
-t_solve_remote(T, _R, _C) -> {T, []}.
+t_solve_remote(T, _ET, _R, _C) -> {T, []}.
t_solve_remote_type(#remote{mod = RemMod, name = Name, args = Args} = RemType,
- R, C) ->
+ ET, R, C) ->
+ ArgsLen = length(Args),
case dict:find(RemMod, R) of
error ->
- Msg = io_lib:format("Cannot locate module ~w to "
- "resolve the remote type: ~w:~w()~n",
- [RemMod, RemMod, Name]),
- throw({error, Msg});
+ self() ! {self(), ext_types, {RemMod, Name, ArgsLen}},
+ {t_any(), []};
{ok, RemDict} ->
- case lookup_type(Name, RemDict) of
- {type, {_Mod, Type, ArgNames}} when length(Args) =:= length(ArgNames) ->
- {NewType, NewCycle, NewRR} =
- case unfold(RemType, C) of
- true ->
- List = lists:zip(ArgNames, Args),
- TmpVarDict = dict:from_list(List),
- {t_from_form(Type, RemDict, TmpVarDict), [RemType|C], []};
- false -> {t_any(), C, [RemType]}
- end,
- {RT, RR} = t_solve_remote(NewType, R, NewCycle),
- RetRR = NewRR ++ RR,
- RT1 =
- case lists:member(RemType, RetRR) of
- true -> t_limit(RT, ?REC_TYPE_LIMIT);
- false -> RT
- end,
- {RT1, RetRR};
- {opaque, {Mod, Type, ArgNames}} when length(Args) =:= length(ArgNames) ->
- List = lists:zip(ArgNames, Args),
- TmpVarDict = dict:from_list(List),
- {Rep, NewCycle, NewRR} =
- case unfold(RemType, C) of
- true -> {t_from_form(Type, RemDict, TmpVarDict), [RemType|C], []};
- false -> {t_any(), C, [RemType]}
- end,
- {NewRep, RR} = t_solve_remote(Rep, R, NewCycle),
- RetRR = NewRR ++ RR,
- RT1 =
- case lists:member(RemType, RetRR) of
- true -> t_limit(NewRep, ?REC_TYPE_LIMIT);
- false -> NewRep
- end,
- {t_from_form({opaque, -1, Name, {Mod, Args, RT1}},
- RemDict, TmpVarDict),
- RetRR};
- {type, _} ->
- Msg = io_lib:format("Unknown remote type ~w\n", [Name]),
- throw({error, Msg});
- {opaque, _} ->
- Msg = io_lib:format("Unknown remote opaque type ~w\n", [Name]),
- throw({error, Msg});
- error ->
- Msg = io_lib:format("Unable to find remote type ~w:~w()\n",
- [RemMod, Name]),
- throw({error, Msg})
+ MFA = {RemMod, Name, ArgsLen},
+ case sets:is_element(MFA, ET) of
+ true ->
+ case lookup_type(Name, RemDict) of
+ {type, {_Mod, Type, ArgNames}} when ArgsLen =:= length(ArgNames) ->
+ {NewType, NewCycle, NewRR} =
+ case unfold(RemType, C) of
+ true ->
+ List = lists:zip(ArgNames, Args),
+ TmpVarDict = dict:from_list(List),
+ {t_from_form(Type, RemDict, TmpVarDict), [RemType|C], []};
+ false -> {t_any(), C, [RemType]}
+ end,
+ {RT, RR} = t_solve_remote(NewType, ET, R, NewCycle),
+ RetRR = NewRR ++ RR,
+ RT1 =
+ case lists:member(RemType, RetRR) of
+ true -> t_limit(RT, ?REC_TYPE_LIMIT);
+ false -> RT
+ end,
+ {RT1, RetRR};
+ {opaque, {Mod, Type, ArgNames}} when ArgsLen =:= length(ArgNames) ->
+ List = lists:zip(ArgNames, Args),
+ TmpVarDict = dict:from_list(List),
+ {Rep, NewCycle, NewRR} =
+ case unfold(RemType, C) of
+ true -> {t_from_form(Type, RemDict, TmpVarDict), [RemType|C], []};
+ false -> {t_any(), C, [RemType]}
+ end,
+ {NewRep, RR} = t_solve_remote(Rep, ET, R, NewCycle),
+ RetRR = NewRR ++ RR,
+ RT1 =
+ case lists:member(RemType, RetRR) of
+ true -> t_limit(NewRep, ?REC_TYPE_LIMIT);
+ false -> NewRep
+ end,
+ {t_from_form({opaque, -1, Name, {Mod, Args, RT1}},
+ RemDict, TmpVarDict),
+ RetRR};
+ {type, _} ->
+ Msg = io_lib:format("Unknown remote type ~w\n", [Name]),
+ throw({error, Msg});
+ {opaque, _} ->
+ Msg = io_lib:format("Unknown remote opaque type ~w\n", [Name]),
+ throw({error, Msg});
+ error ->
+ Msg = io_lib:format("Unable to find remote type ~w:~w()\n",
+ [RemMod, Name]),
+ throw({error, Msg})
+ end;
+ false ->
+ self() ! {self(), ext_types, {RemMod, Name, ArgsLen}},
+ {t_any(), []}
end
end.
-list_solve_remote([], _R, _C) ->
+list_solve_remote([], _ET, _R, _C) ->
{[], []};
-list_solve_remote([Type|Types], R, C) ->
- {RT, RR1} = t_solve_remote(Type, R, C),
- {RL, RR2} = list_solve_remote(Types, R, C),
+list_solve_remote([Type|Types], ET, R, C) ->
+ {RT, RR1} = t_solve_remote(Type, ET, R, C),
+ {RL, RR2} = list_solve_remote(Types, ET, R, C),
{[RT|RL], RR1 ++ RR2}.
-list_solve_remote_type([], _R, _C) ->
+list_solve_remote_type([], _ET, _R, _C) ->
{[], []};
-list_solve_remote_type([Type|Types], R, C) ->
- {RT, RR1} = t_solve_remote_type(Type, R, C),
- {RL, RR2} = list_solve_remote_type(Types, R, C),
+list_solve_remote_type([Type|Types], ET, R, C) ->
+ {RT, RR1} = t_solve_remote_type(Type, ET, R, C),
+ {RL, RR2} = list_solve_remote_type(Types, ET, R, C),
{[RT|RL], RR1 ++ RR2}.
-opaques_solve_remote([], _R, _C) ->
+opaques_solve_remote([], _ET, _R, _C) ->
{[], []};
-opaques_solve_remote([#opaque{struct = Struct} = Remote|Tail], R, C) ->
- {RT, RR1} = t_solve_remote(Struct, R, C),
- {LOp, RR2} = opaques_solve_remote(Tail, R, C),
+opaques_solve_remote([#opaque{struct = Struct} = Remote|Tail], ET, R, C) ->
+ {RT, RR1} = t_solve_remote(Struct, ET, R, C),
+ {LOp, RR2} = opaques_solve_remote(Tail, ET, R, C),
{[Remote#opaque{struct = RT}|LOp], RR1 ++ RR2}.
-tuples_solve_remote([], _R, _C) ->
+tuples_solve_remote([], _ET, _R, _C) ->
{[], []};
-tuples_solve_remote([{Sz, Tuples}|Tail], R, C) ->
- {RL, RR1} = list_solve_remote(Tuples, R, C),
- {LSzTpls, RR2} = tuples_solve_remote(Tail, R, C),
+tuples_solve_remote([{Sz, Tuples}|Tail], ET, R, C) ->
+ {RL, RR1} = list_solve_remote(Tuples, ET, R, C),
+ {LSzTpls, RR2} = tuples_solve_remote(Tail, ET, R, C),
{[{Sz, RL}|LSzTpls], RR1 ++ RR2}.
%%-----------------------------------------------------------------------------
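With these changes the solver no longer throws when the remote module cannot be located or the remote type is not in the exported-types set; it sends a {self(), ext_types, {Module, Name, Arity}} message to the calling process and falls back to t_any(). A hypothetical helper (not part of the patch) showing how a caller could drain those notifications after solving:

    %% Hypothetical sketch: collect the ext_types notifications that
    %% t_solve_remote/3 now sends instead of throwing an error.
    drain_ext_types() ->
        drain_ext_types([]).

    drain_ext_types(Acc) ->
        Self = self(),
        receive
            {Self, ext_types, {_M, _N, _A} = MNA} ->
                drain_ext_types([MNA | Acc])
        after 0 ->
            lists:usort(Acc)
        end.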
@@ -801,7 +813,7 @@ t_is_none_or_unit(?unit) -> true;
t_is_none_or_unit(_) -> false.
%%-----------------------------------------------------------------------------
-%% Atoms and the derived type bool
+%% Atoms and the derived type boolean
%%
-spec t_atom() -> erl_type().
@@ -1596,7 +1608,7 @@ t_set() ->
t_tid() ->
t_opaque(ets, tid, [], t_integer()).
--spec all_opaque_builtins() -> [erl_type()].
+-spec all_opaque_builtins() -> [erl_type(),...].
all_opaque_builtins() ->
[t_array(), t_dict(), t_digraph(), t_gb_set(),
@@ -2523,12 +2535,14 @@ t_subst(T, _Dict, _Fun) ->
%% Unification
%%
--spec t_unify(erl_type(), erl_type()) -> {erl_type(), [{_, erl_type()}]}.
+-type t_unify_ret() :: {erl_type(), [{_, erl_type()}]}.
+
+-spec t_unify(erl_type(), erl_type()) -> t_unify_ret().
t_unify(T1, T2) ->
t_unify(T1, T2, []).
--spec t_unify(erl_type(), erl_type(), [erl_type()]) -> {erl_type(), [{_, erl_type()}]}.
+-spec t_unify(erl_type(), erl_type(), [erl_type()]) -> t_unify_ret().
t_unify(T1, T2, Opaques) ->
{T, Dict} = t_unify(T1, T2, dict:new(), Opaques),
@@ -2541,7 +2555,7 @@ t_unify(?var(Id1) = T, ?var(Id2), Dict, Opaques) ->
error ->
case dict:find(Id2, Dict) of
error -> {T, dict:store(Id2, T, Dict)};
- {ok, Type} -> {Type, t_unify(T, Type, Dict, Opaques)}
+ {ok, Type} -> t_unify(T, Type, Dict, Opaques)
end;
{ok, Type1} ->
case dict:find(Id2, Dict) of
@@ -3298,28 +3312,44 @@ record_to_string(Tag, [_|Fields], FieldNames, RecDict) ->
FieldStrings = record_fields_to_string(Fields, FieldNames, RecDict, []),
"#" ++ atom_to_list(Tag) ++ "{" ++ sequence(FieldStrings, [], ",") ++ "}".
-record_fields_to_string([Field|Left1], [{FieldName, DeclaredType}|Left2],
- RecDict, Acc) ->
- PrintType =
- case t_is_equal(Field, DeclaredType) of
- true -> false;
+record_fields_to_string([F|Fs], [{FName, _DefType}|FDefs], RecDict, Acc) ->
+ NewAcc =
+ case t_is_any(F) orelse t_is_atom('undefined', F) of
+ true -> Acc;
false ->
- case t_is_any(DeclaredType) andalso t_is_atom(undefined, Field) of
- true -> false;
- false ->
- TmpType = t_subtract(DeclaredType, t_atom(undefined)),
- not t_is_equal(Field, TmpType)
- end
+ StrFV = atom_to_list(FName) ++ "::" ++ t_to_string(F, RecDict),
+ %% ActualDefType = t_subtract(DefType, t_atom('undefined')),
+ %% Str = case t_is_any(ActualDefType) of
+ %% true -> StrFV;
+ %% false -> StrFV ++ "::" ++ t_to_string(ActualDefType, RecDict)
+ %% end,
+ [StrFV|Acc]
end,
- case PrintType of
- false -> record_fields_to_string(Left1, Left2, RecDict, Acc);
- true ->
- String = atom_to_list(FieldName) ++ "::" ++ t_to_string(Field, RecDict),
- record_fields_to_string(Left1, Left2, RecDict, [String|Acc])
- end;
+ record_fields_to_string(Fs, FDefs, RecDict, NewAcc);
record_fields_to_string([], [], _RecDict, Acc) ->
lists:reverse(Acc).
+-spec record_field_diffs_to_string(erl_type(), dict()) -> string().
+
+record_field_diffs_to_string(?tuple([_|Fs], Arity, Tag), RecDict) ->
+ [TagAtom] = t_atom_vals(Tag),
+ {ok, FieldNames} = lookup_record(TagAtom, Arity-1, RecDict),
+ %% io:format("RecCElems = ~p\nRecTypes = ~p\n", [Fs, FieldNames]),
+ FieldDiffs = field_diffs(Fs, FieldNames, RecDict, []),
+ sequence(FieldDiffs, [], " and ").
+
+field_diffs([F|Fs], [{FName, DefType}|FDefs], RecDict, Acc) ->
+ NewAcc =
+ case t_is_subtype(F, DefType) of
+ true -> Acc;
+ false ->
+ Str = atom_to_list(FName) ++ "::" ++ t_to_string(DefType, RecDict),
+ [Str|Acc]
+ end,
+ field_diffs(Fs, FDefs, RecDict, NewAcc);
+field_diffs([], [], _, Acc) ->
+ lists:reverse(Acc).
+
comma_sequence(Types, RecDict) ->
List = [case T =:= ?any of
true -> "_";
@@ -3338,8 +3368,8 @@ sequence([], [], _Delimiter) ->
[];
sequence([T], Acc, _Delimiter) ->
lists:flatten(lists:reverse([T|Acc]));
-sequence([T|Left], Acc, Delimiter) ->
- sequence(Left, [T ++ Delimiter|Acc], Delimiter).
+sequence([T|Ts], Acc, Delimiter) ->
+ sequence(Ts, [T ++ Delimiter|Acc], Delimiter).
%%=============================================================================
%%
@@ -3386,6 +3416,18 @@ t_from_form({atom, _L, Atom}, _TypeNames, _RecDict, _VarDict) ->
{t_atom(Atom), []};
t_from_form({integer, _L, Int}, _TypeNames, _RecDict, _VarDict) ->
{t_integer(Int), []};
+t_from_form({op, _L, _Op, _Arg} = Op, _TypeNames, _RecDict, _VarDict) ->
+ case erl_eval:partial_eval(Op) of
+ {integer, _, Val} ->
+ {t_integer(Val), []};
+ _ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Op])})
+ end;
+t_from_form({op, _L, _Op, _Arg1, _Arg2} = Op, _TypeNames, _RecDict, _VarDict) ->
+ case erl_eval:partial_eval(Op) of
+ {integer, _, Val} ->
+ {t_integer(Val), []};
+ _ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Op])})
+ end;
t_from_form({type, _L, any, []}, _TypeNames, _RecDict, _VarDict) ->
{t_any(), []};
t_from_form({type, _L, arity, []}, _TypeNames, _RecDict, _VarDict) ->
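The new clauses let arithmetic expressions appear in type forms (for example a range bound written as 1 bsl 8) by reducing them with erl_eval:partial_eval/1 before building an integer type. An illustrative reduction on a hand-written parse form, matching the same {integer, _, Val} shape the clauses above expect (the function name is made up):

    %% Sketch only: how an operator form is reduced to an integer form.
    eval_op_form() ->
        %% Parse form of the expression "1 bsl 8".
        Op = {op, 1, 'bsl', {integer, 1, 1}, {integer, 1, 8}},
        {integer, _, 256} = erl_eval:partial_eval(Op),
        ok.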
@@ -3396,9 +3438,15 @@ t_from_form({type, _L, atom, []}, _TypeNames, _RecDict, _VarDict) ->
{t_atom(), []};
t_from_form({type, _L, binary, []}, _TypeNames, _RecDict, _VarDict) ->
{t_binary(), []};
-t_from_form({type, _L, binary, [{integer, _, Base}, {integer, _, Unit}]},
+t_from_form({type, _L, binary, [Base, Unit]} = Type,
_TypeNames, _RecDict, _VarDict) ->
- {t_bitstr(Unit, Base), []};
+ case {erl_eval:partial_eval(Base), erl_eval:partial_eval(Unit)} of
+ {{integer, _, BaseVal},
+ {integer, _, UnitVal}}
+ when BaseVal >= 0, UnitVal >= 0 ->
+ {t_bitstr(UnitVal, BaseVal), []};
+ _ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Type])})
+ end;
t_from_form({type, _L, bitstring, []}, _TypeNames, _RecDict, _VarDict) ->
{t_bitstr(), []};
t_from_form({type, _L, bool, []}, _TypeNames, _RecDict, _VarDict) ->
@@ -3502,9 +3550,14 @@ t_from_form({type, _L, product, Elements}, TypeNames, RecDict, VarDict) ->
{t_product(L), R};
t_from_form({type, _L, queue, []}, _TypeNames, _RecDict, _VarDict) ->
{t_queue(), []};
-t_from_form({type, _L, range, [{integer, _, From}, {integer, _, To}]},
+t_from_form({type, _L, range, [From, To]} = Type,
_TypeNames, _RecDict, _VarDict) ->
- {t_from_range(From, To), []};
+ case {erl_eval:partial_eval(From), erl_eval:partial_eval(To)} of
+ {{integer, _, FromVal},
+ {integer, _, ToVal}} ->
+ {t_from_range(FromVal, ToVal), []};
+ _ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Type])})
+ end;
t_from_form({type, _L, record, [Name|Fields]}, TypeNames, RecDict, VarDict) ->
record_from_form(Name, Fields, TypeNames, RecDict, VarDict);
t_from_form({type, _L, reference, []}, _TypeNames, _RecDict, _VarDict) ->
@@ -3679,6 +3732,16 @@ t_form_to_string({var, _L, Name}) -> atom_to_list(Name);
t_form_to_string({atom, _L, Atom}) ->
io_lib:write_string(atom_to_list(Atom), $'); % To quote or not to quote... '
t_form_to_string({integer, _L, Int}) -> integer_to_list(Int);
+t_form_to_string({op, _L, _Op, _Arg} = Op) ->
+ case erl_eval:partial_eval(Op) of
+ {integer, _, _} = Int -> t_form_to_string(Int);
+ _ -> io_lib:format("Badly formed type ~w",[Op])
+ end;
+t_form_to_string({op, _L, _Op, _Arg1, _Arg2} = Op) ->
+ case erl_eval:partial_eval(Op) of
+ {integer, _, _} = Int -> t_form_to_string(Int);
+ _ -> io_lib:format("Badly formed type ~w",[Op])
+ end;
t_form_to_string({ann_type, _L, [Var, Type]}) ->
t_form_to_string(Var) ++ "::" ++ t_form_to_string(Type);
t_form_to_string({paren_type, _L, [Type]}) ->
@@ -3705,8 +3768,12 @@ t_form_to_string({type, _L, nonempty_list, [Type]}) ->
t_form_to_string({type, _L, nonempty_string, []}) -> "nonempty_string()";
t_form_to_string({type, _L, product, Elements}) ->
"<" ++ sequence(t_form_to_string_list(Elements), ",") ++ ">";
-t_form_to_string({type, _L, range, [{integer, _, From}, {integer, _, To}]}) ->
- io_lib:format("~w..~w", [From, To]);
+t_form_to_string({type, _L, range, [From, To]} = Type) ->
+ case {erl_eval:partial_eval(From), erl_eval:partial_eval(To)} of
+ {{integer, _, FromVal}, {integer, _, ToVal}} ->
+ io_lib:format("~w..~w", [FromVal, ToVal]);
+ _ -> io_lib:format("Badly formed type ~w",[Type])
+ end;
t_form_to_string({type, _L, record, [{atom, _, Name}]}) ->
io_lib:format("#~w{}", [Name]);
t_form_to_string({type, _L, record, [{atom, _, Name}|Fields]}) ->
@@ -3725,13 +3792,17 @@ t_form_to_string({type, _L, Name, []} = T) ->
try t_to_string(t_from_form(T))
catch throw:{error, _} -> atom_to_list(Name) ++ "()"
end;
-t_form_to_string({type, _L, binary, [{integer, _, X}, {integer, _, Y}]}) ->
- case Y of
- 0 ->
- case X of
- 0 -> "<<>>";
- _ -> io_lib:format("<<_:~w>>", [X])
- end
+t_form_to_string({type, _L, binary, [X,Y]} = Type) ->
+ case {erl_eval:partial_eval(X), erl_eval:partial_eval(Y)} of
+ {{integer, _, XVal}, {integer, _, YVal}} ->
+ case YVal of
+ 0 ->
+ case XVal of
+ 0 -> "<<>>";
+ _ -> io_lib:format("<<_:~w>>", [XVal])
+ end
+ end;
+ _ -> io_lib:format("Badly formed type ~w",[Type])
end;
t_form_to_string({type, _L, Name, List}) ->
io_lib:format("~w(~s)", [Name, sequence(t_form_to_string_list(List), ",")]).
@@ -3763,6 +3834,8 @@ any_none_or_unit([?unit|_]) -> true;
any_none_or_unit([_|Left]) -> any_none_or_unit(Left);
any_none_or_unit([]) -> false.
+-spec is_erl_type(any()) -> boolean().
+
is_erl_type(?any) -> true;
is_erl_type(?none) -> true;
is_erl_type(?unit) -> true;
diff --git a/lib/hipe/flow/hipe_dominators.erl b/lib/hipe/flow/hipe_dominators.erl
index 3bfa6d43c4..17357461a5 100644
--- a/lib/hipe/flow/hipe_dominators.erl
+++ b/lib/hipe/flow/hipe_dominators.erl
@@ -1,20 +1,20 @@
%% -*- erlang-indent-level: 2 -*-
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2004-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2004-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%------------------------------------------------------------------------
@@ -37,6 +37,8 @@
domFrontier_create/2,
domFrontier_get/2]).
+-export_type([domTree/0]).
+
-include("cfg.hrl").
%%========================================================================
diff --git a/lib/hipe/icode/hipe_beam_to_icode.erl b/lib/hipe/icode/hipe_beam_to_icode.erl
index b9679fbb12..1f8be4040e 100644
--- a/lib/hipe/icode/hipe_beam_to_icode.erl
+++ b/lib/hipe/icode/hipe_beam_to_icode.erl
@@ -895,11 +895,6 @@ trans_fun([{bs_init_bits,{f,Lbl},Size,_Words,_LiveRegs,{field_flags,Flags0},X}|
end,
trans_bin_call({hipe_bs_primop,Name}, Lbl, Args, [Dst, Base, Offset],
Base, Offset, Env, Instructions);
-trans_fun([{bs_bits_to_bytes2, Bits, Bytes}|Instructions], Env) ->
- Src = trans_arg(Bits),
- Dst = mk_var(Bytes),
- [hipe_icode:mk_primop([Dst], 'bsl', [Src, hipe_icode:mk_const(3)])|
- trans_fun(Instructions,Env)];
trans_fun([{bs_add, {f,Lbl}, [Old,New,Unit], Res}|Instructions], Env) ->
Dst = mk_var(Res),
Temp = mk_var(new),
@@ -1129,13 +1124,6 @@ trans_fun([{gc_bif,Name,Fail,_Live,SrcRs,DstR}|Instructions], Env) ->
trans_fun([{bif,Name,Fail,SrcRs,DstR}|Instructions], Env)
end;
%%--------------------------------------------------------------------
-%% Instruction for constant pool added in February 2007 for R11B-4.
-%%--------------------------------------------------------------------
-trans_fun([{put_literal,{literal,Literal},DstR}|Instructions], Env) ->
- DstV = mk_var(DstR),
- Move = hipe_icode:mk_move(DstV, hipe_icode:mk_const(Literal)),
- [Move | trans_fun(Instructions, Env)];
-%%--------------------------------------------------------------------
%% New test instruction added in July 2007 for R12.
%%--------------------------------------------------------------------
%%--- is_bitstr ---
diff --git a/lib/hipe/util/hipe_digraph.erl b/lib/hipe/util/hipe_digraph.erl
index a62e913fe5..fcfaa64684 100644
--- a/lib/hipe/util/hipe_digraph.erl
+++ b/lib/hipe/util/hipe_digraph.erl
@@ -1,20 +1,20 @@
%% -*- erlang-indent-level: 2 -*-
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2005-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2005-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%-----------------------------------------------------------------------
@@ -30,6 +30,8 @@
from_list/1, to_list/1, get_parents/2, get_children/2]).
-export([reverse_preorder_sccs/1]).
+-export_type([hdg/0]).
+
%%------------------------------------------------------------------------
-type ordset(T) :: [T]. % XXX: temporarily
diff --git a/lib/hipe/vsn.mk b/lib/hipe/vsn.mk
index 129718a305..31c860ddec 100644
--- a/lib/hipe/vsn.mk
+++ b/lib/hipe/vsn.mk
@@ -1 +1 @@
-HIPE_VSN = 3.7.5
+HIPE_VSN = 3.7.6
diff --git a/lib/ic/doc/src/Makefile b/lib/ic/doc/src/Makefile
index 26d0932a95..8eda436a24 100644
--- a/lib/ic/doc/src/Makefile
+++ b/lib/ic/doc/src/Makefile
@@ -1,19 +1,19 @@
#
# %CopyrightBegin%
-#
-# Copyright Ericsson AB 1998-2009. All Rights Reserved.
-#
+#
+# Copyright Ericsson AB 1998-2010. All Rights Reserved.
+#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
# compliance with the License. You should have received a copy of the
# Erlang Public License along with this software. If not, it can be
# retrieved online at http://www.erlang.org/.
-#
+#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
# the License for the specific language governing rights and limitations
# under the License.
-#
+#
# %CopyrightEnd%
#
#
@@ -211,7 +211,11 @@ $(HTMLDIR)/%.gif: %.gif
ifdef DOCSUPPORT
+ifneq (,$(JAVA))
docs: pdf html man $(JAVADOC_GENERATED_FILES)
+else
+docs: pdf html man
+endif
$(TOP_PDF_FILE): $(XML_FILES)
@@ -301,6 +305,7 @@ release_docs_spec: docs
$(INSTALL_DATA) $(GIF_FILES) $(EXTRA_FILES) $(HTML_FILES) \
$(RELSYSDIR)/doc/html
$(INSTALL_DATA) $(INFO_FILE) $(RELSYSDIR)
+ifneq (,$(JAVA))
$(INSTALL_DIR) $(RELSYSDIR)/doc/html/java
$(INSTALL_DIR) $(RELSYSDIR)/doc/html/java/resources
$(INSTALL_DIR) $(RELSYSDIR)/doc/html/java/com
@@ -313,6 +318,7 @@ release_docs_spec: docs
$(RELSYSDIR)/doc/html/java/resources
$(INSTALL_DATA) $(JAVADOC_PACK_HTML_FILES) \
$(RELSYSDIR)/doc/html/java/com/ericsson/otp/ic
+endif
$(INSTALL_DIR) $(RELEASE_PATH)/man/man3
$(INSTALL_DATA) $(MAN3_FILES) $(RELEASE_PATH)/man/man3
diff --git a/lib/ic/doc/src/notes.xml b/lib/ic/doc/src/notes.xml
index dbafde7b4b..6684547572 100644
--- a/lib/ic/doc/src/notes.xml
+++ b/lib/ic/doc/src/notes.xml
@@ -4,7 +4,7 @@
<chapter>
<header>
<copyright>
- <year>1998</year><year>2009</year>
+ <year>1998</year><year>2010</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -13,12 +13,12 @@
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at http://www.erlang.org/.
-
+
Software distributed under the License is distributed on an "AS IS"
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
-
+
</legalnotice>
<title>IDL Compiler Release Notes</title>
@@ -31,6 +31,22 @@
</header>
<section>
+ <title>IC 4.2.25</title>
+
+ <section>
+ <title>Improvements and New Features</title>
+ <list type="bulleted">
+ <item>
+ <p>
+ The documentation can now be built and installed without Java.</p>
+ <p>
+ Own Id: OTP-8639 Aux Id:</p>
+ </item>
+ </list>
+ </section>
+ </section>
+
+ <section>
<title>IC 4.2.24</title>
<section>
diff --git a/lib/ic/vsn.mk b/lib/ic/vsn.mk
index e0fccf4889..4aa2a04b60 100644
--- a/lib/ic/vsn.mk
+++ b/lib/ic/vsn.mk
@@ -1,6 +1,8 @@
-IC_VSN = 4.2.24
+IC_VSN = 4.2.25
-TICKETS = OTP-8307 \
+TICKETS = OTP-8639
+
+TICKETS_4.2.24 = OTP-8307 \
OTP-8353 \
OTP-8354 \
OTP-8355
diff --git a/lib/inets/doc/src/http_server.xml b/lib/inets/doc/src/http_server.xml
index 01e0b47d37..68dfd1add0 100644
--- a/lib/inets/doc/src/http_server.xml
+++ b/lib/inets/doc/src/http_server.xml
@@ -30,6 +30,8 @@
<date></date>
<rev></rev>
<file>http_server.xml</file>
+
+ <marker id="intro"></marker>
</header>
<section>
@@ -65,6 +67,8 @@
Server API. This API can be used to advantage by all who want
to enhance the server core functionality, for example custom
logging and authentication.</p>
+
+ <marker id="config"></marker>
</section>
<section>
@@ -109,6 +113,8 @@
functions or only exported functions on chosen modules.</p>
<p>{accept_timeout, integer()} sets the wanted timeout value for
the server to set up a request connection.</p>
+
+ <marker id="using_http_server_api"></marker>
</section>
<section>
@@ -173,6 +179,7 @@
the ip address reported by the info function and can
not be the hostname that is allowed when inputting bind_address.</p>
+ <marker id="htaccess"></marker>
</section>
<section>
@@ -337,6 +344,8 @@ UserName:Password
</item>
</list>
</section>
+
+ <marker id="dynamic_we_pages"></marker>
</section>
<section>
@@ -434,6 +443,8 @@ http://your.server.org/eval?httpd_example:print(atom_to_list(apply(erlang,halt,[
</note>
</section>
</section>
+
+ <marker id="logging"></marker>
</section>
<section>
@@ -467,6 +478,8 @@ http://your.server.org/eval?httpd_example:print(atom_to_list(apply(erlang,halt,[
</p>
<p><em>[date]</em> access to <em>path</em> failed for
<em>remotehost</em>, reason: <em>reason</em></p>
+
+ <marker id="ssi"></marker>
</section>
<section>
diff --git a/lib/inets/doc/src/httpc.xml b/lib/inets/doc/src/httpc.xml
index 7430a62b1b..9c8df28fec 100644
--- a/lib/inets/doc/src/httpc.xml
+++ b/lib/inets/doc/src/httpc.xml
@@ -167,6 +167,8 @@ ssl_options() = {verify, code()} |
<v>http_option() = {timeout, timeout()} |
{connect_timeout, timeout()} |
{ssl, ssl_options()} |
+ {ossl, ssl_options()} |
+ {essl, ssl_options()} |
{autoredirect, boolean()} |
{proxy_auth, {userstring(), passwordstring()}} |
{version, http_version()} |
@@ -222,7 +224,22 @@ ssl_options() = {verify, code()} |
<tag><c><![CDATA[ssl]]></c></tag>
<item>
- <p>If using SSL, these SSL-specific options are used. </p>
+ <p>This is the default ssl config option; it currently defaults to
+ <c>ossl</c>, see below. </p>
+ <p>Defaults to <c>[]</c>. </p>
+ </item>
+
+ <tag><c><![CDATA[ossl]]></c></tag>
+ <item>
+ <p>If using the OpenSSL based (old) implementation of SSL,
+ these SSL-specific options are used. </p>
+ <p>Defaults to <c>[]</c>. </p>
+ </item>
+
+ <tag><c><![CDATA[essl]]></c></tag>
+ <item>
+ <p>If using the Erlang based (new) implementation of SSL,
+ these SSL-specific options are used. </p>
<p>Defaults to <c>[]</c>. </p>
</item>
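With the new options a request can pick the SSL implementation explicitly, for example by passing essl in the HTTP options. A hedged sketch (the URL, the ssl options and the function name are placeholders, not from this patch):

    %% Sketch only: explicitly selecting the Erlang based SSL implementation.
    essl_get(Url) ->
        inets:start(),               %% may return {error,{already_started,_}}
        ssl:start(),
        SslOpts  = [{verify, 0}],    %% ssl_options() as documented above
        HttpOpts = [{essl, SslOpts}, {timeout, 5000}],
        httpc:request(get, {Url, []}, HttpOpts, []).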
diff --git a/lib/inets/doc/src/httpd.xml b/lib/inets/doc/src/httpd.xml
index 7dabeb33e9..847605fe93 100644
--- a/lib/inets/doc/src/httpd.xml
+++ b/lib/inets/doc/src/httpd.xml
@@ -148,8 +148,13 @@
in the apache like configuration file.
</item>
- <tag>{socket_type, ip_comm | ssl}</tag>
+ <tag>{socket_type, ip_comm | ssl | ossl | essl}</tag>
<item>
+ <p>When using ssl, there are several alternatives.
+ <c>ossl</c> specifically uses the OpenSSL-based (old) SSL.
+ <c>essl</c> specifically uses the Erlang-based (new) SSL.
+ When using <c>ssl</c>, it <em>currently</em> defaults to
+ <c>ossl</c>. </p>
<p>Defaults to <c>ip_comm</c>. </p>
</item>
@@ -267,18 +272,22 @@ text/plain asc txt
The <c>common</c> format is one line that looks like this:
<c>remotehost rfc931 authuser [date] "request" status bytes</c></p>
- <pre>remotehost
+ <pre>
+remotehost
Remote
rfc931
The client's remote username (RFC 931).
authuser
- The username with which the user authenticated himself.
+ The username with which the user authenticated
+ himself.
[date]
Date and time of the request (RFC 1123).
"request"
- The request line exactly as it came from the client(RFC 1945).
+ The request line exactly as it came from the client
+ (RFC 1945).
status
- The HTTP status code returned to the client (RFC 1945).
+ The HTTP status code returned to the client
+ (RFC 1945).
bytes
The content-length of the document transferred.
</pre>
@@ -286,10 +295,11 @@ bytes
<p>The <c>combined</c> format is one line that looks like this:
<c>remotehost rfc931 authuser [date] "request" status bytes "referer" "user_agent" </c></p>
- <pre>"referer"
+ <pre>
+"referer"
The url the client was on before
- requesting your url. (If it could not be determined a minus
- sign will be placed in this field)
+ requesting your url. (If it could not be determined
+ a minus sign will be placed in this field)
"user_agent"
The software the client claims to be using. (If it
could not be determined a minus sign will be placed in
@@ -389,6 +399,31 @@ bytes
and an access to http://your.server.org/image/foo.gif would refer to
the file /ftp/pub/image/foo.gif.</item>
+ <tag>{re_write, {Re, Replacement}}</tag>
+
+ <item> Where Re = string() and Replacement = string().
+ The ReWrite property allows documents to be stored in the local file
+ system instead of the document_root location. URLs are rewritten
+ by re:replace/3 to produce a path in the local filesystem.
+ For example:
+
+ <code>{re_write, {"^/[~]([^/]+)(.*)$", "/home/\\1/public\\2"}}</code>
+
+ and an access to http://your.server.org/~bob/foo.gif would refer to
+ the file /home/bob/public/foo.gif.
+
+ In an Apache like configuration file the Re is separated
+ from Replacement with one single space, and as expected
+ backslashes do not need to be backslash escaped so the
+ same example would become:
+
+ <code>ReWrite ^/[~]([^/]+)(.*)$ /home/\1/public\2</code>
+
+ Beware of trailing spaces in Replacement; they will be used.
+ If you must have a space in Re, use e.g. the character encoding
+ <code>\040</code>; see <seealso marker="re">re(3)</seealso>.
+ </item>
+
<tag>{directory_index, [string()]}</tag>
<item>
@@ -408,7 +443,7 @@ bytes
</taglist>
<marker id="cgi_prop"></marker>
- <p><em>CGI properties - requires mod_cgi</em></p>
+ <p><em>CGI properties - requires mod_cgi</em></p>
<taglist>
<tag>{script_alias, {Alias, RealName}}</tag>
<item> Where Alias = string() and RealName = string().
@@ -423,6 +458,19 @@ bytes
the server to run the script /web/cgi-bin/foo.
</item>
+ <tag>{script_re_write, {Re, Replacement}}</tag>
+ <item> Where Re = string() and Replacement = string().
+ Has the same behavior as the ReWrite property, except that
+ it also marks the target directory as containing CGI
+ scripts. URLs with a path beginning with url-path are mapped to
+ scripts beginning with directory-filename, for example:
+
+ <code>{script_re_write, {"^/cgi-bin/(\\d+)/", "/web/\\1/cgi-bin/"}}</code>
+
+ and an access to http://your.server.org/cgi-bin/17/foo would cause
+ the server to run the script /web/17/cgi-bin/foo.
+ </item>
+
<tag>{script_nocache, boolean()}</tag>
<item>
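The re_write rule above is applied to the request path with re:replace, so its effect can be checked outside the server. A small sketch reproducing the documented example (the function name is made up):

    %% Sketch only: the documented example rule applied to a request path.
    rewrite_demo() ->
        Re          = "^/[~]([^/]+)(.*)$",
        Replacement = "/home/\\1/public\\2",
        "/home/bob/public/foo.gif" =
            re:replace("/~bob/foo.gif", Re, Replacement, [{return, list}]),
        ok.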
diff --git a/lib/inets/doc/src/mod_esi.xml b/lib/inets/doc/src/mod_esi.xml
index 6bad77dc0a..3c473d3f94 100644
--- a/lib/inets/doc/src/mod_esi.xml
+++ b/lib/inets/doc/src/mod_esi.xml
@@ -73,7 +73,8 @@
<v>SessionID = term()</v>
<v>Env = [EnvironmentDirectives] ++ ParsedHeader</v>
<v>EnvironmentDirectives = {Key,Value}</v>
- <v>Key = query_string | content_length | server_software | gateway_interface | server_protocol | server_port | request_method | remote_addr | script_name. &lt;v>Input = string()</v>
+ <v>Key = query_string | content_length | server_software | gateway_interface | server_protocol | server_port | request_method | remote_addr | script_name</v>
+ <v>Input = string()</v>
</type>
<desc>
<p>The <c>Module</c> must be found in the code path and export
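The corrected list above describes the Env proplist handed to an ESI callback of the form Module:Function(SessionID, Env, Input). A minimal callback sketch (module and function names are examples, not from this patch) that returns one of the documented keys via mod_esi:deliver/2:

    %% Sketch only: a trivial ESI callback echoing the request method.
    -module(my_esi).
    -export([echo/3]).

    echo(SessionID, Env, _Input) ->
        Method = proplists:get_value(request_method, Env, "GET"),
        mod_esi:deliver(SessionID,
                        ["Content-Type: text/plain\r\n\r\n",
                         "request_method = ", Method, "\r\n"]).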
diff --git a/lib/inets/doc/src/notes.xml b/lib/inets/doc/src/notes.xml
index 3216b0c2cd..23ad5c0df0 100644
--- a/lib/inets/doc/src/notes.xml
+++ b/lib/inets/doc/src/notes.xml
@@ -32,6 +32,122 @@
<file>notes.xml</file>
</header>
+ <section><title>Inets 5.4</title>
+
+ <section><title>Improvements and New Features</title>
+<!--
+ <p>-</p>
+-->
+
+ <list>
+ <item>
+ <p>[httpc|httpd] - Now allow the use of the "new" ssl by using
+ the <c>essl</c> tag instead. </p>
+ <p>See the <c>http_option</c> option in the
+ <seealso marker="httpc#request2">request/4,5</seealso> or
+ the <seealso marker="httpd#comm_prop">socket-type</seealso>
+ section of the Communication properties chapter for more info. </p>
+ <p>Own Id: OTP-7907</p>
+ </item>
+
+ <item>
+ <p>Deprecated functions designated to be removed in R14 have been
+ removed. Also, some new functions have been marked as deprecated
+ (the old http client api module). </p>
+ <p>Own Id: OTP-8564</p>
+ <p>*** POTENTIAL INCOMPATIBILITY ***</p>
+ </item>
+
+ <item>
+ <p>[httpd] - Improved mod_alias.
+ Now able to do better URL rewrites. </p>
+ <p>See
+ <seealso marker="httpd#alias_prop">URL aliasing properties</seealso>
+ and the
+ <seealso marker="httpd#cgi_prop">CGI properties</seealso>
+ section(s) for more info. </p>
+ <p>Own Id: OTP-8573</p>
+ </item>
+
+ </list>
+ </section>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+
+ <p>-</p>
+
+<!--
+ <list>
+ <item>
+ <p>[httpd] The server did not fully support the documented module
+ callback api. Specifically, the load function should be able to
+ return the atom <c>ok</c>, but this was not accepted. </p>
+ <p>Own Id: OTP-8359</p>
+ </item>
+
+ </list>
+-->
+
+ </section>
+
+ </section> <!-- 5.4 -->
+
+
+ <section><title>Inets 5.3.3</title>
+
+ <section><title>Improvements and New Features</title>
+ <p>-</p>
+
+<!--
+ <list>
+ <item>
+ <p>[httpc] - Allow users to pass socket options to the transport
+ module when making requests. </p>
+ <p>See the <c>socket_opts</c> option in the
+ <seealso marker="httpc#request2">request/4</seealso> or
+ <seealso marker="httpc#set_options">set_options/1,2</seealso>
+ for more info, </p>
+ <p>Own Id: OTP-8352</p>
+ </item>
+
+ </list>
+-->
+ </section>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+
+<!--
+ <p>-</p>
+-->
+
+ <list>
+ <item>
+ <p>[httpc] - Made cookie handling more case insensitive.</p>
+ <p>Own Id: OTP-8609</p>
+ <p>Nicolas Thauvin</p>
+ </item>
+
+ <item>
+ <p>[httpc|httpd] - Netscape cookie dates can also be given with a
+ 2-digit year (e.g. 06 = 2006). </p>
+ <p>Own Id: OTP-8610</p>
+ <p>Nicolas Thauvin</p>
+ </item>
+
+ <item>
+ <p>[httpd] - Added support (again) for the documented debugging
+ features. See the User's Guide
+ <seealso marker="http_server#config">Configuration</seealso>
+ chapter for more info. </p>
+ <p>Own Id: OTP-8624</p>
+ </item>
+
+ </list>
+ </section>
+
+ </section> <!-- 5.3.3 -->
+
+
<section><title>Inets 5.3.2</title>
<section><title>Improvements and New Features</title>
@@ -249,6 +365,7 @@
<p>Own Id: OTP-8016</p>
<p>*** POTENTIAL INCOMPATIBILITY ***</p>
</item>
+
</list>
</section>
diff --git a/lib/inets/examples/Makefile b/lib/inets/examples/Makefile
index a42b0e38b6..775c449062 100644
--- a/lib/inets/examples/Makefile
+++ b/lib/inets/examples/Makefile
@@ -1,19 +1,19 @@
#
# %CopyrightBegin%
-#
-# Copyright Ericsson AB 1997-2009. All Rights Reserved.
-#
+#
+# Copyright Ericsson AB 2010. All Rights Reserved.
+#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
# compliance with the License. You should have received a copy of the
# Erlang Public License along with this software. If not, it can be
# retrieved online at http://www.erlang.org/.
-#
+#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
# the License for the specific language governing rights and limitations
# under the License.
-#
+#
# %CopyrightEnd%
#
#
@@ -21,189 +21,15 @@ include $(ERL_TOP)/make/target.mk
include $(ERL_TOP)/make/$(TARGET)/otp.mk
# ----------------------------------------------------
-# Application version
-# ----------------------------------------------------
-include ../vsn.mk
-VSN=$(INETS_VSN)
-
-# ----------------------------------------------------
-# Release directory specification
-# ----------------------------------------------------
-RELSYSDIR = $(RELEASE_PATH)/lib/inets-$(VSN)
-
-# ----------------------------------------------------
-# Target Specs
+# Common Macros
# ----------------------------------------------------
-MODULE=
-AUTH_FILES = server_root/auth/group \
- server_root/auth/passwd
-CGI_FILES = server_root/cgi-bin/printenv.sh
-CONF_FILES = server_root/conf/8080.conf \
- server_root/conf/8888.conf \
- server_root/conf/httpd.conf \
- server_root/conf/ssl.conf \
- server_root/conf/mime.types
-OPEN_FILES = server_root/htdocs/open/dummy.html
-MNESIA_OPEN_FILES = server_root/htdocs/mnesia_open/dummy.html
-MISC_FILES = server_root/htdocs/misc/friedrich.html \
- server_root/htdocs/misc/oech.html
-SECRET_FILES = server_root/htdocs/secret/dummy.html
-MNESIA_SECRET_FILES = server_root/htdocs/mnesia_secret/dummy.html
-HTDOCS_FILES = server_root/htdocs/index.html \
- server_root/htdocs/config.shtml \
- server_root/htdocs/echo.shtml \
- server_root/htdocs/exec.shtml \
- server_root/htdocs/flastmod.shtml \
- server_root/htdocs/fsize.shtml \
- server_root/htdocs/include.shtml
-ICON_FILES = server_root/icons/README \
- server_root/icons/a.gif \
- server_root/icons/alert.black.gif \
- server_root/icons/alert.red.gif \
- server_root/icons/apache_pb.gif \
- server_root/icons/back.gif \
- server_root/icons/ball.gray.gif \
- server_root/icons/ball.red.gif \
- server_root/icons/binary.gif \
- server_root/icons/binhex.gif \
- server_root/icons/blank.gif \
- server_root/icons/bomb.gif \
- server_root/icons/box1.gif \
- server_root/icons/box2.gif \
- server_root/icons/broken.gif \
- server_root/icons/burst.gif \
- server_root/icons/button1.gif \
- server_root/icons/button10.gif \
- server_root/icons/button2.gif \
- server_root/icons/button3.gif \
- server_root/icons/button4.gif \
- server_root/icons/button5.gif \
- server_root/icons/button6.gif \
- server_root/icons/button7.gif \
- server_root/icons/button8.gif \
- server_root/icons/button9.gif \
- server_root/icons/buttonl.gif \
- server_root/icons/buttonr.gif \
- server_root/icons/c.gif \
- server_root/icons/comp.blue.gif \
- server_root/icons/comp.gray.gif \
- server_root/icons/compressed.gif \
- server_root/icons/continued.gif \
- server_root/icons/dir.gif \
- server_root/icons/down.gif \
- server_root/icons/dvi.gif \
- server_root/icons/f.gif \
- server_root/icons/folder.gif \
- server_root/icons/folder.open.gif \
- server_root/icons/folder.sec.gif \
- server_root/icons/forward.gif \
- server_root/icons/generic.gif \
- server_root/icons/generic.red.gif \
- server_root/icons/generic.sec.gif \
- server_root/icons/hand.right.gif \
- server_root/icons/hand.up.gif \
- server_root/icons/htdig.gif \
- server_root/icons/icon.sheet.gif \
- server_root/icons/image1.gif \
- server_root/icons/image2.gif \
- server_root/icons/image3.gif \
- server_root/icons/index.gif \
- server_root/icons/layout.gif \
- server_root/icons/left.gif \
- server_root/icons/link.gif \
- server_root/icons/movie.gif \
- server_root/icons/p.gif \
- server_root/icons/patch.gif \
- server_root/icons/pdf.gif \
- server_root/icons/pie0.gif \
- server_root/icons/pie1.gif \
- server_root/icons/pie2.gif \
- server_root/icons/pie3.gif \
- server_root/icons/pie4.gif \
- server_root/icons/pie5.gif \
- server_root/icons/pie6.gif \
- server_root/icons/pie7.gif \
- server_root/icons/pie8.gif \
- server_root/icons/portal.gif \
- server_root/icons/poweredby.gif \
- server_root/icons/ps.gif \
- server_root/icons/quill.gif \
- server_root/icons/right.gif \
- server_root/icons/screw1.gif \
- server_root/icons/screw2.gif \
- server_root/icons/script.gif \
- server_root/icons/sound1.gif \
- server_root/icons/sound2.gif \
- server_root/icons/sphere1.gif \
- server_root/icons/sphere2.gif \
- server_root/icons/star.gif \
- server_root/icons/star_blank.gif \
- server_root/icons/tar.gif \
- server_root/icons/tex.gif \
- server_root/icons/text.gif \
- server_root/icons/transfer.gif \
- server_root/icons/unknown.gif \
- server_root/icons/up.gif \
- server_root/icons/uu.gif \
- server_root/icons/uuencoded.gif \
- server_root/icons/world1.gif \
- server_root/icons/world2.gif
+include subdirs.mk
-SSL_FILES = server_root/ssl/ssl_client.pem \
- server_root/ssl/ssl_server.pem
+SPECIAL_TARGETS =
# ----------------------------------------------------
-# FLAGS
+# Default Subdir Targets
# ----------------------------------------------------
-ERL_COMPILE_FLAGS +=
-
-# ----------------------------------------------------
-# Targets
-# ----------------------------------------------------
-
-debug opt:
-
-clean:
-
-docs:
-
-# ----------------------------------------------------
-# Release Target
-# ----------------------------------------------------
-include $(ERL_TOP)/make/otp_release_targets.mk
-
-release_spec: opt
- $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/auth
- $(INSTALL_DATA) $(AUTH_FILES) $(RELSYSDIR)/examples/server_root/auth
- $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/cgi-bin
- $(INSTALL_SCRIPT) $(CGI_FILES) $(RELSYSDIR)/examples/server_root/cgi-bin
- $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/conf
- $(INSTALL_DATA) $(CONF_FILES) $(RELSYSDIR)/examples/server_root/conf
- $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/htdocs/open
- $(INSTALL_DATA) $(OPEN_FILES) \
- $(RELSYSDIR)/examples/server_root/htdocs/open
- $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/htdocs/mnesia_open
- $(INSTALL_DATA) $(MNESIA_OPEN_FILES) \
- $(RELSYSDIR)/examples/server_root/htdocs/mnesia_open
- $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/htdocs/misc
- $(INSTALL_DATA) $(MISC_FILES) \
- $(RELSYSDIR)/examples/server_root/htdocs/misc
- $(INSTALL_DIR) \
- $(RELSYSDIR)/examples/server_root/htdocs/secret/top_secret
- $(INSTALL_DIR) \
- $(RELSYSDIR)/examples/server_root/htdocs/mnesia_secret/top_secret
- $(INSTALL_DATA) $(SECRET_FILES) \
- $(RELSYSDIR)/examples/server_root/htdocs/secret
- $(INSTALL_DATA) $(MNESIA_SECRET_FILES) \
- $(RELSYSDIR)/examples/server_root/htdocs/mnesia_secret
- $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/htdocs
- $(INSTALL_DATA) $(HTDOCS_FILES) $(RELSYSDIR)/examples/server_root/htdocs
- $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/icons
- $(INSTALL_DATA) $(ICON_FILES) $(RELSYSDIR)/examples/server_root/icons
- $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/ssl
- $(INSTALL_DATA) $(SSL_FILES) $(RELSYSDIR)/examples/server_root/ssl
- $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/logs
-
-release_docs_spec:
+include $(ERL_TOP)/make/otp_subdir.mk
diff --git a/lib/inets/examples/httpd_load_test/Makefile b/lib/inets/examples/httpd_load_test/Makefile
new file mode 100644
index 0000000000..1cc61ad8ae
--- /dev/null
+++ b/lib/inets/examples/httpd_load_test/Makefile
@@ -0,0 +1,123 @@
+#
+# %CopyrightBegin%
+#
+# Copyright Ericsson AB 2010. All Rights Reserved.
+#
+# The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved online at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# %CopyrightEnd%
+
+include $(ERL_TOP)/make/target.mk
+
+EBIN = .
+
+include $(ERL_TOP)/make/$(TARGET)/otp.mk
+
+# ----------------------------------------------------
+# Application version
+# ----------------------------------------------------
+include ../../vsn.mk
+
+VSN=$(INETS_VSN)
+
+
+# ----------------------------------------------------
+# Release directory specification
+# ----------------------------------------------------
+RELSYSDIR = $(RELEASE_PATH)/lib/inets-$(VSN)
+EXAMPLE_RELSYSDIR = $(RELSYSDIR)/examples
+HDLT_RELSYSDIR = $(EXAMPLE_RELSYSDIR)/httpd_load_test
+
+
+# ----------------------------------------------------
+# Target Specs
+# ----------------------------------------------------
+
+include modules.mk
+
+ERL_FILES = $(MODULES:%=%.erl)
+
+SOURCE = $(ERL_FILES) $(INTERNAL_HRL_FILES)
+
+TARGET_FILES = \
+ $(ERL_FILES:%.erl=$(EBIN)/%.$(EMULATOR))
+
+ifeq ($(TYPE),debug)
+ERL_COMPILE_FLAGS += -Ddebug -W
+endif
+
+
+# ----------------------------------------------------
+# FLAGS
+# ----------------------------------------------------
+
+include ../../src/inets_app/inets.mk
+
+ERL_COMPILE_FLAGS += \
+ $(INETS_FLAGS) \
+ $(INETS_ERL_COMPILE_FLAGS) \
+ -I../../include
+
+
+# ----------------------------------------------------
+# Special Build Targets
+# ----------------------------------------------------
+
+
+# ----------------------------------------------------
+# Targets
+# ----------------------------------------------------
+debug:
+ @${MAKE} TYPE=debug opt
+
+opt: $(TARGET_FILES)
+
+clean:
+ rm -f $(TARGET_FILES)
+ rm -f errs core *~
+
+docs:
+
+
+# ----------------------------------------------------
+# Release Target
+# ----------------------------------------------------
+include $(ERL_TOP)/make/otp_release_targets.mk
+
+
+release_spec: opt
+ $(INSTALL_DIR) $(EXAMPLE_RELSYSDIR)
+ $(INSTALL_DIR) $(HDLT_RELSYSDIR)
+ $(INSTALL_DATA) $(SCRIPT_SKELETONS) $(HDLT_RELSYSDIR)
+ $(INSTALL_DATA) $(CONF_SKELETONS) $(HDLT_RELSYSDIR)
+ $(INSTALL_DATA) $(CERT_FILES) $(HDLT_RELSYSDIR)
+ $(INSTALL_DATA) $(TARGET_FILES) $(HDLT_RELSYSDIR)
+ $(INSTALL_DATA) $(ERL_FILES) $(HDLT_RELSYSDIR)
+
+
+release_docs_spec:
+
+
+# ----------------------------------------------------
+# Include dependencies
+# ----------------------------------------------------
+
+megaco_codec_transform.$(EMULATOR): megaco_codec_transform.erl
+
+megaco_codec_meas.$(EMULATOR): megaco_codec_meas.erl
+
+megaco_codec_mstone1.$(EMULATOR): megaco_codec_mstone1.erl
+
+megaco_codec_mstone2.$(EMULATOR): megaco_codec_mstone2.erl
+
+megaco_codec_mstone_lib.$(EMULATOR): megaco_codec_mstone_lib.erl
+
diff --git a/lib/inets/examples/httpd_load_test/hdlt.config.skel b/lib/inets/examples/httpd_load_test/hdlt.config.skel
new file mode 100644
index 0000000000..640867ebac
--- /dev/null
+++ b/lib/inets/examples/httpd_load_test/hdlt.config.skel
@@ -0,0 +1,20 @@
+%% Debug: silence | info | log | debug
+{debug, [{ctrl, info}, {proxy, silence}, {slave, silence}, {client, silence}]}.
+{server, {"/usr/local/bin", "fooserver"}}.
+%% {port, 8888}. % integer() > 0
+{server_dir, "/tmp/hdlt"}. % Absolute path
+{work_dir, "/tmp/hdlt"}. % Absolute path
+{clients,
+ [
+ {"/opt/local/bin", "foo"},
+ {"/usr/local/bin", "bar"}
+ ]
+}.
+%% {send_rate, 80}. % Max number of outstanding requests, integer() > 0
+%% {test_time, 120}. % Number of seconds,
+%% {max_nof_schedulers, 8}. % integer() >= 0
+%% {work_simulator, 10000}. % integer() > 0
+%% {data_size, {100, 500, 2}}. % {integer() > 0, integer() > 0, integer() > 0}
+%% {socket_type, ip_comm}. % ip_comm | ssl | essl | ossl
+%% {server_cert_file, "hdlt_ssl_server_cert.pem"}.
+%% {client_cert_file, "hdlt_ssl_client_cert.pem"}.
\ No newline at end of file
diff --git a/lib/inets/examples/httpd_load_test/hdlt.erl b/lib/inets/examples/httpd_load_test/hdlt.erl
new file mode 100644
index 0000000000..18d8c34ccf
--- /dev/null
+++ b/lib/inets/examples/httpd_load_test/hdlt.erl
@@ -0,0 +1,74 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+%%----------------------------------------------------------------------
+%% Purpose: Main API module for the httpd load test utility
+%%----------------------------------------------------------------------
+
+-module(hdlt).
+
+
+%%-----------------------------------------------------------------
+%% Public interface
+%%-----------------------------------------------------------------
+
+-export([start/0, start/1, stop/0, help/0]).
+
+
+%%-----------------------------------------------------------------
+%% Start the HDLT utility
+%%-----------------------------------------------------------------
+
+start() ->
+ ConfigFile = "hdlt.config",
+ case file:consult(ConfigFile) of
+ {ok, Config} when is_list(Config) ->
+ start(Config);
+ Error ->
+ Error
+ end.
+
+start(Config) ->
+ Flag = process_flag(trap_exit, true),
+ Result =
+ case hdlt_ctrl:start(Config) of
+ {ok, Pid} ->
+ receive
+ {'EXIT', Pid, normal} ->
+ ok;
+ {'EXIT', Pid, Reason} ->
+ io:format("HDLT failed: "
+ "~n ~p"
+ "~n", [Reason]),
+ {error, Reason}
+ end;
+ Error ->
+ Error
+ end,
+ process_flag(trap_exit, Flag),
+ Result.
+
+
+
+stop() ->
+ hdlt_ctrl:stop().
+
+
+help() ->
+ hdlt_ctrl:help().
diff --git a/lib/inets/examples/httpd_load_test/hdlt.sh.skel b/lib/inets/examples/httpd_load_test/hdlt.sh.skel
new file mode 100644
index 0000000000..a250bad9c5
--- /dev/null
+++ b/lib/inets/examples/httpd_load_test/hdlt.sh.skel
@@ -0,0 +1,44 @@
+#!/bin/sh
+
+#
+# %CopyrightBegin%
+#
+# Copyright Ericsson AB 2010. All Rights Reserved.
+#
+# The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved online at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# %CopyrightEnd%
+
+# Skeleton for a script intended to run the hdlt(N)
+# performance test.
+#
+# This test can be used for several things depending on the
+# configuration: SMP or SocketType performance tests
+#
+
+ERL_HOME=<path to otp top dir>
+INETS_HOME=$ERL_HOME/lib/erlang/lib/<inets dir>
+HDLT_HOME=$INETS_HOME/examples/httpd_load_test
+PATH=$ERL_HOME/bin:$PATH
+
+HDLT="-s hdlt start"
+STOP="-s init stop"
+
+ERL="erl \
+ -noshell \
+ -pa $HDLT_HOME \
+ $HDLT \
+ $STOP"
+
+echo $ERL
+$ERL | tee hdlt.log
+
diff --git a/lib/inets/examples/httpd_load_test/hdlt_client.erl b/lib/inets/examples/httpd_load_test/hdlt_client.erl
new file mode 100644
index 0000000000..d65ac5a885
--- /dev/null
+++ b/lib/inets/examples/httpd_load_test/hdlt_client.erl
@@ -0,0 +1,370 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+%%----------------------------------------------------------------------
+%% Purpose: The HDLT client module.
+%% This is the traffic generator
+%%----------------------------------------------------------------------
+
+-module(hdlt_client).
+
+-export([
+ start/1,
+ stop/0,
+ start_inets/0,
+ start_service/1,
+ release/0,
+ node_info/0
+ ]).
+
+-export([
+ proxy/1
+ ]).
+
+-include("hdlt_logger.hrl").
+
+-define(CTRL, hdlt_ctrl).
+-define(PROXY, hdlt_proxy).
+
+-record(state,
+ {
+ mode = initial,
+ send_rate,
+ time,
+ stop_time,
+ url,
+ nof_reqs = 0,
+ nof_reps = 0,
+ last_req,
+ sizes,
+ socket_type,
+ cert_file
+ }).
+
+
+
+start(Debug) ->
+ proc_lib:start_link(?MODULE, proxy, [Debug]).
+
+stop() ->
+ (catch erlang:send(?PROXY, stop)),
+ ok.
+
+start_inets() ->
+ ?PROXY ! start_inets.
+
+start_service(Args) ->
+ ?PROXY ! {start_client, Args, self()},
+ receive
+ client_started ->
+ %% ?LOG("client service started"),
+ ok
+ end.
+
+release() ->
+ ?PROXY ! release.
+
+node_info() ->
+ ?PROXY ! {node_info, self()},
+ receive
+ {node_info, NodeInfo} ->
+ NodeInfo
+ end.
+
+
+%% ---------------------------------------------------------------------
+%%
+%% The proxy process
+%%
+
+proxy(Debug) ->
+ process_flag(trap_exit, true),
+ erlang:register(?PROXY, self()),
+ SName = lists:flatten(
+ io_lib:format("HDLT PROXY[~p,~p]", [self(), node()])),
+ ?SET_NAME(SName),
+ ?SET_LEVEL(Debug),
+ ?LOG("starting", []),
+ Ref = await_for_controller(10),
+ CtrlNode = node(Ref),
+ erlang:monitor_node(CtrlNode, true),
+ proc_lib:init_ack({ok, self()}),
+ ?DEBUG("started", []),
+ proxy_loop(Ref, CtrlNode, undefined).
+
+await_for_controller(N) when N > 0 ->
+ case global:whereis_name(hdlt_ctrl) of
+ Pid when is_pid(Pid) ->
+ erlang:monitor(process, Pid);
+ _ ->
+ timer:sleep(1000),
+ await_for_controller(N-1)
+ end;
+await_for_controller(_) ->
+ proc_lib:init_ack({error, controller_not_found, nodes()}),
+ timer:sleep(500),
+ init:stop().
+
+
+proxy_loop(Ref, CtrlNode, Client) ->
+ ?DEBUG("await command", []),
+ receive
+ stop ->
+ ?LOG("stop", []),
+ timer:sleep(1000),
+ halt();
+
+ start_inets ->
+ ?LOG("start the inets service framework", []),
+ %% inets:enable_trace(max, "/tmp/inets-httpc-trace.log", all),
+ case (catch inets:start()) of
+ ok ->
+ ?LOG("framework started", []),
+ proxy_loop(Ref, CtrlNode, Client);
+ Error ->
+ ?LOG("failed starting inets service framework: "
+ "~n Error: ~p", [Error]),
+ timer:sleep(1000),
+ halt()
+ end;
+
+ {start_client, Args, From} ->
+ ?LOG("start client with"
+ "~n Args: ~p", [Args]),
+ Client2 = spawn_link(fun() -> client(Args) end),
+ From ! client_started,
+ proxy_loop(Ref, CtrlNode, Client2);
+
+ release ->
+ ?LOG("release", []),
+ Client ! go,
+ proxy_loop(Ref, CtrlNode, Client);
+
+ {node_info, Pid} ->
+ ?LOG("received requets for node info", []),
+ NodeInfo = get_node_info(),
+ Pid ! {node_info, NodeInfo},
+ proxy_loop(Ref, CtrlNode, Client);
+
+ {'EXIT', Client, normal} ->
+ ?LOG("received normal exit message from client (~p)",
+ [Client]),
+ exit(normal);
+
+ {'EXIT', Client, Reason} ->
+ ?INFO("received exit message from client (~p)"
+ "~n Reason: ~p", [Client, Reason]),
+ %% Unexpected client termination, inform the controller and die
+ global:send(hdlt_ctrl, {client_exit, Client, node(), Reason}),
+ exit({client_exit, Reason});
+
+ {nodedown, CtrlNode} ->
+ ?LOG("received nodedown for controller node - terminate", []),
+ halt();
+
+ {'DOWN', Ref, process, _, _} ->
+ ?INFO("received DOWN message for controller - terminate", []),
+ %% The controller has terminated, don't care why; time to die
+ halt()
+
+ end.
+
+
+
+%% ---------------------------------------------------------------------
+%%
+%% The client process
+%%
+
+client([SocketType, CertFile, URLBase, Sizes, Time, SendRate, Debug]) ->
+ SName = lists:flatten(
+ io_lib:format("HDLT CLIENT[~p,~p]", [self(), node()])),
+ ?SET_NAME(SName),
+ ?SET_LEVEL(Debug),
+ ?LOG("starting with"
+ "~n SocketType: ~p"
+ "~n Time: ~p"
+ "~n SendRate: ~p", [SocketType, Time, SendRate]),
+ httpc:set_options([{max_pipeline_length, 0}]),
+ if
+ (SocketType =:= ssl) orelse
+ (SocketType =:= ossl) orelse
+ (SocketType =:= essl) ->
+ %% Ensure crypto and ssl started:
+ crypto:start(),
+ ssl:start();
+ true ->
+ ok
+ end,
+ State = #state{mode = idle,
+ url = URLBase,
+ time = Time,
+ send_rate = SendRate,
+ sizes = Sizes,
+ socket_type = SocketType,
+ cert_file = CertFile},
+ ?DEBUG("started", []),
+ client_loop(State).
+
+%% The point is to first start all client nodes and then this
+%% process. Then, when they are all started, the go-ahead ('go')
+%% message is sent to let them all loose at the same time.
+client_loop(#state{mode = idle,
+ time = Time,
+ send_rate = SendRate} = State) ->
+ ?DEBUG("[idle] awaiting the go command", []),
+ receive
+ go ->
+ ?LOG("[idle] received go", []),
+ erlang:send_after(Time, self(), stop),
+ NewState = send_requests(State, SendRate),
+ client_loop(NewState#state{mode = generating,
+ nof_reqs = SendRate})
+ end;
+
+%% In this mode the client is generating traffic.
+%% It will continue to do so until the stop message
+%% is received.
+client_loop(#state{mode = generating} = State) ->
+ receive
+ stop ->
+ ?LOG("[generating] received stop", []),
+ StopTime = timestamp(),
+ req_reply(State),
+ client_loop(State#state{mode = stopping, stop_time = StopTime});
+
+ {http, {_, {{_, 200, _}, _, _}}} ->
+ %% ?DEBUG("[generating] received reply - send another request", []),
+ NewState = send_requests(State, 1),
+ client_loop(NewState#state{nof_reps = NewState#state.nof_reps + 1,
+ nof_reqs = NewState#state.nof_reqs + 1});
+
+ {http, {ReqId, {error, Reason}}} ->
+ ?INFO("[generating] request ~p failed: "
+ "~n Reason: ~p"
+ "~n NofReqs: ~p"
+ "~n NofReps: ~p",
+ [ReqId, Reason, State#state.nof_reqs, State#state.nof_reps]),
+ exit({Reason, generating, State#state.nof_reqs, State#state.nof_reps});
+
+ Else ->
+ ?LOG("[generating] received unexpected message: "
+ "~n~p", [Else]),
+ unexpected_data(Else),
+ client_loop(State)
+ end;
+
+%% The client no longer issues any new requests; instead it
+%% waits for the replies to all outstanding requests to
+%% arrive.
+client_loop(#state{mode = stopping,
+ time = Time,
+ last_req = LastReqId} = State) ->
+ receive
+ {http, {LastReqId, {{_, 200, _}, _, _}}} ->
+ ?DEBUG("[stopping] received reply for last request (~p)", [LastReqId]),
+ time_to_complete(State),
+ ok;
+
+ {http, {ReqId, {{_, 200, _}, _, _}}} ->
+ ?DEBUG("[stopping] received reply ~p", [ReqId]),
+ client_loop(State);
+
+ {http, {ReqId, {error, Reason}}} ->
+ ?INFO("[stopping] request ~p failed: "
+ "~n Reason: ~p"
+ "~n NofReqs: ~p"
+ "~n NofReps: ~p",
+ [ReqId, Reason, State#state.nof_reqs, State#state.nof_reps]),
+ exit({Reason, stopping, State#state.nof_reqs, State#state.nof_reps});
+
+ Else ->
+ ?LOG("[stopping] received unexpected message: "
+ "~n~p", [Else]),
+ unexpected_data(Else),
+ client_loop(State)
+
+ after Time ->
+ ?INFO("timeout when"
+ "~n Number of requests: ~p"
+ "~n Number of replies: ~p",
+ [State#state.nof_reqs, State#state.nof_reps]),
+ exit({timeout, State#state.nof_reqs, State#state.nof_reps})
+ end.
+
+req_reply(#state{nof_reqs = NofReqs, nof_reps = NofReps}) ->
+ load_data({req_reply, node(), NofReqs, NofReps}).
+
+time_to_complete(#state{stop_time = StopTime}) ->
+ StoppedTime = os:timestamp(),
+ load_data({time_to_complete, node(), StopTime, StoppedTime}).
+
+load_data(Data) ->
+ global:send(?CTRL, {load_data, Data}).
+
+unexpected_data(Else) ->
+ global:send(?CTRL, {unexpected_data, Else}).
+
+
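+%% Issue N asynchronous httpc requests. The sizes list is cycled by
+%% moving each consumed size to the end of the list, so the size
+%% pattern repeats over the lifetime of the client.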
+send_requests(#state{sizes = Sizes} = State, N) ->
+ send_requests(State, N, Sizes).
+
+send_requests(State, 0, Sizes) ->
+ State#state{sizes = Sizes};
+send_requests(#state{socket_type = SocketType,
+ cert_file = CertFile} = State, N, [Sz | Sizes]) ->
+ URL = lists:flatten(io_lib:format("~s~w", [State#state.url, Sz])),
+ Method = get,
+ Request = {URL, []},
+ HTTPOptions =
+ case SocketType of
+ ip_comm ->
+ [];
+ _ ->
+ SslOpts = [{verify, 0},
+ {certfile, CertFile},
+ {keyfile, CertFile}],
+ case SocketType of
+ ssl ->
+ [{ssl, SslOpts}];
+ ossl ->
+ [{ssl, {ossl, SslOpts}}];
+ essl ->
+ [{ssl, {essl, SslOpts}}]
+ end
+ end,
+ Options = [{sync, false}],
+ {ok, Ref} = httpc:request(Method, Request, HTTPOptions, Options),
+ send_requests(State#state{last_req = Ref}, N-1, lists:append(Sizes, [Sz])).
+
+
+timestamp() ->
+ os:timestamp().
+
+
+get_node_info() ->
+ [{cpu_topology, erlang:system_info(cpu_topology)},
+ {heap_type, erlang:system_info(heap_type)},
+ {nof_schedulers, erlang:system_info(schedulers)},
+ {otp_release, erlang:system_info(otp_release)},
+ {version, erlang:system_info(version)},
+ {system_version, erlang:system_info(system_version)},
+ {system_architecture, erlang:system_info(system_architecture)}].
+
+
diff --git a/lib/inets/examples/httpd_load_test/hdlt_ctrl.erl b/lib/inets/examples/httpd_load_test/hdlt_ctrl.erl
new file mode 100644
index 0000000000..950d2632f7
--- /dev/null
+++ b/lib/inets/examples/httpd_load_test/hdlt_ctrl.erl
@@ -0,0 +1,1530 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+%%----------------------------------------------------------------------
+%% Purpose: The httpd load test (hdlt) controller/collector module.
+%%          This module contains all the controller/collector code:
+%%          it sets up the test, starts all server and client
+%%          nodes and applications, and finally collects the
+%%          test data.
+%%----------------------------------------------------------------------
+
+-module(hdlt_ctrl).
+
+-export([start/1, stop/0, help/0]).
+
+-export([init/1, proxy/7]).
+
+-include_lib("kernel/include/file.hrl").
+-include("hdlt_logger.hrl").
+
+-define(DEFAULT_SENDRATE, 89).
+-define(DEFAULT_TEST_TIME, 120). % 2 minutes
+-define(DEFAULT_PORT, 8889).
+-define(TIMEOUT, 60000).
+-define(DEFAULT_MAX_NOF_SCHEDULERS, 8).
+-define(DEFAULT_SERVER_DIR, "/tmp/hdlt").
+-define(DEFAULT_WORK_DIR, "/tmp/hdlt").
+-define(SSH_PORT, 22).
+-define(DEFAULT_SOCKET_TYPE, ip_comm).
+-define(DEFAULT_SERVER_CERT, "hdlt_ssl_server_cert.pem").
+-define(DEFAULT_CLIENT_CERT, "hdlt_ssl_client_cert.pem").
+-define(SSH_CONNECT_TIMEOUT, 5000).
+-define(NODE_START_TIMEOUT, 5000).
+-define(LOCAL_PROXY_START_TIMEOUT, ?NODE_START_TIMEOUT * 4).
+-define(DEFAULT_DEBUGS,
+ [{ctrl, info}, {slave, silence}, {proxy, silence}, {client, silence}]).
+-define(DEFAULT_WORK_SIM, 10000).
+-define(DEFAULT_DATA_SIZE_START, 500).
+-define(DEFAULT_DATA_SIZE_END, 1500).
+-define(DEFAULT_DATA_SIZE_INCR, 1).
+-define(DEFAULT_DATA_SIZE, {?DEFAULT_DATA_SIZE_START,
+ ?DEFAULT_DATA_SIZE_END,
+ ?DEFAULT_DATA_SIZE_INCR}).
+
+
+%% hdlt = httpd load test
+
+-define(COLLECTOR, hdlt_ctrl).
+-define(RESULTS_TAB, hdlt_results).
+
+-define(CLIENT_MOD, hdlt_client).
+-define(CLIENT_NODE_NAME, ?CLIENT_MOD).
+
+-define(SERVER_MOD, hdlt_server).
+-define(SERVER_NODE_NAME, ?SERVER_MOD).
+
+-define(LOGGER, hdlt_logger).
+
+
+-record(state,
+ {
+ url,
+ test_time,
+ send_rate,
+ http_server,
+ http_port,
+ results = ?RESULTS_TAB,
+ nodes,
+ server_root,
+ doc_root,
+ server_dir,
+ work_dir,
+ server_conn,
+ client_conns = [],
+ client_mod = ?CLIENT_MOD,
+ clients,
+ nof_schedulers = 0,
+ max_nof_schedulers,
+ socket_type,
+ server_cert_file,
+ client_cert_file,
+ debugs,
+ client_sz_from,
+ client_sz_to,
+ client_sz_incr
+ }
+ ).
+
+-record(proxy,
+ {
+ mode,
+ mod,
+ connection,
+ channel,
+ host,
+ cmd,
+ node_name,
+ node,
+ ref,
+ erl_path,
+ paths,
+ args
+ }).
+
+-record(connection,
+ {
+ proxy,
+ node,
+ node_name,
+ host
+ }).
+
+
+-record(client, {host, path, version}).
+-record(server, {host, path, version}).
+
+
+start(Config) when is_list(Config) ->
+ proc_lib:start_link(?MODULE, init, [Config]).
+
+stop() ->
+ global:send(?COLLECTOR, stop).
+
+init(Config) ->
+ %% io:format("Config: ~n~p~n", [Config]),
+ case (catch do_init(Config)) of
+ {ok, State} ->
+ proc_lib:init_ack({ok, self()}),
+ loop(State);
+ {error, _Reason} = Error ->
+ proc_lib:init_ack(Error),
+ ok;
+ {'EXIT', Reason} ->
+ proc_lib:init_ack({error, Reason}),
+ ok
+ end.
+
+do_init(Config) ->
+ %% Do not trap exit, but register ourself
+ global:register_name(?COLLECTOR, self()),
+
+ State = #state{},
+ ets:new(State#state.results, [bag, named_table]),
+
+ hdlt_logger:start(),
+ global:sync(),
+
+ %% Maybe enable debug
+ Debugs = get_debugs(Config),
+ ?SET_NAME("HDLT CTRL"),
+ set_debug_level(Debugs),
+
+ ?DEBUG("network info: "
+ "~n Global names: ~p"
+ "~n Nodes: ~p", [global:registered_names(), nodes()]),
+
+ %% Read config
+ ?LOG("read config", []),
+ SendRate = get_send_rate(Config),
+ Clients = get_clients(Config),
+ TestTime = get_test_time(Config),
+ Server = get_server(Config),
+ Port = get_port(Config),
+ ServerDir = get_server_dir(Config),
+ WorkingDir = get_work_dir(Config),
+ MaxNofSchedulers = get_max_nof_schedulers(Config),
+ SocketType = get_socket_type(Config),
+ ServerCertFile = get_server_cert_file(Config),
+ ClientCertFile = get_client_cert_file(Config),
+ WorkSim = get_work_sim(Config),
+ {From, To, Incr} = get_data_size(Config),
+
+ URL = url(Server, Port, SocketType, WorkSim),
+ ServerRoot = filename:join(ServerDir, "server_root"),
+ DocRoot = ServerRoot, %% Not really used in this test
+
+ ?DEBUG("randomize setup", []),
+ randomized_sizes_init(),
+
+ %% Start used applications
+ ?DEBUG("ensure crypto started", []),
+ crypto:start(),
+ ?DEBUG("ensure ssh started", []),
+ ssh:start(),
+
+ State2 = State#state{server_root = ServerRoot,
+ doc_root = DocRoot,
+ server_dir = ServerDir,
+ work_dir = WorkingDir,
+ max_nof_schedulers = MaxNofSchedulers,
+ socket_type = SocketType,
+ server_cert_file = ServerCertFile,
+ client_cert_file = ClientCertFile,
+ http_server = Server,
+ http_port = Port,
+ url = URL,
+ test_time = TestTime,
+ send_rate = SendRate,
+ clients = Clients,
+ debugs = Debugs,
+ client_sz_from = From,
+ client_sz_to = To,
+ client_sz_incr = Incr},
+
+ ?LOG("prepare server host", []),
+ prepare_server_host(State2),
+
+ ?LOG("prepare client hosts", []),
+ State3 = prepare_client_hosts(State2),
+
+ ?LOG("basic init done", []),
+ {ok, State3}.
+
+
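+%% Main controller loop: one complete test run is performed per
+%% scheduler count (0 = "-smp disable", then 1..max_nof_schedulers);
+%% when all runs are done the collected data is analysed and saved.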
+loop(#state{nof_schedulers = N, max_nof_schedulers = M} = State) when N > M ->
+
+ ?INFO("Starting to analyse data", []),
+
+ AnalysedTab = analyse_data(State),
+
+ Files = save_results_to_file(AnalysedTab, State),
+ io:format("~n******************************************************"
+ "~n~nResult(s) saved to: ~n~p~n", [Files]),
+ clean_up(State);
+
+loop(#state{url = URL,
+ test_time = TestTime,
+ send_rate = SendRate,
+ nof_schedulers = NofSchedulers} = State) ->
+
+ {StartH, StartM, StartS} = erlang:time(),
+
+ ?INFO("Performing test with ~p smp-scheduler(s): ~n"
+ " It will take a minimum of ~p seconds. ~n"
+ " Start time: ~.2.0w:~.2.0w:~.2.0w",
+ [NofSchedulers, round(TestTime/1000), StartH, StartM, StartS]),
+
+ %% Start the server node
+ %% (The local proxy, the node, the remote proxy, and the inets framework)
+ State1 = start_server_node(State),
+ ?DEBUG("nodes after server start: ~p", [nodes() -- [node()]]),
+
+ %% Start the client node(s)
+ %% (The local proxy, the node, the remote proxy, and the inets framework)
+ ?LOG("start client node(s)", []),
+ State2 = start_client_nodes(State1),
+ ?DEBUG("nodes after client(s) start: ~p", [nodes() -- [node()]]),
+
+ ?LOG("start server", []),
+ start_server(State2),
+
+ ?LOG("start clients", []),
+ start_clients(State2, URL, TestTime, SendRate),
+
+ ?LOG("release clients", []),
+ release_clients(State2),
+
+ ?LOG("collect data", []),
+ collect_data(State2),
+
+ ?LOG("stop all nodes", []),
+ State3 = stop_nodes(State2),
+
+ ?INFO("Test with ~p smp-scheduler(s) complete"
+ "~n~n"
+ "****************************************************************"
+ "~n",
+ [NofSchedulers]),
+ loop(State3#state{nof_schedulers = NofSchedulers + 1}).
+
+
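+%% Provision the server host over sftp: create the server root with
+%% ebin, log and cgi-bin subdirectories, copy the helper modules and,
+%% for the ssl socket types, the server certificate.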
+prepare_server_host(#state{server_root = ServerRoot,
+ http_server = #server{host = Host},
+ socket_type = SocketType,
+ server_cert_file = CertFile}) ->
+ ?INFO("prepare server host ~s", [Host]),
+ Opts = [{user_interaction, false},
+ {silently_accept_hosts, true},
+ {timeout, 2*?SSH_CONNECT_TIMEOUT},
+ {connect_timeout, ?SSH_CONNECT_TIMEOUT}],
+ case ssh_sftp:start_channel(Host, Opts) of
+ {ok, Sftp, ConnectionRef} ->
+ ?DEBUG("sftp connection established - now transfer server content",
+ []),
+ create_server_content(Sftp, ServerRoot, SocketType, CertFile),
+ ?DEBUG("server content transferred - now close ssh connection ",
+ []),
+ ssh:close(ConnectionRef),
+ ?DEBUG("server preparation complete ", []),
+ ok;
+ Error ->
+ ?INFO("FAILED creating sftp channel to server host ~s: "
+ "~n ~p", [Host, Error]),
+ exit({failed_establishing_sftp_connection, Error})
+ end.
+
+create_server_content(Sftp, ServerRoot, SocketType, CertFile) ->
+ %% Create server root
+ ?DEBUG("ensure existence of ~p", [ServerRoot]),
+ ensure_remote_dir_exist(Sftp, ServerRoot),
+
+ %% Create the server ebin dir (for the starter module)
+ EBIN = filename:join(ServerRoot, "ebin"),
+ ?DEBUG("make ebin dir: ~p", [EBIN]),
+ maybe_create_remote_dir(Sftp, EBIN),
+
+ %% Create the server log dir
+ LOG = filename:join(ServerRoot, "log"),
+ ?DEBUG("make log dir: ~p", [LOG]),
+ maybe_create_remote_dir(Sftp, LOG),
+
+ LocalServerMod = local_server_module(),
+ ?DEBUG("copy server stub/proxy module ~s", [LocalServerMod]),
+ RemoteServerMod = remote_server_module(EBIN),
+ {ok, ServerModBin} = file:read_file(LocalServerMod),
+ ok = ssh_sftp:write_file(Sftp, RemoteServerMod, ServerModBin),
+
+ LocalSlaveMod = local_slave_module(),
+ ?DEBUG("copy slave module ~s", [LocalSlaveMod]),
+ RemoteSlaveMod = remote_slave_module(EBIN),
+ {ok, SlaveModBin} = file:read_file(LocalSlaveMod),
+ ok = ssh_sftp:write_file(Sftp, RemoteSlaveMod, SlaveModBin),
+
+ LocalLoggerMod = local_logger_module(),
+ ?DEBUG("copy logger module ~s", [LocalLoggerMod]),
+ RemoteLoggerMod = remote_logger_module(EBIN),
+ {ok, LoggerModBin} = file:read_file(LocalLoggerMod),
+ ok = ssh_sftp:write_file(Sftp, RemoteLoggerMod, LoggerModBin),
+
+ %% Create the inets server data dir
+ CGI = filename:join(ServerRoot, "cgi-bin"),
+ ?DEBUG("make cgi dir: ~p", [CGI]),
+ maybe_create_remote_dir(Sftp, CGI),
+
+ LocalRandomMod = local_random_html_module(),
+ ?DEBUG("copy random-html module ~s", [LocalRandomMod]),
+ RemoteRandomMod = remote_random_html_module(EBIN),
+ {ok, RandomModBin} = file:read_file(LocalRandomMod),
+ ok = ssh_sftp:write_file(Sftp, RemoteRandomMod, RandomModBin),
+
+ case SocketType of
+ ip_comm ->
+ ok;
+ _ ->
+ SSLDir = filename:join(ServerRoot, "ssl"),
+ ?DEBUG("make conf dir: ~p", [SSLDir]),
+ maybe_create_remote_dir(Sftp, SSLDir),
+ ?DEBUG("copy ssl cert file ~s", [CertFile]),
+ {ok, CertBin} = file:read_file(CertFile),
+ RemoteCertFile = filename:join(SSLDir,
+ filename:basename(CertFile)),
+ ok = ssh_sftp:write_file(Sftp, RemoteCertFile, CertBin),
+ ok
+ end,
+
+ ?DEBUG("done", []),
+ ok.
+
+remote_server_module(Path) ->
+ Mod = server_module(),
+ filename:join(Path, Mod).
+
+local_server_module() ->
+ Mod = server_module(),
+ case code:where_is_file(Mod) of
+ Path when is_list(Path) ->
+ Path;
+ _ ->
+ exit({server_module_not_found, Mod})
+ end.
+
+server_module() ->
+ module(?SERVER_MOD).
+
+
+prepare_client_hosts(#state{work_dir = WorkDir,
+ clients = Clients,
+ socket_type = SocketType,
+ client_cert_file = CertFile} = State) ->
+ Clients2 =
+ prepare_client_hosts(WorkDir, SocketType, CertFile, Clients, []),
+ State#state{clients = Clients2}.
+
+prepare_client_hosts(_WorkDir, _SocketType, _CertFile, [], Acc) ->
+ lists:reverse(Acc);
+prepare_client_hosts(WorkDir, SocketType, CertFile, [Client|Clients], Acc) ->
+ case prepare_client_host(WorkDir, SocketType, CertFile, Client) of
+ ok ->
+ prepare_client_hosts(WorkDir, SocketType, CertFile, Clients,
+ [Client|Acc]);
+ _ ->
+ prepare_client_hosts(WorkDir, SocketType, CertFile, Clients, Acc)
+ end.
+
+prepare_client_host(WorkDir, SocketType, CertFile, #client{host = Host}) ->
+ ?INFO("prepare client host ~s", [Host]),
+ Opts = [{user_interaction, false},
+ {silently_accept_hosts, true},
+ {timeout, 2*?SSH_CONNECT_TIMEOUT},
+ {connect_timeout, ?SSH_CONNECT_TIMEOUT}],
+ case ssh_sftp:start_channel(Host, Opts) of
+ {ok, Sftp, ConnectionRef} ->
+ ?DEBUG("sftp connection established - now transfer client content",
+ []),
+ create_client_content(Sftp, WorkDir, SocketType, CertFile),
+ ?DEBUG("client content transferred - now close ssh connection ", []),
+ ssh:close(ConnectionRef),
+ ?DEBUG("client preparation complete ", []),
+ ok;
+ Error ->
+ ?INFO("FAILED creating sftp channel to client host ~s: skipping"
+ "~n ~p", [Host, Error]),
+ Error
+ end.
+
+create_client_content(Sftp, WorkDir, SocketType, CertFile) ->
+ %% Create work dir
+ ?DEBUG("ensure existence of ~p", [WorkDir]),
+ ensure_remote_dir_exist(Sftp, WorkDir),
+
+ %% Create the client ebin dir
+ EBIN = filename:join(WorkDir, "ebin"),
+ RemoteClientMod = remote_client_module(EBIN),
+ ?DEBUG("make ebin dir: ~p", [EBIN]),
+ maybe_create_remote_dir(Sftp, EBIN),
+
+ LocalClientMod = local_client_module(),
+ ?DEBUG("copy client stub/proxy module ~s", [LocalClientMod]),
+ {ok, ClientModBin} = file:read_file(LocalClientMod),
+ ok = ssh_sftp:write_file(Sftp, RemoteClientMod, ClientModBin),
+
+ LocalSlaveMod = local_slave_module(),
+ ?DEBUG("copy slave module ~s", [LocalSlaveMod]),
+ RemoteSlaveMod = remote_slave_module(EBIN),
+ {ok, SlaveModBin} = file:read_file(LocalSlaveMod),
+ ok = ssh_sftp:write_file(Sftp, RemoteSlaveMod, SlaveModBin),
+
+ LocalLoggerMod = local_logger_module(),
+ ?DEBUG("copy logger module ~s", [LocalLoggerMod]),
+ RemoteLoggerMod = remote_logger_module(EBIN),
+ {ok, LoggerModBin} = file:read_file(LocalLoggerMod),
+ ok = ssh_sftp:write_file(Sftp, RemoteLoggerMod, LoggerModBin),
+
+ case SocketType of
+ ip_comm ->
+ ok;
+ _ ->
+ %% We should really store the remote path somewhere as
+ %% we use it when starting the client service...
+ SSLDir = filename:join(WorkDir, "ssl"),
+ ?DEBUG("make ssl dir: ~p", [SSLDir]),
+ maybe_create_remote_dir(Sftp, SSLDir),
+ ?DEBUG("copy ssl cert file ~s", [CertFile]),
+ {ok, CertBin} = file:read_file(CertFile),
+ RemoteCertFile = filename:join(SSLDir,
+ filename:basename(CertFile)),
+ ok = ssh_sftp:write_file(Sftp, RemoteCertFile, CertBin),
+ ok
+ end,
+
+ ?DEBUG("done", []),
+ ok.
+
+remote_client_module(Path) ->
+ Mod = client_module(),
+ filename:join(Path, Mod).
+
+local_client_module() ->
+ Mod = client_module(),
+ case code:where_is_file(Mod) of
+ Path when is_list(Path) ->
+ Path;
+ _ ->
+ exit({client_module_not_found, Mod})
+ end.
+
+client_module() ->
+ module(?CLIENT_MOD).
+
+
+remote_slave_module(Path) ->
+ Mod = slave_module(),
+ filename:join(Path, Mod).
+
+local_slave_module() ->
+ Mod = slave_module(),
+ case code:where_is_file(Mod) of
+ Path when is_list(Path) ->
+ Path;
+ _ ->
+ exit({slave_module_not_found, Mod})
+ end.
+
+slave_module() ->
+ module(hdlt_slave).
+
+
+remote_logger_module(Path) ->
+ Mod = logger_module(),
+ filename:join(Path, Mod).
+
+local_logger_module() ->
+ Mod = logger_module(),
+ case code:where_is_file(Mod) of
+ Path when is_list(Path) ->
+ Path;
+ _ ->
+ exit({logger_module_not_found, Mod})
+ end.
+
+logger_module() ->
+ module(hdlt_logger).
+
+
+remote_random_html_module(Path) ->
+ Mod = random_html_module(),
+ filename:join(Path, Mod).
+
+local_random_html_module() ->
+ Mod = random_html_module(),
+ case code:where_is_file(Mod) of
+ Path when is_list(Path) ->
+ Path;
+ _ ->
+ exit({random_module_not_found, Mod})
+ end.
+
+random_html_module() ->
+ module(hdlt_random_html).
+
+
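+%% Map a module name to the object-code file name for this emulator,
+%% e.g. hdlt_client -> "hdlt_client.beam" on a BEAM emulator.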
+module(Mod) ->
+ Ext = string:to_lower(erlang:system_info(machine)),
+ lists:flatten(io_lib:format("~w.~s", [Mod, Ext])).
+
+
+%% -----------------------------------------------------------------------
+%% - For every node created (both server and client) there are two
+%%   proxies: one local and one remote.
+%% - The local proxy runs on the local (controller/collector) node.
+%% - The remote proxy runs on the client or server node(s).
+%% - The local (ctrl) proxy monitors the remote (server/client) proxy.
+%% - The remote (server/client) proxy monitors the local (ctrl) proxy.
+%%
+
+start_client_nodes(#state{clients = Clients,
+ work_dir = WorkDir,
+ debugs = Debugs} = State) ->
+ Connections =
+ [start_client_node(Client, WorkDir, Debugs) || Client <- Clients],
+ State#state{client_conns = Connections}.
+
+start_client_node(#client{path = ErlPath, host = Host}, WorkDir, Debugs) ->
+ ?INFO("start client on host ~p", [Host]),
+ EbinDir = filename:join(WorkDir, "ebin"),
+ start_client_node(Host, ErlPath, [EbinDir], Debugs).
+
+start_client_node(Host, ErlPath, Paths, Debugs) ->
+ start_node(Host, ?CLIENT_NODE_NAME,
+ ErlPath, Paths, [], ?CLIENT_MOD, Debugs).
+
+
+start_server_node(#state{http_server = #server{path = ErlPath, host = Host},
+ server_root = ServerRoot,
+ nof_schedulers = NofScheds,
+ debugs = Debugs} = State) ->
+ ?INFO("start server on host ~p", [Host]),
+ CgiBinDir = filename:join(ServerRoot, "cgi-bin"),
+ EbinDir = filename:join(ServerRoot, "ebin"),
+ Connection =
+ start_server_node(Host, ErlPath, [CgiBinDir, EbinDir],
+ Debugs, NofScheds),
+ State#state{server_conn = Connection}.
+
+start_server_node(Host, ErlPath, Paths, Debugs, NofScheds) ->
+ Args =
+ if
+ NofScheds =:= 0 ->
+ "-smp disable";
+ true ->
+ lists:flatten(io_lib:format("-smp +S ~w", [NofScheds]))
+ end,
+ start_node(Host, ?SERVER_NODE_NAME,
+ ErlPath, Paths, Args, ?SERVER_MOD, Debugs).
+
+
+%% -----------------------------------------------------------------------
+%% - For every node created (both server and client) there are two
+%%   proxies: one local and one remote.
+%% - The local proxy runs on the local (controller/collector) node.
+%% - The remote proxy runs on the client or server node(s).
+%% - The local (ctrl) proxy monitors the remote (server/client) proxy.
+%% - The remote (server/client) proxy monitors the local (ctrl) proxy.
+%%
+
+start_node(Host, NodeName, ErlPath, Paths, Args, Module, Debugs) ->
+ %% Start the (local) proxy
+ ?DEBUG("start_node -> start local proxy and remote node", []),
+ ProxyDebug = proplists:get_value(proxy, Debugs, silence),
+ Proxy = proxy_start(Host, NodeName, ErlPath, Paths, Args, Module,
+ ProxyDebug),
+
+ ?DEBUG("start_node -> local proxy started - now start node", []),
+ SlaveDebug = proplists:get_value(slave, Debugs, silence),
+ Node = proxy_start_node(Proxy, SlaveDebug),
+
+ ?DEBUG("start_node -> sync global", []),
+ global:sync(),
+
+ ?DEBUG("start_node -> start remote proxy", []),
+ proxy_start_remote(Proxy),
+
+ ?DEBUG("start_node -> start (remote) inets framework", []),
+ proxy_start_inets(Proxy),
+
+ ?DEBUG("start_node -> done", []),
+ #connection{proxy = Proxy, node = Node, node_name = NodeName, host = Host}.
+
+
+proxy_start(Host, NodeName, ErlPath, Paths, Args, Module, Debug) ->
+ ?LOG("try starting local proxy for ~p@~s", [NodeName, Host]),
+ ProxyArgs = [Host, NodeName, ErlPath, Paths, Args, Module, Debug],
+ case proc_lib:start_link(?MODULE, proxy,
+ ProxyArgs, ?LOCAL_PROXY_START_TIMEOUT) of
+ {ok, Proxy} ->
+ Proxy;
+ Error ->
+ exit({failed_starting_proxy, Error})
+ end.
+
+proxy_start_node(Proxy, Debug) ->
+ {ok, Node} = proxy_request(Proxy, {start_node, Debug}),
+ Node.
+
+proxy_start_remote(Proxy) ->
+ proxy_request(Proxy, start_remote_proxy).
+
+proxy_start_inets(Proxy) ->
+ proxy_request(Proxy, start_inets).
+
+proxy_start_service(Proxy, Args) ->
+ proxy_request(Proxy, {start_service, Args}).
+
+proxy_release(Proxy) ->
+ proxy_request(Proxy, release).
+
+proxy_stop(Proxy) ->
+ StopResult = proxy_request(Proxy, stop),
+ ?DEBUG("proxy stop result: ~p", [StopResult]),
+ StopResult.
+
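+%% Simple synchronous request/reply protocol towards the local proxy,
+%% using a fresh reference to match the reply to the request.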
+proxy_request(Proxy, Req) ->
+ Ref = make_ref(),
+ Proxy ! {proxy_request, Ref, self(), Req},
+ receive
+ {proxy_reply, Ref, Proxy, Rep} ->
+ Rep
+ end.
+
+proxy_reply(From, Ref, Rep) ->
+ From ! {proxy_reply, Ref, self(), Rep}.
+
+proxy(Host, NodeName, ErlPath, Paths, Args, Module, Debug) ->
+ process_flag(trap_exit, true),
+ SName = lists:flatten(
+ io_lib:format("HDLT CTRL PROXY[~p,~s,~w]",
+ [self(), Host, NodeName])),
+ ?SET_NAME(SName),
+ ?SET_LEVEL(Debug),
+ ?LOG("starting with"
+ "~n Host: ~p"
+ "~n NodeName: ~p"
+ "~n ErlPath: ~p"
+ "~n Paths: ~p"
+ "~n Args: ~p"
+ "~n Module: ~p", [Host, NodeName, ErlPath, Paths, Args, Module]),
+ State = #proxy{mode = started,
+ mod = Module,
+ host = Host,
+ node_name = NodeName,
+ erl_path = ErlPath,
+ paths = Paths,
+ args = Args},
+ proc_lib:init_ack({ok, self()}),
+ ?DEBUG("started", []),
+ proxy_loop(State).
+
+
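+%% The local proxy passes through three modes: 'started' (no node yet),
+%% 'operational' (remote node and remote proxy are up) and 'stopping'
+%% (remote proxy gone, awaiting the final stop order).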
+proxy_loop(#proxy{mode = stopping}) ->
+ receive
+ {proxy_request, Ref, From, stop} ->
+ ?LOG("[stopping] received stop order", []),
+ proxy_reply(From, Ref, ok),
+ exit(normal);
+
+ {'EXIT', Pid, Reason} ->
+ ?INFO("[stopping] received exit message from ~p: "
+ "~n Reason: ~p", [Pid, Reason]),
+ exit(Reason)
+
+ end;
+
+proxy_loop(#proxy{mode = started,
+ host = Host,
+ node_name = NodeName,
+ erl_path = ErlPath,
+ paths = Paths,
+ args = Args} = State) ->
+ receive
+ {proxy_request, Ref, From, {start_node, Debug}} ->
+ ?LOG("[starting] received start_node order", []),
+ case hdlt_slave:start_link(Host, NodeName,
+ ErlPath, Paths, Args,
+ Debug) of
+ {ok, Node} ->
+ ?DEBUG("[starting] node ~p started - now monitor", [Node]),
+ erlang:monitor_node(Node, true),
+ State2 = State#proxy{mode = operational,
+ node = Node},
+ proxy_reply(From, Ref, {ok, Node}),
+ proxy_loop(State2);
+ {error, Reason} ->
+ ?INFO("[starting] failed starting node: "
+ "~n Reason: ~p", [Reason]),
+ exit({failed_starting_node, {Host, NodeName, Reason}})
+ end;
+
+ {'EXIT', Pid, Reason} ->
+ ?INFO("[starting] received exit message from ~p: "
+ "~n Reason: ~p", [Pid, Reason]),
+ exit(Reason)
+
+ end;
+
+proxy_loop(#proxy{mode = operational,
+ mod = Mod,
+ node = Node} = State) ->
+ ?DEBUG("[operational] await command", []),
+ receive
+ {proxy_request, Ref, From, start_remote_proxy} ->
+ ?LOG("[operational] start remote proxy", []),
+ case rpc:call(Node, Mod, start, [?GET_LEVEL()]) of
+ {ok, Pid} ->
+ ?DEBUG("[operational] remote proxy started (~p) - "
+ "create monitor", [Pid]),
+ ProxyRef = erlang:monitor(process, Pid),
+ ?DEBUG("[operational] monitor: ~p", [ProxyRef]),
+ proxy_reply(From, Ref, ok),
+ proxy_loop(State#proxy{ref = ProxyRef});
+ Error ->
+ ?INFO("[operational] failed starting remote proxy"
+ "~n Error: ~p", [Error]),
+ ReplyReason = {failed_starting_remote_proxy,
+ {Node, Error}},
+ Reply = {error, ReplyReason},
+ proxy_reply(From, Ref, Reply),
+ exit({failed_starting_remote_proxy, {Node, Error}})
+ end;
+
+ {proxy_request, Ref, From, start_inets} ->
+ ?INFO("[operational] start inets framework", []),
+ rpc:cast(Node, Mod, start_inets, []),
+ proxy_reply(From, Ref, ok),
+ proxy_loop(State);
+
+ {proxy_request, Ref, From, {start_service, Args}} ->
+ ?INFO("[operational] start service with"
+ "~n ~p", [Args]),
+ case rpc:call(Node, Mod, start_service, Args) of
+ ok ->
+ ?DEBUG("[operational] service started", []),
+ proxy_reply(From, Ref, ok),
+ proxy_loop(State);
+ Error ->
+ ?INFO("[operational] failed starting service: "
+ "~n   Args:  ~p"
+ "~n Error: ~p", [Args, Error]),
+ erlang:demonitor(State#proxy.ref, [flush]),
+ Reply = {error, {failed_starting_service, Node, Error}},
+ proxy_reply(From, Ref, Reply),
+ exit({failed_starting_service, Node, Error})
+ end;
+
+ {proxy_request, Ref, From, release} ->
+ ?INFO("[operational] release", []),
+ rpc:call(Node, Mod, release, []),
+ proxy_reply(From, Ref, ok),
+ proxy_loop(State);
+
+ {proxy_request, Ref, From, stop} ->
+ ?INFO("[operational] received stop order", []),
+ erlang:demonitor(State#proxy.ref, [flush]),
+ ?DEBUG("[operational] rpc cast stop order", []),
+ rpc:cast(Node, Mod, stop, []),
+ %% And wait for the node death to be reported
+ Reason =
+ receive
+ {nodedown, Node} when State#proxy.node =:= Node ->
+ ok
+ after 10000 ->
+ ?INFO("Node did not die within the expected time frame",
+ []),
+ {node_death_timeout, Node}
+ end,
+ ?DEBUG("[operational] ack stop", []),
+ proxy_reply(From, Ref, Reason),
+ exit(normal);
+
+ {nodedown, Node} when State#proxy.node =:= Node ->
+ ?INFO("[operational] received unexpected nodedown message", []),
+ exit({node_died, Node});
+
+ {'DOWN', Ref, process, _, normal} when State#proxy.ref =:= Ref ->
+ ?INFO("[operational] remote proxy terminated normally", []),
+ proxy_loop(State#proxy{ref = undefined,
+ connection = undefined,
+ mode = stopping});
+
+ {'DOWN', Ref, process, _, noconnection} when State#proxy.ref =:= Ref ->
+ ?INFO("[operational] remote proxy terminated - no node", []),
+ proxy_loop(State#proxy{ref = undefined,
+ connection = undefined,
+ mode = stopping});
+
+ {'DOWN', Ref, process, _, Reason} when State#proxy.ref =:= Ref ->
+ ?INFO("[operational] remote proxy terminated: "
+ "~n Reason: ~p", [Reason]),
+ exit({remote_proxy_crash, Reason});
+
+ {'EXIT', Pid, Reason} ->
+ ?INFO("[operational] received unexpected exit message from ~p: "
+ "~n Reason: ~p", [Pid, Reason]),
+ proxy_loop(State)
+
+ end.
+
+
+stop_nodes(#state{server_conn = ServerConn,
+ client_conns = ClientConns} = State) ->
+ lists:foreach(
+ fun(#connection{proxy = Proxy, node_name = NodeName, host = Host}) ->
+ ?DEBUG("stop_erlang_nodes -> send stop order to local proxy ~p"
+ "~n for node ~p on ~s", [Proxy, NodeName, Host]),
+ proxy_stop(Proxy)
+ end,
+ ClientConns ++ [ServerConn]),
+ ?DEBUG("stop_erlang_nodes -> sleep some to give the nodes time to die",
+ []),
+ timer:sleep(1000),
+ ?DEBUG("stop_erlang_nodes -> and a final cleanup round", []),
+ lists:foreach(fun(Node) ->
+ ?INFO("try brutal stop node ~p", [Node]),
+ rpc:cast(Node, erlang, halt, [])
+ end,
+ nodes() -- [node()]),
+ ?DEBUG("stop_erlang_nodes -> done", []),
+ State#state{server_conn = undefined, client_conns = []}.
+
+
+%% The nodes on which the HDLT clients run have been started previously
+start_clients(#state{client_conns = Connections,
+ debugs = Debugs,
+ work_dir = WorkDir,
+ socket_type = SocketType,
+ client_cert_file = CertFile,
+ client_sz_from = From,
+ client_sz_to = To,
+ client_sz_incr = Incr},
+ URL, TestTime, SendRate) ->
+ Debug = proplists:get_value(client, Debugs, silence),
+ StartClient =
+ fun(#connection{host = Host} = Connection) ->
+ ?DEBUG("start client on ~p", [Host]),
+ start_client(Connection,
+ WorkDir, SocketType, CertFile,
+ URL, From, To, Incr,
+ TestTime, SendRate, Debug);
+ (_) ->
+ ok
+ end,
+ lists:foreach(StartClient, Connections).
+
+start_client(#connection{proxy = Proxy},
+ WorkDir, SocketType, LocalCertFile,
+ URL, From, To, Incr,
+ TestTime, SendRate, Debug) ->
+ SSLDir = filename:join(WorkDir, "ssl"),
+ CertFile = filename:join(SSLDir, filename:basename(LocalCertFile)),
+ Sizes = randomized_sizes(From, To, Incr),
+ Args = [SocketType, CertFile, URL, Sizes, TestTime, SendRate, Debug],
+ proxy_start_service(Proxy, [Args]).
+
+release_clients(#state{client_conns = Connections}) ->
+ ReleaseClient =
+ fun(#connection{proxy = Proxy,
+ host = Host}) ->
+ ?DEBUG("release client on ~p", [Host]),
+ proxy_release(Proxy);
+ (_) ->
+ ok
+ end,
+ lists:foreach(ReleaseClient, Connections).
+
+
+start_server(#state{server_conn = #connection{proxy = Proxy},
+ http_port = Port,
+ server_root = ServerRoot,
+ doc_root = DocRoot,
+ socket_type = SocketType,
+ server_cert_file = CertFile}) ->
+
+ HttpdConfig =
+ httpd_config(Port, "hdlt", ServerRoot, DocRoot, SocketType, CertFile),
+ ?LOG("start the httpd inets service with config: "
+ "~n ~p", [HttpdConfig]),
+ proxy_start_service(Proxy, [HttpdConfig]),
+ ?DEBUG("start_server -> done", []),
+ ok.
+
+
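+%% Build the httpd (inets) configuration property list. For the ssl
+%% socket types the certificate copied to <ServerRoot>/ssl is used as
+%% both certificate and key file.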
+httpd_config(Port, ServerName, ServerRoot, DocRoot,
+ SocketType, LocalCertFile) ->
+ LogDir = filename:join(ServerRoot, "log"),
+ ErrorLog = filename:join(LogDir, "error_log"),
+ TransferLog = filename:join(LogDir, "access_log"),
+
+ SSL =
+ case SocketType of
+ ip_comm ->
+ [];
+ _ -> % ssl
+ SSLDir = filename:join(ServerRoot, "ssl"),
+ CertFile =
+ filename:join(SSLDir, filename:basename(LocalCertFile)),
+ [
+ {ssl_certificate_file, CertFile},
+ {ssl_certificate_key_file, CertFile},
+ {ssl_verify_client, 0}
+ ]
+ end,
+ [{port, Port},
+ {server_name, ServerName},
+ {server_root, ServerRoot},
+ {document_root, DocRoot},
+ {error_log, ErrorLog},
+ {error_log_format, pretty},
+ {transfer_log, TransferLog},
+ {socket_type, SocketType},
+ {max_clients, 10000},
+ {modules, [mod_alias, mod_auth, mod_esi, mod_actions, mod_cgi,
+ mod_dir, mod_get, mod_head, mod_log, mod_disk_log]},
+ {script_alias, {"/cgi-bin", filename:join(ServerRoot, "cgi-bin")}},
+ {erl_script_alias, {"/cgi-bin", [hdlt_random_html]}},
+ {erl_script_timeout, 120000} | SSL].
+
+
+clean_up(#state{server_root = ServerRoot,
+ work_dir = WorkDir,
+ http_server = #server{host = Host},
+ clients = Clients}) ->
+ ?DEBUG("begin server cleanup", []),
+ server_clean_up(ServerRoot, WorkDir, Host),
+ ?DEBUG("begin client cleanup", []),
+ clients_clean_up(WorkDir, Clients),
+ ?DEBUG("cleanup done", []),
+ ok.
+
+server_clean_up(ServerRoot, WorkDir, Host) ->
+ ?DEBUG("server cleanup - create sftp channel", []),
+ {ok, Sftp, ConnectionRef} =
+ ssh_sftp:start_channel(Host, [{user_interaction, false},
+ {silently_accept_hosts, true}]),
+ ?DEBUG("server cleanup - delete ~p dirs", [ServerRoot]),
+ del_dirs(Sftp, ServerRoot),
+ ?DEBUG("server cleanup - delete ~p dirs", [WorkDir]),
+ del_dirs(Sftp, WorkDir),
+ ?DEBUG("server cleanup - close sftp channel", []),
+ ssh:close(ConnectionRef).
+
+clients_clean_up(_WorkDir, []) ->
+ ok;
+clients_clean_up(WorkDir, [Client|Clients]) ->
+ client_clean_up(WorkDir, Client),
+ clients_clean_up(WorkDir, Clients).
+
+client_clean_up(WorkDir, #client{host = Host}) ->
+ ?DEBUG("client cleanup - create sftp channel to ~p", [Host]),
+ {ok, Sftp, ConnectionRef} =
+ ssh_sftp:start_channel(Host, [{user_interaction, false},
+ {silently_accept_hosts, true}]),
+ ?DEBUG("client cleanup - delete ~p dirs", [WorkDir]),
+ del_dirs(Sftp, WorkDir),
+ ?DEBUG("client cleanup - close sftp channel", []),
+ ssh:close(ConnectionRef).
+
+
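+%% Recursively delete a remote directory tree via sftp.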
+del_dirs(Sftp, Dir) ->
+ case ssh_sftp:list_dir(Sftp, Dir) of
+ {ok, []} ->
+ ssh_sftp:del_dir(Sftp, Dir);
+ {ok, Files} ->
+ Files2 = [F || F <- Files, (F =/= "..") andalso (F =/= ".")],
+ lists:foreach(fun(File) when ((File =/= "..") andalso
+ (File =/= ".")) ->
+ FullPath = filename:join(Dir, File),
+ case ssh_sftp:read_file_info(Sftp,
+ FullPath) of
+ {ok, #file_info{type = directory}} ->
+ del_dirs(Sftp, FullPath),
+ ssh_sftp:del_dir(Sftp, FullPath);
+ {ok, _} ->
+ ssh_sftp:delete(Sftp, FullPath)
+ end
+ end, Files2);
+ _ ->
+ ok
+ end.
+
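+%% Collect load data from the clients: first one req/reply report per
+%% client, then one time-to-complete report per client.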
+collect_data(#state{clients = Clients} = State) ->
+ N = length(Clients),
+ collect_req_reply(N, State),
+ collect_time(N, State).
+
+collect_req_reply(0, _State) ->
+ ?DEBUG("all reply data collected", []),
+ ok;
+collect_req_reply(N, #state{nof_schedulers = NofScheduler,
+ results = Db,
+ client_conns = Conns} = State) ->
+ ?DEBUG("await reply data from ~p client(s)", [N]),
+ receive
+ {load_data,
+ {req_reply, Client, NoRequests, NoReplys}} ->
+ ?DEBUG("received req_reply load-data from client ~p: "
+ "~n Number of requests: ~p"
+ "~n Number of replies: ~p",
+ [Client, NoRequests, NoReplys]),
+ ets:insert(Db, {{NofScheduler, Client},
+ {req_reply, NoRequests, NoReplys}});
+ stop ->
+ ?INFO("received stop", []),
+ exit(self(), stop);
+
+ {client_exit, Client, Node, Reason} ->
+ ?INFO("Received unexpected client exit from ~p on node ~p "
+ "while collecting replies: "
+ "~n ~p", [Client, Node, Reason]),
+ case lists:keysearch(Node, #connection.node, Conns) of
+ {value, Conn} ->
+ ?LOG("Found problem connection: "
+ "~n ~p", [Conn]),
+ exit({unexpected_client_exit, Reason});
+ false ->
+ collect_req_reply(N, State)
+ end
+ end,
+ collect_req_reply(N-1, State).
+
+collect_time(0, _State) ->
+ ?DEBUG("all time data collected", []),
+ ok;
+collect_time(N, #state{nof_schedulers = NofScheduler,
+ results = Db,
+ client_conns = Conns} = State) ->
+ ?DEBUG("await time data from ~p clients", [N]),
+ receive
+ {load_data,
+ {time_to_complete, Client, StopTime, LastResponseTime}} ->
+ ?LOG("received time load-data from client ~p: "
+ "~n Time of stop: ~p"
+ "~n Time of last response: ~p",
+ [Client, StopTime, LastResponseTime]),
+ ets:insert(Db, {{NofScheduler, Client},
+ {time, StopTime, LastResponseTime}});
+ stop ->
+ ?INFO("received stop while collecting data, when N = ~p", [N]),
+ exit(self(), stop);
+
+ {client_exit, Client, Node, Reason} ->
+ ?INFO("Received unexpected exit from client ~p on node ~p "
+ "while collecting time data: "
+ "~n ~p", [Client, Node, Reason]),
+ case lists:keysearch(Node, #connection.node, Conns) of
+ {value, Conn} ->
+ ?LOG("Found problem connection: "
+ "~n ~p", [Conn]),
+ exit({unexpected_client_exit, Reason});
+ false ->
+ collect_time(N, State)
+ end;
+
+ Else -> %%% Something is wrong!
+ ?INFO("RECEIVED UNEXPECTED MESSAGE WHILE COLLECTING TIME DATA: "
+ "~n ~p", [Else]),
+ collect_time(N, State)
+ end,
+ collect_time(N-1, State).
+
+analyse_data(#state{results = Db,
+ max_nof_schedulers = MaxNofSchedulers,
+ test_time = MicroSec}) ->
+ Tab = ets:new(analysed_results, [set]),
+ lists:foreach(fun(NofSchedulers) ->
+ Result = analyse(NofSchedulers, Db, MicroSec),
+ ets:insert(Tab, Result)
+ end, [N || N <- lists:seq(0, MaxNofSchedulers)]),
+ Tab.
+
+
+no_requests_replys(NoSchedulers, Tab) ->
+ NoRequests =
+ ets:select(Tab, [{{{NoSchedulers,'_'},{req_reply, '$1', '_'}},
+ [],['$$']}]),
+ NoReplys =
+ ets:select(Tab, [{{{NoSchedulers, '_'}, {req_reply, '_', '$1'}},
+ [], ['$$']}]),
+
+ {lists:sum(lists:append(NoRequests)),
+ lists:sum(lists:append(NoReplys))}.
+
+max_time_to_final_response(NofSchedulers, Tab) ->
+ Candidates =
+ ets:select(Tab, [{{{NofSchedulers, '_'}, {time, '$1', '$2'}},
+ [], ['$$']}]),
+
+ NewCandidates = lists:map(
+ fun([StopTime, LastTime]) ->
+ round(
+ timer:now_diff(LastTime, StopTime) / 100000)/10
+ end, Candidates),
+
+ lists:max(NewCandidates).
+
+
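+%% For a given scheduler count: compute replies per second, the total
+%% number of requests and the maximum "decay" time (the time from stop
+%% until the last response arrived, in seconds).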
+analyse(NofSchedulers, Db, TestTime) ->
+ Sec = TestTime / 1000,
+ {NoRequests, NoReplys} = no_requests_replys(NofSchedulers, Db),
+ {NofSchedulers, round(NoReplys / Sec), NoRequests,
+ max_time_to_final_response(NofSchedulers, Db)}.
+
+
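+%% Write one "<value>,<nof_schedulers>" line per scheduler count to
+%% each of the three result files (replies/sec, total requests and
+%% decay time).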
+save_results_to_file(AnalysedTab,
+ #state{socket_type = SocketType,
+ http_server = #server{host = Server},
+ max_nof_schedulers = MaxNofSchedulers}) ->
+ FileName = fun(Post) ->
+ File =
+ lists:flatten(
+ io_lib:format("~s_~w_~s",
+ [Server, SocketType, Post])),
+ filename:join("./", File)
+ end,
+ Reps = FileName("replys_per_sec.txt"),
+ Reqs = FileName("total_requests.txt"),
+ Decay = FileName("decay_time.txt"),
+
+ [FdReps, FdReqs, FdDecay] =
+ lists:map(fun(File) ->
+ {ok, Fd} = file:open(File, [write]),
+ Fd
+ end, [Reps, Reqs, Decay]),
+ lists:foreach(fun(NofSchedulers) ->
+ save_result_to_file(NofSchedulers,
+ FdReps, FdReqs,
+ FdDecay, AnalysedTab)
+ end, [N || N <- lists:seq(0, MaxNofSchedulers)]),
+ [Reps, Reqs, Decay].
+
+save_result_to_file(NofSchedulers,
+ FdReps, FdReqs, FdDecay, AnalysedTab) ->
+
+ [{NofSchedulers, NofRepsPerSec, NofReqs, MaxFinalResponseTime}] =
+ ets:lookup(AnalysedTab, NofSchedulers),
+
+ file:write(FdReps, io_lib:format("~p,~p~n",
+ [NofRepsPerSec, NofSchedulers])),
+ file:write(FdReqs, io_lib:format("~p,~p~n",
+ [NofReqs, NofSchedulers])),
+ file:write(FdDecay, io_lib:format("~p,~p~n", [MaxFinalResponseTime,
+ NofSchedulers])).
+
+
+help() ->
+ io:format("hdlt:start(Options). Where options:~n "
+ " ~n~p~n~n hdlt:start([]). -> hdlt:start(~p)~n~n",
+ [[{send_rate, "integer()",
+ "Number of outstanding requests that a client "
+ "should have during the test to create a load situation."},
+ {clients, "[{path(), host()}]", "Paths to erlang and names of hosts to run clients on."},
+ {test_time, "seconds()",
+ "For how many seconds the test should be run."},
+ {server, "{path(), host()}", "Path to erl and name of host to run the HTTP-server on."},
+ {port, "port()", "The port that the HTTP-server should use."},
+ {server_dir, "dir()", "The directory where the HTTP server "
+ "stores its contents and configuration."},
+ {work_dir, "dir()", "Path on the computer, where the test "
+ "is run, to a directory where the results can be saved."},
+ {max_nof_schedulers, "integer()",
+ "Max number of schedulers to run."},
+ {socket_type, "Httpd configuration option socket_type"}],
+ defaults()]).
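+
+%% A minimal example invocation (the host names and erl paths below are
+%% placeholders; every key not given falls back to defaults()):
+%%
+%%   hdlt:start([{server, {"/usr/local/bin", "server.example.org"}},
+%%               {clients, [{"/usr/local/bin", "client1.example.org"},
+%%                          {"/usr/local/bin", "client2.example.org"}]},
+%%               {test_time, 120},
+%%               {send_rate, 89}]).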
+
+
+defaults() ->
+ [{send_rate, ?DEFAULT_SENDRATE},
+ %% {clients, []},
+ {test_time, ?DEFAULT_TEST_TIME},
+ %% {server, ?DEFAULT_SERVER},
+ {port, ?DEFAULT_PORT},
+ {server_dir, ?DEFAULT_SERVER_DIR},
+ {work_dir, ?DEFAULT_WORK_DIR},
+ {max_nof_schedulers, ?DEFAULT_MAX_NOF_SCHEDULERS},
+ {socket_type, ?DEFAULT_SOCKET_TYPE}].
+
+
+get_debugs(Config) ->
+ ?DEBUG("get debugs", []),
+ Debugs = proplists:get_value(debug, Config, ?DEFAULT_DEBUGS),
+ verify_debugs(Debugs),
+ Debugs.
+
+verify_debugs([]) ->
+ ok;
+verify_debugs([{Tag, Debug}|Debugs]) ->
+ verify_debug(Tag, Debug),
+ verify_debugs(Debugs).
+
+verify_debug(Tag, Debug) ->
+ case lists:member(Tag, [ctrl, proxy, slave, client]) of
+ true ->
+ ok;
+ false ->
+ exit({bad_debug_tag, Tag})
+ end,
+ case lists:member(Debug, [silence, info, log, debug]) of
+ true ->
+ ok;
+ false ->
+ exit({bad_debug_level, Debug})
+ end.
+
+get_send_rate(Config) ->
+ ?DEBUG("get send_rate", []),
+ case proplists:get_value(send_rate, Config, ?DEFAULT_SENDRATE) of
+ SendRate when is_integer(SendRate) andalso (SendRate > 0) ->
+ SendRate;
+ BadSendRate ->
+ exit({bad_sendrate, BadSendRate})
+ end.
+
+
+get_clients(Config) ->
+ ?DEBUG("get clients", []),
+ case proplists:get_value(clients, Config, undefined) of
+ undefined ->
+ missing_mandatory_config(clients);
+ Clients when is_list(Clients) andalso (length(Clients) > 0) ->
+ case [#client{path = Path, host = Host} ||
+ {Path, Host} <- Clients] of
+ Clients2 when (length(Clients2) > 0) ->
+ Clients2;
+ _ ->
+ exit({bad_clients, Clients})
+ end;
+
+ BadClients ->
+ exit({bad_clients, BadClients})
+
+ end.
+
+get_server(Config) ->
+ ?DEBUG("get server", []),
+ case proplists:get_value(server, Config) of
+ {Path, Host} when is_list(Path) andalso is_list(Host) ->
+ #server{path = Path, host = Host};
+ undefined ->
+ missing_mandatory_config(server)
+ end.
+
+get_server_dir(Config) ->
+ ?DEBUG("get server_dir", []),
+ get_dir(server_dir, Config, ?DEFAULT_SERVER_DIR).
+
+get_work_dir(Config) ->
+ ?DEBUG("get work_dir", []),
+ get_dir(work_dir, Config, ?DEFAULT_WORK_DIR).
+
+get_dir(Key, Config, Default) ->
+ Dir = proplists:get_value(Key, Config, Default),
+ ensure_absolute(Dir),
+ Dir.
+
+ensure_absolute(Path) ->
+ case filename:pathtype(Path) of
+ absolute ->
+ ok;
+ PathType ->
+ exit({bad_pathtype, Path, PathType})
+ end.
+
+get_port(Config) ->
+ ?DEBUG("get port", []),
+ case proplists:get_value(port, Config, ?DEFAULT_PORT) of
+ Port when is_integer(Port) andalso (Port > 0) ->
+ Port;
+ BadPort ->
+ exit({bad_port, BadPort})
+ end.
+
+get_socket_type(Config) ->
+ ?DEBUG("get socket_type", []),
+ case proplists:get_value(socket_type, Config, ?DEFAULT_SOCKET_TYPE) of
+ SocketType when ((SocketType =:= ip_comm) orelse
+ (SocketType =:= ssl) orelse
+ (SocketType =:= essl) orelse
+ (SocketType =:= ossl)) ->
+ SocketType;
+ BadSocketType ->
+ exit({bad_socket_type, BadSocketType})
+ end.
+
+get_test_time(Config) ->
+ ?DEBUG("get test_time", []),
+ case proplists:get_value(test_time, Config, ?DEFAULT_TEST_TIME) of
+ Seconds when is_integer(Seconds) andalso (Seconds > 0) ->
+ timer:seconds(Seconds);
+ BadTestTime ->
+ exit({bad_test_time, BadTestTime})
+ end.
+
+get_max_nof_schedulers(Config) ->
+ ?DEBUG("get max_nof_schedulers", []),
+ case proplists:get_value(max_nof_schedulers,
+ Config,
+ ?DEFAULT_MAX_NOF_SCHEDULERS) of
+ MaxNofScheds when (is_integer(MaxNofScheds) andalso
+ (MaxNofScheds >= 0)) ->
+ MaxNofScheds;
+ BadMaxNofScheds ->
+ exit({bad_max_nof_schedulers, BadMaxNofScheds})
+ end.
+
+
+get_server_cert_file(Config) ->
+ ?DEBUG("get server cert file", []),
+ get_cert_file(server_cert_file, ?DEFAULT_SERVER_CERT, Config).
+
+get_client_cert_file(Config) ->
+ ?DEBUG("get client cert file", []),
+ get_cert_file(client_cert_file, ?DEFAULT_CLIENT_CERT, Config).
+
+get_cert_file(Tag, DefaultCertFileName, Config) ->
+ LibDir = code:lib_dir(inets),
+ HdltDir = filename:join(LibDir, "examples/httpd_load_test"),
+ DefaultCertFile = filename:join(HdltDir, DefaultCertFileName),
+ case proplists:get_value(Tag, Config, DefaultCertFile) of
+ F when is_list(F) ->
+ case file:read_file_info(F) of
+ {ok, #file_info{type = regular}} ->
+ F;
+ {ok, #file_info{type = Type}} ->
+ exit({wrong_file_type, Tag, F, Type});
+ {error, Reason} ->
+ exit({failed_reading_file_info, Tag, F, Reason})
+ end;
+ BadFile ->
+ exit({bad_cert_file, Tag, BadFile})
+ end.
+
+
+get_work_sim(Config) ->
+ ?DEBUG("get work_sim", []),
+ case proplists:get_value(work_simulator, Config, ?DEFAULT_WORK_SIM) of
+ WS when is_integer(WS) andalso (WS > 0) ->
+ WS;
+ BadWS ->
+ exit({bad_work_simulator, BadWS})
+ end.
+
+
+get_data_size(Config) ->
+ ?DEBUG("get data_size", []),
+ case proplists:get_value(data_size, Config, ?DEFAULT_DATA_SIZE) of
+ {From, To, Incr} = DS when (is_integer(From) andalso
+ is_integer(To) andalso
+ is_integer(Incr) andalso
+ (To > From) andalso
+ (From > 0) andalso
+ (Incr > 0)) ->
+ DS;
+ {From, To} when (is_integer(From) andalso
+ is_integer(To) andalso
+ (To > From) andalso
+ (From > 0)) ->
+ {From, To, ?DEFAULT_DATA_SIZE_INCR};
+ BadDS ->
+ exit({bad_data_size, BadDS})
+ end.
+
+
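+%% Construct the base URL used by the clients, e.g. (with the default
+%% work simulator value 10000):
+%%   "http://myhost:8889/cgi-bin/hdlt_random_html:page?10000:"
+%% Each client appends the data size when issuing a request.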
+url(#server{host = Host}, Port, SocketType, WorkSim) ->
+ Scheme =
+ case SocketType of
+ ip_comm ->
+ "http";
+ _ -> %% SSL
+ "https"
+ end,
+ lists:flatten(
+ io_lib:format("~s://~s:~w/cgi-bin/hdlt_random_html:page?~w:",
+ [Scheme, Host, Port, WorkSim])).
+
+
+missing_mandatory_config(Missing) ->
+ exit({missing_mandatory_config, Missing}).
+
+
+ensure_remote_dir_exist(Sftp, Path0) ->
+ case filename:split(Path0) of
+ [Root, Dir | Rest] ->
+ %% We never accept creating the root directory,
+ %% or the next level, so these *must* exist:
+ Path = filename:join(Root, Dir),
+ case ssh_sftp:read_file_info(Sftp, Path) of
+ {ok, #file_info{type = directory}} ->
+ ensure_remote_dir_exist(Sftp, Path, Rest);
+ {ok, #file_info{type = Type}} ->
+ ?INFO("Not a dir: ~p (~p)", [Path, Type]),
+ exit({not_a_dir, Path, Type});
+ {error, Reason} ->
+ ?INFO("Failed reading file info for ~p: ~p",
+ [Path, Reason]),
+ exit({failed_reading_file_info, Path, Reason})
+ end;
+ BadSplit ->
+ ?INFO("Bad remote dir path: ~p -> ~p", [Path0, BadSplit]),
+ exit({bad_dir, Path0})
+ end.
+
+ensure_remote_dir_exist(_Sftp, _Dir, []) ->
+ ok;
+ensure_remote_dir_exist(Sftp, Path, [Dir|Rest]) ->
+ NewPath = filename:join(Path, Dir),
+ case ssh_sftp:read_file_info(Sftp, NewPath) of
+ {ok, #file_info{type = directory}} ->
+ ensure_remote_dir_exist(Sftp, NewPath, Rest);
+ {ok, #file_info{type = Type}} ->
+ %% Exist, but is not a dir
+ ?INFO("Not a dir: ~p (~p)", [NewPath, Type]),
+ exit({not_a_dir, NewPath, Type});
+ {error, Reason} ->
+ %% This *could* be because the dir does not exist,
+ %% but it could also be some other error.
+ %% Unfortunately, the sftp error reason is not
+ %% specific enough to tell these cases apart.
+ %% The simplest way to find out is to simply
+ %% try to create the directory, since we should
+ %% ensure its existence anyway.
+ case ssh_sftp:make_dir(Sftp, NewPath) of
+ ok ->
+ ensure_remote_dir_exist(Sftp, NewPath, Rest);
+ _ ->
+ ?INFO("Failed reading file info for ~p: ~p",
+ [NewPath, Reason]),
+ exit({failed_reading_file_info, NewPath, Reason})
+ end
+ end.
+
+maybe_create_remote_dir(Sftp, Dir) ->
+ case ssh_sftp:read_file_info(Sftp, Dir) of
+ {ok, #file_info{type = directory}} ->
+ ok;
+ {ok, #file_info{type = Type}} ->
+ %% Exist, but is not a dir
+ ?INFO("Not a dir: ~p (~p)", [Dir, Type]),
+ exit({not_a_dir, Dir, Type});
+ {error, Reason} ->
+ %% Assume the dir does not exist...
+ case ssh_sftp:make_dir(Sftp, Dir) of
+ ok ->
+ ok;
+ _ ->
+ ?INFO("Failed reading file info for ~p: ~p",
+ [Dir, Reason]),
+ exit({failed_reading_file_info, Dir, Reason})
+ end
+ end.
+
+
+set_debug_level(Debugs) ->
+ Debug = proplists:get_value(ctrl, Debugs, silence),
+ ?SET_LEVEL(Debug).
+
+
+%% Generates the list of sizes from From to To (in steps of Incr)
+%% and then randomizes the order of that list by repeatedly
+%% splitting it at a random point and swapping the two halves.
+
+randomized_sizes_init() ->
+ {A, B, C} = os:timestamp(),
+ random:seed(A, B, C).
+
+randomized_sizes(From, To, Incr) ->
+ L = lists:seq(From, To, Incr),
+ Len = length(L),
+ randomized_sizes2(L, 0, Len-1).
+
+randomized_sizes2(L, N, Len) when N >= Len ->
+ L;
+randomized_sizes2(L, N, Len) ->
+ SplitWhere = random:uniform(Len),
+ {A, B} = lists:split(SplitWhere, L),
+ randomized_sizes2(B ++ A, N+1, Len).
diff --git a/lib/inets/examples/httpd_load_test/hdlt_logger.erl b/lib/inets/examples/httpd_load_test/hdlt_logger.erl
new file mode 100644
index 0000000000..b0c7eab2d1
--- /dev/null
+++ b/lib/inets/examples/httpd_load_test/hdlt_logger.erl
@@ -0,0 +1,138 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%----------------------------------------------------------------------
+%% Purpose: This is a simple logger utility for the HDLT toolkit.
+%%          It assumes that the debug level and the "name" of the
+%%          logging entity have been put in the process dictionary
+%%          (using the set_level and set_name functions, respectively).
+%%----------------------------------------------------------------------
+
+%%
+
+-module(hdlt_logger).
+
+-export([
+ start/0,
+ set_level/1, get_level/0, set_name/1,
+ info/2, log/2, debug/2
+ ]).
+
+-export([logger/1]).
+
+-define(LOGGER, ?MODULE).
+-define(MSG, hdlt_logger_msg).
+-define(LEVEL, hdlt_logger_level).
+-define(NAME, hdlt_logger_name).
+-define(INFO_STR, "INFO").
+-define(LOG_STR, "LOG ").
+-define(DEBUG_STR, "DBG ").
+
+
+start() ->
+ Self = self(),
+ proc_lib:start(?MODULE, logger, [Self]).
+
+set_name(Name) when is_list(Name) ->
+ put(?NAME, Name),
+ ok.
+
+get_level() ->
+ get(?LEVEL).
+
+set_level(Level) ->
+ case lists:member(Level, [silence, info, log, debug]) of
+ true ->
+ put(?LEVEL, Level),
+ ok;
+ false ->
+ erlang:error({bad_debug_level, Level})
+ end.
+
+
+info(F, A) ->
+%% io:format("info -> " ++ F ++ "~n", A),
+ do_log(info, get(?LEVEL), F, A).
+
+log(F, A) ->
+%% io:format("log -> " ++ F ++ "~n", A),
+ do_log(log, get(?LEVEL), F, A).
+
+debug(F, A) ->
+%% io:format("debug -> " ++ F ++ "~n", A),
+ do_log(debug, get(?LEVEL), F, A).
+
+
+logger(Parent) ->
+ global:register_name(?LOGGER, self()),
+ Ref = erlang:monitor(process, Parent),
+ proc_lib:init_ack(self()),
+ logger_loop(Ref).
+
+logger_loop(Ref) ->
+ receive
+ {?MSG, F, A} ->
+ io:format(F, A),
+ logger_loop(Ref);
+ {'DOWN', Ref, process, _Object, _Info} ->
+ %% start the stop timer
+ erlang:send_after(timer:seconds(5), self(), stop),
+ logger_loop(undefined);
+ stop ->
+ global:unregister_name(?LOGGER),
+ ok
+ end.
+
+
+formated_timestamp() ->
+ {Date, Time} = erlang:localtime(),
+ {YYYY,MM,DD} = Date,
+ {Hour,Min,Sec} = Time,
+ FormatDate =
+ io_lib:format("~.4w-~.2.0w-~.2.0w ~.2.0w:~.2.0w:~.2.0w",
+ [YYYY,MM,DD,Hour,Min,Sec]),
+ lists:flatten(FormatDate).
+
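+%% A message is printed when the configured level is at least as
+%% verbose as the message severity (silence < info < log < debug).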
+do_log(_, silence, _, _) ->
+ ok;
+do_log(info, info, F, A) ->
+ do_log(?INFO_STR, F, A);
+do_log(info, log, F, A) ->
+ do_log(?INFO_STR, F, A);
+do_log(log, log, F, A) ->
+ do_log(?LOG_STR, F, A);
+do_log(info, debug, F, A) ->
+ do_log(?INFO_STR, F, A);
+do_log(log, debug, F, A) ->
+ do_log(?LOG_STR, F, A);
+do_log(debug, debug, F, A) ->
+ do_log(?DEBUG_STR, F, A);
+do_log(_, _, _F, _A) ->
+ ok.
+
+do_log(SEV, F, A) ->
+ Name =
+ case get(?NAME) of
+ L when is_list(L) ->
+ L;
+ _ ->
+ "UNDEFINED"
+ end,
+ Msg = {?MSG, "~s ~s [~s] " ++ F ++ "~n",
+ [SEV, Name, formated_timestamp() | A]},
+ (catch global:send(?LOGGER, Msg)).
diff --git a/lib/inets/examples/httpd_load_test/hdlt_logger.hrl b/lib/inets/examples/httpd_load_test/hdlt_logger.hrl
new file mode 100644
index 0000000000..aa94babc48
--- /dev/null
+++ b/lib/inets/examples/httpd_load_test/hdlt_logger.hrl
@@ -0,0 +1,33 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+
+-ifndef(hdlt_logger_hrl).
+-define(hdlt_logger_hrl, true).
+
+%% Various log macros
+-define(SET_LEVEL(N), hdlt_logger:set_level(N)).
+-define(GET_LEVEL(), hdlt_logger:get_level()).
+-define(SET_NAME(N), hdlt_logger:set_name(N)).
+
+-define(INFO(F, A), hdlt_logger:info(F, A)).
+-define(LOG(F, A), hdlt_logger:log(F, A)).
+-define(DEBUG(F, A), hdlt_logger:debug(F, A)).
+
+-endif. % -ifdef(hdlt_logger_hrl).
diff --git a/lib/inets/examples/httpd_load_test/hdlt_random_html.erl b/lib/inets/examples/httpd_load_test/hdlt_random_html.erl
new file mode 100644
index 0000000000..e3a572c61f
--- /dev/null
+++ b/lib/inets/examples/httpd_load_test/hdlt_random_html.erl
@@ -0,0 +1,59 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+
+-module(hdlt_random_html).
+-export([page/3]).
+
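+%% Input is expected to be "<work-sim>:<data-size>", i.e. the query part
+%% of the base URL built by hdlt_ctrl with the size appended by the
+%% client, e.g. "10000:750".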
+page(SessionID, _Env, Input) ->
+%% log("page(~p) -> deliver content-type when"
+%% "~n SessionID: ~p"
+%% "~n Env: ~p"
+%% "~n Input: ~p", [self(), SessionID, Env, Input]),
+ [WorkSimStr, SzSimStr] = string:tokens(Input, [$:]),
+ WorkSim = list_to_integer(WorkSimStr),
+ SzSim = list_to_integer(SzSimStr),
+ mod_esi:deliver(SessionID, "Content-Type:text/html\r\n\r\n"),
+ mod_esi:deliver(SessionID, start("Random test page")),
+ mod_esi:deliver(SessionID, content(WorkSim, SzSim)),
+ mod_esi:deliver(SessionID, stop()),
+ ok.
+
+start(Title) ->
+ "<HTML>
+<HEAD>
+<TITLE>" ++ Title ++ "</TITLE>
+ </HEAD>
+<BODY>\n".
+
+stop() ->
+ "</BODY>
+</HTML>
+".
+
+content(WorkSim, SzSim) ->
+ {A, B, C} = now(),
+ random:seed(A, B, C),
+ lists:sort([random:uniform(X) || X <- lists:seq(1, WorkSim)]),
+ lists:flatten(lists:duplicate(SzSim, "Dummy data ")).
+
+%% log(F, A) ->
+%% hdlt_logger:set_name("HDLT RANDOM-HTML"),
+%% hdlt_logger:set_level(debug),
+%% hdlt_logger:log(F, A).
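page/3 is a mod_esi callback, so it has to be exposed through an httpd erl_script_alias, and Input is expected to be "WorkSim:SzSim". A hedged sketch of a matching configuration; the alias, port and directories are placeholders and not part of this commit:

%% Illustrative httpd properties only:
[{port,             8888},
 {server_name,      "hdlt"},
 {server_root,      "/tmp/hdlt"},
 {document_root,    "/tmp/hdlt/htdocs"},
 {modules,          [mod_esi]},
 {erl_script_alias, {"/load_test", [hdlt_random_html]}}].

%% A request such as
%%   GET /load_test/hdlt_random_html:page?42:1000
%% then reaches page/3 with Input = "42:1000": 42 random numbers are
%% generated and sorted (the simulated work) and 1000 copies of the
%% dummy string form the body (the simulated size).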
diff --git a/lib/inets/examples/httpd_load_test/hdlt_server.erl b/lib/inets/examples/httpd_load_test/hdlt_server.erl
new file mode 100644
index 0000000000..3e5a849d5b
--- /dev/null
+++ b/lib/inets/examples/httpd_load_test/hdlt_server.erl
@@ -0,0 +1,163 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+%%----------------------------------------------------------------------
+%% Purpose: The HDLT server module.
+%% This is just a stub, making future expansion easy.
+%% All code in this module is executed in the local node!
+%%----------------------------------------------------------------------
+
+-module(hdlt_server).
+
+-export([start/1, stop/0, start_inets/0, start_service/1]).
+
+-export([proxy/1]).
+
+-include_lib("kernel/include/file.hrl").
+-include("hdlt_logger.hrl").
+
+
+-define(PROXY, hdlt_proxy).
+
+
+%% This function is used to start the proxy process.
+%% It is called *after* the nodes have been "connected"
+%% to the controller/collector node.
+
+start(Debug) ->
+ proc_lib:start(?MODULE, proxy, [Debug]).
+
+stop() ->
+ ?PROXY ! stop.
+
+start_inets() ->
+ ?PROXY ! start_inets.
+
+start_service(Config) ->
+ ?PROXY ! {server_start, Config, self()},
+ receive
+ {server_start_result, Result} ->
+ Result
+ after 15000 ->
+ {error, timeout}
+ end.
+
+
+proxy(Debug) ->
+ process_flag(trap_exit, true),
+ erlang:register(?PROXY, self()),
+ ?SET_NAME("HDLT PROXY"),
+ ?SET_LEVEL(Debug),
+ ?LOG("starting", []),
+ Ref = await_for_controller(10),
+ CtrlNode = node(Ref),
+ erlang:monitor_node(CtrlNode, true),
+ proc_lib:init_ack({ok, self()}),
+ ?DEBUG("started", []),
+ proxy_loop(Ref, CtrlNode).
+
+await_for_controller(N) when N > 0 ->
+ case global:whereis_name(hdlt_ctrl) of
+ Pid when is_pid(Pid) ->
+ erlang:monitor(process, Pid);
+ _ ->
+ timer:sleep(1000),
+ await_for_controller(N-1)
+ end;
+await_for_controller(_) ->
+ proc_lib:init_ack({error, controller_not_found, nodes()}),
+ timer:sleep(500),
+ halt().
+
+
+proxy_loop(Ref, CtrlNode) ->
+ ?DEBUG("await command", []),
+ receive
+ stop ->
+ ?LOG("received stop", []),
+ halt();
+
+ start_inets ->
+ ?LOG("start the inets service framework", []),
+ case (catch inets:start()) of
+ ok ->
+ ?LOG("framework started", []),
+ proxy_loop(Ref, CtrlNode);
+ Error ->
+ ?LOG("failed starting inets service framework: "
+ "~n Error: ~p", [Error]),
+ halt()
+ end;
+
+ {server_start, Config, From} ->
+ ?LOG("start-server", []),
+ maybe_start_crypto_and_ssl(Config),
+ %% inets:enable_trace(max, "/tmp/inets-httpd-trace.log", httpd),
+ %% inets:enable_trace(max, "/tmp/inets-httpd-trace.log", all),
+ case (catch inets:start(httpd, Config)) of
+ {ok, _} ->
+ ?LOG("server started when"
+ "~n which(inets): ~p"
+ "~n RootDir: ~p"
+ "~n System info: ~p", [code:which(inets),
+ code:root_dir(),
+ get_node_info()]),
+ From ! {server_start_result, ok},
+ proxy_loop(Ref, CtrlNode);
+ Error ->
+ ?INFO("server start failed"
+ "~n Error: ~p", [Error]),
+ From ! {server_start_result, Error},
+ halt()
+ end;
+
+ {nodedown, CtrlNode} ->
+ ?LOG("received nodedown for controller node - terminate", []),
+ halt();
+
+ {'DOWN', Ref, process, _, _} ->
+ ?LOG("received DOWN message for controller - terminate", []),
+ %% The controller has terminated, time to die
+ halt()
+
+ end.
+
+
+maybe_start_crypto_and_ssl(Config) ->
+ case lists:keysearch(socket_type, 1, Config) of
+ {value, {socket_type, SocketType}} when ((SocketType =:= ssl) orelse
+ (SocketType =:= ossl) orelse
+ (SocketType =:= essl)) ->
+ ?LOG("maybe start crypto and ssl", []),
+ (catch crypto:start()),
+ ssl:start();
+ _ ->
+ ok
+ end.
+
+
+get_node_info() ->
+ [{cpu_topology, erlang:system_info(cpu_topology)},
+ {heap_type, erlang:system_info(heap_type)},
+ {nof_schedulers, erlang:system_info(schedulers)},
+ {otp_release, erlang:system_info(otp_release)},
+ {version, erlang:system_info(version)},
+ {system_version, erlang:system_info(system_version)},
+ {system_architecture, erlang:system_info(system_architecture)}].
+
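The proxy is driven entirely by the messages handled in proxy_loop/2. A hedged sketch of how the controller side might drive it, assuming the controller node (global name hdlt_ctrl) is already up and the nodes are connected; the httpd property list is illustrative only:

{ok, _Proxy} = hdlt_server:start(debug),
hdlt_server:start_inets(),

HttpdConfig = [{port,          8888},
               {server_name,   "hdlt.test"},
               {server_root,   "/tmp/hdlt"},
               {document_root, "/tmp/hdlt/htdocs"},
               {socket_type,   ip_comm}],
ok = hdlt_server:start_service(HttpdConfig),

%% ... run the load test ...

hdlt_server:stop().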
diff --git a/lib/inets/examples/httpd_load_test/hdlt_slave.erl b/lib/inets/examples/httpd_load_test/hdlt_slave.erl
new file mode 100644
index 0000000000..52af9b5b90
--- /dev/null
+++ b/lib/inets/examples/httpd_load_test/hdlt_slave.erl
@@ -0,0 +1,291 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(hdlt_slave).
+
+
+-export([start_link/4, start_link/5, start_link/6, stop/1]).
+
+%% Internal exports
+-export([wait_for_slave/9, slave_start/1, wait_for_master_to_die/3]).
+
+-include("hdlt_logger.hrl").
+
+-define(SSH_PORT, 22).
+-define(TIMEOUT, 60000).
+-define(LOGGER, hdlt_logger).
+
+
+%% ***********************************************************************
+%% start_link/4,5,6 --
+%%
+%% The start_link/4,5,6 functions are used to start a slave Erlang node.
+%% The node on which the start_link/N functions are used is called the
+%% master in the description below.
+%%
+%% The slave node is always started via ssh: start_link/N connects to
+%% the target host (which may be the local host) over ssh and spawns
+%% the Erlang node over that connection. The only further requirement
+%% is that the 'erl' program can be found at the given ErlPath on the
+%% target host.
+%%
+%% For this to work, the following conditions must be fulfilled:
+%%
+%% 1. The target host must run an ssh daemon (the connection is made
+%%    to port 22); if not, an error is returned.
+%%
+%% 2. The hosts must be configured to allow ssh access without
+%%    password prompts (for example via public-key authentication).
+%%
+%% The slave node will have its file server and user server redirected
+%% to the master. When the master node dies, the slave node will
+%% terminate. For the start_link functions, the slave node will also
+%% terminate if the process that called start_link terminates.
+%%
+%% Returns: {ok, Name@Host} |
+%%          {error, timeout} |
+%%          {error, {already_running, Name@Host}} |
+%%          {error, Reason}   (e.g. if the ssh connection fails)
+
+start_link(Host, Name, ErlPath, Paths) ->
+ start_link(Host, Name, ErlPath, Paths, [], silence).
+
+start_link(Host, Name, ErlPath, Paths, DebugLevel) when is_atom(DebugLevel) ->
+ start_link(Host, Name, ErlPath, Paths, [], DebugLevel);
+start_link(Host, Name, ErlPath, Paths, Args) when is_list(Args) ->
+ start_link(Host, Name, ErlPath, Paths, Args, silence).
+
+start_link(Host, Name, ErlPath, Paths, Args, DebugLevel) ->
+ Node = list_to_atom(lists:concat([Name, "@", Host])),
+ case net_adm:ping(Node) of
+ pang ->
+ start_it(Host, Name, Node, ErlPath, Paths, Args, DebugLevel);
+ pong ->
+ {error, {already_running, Node}}
+ end.
+
+%% Stops a running node.
+
+stop(Node) ->
+ rpc:call(Node, erlang, halt, []),
+ ok.
+
+
+%% Starts a new slave node.
+
+start_it(Host, Name, Node, ErlPath, Paths, Args, DebugLevel) ->
+ Prog = filename:join([ErlPath, "erl"]),
+ spawn(?MODULE, wait_for_slave, [self(), Host, Name, Node, Paths, Args, self(), Prog, DebugLevel]),
+ receive
+ {result, Result} -> Result
+ end.
+
+%% Waits for the slave to start.
+
+wait_for_slave(Parent, Host, Name, Node, Paths, Args,
+ LinkTo, Prog, DebugLevel) ->
+ ?SET_NAME("HDLT SLAVE STARTER"),
+ ?SET_LEVEL(DebugLevel),
+ ?DEBUG("begin", []),
+ Waiter = register_unique_name(0),
+ case mk_cmd(Host, Name, Paths, Args, Waiter, Prog) of
+ {ok, Cmd} ->
+ ?DEBUG("command generated: ~n~s", [Cmd]),
+ case (catch ssh_slave_start(Host, Cmd)) of
+ {ok, Conn, _Chan} ->
+ ?DEBUG("ssh channel created", []),
+ receive
+ {SlavePid, slave_started} ->
+ ?DEBUG("slave started: ~p", [SlavePid]),
+ unregister(Waiter),
+ slave_started(Parent, LinkTo, SlavePid, Conn,
+ DebugLevel)
+ after 32000 ->
+ ?INFO("slave node failed to report in on time",
+ []),
+ %% If it seems that the node was partially started,
+ %% try to kill it.
+ case net_adm:ping(Node) of
+ pong ->
+ spawn(Node, erlang, halt, []),
+ ok;
+ _ ->
+ ok
+ end,
+ Parent ! {result, {error, timeout}}
+ end;
+ {error, Reason} = Error ->
+ ?INFO("FAILED starting node: "
+ "~n ~p"
+ "~n ~p", [Reason, Cmd]),
+ Parent ! {result, Error}
+ end;
+ Other ->
+ ?INFO("FAILED creating node command string: "
+ "~n ~p", [Other]),
+ Parent ! {result, Other}
+ end.
+
+
+ssh_slave_start(Host, ErlCmd) ->
+ ?DEBUG("ssh_slave_start -> try connect to ~p", [Host]),
+ Connection =
+ case (catch ssh:connect(Host, ?SSH_PORT,
+ [{silently_accept_hosts, true}])) of
+ {ok, Conn} ->
+ ?DEBUG("ssh_slave_start -> connected: ~p", [Conn]),
+ Conn;
+ Error1 ->
+ ?LOG("failed connecting to ~p: ~p", [Host, Error1]),
+ throw({error, {ssh_connect_failed, Error1}})
+ end,
+
+ ?DEBUG("ssh_slave_start -> connected - now create channel", []),
+ Channel =
+ case (catch ssh_connection:session_channel(Connection, ?TIMEOUT)) of
+ {ok, Chan} ->
+ ?DEBUG("ssh_slave_start -> channel ~p created", [Chan]),
+ Chan;
+ Error2 ->
+ ?LOG("failed creating channel: ~p", [Error2]),
+ throw({error, {ssh_channel_create_failed, Error2}})
+ end,
+
+ ?DEBUG("ssh_slave_start -> channel created - now exec command: "
+ "~n ~p", [ErlCmd]),
+ case (catch ssh_connection:exec(Connection, Channel, ErlCmd, infinity)) of
+ success ->
+ ?DEBUG("ssh_slave_start -> command exec'ed - clean ssh msg", []),
+ clean_ssh_msg(),
+ ?DEBUG("ssh_slave_start -> done", []),
+ {ok, Connection, Channel};
+ Error3 ->
+ ?LOG("failed exec command: ~p", [Error3]),
+ throw({error, {ssh_exec_failed, Error3}})
+ end.
+
+clean_ssh_msg() ->
+ receive
+ {ssh_cm, _X, _Y} ->
+ clean_ssh_msg()
+ after 1000 ->
+ ok
+ end.
+
+
+slave_started(ReplyTo, Master, Slave, Conn, Level)
+ when is_pid(Master) andalso is_pid(Slave) ->
+ process_flag(trap_exit, true),
+ SName = lists:flatten(
+ io_lib:format("HDLT SLAVE CTRL[~p,~p]",
+ [self(), node(Slave)])),
+ ?SET_NAME(SName),
+ ?SET_LEVEL(Level),
+ ?LOG("initiating", []),
+ MasterRef = erlang:monitor(process, Master),
+ SlaveRef = erlang:monitor(process, Slave),
+ ReplyTo ! {result, {ok, node(Slave)}},
+ slave_running(Master, MasterRef, Slave, SlaveRef, Conn).
+
+
+%% The slave node will be killed if the master process terminates.
+%% The master process will not be killed if the slave node terminates.
+
+slave_running(Master, MasterRef, Slave, SlaveRef, Conn) ->
+ ?DEBUG("await message", []),
+ receive
+ {'DOWN', MasterRef, process, _Object, _Info} ->
+ ?LOG("received DOWN from master", []),
+ erlang:demonitor(SlaveRef, [flush]),
+ Slave ! {nodedown, node()},
+ ssh:close(Conn);
+
+ {'DOWN', SlaveRef, process, Object, _Info} ->
+ ?LOG("received DOWN from slave (~p)", [Object]),
+ erlang:demonitor(MasterRef, [flush]),
+ ssh:close(Conn);
+
+ Other ->
+ ?DEBUG("received unknown: ~n~p", [Other]),
+ slave_running(Master, MasterRef, Slave, SlaveRef, Conn)
+
+ end.
+
+register_unique_name(Number) ->
+ Name = list_to_atom(lists:concat([?MODULE, "_waiter_", Number])),
+ case catch register(Name, self()) of
+ true ->
+ Name;
+ {'EXIT', {badarg, _}} ->
+ register_unique_name(Number+1)
+ end.
+
+
+%% Builds the command line used to start the slave node.
+%% The command is executed on the target host via ssh,
+%% also when the slave runs on the local host.
+
+mk_cmd(Host, Name, Paths, Args, Waiter, Prog) ->
+ PaPaths = [[" -pa ", Path] || Path <- Paths],
+ {ok, lists:flatten(
+ lists:concat([Prog,
+ " -detached -noinput ",
+ Args, " ",
+ " -sname ", Name, "@", Host,
+ " -s ", ?MODULE, " slave_start ", node(),
+ " ", Waiter,
+ " ", PaPaths]))}.
+
+
+%% This function will be invoked on the slave, using the -s option of erl.
+%% It will wait for the master node to terminate.
+
+slave_start([Master, Waiter]) ->
+ spawn(?MODULE, wait_for_master_to_die, [Master, Waiter, silence]);
+slave_start([Master, Waiter, Level]) ->
+ spawn(?MODULE, wait_for_master_to_die, [Master, Waiter, Level]).
+
+
+wait_for_master_to_die(Master, Waiter, Level) ->
+ process_flag(trap_exit, true),
+ SName = lists:flatten(
+ io_lib:format("HDLT-SLAVE MASTER MONITOR[~p,~p,~p]",
+ [self(), node(), Master])),
+ ?SET_NAME(SName),
+ ?SET_LEVEL(Level),
+ erlang:monitor_node(Master, true),
+ {Waiter, Master} ! {self(), slave_started},
+ wloop(Master).
+
+wloop(Master) ->
+ ?DEBUG("await message", []),
+ receive
+ {nodedown, Master} ->
+ ?INFO("received master nodedown", []),
+ halt();
+ _Other ->
+ wloop(Master)
+ end.
+
+
+
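A hedged sketch of starting and stopping a slave from the controller side; the host name, erl install path and code path are placeholders, and it assumes -sname distribution plus passwordless ssh access to the host:

Host    = "loadgen1",
ErlPath = "/usr/local/otp/bin",
Paths   = ["/home/tester/hdlt/ebin"],

case hdlt_slave:start_link(Host, client1, ErlPath, Paths, debug) of
    {ok, Node} ->
        %% The slave is now part of the distributed system.
        Release = rpc:call(Node, erlang, system_info, [otp_release]),
        io:format("slave ~p runs OTP ~s~n", [Node, Release]),
        hdlt_slave:stop(Node);
    {error, Reason} ->
        exit({failed_starting_slave, Reason})
end.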
diff --git a/lib/inets/examples/httpd_load_test/hdlt_ssl_client_cert.pem b/lib/inets/examples/httpd_load_test/hdlt_ssl_client_cert.pem
new file mode 120000
index 0000000000..41644a1098
--- /dev/null
+++ b/lib/inets/examples/httpd_load_test/hdlt_ssl_client_cert.pem
@@ -0,0 +1 @@
+../../test/httpc_SUITE_data/ssl_client_cert.pem \ No newline at end of file
diff --git a/lib/inets/examples/httpd_load_test/hdlt_ssl_server_cert.pem b/lib/inets/examples/httpd_load_test/hdlt_ssl_server_cert.pem
new file mode 120000
index 0000000000..41644a1098
--- /dev/null
+++ b/lib/inets/examples/httpd_load_test/hdlt_ssl_server_cert.pem
@@ -0,0 +1 @@
+../../test/httpc_SUITE_data/ssl_client_cert.pem \ No newline at end of file
diff --git a/lib/inets/examples/httpd_load_test/modules.mk b/lib/inets/examples/httpd_load_test/modules.mk
new file mode 100644
index 0000000000..9d0d7103d5
--- /dev/null
+++ b/lib/inets/examples/httpd_load_test/modules.mk
@@ -0,0 +1,44 @@
+#-*-makefile-*- ; force emacs to enter makefile-mode
+
+# %CopyrightBegin%
+#
+# Copyright Ericsson AB 2010. All Rights Reserved.
+#
+# The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved online at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# %CopyrightEnd%
+
+SCRIPT_SKELETONS = \
+ hdlt.sh.skel
+
+CONF_SKELETONS = \
+ hdlt.config.skel
+
+CERT_FILES = \
+ hdlt_ssl_client_cert.pem \
+ hdlt_ssl_server_cert.pem
+
+README = HDLT_README
+
+MODULES = \
+ hdlt \
+ hdlt_ctrl \
+ hdlt_client \
+ hdlt_logger \
+ hdlt_random_html \
+ hdlt_server \
+ hdlt_slave
+
+INTERNAL_HRL_FILES = \
+ hdlt_logger.hrl
+
+
diff --git a/lib/inets/examples/server_root/Makefile b/lib/inets/examples/server_root/Makefile
new file mode 100644
index 0000000000..d7a3231068
--- /dev/null
+++ b/lib/inets/examples/server_root/Makefile
@@ -0,0 +1,209 @@
+#
+# %CopyrightBegin%
+#
+# Copyright Ericsson AB 1997-2010. All Rights Reserved.
+#
+# The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved online at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# %CopyrightEnd%
+#
+#
+include $(ERL_TOP)/make/target.mk
+include $(ERL_TOP)/make/$(TARGET)/otp.mk
+
+# ----------------------------------------------------
+# Application version
+# ----------------------------------------------------
+include ../../vsn.mk
+VSN=$(INETS_VSN)
+
+# ----------------------------------------------------
+# Release directory specification
+# ----------------------------------------------------
+RELSYSDIR = $(RELEASE_PATH)/lib/inets-$(VSN)
+
+# ----------------------------------------------------
+# Target Specs
+# ----------------------------------------------------
+MODULE=
+
+AUTH_FILES = auth/group \
+ auth/passwd
+CGI_FILES = cgi-bin/printenv.sh
+CONF_FILES = conf/8080.conf \
+ conf/8888.conf \
+ conf/httpd.conf \
+ conf/ssl.conf \
+ conf/mime.types
+OPEN_FILES = htdocs/open/dummy.html
+MNESIA_OPEN_FILES = htdocs/mnesia_open/dummy.html
+MISC_FILES = htdocs/misc/friedrich.html \
+ htdocs/misc/oech.html
+SECRET_FILES = htdocs/secret/dummy.html
+MNESIA_SECRET_FILES = htdocs/mnesia_secret/dummy.html
+HTDOCS_FILES = htdocs/index.html \
+ htdocs/config.shtml \
+ htdocs/echo.shtml \
+ htdocs/exec.shtml \
+ htdocs/flastmod.shtml \
+ htdocs/fsize.shtml \
+ htdocs/include.shtml
+ICON_FILES = icons/README \
+ icons/a.gif \
+ icons/alert.black.gif \
+ icons/alert.red.gif \
+ icons/apache_pb.gif \
+ icons/back.gif \
+ icons/ball.gray.gif \
+ icons/ball.red.gif \
+ icons/binary.gif \
+ icons/binhex.gif \
+ icons/blank.gif \
+ icons/bomb.gif \
+ icons/box1.gif \
+ icons/box2.gif \
+ icons/broken.gif \
+ icons/burst.gif \
+ icons/button1.gif \
+ icons/button10.gif \
+ icons/button2.gif \
+ icons/button3.gif \
+ icons/button4.gif \
+ icons/button5.gif \
+ icons/button6.gif \
+ icons/button7.gif \
+ icons/button8.gif \
+ icons/button9.gif \
+ icons/buttonl.gif \
+ icons/buttonr.gif \
+ icons/c.gif \
+ icons/comp.blue.gif \
+ icons/comp.gray.gif \
+ icons/compressed.gif \
+ icons/continued.gif \
+ icons/dir.gif \
+ icons/down.gif \
+ icons/dvi.gif \
+ icons/f.gif \
+ icons/folder.gif \
+ icons/folder.open.gif \
+ icons/folder.sec.gif \
+ icons/forward.gif \
+ icons/generic.gif \
+ icons/generic.red.gif \
+ icons/generic.sec.gif \
+ icons/hand.right.gif \
+ icons/hand.up.gif \
+ icons/htdig.gif \
+ icons/icon.sheet.gif \
+ icons/image1.gif \
+ icons/image2.gif \
+ icons/image3.gif \
+ icons/index.gif \
+ icons/layout.gif \
+ icons/left.gif \
+ icons/link.gif \
+ icons/movie.gif \
+ icons/p.gif \
+ icons/patch.gif \
+ icons/pdf.gif \
+ icons/pie0.gif \
+ icons/pie1.gif \
+ icons/pie2.gif \
+ icons/pie3.gif \
+ icons/pie4.gif \
+ icons/pie5.gif \
+ icons/pie6.gif \
+ icons/pie7.gif \
+ icons/pie8.gif \
+ icons/portal.gif \
+ icons/poweredby.gif \
+ icons/ps.gif \
+ icons/quill.gif \
+ icons/right.gif \
+ icons/screw1.gif \
+ icons/screw2.gif \
+ icons/script.gif \
+ icons/sound1.gif \
+ icons/sound2.gif \
+ icons/sphere1.gif \
+ icons/sphere2.gif \
+ icons/star.gif \
+ icons/star_blank.gif \
+ icons/tar.gif \
+ icons/tex.gif \
+ icons/text.gif \
+ icons/transfer.gif \
+ icons/unknown.gif \
+ icons/up.gif \
+ icons/uu.gif \
+ icons/uuencoded.gif \
+ icons/world1.gif \
+ icons/world2.gif
+
+SSL_FILES = ssl/ssl_client.pem \
+ ssl/ssl_server.pem
+
+# ----------------------------------------------------
+# FLAGS
+# ----------------------------------------------------
+ERL_COMPILE_FLAGS +=
+
+# ----------------------------------------------------
+# Targets
+# ----------------------------------------------------
+
+debug opt:
+
+clean:
+
+docs:
+
+# ----------------------------------------------------
+# Release Target
+# ----------------------------------------------------
+include $(ERL_TOP)/make/otp_release_targets.mk
+
+release_spec: opt
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/auth
+ $(INSTALL_DATA) $(AUTH_FILES) $(RELSYSDIR)/examples/server_root/auth
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/cgi-bin
+ $(INSTALL_SCRIPT) $(CGI_FILES) $(RELSYSDIR)/examples/server_root/cgi-bin
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/conf
+ $(INSTALL_DATA) $(CONF_FILES) $(RELSYSDIR)/examples/server_root/conf
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/htdocs/open
+ $(INSTALL_DATA) $(OPEN_FILES) \
+ $(RELSYSDIR)/examples/server_root/htdocs/open
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/htdocs/mnesia_open
+ $(INSTALL_DATA) $(MNESIA_OPEN_FILES) \
+ $(RELSYSDIR)/examples/server_root/htdocs/mnesia_open
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/htdocs/misc
+ $(INSTALL_DATA) $(MISC_FILES) \
+ $(RELSYSDIR)/examples/server_root/htdocs/misc
+ $(INSTALL_DIR) \
+ $(RELSYSDIR)/examples/server_root/htdocs/secret/top_secret
+ $(INSTALL_DIR) \
+ $(RELSYSDIR)/examples/server_root/htdocs/mnesia_secret/top_secret
+ $(INSTALL_DATA) $(SECRET_FILES) \
+ $(RELSYSDIR)/examples/server_root/htdocs/secret
+ $(INSTALL_DATA) $(MNESIA_SECRET_FILES) \
+ $(RELSYSDIR)/examples/server_root/htdocs/mnesia_secret
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/htdocs
+ $(INSTALL_DATA) $(HTDOCS_FILES) $(RELSYSDIR)/examples/server_root/htdocs
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/icons
+ $(INSTALL_DATA) $(ICON_FILES) $(RELSYSDIR)/examples/server_root/icons
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/ssl
+ $(INSTALL_DATA) $(SSL_FILES) $(RELSYSDIR)/examples/server_root/ssl
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/logs
+
+release_docs_spec:
+
diff --git a/lib/inets/examples/subdirs.mk b/lib/inets/examples/subdirs.mk
new file mode 100644
index 0000000000..10a331fc26
--- /dev/null
+++ b/lib/inets/examples/subdirs.mk
@@ -0,0 +1,3 @@
+#-*-makefile-*- ; force emacs to enter makefile-mode
+
+SUB_DIRECTORIES = server_root httpd_load_test \ No newline at end of file
diff --git a/lib/inets/src/ftp/Makefile b/lib/inets/src/ftp/Makefile
index 0c15277a18..19b93870df 100644
--- a/lib/inets/src/ftp/Makefile
+++ b/lib/inets/src/ftp/Makefile
@@ -22,6 +22,7 @@ include $(ERL_TOP)/make/target.mk
EBIN = ../../ebin
include $(ERL_TOP)/make/$(TARGET)/otp.mk
+
# ----------------------------------------------------
# Application version
# ----------------------------------------------------
@@ -29,6 +30,7 @@ include ../../vsn.mk
VSN = $(INETS_VSN)
+
# ----------------------------------------------------
# Release directory specification
# ----------------------------------------------------
@@ -52,24 +54,21 @@ TARGET_FILES= $(MODULES:%=$(EBIN)/%.$(EMULATOR))
# ----------------------------------------------------
-# INETS FLAGS
+# FLAGS
# ----------------------------------------------------
-INETS_FLAGS = -D'SERVER_SOFTWARE="$(APPLICATION)/$(VSN)"'
+
+include ../inets_app/inets.mk
ifeq ($(FTP_DEBUG),true)
INETS_FLAGS += -Dftp_debug
endif
+ERL_COMPILE_FLAGS += \
+ $(INETS_FLAGS) \
+ $(INETS_ERL_COMPILE_FLAGS) \
+ -I../../include \
+ -I../inets_app
-# ----------------------------------------------------
-# FLAGS
-# ----------------------------------------------------
-INETS_ERL_FLAGS += -I ../inets_app -pa ../../ebin
-
-ERL_COMPILE_FLAGS += $(INETS_ERL_FLAGS) \
- $(INETS_FLAGS) \
- +'{parse_transform,sys_pre_attributes}' \
- +'{attribute,insert,app_vsn,$(APP_VSN)}'
# ----------------------------------------------------
# Targets
@@ -89,9 +88,10 @@ docs:
include $(ERL_TOP)/make/otp_release_targets.mk
release_spec: opt
- $(INSTALL_DIR) $(RELSYSDIR)/src
- $(INSTALL_DATA) $(HRL_FILES) $(ERL_FILES) $(RELSYSDIR)/src
- $(INSTALL_DIR) $(RELSYSDIR)/ebin
+ $(INSTALL_DIR) $(RELSYSDIR)/src
+ $(INSTALL_DIR) $(RELSYSDIR)/src/ftp
+ $(INSTALL_DATA) $(HRL_FILES) $(ERL_FILES) $(RELSYSDIR)/src/ftp
+ $(INSTALL_DIR) $(RELSYSDIR)/ebin
$(INSTALL_DATA) $(TARGET_FILES) $(RELSYSDIR)/ebin
release_docs_spec:
diff --git a/lib/inets/src/ftp/ftp.erl b/lib/inets/src/ftp/ftp.erl
index 534fcae675..5ad74851c8 100644
--- a/lib/inets/src/ftp/ftp.erl
+++ b/lib/inets/src/ftp/ftp.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%
@@ -25,14 +25,12 @@
-behaviour(gen_server).
-behaviour(inets_service).
--deprecated({open, 3, next_major_release}).
--deprecated({force_active, 1, next_major_release}).
%% API - Client interface
-export([cd/2, close/1, delete/2, formaterror/1,
lcd/2, lpwd/1, ls/1, ls/2,
mkdir/2, nlist/1, nlist/2,
- open/1, open/2, open/3, force_active/1,
+ open/1, open/2,
pwd/1, quote/2,
recv/2, recv/3, recv_bin/2,
recv_chunk_start/2, recv_chunk/1,
@@ -133,11 +131,6 @@ open(Host, Port) when is_integer(Port) ->
open(Host, [{port, Port}]);
%% </BACKWARD-COMPATIBILLITY>
-%% <BACKWARD-COMPATIBILLITY>
-open(Host, [H|_] = Flags) when is_atom(H) ->
- open(Host, ?FTP_PORT, Flags);
-%% </BACKWARD-COMPATIBILLITY>
-
open(Host, Opts) when is_list(Opts) ->
?fcrt("open", [{host, Host}, {opts, Opts}]),
try
@@ -160,32 +153,6 @@ open(Host, Opts) when is_list(Opts) ->
end.
-%% <BACKWARD-COMPATIBILLITY>
-open(Host, Port, Flags) when is_integer(Port) andalso is_list(Flags) ->
- ?fcrt("open", [{host, Host}, {port, Port}, {flags, Flags}]),
- try
- {ok, StartOptions} = start_options([{flags, Flags}]),
- ?fcrt("open", [{start_options, StartOptions}]),
- {ok, OpenOptions} = open_options([{host, Host}, {port, Port}|Flags]),
- ?fcrt("open", [{open_options, OpenOptions}]),
- case ftp_sup:start_child([[{client, self()} | StartOptions], []]) of
- {ok, Pid} ->
- ?fcrt("open - ok", [{pid, Pid}]),
- call(Pid, {open, ip_comm, OpenOptions}, plain);
- Error1 ->
- ?fcrt("open - error", [{error1, Error1}]),
- Error1
- end
- catch
- throw:Error2 ->
- Error2
- end.
-%% </BACKWARD-COMPATIBILLITY>
-
-
-
-
-
%%--------------------------------------------------------------------------
%% user(Pid, User, Pass, <Acc>) -> ok | {error, euser} | {error, econn}
%% | {error, eacct}
@@ -528,16 +495,6 @@ close(Pid) ->
cast(Pid, close),
ok.
-%%--------------------------------------------------------------------------
-%% force_active(Pid) -> ok
-%% Pid = pid()
-%%
-%% Description: Force connection to use active mode.
-%%--------------------------------------------------------------------------
-force_active(Pid) ->
- error_logger:info_report("This function is deprecated use the mode flag "
- "instead"),
- call(Pid, force_active, atom).
%%--------------------------------------------------------------------------
%% formaterror(Tag) -> string()
@@ -886,9 +843,6 @@ handle_call({_, {open, ip_comm, Host, Opts}}, From, State) ->
{stop, normal, State2#state{client = undefined}}
end;
-handle_call({_, force_active}, _, State) ->
- {reply, ok, State#state{mode = active}};
-
handle_call({_, {user, User, Password}}, From,
#state{csock = CSock} = State) when (CSock =/= undefined) ->
handle_user(User, Password, "", State#state{client = From});
diff --git a/lib/inets/src/ftp/ftp_internal.hrl b/lib/inets/src/ftp/ftp_internal.hrl
index c3fa1e611d..148f8217ba 100644
--- a/lib/inets/src/ftp/ftp_internal.hrl
+++ b/lib/inets/src/ftp/ftp_internal.hrl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2005-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2005-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%
@@ -21,7 +21,8 @@
-ifndef(ftp_internal_hrl).
-define(ftp_internal_hrl, true).
--include("inets_internal.hrl").
+-include_lib("inets/src/inets_app/inets_internal.hrl").
+
-define(SERVICE, ftpc).
-define(fcri(Label, Content), ?report_important(Label, ?SERVICE, Content)).
-define(fcrv(Label, Content), ?report_verbose(Label, ?SERVICE, Content)).
diff --git a/lib/inets/src/http_client/Makefile b/lib/inets/src/http_client/Makefile
index 628c91421f..575c6efaec 100644
--- a/lib/inets/src/http_client/Makefile
+++ b/lib/inets/src/http_client/Makefile
@@ -61,20 +61,17 @@ TARGET_FILES= $(MODULES:%=$(EBIN)/%.$(EMULATOR))
# ----------------------------------------------------
-# INETS FLAGS
-# ----------------------------------------------------
-INETS_FLAGS = -D'SERVER_SOFTWARE="$(APPLICATION)/$(VSN)"'
-
-
-# ----------------------------------------------------
# FLAGS
# ----------------------------------------------------
-INETS_ERL_FLAGS += -I ../http_lib -I ../inets_app -pa ../../ebin
-ERL_COMPILE_FLAGS += $(INETS_ERL_FLAGS) \
- $(INETS_FLAGS) \
- +'{parse_transform,sys_pre_attributes}' \
- +'{attribute,insert,app_vsn,$(APP_VSN)}'
+include ../inets_app/inets.mk
+
+ERL_COMPILE_FLAGS += \
+ $(INETS_FLAGS) \
+ $(INETS_ERL_COMPILE_FLAGS) \
+ -I../../include \
+ -I../inets_app \
+ -I../http_lib
# ----------------------------------------------------
@@ -94,9 +91,10 @@ docs:
include $(ERL_TOP)/make/otp_release_targets.mk
release_spec: opt
- $(INSTALL_DIR) $(RELSYSDIR)/src
- $(INSTALL_DATA) $(HRL_FILES) $(ERL_FILES) $(RELSYSDIR)/src
- $(INSTALL_DIR) $(RELSYSDIR)/ebin
+ $(INSTALL_DIR) $(RELSYSDIR)/src
+ $(INSTALL_DIR) $(RELSYSDIR)/src/http_client
+ $(INSTALL_DATA) $(HRL_FILES) $(ERL_FILES) $(RELSYSDIR)/src/http_client
+ $(INSTALL_DIR) $(RELSYSDIR)/ebin
$(INSTALL_DATA) $(TARGET_FILES) $(RELSYSDIR)/ebin
release_docs_spec:
diff --git a/lib/inets/src/http_client/http.erl b/lib/inets/src/http_client/http.erl
index 7e1e90b50e..bbe2fec267 100644
--- a/lib/inets/src/http_client/http.erl
+++ b/lib/inets/src/http_client/http.erl
@@ -18,21 +18,38 @@
%%
%%
-%% Description:
-%%% This version of the HTTP/1.1 client supports:
-%%% - RFC 2616 HTTP 1.1 client part
-%%% - RFC 2818 HTTP Over TLS
+%%% Description: OLD API MODULE - USE httpc INSTEAD
-module(http).
-%% API
--export([request/1, request/2, request/4, request/5,
+-deprecated({request, 1, next_major_release}).
+-deprecated({request, 2, next_major_release}).
+-deprecated({request, 4, next_major_release}).
+-deprecated({request, 5, next_major_release}).
+-deprecated({cancel_request, 1, next_major_release}).
+-deprecated({cancel_request, 2, next_major_release}).
+-deprecated({set_option, 2, next_major_release}).
+-deprecated({set_option, 3, next_major_release}).
+-deprecated({set_options, 1, next_major_release}).
+-deprecated({set_options, 2, next_major_release}).
+-deprecated({verify_cookies, 2, next_major_release}).
+-deprecated({verify_cookies, 3, next_major_release}).
+-deprecated({cookie_header, 1, next_major_release}).
+-deprecated({cookie_header, 2, next_major_release}).
+-deprecated({stream_next, 1, next_major_release}).
+-deprecated({default_profile, 0, next_major_release}).
+
+%% Deprecated
+-export([
+ request/1, request/2, request/4, request/5,
cancel_request/1, cancel_request/2,
set_option/2, set_option/3,
set_options/1, set_options/2,
- verify_cookies/2, verify_cookies/3, cookie_header/1,
- cookie_header/2, stream_next/1,
- default_profile/0]).
+ verify_cookies/2, verify_cookies/3,
+ cookie_header/1, cookie_header/2,
+ stream_next/1,
+ default_profile/0
+ ]).
%%%=========================================================================
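Since http is now only a deprecated wrapper, callers are expected to move to httpc, which takes the same arguments and returns the same result shape. A minimal sketch; the URL is just an example and inets must already be running:

%% Deprecated as of this change (compiles with a deprecation warning):
{ok, {{_, 200, _}, _Hdrs1, _Body1}} =
    http:request("http://www.erlang.org/"),

%% Preferred replacement:
{ok, {{_, 200, _}, _Hdrs2, _Body2}} =
    httpc:request("http://www.erlang.org/").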
diff --git a/lib/inets/src/http_client/httpc.erl b/lib/inets/src/http_client/httpc.erl
index 6deeab6948..851364001c 100644
--- a/lib/inets/src/http_client/httpc.erl
+++ b/lib/inets/src/http_client/httpc.erl
@@ -48,7 +48,7 @@
stop_service/1,
services/0, service_info/1]).
--include("http_internal.hrl").
+-include_lib("inets/src/http_lib/http_internal.hrl").
-include("httpc_internal.hrl").
-define(DEFAULT_PROFILE, default).
@@ -104,8 +104,14 @@ request(Url, Profile) ->
%% HTTPOptions - [HttpOption]
%% HTTPOption - {timeout, Time} | {connect_timeout, Time} |
%% {ssl, SSLOptions} | {proxy_auth, {User, Password}}
-%% Ssloptions = [SSLOption]
-%% SSLOption = {verify, code()} | {depth, depth()} | {certfile, path()} |
+%% Ssloptions = ssl_options() |
+%% {ssl, ssl_options()} |
+%% {ossl, ssl_options()} |
+%% {essl, ssl_options()}
+%% ssl_options() = [ssl_option()]
+%% ssl_option() = {verify, code()} |
+%% {depth, depth()} |
+%% {certfile, path()} |
%% {keyfile, path()} | {password, string()} | {cacertfile, path()} |
%% {ciphers, string()}
%% Options - [Option]
@@ -579,7 +585,13 @@ http_options_default() ->
error
end,
SslPost = fun(Value) when is_list(Value) ->
- {ok, Value};
+ {ok, {?HTTP_DEFAULT_SSL_KIND, Value}};
+ ({ssl, SslOptions}) when is_list(SslOptions) ->
+ {ok, {?HTTP_DEFAULT_SSL_KIND, SslOptions}};
+ ({ossl, SslOptions}) when is_list(SslOptions) ->
+ {ok, {ossl, SslOptions}};
+ ({essl, SslOptions}) when is_list(SslOptions) ->
+ {ok, {essl, SslOptions}};
(_) ->
error
end,
@@ -604,14 +616,14 @@ http_options_default() ->
error
end,
[
- {version, {value, "HTTP/1.1"}, #http_options.version, VersionPost},
- {timeout, {value, ?HTTP_REQUEST_TIMEOUT}, #http_options.timeout, TimeoutPost},
- {autoredirect, {value, true}, #http_options.autoredirect, AutoRedirectPost},
- {ssl, {value, []}, #http_options.ssl, SslPost},
- {proxy_auth, {value, undefined}, #http_options.proxy_auth, ProxyAuthPost},
- {relaxed, {value, false}, #http_options.relaxed, RelaxedPost},
- %% this field has to be *after* the timeout field (as that field is used for the default value)
- {connect_timeout, {field, #http_options.timeout}, #http_options.connect_timeout, ConnTimeoutPost}
+ {version, {value, "HTTP/1.1"}, #http_options.version, VersionPost},
+ {timeout, {value, ?HTTP_REQUEST_TIMEOUT}, #http_options.timeout, TimeoutPost},
+ {autoredirect, {value, true}, #http_options.autoredirect, AutoRedirectPost},
+ {ssl, {value, {?HTTP_DEFAULT_SSL_KIND, []}}, #http_options.ssl, SslPost},
+ {proxy_auth, {value, undefined}, #http_options.proxy_auth, ProxyAuthPost},
+ {relaxed, {value, false}, #http_options.relaxed, RelaxedPost},
+ %% this field has to be *after* the timeout option (as that field is used for the default value)
+ {connect_timeout, {field, #http_options.timeout}, #http_options.connect_timeout, ConnTimeoutPost}
].
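With the SslPost fun above, the value of the ssl HTTP option can remain a plain option list (mapped to ?HTTP_DEFAULT_SSL_KIND, ossl in this change) or name the backend explicitly. A hedged example; host, port and certificate path are placeholders:

SslOpts = [{cacertfile, "/etc/hdlt/cacerts.pem"}, {verify, 1}],

%% Old style, still accepted - mapped to the default ssl kind:
httpc:request(get, {"https://server.example.org:8443/", []},
              [{ssl, SslOpts}], []),

%% New style, selecting the ssl backend explicitly:
httpc:request(get, {"https://server.example.org:8443/", []},
              [{ssl, {essl, SslOpts}}], []).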
diff --git a/lib/inets/src/http_client/httpc_cookie.erl b/lib/inets/src/http_client/httpc_cookie.erl
index 586701b4a1..4d61f82b5a 100644
--- a/lib/inets/src/http_client/httpc_cookie.erl
+++ b/lib/inets/src/http_client/httpc_cookie.erl
@@ -476,13 +476,13 @@ path_sort(Cookies)->
lists:reverse(lists:keysort(#http_cookie.path, Cookies)).
-%% Informally, the Set-Cookie response header comprises the token
-%% Set-Cookie:, followed by a comma-separated list of one or more
-%% cookies. Netscape cookies expires attribute may also have a
-%% , in this case the header list will have been incorrectly split
-%% in parse_set_cookies/2 this functions fixs that problem.
+%% Informally, the Set-Cookie response header comprises the token
+%% Set-Cookie:, followed by a comma-separated list of one or more
+%% cookies. A Netscape cookie's expires attribute may also contain a
+%% comma; in that case the header list will have been incorrectly split
+%% in parse_set_cookies/2, and this function fixes that problem.
fix_netscape_cookie([Cookie1, Cookie2 | Rest], Acc) ->
- case inets_regexp:match(Cookie1, "expires=") of
+ case inets_regexp:match(string:to_lower(Cookie1), "expires=") of
{_, _, _} ->
fix_netscape_cookie(Rest, [Cookie1 ++ Cookie2 | Acc]);
nomatch ->
diff --git a/lib/inets/src/http_client/httpc_handler.erl b/lib/inets/src/http_client/httpc_handler.erl
index 5e79d874fb..c34b641b7b 100644
--- a/lib/inets/src/http_client/httpc_handler.erl
+++ b/lib/inets/src/http_client/httpc_handler.erl
@@ -22,8 +22,8 @@
-behaviour(gen_server).
+-include_lib("inets/src/http_lib/http_internal.hrl").
-include("httpc_internal.hrl").
--include("http_internal.hrl").
%%--------------------------------------------------------------------
@@ -177,8 +177,8 @@ stream(BodyPart, Request = #request{stream = Self}, Code)
stream(BodyPart, Request = #request{stream = Self}, 404)
when (Self =:= self) orelse (Self =:= {self, once}) ->
?hcrt("stream - self with 404", [{stream, Self}]),
- httpc_response:send(Request#request.from,
- {Request#request.id, stream, BodyPart}),
+ httpc_response:send(Request#request.from,
+ {Request#request.id, stream, BodyPart}),
{<<>>, Request};
%% Stream to file
@@ -286,8 +286,7 @@ handle_call({connect_and_send, #request{address = Address0,
handle_call(#request{address = Addr} = Request, _,
#state{status = Status,
- session = #tcp_session{socket = Socket,
- type = pipeline} = Session,
+ session = #session{type = pipeline} = Session,
timers = Timers,
options = #options{proxy = Proxy} = _Options,
profile_name = ProfileName} = State)
@@ -301,7 +300,7 @@ handle_call(#request{address = Addr} = Request, _,
Address = handle_proxy(Addr, Proxy),
- case httpc_request:send(Address, Request, Socket) of
+ case httpc_request:send(Address, Session, Request) of
ok ->
?hcrd("request sent", []),
@@ -320,10 +319,10 @@ handle_call(#request{address = Addr} = Request, _,
NewTimers = NewState#state.timers,
NewPipeline = queue:in(Request, State#state.pipeline),
NewSession =
- Session#tcp_session{queue_length =
- %% Queue + current
- queue:len(NewPipeline) + 1,
- client_close = ClientClose},
+ Session#session{queue_length =
+ %% Queue + current
+ queue:len(NewPipeline) + 1,
+ client_close = ClientClose},
httpc_manager:insert_session(NewSession, ProfileName),
?hcrd("session updated", []),
{reply, ok, State#state{pipeline = NewPipeline,
@@ -336,8 +335,8 @@ handle_call(#request{address = Addr} = Request, _,
cancel_timer(Timers#timers.queue_timer,
timeout_queue),
NewSession =
- Session#tcp_session{queue_length = 1,
- client_close = ClientClose},
+ Session#session{queue_length = 1,
+ client_close = ClientClose},
httpc_manager:insert_session(NewSession, ProfileName),
Relaxed =
(Request#request.settings)#http_options.relaxed,
@@ -357,8 +356,7 @@ handle_call(#request{address = Addr} = Request, _,
handle_call(#request{address = Addr} = Request, _,
#state{status = Status,
- session = #tcp_session{socket = Socket,
- type = keep_alive} = Session,
+ session = #session{type = keep_alive} = Session,
timers = Timers,
options = #options{proxy = Proxy} = _Options,
profile_name = ProfileName} = State)
@@ -370,7 +368,7 @@ handle_call(#request{address = Addr} = Request, _,
{status, Status}]),
Address = handle_proxy(Addr, Proxy),
- case httpc_request:send(Address, Request, Socket) of
+ case httpc_request:send(Address, Session, Request) of
ok ->
?hcrd("request sent", []),
@@ -389,10 +387,10 @@ handle_call(#request{address = Addr} = Request, _,
NewTimers = NewState#state.timers,
NewKeepAlive = queue:in(Request, State#state.keep_alive),
NewSession =
- Session#tcp_session{queue_length =
- %% Queue + current
- queue:len(NewKeepAlive) + 1,
- client_close = ClientClose},
+ Session#session{queue_length =
+ %% Queue + current
+ queue:len(NewKeepAlive) + 1,
+ client_close = ClientClose},
httpc_manager:insert_session(NewSession, ProfileName),
?hcrd("session updated", []),
{reply, ok, State#state{keep_alive = NewKeepAlive,
@@ -405,8 +403,8 @@ handle_call(#request{address = Addr} = Request, _,
cancel_timer(Timers#timers.queue_timer,
timeout_queue),
NewSession =
- Session#tcp_session{queue_length = 1,
- client_close = ClientClose},
+ Session#session{queue_length = 1,
+ client_close = ClientClose},
httpc_manager:insert_session(NewSession, ProfileName),
Relaxed =
(Request#request.settings)#http_options.relaxed,
@@ -589,13 +587,13 @@ handle_info({ssl_closed, _}, State = #state{request = undefined}) ->
%%% Error cases
handle_info({tcp_closed, _}, #state{session = Session0} = State) ->
- Socket = Session0#tcp_session.socket,
- Session = Session0#tcp_session{socket = {remote_close, Socket}},
+ Socket = Session0#session.socket,
+ Session = Session0#session{socket = {remote_close, Socket}},
%% {stop, session_remotly_closed, State};
{stop, normal, State#state{session = Session}};
handle_info({ssl_closed, _}, #state{session = Session0} = State) ->
- Socket = Session0#tcp_session.socket,
- Session = Session0#tcp_session{socket = {remote_close, Socket}},
+ Socket = Session0#session.socket,
+ Session = Session0#session{socket = {remote_close, Socket}},
%% {stop, session_remotly_closed, State};
{stop, normal, State#state{session = Session}};
handle_info({tcp_error, _, _} = Reason, State) ->
@@ -699,19 +697,18 @@ terminate(normal, #state{session = undefined}) ->
%% Init error sending, no session information has been setup but
%% there is a socket that needs closing.
terminate(normal,
- #state{request = Request,
- session = #tcp_session{id = undefined,
- socket = Socket}}) ->
- http_transport:close(socket_type(Request), Socket);
+ #state{session = #session{id = undefined} = Session}) ->
+ close_socket(Session);
%% Socket closed remotely
terminate(normal,
- #state{session = #tcp_session{socket = {remote_close, Socket},
- id = Id},
+ #state{session = #session{socket = {remote_close, Socket},
+ socket_type = SocketType,
+ id = Id},
profile_name = ProfileName,
- request = Request,
- timers = Timers,
- pipeline = Pipeline}) ->
+ request = Request,
+ timers = Timers,
+ pipeline = Pipeline}) ->
?hcrt("terminate(normal) - remote close",
[{id, Id}, {profile, ProfileName}]),
@@ -728,11 +725,11 @@ terminate(normal,
deliver_answers([Request | queue:to_list(Pipeline)]),
%% And, just in case, close our side (**really** overkill)
- http_transport:close(socket_type(Request), Socket);
+ http_transport:close(SocketType, Socket);
-terminate(_, #state{session = #tcp_session{id = Id,
- socket = Socket,
- scheme = Scheme},
+terminate(_, #state{session = #session{id = Id,
+ socket = Socket,
+ socket_type = SocketType},
request = undefined,
profile_name = ProfileName,
timers = Timers,
@@ -744,7 +741,7 @@ terminate(_, #state{session = #tcp_session{id = Id,
maybe_retry_queue(KeepAlive, State),
cancel_timer(Timers#timers.queue_timer, timeout_queue),
- http_transport:close(socket_type(Scheme), Socket);
+ http_transport:close(SocketType, Socket);
terminate(Reason, #state{request = undefined}) ->
?hcrt("terminate", [{reason, Reason}]),
@@ -878,22 +875,23 @@ connect_and_send_first_request(Address,
ConnTimeout = Settings#http_options.connect_timeout,
case connect(SocketType, Address, Options, ConnTimeout) of
{ok, Socket} ->
+ Session = #session{id = {OrigAddress, self()},
+ scheme = Scheme,
+ socket = Socket,
+ socket_type = SocketType},
?hcrd("connected - now send first request", [{socket, Socket}]),
- case httpc_request:send(Address, Request, Socket) of
+ case httpc_request:send(Address, Session, Request) of
ok ->
?hcrd("first request sent", []),
ClientClose =
httpc_request:is_client_closing(Headers),
SessionType = httpc_manager:session_type(Options),
- Session =
- #tcp_session{id = {OrigAddress, self()},
- scheme = Scheme,
- socket = Socket,
- client_close = ClientClose,
- type = SessionType},
+ Session2 =
+ Session#session{client_close = ClientClose,
+ type = SessionType},
TmpState =
State#state{request = Request,
- session = Session,
+ session = Session2,
mfa = init_mfa(Request, State),
status_line = init_status_line(Request),
headers = undefined,
@@ -947,21 +945,20 @@ handler_info(#state{request = Request,
?hcrt("handler info", [{request_info, RequestInfo}]),
%% Info about the current session/socket
- SessionType = Session#tcp_session.type,
- QueueLen = case Session#tcp_session.type of
+ SessionType = Session#session.type,
+ QueueLen = case SessionType of
pipeline ->
queue:len(Pipeline);
keep_alive ->
queue:len(KeepAlive)
end,
- Socket = Session#tcp_session.socket,
- Scheme = Session#tcp_session.scheme,
- SocketType = socket_type(Scheme),
+ Scheme = Session#session.scheme,
+ Socket = Session#session.socket,
+ SocketType = Session#session.socket_type,
?hcrt("handler info", [{session_type, SessionType},
{queue_length, QueueLen},
{scheme, Scheme},
- {socket_type, SocketType},
{socket, Socket}]),
SocketOpts = http_transport:getopts(SocketType, Socket),
@@ -1118,9 +1115,7 @@ handle_response(#state{request = Request,
?hcrd("handle response - continue", []),
%% Send request body
{_, RequestBody} = Request#request.content,
- http_transport:send(socket_type(Session#tcp_session.scheme),
- Session#tcp_session.socket,
- RequestBody),
+ send_raw(Session, RequestBody),
%% Wait for next response
activate_once(Session),
Relaxed = (Request#request.settings)#http_options.relaxed,
@@ -1217,7 +1212,7 @@ handle_pipeline(#state{status = pipeline,
%% If a pipeline that has been idle for some time is not
%% closed by the server, the client may want to close it.
NewState = activate_queue_timeout(TimeOut, State),
- NewSession = Session#tcp_session{queue_length = 0},
+ NewSession = Session#session{queue_length = 0},
httpc_manager:insert_session(NewSession, ProfileName),
%% Note mfa will be initilized when a new request
%% arrives.
@@ -1239,9 +1234,9 @@ handle_pipeline(#state{status = pipeline,
false ->
?hcrv("next request", [{request, NextRequest}]),
NewSession =
- Session#tcp_session{queue_length =
- %% Queue + current
- queue:len(Pipeline) + 1},
+ Session#session{queue_length =
+ %% Queue + current
+ queue:len(Pipeline) + 1},
httpc_manager:insert_session(NewSession, ProfileName),
Relaxed =
(NextRequest#request.settings)#http_options.relaxed,
@@ -1290,16 +1285,16 @@ handle_keep_alive_queue(
%% If a keep_alive session has been idle for some time is not
%% closed by the server, the client may want to close it.
NewState = activate_queue_timeout(TimeOut, State),
- NewSession = Session#tcp_session{queue_length = 0},
+ NewSession = Session#session{queue_length = 0},
httpc_manager:insert_session(NewSession, ProfileName),
%% Note mfa will be initilized when a new request
%% arrives.
{noreply,
- NewState#state{request = undefined,
- mfa = undefined,
+ NewState#state{request = undefined,
+ mfa = undefined,
status_line = undefined,
- headers = undefined,
- body = undefined
+ headers = undefined,
+ body = undefined
}
};
{{value, NextRequest}, KeepAlive} ->
@@ -1342,10 +1337,12 @@ case_insensitive_header(Str) when is_list(Str) ->
case_insensitive_header(Str) ->
Str.
-activate_once(#tcp_session{scheme = Scheme, socket = Socket}) ->
- SocketType = socket_type(Scheme),
+activate_once(#session{socket = Socket, socket_type = SocketType}) ->
http_transport:setopts(SocketType, Socket, [{active, once}]).
+close_socket(#session{socket = Socket, socket_type = SocketType}) ->
+ http_transport:close(SocketType, Socket).
+
activate_request_timeout(
#state{request = #request{timer = undefined} = Request} = State) ->
Timeout = (Request#request.settings)#http_options.timeout,
@@ -1378,7 +1375,7 @@ activate_queue_timeout(Time, State) ->
State#state{timers = #timers{queue_timer = Ref}}.
-is_pipeline_enabled_client(#tcp_session{type = pipeline}) ->
+is_pipeline_enabled_client(#session{type = pipeline}) ->
true;
is_pipeline_enabled_client(_) ->
false.
@@ -1391,7 +1388,7 @@ is_keep_alive_enabled_server("HTTP/1.0",
is_keep_alive_enabled_server(_,_) ->
false.
-is_keep_alive_connection(Headers, #tcp_session{client_close = ClientClose}) ->
+is_keep_alive_connection(Headers, #session{client_close = ClientClose}) ->
(not ((ClientClose) orelse httpc_response:is_server_closing(Headers))).
try_to_enable_pipeline_or_keep_alive(
@@ -1416,7 +1413,7 @@ try_to_enable_pipeline_or_keep_alive(
httpc_manager:insert_session(Session, ProfileName),
%% Make sure type is keep_alive in session
%% as it in this case might be pipeline
- NewSession = Session#tcp_session{type = keep_alive},
+ NewSession = Session#session{type = keep_alive},
State#state{status = keep_alive,
session = NewSession}
end;
@@ -1551,11 +1548,11 @@ init_status_line(#request{settings = Settings}) ->
socket_type(#request{scheme = http}) ->
ip_comm;
socket_type(#request{scheme = https, settings = Settings}) ->
- {ssl, Settings#http_options.ssl};
-socket_type(http) ->
- ip_comm;
-socket_type(https) ->
- {ssl, []}. %% Dummy value ok for ex setopts that does not use this value
+ Settings#http_options.ssl.
+%% socket_type(http) ->
+%% ip_comm;
+%% socket_type(https) ->
+%% {ssl1, []}. %% Dummy value ok for ex setopts that does not use this value
start_stream({_Version, _Code, _ReasonPhrase}, _Headers,
#request{stream = none} = Request) ->
@@ -1624,18 +1621,15 @@ end_stream(SL, R) ->
next_body_chunk(#state{request = #request{stream = {self, once}},
- once = once, session = Session} = State) ->
- http_transport:setopts(socket_type(Session#tcp_session.scheme),
- Session#tcp_session.socket,
- [{active, once}]),
+ once = once,
+ session = Session} = State) ->
+ activate_once(Session),
State#state{once = inactive};
next_body_chunk(#state{request = #request{stream = {self, once}},
once = inactive} = State) ->
State; %% Wait for user to call stream_next
next_body_chunk(#state{session = Session} = State) ->
- http_transport:setopts(socket_type(Session#tcp_session.scheme),
- Session#tcp_session.socket,
- [{active, once}]),
+ activate_once(Session),
State.
handle_verbose(verbose) ->
@@ -1712,6 +1706,11 @@ handle_verbose(_) ->
%% ok.
+send_raw(#session{socket = Socket, socket_type = SocketType}, Body) ->
+ http_transport:send(SocketType, Socket, Body).
+
+
+
call(Msg, Pid) ->
Timeout = infinity,
call(Msg, Pid, Timeout).
diff --git a/lib/inets/src/http_client/httpc_internal.hrl b/lib/inets/src/http_client/httpc_internal.hrl
index 4d76c4beb3..3cdd95a02b 100644
--- a/lib/inets/src/http_client/httpc_internal.hrl
+++ b/lib/inets/src/http_client/httpc_internal.hrl
@@ -18,7 +18,11 @@
%%
%%
--include("inets_internal.hrl").
+-ifndef(httpc_internal_hrl).
+-define(httpc_internal_hrl, true).
+
+-include_lib("inets/src/inets_app/inets_internal.hrl").
+
-define(SERVICE, httpc).
-define(hcri(Label, Data), ?report_important(Label, ?SERVICE, Data)).
-define(hcrv(Label, Data), ?report_verbose(Label, ?SERVICE, Data)).
@@ -104,13 +108,14 @@
}
).
--record(tcp_session,
+-record(session,
{
id, % {{Host, Port}, HandlerPid}
client_close, % true | false
scheme, % http (HTTP/TCP) | https (HTTP/SSL/TCP)
socket, % Open socket, used by connection
- queue_length = 1, % Current length of pipeline or keep alive queue
+ socket_type, % socket-type, used by connection
+ queue_length = 1, % Current length of pipeline or keep-alive queue
type % pipeline | keep_alive (wait for response before sending new request)
}).
@@ -138,3 +143,6 @@
%% path, % string()
%% q % query: string()
%% }).
+
+
+-endif. % -ifdef(httpc_internal_hrl).
diff --git a/lib/inets/src/http_client/httpc_manager.erl b/lib/inets/src/http_client/httpc_manager.erl
index b278077a66..d5d6376369 100644
--- a/lib/inets/src/http_client/httpc_manager.erl
+++ b/lib/inets/src/http_client/httpc_manager.erl
@@ -21,8 +21,8 @@
-behaviour(gen_server).
+-include_lib("inets/src/http_lib/http_internal.hrl").
-include("httpc_internal.hrl").
--include("http_internal.hrl").
%% Internal Application API
-export([
@@ -333,7 +333,7 @@ do_init(ProfileName, CookiesDir) ->
?hcrt("create session db", []),
SessionDbName = session_db_name(ProfileName),
ets:new(SessionDbName,
- [public, set, named_table, {keypos, #tcp_session.id}]),
+ [public, set, named_table, {keypos, #session.id}]),
%% Create handler db
?hcrt("create handler/request db", []),
@@ -876,12 +876,12 @@ select_session(Method, HostPort, Scheme, SessionType,
%% client_close, scheme and type specified.
%% The fields id (part of: HandlerPid) and queue_length
%% specified.
- Pattern = #tcp_session{id = {HostPort, '$1'},
- client_close = false,
- scheme = Scheme,
- socket = '_',
- queue_length = '$2',
- type = SessionType},
+ Pattern = #session{id = {HostPort, '$1'},
+ client_close = false,
+ scheme = Scheme,
+ queue_length = '$2',
+ type = SessionType,
+ _ = '_'},
%% {'_', {HostPort, '$1'}, false, Scheme, '_', '$2', SessionTyp},
Candidates = ets:match(SessionDb, Pattern),
?hcrd("select session", [{host_port, HostPort},
diff --git a/lib/inets/src/http_client/httpc_request.erl b/lib/inets/src/http_client/httpc_request.erl
index 55e0af4b42..d4df97ad40 100644
--- a/lib/inets/src/http_client/httpc_request.erl
+++ b/lib/inets/src/http_client/httpc_request.erl
@@ -19,12 +19,13 @@
-module(httpc_request).
--include("http_internal.hrl").
+-include_lib("inets/src/http_lib/http_internal.hrl").
-include("httpc_internal.hrl").
%%% Internal API
-export([send/3, is_idempotent/1, is_client_closing/1]).
+
%%%=========================================================================
%%% Internal application API
%%%=========================================================================
@@ -39,10 +40,9 @@
%%
%% Description: Composes and sends a HTTP-request.
%%-------------------------------------------------------------------------
-send(SendAddr, #request{scheme = Scheme, socket_opts = SocketOpts} = Request,
- Socket)
+send(SendAddr, #session{socket = Socket, socket_type = SocketType},
+ #request{socket_opts = SocketOpts} = Request)
when is_list(SocketOpts) ->
- SocketType = socket_type(Scheme),
case http_transport:setopts(SocketType, Socket, SocketOpts) of
ok ->
send(SendAddr, Socket, SocketType,
@@ -50,8 +50,7 @@ send(SendAddr, #request{scheme = Scheme, socket_opts = SocketOpts} = Request,
{error, Reason} ->
{error, {setopts_failed, Reason}}
end;
-send(SendAddr, #request{scheme = Scheme} = Request, Socket) ->
- SocketType = socket_type(Scheme),
+send(SendAddr, #session{socket = Socket, socket_type = SocketType}, Request) ->
send(SendAddr, Socket, SocketType, Request).
send(SendAddr, Socket, SocketType,
@@ -209,10 +208,6 @@ headers(_, "HTTP/0.9") ->
headers(Headers, _) ->
Headers.
-socket_type(http) ->
- ip_comm;
-socket_type(https) ->
- {ssl, []}.
http_headers([], Headers) ->
lists:flatten(Headers);
diff --git a/lib/inets/src/http_client/httpc_response.erl b/lib/inets/src/http_client/httpc_response.erl
index df7d40a33e..bb9c516259 100644
--- a/lib/inets/src/http_client/httpc_response.erl
+++ b/lib/inets/src/http_client/httpc_response.erl
@@ -19,7 +19,7 @@
-module(httpc_response).
--include("http_internal.hrl").
+-include_lib("inets/src/http_lib/http_internal.hrl").
-include("httpc_internal.hrl").
%% API
diff --git a/lib/inets/src/http_lib/Makefile b/lib/inets/src/http_lib/Makefile
index 7f4c92861c..5dac3b0c00 100644
--- a/lib/inets/src/http_lib/Makefile
+++ b/lib/inets/src/http_lib/Makefile
@@ -55,24 +55,16 @@ TARGET_FILES= $(MODULES:%=$(EBIN)/%.$(EMULATOR))
# ----------------------------------------------------
-# INETS FLAGS
-# ----------------------------------------------------
-INETS_FLAGS = -D'SERVER_SOFTWARE="$(APPLICATION)/$(VSN)"'
-
-
-# ----------------------------------------------------
# FLAGS
# ----------------------------------------------------
-INETS_ERL_FLAGS += -I ../inets_app
-ifeq ($(WARN_UNUSED_WARS),true)
-ERL_COMPILE_FLAGS += +warn_unused_vars
-endif
+include ../inets_app/inets.mk
-ERL_COMPILE_FLAGS += $(INETS_ERL_FLAGS) \
- $(INETS_FLAGS) \
- +'{parse_transform,sys_pre_attributes}' \
- +'{attribute,insert,app_vsn,$(APP_VSN)}'
+ERL_COMPILE_FLAGS += \
+ $(INETS_FLAGS) \
+ $(INETS_ERL_COMPILE_FLAGS) \
+ -I../../include \
+ -I../inets_app
# ----------------------------------------------------
@@ -94,9 +86,10 @@ docs:
include $(ERL_TOP)/make/otp_release_targets.mk
release_spec: opt
- $(INSTALL_DIR) $(RELSYSDIR)/src
- $(INSTALL_DATA) $(HRL_FILES) $(ERL_FILES) $(RELSYSDIR)/src
- $(INSTALL_DIR) $(RELSYSDIR)/ebin
+ $(INSTALL_DIR) $(RELSYSDIR)/src
+ $(INSTALL_DIR) $(RELSYSDIR)/src/http_lib
+ $(INSTALL_DATA) $(HRL_FILES) $(ERL_FILES) $(RELSYSDIR)/src/http_lib
+ $(INSTALL_DIR) $(RELSYSDIR)/ebin
$(INSTALL_DATA) $(TARGET_FILES) $(RELSYSDIR)/ebin
release_docs_spec:
diff --git a/lib/inets/src/http_lib/http_internal.hrl b/lib/inets/src/http_lib/http_internal.hrl
index bb2e831727..5440f214b5 100644
--- a/lib/inets/src/http_lib/http_internal.hrl
+++ b/lib/inets/src/http_lib/http_internal.hrl
@@ -1,28 +1,37 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2002-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2002-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%
--include("inets_internal.hrl").
+-ifndef(http_internal_hrl).
+-define(http_internal_hrl, true).
--define(HTTP_MAX_BODY_SIZE, nolimit).
+-include_lib("inets/src/inets_app/inets_internal.hrl").
+
+-define(HTTP_MAX_BODY_SIZE, nolimit).
-define(HTTP_MAX_HEADER_SIZE, 10240).
--define(HTTP_MAX_URI_SIZE, nolimit).
+-define(HTTP_MAX_URI_SIZE, nolimit).
+
+-ifndef(HTTP_DEFAULT_SSL_KIND).
+-define(HTTP_DEFAULT_SSL_KIND, ossl).
+%% -define(HTTP_DEFAULT_SSL_KIND, essl).
+-endif. % -ifndef(HTTP_DEFAULT_SSL_KIND).
+
%%% Response headers
-record(http_response_h,{
@@ -106,3 +115,5 @@
'last-modified',
other=[] % list() - Key/Value list with other headers
}).
+
+-endif. % -ifndef(http_internal_hrl).
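
Both the new header guard and the HTTP_DEFAULT_SSL_KIND define are wrapped in -ifndef, so the header can be included repeatedly and the default SSL implementation can be switched at build time instead of by editing the file. A hedged example from the Erlang shell (the include path is an assumption; an equivalent -D flag in the makefiles would do the same):

    %% compile http_transport with essl as the fallback for plain {ssl, _}
    c(http_transport, [{d, 'HTTP_DEFAULT_SSL_KIND', essl},
                       {i, "lib/inets/src/http_lib"}]).
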
diff --git a/lib/inets/src/http_lib/http_transport.erl b/lib/inets/src/http_lib/http_transport.erl
index 7c2ac626e6..b8121852b8 100644
--- a/lib/inets/src/http_lib/http_transport.erl
+++ b/lib/inets/src/http_lib/http_transport.erl
@@ -36,7 +36,9 @@
-export([negotiate/3]).
--include("inets_internal.hrl").
+-include_lib("inets/src/inets_app/inets_internal.hrl").
+-include("http_internal.hrl").
+
-define(SERVICE, httpl).
-define(hlri(Label, Content), ?report_important(Label, ?SERVICE, Content)).
-define(hlrv(Label, Content), ?report_verbose(Label, ?SERVICE, Content)).
@@ -55,6 +57,18 @@
%% Description: Makes sure inet_db or ssl is started.
%%-------------------------------------------------------------------------
start(ip_comm) ->
+ do_start_ip_comm();
+
+%% This is just for backward compatibility
+start({ssl, _}) ->
+ do_start_ssl();
+start({ossl, _}) ->
+ do_start_ssl();
+start({essl, _}) ->
+ do_start_ssl().
+
+
+do_start_ip_comm() ->
case inet_db:start() of
{ok, _} ->
ok;
@@ -62,8 +76,9 @@ start(ip_comm) ->
ok;
Error ->
Error
- end;
-start({ssl, _}) ->
+ end.
+
+do_start_ssl() ->
case ssl:start() of
ok ->
ok;
@@ -97,18 +112,26 @@ connect(ip_comm = _SocketType, {Host, Port}, Opts0, Timeout)
[{host, Host}, {port, Port}, {opts, Opts}, {timeout, Timeout}]),
gen_tcp:connect(Host, Port, Opts, Timeout);
-connect({ssl, SslConfig}, {Host, Port}, _, Timeout) ->
- Opts = [binary, {active, false}] ++ SslConfig,
- ?hlrt("connect using ssl",
- [{host, Host}, {port, Port}, {ssl_config, SslConfig},
- {timeout, Timeout}]),
+%% Wrapper for backward compatibility
+connect({ssl, SslConfig}, Address, Opts, Timeout) ->
+ connect({?HTTP_DEFAULT_SSL_KIND, SslConfig}, Address, Opts, Timeout);
+
+connect({ossl, SslConfig}, {Host, Port}, _, Timeout) ->
+ Opts = [binary, {active, false}, {ssl_imp, old}] ++ SslConfig,
+ ?hlrt("connect using ossl",
+ [{host, Host},
+ {port, Port},
+ {ssl_config, SslConfig},
+ {timeout, Timeout}]),
ssl:connect(Host, Port, Opts, Timeout);
-connect({erl_ssl, SslConfig}, {Host, Port}, _, Timeout) ->
+connect({essl, SslConfig}, {Host, Port}, _, Timeout) ->
Opts = [binary, {active, false}, {ssl_imp, new}] ++ SslConfig,
- ?hlrt("connect using erl_ssl",
- [{host, Host}, {port, Port}, {ssl_config, SslConfig},
- {timeout, Timeout}]),
+ ?hlrt("connect using essl",
+ [{host, Host},
+ {port, Port},
+ {ssl_config, SslConfig},
+ {timeout, Timeout}]),
ssl:connect(Host, Port, Opts, Timeout).
@@ -136,13 +159,32 @@ listen(ip_comm, Addr, Port) ->
Else
end;
-listen({ssl, SSLConfig} = Ssl, Addr, Port) ->
+%% Wrapper for backward compatibility
+listen({ssl, SSLConfig}, Addr, Port) ->
+ ?hlrt("listen (wrapper)",
+ [{addr, Addr},
+ {port, Port},
+ {ssl_config, SSLConfig}]),
+ listen({?HTTP_DEFAULT_SSL_KIND, SSLConfig}, Addr, Port);
+
+listen({ossl, SSLConfig} = Ssl, Addr, Port) ->
+ ?hlrt("listen (ossl)",
+ [{addr, Addr},
+ {port, Port},
+ {ssl_config, SSLConfig}]),
Opt = sock_opt(Ssl, Addr, SSLConfig),
- ssl:listen(Port, Opt);
-
-listen({erl_ssl, SSLConfig} = Ssl, Addr, Port) ->
+ ?hlrt("listen options", [{opt, Opt}]),
+ ssl:listen(Port, [{ssl_imp, old} | Opt]);
+
+listen({essl, SSLConfig} = Ssl, Addr, Port) ->
+ ?hlrt("listen (essl)",
+ [{addr, Addr},
+ {port, Port},
+ {ssl_config, SSLConfig}]),
Opt = sock_opt(Ssl, Addr, SSLConfig),
- ssl:listen(Port, [{ssl_imp, new} | Opt]).
+ ?hlrt("listen options", [{opt, Opt}]),
+ Opt2 = [{ssl_imp, new}, {reuseaddr, true} | Opt],
+ ssl:listen(Port, Opt2).
listen_ip_comm(Addr, Port) ->
@@ -228,9 +270,17 @@ ip_family_of(IpFamilyStr) ->
%%-------------------------------------------------------------------------
accept(SocketType, ListenSocket) ->
accept(SocketType, ListenSocket, infinity).
+
accept(ip_comm, ListenSocket, Timeout) ->
gen_tcp:accept(ListenSocket, Timeout);
-accept({ssl,_SSLConfig}, ListenSocket, Timeout) ->
+
+%% Wrapper for backward compatibility
+accept({ssl, SSLConfig}, ListenSocket, Timeout) ->
+ accept({?HTTP_DEFAULT_SSL_KIND, SSLConfig}, ListenSocket, Timeout);
+
+accept({ossl, _SSLConfig}, ListenSocket, Timeout) ->
+ ssl:transport_accept(ListenSocket, Timeout);
+accept({essl, _SSLConfig}, ListenSocket, Timeout) ->
ssl:transport_accept(ListenSocket, Timeout).
@@ -244,7 +294,15 @@ accept({ssl,_SSLConfig}, ListenSocket, Timeout) ->
%%-------------------------------------------------------------------------
controlling_process(ip_comm, Socket, NewOwner) ->
gen_tcp:controlling_process(Socket, NewOwner);
-controlling_process({ssl, _}, Socket, NewOwner) ->
+
+%% Wrapper for backward compatibility
+controlling_process({ssl, SSLConfig}, Socket, NewOwner) ->
+ controlling_process({?HTTP_DEFAULT_SSL_KIND, SSLConfig}, Socket, NewOwner);
+
+controlling_process({ossl, _}, Socket, NewOwner) ->
+ ssl:controlling_process(Socket, NewOwner);
+
+controlling_process({essl, _}, Socket, NewOwner) ->
ssl:controlling_process(Socket, NewOwner).
@@ -259,9 +317,23 @@ controlling_process({ssl, _}, Socket, NewOwner) ->
setopts(ip_comm, Socket, Options) ->
?hlrt("ip_comm setopts", [{socket, Socket}, {options, Options}]),
inet:setopts(Socket, Options);
-setopts({ssl, _}, Socket, Options) ->
- ?hlrt("ssl setopts", [{socket, Socket}, {options, Options}]),
- ssl:setopts(Socket, Options).
+
+%% Wrapper for backward compatibility
+setopts({ssl, SSLConfig}, Socket, Options) ->
+ setopts({?HTTP_DEFAULT_SSL_KIND, SSLConfig}, Socket, Options);
+
+setopts({ossl, _}, Socket, Options) ->
+ ?hlrt("[o]ssl setopts", [{socket, Socket}, {options, Options}]),
+ Reason = (catch ssl:setopts(Socket, Options)),
+ ?hlrt("[o]ssl setopts result", [{reason, Reason}]),
+ Reason;
+
+
+setopts({essl, _}, Socket, Options) ->
+ ?hlrt("[e]ssl setopts", [{socket, Socket}, {options, Options}]),
+ Reason = (catch ssl:setopts(Socket, Options)),
+ ?hlrt("[e]ssl setopts result", [{reason, Reason}]),
+ Reason.
%%-------------------------------------------------------------------------
@@ -283,15 +355,27 @@ getopts(ip_comm, Socket, Options) ->
{error, _} ->
[]
end;
-getopts({ssl, _}, Socket, Options) ->
+
+%% Wrapper for backward compatibility
+getopts({ssl, SSLConfig}, Socket, Options) ->
+ getopts({?HTTP_DEFAULT_SSL_KIND, SSLConfig}, Socket, Options);
+
+getopts({ossl, _}, Socket, Options) ->
?hlrt("ssl getopts", [{socket, Socket}, {options, Options}]),
+ getopts_ssl(Socket, Options);
+
+getopts({essl, _}, Socket, Options) ->
+ ?hlrt("essl getopts", [{socket, Socket}, {options, Options}]),
+ getopts_ssl(Socket, Options).
+
+getopts_ssl(Socket, Options) ->
case ssl:getopts(Socket, Options) of
{ok, SocketOpts} ->
SocketOpts;
{error, _} ->
[]
end.
-
+
%%-------------------------------------------------------------------------
%% getstat(SocketType, Socket) -> socket_stats()
@@ -308,8 +392,15 @@ getstat(ip_comm = _SocketType, Socket) ->
{error, _} ->
[]
end;
-getstat({ssl, _} = _SocketType, _Socket) ->
- %% ?hlrt("ssl getstat", [{socket, Socket}]),
+
+%% Wrapper for backward compatibility
+getstat({ssl, SSLConfig}, Socket) ->
+ getstat({?HTTP_DEFAULT_SSL_KIND, SSLConfig}, Socket);
+
+getstat({ossl, _} = _SocketType, _Socket) ->
+ [];
+
+getstat({essl, _} = _SocketType, _Socket) ->
[].
@@ -322,7 +413,15 @@ getstat({ssl, _} = _SocketType, _Socket) ->
%%-------------------------------------------------------------------------
send(ip_comm, Socket, Message) ->
gen_tcp:send(Socket, Message);
-send({ssl, _}, Socket, Message) ->
+
+%% Wrapper for backward compatibility
+send({ssl, SSLConfig}, Socket, Message) ->
+ send({?HTTP_DEFAULT_SSL_KIND, SSLConfig}, Socket, Message);
+
+send({ossl, _}, Socket, Message) ->
+ ssl:send(Socket, Message);
+
+send({essl, _}, Socket, Message) ->
ssl:send(Socket, Message).
@@ -335,9 +434,18 @@ send({ssl, _}, Socket, Message) ->
%%-------------------------------------------------------------------------
close(ip_comm, Socket) ->
gen_tcp:close(Socket);
-close({ssl, _}, Socket) ->
+
+%% Wrapper for backward compatibility
+close({ssl, SSLConfig}, Socket) ->
+ close({?HTTP_DEFAULT_SSL_KIND, SSLConfig}, Socket);
+
+close({ossl, _}, Socket) ->
+ ssl:close(Socket);
+
+close({essl, _}, Socket) ->
ssl:close(Socket).
+
%%-------------------------------------------------------------------------
%% peername(SocketType, Socket) -> {Port, SockName}
%% SocketType = ip_comm | {ssl, _}
@@ -368,7 +476,17 @@ peername(ip_comm, Socket) ->
{-1, "unknown"}
end;
-peername({ssl, _}, Socket) ->
+%% Wrapper for backward compatibility
+peername({ssl, SSLConfig}, Socket) ->
+ peername({?HTTP_DEFAULT_SSL_KIND, SSLConfig}, Socket);
+
+peername({ossl, _}, Socket) ->
+ peername_ssl(Socket);
+
+peername({essl, _}, Socket) ->
+ peername_ssl(Socket).
+
+peername_ssl(Socket) ->
case ssl:peername(Socket) of
{ok,{{A, B, C, D}, Port}} ->
PeerName = integer_to_list(A)++"."++integer_to_list(B)++"."++
@@ -409,7 +527,17 @@ sockname(ip_comm, Socket) ->
{-1, "unknown"}
end;
-sockname({ssl, _}, Socket) ->
+%% Wrapper for backward compatibility
+sockname({ssl, SSLConfig}, Socket) ->
+ sockname({?HTTP_DEFAULT_SSL_KIND, SSLConfig}, Socket);
+
+sockname({ossl, _}, Socket) ->
+ sockname_ssl(Socket);
+
+sockname({essl, _}, Socket) ->
+ sockname_ssl(Socket).
+
+sockname_ssl(Socket) ->
case ssl:sockname(Socket) of
{ok,{{A, B, C, D}, Port}} ->
SockName = integer_to_list(A)++"."++integer_to_list(B)++"."++
@@ -455,22 +583,31 @@ sock_opt2(Opts) ->
[{packet, 0}, {active, false} | Opts].
negotiate(ip_comm,_,_) ->
+ ?hlrt("negotiate(ip_comm)", []),
ok;
-negotiate({ssl,_},Socket,Timeout) ->
- negotiate(Socket, Timeout);
-negotiate({erl_ssl, _}, Socket, Timeout) ->
- negotiate(Socket, Timeout).
-
-negotiate(Socket, Timeout) ->
+negotiate({ssl, SSLConfig}, Socket, Timeout) ->
+ ?hlrt("negotiate(ssl)", []),
+ negotiate({?HTTP_DEFAULT_SSL_KIND, SSLConfig}, Socket, Timeout);
+negotiate({ossl, _}, Socket, Timeout) ->
+ ?hlrt("negotiate(ossl)", []),
+ negotiate_ssl(Socket, Timeout);
+negotiate({essl, _}, Socket, Timeout) ->
+ ?hlrt("negotiate(essl)", []),
+ negotiate_ssl(Socket, Timeout).
+
+negotiate_ssl(Socket, Timeout) ->
+ ?hlrt("negotiate_ssl", [{socket, Socket}, {timeout, Timeout}]),
case ssl:ssl_accept(Socket, Timeout) of
ok ->
ok;
- {error, Error} ->
- case lists:member(Error,
- [timeout,econnreset,esslaccept,esslerrssl]) of
+ {error, Reason} ->
+ ?hlrd("negotiate_ssl - accept failed", [{reason, Reason}]),
+ %% Look for "valid" error reasons
+ ValidReasons = [timeout, econnreset, esslaccept, esslerrssl],
+ case lists:member(Reason, ValidReasons) of
true ->
- {error,normal};
+ {error, normal};
false ->
- {error, Error}
+ {error, Reason}
end
end.
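
With these clauses http_transport accepts three SSL tags: the legacy {ssl, Config} tuple is rewritten to {?HTTP_DEFAULT_SSL_KIND, Config}, {ossl, Config} forces the OpenSSL-based implementation ({ssl_imp, old}) and {essl, Config} the Erlang-based one ({ssl_imp, new}). A hedged client-side sketch; host, port, timeout and the certificate options are placeholders, not values from this diff:

    connect_example() ->
        SslOpts = [{certfile, "client-cert.pem"}, {keyfile, "client-key.pem"}],
        %% legacy form, mapped to the compile-time default
        {ok, _S1} = http_transport:connect({ssl,  SslOpts}, {"localhost", 8443}, [], 5000),
        %% explicitly ask for the new Erlang-based ssl
        {ok, _S2} = http_transport:connect({essl, SslOpts}, {"localhost", 8443}, [], 5000).
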
diff --git a/lib/inets/src/http_lib/http_util.erl b/lib/inets/src/http_lib/http_util.erl
index ddb58c7116..4f1147176c 100644
--- a/lib/inets/src/http_lib/http_util.erl
+++ b/lib/inets/src/http_lib/http_util.erl
@@ -38,13 +38,79 @@ to_upper(Str) ->
to_lower(Str) ->
string:to_lower(Str).
-convert_netscapecookie_date([_D,_A,_Y, $,, _SP,
- D1,D2,_DA,
- M,O,N,_DA,
- Y1,Y2,Y3,Y4,_SP,
- H1,H2,_Col,
- M1,M2,_Col,
+%% Example: Mon, 09-Dec-2002 13:46:00 GMT
+convert_netscapecookie_date([_D,_A,_Y, $,, $ ,
+ D1,D2, $-,
+ M,O,N, $-,
+ Y1,Y2,Y3,Y4, $ ,
+ H1,H2, $:,
+ M1,M2, $:,
+ S1,S2|_Rest]) ->
+ Year = list_to_integer([Y1,Y2,Y3,Y4]),
+ Day = list_to_integer([D1,D2]),
+ Month = convert_month([M,O,N]),
+ Hour = list_to_integer([H1,H2]),
+ Min = list_to_integer([M1,M2]),
+ Sec = list_to_integer([S1,S2]),
+ {{Year,Month,Day},{Hour,Min,Sec}};
+
+convert_netscapecookie_date([_D,_A,_Y, $,, $ ,
+ D1,D2, $-,
+ M,O,N, $-,
+ Y3,Y4, $ ,
+ H1,H2, $:,
+ M1,M2, $:,
+ S1,S2|_Rest]) ->
+ {CurrentYear, _, _} = date(),
+ [Y1,Y2|_] = integer_to_list(CurrentYear),
+ Year = list_to_integer([Y1,Y2,Y3,Y4]),
+ Day = list_to_integer([D1,D2]),
+ Month = convert_month([M,O,N]),
+ Hour = list_to_integer([H1,H2]),
+ Min = list_to_integer([M1,M2]),
+ Sec = list_to_integer([S1,S2]),
+ {{Year,Month,Day},{Hour,Min,Sec}};
+
+convert_netscapecookie_date([_D,_A,_Y, $ ,
+ D1,D2, $-,
+ M,O,N, $-,
+ Y1,Y2,Y3,Y4, $ ,
+ H1,H2, $:,
+ M1,M2, $:,
S1,S2|_Rest]) ->
+ Year = list_to_integer([Y1,Y2,Y3,Y4]),
+ Day = list_to_integer([D1,D2]),
+ Month = convert_month([M,O,N]),
+ Hour = list_to_integer([H1,H2]),
+ Min = list_to_integer([M1,M2]),
+ Sec = list_to_integer([S1,S2]),
+ {{Year,Month,Day},{Hour,Min,Sec}};
+
+convert_netscapecookie_date([_D,_A,_Y, $ ,
+ D1,D2, $-,
+ M,O,N, $-,
+ Y3,Y4, $ ,
+ H1,H2, $:,
+ M1,M2, $:,
+ S1,S2|_Rest]) ->
+ {CurrentYear, _, _} = date(),
+ [Y1,Y2|_] = integer_to_list(CurrentYear),
+ Year = list_to_integer([Y1,Y2,Y3,Y4]),
+ Day = list_to_integer([D1,D2]),
+ Month = convert_month([M,O,N]),
+ Hour = list_to_integer([H1,H2]),
+ Min = list_to_integer([M1,M2]),
+ Sec = list_to_integer([S1,S2]),
+ {{Year,Month,Day},{Hour,Min,Sec}};
+
+%% Sloppy...
+convert_netscapecookie_date([_D,_A,_Y, $,, _SP,
+ D1,D2,_DA,
+ M,O,N,_DA,
+ Y1,Y2,Y3,Y4,_SP,
+ H1,H2,_Col,
+ M1,M2,_Col,
+ S1,S2|_Rest]) ->
Year=list_to_integer([Y1,Y2,Y3,Y4]),
Day=list_to_integer([D1,D2]),
Month=convert_month([M,O,N]),
@@ -54,12 +120,12 @@ convert_netscapecookie_date([_D,_A,_Y, $,, _SP,
{{Year,Month,Day},{Hour,Min,Sec}};
convert_netscapecookie_date([_D,_A,_Y, _SP,
- D1,D2,_DA,
- M,O,N,_DA,
- Y1,Y2,Y3,Y4,_SP,
- H1,H2,_Col,
- M1,M2,_Col,
- S1,S2|_Rest]) ->
+ D1,D2,_DA,
+ M,O,N,_DA,
+ Y1,Y2,Y3,Y4,_SP,
+ H1,H2,_Col,
+ M1,M2,_Col,
+ S1,S2|_Rest]) ->
Year=list_to_integer([Y1,Y2,Y3,Y4]),
Day=list_to_integer([D1,D2]),
Month=convert_month([M,O,N]),
@@ -68,17 +134,17 @@ convert_netscapecookie_date([_D,_A,_Y, _SP,
Sec=list_to_integer([S1,S2]),
{{Year,Month,Day},{Hour,Min,Sec}}.
-hexlist_to_integer([])->
+hexlist_to_integer([]) ->
empty;
%% When the string only contains one value it is easily done.
%% 0-9
-hexlist_to_integer([Size]) when Size >= 48 , Size =< 57 ->
+hexlist_to_integer([Size]) when (Size >= 48) andalso (Size =< 57) ->
Size - 48;
%% A-F
-hexlist_to_integer([Size]) when Size >= 65 , Size =< 70 ->
+hexlist_to_integer([Size]) when (Size >= 65) andalso (Size =< 70) ->
Size - 55;
%% a-f
-hexlist_to_integer([Size]) when Size >= 97 , Size =< 102 ->
+hexlist_to_integer([Size]) when (Size >= 97) andalso (Size =< 102) ->
Size - 87;
hexlist_to_integer([_Size]) ->
not_a_num;
@@ -141,7 +207,7 @@ hexlist_to_integer2([HexVal | HexString], Pos, Sum)
hexlist_to_integer2(_AfterHexString, _Pos, Sum)->
Sum.
-integer_to_hexlist(Num, Pot, Res) when Pot<0 ->
+integer_to_hexlist(Num, Pot, Res) when Pot < 0 ->
convert_to_ascii([Num | Res]);
integer_to_hexlist(Num,Pot,Res) ->
@@ -163,7 +229,9 @@ convert_to_ascii(RevesedNum) ->
convert_to_ascii([], Num)->
Num;
-convert_to_ascii([Num | Reversed], Number) when Num > -1, Num < 10 ->
+convert_to_ascii([Num | Reversed], Number)
+ when (Num > -1) andalso (Num < 10) ->
convert_to_ascii(Reversed, [Num + 48 | Number]);
-convert_to_ascii([Num | Reversed], Number) when Num > 9, Num < 16 ->
+convert_to_ascii([Num | Reversed], Number)
+ when (Num > 9) andalso (Num < 16) ->
convert_to_ascii(Reversed, [Num + 55 | Number]).
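
The new clauses make convert_netscapecookie_date/1 accept both dash- and space-separated fields as well as two-digit years, which are expanded with the current century taken from date/0. Hedged examples, assuming the function is still exported and that the current year starts with "20":

    {{2002,12,9},{13,46,0}} =
        http_util:convert_netscapecookie_date("Mon, 09-Dec-2002 13:46:00 GMT"),
    {{2002,12,9},{13,46,0}} =
        http_util:convert_netscapecookie_date("Mon, 09-Dec-02 13:46:00 GMT").
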
diff --git a/lib/inets/src/http_server/Makefile b/lib/inets/src/http_server/Makefile
index ce1405011e..879e605217 100644
--- a/lib/inets/src/http_server/Makefile
+++ b/lib/inets/src/http_server/Makefile
@@ -90,20 +90,17 @@ TARGET_FILES= $(MODULES:%=$(EBIN)/%.$(EMULATOR))
# ----------------------------------------------------
-# INETS FLAGS
-# ----------------------------------------------------
-INETS_FLAGS = -D'SERVER_SOFTWARE="$(APPLICATION)/$(VSN)"'
-
-
-# ----------------------------------------------------
# FLAGS
# ----------------------------------------------------
-INETS_ERL_FLAGS += -I ../http_lib -I ../inets_app -pa ../../ebin
-ERL_COMPILE_FLAGS += $(INETS_ERL_FLAGS) \
- $(INETS_FLAGS) \
- +'{parse_transform,sys_pre_attributes}' \
- +'{attribute,insert,app_vsn,$(APP_VSN)}'
+include ../inets_app/inets.mk
+
+ERL_COMPILE_FLAGS += \
+ $(INETS_FLAGS) \
+ $(INETS_ERL_COMPILE_FLAGS) \
+ -I../../include \
+ -I../inets_app \
+ -I../http_lib
# ----------------------------------------------------
@@ -125,9 +122,10 @@ docs:
include $(ERL_TOP)/make/otp_release_targets.mk
release_spec: opt
- $(INSTALL_DIR) $(RELSYSDIR)/src
- $(INSTALL_DATA) $(HRL_FILES) $(ERL_FILES) $(RELSYSDIR)/src
- $(INSTALL_DIR) $(RELSYSDIR)/ebin
+ $(INSTALL_DIR) $(RELSYSDIR)/src
+ $(INSTALL_DIR) $(RELSYSDIR)/src/http_server
+ $(INSTALL_DATA) $(HRL_FILES) $(ERL_FILES) $(RELSYSDIR)/src/http_server
+ $(INSTALL_DIR) $(RELSYSDIR)/ebin
$(INSTALL_DATA) $(TARGET_FILES) $(RELSYSDIR)/ebin
release_docs_spec:
diff --git a/lib/inets/src/http_server/httpd.erl b/lib/inets/src/http_server/httpd.erl
index 8fe54ccef6..fb5fa1c758 100644
--- a/lib/inets/src/http_server/httpd.erl
+++ b/lib/inets/src/http_server/httpd.erl
@@ -24,54 +24,25 @@
-include("httpd.hrl").
--deprecated({start, 0, next_major_release}).
--deprecated({start, 1, next_major_release}).
--deprecated({start_link, 1, next_major_release}).
--deprecated({start_child, 0, next_major_release}).
--deprecated({start_child, 1, next_major_release}).
--deprecated({stop, 0, next_major_release}).
--deprecated({stop, 1, next_major_release}).
--deprecated({stop, 2, next_major_release}).
--deprecated({stop_child, 0, next_major_release}).
--deprecated({stop_child, 1, next_major_release}).
--deprecated({stop_child, 2, next_major_release}).
--deprecated({restart, 0, next_major_release}).
--deprecated({restart, 1, next_major_release}).
--deprecated({restart, 2, next_major_release}).
--deprecated({block, 0, next_major_release}).
--deprecated({block, 1, next_major_release}).
--deprecated({block, 2, next_major_release}).
--deprecated({block, 3, next_major_release}).
--deprecated({block, 4, next_major_release}).
--deprecated({unblock, 0, next_major_release}).
--deprecated({unblock, 1, next_major_release}).
--deprecated({unblock, 2, next_major_release}).
%% Behavior callbacks
--export([start_standalone/1, start_service/1, stop_service/1, services/0,
- service_info/1]).
+-export([
+ start_standalone/1,
+ start_service/1,
+ stop_service/1,
+ services/0,
+ service_info/1
+ ]).
%% API
-export([parse_query/1, reload_config/2, info/1, info/2, info/3]).
-%% Deprecated
--export([start/0, start/1,
- start_link/0, start_link/1,
- start_child/0,start_child/1,
- stop/0,stop/1,stop/2,
- stop_child/0,stop_child/1,stop_child/2,
- restart/0,restart/1,restart/2]).
-
-%% Management stuff should be internal functions
-%% Will be from r13
--export([block/0,block/1,block/2,block/3,block/4,
- unblock/0,unblock/1,unblock/2]).
-
-%% Internal Debugging and status info stuff...
-%% Keep for now should probably be moved to test catalog
--export([get_status/1,get_status/2,get_status/3,
- get_admin_state/0,get_admin_state/1,get_admin_state/2,
- get_usage_state/0,get_usage_state/1,get_usage_state/2]).
+%% Internal debugging and status info stuff...
+-export([
+ get_status/1, get_status/2, get_status/3,
+ get_admin_state/0, get_admin_state/1, get_admin_state/2,
+ get_usage_state/0, get_usage_state/1, get_usage_state/2
+ ]).
%%%========================================================================
%%% API
@@ -111,6 +82,7 @@ info(Address, Port, Properties) when is_integer(Port) andalso
is_list(Properties) ->
httpd_conf:get_config(Address, Port, Properties).
+
%%%========================================================================
%%% Behavior callbacks
%%%========================================================================
@@ -149,6 +121,8 @@ service_info(Pid) ->
exit:{noproc, _} ->
{error, service_not_available}
end.
+
+
%%%--------------------------------------------------------------
%%% Internal functions
%%%--------------------------------------------------------------------
@@ -176,6 +150,7 @@ child_name2info({httpd_instance_sup, Address, Port}) ->
{ok, [{bind_address, Address}, {port, Port} | Info]}
end.
+
reload(Config, Address, Port) ->
Name = make_name(Address,Port),
case whereis(Name) of
@@ -185,26 +160,12 @@ reload(Config, Address, Port) ->
{error,not_started}
end.
-reload(Addr, Port) when is_integer(Port) ->
- Name = make_name(Addr,Port),
- case whereis(Name) of
- Pid when is_pid(Pid) ->
- httpd_manager:reload(Pid, undefined);
- _ ->
- {error,not_started}
- end.
%%% =========================================================
-%%% Function: block/0, block/1, block/2, block/3, block/4
-%%% block()
-%%% block(Port)
-%%% block(ConfigFile)
-%%% block(Addr,Port)
-%%% block(Port,Mode)
-%%% block(ConfigFile,Mode)
-%%% block(Addr,Port,Mode)
-%%% block(ConfigFile,Mode,Timeout)
-%%% block(Addr,Port,Mode,Timeout)
+%%% Function: block/3, block/4
+%%% block(Addr, Port, Mode)
+%%% block(ConfigFile, Mode, Timeout)
+%%% block(Addr, Port, Mode, Timeout)
%%%
%%% Returns: ok | {error,Reason}
%%%
@@ -237,58 +198,32 @@ reload(Addr, Port) when is_integer(Port) ->
%%% Mode -> disturbing | non_disturbing
%%% Timeout -> integer()
%%%
-block() -> block(undefined,8888,disturbing).
-
-block(Port) when is_integer(Port) ->
- block(undefined,Port,disturbing);
-
-block(ConfigFile) when is_list(ConfigFile) ->
- case get_addr_and_port(ConfigFile) of
- {ok,Addr,Port} ->
- block(Addr,Port,disturbing);
- Error ->
- Error
- end.
-
-block(Addr,Port) when is_integer(Port) ->
- block(Addr,Port,disturbing);
-
-block(Port,Mode) when is_integer(Port) andalso is_atom(Mode) ->
- block(undefined,Port,Mode);
-
-block(ConfigFile,Mode) when is_list(ConfigFile) andalso is_atom(Mode) ->
- case get_addr_and_port(ConfigFile) of
- {ok,Addr,Port} ->
- block(Addr,Port,Mode);
- Error ->
- Error
- end.
-
-block(Addr,Port,disturbing) when is_integer(Port) ->
- do_block(Addr,Port,disturbing);
-block(Addr,Port,non_disturbing) when is_integer(Port) ->
- do_block(Addr,Port,non_disturbing);
+block(Addr, Port, disturbing) when is_integer(Port) ->
+ do_block(Addr, Port, disturbing);
+block(Addr, Port, non_disturbing) when is_integer(Port) ->
+ do_block(Addr, Port, non_disturbing);
-block(ConfigFile,Mode,Timeout) when is_list(ConfigFile) andalso
- is_atom(Mode) andalso
- is_integer(Timeout) ->
+block(ConfigFile, Mode, Timeout)
+ when is_list(ConfigFile) andalso
+ is_atom(Mode) andalso
+ is_integer(Timeout) ->
case get_addr_and_port(ConfigFile) of
- {ok,Addr,Port} ->
- block(Addr,Port,Mode,Timeout);
+ {ok, Addr, Port} ->
+ block(Addr, Port, Mode, Timeout);
Error ->
Error
end.
-block(Addr,Port,non_disturbing,Timeout)
+block(Addr, Port, non_disturbing, Timeout)
+ when is_integer(Port) andalso is_integer(Timeout) ->
+ do_block(Addr, Port, non_disturbing, Timeout);
+block(Addr,Port,disturbing,Timeout)
when is_integer(Port) andalso is_integer(Timeout) ->
- do_block(Addr,Port,non_disturbing,Timeout);
-block(Addr,Port,disturbing,Timeout) when is_integer(Port) andalso
- is_integer(Timeout) ->
- do_block(Addr,Port,disturbing,Timeout).
+ do_block(Addr, Port, disturbing, Timeout).
-do_block(Addr,Port,Mode) when is_integer(Port) andalso is_atom(Mode) ->
+do_block(Addr, Port, Mode) when is_integer(Port) andalso is_atom(Mode) ->
Name = make_name(Addr,Port),
case whereis(Name) of
Pid when is_pid(Pid) ->
@@ -298,7 +233,7 @@ do_block(Addr,Port,Mode) when is_integer(Port) andalso is_atom(Mode) ->
end.
-do_block(Addr,Port,Mode,Timeout)
+do_block(Addr, Port, Mode, Timeout)
when is_integer(Port) andalso is_atom(Mode) ->
Name = make_name(Addr,Port),
case whereis(Name) of
@@ -310,11 +245,8 @@ do_block(Addr,Port,Mode,Timeout)
%%% =========================================================
-%%% Function: unblock/0, unblock/1, unblock/2
-%%% unblock()
-%%% unblock(Port)
-%%% unblock(ConfigFile)
-%%% unblock(Addr,Port)
+%%% Function: unblock/2
+%%% unblock(Addr, Port)
%%%
%%% Description: This function is used to reverse a previous block
%%% operation on the HTTP server.
@@ -323,16 +255,6 @@ do_block(Addr,Port,Mode,Timeout)
%%% Addr -> {A,B,C,D} | string() | undefined
%%% ConfigFile -> string()
%%%
-unblock() -> unblock(undefined,8888).
-unblock(Port) when is_integer(Port) -> unblock(undefined,Port);
-
-unblock(ConfigFile) when is_list(ConfigFile) ->
- case get_addr_and_port(ConfigFile) of
- {ok,Addr,Port} ->
- unblock(Addr,Port);
- Error ->
- Error
- end.
unblock(Addr, Port) when is_integer(Port) ->
Name = make_name(Addr,Port),
@@ -521,80 +443,81 @@ do_reload_config(ConfigList, Mode) ->
%%%--------------------------------------------------------------
%%% Deprecated
%%%--------------------------------------------------------------
-start() ->
- start("/var/tmp/server_root/conf/8888.conf").
-start(ConfigFile) ->
- {ok, Pid} = inets:start(httpd, ConfigFile, stand_alone),
- unlink(Pid),
- {ok, Pid}.
+%% start() ->
+%% start("/var/tmp/server_root/conf/8888.conf").
-start_link() ->
- start("/var/tmp/server_root/conf/8888.conf").
+%% start(ConfigFile) ->
+%% {ok, Pid} = inets:start(httpd, ConfigFile, stand_alone),
+%% unlink(Pid),
+%% {ok, Pid}.
-start_link(ConfigFile) when is_list(ConfigFile) ->
- inets:start(httpd, ConfigFile, stand_alone).
+%% start_link() ->
+%% start("/var/tmp/server_root/conf/8888.conf").
-stop() ->
- stop(8888).
+%% start_link(ConfigFile) when is_list(ConfigFile) ->
+%% inets:start(httpd, ConfigFile, stand_alone).
-stop(Port) when is_integer(Port) ->
- stop(undefined, Port);
-stop(Pid) when is_pid(Pid) ->
- old_stop(Pid);
-stop(ConfigFile) when is_list(ConfigFile) ->
- old_stop(ConfigFile).
+%% stop() ->
+%% stop(8888).
-stop(Addr, Port) when is_integer(Port) ->
- old_stop(Addr, Port).
+%% stop(Port) when is_integer(Port) ->
+%% stop(undefined, Port);
+%% stop(Pid) when is_pid(Pid) ->
+%% old_stop(Pid);
+%% stop(ConfigFile) when is_list(ConfigFile) ->
+%% old_stop(ConfigFile).
-start_child() ->
- start_child("/var/tmp/server_root/conf/8888.conf").
+%% stop(Addr, Port) when is_integer(Port) ->
+%% old_stop(Addr, Port).
-start_child(ConfigFile) ->
- httpd_sup:start_child(ConfigFile).
+%% start_child() ->
+%% start_child("/var/tmp/server_root/conf/8888.conf").
-stop_child() ->
- stop_child(8888).
+%% start_child(ConfigFile) ->
+%% httpd_sup:start_child(ConfigFile).
-stop_child(Port) ->
- stop_child(undefined, Port).
+%% stop_child() ->
+%% stop_child(8888).
-stop_child(Addr, Port) when is_integer(Port) ->
- httpd_sup:stop_child(Addr, Port).
+%% stop_child(Port) ->
+%% stop_child(undefined, Port).
-restart() -> reload(undefined, 8888).
+%% stop_child(Addr, Port) when is_integer(Port) ->
+%% httpd_sup:stop_child(Addr, Port).
-restart(Port) when is_integer(Port) ->
- reload(undefined, Port).
-restart(Addr, Port) ->
- reload(Addr, Port).
+%% restart() -> reload(undefined, 8888).
-old_stop(Pid) when is_pid(Pid) ->
- do_stop(Pid);
-old_stop(ConfigFile) when is_list(ConfigFile) ->
- case get_addr_and_port(ConfigFile) of
- {ok, Addr, Port} ->
- old_stop(Addr, Port);
-
- Error ->
- Error
- end;
-old_stop(_StartArgs) ->
- ok.
+%% restart(Port) when is_integer(Port) ->
+%% reload(undefined, Port).
+%% restart(Addr, Port) ->
+%% reload(Addr, Port).
-old_stop(Addr, Port) when is_integer(Port) ->
- Name = old_make_name(Addr, Port),
- case whereis(Name) of
- Pid when is_pid(Pid) ->
- do_stop(Pid),
- ok;
- _ ->
- not_started
- end.
+%% old_stop(Pid) when is_pid(Pid) ->
+%% do_stop(Pid);
+%% old_stop(ConfigFile) when is_list(ConfigFile) ->
+%% case get_addr_and_port(ConfigFile) of
+%% {ok, Addr, Port} ->
+%% old_stop(Addr, Port);
+
+%% Error ->
+%% Error
+%% end;
+%% old_stop(_StartArgs) ->
+%% ok.
+
+%% old_stop(Addr, Port) when is_integer(Port) ->
+%% Name = old_make_name(Addr, Port),
+%% case whereis(Name) of
+%% Pid when is_pid(Pid) ->
+%% do_stop(Pid),
+%% ok;
+%% _ ->
+%% not_started
+%% end.
-do_stop(Pid) ->
- exit(Pid, shutdown).
+%% do_stop(Pid) ->
+%% exit(Pid, shutdown).
-old_make_name(Addr,Port) ->
- httpd_util:make_name("httpd_instance_sup",Addr,Port).
+%% old_make_name(Addr,Port) ->
+%% httpd_util:make_name("httpd_instance_sup",Addr,Port).
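
After this cleanup the module exports only the behaviour callbacks, parse_query/1, reload_config/2, the info functions and the internal status helpers; the old start/stop/restart entry points survive merely as the commented-out reference above. A hedged reminder of how a running instance is still inspected through the remaining API (looking the pid up via inets:services/0 is an assumption of the example, not part of this diff):

    info_example() ->
        {value, {httpd, Pid}} = lists:keysearch(httpd, 1, inets:services()),
        httpd:info(Pid).
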
diff --git a/lib/inets/src/http_server/httpd_acceptor.erl b/lib/inets/src/http_server/httpd_acceptor.erl
index 568fd3c610..c261eff6b2 100644
--- a/lib/inets/src/http_server/httpd_acceptor.erl
+++ b/lib/inets/src/http_server/httpd_acceptor.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2001-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2001-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%
@@ -138,9 +138,9 @@ acceptor_loop(Manager, SocketType, ListenSocket, ConfigDb, AcceptTimeout) ->
handle_error(Reason, ConfigDb),
?MODULE:acceptor_loop(Manager, SocketType, ListenSocket,
ConfigDb, AcceptTimeout);
- {'EXIT', Reason} ->
- ?hdri("accept exited", [{reason, Reason}]),
- handle_error({'EXIT', Reason}, ConfigDb),
+ {'EXIT', _Reason} = EXIT ->
+ ?hdri("accept exited", [{reason, _Reason}]),
+ handle_error(EXIT, ConfigDb),
?MODULE:acceptor_loop(Manager, SocketType, ListenSocket,
ConfigDb, AcceptTimeout)
end.
diff --git a/lib/inets/src/http_server/httpd_cgi.erl b/lib/inets/src/http_server/httpd_cgi.erl
index 0532d7d100..c06a06aad3 100644
--- a/lib/inets/src/http_server/httpd_cgi.erl
+++ b/lib/inets/src/http_server/httpd_cgi.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2005-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2005-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%
@@ -21,7 +21,8 @@
-export([parse_headers/1, handle_headers/1]).
--include("inets_internal.hrl").
+-include_lib("inets/src/inets_app/inets_internal.hrl").
+
%%%=========================================================================
%%% Internal application API
diff --git a/lib/inets/src/http_server/httpd_conf.erl b/lib/inets/src/http_server/httpd_conf.erl
index 5ca2e47eb5..8438c4037e 100644
--- a/lib/inets/src/http_server/httpd_conf.erl
+++ b/lib/inets/src/http_server/httpd_conf.erl
@@ -25,13 +25,15 @@
%% Application internal API
-export([load/1, load/2, load_mime_types/1, store/1, store/2,
- remove/1, remove_all/1, config/1, get_config/2, get_config/3,
- lookup/2, lookup/3, lookup/4,
- validate_properties/1]).
+ remove/1, remove_all/1, get_config/2, get_config/3,
+ lookup_socket_type/1,
+ lookup/2, lookup/3, lookup/4,
+ validate_properties/1]).
-define(VMODULE,"CONF").
-include("httpd.hrl").
-include("httpd_internal.hrl").
+-include_lib("inets/src/http_lib/http_internal.hrl").
%%%=========================================================================
@@ -216,9 +218,12 @@ load("ServerName " ++ ServerName, []) ->
{ok,[],{server_name,clean(ServerName)}};
load("SocketType " ++ SocketType, []) ->
- case check_enum(clean(SocketType),["ssl","ip_comm"]) of
+ %% ssl is the same as HTTP_DEFAULT_SSL_KIND
+ %% ossl is ssl based on OpenSSL (the "old" ssl)
+ %% essl is the pure Erlang-based ssl (the "new" ssl)
+ case check_enum(clean(SocketType), ["ssl", "ossl", "essl", "ip_comm"]) of
{ok, ValidSocketType} ->
- {ok, [], {socket_type,ValidSocketType}};
+ {ok, [], {socket_type, ValidSocketType}};
{error,_} ->
{error, ?NICE(clean(SocketType) ++ " is an invalid SocketType")}
end;
@@ -226,7 +231,7 @@ load("SocketType " ++ SocketType, []) ->
load("Port " ++ Port, []) ->
case make_integer(Port) of
{ok, Integer} ->
- {ok, [], {port,Integer}};
+ {ok, [], {port, Integer}};
{error, _} ->
{error, ?NICE(clean(Port)++" is an invalid Port")}
end;
@@ -534,7 +539,10 @@ validate_config_params([{server_name, Value} | _]) ->
throw({server_name, Value});
validate_config_params([{socket_type, Value} | Rest])
- when (Value =:= ip_comm) orelse (Value =:= ssl) ->
+ when (Value =:= ip_comm) orelse
+ (Value =:= ssl) orelse
+ (Value =:= ossl) orelse
+ (Value =:= essl) ->
validate_config_params(Rest);
validate_config_params([{socket_type, Value} | _]) ->
throw({socket_type, Value});
@@ -695,6 +703,8 @@ store(ConfigList0) ->
ConfigList)
catch
throw:Error ->
+ ?hdri("store - config parameter validation failed",
+ [{error, Error}]),
{error, {invalid_option, Error}}
end.
@@ -741,27 +751,27 @@ remove(ConfigDB) ->
ets:delete(ConfigDB),
ok.
-config(ConfigDB) ->
- case httpd_util:lookup(ConfigDB, socket_type,ip_comm) of
- ssl ->
- case ssl_certificate_file(ConfigDB) of
- undefined ->
- {error,
- "Directive SSLCertificateFile "
- "not found in the config file"};
- SSLCertificateFile ->
- {ssl,
- SSLCertificateFile++
- ssl_certificate_key_file(ConfigDB)++
- ssl_verify_client(ConfigDB)++
- ssl_ciphers(ConfigDB)++
- ssl_password(ConfigDB)++
- ssl_verify_depth(ConfigDB)++
- ssl_ca_certificate_file(ConfigDB)}
- end;
- ip_comm ->
- ip_comm
- end.
+%% config(ConfigDB) ->
+%% case httpd_util:lookup(ConfigDB, socket_type, ip_comm) of
+%% ssl ->
+%% case ssl_certificate_file(ConfigDB) of
+%% undefined ->
+%% {error,
+%% "Directive SSLCertificateFile "
+%% "not found in the config file"};
+%% SSLCertificateFile ->
+%% {ssl,
+%% SSLCertificateFile++
+%% ssl_certificate_key_file(ConfigDB)++
+%% ssl_verify_client(ConfigDB)++
+%% ssl_ciphers(ConfigDB)++
+%% ssl_password(ConfigDB)++
+%% ssl_verify_depth(ConfigDB)++
+%% ssl_ca_certificate_file(ConfigDB)}
+%% end;
+%% ip_comm ->
+%% ip_comm
+%% end.
get_config(Address, Port) ->
@@ -797,6 +807,38 @@ table(Address, Port) ->
httpd_util:make_name("httpd_conf", Address, Port).
+lookup_socket_type(ConfigDB) ->
+ case httpd_util:lookup(ConfigDB, socket_type, ip_comm) of
+ ip_comm ->
+ ip_comm;
+ SSL when (SSL =:= ssl) orelse (SSL =:= ossl) orelse (SSL =:= essl) ->
+ SSLTag =
+ if
+ (SSL =:= ssl) ->
+ ?HTTP_DEFAULT_SSL_KIND;
+ true ->
+ SSL
+ end,
+ case ssl_certificate_file(ConfigDB) of
+ undefined ->
+ Reason = "Directive SSLCertificateFile "
+ "not found in the config file",
+ throw({error, Reason});
+ SSLCertificateFile ->
+ {SSLTag, SSLCertificateFile ++ ssl_config(ConfigDB)}
+ end
+ end.
+
+ssl_config(ConfigDB) ->
+ ssl_certificate_key_file(ConfigDB) ++
+ ssl_verify_client(ConfigDB) ++
+ ssl_ciphers(ConfigDB) ++
+ ssl_password(ConfigDB) ++
+ ssl_verify_depth(ConfigDB) ++
+ ssl_ca_certificate_file(ConfigDB).
+
+
+
%%%========================================================================
%%% Internal functions
%%%========================================================================
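
lookup_socket_type/1 replaces the removed config/1: it normalizes the SocketType directive into exactly the tuples http_transport dispatches on, expanding plain ssl to ?HTTP_DEFAULT_SSL_KIND and appending the collected SSL directives, and throws if SSLCertificateFile is missing. A hedged sketch of consuming its result (the return values of the example are made up):

    socket_type_example(ConfigDB) ->
        case httpd_conf:lookup_socket_type(ConfigDB) of
            ip_comm ->
                {plain, no_tls};
            {Tag, SSLOpts} when Tag =:= ossl; Tag =:= essl ->
                %% SSLOpts aggregates the certificate, key, cipher, ... directives
                {tls, Tag, SSLOpts}
        end.
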
diff --git a/lib/inets/src/http_server/httpd_esi.erl b/lib/inets/src/http_server/httpd_esi.erl
index b1a75fda52..026ec9a5fe 100644
--- a/lib/inets/src/http_server/httpd_esi.erl
+++ b/lib/inets/src/http_server/httpd_esi.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2005-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2005-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%
@@ -21,7 +21,8 @@
-export([parse_headers/1, handle_headers/1]).
--include("inets_internal.hrl").
+-include_lib("inets/src/inets_app/inets_internal.hrl").
+
%%%=========================================================================
%%% Internal application API
diff --git a/lib/inets/src/http_server/httpd_instance_sup.erl b/lib/inets/src/http_server/httpd_instance_sup.erl
index 0aaeb838c2..baa60d318c 100644
--- a/lib/inets/src/http_server/httpd_instance_sup.erl
+++ b/lib/inets/src/http_server/httpd_instance_sup.erl
@@ -97,14 +97,16 @@ start_link(ConfigFile, AcceptTimeout, ListenInfo, Debug) ->
%%%=========================================================================
%%% Supervisor callback
%%%=========================================================================
-init([ConfigFile, ConfigList, AcceptTimeout, _Debug, Address, Port]) ->
+init([ConfigFile, ConfigList, AcceptTimeout, Debug, Address, Port]) ->
+ httpd_util:enable_debug(Debug),
Flags = {one_for_one, 0, 1},
Children = [sup_spec(httpd_acceptor_sup, Address, Port),
sup_spec(httpd_misc_sup, Address, Port),
worker_spec(httpd_manager, Address, Port,
ConfigFile, ConfigList,AcceptTimeout)],
{ok, {Flags, Children}};
-init([ConfigFile, ConfigList, AcceptTimeout, _Debug, Address, Port, ListenInfo]) ->
+init([ConfigFile, ConfigList, AcceptTimeout, Debug, Address, Port, ListenInfo]) ->
+ httpd_util:enable_debug(Debug),
Flags = {one_for_one, 0, 1},
Children = [sup_spec(httpd_acceptor_sup, Address, Port),
sup_spec(httpd_misc_sup, Address, Port),
diff --git a/lib/inets/src/http_server/httpd_internal.hrl b/lib/inets/src/http_server/httpd_internal.hrl
index 7795ab6c18..38b0ddefd3 100644
--- a/lib/inets/src/http_server/httpd_internal.hrl
+++ b/lib/inets/src/http_server/httpd_internal.hrl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2009-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%
@@ -21,7 +21,8 @@
-ifndef(httpd_internal_hrl).
-define(httpd_internal_hrl, true).
--include("inets_internal.hrl").
+-include_lib("inets/src/inets_app/inets_internal.hrl").
+
-define(SERVICE, httpd).
-define(hdri(Label, Content), ?report_important(Label, ?SERVICE, Content)).
-define(hdrv(Label, Content), ?report_verbose(Label, ?SERVICE, Content)).
diff --git a/lib/inets/src/http_server/httpd_manager.erl b/lib/inets/src/http_server/httpd_manager.erl
index f2e8763907..b44bc77c41 100644
--- a/lib/inets/src/http_server/httpd_manager.erl
+++ b/lib/inets/src/http_server/httpd_manager.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2000-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2000-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%
@@ -238,24 +238,25 @@ init([ConfigFile, ConfigList, AcceptTimeout, Addr, Port]) ->
case (catch do_init(ConfigFile, ConfigList, AcceptTimeout, Addr, Port)) of
{error, Reason} ->
String = lists:flatten(
- io_lib:format("Failed initiating "
- "web server: ~n~p~n~p~n",
- [ConfigFile,Reason])),
+ io_lib:format("Failed initiating web server: "
+ "~n~p"
+ "~n~p"
+ "~n", [ConfigFile, Reason])),
error_logger:error_report(String),
{stop, {error, Reason}};
{ok, State} ->
{ok, State}
end;
-init([ConfigFile, ConfigList, AcceptTimeout, Addr, Port,
- ListenInfo]) ->
+init([ConfigFile, ConfigList, AcceptTimeout, Addr, Port, ListenInfo]) ->
process_flag(trap_exit, true),
case (catch do_init(ConfigFile, ConfigList, AcceptTimeout,
Addr, Port, ListenInfo)) of
{error, Reason} ->
String = lists:flatten(
- io_lib:format("Failed initiating "
- "web server: ~n~p~n~p~n",
- [ConfigFile,Reason])),
+ io_lib:format("Failed initiating web server: "
+ "~n~p"
+ "~n~p"
+ "~n", [ConfigFile, Reason])),
error_logger:error_report(String),
{stop, {error, Reason}};
{ok, State} ->
@@ -264,13 +265,14 @@ init([ConfigFile, ConfigList, AcceptTimeout, Addr, Port,
do_init(ConfigFile, ConfigList, AcceptTimeout, Addr, Port) ->
NewConfigFile = proplists:get_value(file, ConfigList, ConfigFile),
- ConfigDB = do_initial_store(ConfigList),
- SocketType = httpd_conf:config(ConfigDB),
+ ConfigDB = do_initial_store(ConfigList),
+ SocketType = httpd_conf:lookup_socket_type(ConfigDB),
case httpd_acceptor_sup:start_acceptor(SocketType, Addr,
Port, ConfigDB, AcceptTimeout) of
{ok, _Pid} ->
- Status = [{max_conn,0}, {last_heavy_load,never},
- {last_connection,never}],
+ Status = [{max_conn, 0},
+ {last_heavy_load, never},
+ {last_connection, never}],
State = #state{socket_type = SocketType,
config_file = NewConfigFile,
config_db = ConfigDB,
@@ -284,7 +286,7 @@ do_init(ConfigFile, ConfigList, AcceptTimeout, Addr, Port) ->
do_init(ConfigFile, ConfigList, AcceptTimeout, Addr, Port, ListenInfo) ->
NewConfigFile = proplists:get_value(file, ConfigList, ConfigFile),
ConfigDB = do_initial_store(ConfigList),
- SocketType = httpd_conf:config(ConfigDB),
+ SocketType = httpd_conf:lookup_socket_type(ConfigDB),
case httpd_acceptor_sup:start_acceptor(SocketType, Addr,
Port, ConfigDB,
AcceptTimeout, ListenInfo) of
diff --git a/lib/inets/src/http_server/httpd_request.erl b/lib/inets/src/http_server/httpd_request.erl
index 8eee08e766..883acbf585 100644
--- a/lib/inets/src/http_server/httpd_request.erl
+++ b/lib/inets/src/http_server/httpd_request.erl
@@ -19,22 +19,35 @@
-module(httpd_request).
--include("http_internal.hrl").
+-include_lib("inets/src/http_lib/http_internal.hrl").
-include("httpd.hrl").
+-include("httpd_internal.hrl").
--export([parse/1, whole_body/2, validate/3, update_mod_data/5,
- body_data/2]).
+-export([
+ parse/1,
+ whole_body/2,
+ validate/3,
+ update_mod_data/5,
+ body_data/2
+ ]).
%% Callback API - used for example if the header/body is received a
%% little at a time on a socket.
--export([parse_method/1, parse_uri/1, parse_version/1, parse_headers/1,
- whole_body/1]).
+-export([
+ parse_method/1, parse_uri/1, parse_version/1, parse_headers/1,
+ whole_body/1
+ ]).
+
%%%=========================================================================
%%% Internal application API
%%%=========================================================================
parse([Bin, MaxSizes]) ->
- parse_method(Bin, [], MaxSizes, []).
+ ?hdrt("parse", [{bin, Bin}, {max_sizes, MaxSizes}]),
+ parse_method(Bin, [], MaxSizes, []);
+parse(Unknown) ->
+ ?hdrt("parse", [{unknown, Unknown}]),
+ exit({bad_args, Unknown}).
%% Functions that may be returned during the decoding process
%% if the input data is incomplete.
@@ -119,30 +132,65 @@ update_mod_data(ModData, Method, RequestURI, HTTPVersion, Headers)->
%%% Internal functions
%%%========================================================================
parse_method(<<>>, Method, MaxSizes, Result) ->
+ ?hdrt("parse_method - empty bin",
+ [{method, Method}, {max_sizes, MaxSizes}, {result, Result}]),
{?MODULE, parse_method, [Method, MaxSizes, Result]};
parse_method(<<?SP, Rest/binary>>, Method, MaxSizes, Result) ->
+ ?hdrt("parse_method - SP begin",
+ [{rest, Rest},
+ {method, Method},
+ {max_sizes, MaxSizes},
+ {result, Result}]),
parse_uri(Rest, [], 0, MaxSizes,
[string:strip(lists:reverse(Method)) | Result]);
parse_method(<<Octet, Rest/binary>>, Method, MaxSizes, Result) ->
+ ?hdrt("parse_method",
+ [{octet, Octet},
+ {rest, Rest},
+ {method, Method},
+ {max_sizes, MaxSizes},
+ {result, Result}]),
parse_method(Rest, [Octet | Method], MaxSizes, Result).
-parse_uri(_, _, CurrSize, {MaxURI, _}, _) when CurrSize > MaxURI,
- MaxURI =/= nolimit ->
+parse_uri(_, _, CurrSize, {MaxURI, _}, _)
+ when (CurrSize > MaxURI) andalso (MaxURI =/= nolimit) ->
+ ?hdrt("parse_uri",
+ [{current_size, CurrSize},
+ {max_uri, MaxURI}]),
%% We do not know the version of the client, as it comes after the
%% URI; send the lowest version in the response so that the client
%% will be able to handle it.
HttpVersion = "HTTP/0.9",
{error, {uri_too_long, MaxURI}, HttpVersion};
parse_uri(<<>>, URI, CurrSize, MaxSizes, Result) ->
+ ?hdrt("parse_uri - empty bin",
+ [{uri, URI},
+ {current_size, CurrSize},
+ {max_sz, MaxSizes},
+ {result, Result}]),
{?MODULE, parse_uri, [URI, CurrSize, MaxSizes, Result]};
parse_uri(<<?SP, Rest/binary>>, URI, _, MaxSizes, Result) ->
+ ?hdrt("parse_uri - SP begin",
+ [{uri, URI},
+ {max_sz, MaxSizes},
+ {result, Result}]),
parse_version(Rest, [], MaxSizes,
[string:strip(lists:reverse(URI)) | Result]);
%% Can happen if it is a simple HTTP/0.9 request, i.e. "GET /\r\n\r\n"
-parse_uri(<<?CR, _Rest/binary>> = Data, URI, _,MaxSizes, Result) ->
+parse_uri(<<?CR, _Rest/binary>> = Data, URI, _, MaxSizes, Result) ->
+ ?hdrt("parse_uri - CR begin",
+ [{uri, URI},
+ {max_sz, MaxSizes},
+ {result, Result}]),
parse_version(Data, [], MaxSizes,
[string:strip(lists:reverse(URI)) | Result]);
parse_uri(<<Octet, Rest/binary>>, URI, CurrSize, MaxSizes, Result) ->
+ ?hdrt("parse_uri",
+ [{octet, Octet},
+ {uri, URI},
+ {curr_sz, CurrSize},
+ {max_sz, MaxSizes},
+ {result, Result}]),
parse_uri(Rest, [Octet | URI], CurrSize + 1, MaxSizes, Result).
parse_version(<<>>, Version, MaxSizes, Result) ->
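
These parse functions are written in a continuation style: whenever the buffer runs out they return an {M, F, A} triple that the caller applies to [MoreData | A] once the next packet arrives (this is exactly what httpd_request_handler does in handle_info/2 further down); a complete parse eventually yields {ok, Result} or an {error, ...} tuple. A hedged sketch of driving the parser by hand, with packet contents and size limits made up:

    parse_example(Packet, NextPacket) ->
        MaxSizes = {nolimit, 10240},   %% {MaxURISize, MaxHeaderSize}
        case httpd_request:parse([Packet, MaxSizes]) of
            {ok, Result} ->
                Result;
            {Mod, Fun, Args} ->
                %% incomplete input: feed in the next chunk
                Mod:Fun([NextPacket | Args])
        end.
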
diff --git a/lib/inets/src/http_server/httpd_request_handler.erl b/lib/inets/src/http_server/httpd_request_handler.erl
index fa832cba3f..a9db6e2058 100644
--- a/lib/inets/src/http_server/httpd_request_handler.erl
+++ b/lib/inets/src/http_server/httpd_request_handler.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%
@@ -101,11 +101,13 @@ init([Manager, ConfigDB, AcceptTimeout]) ->
Then = erlang:now(),
+ ?hdrd("negotiate", []),
case http_transport:negotiate(SocketType, Socket, TimeOut) of
{error, Error} ->
+ ?hdrd("negotiation failed", [{error, Error}]),
exit(Error); %% Can be 'normal'.
ok ->
- ?hdrt("negotiated", []),
+ ?hdrt("negotiation successfull", []),
NewTimeout = TimeOut - timer:now_diff(now(),Then) div 1000,
continue_init(Manager, ConfigDB, SocketType, Socket, NewTimeout)
end.
@@ -121,12 +123,9 @@ continue_init(Manager, ConfigDB, SocketType, Socket, TimeOut) ->
socket = Socket,
init_data = InitData},
- MaxHeaderSize = httpd_util:lookup(ConfigDB, max_header_size,
- ?HTTP_MAX_HEADER_SIZE),
- MaxURISize = httpd_util:lookup(ConfigDB, max_uri_size,
- ?HTTP_MAX_URI_SIZE),
- NrOfRequest = httpd_util:lookup(ConfigDB,
- max_keep_alive_request, infinity),
+ MaxHeaderSize = max_header_size(ConfigDB),
+ MaxURISize = max_uri_size(ConfigDB),
+ NrOfRequest = max_keep_alive_request(ConfigDB),
{_, Status} = httpd_manager:new_connection(Manager),
@@ -142,9 +141,10 @@ continue_init(Manager, ConfigDB, SocketType, Socket, TimeOut) ->
?hdrt("activate request timeout", []),
NewState = activate_request_timeout(State),
- ?hdrt("update socket options", []),
- http_transport:setopts(SocketType, Socket, [binary,{packet, 0},
- {active, once}]),
+ ?hdrt("set socket options (binary, packet & active)", []),
+ http_transport:setopts(SocketType, Socket,
+ [binary, {packet, 0}, {active, once}]),
+
?hdrt("init done", []),
gen_server:enter_loop(?MODULE, [], NewState).
@@ -180,21 +180,29 @@ handle_cast(Msg, State) ->
%% {stop, Reason, State}
%% Description: Handling all non call/cast messages
%%--------------------------------------------------------------------
-handle_info({Proto, Socket, Data}, State =
+handle_info({Proto, Socket, Data},
#state{mfa = {Module, Function, Args} = MFA,
mod = #mod{socket_type = SockType,
socket = Socket} = ModData} = State)
when (((Proto =:= tcp) orelse
(Proto =:= ssl) orelse
(Proto =:= dummy)) andalso is_binary(Data)) ->
+
?hdrd("received data",
[{data, Data}, {proto, Proto},
{socket, Socket}, {socket_type, SockType}, {mfa, MFA}]),
- case Module:Function([Data | Args]) of
+
+%% case (catch Module:Function([Data | Args])) of
+ PROCESSED = (catch Module:Function([Data | Args])),
+
+ ?hdrt("data processed", [{processing_result, PROCESSED}]),
+
+ case PROCESSED of
{ok, Result} ->
?hdrd("data processed", [{result, Result}]),
NewState = cancel_request_timeout(State),
handle_http_msg(Result, NewState);
+
{error, {uri_too_long, MaxSize}, Version} ->
?hdrv("uri too long", [{max_size, MaxSize}, {version, Version}]),
NewModData = ModData#mod{http_version = Version},
@@ -205,7 +213,8 @@ handle_info({Proto, Socket, Data}, State =
{stop, normal, State#state{response_sent = true,
mod = NewModData}};
{error, {header_too_long, MaxSize}, Version} ->
- ?hdrv("header too long", [{max_size, MaxSize}, {version, Version}]),
+ ?hdrv("header too long",
+ [{max_size, MaxSize}, {version, Version}]),
NewModData = ModData#mod{http_version = Version},
httpd_response:send_status(NewModData, 413, "Header too long"),
Reason = io_lib:format("Header too long, max size is ~p~n",
@@ -263,14 +272,16 @@ terminate(Reason, #state{response_sent = false, mod = ModData} = State) ->
httpd_response:send_status(ModData, 500, none),
error_log(httpd_util:reason_phrase(500), ModData),
terminate(Reason, State#state{response_sent = true, mod = ModData});
-terminate(_, State) ->
+terminate(_Reason, State) ->
do_terminate(State).
do_terminate(#state{mod = ModData, manager = Manager} = State) ->
catch httpd_manager:done_connection(Manager),
cancel_request_timeout(State),
+ %% receive after 5000 -> ok end,
httpd_socket:close(ModData#mod.socket_type, ModData#mod.socket).
+
%%--------------------------------------------------------------------
%% code_change(OldVsn, State, Extra) -> {ok, NewState}
%%
@@ -279,6 +290,7 @@ do_terminate(#state{mod = ModData, manager = Manager} = State) ->
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
+
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
@@ -383,9 +395,8 @@ is_host_specified_if_required(_, _, _) ->
handle_body(#state{mod = #mod{config_db = ConfigDB}} = State) ->
?hdrt("handle body", []),
- MaxHeaderSize =
- httpd_util:lookup(ConfigDB, max_header_size, ?HTTP_MAX_HEADER_SIZE),
- MaxBodySize = httpd_util:lookup(ConfigDB, max_body_size, nolimit),
+ MaxHeaderSize = max_header_size(ConfigDB),
+ MaxBodySize = max_body_size(ConfigDB),
case handle_expect(State, MaxBodySize) of
ok ->
@@ -538,24 +549,23 @@ handle_response(#state{body = Body,
{stop, normal, State#state{response_sent = true}}.
handle_next_request(#state{mod = #mod{connection = true} = ModData,
- max_keep_alive_request = Max} = State, Data) ->
+ max_keep_alive_request = Max} = State, Data) ->
?hdrt("handle next request", [{max, Max}]),
+
NewModData = #mod{socket_type = ModData#mod.socket_type,
- socket = ModData#mod.socket,
- config_db = ModData#mod.config_db,
- init_data = ModData#mod.init_data},
- MaxHeaderSize =
- httpd_util:lookup(ModData#mod.config_db,
- max_header_size, ?HTTP_MAX_HEADER_SIZE),
- MaxURISize = httpd_util:lookup(ModData#mod.config_db, max_uri_size,
- ?HTTP_MAX_URI_SIZE),
- TmpState = State#state{mod = NewModData,
- mfa = {httpd_request, parse, [{MaxURISize,
- MaxHeaderSize}]},
+ socket = ModData#mod.socket,
+ config_db = ModData#mod.config_db,
+ init_data = ModData#mod.init_data},
+ MaxHeaderSize = max_header_size(ModData#mod.config_db),
+ MaxURISize = max_uri_size(ModData#mod.config_db),
+
+ MFA = {httpd_request, parse, [{MaxURISize, MaxHeaderSize}]},
+ TmpState = State#state{mod = NewModData,
+ mfa = MFA,
max_keep_alive_request = decrease(Max),
- headers = undefined,
- body = undefined,
- response_sent = false},
+ headers = undefined,
+ body = undefined,
+ response_sent = false},
NewState = activate_request_timeout(TmpState),
@@ -596,7 +606,7 @@ decrease(N) ->
error_log(ReasonString, Info) ->
Error = lists:flatten(
- io_lib:format("Error reading request:~s",[ReasonString])),
+ io_lib:format("Error reading request: ~s", [ReasonString])),
error_log(mod_log, Info, Error),
error_log(mod_disk_log, Info, Error).
@@ -609,3 +619,21 @@ error_log(Mod, #mod{config_db = ConfigDB} = Info, String) ->
_ ->
ok
end.
+
+
+%%--------------------------------------------------------------------
+%% Config access wrapper functions
+%%--------------------------------------------------------------------
+
+max_header_size(ConfigDB) ->
+ httpd_util:lookup(ConfigDB, max_header_size, ?HTTP_MAX_HEADER_SIZE).
+
+max_uri_size(ConfigDB) ->
+ httpd_util:lookup(ConfigDB, max_uri_size, ?HTTP_MAX_URI_SIZE).
+
+max_body_size(ConfigDB) ->
+ httpd_util:lookup(ConfigDB, max_body_size, nolimit).
+
+max_keep_alive_request(ConfigDB) ->
+ httpd_util:lookup(ConfigDB, max_keep_alive_request, infinity).
+
diff --git a/lib/inets/src/http_server/httpd_sup.erl b/lib/inets/src/http_server/httpd_sup.erl
index 3399f78b53..1507c6852a 100644
--- a/lib/inets/src/http_server/httpd_sup.erl
+++ b/lib/inets/src/http_server/httpd_sup.erl
@@ -185,14 +185,14 @@ httpd_child_spec(ConfigFile, AcceptTimeout, Debug) ->
httpd_child_spec(Config, AcceptTimeout, Debug, Addr, 0) ->
case start_listen(Addr, 0, Config) of
{Pid, {NewPort, NewConfig, ListenSocket}} ->
- Name = {httpd_instance_sup, Addr, NewPort},
+ Name = {httpd_instance_sup, Addr, NewPort},
StartFunc = {httpd_instance_sup, start_link,
[NewConfig, AcceptTimeout,
{Pid, ListenSocket}, Debug]},
- Restart = permanent,
- Shutdown = infinity,
- Modules = [httpd_instance_sup],
- Type = supervisor,
+ Restart = permanent,
+ Shutdown = infinity,
+ Modules = [httpd_instance_sup],
+ Type = supervisor,
{Name, StartFunc, Restart, Shutdown, Type, Modules};
{Pid, {error, Reason}} ->
exit(Pid, normal),
diff --git a/lib/inets/src/http_server/httpd_util.erl b/lib/inets/src/http_server/httpd_util.erl
index b59fd861dc..cfad79638f 100644
--- a/lib/inets/src/http_server/httpd_util.erl
+++ b/lib/inets/src/http_server/httpd_util.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%
@@ -755,23 +755,18 @@ do_enable_debug([{Level,Modules}|Rest])
when is_atom(Level) andalso is_list(Modules) ->
case Level of
all_functions ->
- io:format("Tracing on all functions set on modules: ~p~n",
- [Modules]),
lists:foreach(
- fun(X)->
+ fun(X) ->
dbg:tpl(X, [{'_', [], [{return_trace}]}])
end, Modules);
exported_functions ->
- io:format("Tracing on exported functions set on "
- "modules: ~p~n",[Modules]),
lists:foreach(
- fun(X)->
+ fun(X) ->
dbg:tp(X, [{'_', [], [{return_trace}]}])
end, Modules);
disable ->
- io:format("Tracing disabled on modules: ~p~n", [Modules]),
lists:foreach(
- fun(X)->
+ fun(X) ->
dbg:ctp(X)
end, Modules);
_ ->
diff --git a/lib/inets/src/http_server/mod_alias.erl b/lib/inets/src/http_server/mod_alias.erl
index ec0a12242f..9c5a8cc1c6 100644
--- a/lib/inets/src/http_server/mod_alias.erl
+++ b/lib/inets/src/http_server/mod_alias.erl
@@ -103,6 +103,19 @@ real_name(ConfigDB, RequestURI, []) ->
httpd_util:split_path(default_index(ConfigDB, RealName)),
{ShortPath, Path, AfterPath};
+real_name(ConfigDB, RequestURI, [{MP,Replacement}|Rest])
+ when element(1, MP) =:= re_pattern ->
+ case re:run(RequestURI, MP, [{capture,[]}]) of
+ match ->
+ NewURI = re:replace(RequestURI, MP, Replacement, [{return,list}]),
+ {ShortPath,_} = httpd_util:split_path(NewURI),
+ {Path,AfterPath} =
+ httpd_util:split_path(default_index(ConfigDB, NewURI)),
+ {ShortPath, Path, AfterPath};
+ nomatch ->
+ real_name(ConfigDB, RequestURI, Rest)
+ end;
+
real_name(ConfigDB, RequestURI, [{FakeName,RealName}|Rest]) ->
case inets_regexp:match(RequestURI, "^" ++ FakeName) of
{match, _, _} ->
@@ -120,6 +133,18 @@ real_name(ConfigDB, RequestURI, [{FakeName,RealName}|Rest]) ->
real_script_name(_ConfigDB, _RequestURI, []) ->
not_a_script;
+
+real_script_name(ConfigDB, RequestURI, [{MP,Replacement} | Rest])
+ when element(1, MP) =:= re_pattern ->
+ case re:run(RequestURI, MP, [{capture,[]}]) of
+ match ->
+ ActualName =
+ re:replace(RequestURI, MP, Replacement, [{return,list}]),
+ httpd_util:split_script_path(default_index(ConfigDB, ActualName));
+ nomatch ->
+ real_script_name(ConfigDB, RequestURI, Rest)
+ end;
+
real_script_name(ConfigDB, RequestURI, [{FakeName,RealName} | Rest]) ->
case inets_regexp:match(RequestURI, "^" ++ FakeName) of
{match,_,_} ->
@@ -180,6 +205,8 @@ load("Alias " ++ Alias, []) ->
{ok, _} ->
{error,?NICE(httpd_conf:clean(Alias)++" is an invalid Alias")}
end;
+load("ReWrite " ++ Rule, Acc) ->
+ load_re_write(Rule, Acc, "ReWrite", re_write);
load("ScriptAlias " ++ ScriptAlias, []) ->
case inets_regexp:split(ScriptAlias, " ") of
{ok, [FakeName, RealName]} ->
@@ -189,6 +216,24 @@ load("ScriptAlias " ++ ScriptAlias, []) ->
{ok, _} ->
{error, ?NICE(httpd_conf:clean(ScriptAlias)++
" is an invalid ScriptAlias")}
+ end;
+load("ScriptReWrite " ++ Rule, Acc) ->
+ load_re_write(Rule, Acc, "ScriptReWrite", script_re_write).
+
+load_re_write(Rule0, Acc, Type, Tag) ->
+ case lists:dropwhile(
+ fun ($\s) -> true; ($\t) -> true; (_) -> false end,
+ Rule0) of
+ "" ->
+ {error, ?NICE(httpd_conf:clean(Rule0)++" is an invalid "++Type)};
+ Rule ->
+ case string:chr(Rule, $\s) of
+ 0 ->
+ {ok, Acc, {Tag, {Rule, ""}}};
+ N ->
+ {Re, [_|Replacement]} = lists:split(N-1, Rule),
+ {ok, Acc, {Tag, {Re, Replacement}}}
+ end
end.
store({directory_index, Value} = Conf, _) when is_list(Value) ->
@@ -200,16 +245,36 @@ store({directory_index, Value} = Conf, _) when is_list(Value) ->
end;
store({directory_index, Value}, _) ->
{error, {wrong_type, {directory_index, Value}}};
-store({alias, {Fake, Real}} = Conf, _)
- when is_list(Fake) andalso is_list(Real) ->
+store({alias, {Fake, Real}} = Conf, _)
+ when is_list(Fake), is_list(Real) ->
{ok, Conf};
store({alias, Value}, _) ->
{error, {wrong_type, {alias, Value}}};
+store({re_write, {Re, Replacement}} = Conf, _)
+ when is_list(Re), is_list(Replacement) ->
+ case re:compile(Re) of
+ {ok, MP} ->
+ {ok, {alias, {MP, Replacement}}};
+ {error,_} ->
+ {error, {re_compile, Conf}}
+ end;
+store({re_write, _} = Conf, _) ->
+ {error, {wrong_type, Conf}};
store({script_alias, {Fake, Real}} = Conf, _)
- when is_list(Fake) andalso is_list(Real) ->
+ when is_list(Fake), is_list(Real) ->
{ok, Conf};
store({script_alias, Value}, _) ->
- {error, {wrong_type, {script_alias, Value}}}.
+ {error, {wrong_type, {script_alias, Value}}};
+store({script_re_write, {Re, Replacement}} = Conf, _)
+ when is_list(Re), is_list(Replacement) ->
+ case re:compile(Re) of
+ {ok, MP} ->
+ {ok, {script_alias, {MP, Replacement}}};
+ {error,_} ->
+ {error, {re_compile, Conf}}
+ end;
+store({script_re_write, _} = Conf, _) ->
+ {error, {wrong_type, Conf}}.
is_directory_index_list([]) ->
true;
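Note: the new ReWrite/ScriptReWrite directives are stored as pre-compiled re patterns and tried before the legacy prefix aliases. In the config file the regexp and replacement are separated by the first space, e.g. a hypothetical rule "ReWrite ^/img/(.*)$ /pics/\1". A standalone sketch of the matching performed above, using that made-up rule:

    %% Sketch only; rule and URI are examples, not from the patch.
    {ok, MP} = re:compile("^/img/(.*)$"),
    match    = re:run("/img/logo.png", MP, [{capture, []}]),
    "/pics/logo.png" =
        re:replace("/img/logo.png", MP, "/pics/\\1", [{return, list}]).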
diff --git a/lib/inets/src/http_server/mod_esi.erl b/lib/inets/src/http_server/mod_esi.erl
index cb33544540..f7877aa9e2 100644
--- a/lib/inets/src/http_server/mod_esi.erl
+++ b/lib/inets/src/http_server/mod_esi.erl
@@ -29,6 +29,7 @@
-export([do/1, load/2, store/2]).
-include("httpd.hrl").
+-include("httpd_internal.hrl").
-define(VMODULE,"ESI").
-define(DEFAULT_ERL_TIMEOUT,15000).
@@ -37,6 +38,7 @@
%%%=========================================================================
%%% API
%%%=========================================================================
+
%%--------------------------------------------------------------------------
%% deliver(SessionID, Data) -> ok | {error, bad_sessionID}
%% SessionID = pid()
@@ -48,7 +50,7 @@
%% request handling process so it can forward it to the client.
%%-------------------------------------------------------------------------
deliver(SessionID, Data) when is_pid(SessionID) ->
- SessionID ! {ok, Data},
+ SessionID ! {esi_data, Data},
ok;
deliver(_SessionID, _Data) ->
{error, bad_sessionID}.
@@ -65,6 +67,7 @@ deliver(_SessionID, _Data) ->
%% Description: See httpd(3) ESWAPI CALLBACK FUNCTIONS
%%-------------------------------------------------------------------------
do(ModData) ->
+ ?hdrt("do", []),
case proplists:get_value(status, ModData#mod.data) of
{_StatusCode, _PhraseArgs, _Reason} ->
{proceed, ModData#mod.data};
@@ -184,6 +187,7 @@ store({erl_script_nocache, Value}, _) ->
%%% Internal functions
%%%========================================================================
generate_response(ModData) ->
+ ?hdrt("generate response", []),
case scheme(ModData#mod.request_uri, ModData#mod.config_db) of
{eval, ESIBody, Modules} ->
eval(ModData, ESIBody, Modules);
@@ -235,6 +239,7 @@ alias_match_str(Alias, eval_script_alias) ->
erl(#mod{method = Method} = ModData, ESIBody, Modules)
when (Method =:= "GET") orelse (Method =:= "HEAD") ->
+ ?hdrt("erl", [{method, Method}]),
case httpd_util:split(ESIBody,":|%3A|/",2) of
{ok, [ModuleName, FuncAndInput]} ->
case httpd_util:split(FuncAndInput,"[\?/]",2) of
@@ -260,6 +265,7 @@ erl(#mod{request_uri = ReqUri,
method = "PUT",
http_version = Version,
data = Data}, _ESIBody, _Modules) ->
+ ?hdrt("erl", [{method, put}]),
{proceed, [{status,{501,{"PUT", ReqUri, Version},
?NICE("Erl mechanism doesn't support method PUT")}}|
Data]};
@@ -268,12 +274,14 @@ erl(#mod{request_uri = ReqUri,
method = "DELETE",
http_version = Version,
data = Data}, _ESIBody, _Modules) ->
+ ?hdrt("erl", [{method, delete}]),
{proceed,[{status,{501,{"DELETE", ReqUri, Version},
?NICE("Erl mechanism doesn't support method DELETE")}}|
Data]};
erl(#mod{method = "POST",
entity_body = Body} = ModData, ESIBody, Modules) ->
+ ?hdrt("erl", [{method, post}]),
case httpd_util:split(ESIBody,":|%3A|/",2) of
{ok,[ModuleName, Function]} ->
generate_webpage(ModData, ESIBody, Modules,
@@ -289,6 +297,7 @@ generate_webpage(ModData, ESIBody, [all], Module, FunctionName,
FunctionName, Input, ScriptElements);
generate_webpage(ModData, ESIBody, Modules, Module, FunctionName,
Input, ScriptElements) ->
+ ?hdrt("generate webpage", []),
Function = list_to_atom(FunctionName),
case lists:member(Module, Modules) of
true ->
@@ -309,8 +318,9 @@ generate_webpage(ModData, ESIBody, Modules, Module, FunctionName,
%% Old API that waits for the dynamic webpage to be totally generated
%% before anything is sent back to the client.
-erl_scheme_webpage_whole(Module, Function, Env, Input, ModData) ->
- case (catch Module:Function(Env, Input)) of
+erl_scheme_webpage_whole(Mod, Func, Env, Input, ModData) ->
+ ?hdrt("erl_scheme_webpage_whole", [{module, Mod}, {function, Func}]),
+ case (catch Mod:Func(Env, Input)) of
{'EXIT',{undef, _}} ->
{proceed, [{status, {404, ModData#mod.request_uri, "Not found"}}
| ModData#mod.data]};
@@ -347,6 +357,7 @@ erl_scheme_webpage_whole(Module, Function, Env, Input, ModData) ->
%% in small chunks at the time during generation.
erl_scheme_webpage_chunk(Mod, Func, Env, Input, ModData) ->
process_flag(trap_exit, true),
+ ?hdrt("erl_scheme_webpage_chunk", [{module, Mod}, {function, Func}]),
Self = self(),
%% Spawn worker that generates the webpage.
%% It would be nicer to use erlang:function_exported/3 but if the
@@ -372,9 +383,12 @@ deliver_webpage_chunk(#mod{config_db = Db} = ModData, Pid) ->
deliver_webpage_chunk(ModData, Pid, Timeout).
deliver_webpage_chunk(#mod{config_db = Db} = ModData, Pid, Timeout) ->
+ ?hdrt("deliver_webpage_chunk", [{timeout, Timeout}]),
case receive_headers(Timeout) of
{error, Reason} ->
%% Happens when webpage generator callback/3 is undefined
+ ?hdrv("deliver_webpage_chunk - failed receiving headers",
+ [{reason, Reason}]),
{error, Reason};
{Headers, Body} ->
case httpd_esi:handle_headers(Headers) of
@@ -399,6 +413,7 @@ deliver_webpage_chunk(#mod{config_db = Db} = ModData, Pid, Timeout) ->
IsDisableChunkedSend)
end;
timeout ->
+ ?hdrv("deliver_webpage_chunk - timeout", []),
send_headers(ModData, {504, "Timeout"},[{"connection", "close"}]),
httpd_socket:close(ModData#mod.socket_type, ModData#mod.socket),
process_flag(trap_exit,false),
@@ -407,11 +422,17 @@ deliver_webpage_chunk(#mod{config_db = Db} = ModData, Pid, Timeout) ->
receive_headers(Timeout) ->
receive
+ {esi_data, Chunk} ->
+ ?hdrt("receive_headers - received esi data (esi)", []),
+ httpd_esi:parse_headers(lists:flatten(Chunk));
{ok, Chunk} ->
+ ?hdrt("receive_headers - received esi data (ok)", []),
httpd_esi:parse_headers(lists:flatten(Chunk));
{'EXIT', Pid, erl_scheme_webpage_chunk_undefined} when is_pid(Pid) ->
+ ?hdrd("receive_headers - exit:chunk-undef", []),
{error, erl_scheme_webpage_chunk_undefined};
{'EXIT', Pid, Reason} when is_pid(Pid) ->
+ ?hdrv("receive_headers - exit", [{reason, Reason}]),
exit({mod_esi_linked_process_died, Pid, Reason})
after Timeout ->
timeout
@@ -427,19 +448,29 @@ handle_body(_, #mod{method = "HEAD"} = ModData, _, _, Size, _) ->
{proceed, [{response, {already_sent, 200, Size}} | ModData#mod.data]};
handle_body(Pid, ModData, Body, Timeout, Size, IsDisableChunkedSend) ->
+ ?hdrt("handle_body - send chunk", [{timeout, Timeout}, {size, Size}]),
httpd_response:send_chunk(ModData, Body, IsDisableChunkedSend),
receive
+ {esi_data, Data} ->
+ ?hdrt("handle_body - received data (esi)", []),
+ handle_body(Pid, ModData, Data, Timeout, Size + length(Data),
+ IsDisableChunkedSend);
{ok, Data} ->
+ ?hdrt("handle_body - received data (ok)", []),
handle_body(Pid, ModData, Data, Timeout, Size + length(Data),
IsDisableChunkedSend);
{'EXIT', Pid, normal} when is_pid(Pid) ->
+ ?hdrt("handle_body - exit:normal", []),
httpd_response:send_final_chunk(ModData, IsDisableChunkedSend),
{proceed, [{response, {already_sent, 200, Size}} |
ModData#mod.data]};
{'EXIT', Pid, Reason} when is_pid(Pid) ->
+ ?hdrv("handle_body - exit", [{reason, Reason}]),
httpd_response:send_final_chunk(ModData, IsDisableChunkedSend),
exit({mod_esi_linked_process_died, Pid, Reason})
+
after Timeout ->
+ ?hdrv("handle_body - timeout", []),
process_flag(trap_exit,false),
httpd_response:send_final_chunk(ModData, IsDisableChunkedSend),
exit({mod_esi_linked_process_timeout, Pid})
@@ -473,6 +504,7 @@ eval(#mod{request_uri = ReqUri,
method = "PUT",
http_version = Version,
data = Data}, _ESIBody, _Modules) ->
+ ?hdrt("eval", [{method, put}]),
{proceed,[{status,{501,{"PUT", ReqUri, Version},
?NICE("Eval mechanism doesn't support method PUT")}}|
Data]};
@@ -481,6 +513,7 @@ eval(#mod{request_uri = ReqUri,
method = "DELETE",
http_version = Version,
data = Data}, _ESIBody, _Modules) ->
+ ?hdrt("eval", [{method, delete}]),
{proceed,[{status,{501,{"DELETE", ReqUri, Version},
?NICE("Eval mechanism doesn't support method DELETE")}}|
Data]};
@@ -489,12 +522,14 @@ eval(#mod{request_uri = ReqUri,
method = "POST",
http_version = Version,
data = Data}, _ESIBody, _Modules) ->
+ ?hdrt("eval", [{method, post}]),
{proceed,[{status,{501,{"POST", ReqUri, Version},
?NICE("Eval mechanism doesn't support method POST")}}|
Data]};
eval(#mod{method = Method} = ModData, ESIBody, Modules)
- when Method == "GET"; Method == "HEAD" ->
+ when (Method =:= "GET") orelse (Method =:= "HEAD") ->
+ ?hdrt("eval", [{method, Method}]),
case is_authorized(ESIBody, Modules) of
true ->
case generate_webpage(ESIBody) of
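Note: mod_esi:deliver/2 now tags forwarded chunks as {esi_data, Data}, while the receive loops above still accept the legacy {ok, Data} tuple. A hypothetical ESI callback exercising the API (module name and content are examples only, assuming it is listed in an erl_script_alias directive):

    -module(my_esi).
    -export([hello/3]).

    hello(SessionID, _Env, _Input) ->
        %% Each deliver/2 call arrives as an {esi_data, Chunk} message in
        %% the request-handling process.
        mod_esi:deliver(SessionID, "Content-Type: text/html\r\n\r\n"),
        mod_esi:deliver(SessionID, "<html><body>Hello from ESI</body></html>"),
        ok.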
diff --git a/lib/inets/src/inets_app/Makefile b/lib/inets/src/inets_app/Makefile
index 33c9e34a3a..4632ff3b68 100644
--- a/lib/inets/src/inets_app/Makefile
+++ b/lib/inets/src/inets_app/Makefile
@@ -67,18 +67,15 @@ APPUP_TARGET = $(EBIN)/$(APPUP_FILE)
# ----------------------------------------------------
-# INETS FLAGS
-# ----------------------------------------------------
-INETS_FLAGS = -D'SERVER_SOFTWARE="$(APPLICATION)/$(VSN)"'
-
-
-# ----------------------------------------------------
# FLAGS
# ----------------------------------------------------
-ERL_COMPILE_FLAGS += $(INETS_FLAGS) \
- +'{parse_transform,sys_pre_attributes}' \
- +'{attribute,insert,app_vsn,$(APP_VSN)}'
+include inets.mk
+
+ERL_COMPILE_FLAGS += \
+ $(INETS_FLAGS) \
+ $(INETS_ERL_COMPILE_FLAGS) \
+ -I../../include
# ----------------------------------------------------
@@ -112,7 +109,8 @@ include $(ERL_TOP)/make/otp_release_targets.mk
release_spec: opt
$(INSTALL_DIR) $(RELSYSDIR)/src
- $(INSTALL_DATA) $(HRL_FILES) $(ERL_FILES) $(RELSYSDIR)/src
+ $(INSTALL_DIR) $(RELSYSDIR)/src/inets_app
+ $(INSTALL_DATA) $(HRL_FILES) $(ERL_FILES) $(RELSYSDIR)/src/inets_app
$(INSTALL_DIR) $(RELSYSDIR)/ebin
$(INSTALL_DATA) $(TARGET_FILES) $(RELSYSDIR)/ebin
diff --git a/lib/inets/src/inets_app/inets.app.src b/lib/inets/src/inets_app/inets.app.src
index 04f6365b98..cb036157a5 100644
--- a/lib/inets/src/inets_app/inets.app.src
+++ b/lib/inets/src/inets_app/inets.app.src
@@ -107,5 +107,6 @@
tftp_sup
]},
{registered,[inets_sup, httpc_manager]},
+ %% If the "new" ssl is used, then 'crypto' must be started before inets.
{applications,[kernel,stdlib]},
{mod,{inets_app,[]}}]}.
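Note: a start-up sketch matching the comment above; when the Erlang-based ssl is used, its prerequisites must be running before inets (the exact application set depends on the OTP release):

    %% Sketch only; the start order is what matters here.
    ok = application:start(crypto),
    ok = application:start(public_key),
    ok = application:start(ssl),
    ok = application:start(inets).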
diff --git a/lib/inets/src/inets_app/inets.appup.src b/lib/inets/src/inets_app/inets.appup.src
index dfdfb41373..64fe664006 100644
--- a/lib/inets/src/inets_app/inets.appup.src
+++ b/lib/inets/src/inets_app/inets.appup.src
@@ -18,19 +18,24 @@
{"%VSN%",
[
+ {"5.3.3",
+ [
+ {restart_application, inets}
+ ]
+ },
+ {"5.3.2",
+ [
+ {restart_application, inets}
+ ]
+ },
{"5.3.1",
[
- {load_module, httpc, soft_purge, soft_purge, []},
- {update, httpc_handler, soft, soft_purge, soft_purge, [httpc_manager]},
- {update, httpc_manager, soft, soft_purge, soft_purge, []}
+ {restart_application, inets}
]
},
{"5.3",
[
- {load_module, httpc, soft_purge, soft_purge, []},
- {update, httpc_handler, soft, soft_purge, soft_purge, [httpc_manager]},
- {update, httpc_manager, soft, soft_purge, soft_purge, []},
- {load_module, mod_esi, soft_purge, soft_purge, []}
+ {restart_application, inets}
]
},
{"5.2",
@@ -50,19 +55,24 @@
}
],
[
+ {"5.3.3",
+ [
+ {restart_application, inets}
+ ]
+ },
+ {"5.3.2",
+ [
+ {restart_application, inets}
+ ]
+ },
{"5.3.1",
[
- {load_module, httpc, soft_purge, soft_purge, []},
- {update, httpc_handler, soft, soft_purge, soft_purge, [httpc_manager]},
- {update, httpc_manager, soft, soft_purge, soft_purge, []}
+ {restart_application, inets}
]
},
{"5.3",
[
- {load_module, httpc, soft_purge, soft_purge, []},
- {update, httpc_handler, soft, soft_purge, soft_purge, [httpc_manager]},
- {update, httpc_manager, soft, soft_purge, soft_purge, []},
- {load_module, mod_esi, soft_purge, soft_purge, []}
+ {restart_application, inets}
]
},
{"5.2",
diff --git a/lib/inets/src/inets_app/inets.erl b/lib/inets/src/inets_app/inets.erl
index 7e3f862ee7..f1fa5fd997 100644
--- a/lib/inets/src/inets_app/inets.erl
+++ b/lib/inets/src/inets_app/inets.erl
@@ -533,7 +533,7 @@ error_to_exit(Where, {error, Reason}) ->
%%-----------------------------------------------------------------
-%% report_event(Serverity, Label, Service, Content)
+%% report_event(Severity, Label, Service, Content)
%%
%% Parameters:
%% Severity -> 0 =< integer() =< 100
diff --git a/lib/inets/src/inets_app/inets.mk b/lib/inets/src/inets_app/inets.mk
new file mode 100644
index 0000000000..b6e9fe1d96
--- /dev/null
+++ b/lib/inets/src/inets_app/inets.mk
@@ -0,0 +1,45 @@
+#-*-makefile-*- ; force emacs to enter makefile-mode
+
+# %CopyrightBegin%
+#
+# Copyright Ericsson AB 2010. All Rights Reserved.
+#
+# The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved online at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# %CopyrightEnd%
+
+ifeq ($(INETS_TRACE), io)
+ERL_COMPILE_FLAGS += -Dinets_trace_io
+endif
+
+ifeq ($(INETS_DEBUG), true)
+ERL_COMPILE_FLAGS += -Dinets_debug
+endif
+
+ifeq ($(USE_INETS_HIPE), true)
+ERL_COMPILE_FLAGS += +native
+endif
+
+ifeq ($(WARN_UNUSED_WARS), true)
+ERL_COMPILE_FLAGS += +warn_unused_vars
+endif
+
+INETS_APP_VSN_COMPILE_FLAGS = \
+ +'{parse_transform,sys_pre_attributes}' \
+ +'{attribute,insert,app_vsn,$(APP_VSN)}'
+
+INETS_FLAGS = -D'SERVER_SOFTWARE="$(APPLICATION)/$(VSN)"'
+
+INETS_ERL_COMPILE_FLAGS += \
+ -pa $(ERL_TOP)/lib/inets/ebin \
+ $(INETS_APP_VSN_COMPILE_FLAGS)
+
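Note: the new inets.mk collects the compile-time switches shared by the subdirectory Makefiles; building with, e.g., INETS_DEBUG=true adds -Dinets_debug. A minimal sketch (not the actual inets header) of how such a define could gate the ?hdrt/?hdrv trace macros used throughout the http modules:

    %% Hypothetical header fragment, for illustration only.
    -ifdef(inets_debug).
    -define(hdrt(Label, Content), io:format("dbg: ~s ~p~n", [Label, Content])).
    -else.
    -define(hdrt(Label, Content), ok).
    -endif.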
diff --git a/lib/inets/src/inets_app/inets_service.erl b/lib/inets/src/inets_app/inets_service.erl
index 3499314d54..e9eb9892f2 100644
--- a/lib/inets/src/inets_app/inets_service.erl
+++ b/lib/inets/src/inets_app/inets_service.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2007-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%
@@ -61,5 +61,5 @@ behaviour_info(_) ->
%% service_info() -> [{Property, Value}] | {error, Reason}
-%% ex: http:service_info() -> [{profile, ProfileName}]
+%% ex: httpc:service_info() -> [{profile, ProfileName}]
%% httpd:service_info() -> [{host, Host}, {port, Port}]
diff --git a/lib/inets/src/tftp/Makefile b/lib/inets/src/tftp/Makefile
index b4339da1e2..759b70c8e4 100644
--- a/lib/inets/src/tftp/Makefile
+++ b/lib/inets/src/tftp/Makefile
@@ -56,17 +56,16 @@ TARGET_FILES= $(MODULES:%=$(EBIN)/%.$(EMULATOR))
# ----------------------------------------------------
-# INETS FLAGS
+# FLAGS
# ----------------------------------------------------
-INETS_FLAGS = -D'SERVER_SOFTWARE="$(APPLICATION)/$(VSN)"'
+include ../inets_app/inets.mk
-# ----------------------------------------------------
-# FLAGS
-# ----------------------------------------------------
-ERL_COMPILE_FLAGS += $(INETS_FLAGS) \
- +'{parse_transform,sys_pre_attributes}' \
- +'{attribute,insert,app_vsn,$(APP_VSN)}'
+ERL_COMPILE_FLAGS += \
+ $(INETS_FLAGS) \
+ $(INETS_ERL_COMPILE_FLAGS) \
+ -I../../include \
+ -I../inets_app
# ----------------------------------------------------
@@ -87,9 +86,10 @@ docs:
include $(ERL_TOP)/make/otp_release_targets.mk
release_spec: opt
- $(INSTALL_DIR) $(RELSYSDIR)/src
- $(INSTALL_DATA) $(HRL_FILES) $(ERL_FILES) $(RELSYSDIR)/src
- $(INSTALL_DIR) $(RELSYSDIR)/ebin
+ $(INSTALL_DIR) $(RELSYSDIR)/src
+ $(INSTALL_DIR) $(RELSYSDIR)/src/tftp
+ $(INSTALL_DATA) $(HRL_FILES) $(ERL_FILES) $(RELSYSDIR)/src/tftp
+ $(INSTALL_DIR) $(RELSYSDIR)/ebin
$(INSTALL_DATA) $(TARGET_FILES) $(RELSYSDIR)/ebin
release_docs_spec:
diff --git a/lib/inets/test/Makefile b/lib/inets/test/Makefile
index 668752da9e..bb7f2186af 100644
--- a/lib/inets/test/Makefile
+++ b/lib/inets/test/Makefile
@@ -143,6 +143,8 @@ else
INETS_FLAGS += -Dhttpd_security_verbosity=log
endif
+INETS_FLAGS += -pa ../../inets/ebin
+
INETS_ROOT = ../../inets
MODULES = \
@@ -241,8 +243,11 @@ RELTESTSYSBINDIR = $(RELTESTSYSALLDATADIR)/bin
# The path to the test_server ebin dir is needed when
# running the target "targets".
# ----------------------------------------------------
-ERL_COMPILE_FLAGS += -pa ../../../internal_tools/test_server/ebin \
- $(INCLUDES) $(FTP_FLAGS) $(INETS_FLAGS)
+ERL_COMPILE_FLAGS += \
+ -pa ../../../internal_tools/test_server/ebin \
+ $(INCLUDES) \
+ $(FTP_FLAGS) \
+ $(INETS_FLAGS)
# ----------------------------------------------------
# Targets
diff --git a/lib/inets/test/ftp_suite_lib.erl b/lib/inets/test/ftp_suite_lib.erl
index 75e1a5a7f9..5e27bc3a86 100644
--- a/lib/inets/test/ftp_suite_lib.erl
+++ b/lib/inets/test/ftp_suite_lib.erl
@@ -48,14 +48,17 @@
-ifdef(ftp_debug_client).
-define(ftp_open(Host, Flags),
- do_ftp_open(Host, [debug, {timeout, timer:seconds(15)}] ++ Flags)).
+ do_ftp_open(Host, [{debug, debug},
+ {timeout, timer:seconds(15)} | Flags])).
-else.
-ifdef(ftp_trace_client).
-define(ftp_open(Host, Flags),
- do_ftp_open(Host, [trace, {timeout, timer:seconds(15)}] ++ Flags)).
+ do_ftp_open(Host, [{debug, trace},
+ {timeout, timer:seconds(15)} | Flags])).
-else.
-define(ftp_open(Host, Flags),
- do_ftp_open(Host, [verbose, {timeout, timer:seconds(15)}] ++ Flags)).
+ do_ftp_open(Host, [{verbose, true},
+ {timeout, timer:seconds(15)} | Flags])).
-endif.
-endif.
@@ -113,9 +116,7 @@ get_ftpd_host([Host|Hosts]) ->
p("get_ftpd_host -> entry with"
"~n Host: ~p"
"~n", [Host]),
- case (catch ftp:open({option_list,
- [{host, Host}, {port, ?FTP_PORT},
- {timeout, 20000}]})) of
+ case (catch ftp:open(Host, [{port, ?FTP_PORT}, {timeout, 20000}])) of
{ok, Pid} ->
(catch ftp:close(Pid)),
{ok, Host};
@@ -212,7 +213,7 @@ do_init_per_testcase(Case, Config)
inets:start(),
NewConfig = close_connection(watch_dog(Config)),
Host = ftp_host(Config),
- case (catch ?ftp_open(Host, [])) of
+ case (catch ?ftp_open(Host, [{mode, passive}])) of
{ok, Pid} ->
[{ftp, Pid} | data_dir(NewConfig)];
{skip, _} = SKIP ->
@@ -225,9 +226,8 @@ do_init_per_testcase(Case, Config)
inets:start(),
NewConfig = close_connection(watch_dog(Config)),
Host = ftp_host(Config),
- case (catch ?ftp_open(Host, [])) of
+ case (catch ?ftp_open(Host, [{mode, active}])) of
{ok, Pid} ->
- ok = ftp:force_active(Pid),
[{ftp, Pid} | data_dir(NewConfig)];
{skip, _} = SKIP ->
SKIP
@@ -240,11 +240,10 @@ do_init_per_testcase(Case, Config)
io:format(user, "~n~n*** INIT ~w:~w ***~n~n", [?MODULE, Case]),
NewConfig = close_connection(watch_dog(Config)),
Host = ftp_host(Config),
- Opts = [{host, Host},
- {port, ?FTP_PORT},
- {flags, [verbose]},
+ Opts = [{port, ?FTP_PORT},
+ {verbose, true},
{progress, {?MODULE, progress, #progress{}}}],
- case ftp:open({option_list, Opts}) of
+ case ftp:open(Host, Opts) of
{ok, Pid} ->
ok = ftp:user(Pid, ?FTP_USER, ?FTP_PASS),
[{ftp, Pid} | data_dir(NewConfig)];
@@ -257,22 +256,23 @@ do_init_per_testcase(Case, Config) ->
inets:start(),
NewConfig = close_connection(watch_dog(Config)),
Host = ftp_host(Config),
- Flags =
+ Opts1 =
if
((Case =:= passive_ip_v6_disabled) orelse
(Case =:= active_ip_v6_disabled)) ->
- [ip_v6_disabled];
+ [{ipfamily, inet}];
true ->
[]
end,
- case (catch ?ftp_open(Host, Flags)) of
+ Opts2 =
+ case string:tokens(atom_to_list(Case), [$_]) of
+ [_, "active" | _] ->
+ [{mode, active} | Opts1];
+ _ ->
+ [{mode, passive} | Opts1]
+ end,
+ case (catch ?ftp_open(Host, Opts2)) of
{ok, Pid} ->
- case string:tokens(atom_to_list(Case), [$_]) of
- [_, "active"|_] ->
- ok = ftp:force_active(Pid);
- _ ->
- ok
- end,
ok = ftp:user(Pid, ?FTP_USER, ?FTP_PASS),
[{ftp, Pid} | data_dir(NewConfig)];
{skip, _} = SKIP ->
@@ -365,6 +365,7 @@ open(Config) when is_list(Config) ->
Host = ftp_host(Config),
(catch tc_open(Host)).
+
tc_open(Host) ->
{ok, Pid} = ?ftp_open(Host, []),
ok = ftp:close(Pid),
@@ -374,8 +375,9 @@ tc_open(Host) ->
{flags, [verbose]},
{timeout, 30000}]}),
ok = ftp:close(Pid1),
- {error, ehost} = ftp:open({option_list, [{port, ?FTP_PORT},
- {flags, [verbose]}]}),
+
+ {error, ehost} =
+ ftp:open({option_list, [{port, ?FTP_PORT}, {flags, [verbose]}]}),
{ok, Pid2} = ftp:open(Host),
ok = ftp:close(Pid2),
@@ -408,6 +410,15 @@ tc_open(Host) ->
{mode, cool}]}),
test_server:sleep(100),
ok = ftp:close(Pid6),
+
+ {ok, Pid7} =
+ ftp:open(Host, [{port, ?FTP_PORT}, {verbose, true}, {timeout, 30000}]),
+ ok = ftp:close(Pid7),
+
+ {ok, Pid8} =
+ ftp:open(Host, ?FTP_PORT),
+ ok = ftp:close(Pid8),
+
ok.
@@ -420,7 +431,7 @@ open_port(suite) ->
[];
open_port(Config) when is_list(Config) ->
Host = ftp_host(Config),
- {ok, Pid} = ftp:open(Host, ?FTP_PORT),
+ {ok, Pid} = ftp:open(Host, [{port, ?FTP_PORT}]),
ok = ftp:close(Pid),
{error, ehost} = ftp:open(?BAD_HOST, []),
ok.
@@ -954,26 +965,39 @@ api_missuse(doc)->
["Test that behaviour of the ftp process if the api is abused"];
api_missuse(suite) -> [];
api_missuse(Config) when is_list(Config) ->
+ io:format("api_missuse -> entry~n", []),
+ Flag = process_flag(trap_exit, true),
Pid = ?config(ftp, Config),
Host = ftp_host(Config),
-
+
%% Serious programming fault, connection will be shut down
- {error, {connection_terminated, 'API_violation'}} =
- gen_server:call(Pid, {self(), foobar, 10}, infinity),
+ io:format("api_missuse -> verify bad call termination (~p)~n", [Pid]),
+ case (catch gen_server:call(Pid, {self(), foobar, 10}, infinity)) of
+ {error, {connection_terminated, 'API_violation'}} ->
+ ok;
+ Unexpected1 ->
+ exit({unexpected_result, Unexpected1})
+ end,
test_server:sleep(500),
undefined = process_info(Pid, status),
+ io:format("api_missuse -> start new client~n", []),
{ok, Pid2} = ?ftp_open(Host, []),
%% Serious programming fault, connection will be shut down
+ io:format("api_missuse -> verify bad cast termination~n", []),
gen_server:cast(Pid2, {self(), foobar, 10}),
test_server:sleep(500),
undefined = process_info(Pid2, status),
+ io:format("api_missuse -> start new client~n", []),
{ok, Pid3} = ?ftp_open(Host, []),
%% Could be an innocent mistake; the connection lives.
+ io:format("api_missuse -> verify bad bang~n", []),
Pid3 ! foobar,
test_server:sleep(500),
{status, _} = process_info(Pid3, status),
+ process_flag(trap_exit, Flag),
+ io:format("api_missuse -> done~n", []),
ok.
@@ -1525,11 +1549,11 @@ split([C| Cs], I, Is) ->
split([], I, Is) ->
lists:reverse([lists:reverse(I)| Is]).
-do_ftp_open(Host, Flags) ->
+do_ftp_open(Host, Opts) ->
io:format("do_ftp_open -> entry with"
- "~n Host: ~p"
- "~n Flags: ~p", [Host, Flags]),
- case ftp:open(Host, Flags) of
+ "~n Host: ~p"
+ "~n Opts: ~p", [Host, Opts]),
+ case ftp:open(Host, Opts) of
{ok, _} = OK ->
OK;
{error, Reason} ->
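Note: the suite now uses the plain ftp:open(Host, Options) API instead of the old {option_list, ...} tuple and the separate ftp:force_active/1 call. A hypothetical session built from the options exercised above (host and credentials are placeholders):

    {ok, Pid} = ftp:open("ftp.example.com",
                         [{port, 21},
                          {mode, passive},              %% or {mode, active}
                          {verbose, true},
                          {timeout, timer:seconds(15)}]),
    ok = ftp:user(Pid, "anonymous", "guest"),
    ok = ftp:close(Pid).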
diff --git a/lib/inets/test/http_format_SUITE.erl b/lib/inets/test/http_format_SUITE.erl
index 9559317640..79945f0f4d 100644
--- a/lib/inets/test/http_format_SUITE.erl
+++ b/lib/inets/test/http_format_SUITE.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2004-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2004-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%
@@ -567,6 +567,12 @@ convert_netscapecookie_date(Config) when is_list(Config) ->
http_util:convert_netscapecookie_date("Sun, 12-Nov-2006 08:59:38 GMT"),
{{2006,12,12},{8,59,38}} =
http_util:convert_netscapecookie_date("Sun, 12-Dec-2006 08:59:38 GMT"),
+ {{2006,12,12},{8,59,38}} =
+ http_util:convert_netscapecookie_date("Sun 12-Dec-2006 08:59:38 GMT"),
+ {{2006,12,12},{8,59,38}} =
+ http_util:convert_netscapecookie_date("Sun, 12-Dec-06 08:59:38 GMT"),
+ {{2006,12,12},{8,59,38}} =
+ http_util:convert_netscapecookie_date("Sun 12-Dec-06 08:59:38 GMT"),
ok.
%%--------------------------------------------------------------------
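Note: as the added assertions show, http_util:convert_netscapecookie_date/1 (an internal inets helper, exercised directly by this test) now also accepts dates without the comma after the weekday and with two-digit years:

    {{2006,12,12},{8,59,38}} =
        http_util:convert_netscapecookie_date("Sun, 12-Dec-2006 08:59:38 GMT"),
    {{2006,12,12},{8,59,38}} =
        http_util:convert_netscapecookie_date("Sun 12-Dec-06 08:59:38 GMT").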
diff --git a/lib/inets/test/httpc_SUITE.erl b/lib/inets/test/httpc_SUITE.erl
index aa65fb1197..b5fd896001 100644
--- a/lib/inets/test/httpc_SUITE.erl
+++ b/lib/inets/test/httpc_SUITE.erl
@@ -87,8 +87,14 @@ all(suite) ->
http_headers_dummy,
http_bad_response,
ssl_head,
+ ossl_head,
+ essl_head,
ssl_get,
+ ossl_get,
+ essl_get,
ssl_trace,
+ ossl_trace,
+ essl_trace,
http_redirect,
http_redirect_loop,
http_internal_server_error,
@@ -179,49 +185,66 @@ init_per_testcase(otp_8154_1 = Case, Config) ->
init_per_testcase(Case, Config) ->
init_per_testcase(Case, 2, Config).
+init_per_testcase_ssl(Tag, PrivDir, SslConfFile, Config) ->
+ tsp("init_per_testcase_ssl -> stop ssl"),
+ application:stop(ssl),
+ Config2 = lists:keydelete(local_ssl_server, 1, Config),
+ %% Will start inets
+ tsp("init_per_testcase_ssl -> try start http server (including inets)"),
+ Server = inets_test_lib:start_http_server(
+ filename:join(PrivDir, SslConfFile), Tag),
+ tsp("init_per_testcase -> Server: ~p", [Server]),
+ [{local_ssl_server, Server} | Config2].
+
init_per_testcase(Case, Timeout, Config) ->
- io:format(user, "~n~n*** INIT ~w:~w[~w] ***~n~n",
+ io:format(user, "~n~n*** INIT ~w:[~w][~w] ***~n~n",
[?MODULE, Timeout, Case]),
- PrivDir = ?config(priv_dir, Config),
+ PrivDir = ?config(priv_dir, Config),
+ tsp("init_per_testcase -> stop inets"),
application:stop(inets),
- Dog = test_server:timetrap(inets_test_lib:minutes(Timeout)),
- TmpConfig = lists:keydelete(watchdog, 1, Config),
- IpConfFile = integer_to_list(?IP_PORT) ++ ".conf",
+ Dog = test_server:timetrap(inets_test_lib:minutes(Timeout)),
+ TmpConfig = lists:keydelete(watchdog, 1, Config),
+ IpConfFile = integer_to_list(?IP_PORT) ++ ".conf",
SslConfFile = integer_to_list(?SSL_PORT) ++ ".conf",
+ %% inets:enable_trace(max, io, httpd),
+ %% inets:enable_trace(max, io, httpc),
+ inets:enable_trace(max, io, all),
+
NewConfig =
case atom_to_list(Case) of
- "ssl" ++ _ ->
- application:stop(ssl),
- TmpConfig2 =
- lists:keydelete(local_ssl_server, 1, TmpConfig),
- %% Will start inets
- Server =
- inets_test_lib:start_http_server(
- filename:join(PrivDir, SslConfFile)),
- [{watchdog, Dog}, {local_ssl_server, Server} | TmpConfig2];
+ [$s, $s, $l | _] ->
+ init_per_testcase_ssl(ssl, PrivDir, SslConfFile, [{watchdog, Dog} | TmpConfig]);
+
+ [$o, $s, $s, $l | _] ->
+ init_per_testcase_ssl(ossl, PrivDir, SslConfFile, [{watchdog, Dog} | TmpConfig]);
+
+ [$e, $s, $s, $l | _] ->
+ init_per_testcase_ssl(essl, PrivDir, SslConfFile, [{watchdog, Dog} | TmpConfig]);
+
"proxy" ++ Rest ->
- case Rest of
- "_https_not_supported" ->
- inets:start(),
- case (catch application:start(ssl)) of
- ok ->
- [{watchdog, Dog} | TmpConfig];
- _ ->
- [{skip,
- "SSL does not seem to be supported"}
- | TmpConfig]
- end;
- _ ->
- case is_proxy_available(?PROXY, ?PROXY_PORT) of
- true ->
- inets:start(),
- [{watchdog, Dog} | TmpConfig];
- false ->
- [{skip, "Failed to contact proxy"} |
- TmpConfig]
- end
- end;
+ case Rest of
+ "_https_not_supported" ->
+ tsp("init_per_testcase -> [proxy case] start inets"),
+ inets:start(),
+ tsp("init_per_testcase -> [proxy case] start ssl"),
+ case (catch application:start(ssl)) of
+ ok ->
+ [{watchdog, Dog} | TmpConfig];
+ _ ->
+ [{skip, "SSL does not seem to be supported"}
+ | TmpConfig]
+ end;
+ _ ->
+ case is_proxy_available(?PROXY, ?PROXY_PORT) of
+ true ->
+ inets:start(),
+ [{watchdog, Dog} | TmpConfig];
+ false ->
+ [{skip, "Failed to contact proxy"} |
+ TmpConfig]
+ end
+ end;
_ ->
TmpConfig2 = lists:keydelete(local_server, 1, TmpConfig),
Server =
@@ -231,13 +254,12 @@ init_per_testcase(Case, Timeout, Config) ->
[{watchdog, Dog}, {local_server, Server} | TmpConfig2]
end,
- http:set_options([{proxy, {{?PROXY, ?PROXY_PORT},
- ["localhost", ?IPV6_LOCAL_HOST]}}]),
- inets:enable_trace(max, io, httpc),
- %% inets:enable_trace(max, io, all),
+ httpc:set_options([{proxy, {{?PROXY, ?PROXY_PORT},
+ ["localhost", ?IPV6_LOCAL_HOST]}}]),
%% snmp:set_trace([gen_tcp, inet_tcp, prim_inet]),
NewConfig.
+
%%--------------------------------------------------------------------
%% Function: end_per_testcase(Case, Config) -> _
%% Case - atom()
@@ -306,7 +328,7 @@ http_head(Config) when is_list(Config) ->
ok ->
Port = ?config(local_port, Config),
URL = ?URL_START ++ integer_to_list(Port) ++ "/dummy.html",
- case http:request(head, {URL, []}, [], []) of
+ case httpc:request(head, {URL, []}, [], []) of
{ok, {{_,200,_}, [_ | _], []}} ->
ok;
{ok, WrongReply} ->
@@ -337,7 +359,7 @@ http_get(Config) when is_list(Config) ->
HttpOptions1 = [{timeout, Timeout}, {connect_timeout, ConnTimeout}],
Options1 = [],
Body =
- case http:request(Method, Request, HttpOptions1, Options1) of
+ case httpc:request(Method, Request, HttpOptions1, Options1) of
{ok, {{_,200,_}, [_ | _], ReplyBody = [_ | _]}} ->
ReplyBody;
{ok, UnexpectedReply1} ->
@@ -346,12 +368,12 @@ http_get(Config) when is_list(Config) ->
tsf({bad_reply, Error1})
end,
- %% eqvivivalent to http:request(get, {URL, []}, [], []),
+ %% equivalent to httpc:request(get, {URL, []}, [], []),
inets_test_lib:check_body(Body),
HttpOptions2 = [],
Options2 = [{body_format, binary}],
- case http:request(Method, Request, HttpOptions2, Options2) of
+ case httpc:request(Method, Request, HttpOptions2, Options2) of
{ok, {{_,200,_}, [_ | _], Bin}} when is_binary(Bin) ->
ok;
{ok, {{_,200,_}, [_ | _], BadBin}} ->
@@ -390,11 +412,11 @@ http_post(Config) when is_list(Config) ->
Body = lists:duplicate(100, "1"),
{ok, {{_,200,_}, [_ | _], [_ | _]}} =
- http:request(post, {URL, [{"expect","100-continue"}],
+ httpc:request(post, {URL, [{"expect","100-continue"}],
"text/plain", Body}, [], []),
{ok, {{_,504,_}, [_ | _], []}} =
- http:request(post, {URL, [{"expect","100-continue"}],
+ httpc:request(post, {URL, [{"expect","100-continue"}],
"text/plain", "foobar"}, [], []);
_ ->
{skip, "Failed to start local http-server"}
@@ -411,13 +433,13 @@ http_emulate_lower_versions(Config) when is_list(Config) ->
Port = ?config(local_port, Config),
URL = ?URL_START ++ integer_to_list(Port) ++ "/dummy.html",
{ok, Body0} =
- http:request(get, {URL, []}, [{version, "HTTP/0.9"}], []),
+ httpc:request(get, {URL, []}, [{version, "HTTP/0.9"}], []),
inets_test_lib:check_body(Body0),
{ok, {{"HTTP/1.0", 200, _}, [_ | _], Body1 = [_ | _]}} =
- http:request(get, {URL, []}, [{version, "HTTP/1.0"}], []),
+ httpc:request(get, {URL, []}, [{version, "HTTP/1.0"}], []),
inets_test_lib:check_body(Body1),
{ok, {{"HTTP/1.1", 200, _}, [_ | _], Body2 = [_ | _]}} =
- http:request(get, {URL, []}, [{version, "HTTP/1.1"}], []),
+ httpc:request(get, {URL, []}, [{version, "HTTP/1.1"}], []),
inets_test_lib:check_body(Body2);
_->
{skip, "Failed to start local http-server"}
@@ -431,24 +453,24 @@ http_relaxed(doc) ->
http_relaxed(suite) ->
[];
http_relaxed(Config) when is_list(Config) ->
- ok = http:set_options([{ipv6, disabled}]), % also test the old option
- %% ok = http:set_options([{ipfamily, inet}]),
+ ok = httpc:set_options([{ipv6, disabled}]), % also test the old option
+ %% ok = httpc:set_options([{ipfamily, inet}]),
{DummyServerPid, Port} = dummy_server(self(), ipv4),
URL = ?URL_START ++ integer_to_list(Port) ++
"/missing_reason_phrase.html",
{error, Reason} =
- http:request(get, {URL, []}, [{relaxed, false}], []),
+ httpc:request(get, {URL, []}, [{relaxed, false}], []),
test_server:format("Not relaxed: ~p~n", [Reason]),
{ok, {{_, 200, _}, [_ | _], [_ | _]}} =
- http:request(get, {URL, []}, [{relaxed, true}], []),
+ httpc:request(get, {URL, []}, [{relaxed, true}], []),
DummyServerPid ! stop,
- ok = http:set_options([{ipv6, enabled}]),
- %% ok = http:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
+ ok = httpc:set_options([{ipv6, enabled}]),
+ %% ok = httpc:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
ok.
@@ -458,7 +480,7 @@ http_dummy_pipe(doc) ->
http_dummy_pipe(suite) ->
[];
http_dummy_pipe(Config) when is_list(Config) ->
- ok = http:set_options([{ipfamily, inet}]),
+ ok = httpc:set_options([{ipfamily, inet}]),
{DummyServerPid, Port} = dummy_server(self(), ipv4),
URL = ?URL_START ++ integer_to_list(Port) ++ "/foobar.html",
@@ -466,7 +488,7 @@ http_dummy_pipe(Config) when is_list(Config) ->
test_pipeline(URL),
DummyServerPid ! stop,
- ok = http:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
+ ok = httpc:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
ok.
http_inets_pipe(doc) ->
@@ -488,11 +510,11 @@ test_pipeline(URL) ->
p("test_pipeline -> entry with"
"~n URL: ~p", [URL]),
- http:set_options([{pipeline_timeout, 50000}]),
+ httpc:set_options([{pipeline_timeout, 50000}]),
p("test_pipeline -> issue (async) request 1"),
{ok, RequestId1} =
- http:request(get, {URL, []}, [], [{sync, false}]),
+ httpc:request(get, {URL, []}, [], [{sync, false}]),
test_server:format("RequestId1: ~p~n", [RequestId1]),
p("test_pipeline -> RequestId1: ~p", [RequestId1]),
@@ -502,13 +524,13 @@ test_pipeline(URL) ->
p("test_pipeline -> issue (async) request 2"),
{ok, RequestId2} =
- http:request(get, {URL, []}, [], [{sync, false}]),
+ httpc:request(get, {URL, []}, [], [{sync, false}]),
tsp("RequestId2: ~p", [RequestId2]),
p("test_pipeline -> RequestId2: ~p", [RequestId2]),
p("test_pipeline -> issue (sync) request 3"),
{ok, {{_,200,_}, [_ | _], [_ | _]}} =
- http:request(get, {URL, []}, [], []),
+ httpc:request(get, {URL, []}, [], []),
p("test_pipeline -> expect reply for (async) request 1 or 2"),
receive
@@ -544,18 +566,18 @@ test_pipeline(URL) ->
p("test_pipeline -> issue (async) request 4"),
{ok, RequestId3} =
- http:request(get, {URL, []}, [], [{sync, false}]),
+ httpc:request(get, {URL, []}, [], [{sync, false}]),
tsp("RequestId3: ~p", [RequestId3]),
p("test_pipeline -> RequestId3: ~p", [RequestId3]),
p("test_pipeline -> issue (async) request 5"),
{ok, RequestId4} =
- http:request(get, {URL, []}, [], [{sync, false}]),
+ httpc:request(get, {URL, []}, [], [{sync, false}]),
tsp("RequestId4: ~p~n", [RequestId4]),
p("test_pipeline -> RequestId4: ~p", [RequestId4]),
p("test_pipeline -> cancel (async) request 4"),
- ok = http:cancel_request(RequestId3),
+ ok = httpc:cancel_request(RequestId3),
p("test_pipeline -> expect *no* reply for cancelled (async) request 4 (for 3 secs)"),
receive
@@ -607,7 +629,7 @@ http_trace(Config) when is_list(Config) ->
ok ->
Port = ?config(local_port, Config),
URL = ?URL_START ++ integer_to_list(Port) ++ "/dummy.html",
- case http:request(trace, {URL, []}, [], []) of
+ case httpc:request(trace, {URL, []}, [], []) of
{ok, {{_,200,_}, [_ | _], "TRACE /dummy.html" ++ _}} ->
ok;
{ok, {{_,200,_}, [_ | _], WrongBody}} ->
@@ -631,7 +653,7 @@ http_async(Config) when is_list(Config) ->
Port = ?config(local_port, Config),
URL = ?URL_START ++ integer_to_list(Port) ++ "/dummy.html",
{ok, RequestId} =
- http:request(get, {URL, []}, [], [{sync, false}]),
+ httpc:request(get, {URL, []}, [], [{sync, false}]),
Body =
receive
@@ -644,8 +666,8 @@ http_async(Config) when is_list(Config) ->
inets_test_lib:check_body(binary_to_list(Body)),
{ok, NewRequestId} =
- http:request(get, {URL, []}, [], [{sync, false}]),
- ok = http:cancel_request(NewRequestId),
+ httpc:request(get, {URL, []}, [], [{sync, false}]),
+ ok = httpc:cancel_request(NewRequestId),
receive
{http, {NewRequestId, _NewResult}} ->
test_server:fail(http_cancel_request_failed)
@@ -669,9 +691,9 @@ http_save_to_file(Config) when is_list(Config) ->
Port = ?config(local_port, Config),
URL = ?URL_START ++ integer_to_list(Port) ++ "/dummy.html",
{ok, saved_to_file}
- = http:request(get, {URL, []}, [], [{stream, FilePath}]),
+ = httpc:request(get, {URL, []}, [], [{stream, FilePath}]),
{ok, Bin} = file:read_file(FilePath),
- {ok, {{_,200,_}, [_ | _], Body}} = http:request(URL),
+ {ok, {{_,200,_}, [_ | _], Body}} = httpc:request(URL),
Bin == Body;
_ ->
{skip, "Failed to start local http-server"}
@@ -690,7 +712,7 @@ http_save_to_file_async(Config) when is_list(Config) ->
FilePath = filename:join(PrivDir, "dummy.html"),
Port = ?config(local_port, Config),
URL = ?URL_START ++ integer_to_list(Port) ++ "/dummy.html",
- {ok, RequestId} = http:request(get, {URL, []}, [],
+ {ok, RequestId} = httpc:request(get, {URL, []}, [],
[{stream, FilePath},
{sync, false}]),
receive
@@ -701,7 +723,7 @@ http_save_to_file_async(Config) when is_list(Config) ->
end,
{ok, Bin} = file:read_file(FilePath),
- {ok, {{_,200,_}, [_ | _], Body}} = http:request(URL),
+ {ok, {{_,200,_}, [_ | _], Body}} = httpc:request(URL),
Bin == Body;
_ ->
{skip, "Failed to start local http-server"}
@@ -731,7 +753,7 @@ http_headers(Config) when is_list(Config) ->
Date = httpd_util:rfc1123_date({date(), time()}),
{ok, {{_,200,_}, [_ | _], [_ | _]}} =
- http:request(get, {URL, [{"If-Modified-Since",
+ httpc:request(get, {URL, [{"If-Modified-Since",
Mod},
{"From","[email protected]"},
{"Date", Date}
@@ -742,7 +764,7 @@ http_headers(Config) when is_list(Config) ->
CreatedSec+1)),
{ok, {{_,200,_}, [_ | _], [_ | _]}} =
- http:request(get, {URL, [{"If-UnModified-Since",
+ httpc:request(get, {URL, [{"If-UnModified-Since",
Mod1}
]}, [], []),
@@ -750,12 +772,12 @@ http_headers(Config) when is_list(Config) ->
{ok, {{_,200,_}, [_ | _], [_ | _]}} =
- http:request(get, {URL, [{"If-Match",
+ httpc:request(get, {URL, [{"If-Match",
Tag}
]}, [], []),
{ok, {{_,200,_}, [_ | _], _}} =
- http:request(get, {URL, [{"If-None-Match",
+ httpc:request(get, {URL, [{"If-None-Match",
"NotEtag,NeihterEtag"},
{"Connection", "Close"}
]}, [], []),
@@ -773,7 +795,7 @@ http_headers_dummy(doc) ->
http_headers_dummy(suite) ->
[];
http_headers_dummy(Config) when is_list(Config) ->
- ok = http:set_options([{ipfamily, inet}]),
+ ok = httpc:set_options([{ipfamily, inet}]),
{DummyServerPid, Port} = dummy_server(self(), ipv4),
URL = ?URL_START ++ integer_to_list(Port) ++ "/dummy_headers.html",
@@ -789,7 +811,7 @@ http_headers_dummy(Config) when is_list(Config) ->
%% that the client header-handling code. This would not
%% be a valid http-request!
{ok, {{_,200,_}, [_ | _], [_|_]}} =
- http:request(post,
+ httpc:request(post,
{URL,
[{"Via",
"1.0 fred, 1.1 nowhere.com (Apache/1.1)"},
@@ -828,7 +850,7 @@ http_headers_dummy(Config) when is_list(Config) ->
], "text/plain", FooBar},
[], []),
DummyServerPid ! stop,
- ok = http:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
+ ok = httpc:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
ok.
@@ -838,21 +860,21 @@ http_bad_response(doc) ->
http_bad_response(suite) ->
[];
http_bad_response(Config) when is_list(Config) ->
- ok = http:set_options([{ipfamily, inet}]),
+ ok = httpc:set_options([{ipfamily, inet}]),
{DummyServerPid, Port} = dummy_server(self(), ipv4),
URL = ?URL_START ++ integer_to_list(Port) ++ "/missing_crlf.html",
URL1 = ?URL_START ++ integer_to_list(Port) ++ "/wrong_statusline.html",
- {error, timeout} = http:request(get, {URL, []}, [{timeout, 400}], []),
+ {error, timeout} = httpc:request(get, {URL, []}, [{timeout, 400}], []),
- {error, Reason} = http:request(URL1),
+ {error, Reason} = httpc:request(URL1),
test_server:format("Wrong Statusline: ~p~n", [Reason]),
DummyServerPid ! stop,
- ok = http:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
+ ok = httpc:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
ok.
@@ -862,69 +884,168 @@ ssl_head(doc) ->
ssl_head(suite) ->
[];
ssl_head(Config) when is_list(Config) ->
+ ssl_head(ssl, Config).
+
+ossl_head(doc) ->
+ ["Same as http_head/1 but over ssl sockets."];
+ossl_head(suite) ->
+ [];
+ossl_head(Config) when is_list(Config) ->
+ ssl_head(ossl, Config).
+
+essl_head(doc) ->
+ ["Same as http_head/1 but over ssl sockets."];
+essl_head(suite) ->
+ [];
+essl_head(Config) when is_list(Config) ->
+ ssl_head(essl, Config).
+
+ssl_head(SslTag, Config) ->
+ tsp("ssl_head -> entry with"
+ "~n SslTag: ~p"
+ "~n Config: ~p", [SslTag, Config]),
case ?config(local_ssl_server, Config) of
ok ->
- DataDir = ?config(data_dir, Config),
- Port = ?config(local_ssl_port, Config),
- URL = ?SSL_URL_START ++ integer_to_list(Port) ++ "/dummy.html",
- CertFile = filename:join(DataDir, "ssl_client_cert.pem"),
+ DataDir = ?config(data_dir, Config),
+ Port = ?config(local_ssl_port, Config),
+ URL = ?SSL_URL_START ++ integer_to_list(Port) ++ "/dummy.html",
+ CertFile = filename:join(DataDir, "ssl_client_cert.pem"),
SSLOptions = [{certfile, CertFile}, {keyfile, CertFile}],
+ SSLConfig =
+ case SslTag of
+ ssl ->
+ SSLOptions;
+ ossl ->
+ {ossl, SSLOptions};
+ essl ->
+ {essl, SSLOptions}
+ end,
+ tsp("ssl_head -> make request using: "
+ "~n URL: ~p"
+ "~n SslTag: ~p"
+ "~n SSLOptions: ~p", [URL, SslTag, SSLOptions]),
{ok, {{_,200, _}, [_ | _], []}} =
- http:request(head, {URL, []}, [{ssl, SSLOptions}], []);
+ httpc:request(head, {URL, []}, [{ssl, SSLConfig}], []);
{ok, _} ->
- {skip, "Failed to start local http-server"};
+ {skip, "local http-server not started"};
_ ->
- {skip, "Failed to start SSL"}
+ {skip, "SSL not started"}
end.
+
+
%%-------------------------------------------------------------------------
ssl_get(doc) ->
["Same as http_get/1 but over ssl sockets."];
ssl_get(suite) ->
[];
ssl_get(Config) when is_list(Config) ->
+ ssl_get(ssl, Config).
+
+ossl_get(doc) ->
+ ["Same as http_get/1 but over ssl sockets."];
+ossl_get(suite) ->
+ [];
+ossl_get(Config) when is_list(Config) ->
+ ssl_get(ossl, Config).
+
+essl_get(doc) ->
+ ["Same as http_get/1 but over ssl sockets."];
+essl_get(suite) ->
+ [];
+essl_get(Config) when is_list(Config) ->
+ ssl_get(essl, Config).
+
+ssl_get(SslTag, Config) when is_list(Config) ->
case ?config(local_ssl_server, Config) of
ok ->
- DataDir = ?config(data_dir, Config),
- Port = ?config(local_ssl_port, Config),
- URL = ?SSL_URL_START ++ integer_to_list(Port) ++ "/dummy.html",
- CertFile = filename:join(DataDir, "ssl_client_cert.pem"),
+ DataDir = ?config(data_dir, Config),
+ Port = ?config(local_ssl_port, Config),
+ URL = ?SSL_URL_START ++ integer_to_list(Port) ++ "/dummy.html",
+ CertFile = filename:join(DataDir, "ssl_client_cert.pem"),
SSLOptions = [{certfile, CertFile}, {keyfile, CertFile}],
- {ok, {{_,200, _}, [_ | _], Body = [_ | _]}} =
- http:request(get, {URL, []}, [{ssl, SSLOptions}], []),
- inets_test_lib:check_body(Body);
+ SSLConfig =
+ case SslTag of
+ ssl ->
+ SSLOptions;
+ ossl ->
+ {ossl, SSLOptions};
+ essl ->
+ {essl, SSLOptions}
+ end,
+ tsp("ssl_get -> make request using: "
+ "~n URL: ~p"
+ "~n SslTag: ~p"
+ "~n SSLOptions: ~p", [URL, SslTag, SSLOptions]),
+ {ok, {{_,200, _}, [_ | _], Body = [_ | _]}} =
+ httpc:request(get, {URL, []}, [{ssl, SSLConfig}], []),
+ inets_test_lib:check_body(Body);
{ok, _} ->
{skip, "Failed to start local http-server"};
_ ->
{skip, "Failed to start SSL"}
end.
+
+
%%-------------------------------------------------------------------------
ssl_trace(doc) ->
["Same as http_trace/1 but over ssl sockets."];
ssl_trace(suite) ->
[];
ssl_trace(Config) when is_list(Config) ->
+ ssl_trace(ssl, Config).
+
+ossl_trace(doc) ->
+ ["Same as http_trace/1 but over ssl sockets."];
+ossl_trace(suite) ->
+ [];
+ossl_trace(Config) when is_list(Config) ->
+ ssl_trace(ossl, Config).
+
+essl_trace(doc) ->
+ ["Same as http_trace/1 but over ssl sockets."];
+essl_trace(suite) ->
+ [];
+essl_trace(Config) when is_list(Config) ->
+ ssl_trace(essl, Config).
+
+ssl_trace(SslTag, Config) when is_list(Config) ->
case ?config(local_ssl_server, Config) of
ok ->
- DataDir = ?config(data_dir, Config),
- Port = ?config(local_ssl_port, Config),
- URL = ?SSL_URL_START ++ integer_to_list(Port) ++ "/dummy.html",
- CertFile = filename:join(DataDir, "ssl_client_cert.pem"),
+ DataDir = ?config(data_dir, Config),
+ Port = ?config(local_ssl_port, Config),
+ URL = ?SSL_URL_START ++ integer_to_list(Port) ++ "/dummy.html",
+ CertFile = filename:join(DataDir, "ssl_client_cert.pem"),
SSLOptions = [{certfile, CertFile}, {keyfile, CertFile}],
- case http:request(trace, {URL, []}, [{ssl, SSLOptions}], []) of
+ SSLConfig =
+ case SslTag of
+ ssl ->
+ SSLOptions;
+ ossl ->
+ {ossl, SSLOptions};
+ essl ->
+ {essl, SSLOptions}
+ end,
+ tsp("ssl_trace -> make request using: "
+ "~n URL: ~p"
+ "~n SslTag: ~p"
+ "~n SSLOptions: ~p", [URL, SslTag, SSLOptions]),
+ case httpc:request(trace, {URL, []}, [{ssl, SSLConfig}], []) of
{ok, {{_,200, _}, [_ | _], "TRACE /dummy.html" ++ _}} ->
ok;
{ok, {{_,200,_}, [_ | _], WrongBody}} ->
- test_server:fail({wrong_body, WrongBody});
+ tsf({wrong_body, WrongBody});
{ok, WrongReply} ->
- test_server:fail({wrong_reply, WrongReply});
+ tsf({wrong_reply, WrongReply});
Error ->
- test_server:fail({failed, Error})
+ tsf({failed, Error})
end;
{ok, _} ->
{skip, "Failed to start local http-server"};
_ ->
{skip, "Failed to start SSL"}
end.
+
+
%%-------------------------------------------------------------------------
http_redirect(doc) ->
["Test redirect with dummy server as httpd does not implement"
@@ -937,7 +1058,7 @@ http_redirect(Config) when is_list(Config) ->
case ?config(local_server, Config) of
ok ->
tsp("http_redirect -> set ipfamily option to inet"),
- ok = http:set_options([{ipfamily, inet}]),
+ ok = httpc:set_options([{ipfamily, inet}]),
tsp("http_redirect -> start dummy server inet"),
{DummyServerPid, Port} = dummy_server(self(), ipv4),
@@ -948,29 +1069,29 @@ http_redirect(Config) when is_list(Config) ->
tsp("http_redirect -> issue request 1: "
"~n ~p", [URL300]),
{ok, {{_,200,_}, [_ | _], [_|_]}}
- = http:request(get, {URL300, []}, [], []),
+ = httpc:request(get, {URL300, []}, [], []),
tsp("http_redirect -> issue request 2: "
"~n ~p", [URL300]),
{ok, {{_,300,_}, [_ | _], _}} =
- http:request(get, {URL300, []}, [{autoredirect, false}], []),
+ httpc:request(get, {URL300, []}, [{autoredirect, false}], []),
URL301 = ?URL_START ++ integer_to_list(Port) ++ "/301.html",
tsp("http_redirect -> issue request 3: "
"~n ~p", [URL301]),
{ok, {{_,200,_}, [_ | _], [_|_]}}
- = http:request(get, {URL301, []}, [], []),
+ = httpc:request(get, {URL301, []}, [], []),
tsp("http_redirect -> issue request 4: "
"~n ~p", [URL301]),
{ok, {{_,200,_}, [_ | _], []}}
- = http:request(head, {URL301, []}, [], []),
+ = httpc:request(head, {URL301, []}, [], []),
tsp("http_redirect -> issue request 5: "
"~n ~p", [URL301]),
{ok, {{_,301,_}, [_ | _], [_|_]}}
- = http:request(post, {URL301, [],"text/plain", "foobar"},
+ = httpc:request(post, {URL301, [],"text/plain", "foobar"},
[], []),
URL302 = ?URL_START ++ integer_to_list(Port) ++ "/302.html",
@@ -978,8 +1099,8 @@ http_redirect(Config) when is_list(Config) ->
tsp("http_redirect -> issue request 6: "
"~n ~p", [URL302]),
{ok, {{_,200,_}, [_ | _], [_|_]}}
- = http:request(get, {URL302, []}, [], []),
- case http:request(get, {URL302, []}, [], []) of
+ = httpc:request(get, {URL302, []}, [], []),
+ case httpc:request(get, {URL302, []}, [], []) of
{ok, Reply7} ->
case Reply7 of
{{_,200,_}, [_ | _], [_|_]} ->
@@ -1006,12 +1127,12 @@ http_redirect(Config) when is_list(Config) ->
tsp("http_redirect -> issue request 7: "
"~n ~p", [URL302]),
{ok, {{_,200,_}, [_ | _], []}}
- = http:request(head, {URL302, []}, [], []),
+ = httpc:request(head, {URL302, []}, [], []),
tsp("http_redirect -> issue request 8: "
"~n ~p", [URL302]),
{ok, {{_,302,_}, [_ | _], [_|_]}}
- = http:request(post, {URL302, [],"text/plain", "foobar"},
+ = httpc:request(post, {URL302, [],"text/plain", "foobar"},
[], []),
URL307 = ?URL_START ++ integer_to_list(Port) ++ "/307.html",
@@ -1019,23 +1140,23 @@ http_redirect(Config) when is_list(Config) ->
tsp("http_redirect -> issue request 9: "
"~n ~p", [URL307]),
{ok, {{_,200,_}, [_ | _], [_|_]}}
- = http:request(get, {URL307, []}, [], []),
+ = httpc:request(get, {URL307, []}, [], []),
tsp("http_redirect -> issue request 10: "
"~n ~p", [URL307]),
{ok, {{_,200,_}, [_ | _], []}}
- = http:request(head, {URL307, []}, [], []),
+ = httpc:request(head, {URL307, []}, [], []),
tsp("http_redirect -> issue request 11: "
"~n ~p", [URL307]),
{ok, {{_,307,_}, [_ | _], [_|_]}}
- = http:request(post, {URL307, [],"text/plain", "foobar"},
+ = httpc:request(post, {URL307, [],"text/plain", "foobar"},
[], []),
tsp("http_redirect -> stop dummy server"),
DummyServerPid ! stop,
tsp("http_redirect -> reset ipfamily option (to inet6fb4)"),
- ok = http:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
+ ok = httpc:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
tsp("http_redirect -> done"),
ok;
@@ -1051,15 +1172,15 @@ http_redirect_loop(doc) ->
http_redirect_loop(suite) ->
[];
http_redirect_loop(Config) when is_list(Config) ->
- ok = http:set_options([{ipfamily, inet}]),
+ ok = httpc:set_options([{ipfamily, inet}]),
{DummyServerPid, Port} = dummy_server(self(), ipv4),
URL = ?URL_START ++ integer_to_list(Port) ++ "/redirectloop.html",
{ok, {{_,300,_}, [_ | _], _}}
- = http:request(get, {URL, []}, [], []),
+ = httpc:request(get, {URL, []}, [], []),
DummyServerPid ! stop,
- ok = http:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
+ ok = httpc:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
ok.
%%-------------------------------------------------------------------------
@@ -1068,13 +1189,13 @@ http_internal_server_error(doc) ->
http_internal_server_error(suite) ->
[];
http_internal_server_error(Config) when is_list(Config) ->
- ok = http:set_options([{ipfamily, inet}]),
+ ok = httpc:set_options([{ipfamily, inet}]),
{DummyServerPid, Port} = dummy_server(self(), ipv4),
URL500 = ?URL_START ++ integer_to_list(Port) ++ "/500.html",
{ok, {{_,500,_}, [_ | _], _}}
- = http:request(get, {URL500, []}, [], []),
+ = httpc:request(get, {URL500, []}, [], []),
URL503 = ?URL_START ++ integer_to_list(Port) ++ "/503.html",
@@ -1084,16 +1205,16 @@ http_internal_server_error(Config) when is_list(Config) ->
ets:insert(unavailable, {503, unavailable}),
{ok, {{_,200, _}, [_ | _], [_|_]}} =
- http:request(get, {URL503, []}, [], []),
+ httpc:request(get, {URL503, []}, [], []),
ets:insert(unavailable, {503, long_unavailable}),
{ok, {{_,503, _}, [_ | _], [_|_]}} =
- http:request(get, {URL503, []}, [], []),
+ httpc:request(get, {URL503, []}, [], []),
ets:delete(unavailable),
DummyServerPid ! stop,
- ok = http:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
+ ok = httpc:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
ok.
@@ -1103,7 +1224,7 @@ http_userinfo(doc) ->
http_userinfo(suite) ->
[];
http_userinfo(Config) when is_list(Config) ->
- ok = http:set_options([{ipfamily, inet}]),
+ ok = httpc:set_options([{ipfamily, inet}]),
{DummyServerPid, Port} = dummy_server(self(), ipv4),
@@ -1111,16 +1232,16 @@ http_userinfo(Config) when is_list(Config) ->
++ integer_to_list(Port) ++ "/userinfo.html",
{ok, {{_,200,_}, [_ | _], _}}
- = http:request(get, {URLAuth, []}, [], []),
+ = httpc:request(get, {URLAuth, []}, [], []),
URLUnAuth = "http://alladin:foobar@localhost:"
++ integer_to_list(Port) ++ "/userinfo.html",
{ok, {{_,401, _}, [_ | _], _}} =
- http:request(get, {URLUnAuth, []}, [], []),
+ httpc:request(get, {URLUnAuth, []}, [], []),
DummyServerPid ! stop,
- ok = http:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
+ ok = httpc:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
ok.
@@ -1130,7 +1251,7 @@ http_cookie(doc) ->
http_cookie(suite) ->
[];
http_cookie(Config) when is_list(Config) ->
- ok = http:set_options([{cookies, enabled}, {ipfamily, inet}]),
+ ok = httpc:set_options([{cookies, enabled}, {ipfamily, inet}]),
{DummyServerPid, Port} = dummy_server(self(), ipv4),
URLStart = ?URL_START
@@ -1139,19 +1260,19 @@ http_cookie(Config) when is_list(Config) ->
URLCookie = URLStart ++ "/cookie.html",
{ok, {{_,200,_}, [_ | _], [_|_]}}
- = http:request(get, {URLCookie, []}, [], []),
+ = httpc:request(get, {URLCookie, []}, [], []),
ets:new(cookie, [named_table, public, set]),
ets:insert(cookie, {cookies, true}),
{ok, {{_,200,_}, [_ | _], [_|_]}}
- = http:request(get, {URLStart ++ "/", []}, [], []),
+ = httpc:request(get, {URLStart ++ "/", []}, [], []),
ets:delete(cookie),
- ok = http:set_options([{cookies, disabled}, {ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
+ ok = httpc:set_options([{cookies, disabled}, {ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
DummyServerPid ! stop,
- ok = http:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6************
+ ok = httpc:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6************
ok.
%%-------------------------------------------------------------------------
@@ -1162,7 +1283,7 @@ proxy_options(suite) ->
proxy_options(Config) when is_list(Config) ->
case ?config(skip, Config) of
undefined ->
- case http:request(options, {?PROXY_URL, []}, [], []) of
+ case httpc:request(options, {?PROXY_URL, []}, [], []) of
{ok, {{_,200,_}, Headers, _}} ->
case lists:keysearch("allow", 1, Headers) of
{value, {"allow", _}} ->
@@ -1186,7 +1307,7 @@ proxy_head(suite) ->
proxy_head(Config) when is_list(Config) ->
case ?config(skip, Config) of
undefined ->
- case http:request(head, {?PROXY_URL, []}, [], []) of
+ case httpc:request(head, {?PROXY_URL, []}, [], []) of
{ok, {{_,200, _}, [_ | _], []}} ->
ok;
Unexpected ->
@@ -1205,7 +1326,7 @@ proxy_get(suite) ->
proxy_get(Config) when is_list(Config) ->
case ?config(skip, Config) of
undefined ->
- case http:request(get, {?PROXY_URL, []}, [], []) of
+ case httpc:request(get, {?PROXY_URL, []}, [], []) of
{ok, {{_,200,_}, [_ | _], Body = [_ | _]}} ->
inets_test_lib:check_body(Body);
Unexpected ->
@@ -1257,7 +1378,7 @@ proxy_emulate_lower_versions(Config) when is_list(Config) ->
end.
pelv_get(Version) ->
- http:request(get, {?PROXY_URL, []}, [{version, Version}], []).
+ httpc:request(get, {?PROXY_URL, []}, [{version, Version}], []).
%%-------------------------------------------------------------------------
proxy_trace(doc) ->
@@ -1266,7 +1387,7 @@ proxy_trace(suite) ->
[];
proxy_trace(Config) when is_list(Config) ->
%%{ok, {{_,200,_}, [_ | _], "TRACE " ++ _}} =
- %% http:request(trace, {?PROXY_URL, []}, [], []),
+ %% httpc:request(trace, {?PROXY_URL, []}, [], []),
{skip, "HTTP TRACE is no longer allowed on the ?PROXY_URL server due "
"to security reasons"}.
@@ -1281,7 +1402,7 @@ proxy_post(suite) ->
proxy_post(Config) when is_list(Config) ->
case ?config(skip, Config) of
undefined ->
- case http:request(post, {?PROXY_URL, [],
+ case httpc:request(post, {?PROXY_URL, [],
"text/plain", "foobar"}, [],[]) of
{ok, {{_,405,_}, [_ | _], [_ | _]}} ->
ok;
@@ -1303,7 +1424,7 @@ proxy_put(suite) ->
proxy_put(Config) when is_list(Config) ->
case ?config(skip, Config) of
undefined ->
- case http:request(put, {"http://www.erlang.org/foobar.html", [],
+ case httpc:request(put, {"http://www.erlang.org/foobar.html", [],
"html", "<html> <body><h1> foo </h1>"
"<p>bar</p> </body></html>"}, [], []) of
{ok, {{_,405,_}, [_ | _], [_ | _]}} ->
@@ -1328,7 +1449,7 @@ proxy_delete(Config) when is_list(Config) ->
case ?config(skip, Config) of
undefined ->
URL = ?PROXY_URL ++ "/foobar.html",
- case http:request(delete, {URL, []}, [], []) of
+ case httpc:request(delete, {URL, []}, [], []) of
{ok, {{_,404,_}, [_ | _], [_ | _]}} ->
ok;
Unexpected ->
@@ -1348,7 +1469,7 @@ proxy_headers(Config) when is_list(Config) ->
case ?config(skip, Config) of
undefined ->
{ok, {{_,200,_}, [_ | _], [_ | _]}}
- = http:request(get, {?PROXY_URL,
+ = httpc:request(get, {?PROXY_URL,
[
{"Accept",
"text/*, text/html,"
@@ -1383,7 +1504,7 @@ proxy_auth(Config) when is_list(Config) ->
%% atleast the code for sending the header does not crash!
case ?config(skip, Config) of
undefined ->
- case http:request(get, {?PROXY_URL, []},
+ case httpc:request(get, {?PROXY_URL, []},
[{proxy_auth, {"foo", "bar"}}], []) of
{ok, {{_,200, _}, [_ | _], [_|_]}} ->
ok;
@@ -1403,7 +1524,7 @@ http_server_does_not_exist(suite) ->
[];
http_server_does_not_exist(Config) when is_list(Config) ->
{error, _} =
- http:request(get, {"http://localhost:" ++
+ httpc:request(get, {"http://localhost:" ++
integer_to_list(?NOT_IN_USE_PORT)
++ "/", []},[], []),
ok.
@@ -1418,7 +1539,7 @@ page_does_not_exist(Config) when is_list(Config) ->
Port = ?config(local_port, Config),
URL = ?URL_START ++ integer_to_list(Port) ++ "/doesnotexist.html",
{ok, {{_,404,_}, [_ | _], [_ | _]}}
- = http:request(get, {URL, []}, [], []),
+ = httpc:request(get, {URL, []}, [], []),
ok.
@@ -1432,7 +1553,7 @@ proxy_page_does_not_exist(Config) when is_list(Config) ->
undefined ->
URL = ?PROXY_URL ++ "/doesnotexist.html",
{ok, {{_,404,_}, [_ | _], [_ | _]}} =
- http:request(get, {URL, []}, [], []),
+ httpc:request(get, {URL, []}, [], []),
ok;
Reason ->
{skip, Reason}
@@ -1446,7 +1567,7 @@ proxy_https_not_supported(doc) ->
proxy_https_not_supported(suite) ->
[];
proxy_https_not_supported(Config) when is_list(Config) ->
- Result = http:request(get, {"https://login.yahoo.com", []}, [], []),
+ Result = httpc:request(get, {"https://login.yahoo.com", []}, [], []),
case Result of
{error, Reason} ->
%% ok so far
@@ -1478,10 +1599,10 @@ http_stream(Config) when is_list(Config) ->
Port = ?config(local_port, Config),
URL = ?URL_START ++ integer_to_list(Port) ++ "/dummy.html",
{ok, {{_,200,_}, [_ | _], Body}} =
- http:request(get, {URL, []}, [], []),
+ httpc:request(get, {URL, []}, [], []),
{ok, RequestId} =
- http:request(get, {URL, []}, [], [{sync, false},
+ httpc:request(get, {URL, []}, [], [{sync, false},
{stream, self}]),
receive
@@ -1506,7 +1627,7 @@ http_stream_once(Config) when is_list(Config) ->
"~n Config: ~p", [Config]),
p("http_stream_once -> set ipfamily to inet", []),
- ok = http:set_options([{ipfamily, inet}]),
+ ok = httpc:set_options([{ipfamily, inet}]),
p("http_stream_once -> start dummy server", []),
{DummyServerPid, Port} = dummy_server(self(), ipv4),
@@ -1521,18 +1642,18 @@ http_stream_once(Config) when is_list(Config) ->
p("http_stream_once -> stop dummy server", []),
DummyServerPid ! stop,
p("http_stream_once -> set ipfamily to inet6fb4", []),
- ok = http:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
+ ok = httpc:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
p("http_stream_once -> done", []),
ok.
once(URL) ->
p("once -> issue sync request for ~p", [URL]),
{ok, {{_,200,_}, [_ | _], Body}} =
- http:request(get, {URL, []}, [], []),
+ httpc:request(get, {URL, []}, [], []),
p("once -> issue async (self stream) request for ~p", [URL]),
{ok, RequestId} =
- http:request(get, {URL, []}, [], [{sync, false},
+ httpc:request(get, {URL, []}, [], [{sync, false},
{stream, {self, once}}]),
p("once -> await stream_start reply for (async) request ~p", [RequestId]),
@@ -1576,10 +1697,10 @@ proxy_stream(Config) when is_list(Config) ->
case ?config(skip, Config) of
undefined ->
{ok, {{_,200,_}, [_ | _], Body}} =
- http:request(get, {?PROXY_URL, []}, [], []),
+ httpc:request(get, {?PROXY_URL, []}, [], []),
{ok, RequestId} =
- http:request(get, {?PROXY_URL, []}, [],
+ httpc:request(get, {?PROXY_URL, []}, [],
[{sync, false}, {stream, self}]),
receive
@@ -1659,7 +1780,7 @@ ipv6(Config) when is_list(Config) ->
URL = "http://[" ++ ?IPV6_LOCAL_HOST ++ "]:" ++
integer_to_list(Port) ++ "/foobar.html",
{ok, {{_,200,_}, [_ | _], [_|_]}} =
- http:request(get, {URL, []}, [], []),
+ httpc:request(get, {URL, []}, [], []),
DummyServerPid ! stop,
ok;
@@ -1677,11 +1798,11 @@ headers_as_is(Config) when is_list(Config) ->
Port = ?config(local_port, Config),
URL = ?URL_START ++ integer_to_list(Port) ++ "/dummy.html",
{ok, {{_,200,_}, [_|_], [_|_]}} =
- http:request(get, {URL, [{"Host", "localhost"},{"Te", ""}]},
+ httpc:request(get, {URL, [{"Host", "localhost"},{"Te", ""}]},
[], [{headers_as_is, true}]),
{ok, {{_,400,_}, [_|_], [_|_]}} =
- http:request(get, {URL, [{"Te", ""}]},[], [{headers_as_is, true}]),
+ httpc:request(get, {URL, [{"Te", ""}]},[], [{headers_as_is, true}]),
ok.
@@ -1696,13 +1817,13 @@ options(Config) when is_list(Config) ->
Port = ?config(local_port, Config),
URL = ?URL_START ++ integer_to_list(Port) ++ "/dummy.html",
{ok, {{_,200,_}, [_ | _], Bin}}
- = http:request(get, {URL, []}, [{foo, bar}],
+ = httpc:request(get, {URL, []}, [{foo, bar}],
%% Ignore unknown options
[{body_format, binary}, {foo, bar}]),
true = is_binary(Bin),
{ok, {200, [_|_]}}
- = http:request(get, {URL, []}, [{timeout, infinity}],
+ = httpc:request(get, {URL, []}, [{timeout, infinity}],
[{full_result, false}]);
_ ->
{skip, "Failed to start local http-server"}
@@ -1715,17 +1836,17 @@ http_invalid_http(doc) ->
http_invalid_http(suite) ->
[];
http_invalid_http(Config) when is_list(Config) ->
- ok = http:set_options([{ipfamily, inet}]),
+ ok = httpc:set_options([{ipfamily, inet}]),
{DummyServerPid, Port} = dummy_server(self(), ipv4),
URL = ?URL_START ++ integer_to_list(Port) ++ "/invalid_http.html",
{error, {could_not_parse_as_http, _} = Reason} =
- http:request(get, {URL, []}, [], []),
+ httpc:request(get, {URL, []}, [], []),
test_server:format("Parse error: ~p ~n", [Reason]),
DummyServerPid ! stop,
- ok = http:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
+ ok = httpc:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
ok.
@@ -1762,7 +1883,7 @@ empty_body_otp_6243(Config) when is_list(Config) ->
Port = ?config(local_port, Config),
URL = ?URL_START ++ integer_to_list(Port) ++ "/empty.html",
{ok, {{_,200,_}, [_ | _], []}} =
- http:request(get, {URL, []}, [{timeout, 500}], []).
+ httpc:request(get, {URL, []}, [{timeout, 500}], []).
%%-------------------------------------------------------------------------
@@ -1772,14 +1893,14 @@ transfer_encoding_otp_6807(doc) ->
transfer_encoding_otp_6807(suite) ->
[];
transfer_encoding_otp_6807(Config) when is_list(Config) ->
- ok = http:set_options([{ipfamily, inet}]),
+ ok = httpc:set_options([{ipfamily, inet}]),
{DummyServerPid, Port} = dummy_server(self(), ipv4),
URL = ?URL_START ++ integer_to_list(Port) ++
"/capital_transfer_encoding.html",
- {ok, {{_,200,_}, [_|_], [_ | _]}} = http:request(URL),
+ {ok, {{_,200,_}, [_|_], [_ | _]}} = httpc:request(URL),
DummyServerPid ! stop,
- ok = http:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
+ ok = httpc:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
ok.
@@ -1805,13 +1926,13 @@ empty_response_header_otp_6830(doc) ->
empty_response_header_otp_6830(suite) ->
[];
empty_response_header_otp_6830(Config) when is_list(Config) ->
- ok = http:set_options([{ipfamily, inet}]),
+ ok = httpc:set_options([{ipfamily, inet}]),
{DummyServerPid, Port} = dummy_server(self(), ipv4),
URL = ?URL_START ++ integer_to_list(Port) ++ "/no_headers.html",
- {ok, {{_,200,_}, [], [_ | _]}} = http:request(URL),
+ {ok, {{_,200,_}, [], [_ | _]}} = httpc:request(URL),
DummyServerPid ! stop,
- ok = http:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
+ ok = httpc:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
ok.
@@ -1822,13 +1943,13 @@ no_content_204_otp_6982(doc) ->
no_content_204_otp_6982(suite) ->
[];
no_content_204_otp_6982(Config) when is_list(Config) ->
- ok = http:set_options([{ipfamily, inet}]),
+ ok = httpc:set_options([{ipfamily, inet}]),
{DummyServerPid, Port} = dummy_server(self(), ipv4),
URL = ?URL_START ++ integer_to_list(Port) ++ "/no_content.html",
- {ok, {{_,204,_}, [], []}} = http:request(URL),
+ {ok, {{_,204,_}, [], []}} = httpc:request(URL),
DummyServerPid ! stop,
- ok = http:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
+ ok = httpc:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
ok.
@@ -1840,13 +1961,13 @@ missing_CR_otp_7304(doc) ->
missing_CR_otp_7304(suite) ->
[];
missing_CR_otp_7304(Config) when is_list(Config) ->
- ok = http:set_options([{ipfamily, inet}]),
+ ok = httpc:set_options([{ipfamily, inet}]),
{DummyServerPid, Port} = dummy_server(self(), ipv4),
URL = ?URL_START ++ integer_to_list(Port) ++ "/missing_CR.html",
- {ok, {{_,200,_}, _, [_ | _]}} = http:request(URL),
+ {ok, {{_,200,_}, _, [_ | _]}} = httpc:request(URL),
DummyServerPid ! stop,
- ok = http:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
+ ok = httpc:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
ok.
@@ -1860,15 +1981,15 @@ otp_7883_1(doc) ->
otp_7883_1(suite) ->
[];
otp_7883_1(Config) when is_list(Config) ->
- ok = http:set_options([{ipfamily, inet}]),
+ ok = httpc:set_options([{ipfamily, inet}]),
{DummyServerPid, Port} = dummy_server(self(), ipv4),
URL = ?URL_START ++ integer_to_list(Port) ++ "/just_close.html",
- {error, socket_closed_remotely} = http:request(URL),
+ {error, socket_closed_remotely} = httpc:request(URL),
DummyServerPid ! stop,
- ok = http:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
+ ok = httpc:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
ok.
otp_7883_2(doc) ->
@@ -1876,7 +1997,7 @@ otp_7883_2(doc) ->
otp_7883_2(suite) ->
[];
otp_7883_2(Config) when is_list(Config) ->
- ok = http:set_options([{ipfamily, inet}]),
+ ok = httpc:set_options([{ipfamily, inet}]),
{DummyServerPid, Port} = dummy_server(self(), ipv4),
@@ -1885,9 +2006,9 @@ otp_7883_2(Config) when is_list(Config) ->
Request = {URL, []},
HttpOptions = [],
Options = [{sync, false}],
- Profile = http:default_profile(),
+ Profile = httpc:default_profile(),
{ok, RequestId} =
- http:request(Method, Request, HttpOptions, Options, Profile),
+ httpc:request(Method, Request, HttpOptions, Options, Profile),
ok =
receive
{http, {RequestId, {error, socket_closed_remotely}}} ->
@@ -1895,7 +2016,7 @@ otp_7883_2(Config) when is_list(Config) ->
end,
DummyServerPid ! stop,
- ok = http:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
+ ok = httpc:set_options([{ipfamily, inet6fb4}]), % ********** ipfamily = inet6 *************
ok.
@@ -1966,7 +2087,7 @@ run_clients(NumClients, ServerPort, SeqNumServer) ->
fun() ->
io:format("[~w] client started - "
"issue request~n", [Id]),
- case http:request(Url) of
+ case httpc:request(Url) of
{ok, {{_,200,_}, _, Resp}} ->
io:format("[~w] 200 response: "
"~p~n", [Id, Resp]),
@@ -2354,7 +2475,7 @@ otp_8352(Config) when is_list(Config) ->
ConnOptions = [{max_sessions, MaxSessions},
{max_keep_alive_length, MaxKeepAlive},
{keep_alive_timeout, KeepAliveTimeout}],
- http:set_options(ConnOptions),
+ httpc:set_options(ConnOptions),
Method = get,
Port = ?config(local_port, Config),
@@ -2366,9 +2487,9 @@ otp_8352(Config) when is_list(Config) ->
Options1 = [{socket_opts, [{tos, 87},
{recbuf, 16#FFFF},
{sndbuf, 16#FFFF}]}],
- case http:request(Method, Request, HttpOptions1, Options1) of
+ case httpc:request(Method, Request, HttpOptions1, Options1) of
{ok, {{_,200,_}, [_ | _], ReplyBody1 = [_ | _]}} ->
- %% equivaliant to http:request(get, {URL, []}, [], []),
+            %% equivalent to httpc:request(get, {URL, []}, [], []),
inets_test_lib:check_body(ReplyBody1);
{ok, UnexpectedReply1} ->
tsf({unexpected_reply, UnexpectedReply1});
@@ -2382,9 +2503,9 @@ otp_8352(Config) when is_list(Config) ->
Options2 = [{socket_opts, [{tos, 84},
{recbuf, 32#1FFFF},
{sndbuf, 32#1FFFF}]}],
- case http:request(Method, Request, HttpOptions2, Options2) of
+ case httpc:request(Method, Request, HttpOptions2, Options2) of
{ok, {{_,200,_}, [_ | _], ReplyBody2 = [_ | _]}} ->
- %% equivaliant to http:request(get, {URL, []}, [], []),
+            %% equivalent to httpc:request(get, {URL, []}, [], []),
inets_test_lib:check_body(ReplyBody2);
{ok, UnexpectedReply2} ->
tsf({unexpected_reply, UnexpectedReply2});
@@ -2406,13 +2527,13 @@ otp_8371(doc) ->
otp_8371(suite) ->
[];
otp_8371(Config) when is_list(Config) ->
- ok = http:set_options([{ipv6, disabled}]), % also test the old option
+ ok = httpc:set_options([{ipv6, disabled}]), % also test the old option
{DummyServerPid, Port} = dummy_server(self(), ipv4),
URL = ?URL_START ++ integer_to_list(Port) ++
"/ensure_host_header_with_port.html",
- case http:request(get, {URL, []}, [], []) of
+ case httpc:request(get, {URL, []}, [], []) of
{ok, Result} ->
case Result of
{{_, 200, _}, _Headers, Body} ->
@@ -2436,7 +2557,7 @@ otp_8371(Config) when is_list(Config) ->
end,
DummyServerPid ! stop,
- ok = http:set_options([{ipv6, enabled}]),
+ ok = httpc:set_options([{ipv6, enabled}]),
ok.
@@ -2537,7 +2658,7 @@ receive_streamed_body(RequestId, Body) ->
end.
receive_streamed_body(RequestId, Body, Pid) ->
- http:stream_next(Pid),
+ httpc:stream_next(Pid),
test_server:format("~p:receive_streamed_body -> requested next stream ~n", [?MODULE]),
receive
{http, {RequestId, stream, BinBodyPart}} ->
@@ -2921,11 +3042,11 @@ provocate_not_modified_bug(Url) ->
Timeout = 15000, %% 15s should be plenty
{ok, {{_, 200, _}, ReplyHeaders, _Body}} =
- http:request(get, {Url, []}, [{timeout, Timeout}], []),
+ httpc:request(get, {Url, []}, [{timeout, Timeout}], []),
Etag = pick_header(ReplyHeaders, "ETag"),
Last = pick_header(ReplyHeaders, "last-modified"),
- case http:request(get, {Url, [{"If-None-Match", Etag},
+ case httpc:request(get, {Url, [{"If-None-Match", Etag},
{"If-Modified-Since", Last}]},
[{timeout, 15000}],
[]) of
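The httpc_SUITE hunks above are a mechanical rename of the deprecated http
client module to httpc; the argument shapes of request/1,4,5, set_options/1,
default_profile/0 and stream_next/1 are unchanged. A minimal sketch of the
renamed calls as the tests exercise them (the URL and option values here are
placeholders, not taken from the suite):

    %% Force IPv4, issue one synchronous and one self-streaming request,
    %% then restore the default address family.
    ok = httpc:set_options([{ipfamily, inet}]),
    {ok, {{_, 200, _}, _Headers, _Body}} =
        httpc:request(get, {"http://localhost:8080/dummy.html", []}, [], []),
    {ok, _RequestId} =
        httpc:request(get, {"http://localhost:8080/dummy.html", []}, [],
                      [{sync, false}, {stream, self}]),
    ok = httpc:set_options([{ipfamily, inet6fb4}]).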
diff --git a/lib/inets/test/httpd_SUITE.erl b/lib/inets/test/httpd_SUITE.erl
index 7403d4a643..3255cbec06 100644
--- a/lib/inets/test/httpd_SUITE.erl
+++ b/lib/inets/test/httpd_SUITE.erl
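In the httpd_SUITE changes below, each ssl_* test case is split into pssl_*,
ossl_* and essl_* variants that delegate to a shared helper taking an SSL tag,
and init_per_testcase2/3 derive that tag from the first character of the test
case name. The suite inlines the case expressions; ssl_tag/1 is only a
hypothetical name for the same mapping, shown here for reference:

    %% First character of the test-case name -> socket type used in the config.
    ssl_tag($p) -> ssl;   %% plain: old way of configuring SSL
    ssl_tag($o) -> ossl;  %% new way of configuring the old (OpenSSL-based) ssl
    ssl_tag($e) -> essl.  %% new way of configuring the new (Erlang-based) ssl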
@@ -32,44 +32,176 @@
init_per_suite/1, end_per_suite/1]).
%% Test cases must be exported.
--export([ip/1, ssl/1, http_1_1_ip/1, http_1_0_ip/1, http_0_9_ip/1,
- ipv6/1, tickets/1]).
+-export([
+ ip/1,
+ ssl/1, pssl/1, ossl/1, essl/1,
+ http_1_1_ip/1,
+ http_1_0_ip/1,
+ http_0_9_ip/1,
+ ipv6/1,
+ tickets/1
+ ]).
%% Core Server tests
--export([ip_mod_alias/1, ip_mod_actions/1, ip_mod_security/1, ip_mod_auth/1,
- ip_mod_auth_api/1, ip_mod_auth_mnesia_api/1,
- ip_mod_htaccess/1, ip_mod_cgi/1, ip_mod_esi/1,
- ip_mod_get/1, ip_mod_head/1, ip_mod_all/1, ip_load_light/1,
- ip_load_medium/1, ip_load_heavy/1, ip_dos_hostname/1,
- ip_time_test/1, ip_block_disturbing_idle/1,
- ip_block_non_disturbing_idle/1, ip_block_503/1,
- ip_block_disturbing_active/1, ip_block_non_disturbing_active/1,
+-export([
+ ip_mod_alias/1,
+ ip_mod_actions/1,
+ ip_mod_security/1,
+ ip_mod_auth/1,
+ ip_mod_auth_api/1,
+ ip_mod_auth_mnesia_api/1,
+ ip_mod_htaccess/1,
+ ip_mod_cgi/1,
+ ip_mod_esi/1,
+ ip_mod_get/1,
+ ip_mod_head/1,
+ ip_mod_all/1,
+ ip_load_light/1,
+ ip_load_medium/1,
+ ip_load_heavy/1,
+ ip_dos_hostname/1,
+ ip_time_test/1,
+ ip_block_disturbing_idle/1,
+ ip_block_non_disturbing_idle/1,
+ ip_block_503/1,
+ ip_block_disturbing_active/1,
+ ip_block_non_disturbing_active/1,
ip_block_disturbing_active_timeout_not_released/1,
ip_block_disturbing_active_timeout_released/1,
ip_block_non_disturbing_active_timeout_not_released/1,
ip_block_non_disturbing_active_timeout_released/1,
ip_block_disturbing_blocker_dies/1,
ip_block_non_disturbing_blocker_dies/1,
- ip_restart_no_block/1, ip_restart_disturbing_block/1,
+ ip_restart_no_block/1,
+ ip_restart_disturbing_block/1,
ip_restart_non_disturbing_block/1
]).
--export([ssl_mod_alias/1, ssl_mod_actions/1, ssl_mod_security/1,
- ssl_mod_auth/1, ssl_mod_auth_api/1,
- ssl_mod_auth_mnesia_api/1, ssl_mod_htaccess/1,
- ssl_mod_cgi/1, ssl_mod_esi/1, ssl_mod_get/1, ssl_mod_head/1,
- ssl_mod_all/1, ssl_load_light/1, ssl_load_medium/1,
- ssl_load_heavy/1, ssl_dos_hostname/1, ssl_time_test/1,
- ssl_restart_no_block/1, ssl_restart_disturbing_block/1,
- ssl_restart_non_disturbing_block/1, ssl_block_disturbing_idle/1,
- ssl_block_non_disturbing_idle/1, ssl_block_503/1,
- ssl_block_disturbing_active/1, ssl_block_non_disturbing_active/1,
- ssl_block_disturbing_active_timeout_not_released/1,
- ssl_block_disturbing_active_timeout_released/1,
- ssl_block_non_disturbing_active_timeout_not_released/1,
- ssl_block_non_disturbing_active_timeout_released/1,
- ssl_block_disturbing_blocker_dies/1,
- ssl_block_non_disturbing_blocker_dies/1]).
+-export([
+ pssl_mod_alias/1,
+ ossl_mod_alias/1,
+ essl_mod_alias/1,
+
+ pssl_mod_actions/1,
+ ossl_mod_actions/1,
+ essl_mod_actions/1,
+
+ pssl_mod_security/1,
+ ossl_mod_security/1,
+ essl_mod_security/1,
+
+ pssl_mod_auth/1,
+ ossl_mod_auth/1,
+ essl_mod_auth/1,
+
+ pssl_mod_auth_api/1,
+ ossl_mod_auth_api/1,
+ essl_mod_auth_api/1,
+
+ pssl_mod_auth_mnesia_api/1,
+ ossl_mod_auth_mnesia_api/1,
+ essl_mod_auth_mnesia_api/1,
+
+ pssl_mod_htaccess/1,
+ ossl_mod_htaccess/1,
+ essl_mod_htaccess/1,
+
+ pssl_mod_cgi/1,
+ ossl_mod_cgi/1,
+ essl_mod_cgi/1,
+
+ pssl_mod_esi/1,
+ ossl_mod_esi/1,
+ essl_mod_esi/1,
+
+ pssl_mod_get/1,
+ ossl_mod_get/1,
+ essl_mod_get/1,
+
+ pssl_mod_head/1,
+ ossl_mod_head/1,
+ essl_mod_head/1,
+
+ pssl_mod_all/1,
+ ossl_mod_all/1,
+ essl_mod_all/1,
+
+ pssl_load_light/1,
+ ossl_load_light/1,
+ essl_load_light/1,
+
+ pssl_load_medium/1,
+ ossl_load_medium/1,
+ essl_load_medium/1,
+
+ pssl_load_heavy/1,
+ ossl_load_heavy/1,
+ essl_load_heavy/1,
+
+ pssl_dos_hostname/1,
+ ossl_dos_hostname/1,
+ essl_dos_hostname/1,
+
+ pssl_time_test/1,
+ ossl_time_test/1,
+ essl_time_test/1,
+
+ pssl_restart_no_block/1,
+ ossl_restart_no_block/1,
+ essl_restart_no_block/1,
+
+ pssl_restart_disturbing_block/1,
+ ossl_restart_disturbing_block/1,
+ essl_restart_disturbing_block/1,
+
+ pssl_restart_non_disturbing_block/1,
+ ossl_restart_non_disturbing_block/1,
+ essl_restart_non_disturbing_block/1,
+
+ pssl_block_disturbing_idle/1,
+ ossl_block_disturbing_idle/1,
+ essl_block_disturbing_idle/1,
+
+ pssl_block_non_disturbing_idle/1,
+ ossl_block_non_disturbing_idle/1,
+ essl_block_non_disturbing_idle/1,
+
+ pssl_block_503/1,
+ ossl_block_503/1,
+ essl_block_503/1,
+
+ pssl_block_disturbing_active/1,
+ ossl_block_disturbing_active/1,
+ essl_block_disturbing_active/1,
+
+ pssl_block_non_disturbing_active/1,
+ ossl_block_non_disturbing_active/1,
+ essl_block_non_disturbing_active/1,
+
+ pssl_block_disturbing_active_timeout_not_released/1,
+ ossl_block_disturbing_active_timeout_not_released/1,
+ essl_block_disturbing_active_timeout_not_released/1,
+
+ pssl_block_disturbing_active_timeout_released/1,
+ ossl_block_disturbing_active_timeout_released/1,
+ essl_block_disturbing_active_timeout_released/1,
+
+ pssl_block_non_disturbing_active_timeout_not_released/1,
+ ossl_block_non_disturbing_active_timeout_not_released/1,
+ essl_block_non_disturbing_active_timeout_not_released/1,
+
+ pssl_block_non_disturbing_active_timeout_released/1,
+ ossl_block_non_disturbing_active_timeout_released/1,
+ essl_block_non_disturbing_active_timeout_released/1,
+
+ pssl_block_disturbing_blocker_dies/1,
+ ossl_block_disturbing_blocker_dies/1,
+ essl_block_disturbing_blocker_dies/1,
+
+ pssl_block_non_disturbing_blocker_dies/1,
+ ossl_block_non_disturbing_blocker_dies/1,
+ essl_block_non_disturbing_blocker_dies/1
+ ]).
%%% HTTP 1.1 tests
-export([ip_host/1, ip_chunked/1, ip_expect/1, ip_range/1,
@@ -103,8 +235,8 @@
%% Seconds before successful auths timeout.
-define(AUTH_TIMEOUT,5).
--record(httpd_user, {user_name, password, user_data}).
--record(httpd_group,{group_name, userlist}).
+-record(httpd_user, {user_name, password, user_data}).
+-record(httpd_group, {group_name, userlist}).
%%--------------------------------------------------------------------
@@ -197,9 +329,9 @@ init_per_testcase2(Case, Config) ->
"~n Config: ~p"
"~n", [?MODULE, Case, Config]),
- IpNormal = integer_to_list(?IP_PORT) ++ ".conf",
- IpHtacess = integer_to_list(?IP_PORT) ++ "htacess.conf",
- SslNormal = integer_to_list(?SSL_PORT) ++ ".conf",
+ IpNormal = integer_to_list(?IP_PORT) ++ ".conf",
+ IpHtacess = integer_to_list(?IP_PORT) ++ "htacess.conf",
+ SslNormal = integer_to_list(?SSL_PORT) ++ ".conf",
SslHtacess = integer_to_list(?SSL_PORT) ++ "htacess.conf",
DataDir = ?config(data_dir, Config),
@@ -210,8 +342,8 @@ init_per_testcase2(Case, Config) ->
"~n DataDir: ~p"
"~n", [?MODULE, Case, SuiteTopDir, DataDir]),
- TcTopDir = filename:join(SuiteTopDir, Case),
- ?line ok = file:make_dir(TcTopDir),
+ TcTopDir = filename:join(SuiteTopDir, Case),
+ ?line ok = file:make_dir(TcTopDir),
io:format(user, "~w:init_per_testcase2(~w) -> "
"~n TcTopDir: ~p"
@@ -267,9 +399,21 @@ init_per_testcase2(Case, Config) ->
%% To be used by SSL test cases
io:format(user, "~w:init_per_testcase2(~w) -> ssl testcase setups~n",
[?MODULE, Case]),
- create_config([{port, ?SSL_PORT}, {sock_type, ssl} | NewConfig],
+ SocketType =
+ case atom_to_list(Case) of
+ [X, $s, $s, $l | _] ->
+ case X of
+ $p -> ssl;
+ $o -> ossl;
+ $e -> essl
+ end;
+ _ ->
+ ssl
+ end,
+
+ create_config([{port, ?SSL_PORT}, {sock_type, SocketType} | NewConfig],
normal_acess, SslNormal),
- create_config([{port, ?SSL_PORT}, {sock_type, ssl} | NewConfig],
+ create_config([{port, ?SSL_PORT}, {sock_type, SocketType} | NewConfig],
mod_htaccess, SslHtacess),
%% To be used by IPv6 test cases. Case-clause is so that
@@ -300,8 +444,14 @@ init_per_testcase3(Case, Config) ->
io:format(user, "~w:init_per_testcase3(~w) -> entry with"
"~n Config: ~p", [?MODULE, Case, Config]),
+
+%% %% Create a new fresh node to be used by the server in this test-case
+
+%% NodeName = list_to_atom(atom_to_list(Case) ++ "_httpd"),
+%% Node = inets_test_lib:start_node(NodeName),
+
%% Clean up (we do not want this clean up in end_per_testcase
- %% if init_per_testcase crases for some testcase it will
+ %% if init_per_testcase crashes for some testcase it will
%% have contaminated the environment and there will be no clean up.)
%% This init can take a few different paths so that one crashes
%% does not mean that all invocations will.
@@ -310,15 +460,26 @@ init_per_testcase3(Case, Config) ->
application:stop(inets),
application:stop(ssl),
cleanup_mnesia(),
-
- %% TraceLevel = max,
- TraceLevel = 70,
- TraceDest = io,
- inets:enable_trace(TraceLevel, TraceDest),
+ %% Set trace
+ case lists:reverse(atom_to_list(Case)) of
+ "tset_emit" ++ _Rest -> % test-cases ending with time_test
+ io:format(user, "~w:init_per_testcase3(~w) -> disabling trace",
+ [?MODULE, Case]),
+ inets:disable_trace();
+ _ ->
+ %% TraceLevel = max,
+ io:format(user, "~w:init_per_testcase3(~w) -> enabling trace",
+ [?MODULE, Case]),
+ TraceLevel = 70,
+ TraceDest = io,
+ inets:enable_trace(TraceLevel, TraceDest, httpd)
+ end,
+
%% Start initialization
io:format(user, "~w:init_per_testcase3(~w) -> start init",
[?MODULE, Case]),
+
Dog = test_server:timetrap(inets_test_lib:minutes(10)),
NewConfig = lists:keydelete(watchdog, 1, Config),
@@ -351,22 +512,35 @@ init_per_testcase3(Case, Config) ->
filename:join(TcTopDir,
integer_to_list(?IP_PORT) ++ ".conf")}]),
Rest;
- "ssl_mod_htaccess" ->
+
+ [X, $s, $s, $l, $_, $m, $o, $d, $_, $h, $t, $a, $c, $c, $e, $s, $s] ->
+ SslTag =
+ case X of
+ $p -> ssl; % plain
+ $o -> ossl; % OpenSSL based ssl
+ $e -> essl % Erlang based ssl
+ end,
case inets_test_lib:start_http_server_ssl(
filename:join(TcTopDir,
integer_to_list(?SSL_PORT) ++
- "htacess.conf")) of
+ "htacess.conf"), SslTag) of
ok ->
"mod_htaccess";
Other ->
error_logger:info_report("Other: ~p~n", [Other]),
{skip, "SSL does not seem to be supported"}
end;
- "ssl_" ++ Rest ->
+ [X, $s, $s, $l, $_ | Rest] ->
+ SslTag =
+ case X of
+ $p -> ssl;
+ $o -> ossl;
+ $e -> essl
+ end,
case inets_test_lib:start_http_server_ssl(
filename:join(TcTopDir,
integer_to_list(?SSL_PORT) ++
- ".conf")) of
+ ".conf"), SslTag) of
ok ->
Rest;
Other ->
@@ -431,6 +605,7 @@ end_per_testcase2(Case, Config) ->
application:unset_env(inets, services),
application:stop(inets),
application:stop(ssl),
+ application:stop(crypto), % used by the new ssl (essl test cases)
cleanup_mnesia(),
io:format(user, "~w:end_per_testcase2(~w) -> done~n",
[?MODULE, Case]),
@@ -461,6 +636,9 @@ ip(suite) ->
ip_load_heavy,
ip_dos_hostname,
ip_time_test,
+ ip_restart_no_block,
+ ip_restart_disturbing_block,
+ ip_restart_non_disturbing_block,
ip_block_disturbing_idle,
ip_block_non_disturbing_idle,
ip_block_503,
@@ -471,10 +649,7 @@ ip(suite) ->
ip_block_non_disturbing_active_timeout_not_released,
ip_block_non_disturbing_active_timeout_released,
ip_block_disturbing_blocker_dies,
- ip_block_non_disturbing_blocker_dies,
- ip_restart_no_block,
- ip_restart_disturbing_block,
- ip_restart_non_disturbing_block
+ ip_block_non_disturbing_blocker_dies
].
%%-------------------------------------------------------------------------
@@ -482,39 +657,124 @@ ssl(doc) ->
["HTTP test using SSL"];
ssl(suite) ->
[
- ssl_mod_alias,
- ssl_mod_actions,
- ssl_mod_security,
- ssl_mod_auth,
- ssl_mod_auth_api,
- ssl_mod_auth_mnesia_api,
- ssl_mod_htaccess,
- ssl_mod_cgi,
- ssl_mod_esi,
- ssl_mod_get,
- ssl_mod_head,
- ssl_mod_all,
- ssl_load_light,
- ssl_load_medium,
- ssl_load_heavy,
- ssl_dos_hostname,
- ssl_time_test,
- ssl_restart_no_block,
- ssl_restart_disturbing_block,
- ssl_restart_non_disturbing_block,
- ssl_block_disturbing_idle,
- ssl_block_non_disturbing_idle,
- ssl_block_503,
- ssl_block_disturbing_active,
- ssl_block_non_disturbing_active,
- ssl_block_disturbing_active_timeout_not_released,
- ssl_block_disturbing_active_timeout_released,
- ssl_block_non_disturbing_active_timeout_not_released,
- ssl_block_non_disturbing_active_timeout_released,
- ssl_block_disturbing_blocker_dies,
- ssl_block_non_disturbing_blocker_dies
+ pssl,
+ ossl,
+ essl
].
+
+pssl(doc) ->
+ ["HTTP test using SSL - using old way of configuring SSL"];
+pssl(suite) ->
+ [
+ pssl_mod_alias,
+ pssl_mod_actions,
+ pssl_mod_security,
+ pssl_mod_auth,
+ pssl_mod_auth_api,
+ pssl_mod_auth_mnesia_api,
+ pssl_mod_htaccess,
+ pssl_mod_cgi,
+ pssl_mod_esi,
+ pssl_mod_get,
+ pssl_mod_head,
+ pssl_mod_all,
+ pssl_load_light,
+ pssl_load_medium,
+ pssl_load_heavy,
+ pssl_dos_hostname,
+ pssl_time_test,
+ pssl_restart_no_block,
+ pssl_restart_disturbing_block,
+ pssl_restart_non_disturbing_block,
+ pssl_block_disturbing_idle,
+ pssl_block_non_disturbing_idle,
+ pssl_block_503,
+ pssl_block_disturbing_active,
+ pssl_block_non_disturbing_active,
+ pssl_block_disturbing_active_timeout_not_released,
+ pssl_block_disturbing_active_timeout_released,
+ pssl_block_non_disturbing_active_timeout_not_released,
+ pssl_block_non_disturbing_active_timeout_released,
+ pssl_block_disturbing_blocker_dies,
+ pssl_block_non_disturbing_blocker_dies
+ ].
+
+ossl(doc) ->
+ ["HTTP test using SSL - using new way of configuring usage of old SSL"];
+ossl(suite) ->
+ [
+ ossl_mod_alias,
+ ossl_mod_actions,
+ ossl_mod_security,
+ ossl_mod_auth,
+ ossl_mod_auth_api,
+ ossl_mod_auth_mnesia_api,
+ ossl_mod_htaccess,
+ ossl_mod_cgi,
+ ossl_mod_esi,
+ ossl_mod_get,
+ ossl_mod_head,
+ ossl_mod_all,
+ ossl_load_light,
+ ossl_load_medium,
+ ossl_load_heavy,
+ ossl_dos_hostname,
+ ossl_time_test,
+ ossl_restart_no_block,
+ ossl_restart_disturbing_block,
+ ossl_restart_non_disturbing_block,
+ ossl_block_disturbing_idle,
+ ossl_block_non_disturbing_idle,
+ ossl_block_503,
+ ossl_block_disturbing_active,
+ ossl_block_non_disturbing_active,
+ ossl_block_disturbing_active_timeout_not_released,
+ ossl_block_disturbing_active_timeout_released,
+ ossl_block_non_disturbing_active_timeout_not_released,
+ ossl_block_non_disturbing_active_timeout_released,
+ ossl_block_disturbing_blocker_dies,
+ ossl_block_non_disturbing_blocker_dies
+ ].
+
+essl(doc) ->
+ ["HTTP test using SSL - using new way of configuring usage of new SSL"];
+essl(suite) ->
+ [
+ essl_mod_alias,
+ essl_mod_actions,
+ essl_mod_security,
+ essl_mod_auth,
+ essl_mod_auth_api,
+ essl_mod_auth_mnesia_api,
+ essl_mod_htaccess,
+ essl_mod_cgi,
+ essl_mod_esi,
+ essl_mod_get,
+ essl_mod_head,
+ essl_mod_all,
+ essl_load_light,
+ essl_load_medium,
+ essl_load_heavy,
+ essl_dos_hostname,
+ essl_time_test,
+ essl_restart_no_block,
+ essl_restart_disturbing_block,
+ essl_restart_non_disturbing_block,
+ essl_block_disturbing_idle,
+ essl_block_non_disturbing_idle,
+ essl_block_503,
+ essl_block_disturbing_active,
+ essl_block_non_disturbing_active,
+ essl_block_disturbing_active_timeout_not_released,
+ essl_block_disturbing_active_timeout_released,
+ essl_block_non_disturbing_active_timeout_not_released,
+ essl_block_non_disturbing_active_timeout_released,
+ essl_block_disturbing_blocker_dies,
+ essl_block_non_disturbing_blocker_dies
+ ].
+
+
%%-------------------------------------------------------------------------
http_1_1_ip(doc) ->
["HTTP/1.1"];
@@ -721,6 +981,8 @@ ip_load_heavy(Config) when is_list(Config) ->
?config(node, Config),
get_nof_clients(ip_comm, heavy)),
ok.
+
+
%%-------------------------------------------------------------------------
ip_dos_hostname(doc) ->
["Denial Of Service (DOS) attack test case"];
@@ -730,6 +992,8 @@ ip_dos_hostname(Config) when is_list(Config) ->
dos_hostname(ip_comm, ?IP_PORT, ?config(host, Config),
?config(node, Config), ?MAX_HEADER_SIZE),
ok.
+
+
%%-------------------------------------------------------------------------
ip_time_test(doc) ->
[""];
@@ -966,363 +1230,1072 @@ ip_restart_non_disturbing_block(Config) when is_list(Config) ->
ok.
%%-------------------------------------------------------------------------
-ssl_mod_alias(doc) ->
- ["Module test: mod_alias"];
-ssl_mod_alias(suite) ->
+
+pssl_mod_alias(doc) ->
+ ["Module test: mod_alias - old SSL config"];
+pssl_mod_alias(suite) ->
+ [];
+pssl_mod_alias(Config) when is_list(Config) ->
+ ssl_mod_alias(ssl, Config).
+
+ossl_mod_alias(doc) ->
+ ["Module test: mod_alias - using new of configure old SSL"];
+ossl_mod_alias(suite) ->
+ [];
+ossl_mod_alias(Config) when is_list(Config) ->
+ ssl_mod_alias(ossl, Config).
+
+essl_mod_alias(doc) ->
+ ["Module test: mod_alias - using new of configure new SSL"];
+essl_mod_alias(suite) ->
[];
-ssl_mod_alias(Config) when is_list(Config) ->
- httpd_mod:alias(ssl, ?SSL_PORT,
+essl_mod_alias(Config) when is_list(Config) ->
+ ssl_mod_alias(essl, Config).
+
+
+ssl_mod_alias(Tag, Config) ->
+ httpd_mod:alias(Tag, ?SSL_PORT,
?config(host, Config), ?config(node, Config)),
ok.
+
+
%%-------------------------------------------------------------------------
-ssl_mod_actions(doc) ->
- ["Module test: mod_actions"];
-ssl_mod_actions(suite) ->
+
+pssl_mod_actions(doc) ->
+ ["Module test: mod_actions - old SSL config"];
+pssl_mod_actions(suite) ->
[];
-ssl_mod_actions(Config) when is_list(Config) ->
- httpd_mod:actions(ssl, ?SSL_PORT,
- ?config(host, Config), ?config(node, Config)),
+pssl_mod_actions(Config) when is_list(Config) ->
+ ssl_mod_actions(ssl, Config).
+
+ossl_mod_actions(doc) ->
+ ["Module test: mod_actions - using new of configure old SSL"];
+ossl_mod_actions(suite) ->
+ [];
+ossl_mod_actions(Config) when is_list(Config) ->
+ ssl_mod_actions(ossl, Config).
+
+essl_mod_actions(doc) ->
+ ["Module test: mod_actions - using new of configure new SSL"];
+essl_mod_actions(suite) ->
+ [];
+essl_mod_actions(Config) when is_list(Config) ->
+ ssl_mod_actions(essl, Config).
+
+
+ssl_mod_actions(Tag, Config) ->
+ httpd_mod:actions(Tag,
+ ?SSL_PORT,
+ ?config(host, Config),
+ ?config(node, Config)),
ok.
+
+
%%-------------------------------------------------------------------------
-ssl_mod_security(doc) ->
- ["Module test: mod_security"];
-ssl_mod_security(suite) ->
+
+pssl_mod_security(doc) ->
+ ["Module test: mod_security - old SSL config"];
+pssl_mod_security(suite) ->
[];
-ssl_mod_security(Config) when is_list(Config) ->
+pssl_mod_security(Config) when is_list(Config) ->
+ ssl_mod_security(ssl, Config).
+
+ossl_mod_security(doc) ->
+ ["Module test: mod_security - using new of configure old SSL"];
+ossl_mod_security(suite) ->
+ [];
+ossl_mod_security(Config) when is_list(Config) ->
+ ssl_mod_security(ossl, Config).
+
+essl_mod_security(doc) ->
+ ["Module test: mod_security - using new of configure new SSL"];
+essl_mod_security(suite) ->
+ [];
+essl_mod_security(Config) when is_list(Config) ->
+ ssl_mod_security(essl, Config).
+
+ssl_mod_security(Tag, Config) ->
ServerRoot = ?config(server_root, Config),
- httpd_mod:security(ServerRoot, ssl, ?SSL_PORT,
- ?config(host, Config), ?config(node, Config)),
+ httpd_mod:security(ServerRoot,
+ Tag,
+ ?SSL_PORT,
+ ?config(host, Config),
+ ?config(node, Config)),
ok.
+
+
%%-------------------------------------------------------------------------
-ssl_mod_auth(doc) ->
- ["Module test: mod_auth"];
-ssl_mod_auth(suite) ->
+
+pssl_mod_auth(doc) ->
+ ["Module test: mod_auth - old SSL config"];
+pssl_mod_auth(suite) ->
[];
-ssl_mod_auth(Config) when is_list(Config) ->
- httpd_mod:auth(ssl, ?SSL_PORT,
- ?config(host, Config), ?config(node, Config)),
+pssl_mod_auth(Config) when is_list(Config) ->
+ ssl_mod_auth(ssl, Config).
+
+ossl_mod_auth(doc) ->
+ ["Module test: mod_auth - using new of configure old SSL"];
+ossl_mod_auth(suite) ->
+ [];
+ossl_mod_auth(Config) when is_list(Config) ->
+ ssl_mod_auth(ossl, Config).
+
+essl_mod_auth(doc) ->
+ ["Module test: mod_auth - using new of configure new SSL"];
+essl_mod_auth(suite) ->
+ [];
+essl_mod_auth(Config) when is_list(Config) ->
+ ssl_mod_auth(essl, Config).
+
+ssl_mod_auth(Tag, Config) ->
+ httpd_mod:auth(Tag,
+ ?SSL_PORT,
+ ?config(host, Config),
+ ?config(node, Config)),
ok.
+
+
%%-------------------------------------------------------------------------
-ssl_mod_auth_api(doc) ->
- ["Module test: mod_auth"];
-ssl_mod_auth_api(suite) ->
+
+pssl_mod_auth_api(doc) ->
+ ["Module test: mod_auth - old SSL config"];
+pssl_mod_auth_api(suite) ->
+ [];
+pssl_mod_auth_api(Config) when is_list(Config) ->
+ ssl_mod_auth_api(ssl, Config).
+
+ossl_mod_auth_api(doc) ->
+ ["Module test: mod_auth - using new of configure old SSL"];
+ossl_mod_auth_api(suite) ->
+ [];
+ossl_mod_auth_api(Config) when is_list(Config) ->
+ ssl_mod_auth_api(ossl, Config).
+
+essl_mod_auth_api(doc) ->
+ ["Module test: mod_auth - using new of configure new SSL"];
+essl_mod_auth_api(suite) ->
[];
-ssl_mod_auth_api(Config) when is_list(Config) ->
+essl_mod_auth_api(Config) when is_list(Config) ->
+ ssl_mod_auth_api(essl, Config).
+
+ssl_mod_auth_api(Tag, Config) ->
ServerRoot = ?config(server_root, Config),
- Host = ?config(host, Config),
- Node = ?config(node, Config),
- httpd_mod:auth_api(ServerRoot, "", ssl, ?SSL_PORT, Host, Node),
- httpd_mod:auth_api(ServerRoot, "dets_", ssl, ?SSL_PORT, Host, Node),
- httpd_mod:auth_api(ServerRoot, "mnesia_", ssl, ?SSL_PORT, Host, Node),
+ Host = ?config(host, Config),
+ Node = ?config(node, Config),
+ httpd_mod:auth_api(ServerRoot, "", Tag, ?SSL_PORT, Host, Node),
+ httpd_mod:auth_api(ServerRoot, "dets_", Tag, ?SSL_PORT, Host, Node),
+ httpd_mod:auth_api(ServerRoot, "mnesia_", Tag, ?SSL_PORT, Host, Node),
ok.
+
%%-------------------------------------------------------------------------
-ssl_mod_auth_mnesia_api(doc) ->
- ["Module test: mod_auth_mnesia_api"];
-ssl_mod_auth_mnesia_api(suite) ->
+
+pssl_mod_auth_mnesia_api(doc) ->
+ ["Module test: mod_auth_mnesia_api - old SSL config"];
+pssl_mod_auth_mnesia_api(suite) ->
[];
-ssl_mod_auth_mnesia_api(Config) when is_list(Config) ->
- httpd_mod:auth_mnesia_api(ssl, ?SSL_PORT,
- ?config(host, Config), ?config(node, Config)),
+pssl_mod_auth_mnesia_api(Config) when is_list(Config) ->
+ ssl_mod_auth_mnesia_api(ssl, Config).
+
+ossl_mod_auth_mnesia_api(doc) ->
+ ["Module test: mod_auth_mnesia_api - using new of configure old SSL"];
+ossl_mod_auth_mnesia_api(suite) ->
+ [];
+ossl_mod_auth_mnesia_api(Config) when is_list(Config) ->
+ ssl_mod_auth_mnesia_api(ossl, Config).
+
+essl_mod_auth_mnesia_api(doc) ->
+ ["Module test: mod_auth_mnesia_api - using new of configure new SSL"];
+essl_mod_auth_mnesia_api(suite) ->
+ [];
+essl_mod_auth_mnesia_api(Config) when is_list(Config) ->
+ ssl_mod_auth_mnesia_api(essl, Config).
+
+ssl_mod_auth_mnesia_api(Tag, Config) ->
+ httpd_mod:auth_mnesia_api(Tag,
+ ?SSL_PORT,
+ ?config(host, Config),
+ ?config(node, Config)),
ok.
+
+
%%-------------------------------------------------------------------------
-ssl_mod_htaccess(doc) ->
- ["Module test: mod_htaccess"];
-ssl_mod_htaccess(suite) ->
+
+pssl_mod_htaccess(doc) ->
+ ["Module test: mod_htaccess - old SSL config"];
+pssl_mod_htaccess(suite) ->
[];
-ssl_mod_htaccess(Config) when is_list(Config) ->
- httpd_mod:htaccess(ssl, ?SSL_PORT,
- ?config(host, Config), ?config(node, Config)),
+pssl_mod_htaccess(Config) when is_list(Config) ->
+ ssl_mod_htaccess(ssl, Config).
+
+ossl_mod_htaccess(doc) ->
+ ["Module test: mod_htaccess - using new of configure old SSL"];
+ossl_mod_htaccess(suite) ->
+ [];
+ossl_mod_htaccess(Config) when is_list(Config) ->
+ ssl_mod_htaccess(ossl, Config).
+
+essl_mod_htaccess(doc) ->
+ ["Module test: mod_htaccess - using new of configure new SSL"];
+essl_mod_htaccess(suite) ->
+ [];
+essl_mod_htaccess(Config) when is_list(Config) ->
+ ssl_mod_htaccess(essl, Config).
+
+ssl_mod_htaccess(Tag, Config) ->
+ httpd_mod:htaccess(Tag,
+ ?SSL_PORT,
+ ?config(host, Config),
+ ?config(node, Config)),
ok.
+
+
%%-------------------------------------------------------------------------
-ssl_mod_cgi(doc) ->
- ["Module test: mod_cgi"];
-ssl_mod_cgi(suite) ->
+
+pssl_mod_cgi(doc) ->
+ ["Module test: mod_cgi - old SSL config"];
+pssl_mod_cgi(suite) ->
+ [];
+pssl_mod_cgi(Config) when is_list(Config) ->
+ ssl_mod_cgi(ssl, Config).
+
+ossl_mod_cgi(doc) ->
+ ["Module test: mod_cgi - using new of configure old SSL"];
+ossl_mod_cgi(suite) ->
+ [];
+ossl_mod_cgi(Config) when is_list(Config) ->
+ ssl_mod_cgi(ossl, Config).
+
+essl_mod_cgi(doc) ->
+ ["Module test: mod_cgi - using new of configure new SSL"];
+essl_mod_cgi(suite) ->
[];
-ssl_mod_cgi(Config) when is_list(Config) ->
+essl_mod_cgi(Config) when is_list(Config) ->
+ ssl_mod_cgi(essl, Config).
+
+ssl_mod_cgi(Tag, Config) ->
case test_server:os_type() of
vxworks ->
{skip, cgi_not_supported_on_vxwoks};
_ ->
- httpd_mod:cgi(ssl, ?SSL_PORT,
- ?config(host, Config), ?config(node, Config)),
+ httpd_mod:cgi(Tag,
+ ?SSL_PORT,
+ ?config(host, Config),
+ ?config(node, Config)),
ok
end.
+
+
%%-------------------------------------------------------------------------
-ssl_mod_esi(doc) ->
- ["Module test: mod_esi"];
-ssl_mod_esi(suite) ->
+
+pssl_mod_esi(doc) ->
+ ["Module test: mod_esi - old SSL config"];
+pssl_mod_esi(suite) ->
[];
-ssl_mod_esi(Config) when is_list(Config) ->
- httpd_mod:esi(ssl, ?SSL_PORT,
- ?config(host, Config), ?config(node, Config)),
+pssl_mod_esi(Config) when is_list(Config) ->
+ ssl_mod_esi(ssl, Config).
+
+ossl_mod_esi(doc) ->
+ ["Module test: mod_esi - using new of configure old SSL"];
+ossl_mod_esi(suite) ->
+ [];
+ossl_mod_esi(Config) when is_list(Config) ->
+ ssl_mod_esi(ossl, Config).
+
+essl_mod_esi(doc) ->
+ ["Module test: mod_esi - using new of configure new SSL"];
+essl_mod_esi(suite) ->
+ [];
+essl_mod_esi(Config) when is_list(Config) ->
+ ssl_mod_esi(essl, Config).
+
+ssl_mod_esi(Tag, Config) ->
+ httpd_mod:esi(Tag,
+ ?SSL_PORT,
+ ?config(host, Config),
+ ?config(node, Config)),
ok.
+
%%-------------------------------------------------------------------------
-ssl_mod_get(doc) ->
- ["Module test: mod_get"];
-ssl_mod_get(suite) ->
+
+pssl_mod_get(doc) ->
+ ["Module test: mod_get - old SSL config"];
+pssl_mod_get(suite) ->
[];
-ssl_mod_get(Config) when is_list(Config) ->
- httpd_mod:get(ssl, ?SSL_PORT,
- ?config(host, Config), ?config(node, Config)),
+pssl_mod_get(Config) when is_list(Config) ->
+ ssl_mod_get(ssl, Config).
+
+ossl_mod_get(doc) ->
+ ["Module test: mod_get - using new of configure old SSL"];
+ossl_mod_get(suite) ->
+ [];
+ossl_mod_get(Config) when is_list(Config) ->
+ ssl_mod_get(ossl, Config).
+
+essl_mod_get(doc) ->
+ ["Module test: mod_get - using new of configure new SSL"];
+essl_mod_get(suite) ->
+ [];
+essl_mod_get(Config) when is_list(Config) ->
+ ssl_mod_get(essl, Config).
+
+ssl_mod_get(Tag, Config) ->
+ httpd_mod:get(Tag,
+ ?SSL_PORT,
+ ?config(host, Config),
+ ?config(node, Config)),
ok.
+
+
%%-------------------------------------------------------------------------
-ssl_mod_head(doc) ->
- ["Module test: mod_head"];
-ssl_mod_head(suite) ->
+
+pssl_mod_head(doc) ->
+ ["Module test: mod_head - old SSL config"];
+pssl_mod_head(suite) ->
[];
-ssl_mod_head(Config) when is_list(Config) ->
- httpd_mod:head(ssl, ?SSL_PORT,
- ?config(host, Config), ?config(node, Config)),
+pssl_mod_head(Config) when is_list(Config) ->
+ ssl_mod_head(ssl, Config).
+
+ossl_mod_head(doc) ->
+ ["Module test: mod_head - using new of configure old SSL"];
+ossl_mod_head(suite) ->
+ [];
+ossl_mod_head(Config) when is_list(Config) ->
+ ssl_mod_head(ossl, Config).
+
+essl_mod_head(doc) ->
+ ["Module test: mod_head - using new of configure new SSL"];
+essl_mod_head(suite) ->
+ [];
+essl_mod_head(Config) when is_list(Config) ->
+ ssl_mod_head(essl, Config).
+
+ssl_mod_head(Tag, Config) ->
+ httpd_mod:head(Tag,
+ ?SSL_PORT,
+ ?config(host, Config),
+ ?config(node, Config)),
ok.
+
+
%%-------------------------------------------------------------------------
-ssl_mod_all(doc) ->
- ["All modules test"];
-ssl_mod_all(suite) ->
+
+pssl_mod_all(doc) ->
+ ["All modules test - old SSL config"];
+pssl_mod_all(suite) ->
[];
-ssl_mod_all(Config) when is_list(Config) ->
- httpd_mod:all(ssl, ?SSL_PORT,
- ?config(host, Config), ?config(node, Config)),
+pssl_mod_all(Config) when is_list(Config) ->
+ ssl_mod_all(ssl, Config).
+
+ossl_mod_all(doc) ->
+ ["All modules test - using new of configure old SSL"];
+ossl_mod_all(suite) ->
+ [];
+ossl_mod_all(Config) when is_list(Config) ->
+ ssl_mod_all(ossl, Config).
+
+essl_mod_all(doc) ->
+ ["All modules test - using new of configure new SSL"];
+essl_mod_all(suite) ->
+ [];
+essl_mod_all(Config) when is_list(Config) ->
+ ssl_mod_all(essl, Config).
+
+ssl_mod_all(Tag, Config) ->
+ httpd_mod:all(Tag,
+ ?SSL_PORT,
+ ?config(host, Config),
+ ?config(node, Config)),
ok.
+
%%-------------------------------------------------------------------------
-ssl_load_light(doc) ->
- ["Test light load"];
-ssl_load_light(suite) ->
+
+pssl_load_light(doc) ->
+ ["Test light load - old SSL config"];
+pssl_load_light(suite) ->
+ [];
+pssl_load_light(Config) when is_list(Config) ->
+ ssl_load_light(ssl, Config).
+
+ossl_load_light(doc) ->
+ ["Test light load - using new of configure old SSL"];
+ossl_load_light(suite) ->
+ [];
+ossl_load_light(Config) when is_list(Config) ->
+ ssl_load_light(ossl, Config).
+
+essl_load_light(doc) ->
+ ["Test light load - using new of configure new SSL"];
+essl_load_light(suite) ->
[];
-ssl_load_light(Config) when is_list(Config) ->
- httpd_load:load_test(ssl, ?SSL_PORT, ?config(host, Config),
+essl_load_light(Config) when is_list(Config) ->
+ ssl_load_light(essl, Config).
+
+ssl_load_light(Tag, Config) ->
+ httpd_load:load_test(Tag,
+ ?SSL_PORT,
+ ?config(host, Config),
?config(node, Config),
get_nof_clients(ssl, light)),
ok.
+
%%-------------------------------------------------------------------------
-ssl_load_medium(doc) ->
- ["Test medium load"];
-ssl_load_medium(suite) ->
+
+pssl_load_medium(doc) ->
+ ["Test medium load - old SSL config"];
+pssl_load_medium(suite) ->
+ [];
+pssl_load_medium(Config) when is_list(Config) ->
+ ssl_load_medium(ssl, Config).
+
+ossl_load_medium(doc) ->
+ ["Test medium load - using new of configure old SSL"];
+ossl_load_medium(suite) ->
[];
-ssl_load_medium(Config) when is_list(Config) ->
+ossl_load_medium(Config) when is_list(Config) ->
+ ssl_load_medium(ossl, Config).
+
+essl_load_medium(doc) ->
+ ["Test medium load - using new of configure new SSL"];
+essl_load_medium(suite) ->
+ [];
+essl_load_medium(Config) when is_list(Config) ->
+ ssl_load_medium(essl, Config).
+
+ssl_load_medium(Tag, Config) ->
%% <CONDITIONAL-SKIP>
Skippable = [win32],
Condition = fun() -> ?OS_BASED_SKIP(Skippable) end,
?NON_PC_TC_MAYBE_SKIP(Config, Condition),
%% </CONDITIONAL-SKIP>
- httpd_load:load_test(ssl, ?SSL_PORT, ?config(host, Config),
+ httpd_load:load_test(Tag,
+ ?SSL_PORT,
+ ?config(host, Config),
?config(node, Config),
get_nof_clients(ssl, medium)),
ok.
+
%%-------------------------------------------------------------------------
-ssl_load_heavy(doc) ->
- ["Test heavy load"];
-ssl_load_heavy(suite) ->
+
+pssl_load_heavy(doc) ->
+ ["Test heavy load - old SSL config"];
+pssl_load_heavy(suite) ->
[];
-ssl_load_heavy(Config) when is_list(Config) ->
+pssl_load_heavy(Config) when is_list(Config) ->
+ ssl_load_heavy(ssl, Config).
+
+ossl_load_heavy(doc) ->
+ ["Test heavy load - using new of configure old SSL"];
+ossl_load_heavy(suite) ->
+ [];
+ossl_load_heavy(Config) when is_list(Config) ->
+ ssl_load_heavy(ossl, Config).
+
+essl_load_heavy(doc) ->
+ ["Test heavy load - using new of configure new SSL"];
+essl_load_heavy(suite) ->
+ [];
+essl_load_heavy(Config) when is_list(Config) ->
+ ssl_load_heavy(essl, Config).
+
+ssl_load_heavy(Tag, Config) ->
%% <CONDITIONAL-SKIP>
Skippable = [win32],
Condition = fun() -> ?OS_BASED_SKIP(Skippable) end,
?NON_PC_TC_MAYBE_SKIP(Config, Condition),
%% </CONDITIONAL-SKIP>
- httpd_load:load_test(ssl, ?SSL_PORT, ?config(host, Config),
+ httpd_load:load_test(Tag,
+ ?SSL_PORT,
+ ?config(host, Config),
?config(node, Config),
get_nof_clients(ssl, heavy)),
ok.
+
%%-------------------------------------------------------------------------
-ssl_dos_hostname(doc) ->
- ["Denial Of Service (DOS) attack test case"];
-ssl_dos_hostname(suite) ->
+
+pssl_dos_hostname(doc) ->
+ ["Denial Of Service (DOS) attack test case - old SSL config"];
+pssl_dos_hostname(suite) ->
[];
-ssl_dos_hostname(Config) when is_list(Config) ->
- dos_hostname(ssl, ?SSL_PORT, ?config(host, Config),
- ?config(node, Config), ?MAX_HEADER_SIZE),
+pssl_dos_hostname(Config) when is_list(Config) ->
+ ssl_dos_hostname(ssl, Config).
+
+ossl_dos_hostname(doc) ->
+ ["Denial Of Service (DOS) attack test case - using new of configure old SSL"];
+ossl_dos_hostname(suite) ->
+ [];
+ossl_dos_hostname(Config) when is_list(Config) ->
+ ssl_dos_hostname(ossl, Config).
+
+essl_dos_hostname(doc) ->
+ ["Denial Of Service (DOS) attack test case - using new of configure new SSL"];
+essl_dos_hostname(suite) ->
+ [];
+essl_dos_hostname(Config) when is_list(Config) ->
+ ssl_dos_hostname(essl, Config).
+
+ssl_dos_hostname(Tag, Config) ->
+ dos_hostname(Tag,
+ ?SSL_PORT,
+ ?config(host, Config),
+ ?config(node, Config),
+ ?MAX_HEADER_SIZE),
ok.
+
+
%%-------------------------------------------------------------------------
-ssl_time_test(doc) ->
- [""];
-ssl_time_test(suite) ->
+
+pssl_time_test(doc) ->
+ ["old SSL config"];
+pssl_time_test(suite) ->
[];
-ssl_time_test(Config) when is_list(Config) ->
+pssl_time_test(Config) when is_list(Config) ->
+ ssl_time_test(ssl, Config).
+
+ossl_time_test(doc) ->
+ ["using new of configure old SSL"];
+ossl_time_test(suite) ->
+ [];
+ossl_time_test(Config) when is_list(Config) ->
+ ssl_time_test(ossl, Config).
+
+essl_time_test(doc) ->
+ ["using new of configure new SSL"];
+essl_time_test(suite) ->
+ [];
+essl_time_test(Config) when is_list(Config) ->
+ ssl_time_test(essl, Config).
+
+ssl_time_test(Tag, Config) when is_list(Config) ->
%% <CONDITIONAL-SKIP>
- Condition = fun() -> true end,
+ FreeBSDVersionVerify =
+ fun() ->
+ case os:version() of
+ {7, 1, _} -> % We only have one such machine, so...
+ true;
+ _ ->
+ false
+ end
+ end,
+ Skippable = [win32, {unix, [{freebsd, FreeBSDVersionVerify}]}],
+ Condition = fun() -> ?OS_BASED_SKIP(Skippable) end,
?NON_PC_TC_MAYBE_SKIP(Config, Condition),
%% </CONDITIONAL-SKIP>
- httpd_time_test:t(ssl, ?config(host, Config), ?SSL_PORT),
+ httpd_time_test:t(Tag,
+ ?config(host, Config),
+ ?SSL_PORT),
ok.
+
%%-------------------------------------------------------------------------
-ssl_block_503(doc) ->
+
+pssl_block_503(doc) ->
["Check that you will receive status code 503 when the server"
- " is blocked and 200 when its not blocked."];
-ssl_block_503(suite) ->
+ " is blocked and 200 when its not blocked - old SSL config."];
+pssl_block_503(suite) ->
[];
-ssl_block_503(Config) when is_list(Config) ->
- httpd_block:block_503(ssl, ?SSL_PORT, ?config(host, Config),
+pssl_block_503(Config) when is_list(Config) ->
+ ssl_block_503(ssl, Config).
+
+ossl_block_503(doc) ->
+ ["Check that you will receive status code 503 when the server"
+ " is blocked and 200 when its not blocked - using new of configure old SSL."];
+ossl_block_503(suite) ->
+ [];
+ossl_block_503(Config) when is_list(Config) ->
+ ssl_block_503(ossl, Config).
+
+essl_block_503(doc) ->
+ ["Check that you will receive status code 503 when the server"
+ " is blocked and 200 when its not blocked - using new of configure new SSL."];
+essl_block_503(suite) ->
+ [];
+essl_block_503(Config) when is_list(Config) ->
+ ssl_block_503(essl, Config).
+
+ssl_block_503(Tag, Config) ->
+ httpd_block:block_503(Tag,
+ ?SSL_PORT,
+ ?config(host, Config),
?config(node, Config)),
ok.
+
+
%%-------------------------------------------------------------------------
-ssl_block_disturbing_idle(doc) ->
+
+pssl_block_disturbing_idle(doc) ->
["Check that you can block/unblock an idle server. The strategy "
- "distribing does not really make a difference in this case."];
-ssl_block_disturbing_idle(suite) ->
+     "disturbing does not really make a difference in this case."
+ "Old SSL config"];
+pssl_block_disturbing_idle(suite) ->
[];
-ssl_block_disturbing_idle(Config) when is_list(Config) ->
- httpd_block:block_disturbing_idle(ssl, ?SSL_PORT,
+pssl_block_disturbing_idle(Config) when is_list(Config) ->
+ ssl_block_disturbing_idle(ssl, Config).
+
+ossl_block_disturbing_idle(doc) ->
+ ["Check that you can block/unblock an idle server. The strategy "
+     "disturbing does not really make a difference in this case."
+ "Using new of configure old SSL"];
+ossl_block_disturbing_idle(suite) ->
+ [];
+ossl_block_disturbing_idle(Config) when is_list(Config) ->
+ ssl_block_disturbing_idle(ossl, Config).
+
+essl_block_disturbing_idle(doc) ->
+ ["Check that you can block/unblock an idle server. The strategy "
+     "disturbing does not really make a difference in this case."
+ "Using new of configure new SSL"];
+essl_block_disturbing_idle(suite) ->
+ [];
+essl_block_disturbing_idle(Config) when is_list(Config) ->
+ ssl_block_disturbing_idle(essl, Config).
+
+ssl_block_disturbing_idle(Tag, Config) ->
+ httpd_block:block_disturbing_idle(Tag,
+ ?SSL_PORT,
?config(host, Config),
?config(node, Config)),
ok.
+
+
%%-------------------------------------------------------------------------
-ssl_block_non_disturbing_idle(doc) ->
+
+pssl_block_non_disturbing_idle(doc) ->
["Check that you can block/unblock an idle server. The strategy "
- "non distribing does not really make a difference in this case."];
-ssl_block_non_disturbing_idle(suite) ->
+     "non disturbing does not really make a difference in this case."
+ "Old SSL config"];
+pssl_block_non_disturbing_idle(suite) ->
+ [];
+pssl_block_non_disturbing_idle(Config) when is_list(Config) ->
+ ssl_block_non_disturbing_idle(ssl, Config).
+
+ossl_block_non_disturbing_idle(doc) ->
+ ["Check that you can block/unblock an idle server. The strategy "
+     "non disturbing does not really make a difference in this case."
+ "Using new of configure old SSL"];
+ossl_block_non_disturbing_idle(suite) ->
+ [];
+ossl_block_non_disturbing_idle(Config) when is_list(Config) ->
+ ssl_block_non_disturbing_idle(ossl, Config).
+
+essl_block_non_disturbing_idle(doc) ->
+ ["Check that you can block/unblock an idle server. The strategy "
+     "non disturbing does not really make a difference in this case."
+ "Using new of configure new SSL"];
+essl_block_non_disturbing_idle(suite) ->
[];
-ssl_block_non_disturbing_idle(Config) when is_list(Config) ->
- httpd_block:block_non_disturbing_idle(ssl, ?SSL_PORT,
+essl_block_non_disturbing_idle(Config) when is_list(Config) ->
+ ssl_block_non_disturbing_idle(essl, Config).
+
+ssl_block_non_disturbing_idle(Tag, Config) ->
+ httpd_block:block_non_disturbing_idle(Tag,
+ ?SSL_PORT,
?config(host, Config),
?config(node, Config)),
ok.
+
+
%%-------------------------------------------------------------------------
-ssl_block_disturbing_active(doc) ->
+
+pssl_block_disturbing_active(doc) ->
["Check that you can block/unblock an active server. The strategy "
- "distribing means ongoing requests should be terminated."];
-ssl_block_disturbing_active(suite) ->
+ "distribing means ongoing requests should be terminated."
+ "Old SSL config"];
+pssl_block_disturbing_active(suite) ->
+ [];
+pssl_block_disturbing_active(Config) when is_list(Config) ->
+ ssl_block_disturbing_active(ssl, Config).
+
+ossl_block_disturbing_active(doc) ->
+ ["Check that you can block/unblock an active server. The strategy "
+ "distribing means ongoing requests should be terminated."
+ "Using new of configure old SSL"];
+ossl_block_disturbing_active(suite) ->
[];
-ssl_block_disturbing_active(Config) when is_list(Config) ->
- httpd_block:block_disturbing_active(ssl, ?SSL_PORT,
+ossl_block_disturbing_active(Config) when is_list(Config) ->
+ ssl_block_disturbing_active(ossl, Config).
+
+essl_block_disturbing_active(doc) ->
+ ["Check that you can block/unblock an active server. The strategy "
+ "distribing means ongoing requests should be terminated."
+ "Using new of configure new SSL"];
+essl_block_disturbing_active(suite) ->
+ [];
+essl_block_disturbing_active(Config) when is_list(Config) ->
+ ssl_block_disturbing_active(essl, Config).
+
+ssl_block_disturbing_active(Tag, Config) ->
+ httpd_block:block_disturbing_active(Tag,
+ ?SSL_PORT,
?config(host, Config),
?config(node, Config)),
ok.
+
+
%%-------------------------------------------------------------------------
-ssl_block_non_disturbing_active(doc) ->
+
+pssl_block_non_disturbing_active(doc) ->
["Check that you can block/unblock an idle server. The strategy "
- "non distribing means the ongoing requests should be compleated."];
-ssl_block_non_disturbing_active(suite) ->
+ "non distribing means the ongoing requests should be compleated."
+ "Old SSL config"];
+pssl_block_non_disturbing_active(suite) ->
+ [];
+pssl_block_non_disturbing_active(Config) when is_list(Config) ->
+ ssl_block_non_disturbing_active(ssl, Config).
+
+ossl_block_non_disturbing_active(doc) ->
+ ["Check that you can block/unblock an idle server. The strategy "
+ "non distribing means the ongoing requests should be compleated."
+ "Using new of configure old SSL"];
+ossl_block_non_disturbing_active(suite) ->
[];
-ssl_block_non_disturbing_active(Config) when is_list(Config) ->
- httpd_block:block_non_disturbing_idle(ssl, ?SSL_PORT,
+ossl_block_non_disturbing_active(Config) when is_list(Config) ->
+ ssl_block_non_disturbing_active(ossl, Config).
+
+essl_block_non_disturbing_active(doc) ->
+ ["Check that you can block/unblock an idle server. The strategy "
+ "non distribing means the ongoing requests should be compleated."
+ "Using new of configure new SSL"];
+essl_block_non_disturbing_active(suite) ->
+ [];
+essl_block_non_disturbing_active(Config) when is_list(Config) ->
+ ssl_block_non_disturbing_active(essl, Config).
+
+ssl_block_non_disturbing_active(Tag, Config) ->
+ httpd_block:block_non_disturbing_idle(Tag,
+ ?SSL_PORT,
?config(host, Config),
?config(node, Config)),
ok.
+
%%-------------------------------------------------------------------------
-ssl_block_disturbing_active_timeout_not_released(doc) ->
+
+pssl_block_disturbing_active_timeout_not_released(doc) ->
["Check that you can block an active server. The strategy "
"distribing means ongoing requests should be compleated"
- "if the timeout does not occur."];
-ssl_block_disturbing_active_timeout_not_released(suite) ->
+ "if the timeout does not occur."
+ "Old SSL config"];
+pssl_block_disturbing_active_timeout_not_released(suite) ->
[];
-ssl_block_disturbing_active_timeout_not_released(Config)
+pssl_block_disturbing_active_timeout_not_released(Config)
when is_list(Config) ->
- httpd_block:
- block_disturbing_active_timeout_not_released(ssl,
- ?SSL_PORT,
- ?config(host,
- Config),
- ?config(node,
- Config)),
+ ssl_block_disturbing_active_timeout_not_released(ssl, Config).
+
+ossl_block_disturbing_active_timeout_not_released(doc) ->
+ ["Check that you can block an active server. The strategy "
+ "distribing means ongoing requests should be compleated"
+ "if the timeout does not occur."
+ "Using new of configure old SSL"];
+ossl_block_disturbing_active_timeout_not_released(suite) ->
+ [];
+ossl_block_disturbing_active_timeout_not_released(Config)
+ when is_list(Config) ->
+ ssl_block_disturbing_active_timeout_not_released(ossl, Config).
+
+essl_block_disturbing_active_timeout_not_released(doc) ->
+ ["Check that you can block an active server. The strategy "
+ "distribing means ongoing requests should be compleated"
+ "if the timeout does not occur."
+ "Using new of configure new SSL"];
+essl_block_disturbing_active_timeout_not_released(suite) ->
+ [];
+essl_block_disturbing_active_timeout_not_released(Config)
+ when is_list(Config) ->
+ ssl_block_disturbing_active_timeout_not_released(essl, Config).
+
+ssl_block_disturbing_active_timeout_not_released(Tag, Config) ->
+ Port = ?SSL_PORT,
+ Host = ?config(host, Config),
+ Node = ?config(node, Config),
+ httpd_block:block_disturbing_active_timeout_not_released(Tag,
+ Port, Host, Node),
ok.
+
+
%%-------------------------------------------------------------------------
-ssl_block_disturbing_active_timeout_released(doc) ->
+
+pssl_block_disturbing_active_timeout_released(doc) ->
["Check that you can block an active server. The strategy "
"distribing means ongoing requests should be terminated when"
- "the timeout occurs."];
-ssl_block_disturbing_active_timeout_released(suite) ->
+ "the timeout occurs."
+ "Old SSL config"];
+pssl_block_disturbing_active_timeout_released(suite) ->
[];
-ssl_block_disturbing_active_timeout_released(Config)
+pssl_block_disturbing_active_timeout_released(Config)
when is_list(Config) ->
- httpd_block:block_disturbing_active_timeout_released(ssl,
- ?SSL_PORT,
- ?config(host,
- Config),
- ?config(node,
- Config)),
+ ssl_block_disturbing_active_timeout_released(ssl, Config).
+
+ossl_block_disturbing_active_timeout_released(doc) ->
+ ["Check that you can block an active server. The strategy "
+ "distribing means ongoing requests should be terminated when"
+ "the timeout occurs."
+ "Using new of configure old SSL"];
+ossl_block_disturbing_active_timeout_released(suite) ->
+ [];
+ossl_block_disturbing_active_timeout_released(Config)
+ when is_list(Config) ->
+ ssl_block_disturbing_active_timeout_released(ossl, Config).
+
+essl_block_disturbing_active_timeout_released(doc) ->
+ ["Check that you can block an active server. The strategy "
+ "distribing means ongoing requests should be terminated when"
+ "the timeout occurs."
+ "Using new of configure new SSL"];
+essl_block_disturbing_active_timeout_released(suite) ->
+ [];
+essl_block_disturbing_active_timeout_released(Config)
+ when is_list(Config) ->
+ ssl_block_disturbing_active_timeout_released(essl, Config).
+
+ssl_block_disturbing_active_timeout_released(Tag, Config) ->
+ Port = ?SSL_PORT,
+ Host = ?config(host, Config),
+ Node = ?config(node, Config),
+ httpd_block:block_disturbing_active_timeout_released(Tag,
+ Port,
+ Host,
+ Node),
ok.
+
%%-------------------------------------------------------------------------
-ssl_block_non_disturbing_active_timeout_not_released(doc) ->
+
+pssl_block_non_disturbing_active_timeout_not_released(doc) ->
["Check that you can block an active server. The strategy "
- "non non distribing means ongoing requests should be completed."];
-ssl_block_non_disturbing_active_timeout_not_released(suite) ->
+ "non non distribing means ongoing requests should be completed."
+ "Old SSL config"];
+pssl_block_non_disturbing_active_timeout_not_released(suite) ->
[];
-ssl_block_non_disturbing_active_timeout_not_released(Config)
+pssl_block_non_disturbing_active_timeout_not_released(Config)
when is_list(Config) ->
- httpd_block:
- block_non_disturbing_active_timeout_not_released(ssl,
- ?SSL_PORT,
- ?config(host,
- Config),
- ?config(node,
- Config)),
+ ssl_block_non_disturbing_active_timeout_not_released(ssl, Config).
+
+ossl_block_non_disturbing_active_timeout_not_released(doc) ->
+ ["Check that you can block an active server. The strategy "
+ "non non distribing means ongoing requests should be completed."
+ "Using new of configure old SSL"];
+ossl_block_non_disturbing_active_timeout_not_released(suite) ->
+ [];
+ossl_block_non_disturbing_active_timeout_not_released(Config)
+ when is_list(Config) ->
+ ssl_block_non_disturbing_active_timeout_not_released(ossl, Config).
+
+essl_block_non_disturbing_active_timeout_not_released(doc) ->
+ ["Check that you can block an active server. The strategy "
+ "non non distribing means ongoing requests should be completed."
+ "Using new of configure new SSL"];
+essl_block_non_disturbing_active_timeout_not_released(suite) ->
+ [];
+essl_block_non_disturbing_active_timeout_not_released(Config)
+ when is_list(Config) ->
+ ssl_block_non_disturbing_active_timeout_not_released(essl, Config).
+
+ssl_block_non_disturbing_active_timeout_not_released(Tag, Config) ->
+ Port = ?SSL_PORT,
+ Host = ?config(host, Config),
+ Node = ?config(node, Config),
+ httpd_block:block_non_disturbing_active_timeout_not_released(Tag,
+ Port,
+ Host,
+ Node),
ok.
+
+
%%-------------------------------------------------------------------------
-ssl_block_non_disturbing_active_timeout_released(doc) ->
+
+pssl_block_non_disturbing_active_timeout_released(doc) ->
["Check that you can block an active server. The strategy "
- "non non distribing means ongoing requests should be completed. "
- "When the timeout occurs the block operation sohould be canceled." ];
-ssl_block_non_disturbing_active_timeout_released(suite) ->
+ "non distribing means ongoing requests should be completed. "
+ "When the timeout occurs the block operation sohould be canceled."
+ "Old SSL config"];
+pssl_block_non_disturbing_active_timeout_released(suite) ->
[];
-ssl_block_non_disturbing_active_timeout_released(Config)
+pssl_block_non_disturbing_active_timeout_released(Config)
when is_list(Config) ->
- httpd_block:
- block_non_disturbing_active_timeout_released(ssl,
- ?SSL_PORT,
- ?config(host,
- Config),
- ?config(node,
- Config)),
+ ssl_block_non_disturbing_active_timeout_released(ssl, Config).
+
+ossl_block_non_disturbing_active_timeout_released(doc) ->
+ ["Check that you can block an active server. The strategy "
+ "non distribing means ongoing requests should be completed. "
+ "When the timeout occurs the block operation sohould be canceled."
+ "Using new of configure old SSL"];
+ossl_block_non_disturbing_active_timeout_released(suite) ->
+ [];
+ossl_block_non_disturbing_active_timeout_released(Config)
+ when is_list(Config) ->
+ ssl_block_non_disturbing_active_timeout_released(ossl, Config).
+
+essl_block_non_disturbing_active_timeout_released(doc) ->
+ ["Check that you can block an active server. The strategy "
+ "non distribing means ongoing requests should be completed. "
+ "When the timeout occurs the block operation sohould be canceled."
+ "Using new of configure new SSL"];
+essl_block_non_disturbing_active_timeout_released(suite) ->
+ [];
+essl_block_non_disturbing_active_timeout_released(Config)
+ when is_list(Config) ->
+ ssl_block_non_disturbing_active_timeout_released(essl, Config).
+
+ssl_block_non_disturbing_active_timeout_released(Tag, Config)
+ when is_list(Config) ->
+ Port = ?SSL_PORT,
+ Host = ?config(host, Config),
+ Node = ?config(node, Config),
+ httpd_block:block_non_disturbing_active_timeout_released(Tag,
+ Port,
+ Host,
+ Node),
+
ok.
+
%%-------------------------------------------------------------------------
-ssl_block_disturbing_blocker_dies(doc) ->
+
+pssl_block_disturbing_blocker_dies(doc) ->
+ ["old SSL config"];
+pssl_block_disturbing_blocker_dies(suite) ->
[];
-ssl_block_disturbing_blocker_dies(suite) ->
+pssl_block_disturbing_blocker_dies(Config) when is_list(Config) ->
+ ssl_block_disturbing_blocker_dies(ssl, Config).
+
+ossl_block_disturbing_blocker_dies(doc) ->
+ ["using new of configure old SSL"];
+ossl_block_disturbing_blocker_dies(suite) ->
+ [];
+ossl_block_disturbing_blocker_dies(Config) when is_list(Config) ->
+ ssl_block_disturbing_blocker_dies(ossl, Config).
+
+essl_block_disturbing_blocker_dies(doc) ->
+ ["using new of configure new SSL"];
+essl_block_disturbing_blocker_dies(suite) ->
[];
-ssl_block_disturbing_blocker_dies(Config) when is_list(Config) ->
- httpd_block:disturbing_blocker_dies(ssl, ?SSL_PORT,
+essl_block_disturbing_blocker_dies(Config) when is_list(Config) ->
+ ssl_block_disturbing_blocker_dies(essl, Config).
+
+ssl_block_disturbing_blocker_dies(Tag, Config) ->
+ httpd_block:disturbing_blocker_dies(Tag,
+ ?SSL_PORT,
?config(host, Config),
?config(node, Config)),
ok.
+
+
%%-------------------------------------------------------------------------
-ssl_block_non_disturbing_blocker_dies(doc) ->
+
+pssl_block_non_disturbing_blocker_dies(doc) ->
+ ["old SSL config"];
+pssl_block_non_disturbing_blocker_dies(suite) ->
+ [];
+pssl_block_non_disturbing_blocker_dies(Config) when is_list(Config) ->
+ ssl_block_non_disturbing_blocker_dies(ssl, Config).
+
+ossl_block_non_disturbing_blocker_dies(doc) ->
+ ["using new of configure old SSL"];
+ossl_block_non_disturbing_blocker_dies(suite) ->
[];
-ssl_block_non_disturbing_blocker_dies(suite) ->
+ossl_block_non_disturbing_blocker_dies(Config) when is_list(Config) ->
+ ssl_block_non_disturbing_blocker_dies(ossl, Config).
+
+essl_block_non_disturbing_blocker_dies(doc) ->
+ ["using new of configure new SSL"];
+essl_block_non_disturbing_blocker_dies(suite) ->
[];
-ssl_block_non_disturbing_blocker_dies(Config) when is_list(Config) ->
- httpd_block:non_disturbing_blocker_dies(ssl, ?SSL_PORT,
+essl_block_non_disturbing_blocker_dies(Config) when is_list(Config) ->
+ ssl_block_non_disturbing_blocker_dies(essl, Config).
+
+ssl_block_non_disturbing_blocker_dies(Tag, Config) ->
+ httpd_block:non_disturbing_blocker_dies(Tag,
+ ?SSL_PORT,
?config(host, Config),
?config(node, Config)),
ok.
+
+
%%-------------------------------------------------------------------------
-ssl_restart_no_block(doc) ->
- [""];
-ssl_restart_no_block(suite) ->
+
+pssl_restart_no_block(doc) ->
+ ["old SSL config"];
+pssl_restart_no_block(suite) ->
+ [];
+pssl_restart_no_block(Config) when is_list(Config) ->
+ ssl_restart_no_block(ssl, Config).
+
+ossl_restart_no_block(doc) ->
+ ["using new of configure old SSL"];
+ossl_restart_no_block(suite) ->
+ [];
+ossl_restart_no_block(Config) when is_list(Config) ->
+ ssl_restart_no_block(ossl, Config).
+
+essl_restart_no_block(doc) ->
+ ["using new of configure new SSL"];
+essl_restart_no_block(suite) ->
[];
-ssl_restart_no_block(Config) when is_list(Config) ->
- httpd_block:restart_no_block(ssl, ?SSL_PORT, ?config(host, Config),
+essl_restart_no_block(Config) when is_list(Config) ->
+ ssl_restart_no_block(essl, Config).
+
+ssl_restart_no_block(Tag, Config) ->
+ httpd_block:restart_no_block(Tag,
+ ?SSL_PORT,
+ ?config(host, Config),
?config(node, Config)),
ok.
+
+
%%-------------------------------------------------------------------------
-ssl_restart_disturbing_block(doc) ->
- [""];
-ssl_restart_disturbing_block(suite) ->
+
+pssl_restart_disturbing_block(doc) ->
+ ["old SSL config"];
+pssl_restart_disturbing_block(suite) ->
+ [];
+pssl_restart_disturbing_block(Config) when is_list(Config) ->
+ ssl_restart_disturbing_block(ssl, Config).
+
+ossl_restart_disturbing_block(doc) ->
+ ["using new of configure old SSL"];
+ossl_restart_disturbing_block(suite) ->
+ [];
+ossl_restart_disturbing_block(Config) when is_list(Config) ->
+ ssl_restart_disturbing_block(ossl, Config).
+
+essl_restart_disturbing_block(doc) ->
+ ["using new of configure new SSL"];
+essl_restart_disturbing_block(suite) ->
[];
-ssl_restart_disturbing_block(Config) when is_list(Config) ->
+essl_restart_disturbing_block(Config) when is_list(Config) ->
+ ssl_restart_disturbing_block(essl, Config).
+
+ssl_restart_disturbing_block(Tag, Config) ->
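+    %% The condition below is true on Linux/PowerPC hosts running Fedora 7,
+    %% where this test case may be skipped (see ?NON_PC_TC_MAYBE_SKIP).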
%% <CONDITIONAL-SKIP>
Condition =
fun() ->
case os:type() of
{unix, linux} ->
- HW = string:strip(os:cmd("uname -m"), right, $\n),
- case HW of
+ case ?OSCMD("uname -m") of
"ppc" ->
- case inet:gethostname() of
- {ok, "peach"} ->
- true;
+ case file:read_file_info("/etc/fedora-release") of
+ {ok, _} ->
+ case ?OSCMD("awk '{print $2}' /etc/fedora-release") of
+ "release" ->
+ %% Fedora 7 and later
+ case ?OSCMD("awk '{print $3}' /etc/fedora-release") of
+ "7" ->
+ true;
+ _ ->
+ false
+ end;
+ _ ->
+ false
+ end;
_ ->
false
end;
@@ -1336,17 +2309,36 @@ ssl_restart_disturbing_block(Config) when is_list(Config) ->
?NON_PC_TC_MAYBE_SKIP(Config, Condition),
%% </CONDITIONAL-SKIP>
- httpd_block:restart_disturbing_block(ssl, ?SSL_PORT,
+ httpd_block:restart_disturbing_block(Tag, ?SSL_PORT,
?config(host, Config),
?config(node, Config)),
ok.
+
%%-------------------------------------------------------------------------
-ssl_restart_non_disturbing_block(doc) ->
- [""];
-ssl_restart_non_disturbing_block(suite) ->
+
+pssl_restart_non_disturbing_block(doc) ->
+ ["old SSL config"];
+pssl_restart_non_disturbing_block(suite) ->
[];
-ssl_restart_non_disturbing_block(Config) when is_list(Config) ->
+pssl_restart_non_disturbing_block(Config) when is_list(Config) ->
+ ssl_restart_non_disturbing_block(ssl, Config).
+
+ossl_restart_non_disturbing_block(doc) ->
+ ["using new of configure old SSL"];
+ossl_restart_non_disturbing_block(suite) ->
+ [];
+ossl_restart_non_disturbing_block(Config) when is_list(Config) ->
+ ssl_restart_non_disturbing_block(ossl, Config).
+
+essl_restart_non_disturbing_block(doc) ->
+ ["using new of configure new SSL"];
+essl_restart_non_disturbing_block(suite) ->
+ [];
+essl_restart_non_disturbing_block(Config) when is_list(Config) ->
+ ssl_restart_non_disturbing_block(essl, Config).
+
+ssl_restart_non_disturbing_block(Tag, Config) ->
%% <CONDITIONAL-SKIP>
Condition =
fun() ->
@@ -1371,11 +2363,13 @@ ssl_restart_non_disturbing_block(Config) when is_list(Config) ->
?NON_PC_TC_MAYBE_SKIP(Config, Condition),
%% </CONDITIONAL-SKIP>
- httpd_block:restart_non_disturbing_block(ssl, ?SSL_PORT,
- ?config(host, Config),
- ?config(node, Config)),
+ httpd_block:restart_non_disturbing_block(Tag,
+ ?SSL_PORT,
+ ?config(host, Config),
+ ?config(node, Config)),
ok.
+
%%-------------------------------------------------------------------------
ip_host(doc) ->
["Control that the server accepts/rejects requests with/ without host"];
@@ -1665,17 +2659,29 @@ dos_hostname(Type, Port, Host, Node, Max) ->
%% Other help functions
create_config(Config, Access, FileName) ->
ServerRoot = ?config(server_root, Config),
- TcTopDir = ?config(tc_top_dir, Config),
- Port = ?config(port, Config),
- Type = ?config(sock_type, Config),
- Host = ?config(host, Config),
- Mods = io_lib:format("~p", [httpd_mod]),
- Funcs = io_lib:format("~p", [ssl_password_cb]),
- MaxHdrSz = io_lib:format("~p", [256]),
- MaxHdrAct = io_lib:format("~p", [close]),
+ TcTopDir = ?config(tc_top_dir, Config),
+ Port = ?config(port, Config),
+ Type = ?config(sock_type, Config),
+ Host = ?config(host, Config),
+ Mods = io_lib:format("~p", [httpd_mod]),
+ Funcs = io_lib:format("~p", [ssl_password_cb]),
+ MaxHdrSz = io_lib:format("~p", [256]),
+ MaxHdrAct = io_lib:format("~p", [close]),
+
+ io:format(user,
+ "create_config -> "
+ "~n ServerRoot: ~p"
+ "~n TcTopDir: ~p"
+ "~n Type: ~p"
+ "~n Port: ~p"
+ "~n Host: ~p"
+ "~n", [ServerRoot, TcTopDir, Port, Type, Host]),
+
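+    %% SSL directives are needed for all three SSL socket types: ssl (old
+    %% config format) and ossl/essl (old and new SSL implementation, new
+    %% config format).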
SSL =
- case Type of
- ssl ->
+ if
+ (Type =:= ssl) orelse
+ (Type =:= ossl) orelse
+ (Type =:= essl) ->
[cline(["SSLCertificateFile ",
filename:join(ServerRoot, "ssl/ssl_server.pem")]),
cline(["SSLCertificateKeyFile ",
@@ -1686,25 +2692,25 @@ create_config(Config, Access, FileName) ->
cline(["SSLPasswordCallbackFunction ", Funcs]),
cline(["SSLVerifyClient 0"]),
cline(["SSLVerifyDepth 1"])];
- _ ->
+ true ->
[]
end,
- Mod_order = case Access of
- mod_htaccess ->
- "Modules mod_alias mod_htaccess mod_auth "
- "mod_security "
- "mod_responsecontrol mod_trace mod_esi "
- "mod_actions mod_cgi mod_include mod_dir "
- "mod_range mod_get "
- "mod_head mod_log mod_disk_log";
- _ ->
- "Modules mod_alias mod_auth mod_security "
- "mod_responsecontrol mod_trace mod_esi "
- "mod_actions mod_cgi mod_include mod_dir "
- "mod_range mod_get "
- "mod_head mod_log mod_disk_log"
- end,
-
+ ModOrder = case Access of
+ mod_htaccess ->
+ "Modules mod_alias mod_htaccess mod_auth "
+ "mod_security "
+ "mod_responsecontrol mod_trace mod_esi "
+ "mod_actions mod_cgi mod_include mod_dir "
+ "mod_range mod_get "
+ "mod_head mod_log mod_disk_log";
+ _ ->
+ "Modules mod_alias mod_auth mod_security "
+ "mod_responsecontrol mod_trace mod_esi "
+ "mod_actions mod_cgi mod_include mod_dir "
+ "mod_range mod_get "
+ "mod_head mod_log mod_disk_log"
+ end,
+
%% The test suite currently does not handle an explicit BindAddress.
%% They assume any has been used, that is Addr is always set to undefined!
@@ -1720,7 +2726,7 @@ create_config(Config, Access, FileName) ->
cline(["Port ", integer_to_list(Port)]),
cline(["ServerName ", Host]),
cline(["SocketType ", atom_to_list(Type)]),
- cline([Mod_order]),
+ cline([ModOrder]),
%% cline(["LogFormat ", "erlang"]),
cline(["ServerAdmin [email protected]"]),
cline(["BindAddress ", BindAddress]),
@@ -1882,18 +2888,18 @@ start_mnesia(Node) ->
ok ->
ok;
Other ->
- test_server:fail({failed_to_cleanup_mnesia, Other})
+ tsf({failed_to_cleanup_mnesia, Other})
end,
- case rpc:call(Node, ?MODULE, setup_mnesia, []) of
+ case rpc:call(Node, ?MODULE, setup_mnesia, []) of
{atomic, ok} ->
ok;
Other2 ->
- test_server:fail({failed_to_setup_mnesia, Other2})
+ tsf({failed_to_setup_mnesia, Other2})
end,
ok.
setup_mnesia() ->
- setup_mnesia([node()]).
+ setup_mnesia([node()]).
setup_mnesia(Nodes) ->
ok = mnesia:create_schema(Nodes),
@@ -2029,20 +3035,20 @@ dos_hostname_request(Host) ->
get_nof_clients(Mode, Load) ->
get_nof_clients(test_server:os_type(), Mode, Load).
-get_nof_clients(vxworks, _, light) -> 1;
+get_nof_clients(vxworks, _, light) -> 1;
get_nof_clients(vxworks, ip_comm, medium) -> 3;
-get_nof_clients(vxworks, ssl, medium) -> 3;
+get_nof_clients(vxworks, ssl, medium) -> 3;
get_nof_clients(vxworks, ip_comm, heavy) -> 5;
-get_nof_clients(vxworks, ssl, heavy) -> 5;
-get_nof_clients(_, ip_comm, light) -> 5;
-get_nof_clients(_, ssl, light) -> 2;
-get_nof_clients(_, ip_comm, medium) -> 10;
-get_nof_clients(_, ssl, medium) -> 4;
-get_nof_clients(_, ip_comm, heavy) -> 20;
-get_nof_clients(_, ssl, heavy) -> 6.
+get_nof_clients(vxworks, ssl, heavy) -> 5;
+get_nof_clients(_, ip_comm, light) -> 5;
+get_nof_clients(_, ssl, light) -> 2;
+get_nof_clients(_, ip_comm, medium) -> 10;
+get_nof_clients(_, ssl, medium) -> 4;
+get_nof_clients(_, ip_comm, heavy) -> 20;
+get_nof_clients(_, ssl, heavy) -> 6.
%% Make a file 100 bytes long containing 012...9*10
-create_range_data(Path)->
+create_range_data(Path) ->
PathAndFileName=filename:join([Path,"range.txt"]),
file:write_file(PathAndFileName,list_to_binary(["12345678901234567890",
"12345678901234567890",
@@ -2079,3 +3085,6 @@ create_range_data(Path)->
%% {ok, Fd} = file:open(ConfigFile, [write]),
%% ok = file:write(Fd, lists:flatten(HttpConfig)),
%% ok = file:close(Fd).
+
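+%% Local shorthand used by the test cases above to fail via test_server:fail/1.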
+tsf(Reason) ->
+ test_server:fail(Reason).
diff --git a/lib/inets/test/httpd_SUITE_data/server_root/Makefile b/lib/inets/test/httpd_SUITE_data/server_root/Makefile
new file mode 100644
index 0000000000..d7a3231068
--- /dev/null
+++ b/lib/inets/test/httpd_SUITE_data/server_root/Makefile
@@ -0,0 +1,209 @@
+#
+# %CopyrightBegin%
+#
+# Copyright Ericsson AB 1997-2010. All Rights Reserved.
+#
+# The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved online at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# %CopyrightEnd%
+#
+#
+include $(ERL_TOP)/make/target.mk
+include $(ERL_TOP)/make/$(TARGET)/otp.mk
+
+# ----------------------------------------------------
+# Application version
+# ----------------------------------------------------
+include ../../vsn.mk
+VSN=$(INETS_VSN)
+
+# ----------------------------------------------------
+# Release directory specification
+# ----------------------------------------------------
+RELSYSDIR = $(RELEASE_PATH)/lib/inets-$(VSN)
+
+# ----------------------------------------------------
+# Target Specs
+# ----------------------------------------------------
+MODULE=
+
+AUTH_FILES = auth/group \
+ auth/passwd
+CGI_FILES = cgi-bin/printenv.sh
+CONF_FILES = conf/8080.conf \
+ conf/8888.conf \
+ conf/httpd.conf \
+ conf/ssl.conf \
+ conf/mime.types
+OPEN_FILES = htdocs/open/dummy.html
+MNESIA_OPEN_FILES = htdocs/mnesia_open/dummy.html
+MISC_FILES = htdocs/misc/friedrich.html \
+ htdocs/misc/oech.html
+SECRET_FILES = htdocs/secret/dummy.html
+MNESIA_SECRET_FILES = htdocs/mnesia_secret/dummy.html
+HTDOCS_FILES = htdocs/index.html \
+ htdocs/config.shtml \
+ htdocs/echo.shtml \
+ htdocs/exec.shtml \
+ htdocs/flastmod.shtml \
+ htdocs/fsize.shtml \
+ htdocs/include.shtml
+ICON_FILES = icons/README \
+ icons/a.gif \
+ icons/alert.black.gif \
+ icons/alert.red.gif \
+ icons/apache_pb.gif \
+ icons/back.gif \
+ icons/ball.gray.gif \
+ icons/ball.red.gif \
+ icons/binary.gif \
+ icons/binhex.gif \
+ icons/blank.gif \
+ icons/bomb.gif \
+ icons/box1.gif \
+ icons/box2.gif \
+ icons/broken.gif \
+ icons/burst.gif \
+ icons/button1.gif \
+ icons/button10.gif \
+ icons/button2.gif \
+ icons/button3.gif \
+ icons/button4.gif \
+ icons/button5.gif \
+ icons/button6.gif \
+ icons/button7.gif \
+ icons/button8.gif \
+ icons/button9.gif \
+ icons/buttonl.gif \
+ icons/buttonr.gif \
+ icons/c.gif \
+ icons/comp.blue.gif \
+ icons/comp.gray.gif \
+ icons/compressed.gif \
+ icons/continued.gif \
+ icons/dir.gif \
+ icons/down.gif \
+ icons/dvi.gif \
+ icons/f.gif \
+ icons/folder.gif \
+ icons/folder.open.gif \
+ icons/folder.sec.gif \
+ icons/forward.gif \
+ icons/generic.gif \
+ icons/generic.red.gif \
+ icons/generic.sec.gif \
+ icons/hand.right.gif \
+ icons/hand.up.gif \
+ icons/htdig.gif \
+ icons/icon.sheet.gif \
+ icons/image1.gif \
+ icons/image2.gif \
+ icons/image3.gif \
+ icons/index.gif \
+ icons/layout.gif \
+ icons/left.gif \
+ icons/link.gif \
+ icons/movie.gif \
+ icons/p.gif \
+ icons/patch.gif \
+ icons/pdf.gif \
+ icons/pie0.gif \
+ icons/pie1.gif \
+ icons/pie2.gif \
+ icons/pie3.gif \
+ icons/pie4.gif \
+ icons/pie5.gif \
+ icons/pie6.gif \
+ icons/pie7.gif \
+ icons/pie8.gif \
+ icons/portal.gif \
+ icons/poweredby.gif \
+ icons/ps.gif \
+ icons/quill.gif \
+ icons/right.gif \
+ icons/screw1.gif \
+ icons/screw2.gif \
+ icons/script.gif \
+ icons/sound1.gif \
+ icons/sound2.gif \
+ icons/sphere1.gif \
+ icons/sphere2.gif \
+ icons/star.gif \
+ icons/star_blank.gif \
+ icons/tar.gif \
+ icons/tex.gif \
+ icons/text.gif \
+ icons/transfer.gif \
+ icons/unknown.gif \
+ icons/up.gif \
+ icons/uu.gif \
+ icons/uuencoded.gif \
+ icons/world1.gif \
+ icons/world2.gif
+
+SSL_FILES = ssl/ssl_client.pem \
+ ssl/ssl_server.pem
+
+# ----------------------------------------------------
+# FLAGS
+# ----------------------------------------------------
+ERL_COMPILE_FLAGS +=
+
+# ----------------------------------------------------
+# Targets
+# ----------------------------------------------------
+
+debug opt:
+
+clean:
+
+docs:
+
+# ----------------------------------------------------
+# Release Target
+# ----------------------------------------------------
+include $(ERL_TOP)/make/otp_release_targets.mk
+
+release_spec: opt
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/auth
+ $(INSTALL_DATA) $(AUTH_FILES) $(RELSYSDIR)/examples/server_root/auth
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/cgi-bin
+ $(INSTALL_SCRIPT) $(CGI_FILES) $(RELSYSDIR)/examples/server_root/cgi-bin
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/conf
+ $(INSTALL_DATA) $(CONF_FILES) $(RELSYSDIR)/examples/server_root/conf
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/htdocs/open
+ $(INSTALL_DATA) $(OPEN_FILES) \
+ $(RELSYSDIR)/examples/server_root/htdocs/open
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/htdocs/mnesia_open
+ $(INSTALL_DATA) $(MNESIA_OPEN_FILES) \
+ $(RELSYSDIR)/examples/server_root/htdocs/mnesia_open
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/htdocs/misc
+ $(INSTALL_DATA) $(MISC_FILES) \
+ $(RELSYSDIR)/examples/server_root/htdocs/misc
+ $(INSTALL_DIR) \
+ $(RELSYSDIR)/examples/server_root/htdocs/secret/top_secret
+ $(INSTALL_DIR) \
+ $(RELSYSDIR)/examples/server_root/htdocs/mnesia_secret/top_secret
+ $(INSTALL_DATA) $(SECRET_FILES) \
+ $(RELSYSDIR)/examples/server_root/htdocs/secret
+ $(INSTALL_DATA) $(MNESIA_SECRET_FILES) \
+ $(RELSYSDIR)/examples/server_root/htdocs/mnesia_secret
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/htdocs
+ $(INSTALL_DATA) $(HTDOCS_FILES) $(RELSYSDIR)/examples/server_root/htdocs
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/icons
+ $(INSTALL_DATA) $(ICON_FILES) $(RELSYSDIR)/examples/server_root/icons
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/ssl
+ $(INSTALL_DATA) $(SSL_FILES) $(RELSYSDIR)/examples/server_root/ssl
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/logs
+
+release_docs_spec:
+
diff --git a/lib/inets/test/httpd_block.erl b/lib/inets/test/httpd_block.erl
index f967d8172a..ac1bf43ff5 100644
--- a/lib/inets/test/httpd_block.erl
+++ b/lib/inets/test/httpd_block.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2005-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2005-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%
@@ -36,6 +36,7 @@
]).
%% Help functions
+-export([httpd_block/3, httpd_block/4, httpd_unblock/2, httpd_restart/2]).
-export([do_block_server/4, do_block_nd_server/5, do_long_poll/6]).
-define(report(Label, Content),
@@ -47,18 +48,24 @@
%% Test cases starts here.
%%-------------------------------------------------------------------------
block_disturbing_idle(_Type, Port, Host, Node) ->
- unblocked = get_admin_state(Node, Host, Port),
+ io:format("block_disturbing_idle -> entry~n", []),
+ validate_admin_state(Node, Host, Port, unblocked),
block_server(Node, Host, Port),
- blocked = get_admin_state(Node, Host, Port),
+ validate_admin_state(Node, Host, Port, blocked),
unblock_server(Node, Host, Port),
- unblocked = get_admin_state(Node, Host, Port).
+ validate_admin_state(Node, Host, Port, unblocked),
+ io:format("block_disturbing_idle -> done~n", []),
+ ok.
+
%%--------------------------------------------------------------------
block_non_disturbing_idle(_Type, Port, Host, Node) ->
unblocked = get_admin_state(Node, Host, Port),
block_nd_server(Node, Host, Port),
blocked = get_admin_state(Node, Host, Port),
unblock_server(Node, Host, Port),
- unblocked = get_admin_state(Node, Host, Port).
+ unblocked = get_admin_state(Node, Host, Port),
+ ok.
+
%%--------------------------------------------------------------------
block_503(Type, Port, Host, Node) ->
Req = "GET / HTTP/1.0\r\ndummy-host.ericsson.se:\r\n\r\n",
@@ -76,6 +83,7 @@ block_503(Type, Port, Host, Node) ->
ok = httpd_test_lib:verify_request(Type, Host, Port, Node, Req,
[{statuscode, 200},
{version, "HTTP/1.0"}]).
+
%%--------------------------------------------------------------------
block_disturbing_active(Type, Port, Host, Node) ->
process_flag(trap_exit, true),
@@ -87,6 +95,7 @@ block_disturbing_active(Type, Port, Host, Node) ->
blocked = get_admin_state(Node, Host, Port),
process_flag(trap_exit, false),
ok.
+
%%--------------------------------------------------------------------
block_non_disturbing_active(Type, Port, Host, Node) ->
process_flag(trap_exit, true),
@@ -219,32 +228,91 @@ do_block_nd_server(Node, Host, Port, Timeout, Reply) ->
restart_server(Node, _Host, Port) ->
Addr = undefined,
- rpc:call(Node, httpd, restart, [Addr, Port]).
+ rpc:call(Node, ?MODULE, httpd_restart, [Addr, Port]).
+
block_server(Node, _Host, Port) ->
+ io:format("block_server -> entry~n", []),
Addr = undefined,
- rpc:call(Node, httpd, block, [Addr, Port]).
+ rpc:call(Node, ?MODULE, httpd_block, [Addr, Port, disturbing]).
+
block_server(Node, _Host, Port, Timeout) ->
Addr = undefined,
- rpc:call(Node, httpd, block, [Addr, Port, disturbing, Timeout]).
+ rpc:call(Node, ?MODULE, httpd_block, [Addr, Port, disturbing, Timeout]).
+
block_nd_server(Node, _Host, Port) ->
Addr = undefined,
- rpc:call(Node, httpd, block, [Addr, Port, non_disturbing]).
+ rpc:call(Node, ?MODULE, httpd_block, [Addr, Port, non_disturbing]).
block_nd_server(Node, _Host, Port, Timeout) ->
Addr = undefined,
- rpc:call(Node, httpd, block, [Addr, Port, non_disturbing, Timeout]).
+ rpc:call(Node, ?MODULE, httpd_block, [Addr, Port, non_disturbing, Timeout]).
unblock_server(Node, _Host, Port) ->
+ io:format("~p:~p:block_server -> entry~n", [node(),self()]),
Addr = undefined,
- rpc:call(Node, httpd, unblock, [Addr, Port]).
+ rpc:call(Node, ?MODULE, httpd_unblock, [Addr, Port]).
+
+
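+%% These helpers run on the server node (via rpc from the functions above):
+%% they look up the registered httpd manager for Addr/Port and drive it
+%% directly, returning {error, not_started} if no server is running.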
+httpd_block(Addr, Port, Mode) ->
+ io:format("~p:~p:httpd_block -> entry~n", [node(),self()]),
+ Name = make_name(Addr, Port),
+ case whereis(Name) of
+ Pid when is_pid(Pid) ->
+ httpd_manager:block(Pid, Mode);
+ _ ->
+ {error, not_started}
+ end.
+
+httpd_block(Addr, Port, Mode, Timeout) ->
+ Name = make_name(Addr, Port),
+ case whereis(Name) of
+ Pid when is_pid(Pid) ->
+ httpd_manager:block(Pid, Mode, Timeout);
+ _ ->
+ {error, not_started}
+ end.
+
+httpd_unblock(Addr, Port) ->
+ io:format("~p:~p:httpd_unblock -> entry~n", [node(),self()]),
+ Name = make_name(Addr, Port),
+ case whereis(Name) of
+ Pid when is_pid(Pid) ->
+ httpd_manager:unblock(Pid);
+ _ ->
+ {error, not_started}
+ end.
+
+httpd_restart(Addr, Port) ->
+ Name = make_name(Addr, Port),
+ case whereis(Name) of
+ Pid when is_pid(Pid) ->
+ httpd_manager:reload(Pid, undefined);
+ _ ->
+ {error, not_started}
+ end.
+
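+%% Registered name of the httpd manager process for this address/port.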
+make_name(Addr, Port) ->
+ httpd_util:make_name("httpd", Addr, Port).
-get_admin_state(Node,_Host,Port) ->
+get_admin_state(Node, _Host, Port) ->
Addr = undefined,
rpc:call(Node, httpd, get_admin_state, [Addr, Port]).
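+
+%% Fetch the admin state once and exit the test process if it does not match
+%% Expect; the unexpected value is included in the exit reason.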
+validate_admin_state(Node, Host, Port, Expect) ->
+ io:format("try validating server admin state: ~p~n", [Expect]),
+ case get_admin_state(Node, Host, Port) of
+ Expect ->
+ ok;
+ Unexpected ->
+ io:format("failed validating server admin state: ~p~n",
+ [Unexpected]),
+ exit({unexpected_admin_state, Unexpected, Expect})
+ end.
+
+
await_normal_process_exit(Pid, Name, Timeout) ->
receive
{'EXIT', Pid, normal} ->
@@ -260,6 +328,7 @@ await_normal_process_exit(Pid, Name, Timeout) ->
test_server:fail("timeout while waiting for " ++ Name)
end.
+
await_suite_failed_process_exit(Pid, Name, Timeout, Why) ->
receive
{'EXIT', Pid, {suite_failed, Why}} ->
diff --git a/lib/inets/test/httpd_mod.erl b/lib/inets/test/httpd_mod.erl
index b03f842e7c..f2c1fd6a65 100644
--- a/lib/inets/test/httpd_mod.erl
+++ b/lib/inets/test/httpd_mod.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2005-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2005-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%
@@ -40,6 +40,13 @@
%% Test cases starts here.
%%-------------------------------------------------------------------------
alias(Type, Port, Host, Node) ->
+%% io:format(user, "~w:alias -> entry with"
+%% "~n Type: ~p"
+%% "~n Port: ~p"
+%% "~n Host: ~p"
+%% "~n Node: ~p"
+%% "~n", [?MODULE, Type, Port, Host, Node]),
+
ok = httpd_test_lib:verify_request(Type, Host, Port, Node,
"GET /pics/icon.sheet.gif "
"HTTP/1.0\r\n\r\n",
@@ -82,14 +89,15 @@ actions(Type, Port, Host, Node) ->
%%-------------------------------------------------------------------------
security(ServerRoot, Type, Port, Host, Node) ->
- io:format(user, "~w:security -> entry with"
- "~n ServerRoot: ~p"
- "~n Type: ~p"
- "~n Port: ~p"
- "~n Host: ~p"
- "~n Node: ~p"
- "~n", [?MODULE, ServerRoot, Type, Port, Host, Node]),
+%% io:format(user, "~w:security -> entry with"
+%% "~n ServerRoot: ~p"
+%% "~n Type: ~p"
+%% "~n Port: ~p"
+%% "~n Host: ~p"
+%% "~n Node: ~p"
+%% "~n", [?MODULE, ServerRoot, Type, Port, Host, Node]),
+%% io:format(user, "~w:security -> register~n", [?MODULE]),
global:register_name(mod_security_test, self()), % Receive events
test_server:sleep(5000),
@@ -99,54 +107,71 @@ security(ServerRoot, Type, Port, Host, Node) ->
%% Test blocking / unblocking of users.
%% /open, require user one Aladdin
+%% io:format(user, "~w:security -> remove user~n", [?MODULE]),
remove_users(Node, ServerRoot, Host, Port, "open"),
+%% io:format(user, "~w:security -> auth request~n", [?MODULE]),
auth_request(Type, Host, Port, Node, "/open/", "one", "onePassword",
[{statuscode, 401}]),
+%% io:format(user, "~w:security -> await fail security event~n", [?MODULE]),
receive_security_event({event, auth_fail, Port, OpenDir,
[{user, "one"}, {password, "onePassword"}]},
Node, Port),
+%% io:format(user, "~w:security -> auth request~n", [?MODULE]),
auth_request(Type,Host,Port,Node,"/open/", "two", "twoPassword",
[{statuscode, 401}]),
+%% io:format(user, "~w:security -> await fail security event~n", [?MODULE]),
receive_security_event({event, auth_fail, Port, OpenDir,
[{user, "two"}, {password, "twoPassword"}]},
Node, Port),
+%% io:format(user, "~w:security -> auth request~n", [?MODULE]),
auth_request(Type, Host, Port, Node,"/open/", "Aladdin",
"AladdinPassword", [{statuscode, 401}]),
+%% io:format(user, "~w:security -> await fail security event~n", [?MODULE]),
receive_security_event({event, auth_fail, Port, OpenDir,
[{user, "Aladdin"},
{password, "AladdinPassword"}]},
Node, Port),
+%% io:format(user, "~w:security -> add users~n", [?MODULE]),
add_user(Node, ServerRoot, Port, "open", "one", "onePassword", []),
add_user(Node, ServerRoot, Port, "open", "two", "twoPassword", []),
+%% io:format(user, "~w:security -> auth request~n", [?MODULE]),
auth_request(Type, Host, Port, Node,"/open/", "one", "WrongPassword",
[{statuscode, 401}]),
+%% io:format(user, "~w:security -> await fail security event~n", [?MODULE]),
receive_security_event({event, auth_fail, Port, OpenDir,
[{user, "one"}, {password, "WrongPassword"}]},
Node, Port),
+%% io:format(user, "~w:security -> auth request~n", [?MODULE]),
auth_request(Type, Host, Port, Node,"/open/", "one", "WrongPassword",
[{statuscode, 401}]),
+%% io:format(user, "~w:security -> await fail security event~n", [?MODULE]),
receive_security_event({event, auth_fail, Port, OpenDir,
[{user, "one"}, {password, "WrongPassword"}]},
Node, Port),
+%% io:format(user, "~w:security -> await block security event~n", [?MODULE]),
receive_security_event({event, user_block, Port, OpenDir,
[{user, "one"}]}, Node, Port),
+%% io:format(user, "~w:security -> unregister~n", [?MODULE]),
global:unregister_name(mod_security_test), % No more events.
+%% io:format(user, "~w:security -> auth request~n", [?MODULE]),
auth_request(Type, Host, Port, Node,"/open/", "one", "WrongPassword",
[{statuscode, 401}]),
+%% io:format(user, "~w:security -> auth request~n", [?MODULE]),
auth_request(Type, Host, Port, Node,"/open/", "one", "onePassword",
[{statuscode, 403}]),
%% User "one" should be blocked now..
%% [{"one",_, Port, OpenDir,_}] = list_blocked_users(Node,Port),
+%% io:format(user, "~w:security -> list blocked users~n", [?MODULE]),
case list_blocked_users(Node, Port) of
[{"one",_, Port, OpenDir,_}] ->
ok;
@@ -156,35 +181,54 @@ security(ServerRoot, Type, Port, Host, Node) ->
exit({unexpected_blocked, Blocked})
end,
+%% io:format(user, "~w:security -> list blocked users~n", [?MODULE]),
[{"one",_, Port, OpenDir,_}] = list_blocked_users(Node,Port,OpenDir),
+%% io:format(user, "~w:security -> unblock user~n", [?MODULE]),
true = unblock_user(Node, "one", Port, OpenDir),
%% User "one" should not be blocked any more..
+%% io:format(user, "~w:security -> list blocked users~n", [?MODULE]),
[] = list_blocked_users(Node, Port),
+%% io:format(user, "~w:security -> list blocked users~n", [?MODULE]),
[] = list_blocked_users(Node, Port, OpenDir),
+%% io:format(user, "~w:security -> auth request~n", [?MODULE]),
auth_request(Type, Host, Port, Node,"/open/", "one", "onePassword",
[{statuscode, 200}]),
%% Test list_auth_users & auth_timeout
+%% io:format(user, "~w:security -> list blocked users~n", [?MODULE]),
["one"] = list_auth_users(Node, Port),
+%% io:format(user, "~w:security -> list blocked users~n", [?MODULE]),
["one"] = list_auth_users(Node, Port, OpenDir),
+%% io:format(user, "~w:security -> auth request~n", [?MODULE]),
auth_request(Type, Host, Port, Node,"/open/", "two", "onePassword",
[{statuscode, 401}]),
+%% io:format(user, "~w:security -> list blocked users~n", [?MODULE]),
["one"] = list_auth_users(Node, Port),
+%% io:format(user, "~w:security -> list blocked users~n", [?MODULE]),
["one"] = list_auth_users(Node, Port, OpenDir),
+%% io:format(user, "~w:security -> auth request~n", [?MODULE]),
auth_request(Type, Host, Port, Node,"/open/", "two", "twoPassword",
[{statuscode, 401}]),
+%% io:format(user, "~w:security -> list blocked users~n", [?MODULE]),
["one"] = list_auth_users(Node, Port),
+%% io:format(user, "~w:security -> list blocked users~n", [?MODULE]),
["one"] = list_auth_users(Node, Port, OpenDir),
%% Wait for successful auth to timeout.
test_server:sleep(?AUTH_TIMEOUT*1001),
+%% io:format(user, "~w:security -> list blocked users~n", [?MODULE]),
[] = list_auth_users(Node, Port),
+%% io:format(user, "~w:security -> list blocked users~n", [?MODULE]),
[] = list_auth_users(Node, Port, OpenDir),
%% "two" is blocked.
+%% io:format(user, "~w:security -> unblock user~n", [?MODULE]),
true = unblock_user(Node, "two", Port, OpenDir),
%% Test explicit blocking. Block user 'two'.
+%% io:format(user, "~w:security -> list blocked users~n", [?MODULE]),
[] = list_blocked_users(Node,Port,OpenDir),
+%% io:format(user, "~w:security -> block user~n", [?MODULE]),
true = block_user(Node, "two", Port, OpenDir, 10),
+%% io:format(user, "~w:security -> auth request~n", [?MODULE]),
auth_request(Type, Host, Port, Node,"/open/", "two", "twoPassword",
[{statuscode, 401}]).
@@ -600,6 +644,11 @@ htaccess(Type, Port, Host, Node) ->
{header, "WWW-Authenticate"}]).
%%--------------------------------------------------------------------
cgi(Type, Port, Host, Node) ->
+%% tsp("cgi -> entry with"
+%% "~n Type: ~p"
+%% "~n Port: ~p"
+%% "~n Host: ~p"
+%% "~n Node: ~p", []),
{Script, Script2, Script3} =
case test_server:os_type() of
{win32, _} ->
@@ -609,6 +658,7 @@ cgi(Type, Port, Host, Node) ->
end,
%% The length (> 100) is intentional
+%% tsp("cgi -> request 01 with length > 100"),
ok = httpd_test_lib:
verify_request(Type, Host, Port, Node,
"POST /cgi-bin/" ++ Script3 ++
@@ -636,46 +686,55 @@ cgi(Type, Port, Host, Node) ->
{version, "HTTP/1.0"},
{header, "content-type", "text/plain"}]),
+%% tsp("cgi -> request 02"),
ok = httpd_test_lib:verify_request(Type, Host, Port, Node,
"GET /cgi-bin/"++ Script ++
" HTTP/1.0\r\n\r\n",
[{statuscode, 200},
{version, "HTTP/1.0"}]),
+%% tsp("cgi -> request 03"),
ok = httpd_test_lib:verify_request(Type, Host, Port, Node,
"GET /cgi-bin/not_there "
"HTTP/1.0\r\n\r\n",
[{statuscode, 404},{statuscode, 500},
{version, "HTTP/1.0"}]),
+%% tsp("cgi -> request 04"),
ok = httpd_test_lib:verify_request(Type, Host, Port, Node,
"GET /cgi-bin/"++ Script ++
"?Nisse:kkk?sss/lll HTTP/1.0\r\n\r\n",
[{statuscode, 200},
{version, "HTTP/1.0"}]),
+%% tsp("cgi -> request 04"),
ok = httpd_test_lib:verify_request(Type, Host, Port, Node,
"POST /cgi-bin/"++ Script ++
" HTTP/1.0\r\n\r\n",
[{statuscode, 200},
{version, "HTTP/1.0"}]),
+%% tsp("cgi -> request 05"),
ok = httpd_test_lib:verify_request(Type, Host, Port, Node,
"GET /htbin/"++ Script ++
" HTTP/1.0\r\n\r\n",
[{statuscode, 200},
{version, "HTTP/1.0"}]),
+%% tsp("cgi -> request 06"),
ok = httpd_test_lib:verify_request(Type, Host, Port, Node,
"GET /htbin/not_there "
"HTTP/1.0\r\n\r\n",
[{statuscode, 404},{statuscode, 500},
{version, "HTTP/1.0"}]),
+%% tsp("cgi -> request 07"),
ok = httpd_test_lib:verify_request(Type, Host, Port, Node,
"GET /htbin/"++ Script ++
"?Nisse:kkk?sss/lll HTTP/1.0\r\n\r\n",
[{statuscode, 200},
{version, "HTTP/1.0"}]),
+%% tsp("cgi -> request 08"),
ok = httpd_test_lib:verify_request(Type, Host, Port, Node,
"POST /htbin/"++ Script ++
" HTTP/1.0\r\n\r\n",
[{statuscode, 200},
{version, "HTTP/1.0"}]),
+%% tsp("cgi -> request 09"),
ok = httpd_test_lib:verify_request(Type, Host, Port, Node,
"POST /htbin/"++ Script ++
" HTTP/1.0\r\n\r\n",
@@ -683,19 +742,24 @@ cgi(Type, Port, Host, Node) ->
{version, "HTTP/1.0"}]),
%% Execute an existing, but bad CGI script..
+%% tsp("cgi -> request 10 - bad script"),
ok = httpd_test_lib:verify_request(Type, Host, Port, Node,
"POST /htbin/"++ Script2 ++
" HTTP/1.0\r\n\r\n",
[{statuscode, 404},
{version, "HTTP/1.0"}]),
+%% tsp("cgi -> request 11 - bad script"),
ok = httpd_test_lib:verify_request(Type, Host, Port, Node,
"POST /cgi-bin/"++ Script2 ++
" HTTP/1.0\r\n\r\n",
[{statuscode, 404},
{version, "HTTP/1.0"}]),
+
+%% tsp("cgi -> done"),
ok.
+
%%--------------------------------------------------------------------
esi(Type, Port, Host, Node) ->
%% Check "ErlScriptAlias" and "EvalScriptAlias" directives
@@ -850,25 +914,44 @@ list_users(Node, Root, _Host, Port, Dir) ->
Directory = filename:join([Root, "htdocs", Dir]),
rpc:call(Node, mod_auth, list_users, [Addr, Port, Directory]).
+
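+%% Wait (up to 5 seconds) for the expected mod_security event; stray 'EXIT'
+%% messages are ignored and, on timeout, the message queue is flushed so that
+%% the failure reason shows what was actually received instead.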
receive_security_event(Event, Node, Port) ->
- io:format(user, "~w:receive_security_event -> entry with"
- "~n Event: ~p"
- "~n Node: ~p"
- "~n Port: ~p"
- "~n", [?MODULE, Event, Node, Port]),
+%% io:format(user, "~w:receive_security_event -> entry with"
+%% "~n Event: ~p"
+%% "~n Node: ~p"
+%% "~n Port: ~p"
+%% "~n", [?MODULE, Event, Node, Port]),
receive
Event ->
ok;
{'EXIT', _, _} ->
- receive_security_event(Event, Node, Port);
- Other ->
- test_server:fail({unexpected_event,
- {expected, Event}, {received, Other}})
+ receive_security_event(Event, Node, Port)
after 5000 ->
- test_server:fail(no_event_recived)
+ %% Flush the message queue, to see if we got something...
+ Msgs = inets_test_lib:flush(),
+ tsf({expected_event_not_received, Msgs})
end.
+%% receive_security_event(Event, Node, Port) ->
+%% io:format(user, "~w:receive_security_event -> entry with"
+%% "~n Event: ~p"
+%% "~n Node: ~p"
+%% "~n Port: ~p"
+%% "~n", [?MODULE, Event, Node, Port]),
+%% receive
+%% Event ->
+%% ok;
+%% {'EXIT', _, _} ->
+%% receive_security_event(Event, Node, Port);
+%% Other ->
+%% test_server:fail({unexpected_event,
+%% {expected, Event}, {received, Other}})
+%% after 5000 ->
+%% test_server:fail(no_event_recived)
+
+%% end.
+
list_blocked_users(Node,Port) ->
Addr = undefined, % Assumed to be on the same host
rpc:call(Node, mod_security, list_blocked_users, [Addr,Port]).
@@ -945,3 +1028,12 @@ check_lists_members1(L,L) ->
ok;
check_lists_members1(L1,L2) ->
{error,{lists_not_equal,L1,L2}}.
+
+
+%% tsp(F) ->
+%% tsp(F, []).
+%% tsp(F, A) ->
+%% test_server:format("~p ~p:" ++ F ++ "~n", [self(), ?MODULE | A]).
+
+tsf(Reason) ->
+ test_server:fail(Reason).
diff --git a/lib/inets/test/httpd_poll.erl b/lib/inets/test/httpd_poll.erl
index 1cc10365a7..32335cabcf 100644
--- a/lib/inets/test/httpd_poll.erl
+++ b/lib/inets/test/httpd_poll.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2000-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2000-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%
@@ -27,7 +27,8 @@
%% gen_server exports
-export([init/1,
- handle_call/3, handle_cast/2, handle_info/2, terminate/2]).
+ handle_call/3, handle_cast/2, handle_info/2, terminate/2,
+ code_change/3]).
-define(default_verbosity,error).
@@ -86,8 +87,8 @@ options(Options) ->
options([], Defaults, Options) ->
Options ++ Defaults;
-options([{Key,Val} = Opt|Opts], Defaults, Options) ->
- options(Opts, lists:keydelete(Key, 1, Defaults), [Opt|Options]).
+options([{Key, _Val} = Opt|Opts], Defaults, Options) ->
+ options(Opts, lists:keydelete(Key, 1, Defaults), [Opt | Options]).
verbosity(silence) ->
@@ -134,10 +135,9 @@ uris(otp) ->
uri_top_index(),
uri_internal_product1(),
uri_internal_product2(),
- uri_p7a_test_results(),
+ uri_r13b03_test_results(),
uri_bjorn1(),
- uri_bjorn2(),
- uri_top_ronja()
+ uri_bjorn2()
].
uri_top_index() ->
@@ -149,9 +149,9 @@ uri_internal_product1() ->
uri_internal_product2() ->
{"product internal page (2)","/product/internal"}.
-uri_p7a_test_results() ->
- {"test summery index page",
- "/product/internal/test/test_results/progress_P7A/index.html"}.
+uri_r13b03_test_results() ->
+ {"daily build index page",
+ "/product/internal/test/daily/logs.html"}.
uri_bjorn1() ->
{"bjorns home page (1)","/~bjorn/"}.
@@ -159,9 +159,6 @@ uri_bjorn1() ->
uri_bjorn2() ->
{"bjorns home page (2)","/~bjorn"}.
-uri_top_ronja() ->
- {"ronja top page","/ronja/"}.
-
handle_call(stop, _From, State) ->
vlog("stop request"),
@@ -199,7 +196,11 @@ handle_info(Info, State) ->
{noreply, State}.
-terminate(Reason,State) ->
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+
+terminate(_Reason, State) ->
tcancel(State#state.tref),
log_close(get(log_file)),
ok.
@@ -287,16 +288,16 @@ trash_the_rest(Socket,N) ->
end.
-add(N1,N2) when integer(N1),integer(N2) ->
+add(N1, N2) when is_integer(N1) andalso is_integer(N2) ->
N1 + N2;
-add(N1,N2) when integer(N1) ->
+add(N1, _N2) when is_integer(N1) ->
N1;
-add(N1,N2) when integer(N2) ->
+add(_N1, N2) when is_integer(N2) ->
N2.
-sz(L) when list(L) ->
+sz(L) when is_list(L) ->
length(lists:flatten(L));
-sz(B) when binary(B) ->
+sz(B) when is_binary(B) ->
size(B);
sz(O) ->
{unknown_size,O}.
@@ -307,9 +308,9 @@ sz(O) ->
%% Status code to printable string
%%
-status_to_message(L) when list(L) ->
+status_to_message(L) when is_list(L) ->
case (catch list_to_integer(L)) of
- I when integer(I) ->
+ I when is_integer(I) ->
status_to_message(I);
_ ->
io_lib:format("UNKNOWN STATUS CODE: '~p'",[L])
@@ -470,12 +471,12 @@ vlog(F,A) -> vprint(get(verbosity),log,F,A).
verror(F) -> vprint(get(verbosity),error,F,[]).
verror(F,A) -> vprint(get(verbosity),error,F,A).
-vprint(trace,Severity,F,A) -> vprint(Severity,F,A);
-vprint(debug,trace,F,A) -> ok;
-vprint(debug,Severity,F,A) -> vprint(Severity,F,A);
-vprint(log,log,F,A) -> vprint(log,F,A);
-vprint(log,error,F,A) -> vprint(log,F,A);
-vprint(error,error,F,A) -> vprint(error,F,A);
+vprint(trace, Severity, F, A) -> vprint(Severity,F,A);
+vprint(debug, trace, _F, _A) -> ok;
+vprint(debug, Severity, F, A) -> vprint(Severity,F,A);
+vprint(log, log, F, A) -> vprint(log,F,A);
+vprint(log, error, F, A) -> vprint(log,F,A);
+vprint(error, error, F, A) -> vprint(error,F,A);
vprint(_Verbosity,_Severity,_F,_A) -> ok.
vprint(Severity,F,A) ->
@@ -491,6 +492,3 @@ image_of(trace) -> "TRC: ".
local_time() -> calendar:local_time().
-
-
-
diff --git a/lib/inets/test/httpd_test_data/server_root/Makefile b/lib/inets/test/httpd_test_data/server_root/Makefile
new file mode 100644
index 0000000000..d7a3231068
--- /dev/null
+++ b/lib/inets/test/httpd_test_data/server_root/Makefile
@@ -0,0 +1,209 @@
+#
+# %CopyrightBegin%
+#
+# Copyright Ericsson AB 1997-2010. All Rights Reserved.
+#
+# The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved online at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# %CopyrightEnd%
+#
+#
+include $(ERL_TOP)/make/target.mk
+include $(ERL_TOP)/make/$(TARGET)/otp.mk
+
+# ----------------------------------------------------
+# Application version
+# ----------------------------------------------------
+include ../../vsn.mk
+VSN=$(INETS_VSN)
+
+# ----------------------------------------------------
+# Release directory specification
+# ----------------------------------------------------
+RELSYSDIR = $(RELEASE_PATH)/lib/inets-$(VSN)
+
+# ----------------------------------------------------
+# Target Specs
+# ----------------------------------------------------
+MODULE=
+
+AUTH_FILES = auth/group \
+ auth/passwd
+CGI_FILES = cgi-bin/printenv.sh
+CONF_FILES = conf/8080.conf \
+ conf/8888.conf \
+ conf/httpd.conf \
+ conf/ssl.conf \
+ conf/mime.types
+OPEN_FILES = htdocs/open/dummy.html
+MNESIA_OPEN_FILES = htdocs/mnesia_open/dummy.html
+MISC_FILES = htdocs/misc/friedrich.html \
+ htdocs/misc/oech.html
+SECRET_FILES = htdocs/secret/dummy.html
+MNESIA_SECRET_FILES = htdocs/mnesia_secret/dummy.html
+HTDOCS_FILES = htdocs/index.html \
+ htdocs/config.shtml \
+ htdocs/echo.shtml \
+ htdocs/exec.shtml \
+ htdocs/flastmod.shtml \
+ htdocs/fsize.shtml \
+ htdocs/include.shtml
+ICON_FILES = icons/README \
+ icons/a.gif \
+ icons/alert.black.gif \
+ icons/alert.red.gif \
+ icons/apache_pb.gif \
+ icons/back.gif \
+ icons/ball.gray.gif \
+ icons/ball.red.gif \
+ icons/binary.gif \
+ icons/binhex.gif \
+ icons/blank.gif \
+ icons/bomb.gif \
+ icons/box1.gif \
+ icons/box2.gif \
+ icons/broken.gif \
+ icons/burst.gif \
+ icons/button1.gif \
+ icons/button10.gif \
+ icons/button2.gif \
+ icons/button3.gif \
+ icons/button4.gif \
+ icons/button5.gif \
+ icons/button6.gif \
+ icons/button7.gif \
+ icons/button8.gif \
+ icons/button9.gif \
+ icons/buttonl.gif \
+ icons/buttonr.gif \
+ icons/c.gif \
+ icons/comp.blue.gif \
+ icons/comp.gray.gif \
+ icons/compressed.gif \
+ icons/continued.gif \
+ icons/dir.gif \
+ icons/down.gif \
+ icons/dvi.gif \
+ icons/f.gif \
+ icons/folder.gif \
+ icons/folder.open.gif \
+ icons/folder.sec.gif \
+ icons/forward.gif \
+ icons/generic.gif \
+ icons/generic.red.gif \
+ icons/generic.sec.gif \
+ icons/hand.right.gif \
+ icons/hand.up.gif \
+ icons/htdig.gif \
+ icons/icon.sheet.gif \
+ icons/image1.gif \
+ icons/image2.gif \
+ icons/image3.gif \
+ icons/index.gif \
+ icons/layout.gif \
+ icons/left.gif \
+ icons/link.gif \
+ icons/movie.gif \
+ icons/p.gif \
+ icons/patch.gif \
+ icons/pdf.gif \
+ icons/pie0.gif \
+ icons/pie1.gif \
+ icons/pie2.gif \
+ icons/pie3.gif \
+ icons/pie4.gif \
+ icons/pie5.gif \
+ icons/pie6.gif \
+ icons/pie7.gif \
+ icons/pie8.gif \
+ icons/portal.gif \
+ icons/poweredby.gif \
+ icons/ps.gif \
+ icons/quill.gif \
+ icons/right.gif \
+ icons/screw1.gif \
+ icons/screw2.gif \
+ icons/script.gif \
+ icons/sound1.gif \
+ icons/sound2.gif \
+ icons/sphere1.gif \
+ icons/sphere2.gif \
+ icons/star.gif \
+ icons/star_blank.gif \
+ icons/tar.gif \
+ icons/tex.gif \
+ icons/text.gif \
+ icons/transfer.gif \
+ icons/unknown.gif \
+ icons/up.gif \
+ icons/uu.gif \
+ icons/uuencoded.gif \
+ icons/world1.gif \
+ icons/world2.gif
+
+SSL_FILES = ssl/ssl_client.pem \
+ ssl/ssl_server.pem
+
+# ----------------------------------------------------
+# FLAGS
+# ----------------------------------------------------
+ERL_COMPILE_FLAGS +=
+
+# ----------------------------------------------------
+# Targets
+# ----------------------------------------------------
+
+debug opt:
+
+clean:
+
+docs:
+
+# ----------------------------------------------------
+# Release Target
+# ----------------------------------------------------
+include $(ERL_TOP)/make/otp_release_targets.mk
+
+release_spec: opt
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/auth
+ $(INSTALL_DATA) $(AUTH_FILES) $(RELSYSDIR)/examples/server_root/auth
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/cgi-bin
+ $(INSTALL_SCRIPT) $(CGI_FILES) $(RELSYSDIR)/examples/server_root/cgi-bin
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/conf
+ $(INSTALL_DATA) $(CONF_FILES) $(RELSYSDIR)/examples/server_root/conf
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/htdocs/open
+ $(INSTALL_DATA) $(OPEN_FILES) \
+ $(RELSYSDIR)/examples/server_root/htdocs/open
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/htdocs/mnesia_open
+ $(INSTALL_DATA) $(MNESIA_OPEN_FILES) \
+ $(RELSYSDIR)/examples/server_root/htdocs/mnesia_open
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/htdocs/misc
+ $(INSTALL_DATA) $(MISC_FILES) \
+ $(RELSYSDIR)/examples/server_root/htdocs/misc
+ $(INSTALL_DIR) \
+ $(RELSYSDIR)/examples/server_root/htdocs/secret/top_secret
+ $(INSTALL_DIR) \
+ $(RELSYSDIR)/examples/server_root/htdocs/mnesia_secret/top_secret
+ $(INSTALL_DATA) $(SECRET_FILES) \
+ $(RELSYSDIR)/examples/server_root/htdocs/secret
+ $(INSTALL_DATA) $(MNESIA_SECRET_FILES) \
+ $(RELSYSDIR)/examples/server_root/htdocs/mnesia_secret
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/htdocs
+ $(INSTALL_DATA) $(HTDOCS_FILES) $(RELSYSDIR)/examples/server_root/htdocs
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/icons
+ $(INSTALL_DATA) $(ICON_FILES) $(RELSYSDIR)/examples/server_root/icons
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/ssl
+ $(INSTALL_DATA) $(SSL_FILES) $(RELSYSDIR)/examples/server_root/ssl
+ $(INSTALL_DIR) $(RELSYSDIR)/examples/server_root/logs
+
+release_docs_spec:
+
diff --git a/lib/inets/test/httpd_test_lib.erl b/lib/inets/test/httpd_test_lib.erl
index 6abee5be2c..3189a758a5 100644
--- a/lib/inets/test/httpd_test_lib.erl
+++ b/lib/inets/test/httpd_test_lib.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2001-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2001-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%
@@ -72,6 +72,8 @@
'last-modified',
other=[] % list() - Key/Value list with other headers
}).
+
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%--------------------------------------------------------------------
@@ -81,7 +83,8 @@ verify_request(SocketType, Host, Port, Node, RequestStr, Options) ->
verify_request(SocketType, Host, Port, Node, RequestStr, Options, 30000).
verify_request(SocketType, Host, Port, Node, RequestStr, Options, TimeOut) ->
{ok, Socket} = inets_test_lib:connect_bin(SocketType, Host, Port),
- inets_test_lib:send(SocketType, Socket, RequestStr),
+
+ _SendRes = inets_test_lib:send(SocketType, Socket, RequestStr),
State = case inets_regexp:match(RequestStr, "printenv") of
nomatch ->
@@ -90,18 +93,26 @@ verify_request(SocketType, Host, Port, Node, RequestStr, Options, TimeOut) ->
#state{print = true}
end,
- case request(State#state{request = RequestStr, socket = Socket}, TimeOut) of
- {error, Reson} ->
- {error, Reson};
+ case request(State#state{request = RequestStr,
+ socket = Socket}, TimeOut) of
+ {error, Reason} ->
+ tsp("request failed: "
+ "~n Reason: ~p", [Reason]),
+ {error, Reason};
NewState ->
+ tsp("validate reply: "
+ "~n NewState: ~p", [NewState]),
ValidateResult = validate(RequestStr, NewState, Options,
Node, Port),
+ tsp("validation result: "
+ "~n ~p", [ValidateResult]),
inets_test_lib:close(SocketType, Socket),
ValidateResult
end.
request(#state{mfa = {Module, Function, Args},
request = RequestStr, socket = Socket} = State, TimeOut) ->
+
HeadRequest = lists:sublist(RequestStr, 1, 4),
receive
{tcp, Socket, Data} ->
@@ -109,12 +120,12 @@ request(#state{mfa = {Module, Function, Args},
case Module:Function([Data | Args]) of
{ok, Parsed} ->
handle_http_msg(Parsed, State);
- {_, whole_body, _} when HeadRequest == "HEAD" ->
+ {_, whole_body, _} when HeadRequest =:= "HEAD" ->
State#state{body = <<>>};
NewMFA ->
request(State#state{mfa = NewMFA}, TimeOut)
end;
- {tcp_closed, Socket} when Function == whole_body ->
+ {tcp_closed, Socket} when Function =:= whole_body ->
print(tcp, "closed", State),
State#state{body = hd(Args)};
{tcp_closed, Socket} ->
@@ -126,12 +137,12 @@ request(#state{mfa = {Module, Function, Args},
case Module:Function([Data | Args]) of
{ok, Parsed} ->
handle_http_msg(Parsed, State);
- {_, whole_body, _} when HeadRequest == "HEAD" ->
+ {_, whole_body, _} when HeadRequest =:= "HEAD" ->
State#state{body = <<>>};
NewMFA ->
request(State#state{mfa = NewMFA}, TimeOut)
end;
- {ssl_closed, Socket} when Function == whole_body ->
+ {ssl_closed, Socket} when Function =:= whole_body ->
print(ssl, "closed", State),
State#state{body = hd(Args)};
{ssl_closed, Socket} ->
@@ -330,3 +341,9 @@ print(Proto, Data, #state{print = true}) ->
print(_, _, #state{print = false}) ->
ok.
+
+%% tsp(F) ->
+%% tsp(F, []).
+tsp(F, A) ->
+ test_server:format("~p ~p:" ++ F ++ "~n", [self(), ?MODULE | A]).
+
diff --git a/lib/inets/test/httpd_time_test.erl b/lib/inets/test/httpd_time_test.erl
index 7d6aa08542..f39f9faff0 100644
--- a/lib/inets/test/httpd_time_test.erl
+++ b/lib/inets/test/httpd_time_test.erl
@@ -1,25 +1,25 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2001-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2001-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%
-module(httpd_time_test).
--export([t/3, t1/2, t2/2]).
+-export([t/3, t1/2, t2/2, t3/2, t4/2]).
-export([do/1, do/2, do/3, do/4, do/5]).
@@ -29,6 +29,9 @@
-record(stat, {pid, time = undefined, count = undefined, res}).
+%% -define(NUM_POLLERS, 10).
+-define(NUM_POLLERS, 1).
+
%%% -----------------------------------------------------------------
%%% Test suite interface
@@ -42,9 +45,17 @@ t2(Host, Port) ->
t(ssl, Host, Port).
+t3(Host, Port) ->
+ t(ossl, Host, Port).
+
+
+t4(Host, Port) ->
+ t(essl, Host, Port).
+
+
t(SocketType, Host, Port) ->
%% put(dbg,true),
- main(1, SocketType, Host, Port, 60000).
+ main(?NUM_POLLERS, SocketType, Host, Port, 60000).
@@ -111,28 +122,40 @@ loop(Pollers, Timeout) ->
"~n Timeout: ~p", [Timeout]),
Start = t(),
receive
- {'EXIT', Pid, {poller_stat_failure, Time, Reason}} ->
+ {'EXIT', Pid, {poller_stat_failure, SocketType, Host, Port, Time, Reason}} ->
case is_poller(Pid, Pollers) of
true ->
error_msg("received unexpected exit from poller ~p~n"
"befor completion of test "
- "(after ~p micro sec):~n"
- "~p~n", [Pid,Time,Reason]),
- exit({fail, {poller_exit, Pid, Reason}});
+ "after ~p micro sec"
+ "~n SocketType: ~p"
+ "~n Host: ~p"
+ "~n Port: ~p"
+ "~n~p~n",
+ [Pid, Time, SocketType, Host, Port, Reason]),
+ exit({fail, {poller_exit, Pid, Time, Reason}});
false ->
error_msg("received unexpected ~p from ~p"
"befor completion of test", [Reason, Pid]),
loop(Pollers, to(Timeout, Start))
end;
- {poller_stat_failure, Pid, {Time, Reason}} ->
+ {poller_stat_failure, Pid, {SocketType, Host, Port, Time, Reason}} ->
error_msg("received stat failure ~p from poller ~p after ~p "
- "befor completion of test", [Reason, Pid, Time]),
- exit({fail, {poller_failure, Pid, Reason}});
-
- {poller_stat_failure, Pid, Reason} ->
+ "befor completion of test"
+ "~n SocketType: ~p"
+ "~n Host: ~p"
+ "~n Port: ~p",
+ [Reason, Pid, Time, SocketType, Host, Port]),
+ exit({fail, {poller_failure, Pid, Time, Reason}});
+
+ {poller_stat_failure, Pid, SocketType, Host, Port, Reason} ->
error_msg("received stat failure ~p from poller ~p "
- "befor completion of test", [Reason, Pid]),
+ "befor completion of test"
+ "~n SocketType: ~p"
+ "~n Host: ~p"
+ "~n Port: ~p",
+ [Reason, Pid, SocketType, Host, Port]),
exit({fail, {poller_failure, Pid, Reason}});
Any ->
@@ -250,16 +273,16 @@ is_poller(Pid, [_|Rest]) ->
poller_main(Parent, SocketType, Host, Port) ->
process_flag(trap_exit,true),
- put(sname,poller),
+ put(sname, poller),
case timer:tc(?MODULE, poller_loop, [SocketType, Host, Port, uris()]) of
{Time, Count} when is_integer(Time) andalso is_integer(Count) ->
Parent ! {poller_statistics, self(), {Time, Count}};
{Time, {'EXIT', Reason}} when is_integer(Time) ->
- exit({poller_stat_failure, Time, Reason});
+ exit({poller_stat_failure, SocketType, Host, Port, Time, Reason});
{Time, Other} when is_integer(Time) ->
- Parent ! {poller_stat_failure, self(), {Time, Other}};
+ Parent ! {poller_stat_failure, self(), {SocketType, Host, Port, Time, Other}};
Else ->
- Parent ! {poller_stat_failure, self(), Else}
+ Parent ! {poller_stat_failure, self(), SocketType, Host, Port, Else}
end.
diff --git a/lib/inets/test/inets_sup_SUITE.erl b/lib/inets/test/inets_sup_SUITE.erl
index ba41e0960c..1e701bc074 100644
--- a/lib/inets/test/inets_sup_SUITE.erl
+++ b/lib/inets/test/inets_sup_SUITE.erl
@@ -372,11 +372,11 @@ httpc_subtree(Config) when is_list(Config) ->
"~n Config: ~p", [Config]),
tsp("httpc_subtree -> start inets service httpc with profile foo"),
- {ok, Foo} = inets:start(httpc, [{profile, foo}]),
+ {ok, _Foo} = inets:start(httpc, [{profile, foo}]),
tsp("httpc_subtree -> "
"start stand-alone inets service httpc with profile bar"),
- {ok, Bar} = inets:start(httpc, [{profile, bar}], stand_alone),
+ {ok, _Bar} = inets:start(httpc, [{profile, bar}], stand_alone),
tsp("httpc_subtree -> retreive list of httpc instances"),
HttpcChildren = supervisor:which_children(httpc_profile_sup),
diff --git a/lib/inets/test/inets_test_lib.erl b/lib/inets/test/inets_test_lib.erl
index 6af2ad32f7..86fc2d1a32 100644
--- a/lib/inets/test/inets_test_lib.erl
+++ b/lib/inets/test/inets_test_lib.erl
@@ -1,44 +1,136 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2001-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2001-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%
-module(inets_test_lib).
-include("inets_test_lib.hrl").
+-include_lib("inets/src/http_lib/http_internal.hrl").
%% Various small utility functions
--export([start_http_server/1, start_http_server_ssl/1]).
+-export([start_http_server/1, start_http_server/2]).
+-export([start_http_server_ssl/1, start_http_server_ssl/2]).
-export([hostname/0]).
-export([connect_bin/3, connect_byte/3, send/3, close/2]).
-export([copy_file/3, copy_files/2, copy_dirs/2, del_dirs/1]).
-export([info/4, log/4, debug/4, print/4]).
-export([check_body/1]).
-export([millis/0, millis_diff/2, hours/1, minutes/1, seconds/1, sleep/1]).
+-export([oscmd/1]).
-export([non_pc_tc_maybe_skip/4, os_based_skip/1]).
+-export([flush/0]).
+-export([start_node/1, stop_node/1]).
+
+%% -- Misc os command and stuff
+
+oscmd(Cmd) ->
+ string:strip(os:cmd(Cmd), right, $\n).
+
+%% -- Misc node operation wrapper functions --
+
+start_node(Name) ->
+ Pa = filename:dirname(code:which(?MODULE)),
+ Args = case init:get_argument('CC_TEST') of
+ {ok, [[]]} ->
+ " -pa /clearcase/otp/libraries/snmp/ebin ";
+ {ok, [[Path]]} ->
+ " -pa " ++ Path;
+ error ->
+ ""
+ end,
+ A = Args ++ " -pa " ++ Pa,
+ Opts = [{cleanup,false}, {args, A}],
+ case (catch test_server:start_node(Name, slave, Opts)) of
+ {ok, Node} ->
+ Node;
+ Else ->
+ exit({failed_starting_node, Name, Else})
+ end.
+
+stop_node(Node) ->
+ rpc:cast(Node, erlang, halt, []),
+ await_stopped(Node, 5).
+
+await_stopped(_, 0) ->
+ ok;
+await_stopped(Node, N) ->
+ Nodes = erlang:nodes(),
+ case lists:member(Node, Nodes) of
+ true ->
+ sleep(1000),
+ await_stopped(Node, N-1);
+ false ->
+ ok
+ end.
+
+
+%% ----------------------------------------------------------------
+%% HTTPD starter functions
+%%
start_http_server(Conf) ->
+ start_http_server(Conf, ?HTTP_DEFAULT_SSL_KIND).
+
+start_http_server(Conf, essl = _SslTag) ->
+ application:start(crypto),
+ do_start_http_server(Conf);
+start_http_server(Conf, _SslTag) ->
+ do_start_http_server(Conf).
+
+do_start_http_server(Conf) ->
+ tsp("start http server with "
+ "~n Conf: ~p"
+ "~n", [Conf]),
application:load(inets),
- ok = application:set_env(inets, services, [{httpd, Conf}]),
- ok = application:start(inets).
-
+ case application:set_env(inets, services, [{httpd, Conf}]) of
+ ok ->
+ case application:start(inets) of
+ ok ->
+ ok;
+ Error1 ->
+ test_server:format("<ERROR> Failed starting application: "
+ "~n Error: ~p"
+ "~n", [Error1]),
+ Error1
+ end;
+ Error2 ->
+ test_server:format("<ERROR> Failed set application env: "
+ "~n Error: ~p"
+ "~n", [Error2]),
+ Error2
+ end.
+
start_http_server_ssl(FileName) ->
+ start_http_server_ssl(FileName, ?HTTP_DEFAULT_SSL_KIND).
+
+start_http_server_ssl(FileName, essl = _SslTag) ->
+ application:start(crypto),
+ do_start_http_server_ssl(FileName);
+start_http_server_ssl(FileName, _SslTag) ->
+ do_start_http_server_ssl(FileName).
+
+do_start_http_server_ssl(FileName) ->
+ tsp("start (ssl) http server with "
+ "~n FileName: ~p"
+ "~n", [FileName]),
application:start(ssl),
- catch start_http_server(FileName).
+ catch do_start_http_server(FileName).
+
%% ----------------------------------------------------------------------
%% print functions
@@ -84,27 +176,17 @@ copy_files(FromDir, ToDir) ->
copy_dirs(FromDirRoot, ToDirRoot) ->
-%% io:format("~w:copy_dirs -> entry with"
-%% "~n FromDirRoot: ~p"
-%% "~n ToDirRoot: ~p"
-%% "~n", [?MODULE, FromDirRoot, ToDirRoot]),
{ok, Files} = file:list_dir(FromDirRoot),
lists:foreach(
fun(FileOrDir) ->
%% Check if it's a directory or a file
-%% io:format("~w:copy_dirs -> check ~p"
-%% "~n", [?MODULE, FileOrDir]),
case filelib:is_dir(filename:join(FromDirRoot, FileOrDir)) of
true ->
-%% io:format("~w:copy_dirs -> ~p is a directory"
-%% "~n", [?MODULE, FileOrDir]),
FromDir = filename:join([FromDirRoot, FileOrDir]),
ToDir = filename:join([ToDirRoot, FileOrDir]),
ok = file:make_dir(ToDir),
copy_dirs(FromDir, ToDir);
false ->
-%% io:format("~w:copy_dirs -> ~p is a file"
-%% "~n", [?MODULE, FileOrDir]),
copy_file(FileOrDir, FromDirRoot, ToDirRoot)
end
end, Files).
@@ -133,8 +215,8 @@ check_body(Body) ->
0 ->
case string:rstr(Body, "</HTML>") of
0 ->
- test_server:format("Body ~p~n", [Body]),
- test_server:fail(did_not_receive_whole_body);
+ tsp("Body ~p", [Body]),
+ tsf(did_not_receive_whole_body);
_ ->
ok
end;
@@ -204,9 +286,31 @@ os_based_skip(_) ->
%% Port -> integer()
connect_bin(ssl, Host, Port) ->
+ connect(ssl, Host, Port, [binary, {packet,0}]);
+connect_bin(ossl, Host, Port) ->
+ connect(ssl, Host, Port, [{ssl_imp, old}, binary, {packet,0}]);
+connect_bin(essl, Host, Port) ->
+ connect(ssl, Host, Port, [{ssl_imp, new}, binary, {packet,0}, {reuseaddr, true}]);
+connect_bin(ip_comm, Host, Port) ->
+ Opts = [inet6, binary, {packet,0}],
+ connect(ip_comm, Host, Port, Opts).
+
+
+connect_byte(ssl, Host, Port) ->
+ connect(ssl, Host, Port, [{packet,0}]);
+connect_byte(ossl, Host, Port) ->
+ connect(ssl, Host, Port, [{ssl_imp, old}, {packet,0}]);
+connect_byte(essl, Host, Port) ->
+ connect(ssl, Host, Port, [{ssl_imp, new}, {packet,0}]);
+connect_byte(ip_comm, Host, Port) ->
+ Opts = [inet6, {packet,0}],
+ connect(ip_comm, Host, Port, Opts).
+
+
+connect(ssl, Host, Port, Opts) ->
ssl:start(),
%% Does not support ipv6 in old ssl
- case ssl:connect(Host, Port, [binary, {packet,0}]) of
+ case ssl:connect(Host, Port, Opts) of
{ok, Socket} ->
{ok, Socket};
{error, Reason} ->
@@ -214,61 +318,48 @@ connect_bin(ssl, Host, Port) ->
Error ->
Error
end;
-connect_bin(ip_comm, Host, Port) ->
- Opts = [inet6, binary, {packet,0}],
- connect(ip_comm, Host, Port, Opts).
-
-
connect(ip_comm, Host, Port, Opts) ->
- test_server:format("gen_tcp:connect(~p, ~p, ~p) ~n", [Host, Port, Opts]),
case gen_tcp:connect(Host,Port, Opts) of
{ok, Socket} ->
- test_server:format("connect success~n", []),
+ %% tsp("connect success"),
{ok, Socket};
{error, nxdomain} ->
- test_server:format("nxdomain opts: ~p~n", [Opts]),
+ tsp("nxdomain opts: ~p", [Opts]),
connect(ip_comm, Host, Port, lists:delete(inet6, Opts));
{error, eafnosupport} ->
- test_server:format("eafnosupport opts: ~p~n", [Opts]),
+ tsp("eafnosupport opts: ~p", [Opts]),
connect(ip_comm, Host, Port, lists:delete(inet6, Opts));
{error, {enfile,_}} ->
- test_server:format("Error enfile~n", []),
+ tsp("Error enfile"),
{error, enfile};
Error ->
- test_server:format("Unexpected error: "
- "~n Error: ~p"
- "~nwhen"
- "~n Host: ~p"
- "~n Port: ~p"
- "~n Opts: ~p"
- "~n", [Error, Host, Port, Opts]),
+ tsp("Unexpected error: "
+ "~n Error: ~p"
+ "~nwhen"
+ "~n Host: ~p"
+ "~n Port: ~p"
+ "~n Opts: ~p"
+ "~n", [Error, Host, Port, Opts]),
Error
end.
-connect_byte(ip_comm, Host, Port) ->
- Opts = [inet6, {packet,0}],
- connect(ip_comm, Host, Port, Opts);
-
-connect_byte(ssl, Host, Port) ->
- ssl:start(),
- %% Does not support ipv6 in old ssl
- case ssl:connect(Host,Port,[{packet,0}]) of
- {ok,Socket} ->
- {ok,Socket};
- {error,{enfile,_}} ->
- {error, enfile};
- Error ->
- Error
- end.
send(ssl, Socket, Data) ->
ssl:send(Socket, Data);
+send(ossl, Socket, Data) ->
+ ssl:send(Socket, Data);
+send(essl, Socket, Data) ->
+ ssl:send(Socket, Data);
send(ip_comm,Socket,Data) ->
gen_tcp:send(Socket,Data).
close(ssl,Socket) ->
catch ssl:close(Socket);
+close(ossl,Socket) ->
+ catch ssl:close(Socket);
+close(essl,Socket) ->
+ catch ssl:close(Socket);
close(ip_comm,Socket) ->
catch gen_tcp:close(Socket).
@@ -300,3 +391,20 @@ sleep(MSecs) ->
skip(Reason, File, Line) ->
exit({skipped, {Reason, File, Line}}).
+
+flush() ->
+ receive
+ Msg ->
+ [Msg | flush()]
+ after 1000 ->
+ []
+ end.
+
+
+tsp(F) ->
+ tsp(F, []).
+tsp(F, A) ->
+ test_server:format("~p ~p:" ++ F ++ "~n", [self(), ?MODULE | A]).
+
+tsf(Reason) ->
+ test_server:fail(Reason).
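
The connect_bin/3, connect_byte/3, send/3 and close/2 clauses above dispatch on an SSL-implementation tag (ssl, ossl, essl) or ip_comm. A minimal sketch of how a test case might drive them; the host, port and request string are assumptions, not values taken from the suites:

    demo_essl_roundtrip() ->
        Host = "localhost",                 % assumption: httpd under test on this host
        Port = 8443,                        % assumption: ssl-enabled listener
        Request = "GET / HTTP/1.1\r\nHost: localhost\r\n\r\n",
        {ok, Socket} = inets_test_lib:connect_bin(essl, Host, Port),
        _SendRes = inets_test_lib:send(essl, Socket, Request),
        inets_test_lib:close(essl, Socket).
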
diff --git a/lib/inets/test/inets_test_lib.hrl b/lib/inets/test/inets_test_lib.hrl
index 12a43fa136..0cdb04139c 100644
--- a/lib/inets/test/inets_test_lib.hrl
+++ b/lib/inets/test/inets_test_lib.hrl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2001-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2001-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%
@@ -46,6 +46,11 @@
-endif.
+%% - OS Command and stuff
+
+-define(OSCMD(Cmd), inets_test_lib:oscmd(Cmd)).
+
+
%% - Test case macros -
-define(EXPANDABLE(I, C, F), inets_test_lib:expandable(I, C, F)).
diff --git a/lib/inets/vsn.mk b/lib/inets/vsn.mk
index 7776bef0a5..57c87e7036 100644
--- a/lib/inets/vsn.mk
+++ b/lib/inets/vsn.mk
@@ -18,11 +18,20 @@
# %CopyrightEnd%
APPLICATION = inets
-INETS_VSN = 5.3.2
+INETS_VSN = 5.4
PRE_VSN =
APP_VSN = "$(APPLICATION)-$(INETS_VSN)$(PRE_VSN)"
-TICKETS = OTP-8542 OTP-8607
+TICKETS = OTP-7907 OTP-8564 OTP-8573
+
+TICKETS_5_3_3 = \
+ OTP-8609 \
+ OTP-8610 \
+ OTP-8624
+
+TICKETS_5_3_2 = \
+ OTP-8542 \
+ OTP-8607
TICKETS_5_3_1 = \
OTP-8508 \
@@ -42,7 +51,14 @@ TICKETS_5_3 = \
OTP-8359 \
OTP-8371
-TICKETS_5_2 = OTP-8204 OTP-8206 OTP-8247 OTP-8248 OTP-8249 OTP-8258 OTP-8280
+TICKETS_5_2 = \
+ OTP-8204 \
+ OTP-8206 \
+ OTP-8247 \
+ OTP-8248 \
+ OTP-8249 \
+ OTP-8258 \
+ OTP-8280
TICKETS_5_1_3 = OTP-8154
diff --git a/lib/kernel/doc/src/file.xml b/lib/kernel/doc/src/file.xml
index 50f9722a1c..a9ceac0bcf 100644
--- a/lib/kernel/doc/src/file.xml
+++ b/lib/kernel/doc/src/file.xml
@@ -62,6 +62,25 @@ time() = {{Year, Month, Day}, {Hour, Minute, Second}}
</section>
<funcs>
<func>
+ <name>advise(IoDevice, Offset, Length, Advise) -> ok | {error, Reason}</name>
+ <fsummary>Predeclare an access pattern for file data</fsummary>
+ <type>
+ <v>IoDevice = io_device()</v>
+ <v>Offset = int()</v>
+ <v>Length = int()</v>
+ <v>Advise = posix_file_advise()</v>
+ <v>posix_file_advise() = normal | sequential | random | no_reuse
+ | will_need | dont_need</v>
+ <v>Reason = ext_posix()</v>
+ </type>
+ <desc>
+ <p><c>advise/4</c> can be used to announce an intention to access file
+ data in a specific pattern in the future, thus allowing the
+ operating system to perform appropriate optimizations.</p>
+ <p>On some platforms, this function might have no effect.</p>
+ </desc>
+ </func>
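
In practice the hint is issued once, right after opening the file and before the reads it describes. A minimal sketch, assuming a large file read sequentially from the beginning; read_all/2 is a hypothetical helper, and the Length 0 behaviour follows POSIX posix_fadvise semantics:

    read_sequentially(Path) ->
        {ok, Fd} = file:open(Path, [read, raw, binary]),
        %% Length 0 extends the hint from Offset to the end of the file.
        ok = file:advise(Fd, 0, 0, sequential),
        Result = read_all(Fd, []),
        ok = file:close(Fd),
        Result.

    read_all(Fd, Acc) ->
        case file:read(Fd, 65536) of
            {ok, Bin} -> read_all(Fd, [Bin | Acc]);
            eof       -> {ok, list_to_binary(lists:reverse(Acc))}
        end.
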
+ <func>
<name>change_group(Filename, Gid) -> ok | {error, Reason}</name>
<fsummary>Change group of a file</fsummary>
<type>
@@ -584,7 +603,7 @@ f.txt: {person, "kalle", 25}.
<type>
<v>Filename = name()</v>
<v>Modes = [Mode]</v>
- <v>&nbsp;Mode = read | write | append | raw | binary | {delayed_write, Size, Delay} | delayed_write | {read_ahead, Size} | read_ahead | compressed</v>
+ <v>&nbsp;Mode = read | write | append | exclusive | raw | binary | {delayed_write, Size, Delay} | delayed_write | {read_ahead, Size} | read_ahead | compressed</v>
<v>&nbsp;&nbsp;Size = Delay = int()</v>
<v>IoDevice = io_device()</v>
<v>Reason = ext_posix() | system_limit</v>
@@ -611,6 +630,17 @@ f.txt: {person, "kalle", 25}.
file opened with <c>append</c> will take place at
the end of the file.</p>
</item>
+ <tag><c>exclusive</c></tag>
+ <item>
+ <p>The file, when opened for writing, is created if it
+ does not exist. If the file exists, open will return
+ <c>{error, eexist}</c>.</p>
+ <warning><p>This option does not guarantee exclusiveness on
+ file systems that do not support O_EXCL properly,
+ such as NFS. Do not depend on this option unless you
+ know that the file system supports it (in general, local
+ file systems should be safe).</p></warning>
+ </item>
<tag><c>raw</c></tag>
<item>
<p>The <c>raw</c> option allows faster access to a file,
@@ -1641,6 +1671,33 @@ f.txt: {person, "kalle", 25}.
</desc>
</func>
<func>
+ <name>datasync(IoDevice) -> ok | {error, Reason}</name>
+ <fsummary>Synchronizes the in-memory data of a file, ignoring most of its metadata, with that on the physical medium</fsummary>
+ <type>
+ <v>IoDevice = io_device()</v>
+ <v>Reason = ext_posix() | terminated</v>
+ </type>
+ <desc>
+ <p>Makes sure that any buffers kept by the operating system
+ (not by the Erlang runtime system) are written to disk. In
+ many ways it resembles fsync, but it does not require some of
+ the file's metadata, such as the access time, to be updated. On
+ some platforms, this function might have no effect.</p>
+ <p>Applications that access databases or log files often write
+ a tiny data fragment (e.g., one line in a log file) and then
+ call fsync() immediately in order to ensure that the written
+ data is physically stored on the hard disk. Unfortunately, fsync()
+ will always initiate two write operations: one for the newly
+ written data and another one to update the modification
+ time stored in the inode. If the modification time is not part
+ of the transaction concept, fdatasync() can be used to avoid
+ unnecessary inode disk write operations.</p>
+ <p>Available only on some POSIX systems. On systems that do not
+ implement the fdatasync syscall, this call results in a call to
+ fsync() or has no effect.</p>
+ </desc>
+ </func>
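
A concrete illustration of the fsync()/fdatasync() trade-off described above, as a minimal Erlang sketch of a durable log append; the file name and open modes are assumptions:

    append_durable(Line) ->
        {ok, Fd} = file:open("transactions.log", [append, raw]),
        ok = file:write(Fd, [Line, $\n]),
        %% datasync/1 flushes the data but may skip pure-metadata updates
        %% (e.g. timestamps), saving one write compared to file:sync/1.
        ok = file:datasync(Fd),
        file:close(Fd).
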
+ <func>
<name>truncate(IoDevice) -> ok | {error, Reason}</name>
<fsummary>Truncate a file</fsummary>
<type>
diff --git a/lib/kernel/src/code.erl b/lib/kernel/src/code.erl
index affa5fc0fd..42d4818f08 100644
--- a/lib/kernel/src/code.erl
+++ b/lib/kernel/src/code.erl
@@ -66,6 +66,8 @@
set_primary_archive/3,
clash/0]).
+-export_type([load_error_rsn/0, load_ret/0]).
+
-include_lib("kernel/include/file.hrl").
%% User interface.
diff --git a/lib/kernel/src/dist_util.erl b/lib/kernel/src/dist_util.erl
index a2937d60b8..f0d54a2f3e 100644
--- a/lib/kernel/src/dist_util.erl
+++ b/lib/kernel/src/dist_util.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1999-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1999-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%%----------------------------------------------------------------------
@@ -564,7 +564,7 @@ recv_challenge(#hs_data{socket=Socket,other_node=Node,
case Recv(Socket, 0, infinity) of
{ok,[$n,V1,V0,Fl1,Fl2,Fl3,Fl4,CA3,CA2,CA1,CA0 | Ns]} ->
Flags = ?u32(Fl1,Fl2,Fl3,Fl4),
- case {list_to_existing_atom(Ns),?u16(V1,V0)} of
+ try {list_to_existing_atom(Ns),?u16(V1,V0)} of
{Node,Version} ->
Challenge = ?u32(CA3,CA2,CA1,CA0),
?trace("recv: node=~w, challenge=~w version=~w\n",
@@ -572,6 +572,9 @@ recv_challenge(#hs_data{socket=Socket,other_node=Node,
{Flags,Challenge};
_ ->
?shutdown(no_node)
+ catch
+ error:badarg ->
+ ?shutdown(no_node)
end;
_ ->
?shutdown(no_node)
diff --git a/lib/kernel/src/file.erl b/lib/kernel/src/file.erl
index 46ffa9d708..cffe4e3db5 100644
--- a/lib/kernel/src/file.erl
+++ b/lib/kernel/src/file.erl
@@ -36,11 +36,11 @@
%% Specialized
-export([ipread_s32bu_p32bu/3]).
%% Generic file contents.
--export([open/2, close/1,
+-export([open/2, close/1, advise/4,
read/2, write/2,
pread/2, pread/3, pwrite/2, pwrite/3,
read_line/1,
- position/2, truncate/1, sync/1,
+ position/2, truncate/1, datasync/1, sync/1,
copy/2, copy/3]).
%% High level operations
-export([consult/1, path_consult/2]).
@@ -61,6 +61,9 @@
-export([ipread_s32bu_p32bu_int/3]).
+%% Types that can be used from other modules -- alphabetically ordered.
+-export_type([date_time/0, fd/0, file_info/0, filename/0, io_device/0,
+ name/0, posix/0]).
%%% Includes and defines
-include("file.hrl").
@@ -81,7 +84,7 @@
-type mode() :: 'read' | 'write' | 'append' | 'raw' | 'binary' |
{'delayed_write', non_neg_integer(), non_neg_integer()} |
'delayed_write' | {'read_ahead', pos_integer()} |
- 'read_ahead' | 'compressed'.
+ 'read_ahead' | 'compressed' | 'exclusive'.
-type name() :: string() | atom() | [name()].
-type posix() :: atom().
-type bindings() :: any().
@@ -89,6 +92,8 @@
-type date() :: {pos_integer(), pos_integer(), pos_integer()}.
-type time() :: {non_neg_integer(), non_neg_integer(), non_neg_integer()}.
-type date_time() :: {date(), time()}.
+-type posix_file_advise() :: 'normal' | 'sequential' | 'random' | 'no_reuse' |
+ 'will_need' | 'dont_need'.
%%%-----------------------------------------------------------------
%%% General functions
@@ -352,10 +357,22 @@ close(#file_descriptor{module = Module} = Handle) ->
close(_) ->
{error, badarg}.
--spec read(File :: io_device(), Size :: non_neg_integer()) ->
+-spec advise(File :: io_device(), Offset :: integer(),
+ Length :: integer(), Advise :: posix_file_advise()) ->
+ 'ok' | {'error', posix()}.
+
+advise(File, Offset, Length, Advise) when is_pid(File) ->
+ R = file_request(File, {advise, Offset, Length, Advise}),
+ wait_file_reply(File, R);
+advise(#file_descriptor{module = Module} = Handle, Offset, Length, Advise) ->
+ Module:advise(Handle, Offset, Length, Advise);
+advise(_, _, _, _) ->
+ {error, badarg}.
+
+-spec read(File :: io_device() | atom(), Size :: non_neg_integer()) ->
'eof' | {'ok', [char()] | binary()} | {'error', posix()}.
-read(File, Sz) when is_pid(File), is_integer(Sz), Sz >= 0 ->
+read(File, Sz) when (is_pid(File) orelse is_atom(File)), is_integer(Sz), Sz >= 0 ->
case io:request(File, {get_chars, '', Sz}) of
Data when is_list(Data); is_binary(Data) ->
{ok, Data};
@@ -368,10 +385,10 @@ read(#file_descriptor{module = Module} = Handle, Sz)
read(_, _) ->
{error, badarg}.
--spec read_line(File :: io_device()) ->
+-spec read_line(File :: io_device() | atom()) ->
'eof' | {'ok', [char()] | binary()} | {'error', posix()}.
-read_line(File) when is_pid(File) ->
+read_line(File) when (is_pid(File) orelse is_atom(File)) ->
case io:request(File, {get_line, ''}) of
Data when is_list(Data); is_binary(Data) ->
{ok, Data};
@@ -422,10 +439,10 @@ pread(#file_descriptor{module = Module} = Handle, Offs, Sz)
pread(_, _, _) ->
{error, badarg}.
--spec write(File :: io_device(), Byte :: iodata()) ->
+-spec write(File :: io_device() | atom(), Byte :: iodata()) ->
'ok' | {'error', posix()}.
-write(File, Bytes) when is_pid(File) ->
+write(File, Bytes) when (is_pid(File) orelse is_atom(File)) ->
case make_binary(Bytes) of
Bin when is_binary(Bin) ->
io:request(File, {put_chars,Bin});
@@ -472,6 +489,16 @@ pwrite(#file_descriptor{module = Module} = Handle, Offs, Bytes) ->
pwrite(_, _, _) ->
{error, badarg}.
+-spec datasync(File :: io_device()) -> 'ok' | {'error', posix()}.
+
+datasync(File) when is_pid(File) ->
+ R = file_request(File, datasync),
+ wait_file_reply(File, R);
+datasync(#file_descriptor{module = Module} = Handle) ->
+ Module:datasync(Handle);
+datasync(_) ->
+ {error, badarg}.
+
-spec sync(File :: io_device()) -> 'ok' | {'error', posix()}.
sync(File) when is_pid(File) ->
diff --git a/lib/kernel/src/file_io_server.erl b/lib/kernel/src/file_io_server.erl
index 3ac35a209d..39dc32bb79 100644
--- a/lib/kernel/src/file_io_server.erl
+++ b/lib/kernel/src/file_io_server.erl
@@ -198,6 +198,14 @@ io_reply(From, ReplyAs, Reply) ->
%%%-----------------------------------------------------------------
%%% file requests
+file_request({advise,Offset,Length,Advise},
+ #state{handle=Handle}=State) ->
+ case ?PRIM_FILE:advise(Handle, Offset, Length, Advise) of
+ {error,_}=Reply ->
+ {stop,normal,Reply,State};
+ Reply ->
+ {reply,Reply,State}
+ end;
file_request({pread,At,Sz},
#state{handle=Handle,buf=Buf,read_mode=ReadMode}=State) ->
case position(Handle, At, Buf) of
@@ -219,6 +227,14 @@ file_request({pwrite,At,Data},
Reply ->
std_reply(Reply, State)
end;
+file_request(datasync,
+ #state{handle=Handle}=State) ->
+ case ?PRIM_FILE:datasync(Handle) of
+ {error,_}=Reply ->
+ {stop,normal,Reply,State};
+ Reply ->
+ {reply,Reply,State}
+ end;
file_request(sync,
#state{handle=Handle}=State) ->
case ?PRIM_FILE:sync(Handle) of
diff --git a/lib/kernel/src/group.erl b/lib/kernel/src/group.erl
index a45ba34eae..f92c6f7208 100644
--- a/lib/kernel/src/group.erl
+++ b/lib/kernel/src/group.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(group).
@@ -477,15 +477,15 @@ get_line(Chars, Pbs, Drv, Encoding) ->
get_line1(edlin:edit_line(Chars, Cont), Drv, new_stack(get(line_buffer)),
Encoding).
-get_line1({done,Line,Rest,Rs}, Drv, _Ls, _Encoding) ->
+get_line1({done,Line,Rest,Rs}, Drv, Ls, _Encoding) ->
send_drv_reqs(Drv, Rs),
- put(line_buffer, [Line|lists:delete(Line, get(line_buffer))]),
+ save_line_buffer(Line, get_lines(Ls)),
{done,Line,Rest};
get_line1({undefined,{_A,Mode,Char},Cs,Cont,Rs}, Drv, Ls0, Encoding)
when ((Mode =:= none) and (Char =:= $\^P))
or ((Mode =:= meta_left_sq_bracket) and (Char =:= $A)) ->
send_drv_reqs(Drv, Rs),
- case up_stack(Ls0) of
+ case up_stack(save_line(Ls0, edlin:current_line(Cont))) of
{none,_Ls} ->
send_drv(Drv, beep),
get_line1(edlin:edit_line(Cs, Cont), Drv, Ls0, Encoding);
@@ -498,14 +498,14 @@ get_line1({undefined,{_A,Mode,Char},Cs,Cont,Rs}, Drv, Ls0, Encoding)
Drv,
Ls, Encoding)
end;
-get_line1({undefined,{_A,Mode,Char},_Cs,Cont,Rs}, Drv, Ls0, Encoding)
+get_line1({undefined,{_A,Mode,Char},Cs,Cont,Rs}, Drv, Ls0, Encoding)
when ((Mode =:= none) and (Char =:= $\^N))
or ((Mode =:= meta_left_sq_bracket) and (Char =:= $B)) ->
send_drv_reqs(Drv, Rs),
- case down_stack(Ls0) of
- {none,Ls} ->
- send_drv_reqs(Drv, edlin:erase_line(Cont)),
- get_line1(edlin:start(edlin:prompt(Cont)), Drv, Ls, Encoding);
+ case down_stack(save_line(Ls0, edlin:current_line(Cont))) of
+ {none,_Ls} ->
+ send_drv(Drv, beep),
+ get_line1(edlin:edit_line(Cs, Cont), Drv, Ls0, Encoding);
{Lcs,Ls} ->
send_drv_reqs(Drv, edlin:erase_line(Cont)),
{more_chars,Ncont,Nrs} = edlin:start(edlin:prompt(Cont)),
@@ -627,6 +627,28 @@ down_stack({stack,U,{},[]}) ->
down_stack({stack,U,C,D}) ->
down_stack({stack,[C|U],{},D}).
+save_line({stack, U, {}, []}, Line) ->
+ {stack, U, {}, [Line]};
+save_line({stack, U, _L, D}, Line) ->
+ {stack, U, Line, D}.
+
+get_lines({stack, U, {}, []}) ->
+ U;
+get_lines({stack, U, {}, D}) ->
+ tl(lists:reverse(D, U));
+get_lines({stack, U, L, D}) ->
+ get_lines({stack, U, {}, [L|D]}).
+
+save_line_buffer("\n", Lines) ->
+ save_line_buffer(Lines);
+save_line_buffer(Line, [Line|_Lines]=Lines) ->
+ save_line_buffer(Lines);
+save_line_buffer(Line, Lines) ->
+ save_line_buffer([Line|Lines]).
+
+save_line_buffer(Lines) ->
+ put(line_buffer, Lines).
+
%% This is get_line without line editing (except for backspace) and
%% without echo.
get_password_line(Chars, Drv) ->
diff --git a/lib/kernel/src/inet.erl b/lib/kernel/src/inet.erl
index eb503235d8..93d75321ba 100644
--- a/lib/kernel/src/inet.erl
+++ b/lib/kernel/src/inet.erl
@@ -62,6 +62,8 @@
%% timer interface
-export([start_timer/1, timeout/1, timeout/2, stop_timer/1]).
+-export_type([ip_address/0, socket/0]).
+
%% imports
-import(lists, [append/1, duplicate/2, filter/2, foldl/3]).
diff --git a/lib/kernel/src/inet_dns.erl b/lib/kernel/src/inet_dns.erl
index 669a361c9d..1289e176c7 100644
--- a/lib/kernel/src/inet_dns.erl
+++ b/lib/kernel/src/inet_dns.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(inet_dns).
@@ -129,27 +129,33 @@ do_decode(<<Id:16,
RA:1,PR:1,_:2,Rcode:4,
QdCount:16,AnCount:16,NsCount:16,ArCount:16,
QdBuf/binary>>=Buffer) ->
- {AnBuf,QdList} = decode_query_section(QdBuf,QdCount,Buffer),
- {NsBuf,AnList} = decode_rr_section(AnBuf,AnCount,Buffer),
- {ArBuf,NsList} = decode_rr_section(NsBuf,NsCount,Buffer),
- {Rest,ArList} = decode_rr_section(ArBuf,ArCount,Buffer),
+ {AnBuf,QdList,QdTC} = decode_query_section(QdBuf,QdCount,Buffer),
+ {NsBuf,AnList,AnTC} = decode_rr_section(AnBuf,AnCount,Buffer),
+ {ArBuf,NsList,NsTC} = decode_rr_section(NsBuf,NsCount,Buffer),
+ {Rest,ArList,ArTC} = decode_rr_section(ArBuf,ArCount,Buffer),
case Rest of
<<>> ->
+ HdrTC = decode_boolean(TC),
DnsHdr =
#dns_header{id=Id,
qr=decode_boolean(QR),
opcode=decode_opcode(Opcode),
aa=decode_boolean(AA),
- tc=decode_boolean(TC),
+ tc=HdrTC,
rd=decode_boolean(RD),
ra=decode_boolean(RA),
pr=decode_boolean(PR),
rcode=Rcode},
- #dns_rec{header=DnsHdr,
- qdlist=QdList,
- anlist=AnList,
- nslist=NsList,
- arlist=ArList};
+ case QdTC or AnTC or NsTC or ArTC of
+ true when not HdrTC ->
+ throw(?DECODE_ERROR);
+ _ ->
+ #dns_rec{header=DnsHdr,
+ qdlist=QdList,
+ anlist=AnList,
+ nslist=NsList,
+ arlist=ArList}
+ end;
_ ->
%% Garbage data after DNS message
throw(?DECODE_ERROR)
@@ -161,8 +167,10 @@ do_decode(_) ->
decode_query_section(Bin, N, Buffer) ->
decode_query_section(Bin, N, Buffer, []).
+decode_query_section(<<>>=Rest, N, _Buffer, Qs) ->
+ {Rest,reverse(Qs),N =/= 0};
decode_query_section(Rest, 0, _Buffer, Qs) ->
- {Rest,reverse(Qs)};
+ {Rest,reverse(Qs),false};
decode_query_section(Bin, N, Buffer, Qs) ->
case decode_name(Bin, Buffer) of
{<<Type:16,Class:16,Rest/binary>>,Name} ->
@@ -179,8 +187,10 @@ decode_query_section(Bin, N, Buffer, Qs) ->
decode_rr_section(Bin, N, Buffer) ->
decode_rr_section(Bin, N, Buffer, []).
+decode_rr_section(<<>>=Rest, N, _Buffer, RRs) ->
+ {Rest,reverse(RRs),N =/= 0};
decode_rr_section(Rest, 0, _Buffer, RRs) ->
- {Rest,reverse(RRs)};
+ {Rest,reverse(RRs),false};
decode_rr_section(Bin, N, Buffer, RRs) ->
case decode_name(Bin, Buffer) of
{<<T:16/unsigned,C:16/unsigned,TTL:4/binary,
diff --git a/lib/kernel/src/inet_res.erl b/lib/kernel/src/inet_res.erl
index 9b9e078898..de0f23bf24 100644
--- a/lib/kernel/src/inet_res.erl
+++ b/lib/kernel/src/inet_res.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%% RFC 1035, 2671, 2782, 2915.
@@ -592,6 +592,7 @@ query_retries(_Q, _NSs, _Timer, Retry, Retry, S) ->
query_retries(Q, NSs, Timer, Retry, I, S0) ->
Num = length(NSs),
if Num =:= 0 ->
+ udp_close(S0),
{error,timeout};
true ->
case query_nss(Q, NSs, Timer, Retry, I, S0, []) of
diff --git a/lib/kernel/src/net_kernel.erl b/lib/kernel/src/net_kernel.erl
index 3afaedf274..0e5cc8c2c6 100644
--- a/lib/kernel/src/net_kernel.erl
+++ b/lib/kernel/src/net_kernel.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(net_kernel).
@@ -72,7 +72,7 @@
-export([publish_on_node/1, update_publish_nodes/1]).
-%% Internal Exports
+%% Internal Exports
-export([do_spawn/3,
spawn_func/6,
ticker/2,
@@ -94,7 +94,7 @@
connecttime, %% the connection setuptime.
connections, %% table of connections
conn_owners = [], %% List of connection owner pids,
- pend_owners = [], %% List of potential owners
+ pend_owners = [], %% List of potential owners
listen, %% list of #listen
allowed, %% list of allowed nodes in a restricted system
verbose = 0, %% level of verboseness
@@ -232,7 +232,7 @@ do_connect(Node, Type, WaitForBarred) -> %% Type = normal | hidden
%% "connected from other end.~n",[Node]),
true;
{Pid, false} ->
- ?connect_failure(Node,{barred_connection,
+ ?connect_failure(Node,{barred_connection,
ets:lookup(sys_dist, Node)}),
%%io:format("Net Kernel: barred connection (~p) "
%% "- failure.~n",[Node]),
@@ -244,12 +244,12 @@ do_connect(Node, Type, WaitForBarred) -> %% Type = normal | hidden
{ok, never} ->
?connect_failure(Node,{dist_auto_connect,never}),
false;
- % This might happen due to connection close
+ % This might happen due to connection close
% not beeing propagated to user space yet.
- % Save the day by just not connecting...
+ % Save the day by just not connecting...
{ok, once} when Else =/= [],
(hd(Else))#connection.state =:= up ->
- ?connect_failure(Node,{barred_connection,
+ ?connect_failure(Node,{barred_connection,
ets:lookup(sys_dist, Node)}),
false;
_ ->
@@ -276,8 +276,8 @@ passive_connect_monitor(Parent, Node) ->
Parent ! {self(),true}
end
end.
-
-%% If the net_kernel isn't running we ignore all requests to the
+
+%% If the net_kernel isn't running we ignore all requests to the
%% kernel, thus basically accepting them :-)
request(Req) ->
case whereis(net_kernel) of
@@ -302,7 +302,7 @@ start_link([Name, LongOrShortNames]) ->
start_link([Name, LongOrShortNames, 15000]);
start_link([Name, LongOrShortNames, Ticktime]) ->
- case gen_server:start_link({local, net_kernel}, net_kernel,
+ case gen_server:start_link({local, net_kernel}, net_kernel,
{Name, LongOrShortNames, Ticktime}, []) of
{ok, Pid} ->
{ok, Pid};
@@ -313,7 +313,7 @@ start_link([Name, LongOrShortNames, Ticktime]) ->
end.
%% auth:get_cookie should only be able to return an atom
-%% tuple cookies are unknowns
+%% tuple cookies are unknowns
init({Name, LongOrShortNames, TickT}) ->
process_flag(trap_exit,true),
@@ -354,13 +354,13 @@ init({Name, LongOrShortNames, TickT}) ->
%% The response is delayed until the connection is up and
%% running.
%%
-handle_call({connect, _, Node}, _From, State) when Node =:= node() ->
- {reply, true, State};
+handle_call({connect, _, Node}, From, State) when Node =:= node() ->
+ async_reply({reply, true, State}, From);
handle_call({connect, Type, Node}, From, State) ->
verbose({connect, Type, Node}, 1, State),
case ets:lookup(sys_dist, Node) of
[Conn] when Conn#connection.state =:= up ->
- {reply, true, State};
+ async_reply({reply, true, State}, From);
[Conn] when Conn#connection.state =:= pending ->
Waiting = Conn#connection.waiting,
ets:insert(sys_dist, Conn#connection{waiting = [From|Waiting]}),
@@ -376,74 +376,75 @@ handle_call({connect, Type, Node}, From, State) ->
{noreply,State#state{conn_owners=Owners}};
_ ->
?connect_failure(Node, {setup_call, failed}),
- {reply, false, State}
+ async_reply({reply, false, State}, From)
end
end;
%%
%% Close the connection to Node.
%%
-handle_call({disconnect, Node}, _From, State) when Node =:= node() ->
- {reply, false, State};
-handle_call({disconnect, Node}, _From, State) ->
+handle_call({disconnect, Node}, From, State) when Node =:= node() ->
+ async_reply({reply, false, State}, From);
+handle_call({disconnect, Node}, From, State) ->
verbose({disconnect, Node}, 1, State),
{Reply, State1} = do_disconnect(Node, State),
- {reply, Reply, State1};
+ async_reply({reply, Reply, State1}, From);
-%%
+%%
%% The spawn/4 BIF ends up here.
-%%
+%%
handle_call({spawn,M,F,A,Gleader},{From,Tag},State) when is_pid(From) ->
do_spawn([no_link,{From,Tag},M,F,A,Gleader],[],State);
-%%
+%%
%% The spawn_link/4 BIF ends up here.
-%%
+%%
handle_call({spawn_link,M,F,A,Gleader},{From,Tag},State) when is_pid(From) ->
do_spawn([link,{From,Tag},M,F,A,Gleader],[],State);
-%%
+%%
%% The spawn_opt/5 BIF ends up here.
-%%
+%%
handle_call({spawn_opt,M,F,A,O,L,Gleader},{From,Tag},State) when is_pid(From) ->
do_spawn([L,{From,Tag},M,F,A,Gleader],O,State);
-%%
+%%
%% Only allow certain nodes.
-%%
-handle_call({allow, Nodes}, _From, State) ->
+%%
+handle_call({allow, Nodes}, From, State) ->
case all_atoms(Nodes) of
true ->
Allowed = State#state.allowed,
- {reply,ok,State#state{allowed = Allowed ++ Nodes}};
+ async_reply({reply,ok,State#state{allowed = Allowed ++ Nodes}},
+ From);
false ->
- {reply,error,State}
+ async_reply({reply,error,State}, From)
end;
-%%
+%%
%% authentication, used by auth. Simply works as this:
%% if the message comes through, the other node IS authorized.
-%%
-handle_call({is_auth, _Node}, _From, State) ->
- {reply,yes,State};
+%%
+handle_call({is_auth, _Node}, From, State) ->
+ async_reply({reply,yes,State}, From);
-%%
+%%
%% Not applicable any longer !?
-%%
-handle_call({apply,_Mod,_Fun,_Args}, {From,Tag}, State)
+%%
+handle_call({apply,_Mod,_Fun,_Args}, {From,Tag}, State)
when is_pid(From), node(From) =:= node() ->
- gen_server:reply({From,Tag}, not_implemented),
+ async_gen_server_reply({From,Tag}, not_implemented),
% Port = State#state.port,
% catch apply(Mod,Fun,[Port|Args]),
{noreply,State};
-handle_call(longnames, _From, State) ->
- {reply, get(longnames), State};
+handle_call(longnames, From, State) ->
+ async_reply({reply, get(longnames), State}, From);
-handle_call({update_publish_nodes, Ns}, _From, State) ->
- {reply, ok, State#state{publish_on_nodes = Ns}};
+handle_call({update_publish_nodes, Ns}, From, State) ->
+ async_reply({reply, ok, State#state{publish_on_nodes = Ns}}, From);
-handle_call({publish_on_node, Node}, _From, State) ->
+handle_call({publish_on_node, Node}, From, State) ->
NewState = case State#state.publish_on_nodes of
undefined ->
State#state{publish_on_nodes =
@@ -457,11 +458,12 @@ handle_call({publish_on_node, Node}, _From, State) ->
Nodes ->
lists:member(Node, Nodes)
end,
- {reply, Publish, NewState};
+ async_reply({reply, Publish, NewState}, From);
-handle_call({verbose, Level}, _From, State) ->
- {reply, State#state.verbose, State#state{verbose = Level}};
+handle_call({verbose, Level}, From, State) ->
+ async_reply({reply, State#state.verbose, State#state{verbose = Level}},
+ From);
%%
%% Set new ticktime
@@ -471,16 +473,16 @@ handle_call({verbose, Level}, _From, State) ->
%% #tick_change{} record if the ticker process has been upgraded;
%% otherwise, an integer or an atom.
-handle_call(ticktime, _, #state{tick = #tick{time = T}} = State) ->
- {reply, T, State};
-handle_call(ticktime, _, #state{tick = #tick_change{time = T}} = State) ->
- {reply, {ongoing_change_to, T}, State};
+handle_call(ticktime, From, #state{tick = #tick{time = T}} = State) ->
+ async_reply({reply, T, State}, From);
+handle_call(ticktime, From, #state{tick = #tick_change{time = T}} = State) ->
+ async_reply({reply, {ongoing_change_to, T}, State}, From);
-handle_call({new_ticktime,T,_TP}, _, #state{tick = #tick{time = T}} = State) ->
+handle_call({new_ticktime,T,_TP}, From, #state{tick = #tick{time = T}} = State) ->
?tckr_dbg(no_tick_change),
- {reply, unchanged, State};
+ async_reply({reply, unchanged, State}, From);
-handle_call({new_ticktime,T,TP}, _, #state{tick = #tick{ticker = Tckr,
+handle_call({new_ticktime,T,TP}, From, #state{tick = #tick{ticker = Tckr,
time = OT}} = State) ->
?tckr_dbg(initiating_tick_change),
start_aux_ticker(T, OT, TP),
@@ -493,14 +495,18 @@ handle_call({new_ticktime,T,TP}, _, #state{tick = #tick{ticker = Tckr,
?tckr_dbg(shorter_ticktime),
shorter
end,
- {reply, change_initiated, State#state{tick = #tick_change{ticker = Tckr,
- time = T,
- how = How}}};
+ async_reply({reply, change_initiated,
+ State#state{tick = #tick_change{ticker = Tckr,
+ time = T,
+ how = How}}}, From);
-handle_call({new_ticktime,_,_},
-            _,
+handle_call({new_ticktime,_,_},
+            From,
#state{tick = #tick_change{time = T}} = State) ->
- {reply, {ongoing_change_to, T}, State}.
+ async_reply({reply, {ongoing_change_to, T}, State}, From);
+
+handle_call(_Msg, _From, State) ->
+ {noreply, State}.
%% ------------------------------------------------------------
%% handle_cast.
@@ -568,7 +574,7 @@ handle_info({accept,AcceptPid,Socket,Family,Proto}, State) ->
%%
%% A node has successfully been connected.
%%
-handle_info({SetupPid, {nodeup,Node,Address,Type,Immediate}},
+handle_info({SetupPid, {nodeup,Node,Address,Type,Immediate}},
State) ->
case {Immediate, ets:lookup(sys_dist, Node)} of
{true, [Conn]} when Conn#connection.state =:= pending,
@@ -656,7 +662,7 @@ handle_info({From,registered_send,To,Mess},State) ->
send(From,To,Mess),
{noreply,State};
-%% badcookies SHOULD not be sent
+%% badcookies SHOULD not be sent
%% (if someone does erlang:set_cookie(node(),foo) this may be)
handle_info({From,badcookie,_To,_Mess}, State) ->
error_logger:error_msg("~n** Got OLD cookie from ~w~n",
@@ -704,7 +710,7 @@ handle_info(X, State) ->
%% 4. The ticker process.
%% (5. Garbage pid.)
%%
-%% The process type function that handled the process throws
+%% The process type function that handled the process throws
%% the handle_info return value !
%% -----------------------------------------------------------
@@ -994,9 +1000,9 @@ ticker(Kernel, Tick) when is_integer(Tick) ->
ticker_loop(Kernel, Tick).
to_integer(T) when is_integer(T) -> T;
-to_integer(T) when is_atom(T) ->
+to_integer(T) when is_atom(T) ->
list_to_integer(atom_to_list(T));
-to_integer(T) when is_list(T) ->
+to_integer(T) when is_list(T) ->
list_to_integer(T).
ticker_loop(Kernel, Tick) ->
@@ -1004,7 +1010,7 @@ ticker_loop(Kernel, Tick) ->
{new_ticktime, NewTick} ->
?tckr_dbg({ticker_changed_time, Tick, NewTick}),
?MODULE:ticker_loop(Kernel, NewTick)
- after Tick ->
+ after Tick ->
Kernel ! tick,
?MODULE:ticker_loop(Kernel, Tick)
end.
@@ -1052,7 +1058,7 @@ send(_From,To,Mess) ->
-ifdef(UNUSED).
safesend(Name,Mess) when is_atom(Name) ->
- case whereis(Name) of
+ case whereis(Name) of
undefined ->
Mess;
P when is_pid(P) ->
@@ -1063,11 +1069,12 @@ safesend(Pid, Mess) -> Pid ! Mess.
-endif.
do_spawn(SpawnFuncArgs, SpawnOpts, State) ->
+ [_,From|_] = SpawnFuncArgs,
case catch spawn_opt(?MODULE, spawn_func, SpawnFuncArgs, SpawnOpts) of
- {'EXIT', {Reason,_}} ->
- {reply, {'EXIT', {Reason,[]}}, State};
- {'EXIT', Reason} ->
- {reply, {'EXIT', {Reason,[]}}, State};
+ {'EXIT', {Reason,_}} ->
+ async_reply({reply, {'EXIT', {Reason,[]}}, State}, From);
+ {'EXIT', Reason} ->
+ async_reply({reply, {'EXIT', {Reason,[]}}, State}, From);
_ ->
{noreply,State}
end.
@@ -1145,7 +1152,7 @@ get_proto_mod(Family,Protocol,[L|Ls]) ->
true ->
get_proto_mod(Family,Protocol,Ls)
end;
-get_proto_mod(_Family, _Protocol, []) ->
+get_proto_mod(_Family, _Protocol, []) ->
error.
%% -------- Initialisation functions ------------------------
@@ -1156,9 +1163,9 @@ init_node(Name, LongOrShortNames) ->
case create_name(Name, LongOrShortNames, 1) of
{ok,Node} ->
case start_protos(list_to_atom(NameWithoutHost),Node) of
- {ok, Ls} ->
+ {ok, Ls} ->
{ok, Node, Ls};
- Error ->
+ Error ->
Error
end;
Error ->
@@ -1167,9 +1174,9 @@ init_node(Name, LongOrShortNames) ->
%% Create the node name
create_name(Name, LongOrShortNames, Try) ->
- put(longnames, case LongOrShortNames of
- shortnames -> false;
- longnames -> true
+ put(longnames, case LongOrShortNames of
+ shortnames -> false;
+ longnames -> true
end),
{Head,Host1} = create_hostpart(Name, LongOrShortNames),
case Host1 of
@@ -1218,7 +1225,7 @@ create_hostpart(Name, LongOrShortNames) ->
{Head,Host1}.
%%
-%%
+%%
%%
protocol_childspecs() ->
case init:get_argument(proto_dist) of
@@ -1228,7 +1235,7 @@ protocol_childspecs() ->
protocol_childspecs(["inet_tcp"])
end.
-protocol_childspecs([]) ->
+protocol_childspecs([]) ->
[];
protocol_childspecs([H|T]) ->
Mod = list_to_atom(H ++ "_dist"),
@@ -1238,15 +1245,15 @@ protocol_childspecs([H|T]) ->
_ ->
protocol_childspecs(T)
end.
-
-
+
+
%%
%% epmd_module() -> module_name of erl_epmd or similar gen_server_module.
%%
epmd_module() ->
case init:get_argument(epmd_module) of
- {ok,[[Module]]} ->
+ {ok,[[Module]]} ->
Module;
_ ->
erl_epmd
@@ -1293,7 +1300,7 @@ start_protos(Name, [Proto | Ps], Node, Ls) ->
error_logger:info_msg("Protocol: ~p: not supported~n", [Proto]),
start_protos(Name,Ps, Node, Ls);
{'EXIT', Reason} ->
- error_logger:info_msg("Protocol: ~p: register error: ~p~n",
+ error_logger:info_msg("Protocol: ~p: register error: ~p~n",
[Proto, Reason]),
start_protos(Name,Ps, Node, Ls);
{error, duplicate_name} ->
@@ -1303,7 +1310,7 @@ start_protos(Name, [Proto | Ps], Node, Ls) ->
[Proto]),
start_protos(Name,Ps, Node, Ls);
{error, Reason} ->
- error_logger:info_msg("Protocol: ~p: register/listen error: ~p~n",
+ error_logger:info_msg("Protocol: ~p: register/listen error: ~p~n",
[Proto, Reason]),
start_protos(Name,Ps, Node, Ls)
end;
@@ -1409,7 +1416,7 @@ reply_waiting(_Node, Waiting, Rep) ->
reply_waiting1(lists:reverse(Waiting), Rep).
reply_waiting1([From|W], Rep) ->
- gen_server:reply(From, Rep),
+ async_gen_server_reply(From, Rep),
reply_waiting1(W, Rep);
reply_waiting1([], _) ->
ok.
@@ -1455,7 +1462,7 @@ display_info({Node, Info}, {I,O}) ->
integer_to_list(In), integer_to_list(Out), Address),
{I+In,O+Out}.
-fmt_address(undefined) ->
+fmt_address(undefined) ->
"-";
fmt_address(A) ->
case A#net_address.family of
@@ -1511,3 +1518,21 @@ verbose(_, _, _) ->
getnode(P) when is_pid(P) -> node(P);
getnode(P) -> P.
+
+async_reply({reply, Msg, State}, From) ->
+ async_gen_server_reply(From, Msg),
+ {noreply, State}.
+
+async_gen_server_reply(From, Msg) ->
+ {Pid, Tag} = From,
+ M = {Tag, Msg},
+ case catch erlang:send(Pid, M, [nosuspend, noconnect]) of
+ ok ->
+ ok;
+ nosuspend ->
+ spawn(fun() -> catch erlang:send(Pid, M, [noconnect]) end);
+ noconnect ->
+ ok; % The gen module takes care of this case.
+ {'EXIT', _}=EXIT ->
+ EXIT
+ end.
diff --git a/lib/kernel/src/os.erl b/lib/kernel/src/os.erl
index d0b498edc9..75a11a8afd 100644
--- a/lib/kernel/src/os.erl
+++ b/lib/kernel/src/os.erl
@@ -50,7 +50,7 @@ find_executable(Name, Path) ->
relative ->
find_executable1(Name, split_path(Path), Extensions);
_ ->
- case verify_executable(Name, Extensions) of
+ case verify_executable(Name, Extensions, Extensions) of
{ok, Complete} ->
Complete;
error ->
@@ -60,7 +60,7 @@ find_executable(Name, Path) ->
find_executable1(Name, [Base|Rest], Extensions) ->
Complete0 = filename:join(Base, Name),
- case verify_executable(Complete0, Extensions) of
+ case verify_executable(Complete0, Extensions, Extensions) of
{ok, Complete} ->
Complete;
error ->
@@ -69,7 +69,7 @@ find_executable1(Name, [Base|Rest], Extensions) ->
find_executable1(_Name, [], _Extensions) ->
false.
-verify_executable(Name0, [Ext|Rest]) ->
+verify_executable(Name0, [Ext|Rest], OrigExtensions) ->
Name1 = Name0 ++ Ext,
case os:type() of
vxworks ->
@@ -78,7 +78,7 @@ verify_executable(Name0, [Ext|Rest]) ->
{ok, _} ->
{ok, Name1};
_ ->
- verify_executable(Name0, Rest)
+ verify_executable(Name0, Rest, OrigExtensions)
end;
_ ->
case file:read_file_info(Name1) of
@@ -87,12 +87,30 @@ verify_executable(Name0, [Ext|Rest]) ->
%% on Unix, since we test if any execution bit is set.
{ok, Name1};
_ ->
- verify_executable(Name0, Rest)
+ verify_executable(Name0, Rest, OrigExtensions)
end
end;
-verify_executable(_, []) ->
+verify_executable(Name, [], OrigExtensions) when OrigExtensions =/= [""] -> %% Windows
+    %% This will only happen on Windows, hence the case insensitivity
+ case can_be_full_name(string:to_lower(Name),OrigExtensions) of
+ true ->
+ verify_executable(Name,[""],[""]);
+ _ ->
+ error
+ end;
+verify_executable(_, [], _) ->
error.
+can_be_full_name(_Name,[]) ->
+ false;
+can_be_full_name(Name,[H|T]) ->
+    case lists:suffix(H,Name) of %% Name is already lowercased; Windows is case-insensitive
+ true ->
+ true;
+ _ ->
+ can_be_full_name(Name,T)
+ end.
+
split_path(Path) ->
case type() of
{win32, _} ->
@@ -119,6 +137,7 @@ reverse_element(List) ->
lists:reverse(List).
-spec extensions() -> [string()].
+%% Extensions in lower case
extensions() ->
case type() of
{win32, _} -> [".exe",".com",".cmd",".bat"];
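With the extra OrigExtensions argument, verify_executable/3 can fall back to accepting a name that already carries one of the known extensions, so os:find_executable/1 resolves explicitly suffixed names on Windows, case-insensitively. A hedged sketch of the intended behaviour (the path and names are examples; actual results depend on PATH):

    %% Assuming cmd.exe lives in c:/win32 and that directory is on PATH
    %% (example path; the exact string depends on the environment):
    "c:/win32/cmd.exe" = os:find_executable("cmd"),
    "c:/win32/cmd.exe" = os:find_executable("cmd.exe"),   % explicit extension
    "c:/win32/CMD.EXE" = os:find_executable("CMD.EXE").   % case-insensitive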
diff --git a/lib/kernel/src/pg2.erl b/lib/kernel/src/pg2.erl
index cb9fec2ffe..956a900adc 100644
--- a/lib/kernel/src/pg2.erl
+++ b/lib/kernel/src/pg2.erl
@@ -251,7 +251,9 @@ terminate(_Reason, _S) ->
%%% Pid is a member of group Name.
store(List) ->
- _ = [assure_group(Name) andalso [join_group(Name, P) || P <- Members] ||
+ _ = [(assure_group(Name)
+ andalso
+ [join_group(Name, P) || P <- Members -- group_members(Name)]) ||
[Name, Members] <- List],
ok.
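The reworked store/1 subtracts group_members(Name) before joining, so members received from a remote node are added only if they are not already known locally. This matters because pg2 counts joins: a pid that joins a group twice is listed twice, as in the hedged sketch below (the group name is illustrative).

    %% pg2 counts joins, so applying the same membership table twice
    %% without the subtraction would double every member.
    ok = pg2:create(my_group),
    ok = pg2:join(my_group, self()),
    ok = pg2:join(my_group, self()),
    [P, P] = pg2:get_members(my_group),   % the same pid appears twice
    true = (P =:= self()).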
diff --git a/lib/kernel/src/ram_file.erl b/lib/kernel/src/ram_file.erl
index d996650948..48ea871433 100644
--- a/lib/kernel/src/ram_file.erl
+++ b/lib/kernel/src/ram_file.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(ram_file).
@@ -24,11 +24,11 @@
-export([open/2, close/1]).
-export([write/2, read/2, copy/3,
pread/2, pread/3, pwrite/2, pwrite/3,
- position/2, truncate/1, sync/1]).
+ position/2, truncate/1, datasync/1, sync/1]).
%% Specialized file operations
-export([get_size/1, get_file/1, set_file/2, get_file_close/1]).
--export([compress/1, uncompress/1, uuencode/1, uudecode/1]).
+-export([compress/1, uncompress/1, uuencode/1, uudecode/1, advise/4]).
-export([open_mode/1]). %% used by ftp-file
@@ -60,6 +60,7 @@
-define(RAM_FILE_TRUNCATE, 14).
-define(RAM_FILE_PREAD, 17).
-define(RAM_FILE_PWRITE, 18).
+-define(RAM_FILE_FDATASYNC, 19).
%% Other operations
-define(RAM_FILE_GET, 30).
@@ -70,6 +71,7 @@
-define(RAM_FILE_UUENCODE, 35).
-define(RAM_FILE_UUDECODE, 36).
-define(RAM_FILE_SIZE, 37).
+-define(RAM_FILE_ADVISE, 38).
%% Open modes for RAM_FILE_OPEN
-define(RAM_FILE_MODE_READ, 1).
@@ -90,6 +92,14 @@
-define(RAM_FILE_RESP_NUMBER, 3).
-define(RAM_FILE_RESP_INFO, 4).
+%% POSIX file advises
+-define(POSIX_FADV_NORMAL, 0).
+-define(POSIX_FADV_RANDOM, 1).
+-define(POSIX_FADV_SEQUENTIAL, 2).
+-define(POSIX_FADV_WILLNEED, 3).
+-define(POSIX_FADV_DONTNEED, 4).
+-define(POSIX_FADV_NOREUSE, 5).
+
%% --------------------------------------------------------------------------
%% Generic file contents operations.
%%
@@ -167,6 +177,8 @@ copy(#file_descriptor{module = ?MODULE} = Source,
%% XXX Should be moved down to the driver for optimization.
file:copy_opened(Source, Dest, Length).
+datasync(#file_descriptor{module = ?MODULE, data = Port}) ->
+ call_port(Port, <<?RAM_FILE_FDATASYNC>>).
sync(#file_descriptor{module = ?MODULE, data = Port}) ->
call_port(Port, <<?RAM_FILE_FSYNC>>).
@@ -349,6 +361,28 @@ uudecode(#file_descriptor{module = ?MODULE, data = Port}) ->
uudecode(#file_descriptor{}) ->
{error, enotsup}.
+advise(#file_descriptor{module = ?MODULE, data = Port}, Offset,
+ Length, Advise) ->
+ Cmd0 = <<?RAM_FILE_ADVISE, Offset:64/signed, Length:64/signed>>,
+ case Advise of
+ normal ->
+ call_port(Port, <<Cmd0/binary, ?POSIX_FADV_NORMAL:32/signed>>);
+ random ->
+ call_port(Port, <<Cmd0/binary, ?POSIX_FADV_RANDOM:32/signed>>);
+ sequential ->
+ call_port(Port, <<Cmd0/binary, ?POSIX_FADV_SEQUENTIAL:32/signed>>);
+ will_need ->
+ call_port(Port, <<Cmd0/binary, ?POSIX_FADV_WILLNEED:32/signed>>);
+ dont_need ->
+ call_port(Port, <<Cmd0/binary, ?POSIX_FADV_DONTNEED:32/signed>>);
+ no_reuse ->
+ call_port(Port, <<Cmd0/binary, ?POSIX_FADV_NOREUSE:32/signed>>);
+ _ ->
+ {error, einval}
+ end;
+advise(#file_descriptor{}, _Offset, _Length, _Advise) ->
+ {error, enotsup}.
+
%%%-----------------------------------------------------------------
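advise/4 encodes the offset and length as signed 64-bit integers and maps the advise atom onto the matching POSIX_FADV_* constant before handing the command to the driver; datasync/1 is a plain one-byte command. A hedged sketch of how these operations are typically reached through the file module (file name and sizes are examples only):

    %% Declare sequential access over the whole file (length 0), then make
    %% sure written data (not necessarily metadata) reaches the disk.
    {ok, Fd} = file:open("big.log", [read, write, raw, binary]),
    ok = file:advise(Fd, 0, 0, sequential),
    _ = file:read(Fd, 64 * 1024),
    ok = file:write(Fd, <<"appended\n">>),
    ok = file:datasync(Fd),
    ok = file:close(Fd).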
diff --git a/lib/kernel/test/file_SUITE.erl b/lib/kernel/test/file_SUITE.erl
index 1d170790a3..17c47f871d 100644
--- a/lib/kernel/test/file_SUITE.erl
+++ b/lib/kernel/test/file_SUITE.erl
@@ -52,8 +52,8 @@
old_modes/1, new_modes/1, path_open/1, open_errors/1]).
-export([file_info/1, file_info_basic_file/1, file_info_basic_directory/1,
file_info_bad/1, file_info_times/1, file_write_file_info/1]).
--export([rename/1, access/1, truncate/1, sync/1,
- read_write/1, pread_write/1, append/1]).
+-export([rename/1, access/1, truncate/1, datasync/1, sync/1,
+ read_write/1, pread_write/1, append/1, exclusive/1]).
-export([errors/1, e_delete/1, e_rename/1, e_make_dir/1, e_del_dir/1]).
-export([otp_5814/1]).
@@ -82,6 +82,10 @@
-export([read_line_1/1, read_line_2/1, read_line_3/1,read_line_4/1]).
+-export([advise/1]).
+
+-export([standard_io/1,mini_server/1]).
+
%% Debug exports
-export([create_file_slow/2, create_file/2, create_bin/2]).
-export([verify_file/2, verify_bin/3]).
@@ -101,7 +105,8 @@ all(suite) ->
compression, links, copy,
delayed_write, read_ahead, segment_read, segment_write,
ipread, pid2name, interleaved_read_write,
- otp_5814, large_file, read_line_1, read_line_2, read_line_3, read_line_4],
+ otp_5814, large_file, read_line_1, read_line_2, read_line_3, read_line_4,
+ standard_io],
fini}.
init(Config) when is_list(Config) ->
@@ -170,6 +175,85 @@ time_dist({_D1, _T1} = DT1, {_D2, _T2} = DT2) ->
- calendar:datetime_to_gregorian_seconds(DT1).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+mini_server(Parent) ->
+ receive
+ die ->
+ ok;
+ {io_request,From,To,{put_chars,Data}} ->
+ Parent ! {io_request,From,To,{put_chars,Data}},
+ From ! {io_reply, To, ok},
+ mini_server(Parent);
+ {io_request,From,To,{get_chars,'',N}} ->
+ Parent ! {io_request,From,To,{get_chars,'',N}},
+ From ! {io_reply, To, {ok, lists:duplicate(N,$a)}},
+ mini_server(Parent);
+ {io_request,From,To,{get_line,''}} ->
+ Parent ! {io_request,From,To,{get_line,''}},
+ From ! {io_reply, To, {ok, "hej\n"}},
+ mini_server(Parent)
+ end.
+
+standard_io(suite) ->
+ [];
+standard_io(doc) ->
+ ["Test that standard i/o-servers work with file module"];
+standard_io(Config) when is_list(Config) ->
+ %% Really just a smoke test
+ ?line Pid = spawn(?MODULE,mini_server,[self()]),
+ ?line register(mini_server,Pid),
+ ?line ok = file:write(mini_server,<<"hej\n">>),
+ ?line receive
+ {io_request,_,_,{put_chars,<<"hej\n">>}} ->
+ ok
+ after 1000 ->
+ exit(noreply)
+ end,
+ ?line {ok,"aaaaa"} = file:read(mini_server,5),
+ ?line receive
+ {io_request,_,_,{get_chars,'',5}} ->
+ ok
+ after 1000 ->
+ exit(noreply)
+ end,
+ ?line {ok,"hej\n"} = file:read_line(mini_server),
+ ?line receive
+ {io_request,_,_,{get_line,''}} ->
+ ok
+ after 1000 ->
+ exit(noreply)
+ end,
+ ?line OldGL = group_leader(),
+ ?line group_leader(Pid,self()),
+ ?line ok = file:write(standard_io,<<"hej\n">>),
+ ?line group_leader(OldGL,self()),
+ ?line receive
+ {io_request,_,_,{put_chars,<<"hej\n">>}} ->
+ ok
+ after 1000 ->
+ exit(noreply)
+ end,
+ ?line group_leader(Pid,self()),
+ ?line {ok,"aaaaa"} = file:read(standard_io,5),
+ ?line group_leader(OldGL,self()),
+ ?line receive
+ {io_request,_,_,{get_chars,'',5}} ->
+ ok
+ after 1000 ->
+ exit(noreply)
+ end,
+ ?line group_leader(Pid,self()),
+ ?line {ok,"hej\n"} = file:read_line(standard_io),
+ ?line group_leader(OldGL,self()),
+ ?line receive
+ {io_request,_,_,{get_line,''}} ->
+ ok
+ after 1000 ->
+ exit(noreply)
+ end,
+ Pid ! die,
+ receive after 1000 -> ok end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
read_write_file(suite) -> [];
read_write_file(doc) -> [];
@@ -377,10 +461,12 @@ win_cur_dir_1(_Config) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-files(suite) -> [open,pos,file_info,consult,eval,script,truncate,sync].
+files(suite) ->
+ [open,pos,file_info,consult,eval,script,truncate,
+ sync,datasync,advise].
open(suite) -> [open1,old_modes,new_modes,path_open,close,access,read_write,
- pread_write,append,open_errors].
+ pread_write,append,open_errors,exclusive].
open1(suite) -> [];
open1(doc) -> [];
@@ -754,6 +840,22 @@ open_errors(Config) when is_list(Config) ->
?line test_server:timetrap_cancel(Dog),
ok.
+exclusive(suite) -> [];
+exclusive(doc) -> "Test exclusive access to a file.";
+exclusive(Config) when is_list(Config) ->
+ ?line Dog = test_server:timetrap(test_server:seconds(5)),
+ ?line RootDir = ?config(priv_dir,Config),
+ ?line NewDir = filename:join(RootDir,
+ atom_to_list(?MODULE)
+ ++"_exclusive"),
+ ?line ok = ?FILE_MODULE:make_dir(NewDir),
+ ?line Name = filename:join(NewDir, "ex_file.txt"),
+ ?line {ok, Fd} = ?FILE_MODULE:open(Name, [write, exclusive]),
+ ?line {error, eexist} = ?FILE_MODULE:open(Name, [write, exclusive]),
+ ?line ok = ?FILE_MODULE:close(Fd),
+ ?line test_server:timetrap_cancel(Dog),
+ ok.
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
pos(suite) -> [pos1,pos2].
@@ -1355,6 +1457,30 @@ truncate(Config) when is_list(Config) ->
ok.
+datasync(suite) -> [];
+datasync(doc) -> "Tests that ?FILE_MODULE:datasync/1 at least doesn't crash.";
+datasync(Config) when is_list(Config) ->
+ ?line Dog = test_server:timetrap(test_server:seconds(5)),
+ ?line PrivDir = ?config(priv_dir, Config),
+ ?line Sync = filename:join(PrivDir,
+ atom_to_list(?MODULE)
+ ++"_sync.fil"),
+
+ %% Raw open.
+ ?line {ok, Fd} = ?FILE_MODULE:open(Sync, [write, raw]),
+ ?line ok = ?FILE_MODULE:datasync(Fd),
+ ?line ok = ?FILE_MODULE:close(Fd),
+
+ %% Ordinary open.
+ ?line {ok, Fd2} = ?FILE_MODULE:open(Sync, [write]),
+ ?line ok = ?FILE_MODULE:datasync(Fd2),
+ ?line ok = ?FILE_MODULE:close(Fd2),
+
+ ?line [] = flush(),
+ ?line test_server:timetrap_cancel(Dog),
+ ok.
+
+
sync(suite) -> [];
sync(doc) -> "Tests that ?FILE_MODULE:sync/1 at least doesn't crash.";
sync(Config) when is_list(Config) ->
@@ -1378,6 +1504,77 @@ sync(Config) when is_list(Config) ->
?line test_server:timetrap_cancel(Dog),
ok.
+advise(suite) -> [];
+advise(doc) -> "Tests that ?FILE_MODULE:advise/4 at least doesn't crash.";
+advise(Config) when is_list(Config) ->
+ ?line Dog = test_server:timetrap(test_server:seconds(5)),
+ ?line PrivDir = ?config(priv_dir, Config),
+ ?line Advise = filename:join(PrivDir,
+ atom_to_list(?MODULE)
+ ++"_advise.fil"),
+
+ Line1 = "Hello\n",
+ Line2 = "World!\n",
+
+ ?line {ok, Fd} = ?FILE_MODULE:open(Advise, [write]),
+ ?line ok = ?FILE_MODULE:advise(Fd, 0, 0, normal),
+ ?line ok = io:format(Fd, "~s", [Line1]),
+ ?line ok = io:format(Fd, "~s", [Line2]),
+ ?line ok = ?FILE_MODULE:close(Fd),
+
+ ?line {ok, Fd2} = ?FILE_MODULE:open(Advise, [write]),
+ ?line ok = ?FILE_MODULE:advise(Fd2, 0, 0, random),
+ ?line ok = io:format(Fd2, "~s", [Line1]),
+ ?line ok = io:format(Fd2, "~s", [Line2]),
+ ?line ok = ?FILE_MODULE:close(Fd2),
+
+ ?line {ok, Fd3} = ?FILE_MODULE:open(Advise, [write]),
+ ?line ok = ?FILE_MODULE:advise(Fd3, 0, 0, sequential),
+ ?line ok = io:format(Fd3, "~s", [Line1]),
+ ?line ok = io:format(Fd3, "~s", [Line2]),
+ ?line ok = ?FILE_MODULE:close(Fd3),
+
+ ?line {ok, Fd4} = ?FILE_MODULE:open(Advise, [write]),
+ ?line ok = ?FILE_MODULE:advise(Fd4, 0, 0, will_need),
+ ?line ok = io:format(Fd4, "~s", [Line1]),
+ ?line ok = io:format(Fd4, "~s", [Line2]),
+ ?line ok = ?FILE_MODULE:close(Fd4),
+
+ ?line {ok, Fd5} = ?FILE_MODULE:open(Advise, [write]),
+ ?line ok = ?FILE_MODULE:advise(Fd5, 0, 0, dont_need),
+ ?line ok = io:format(Fd5, "~s", [Line1]),
+ ?line ok = io:format(Fd5, "~s", [Line2]),
+ ?line ok = ?FILE_MODULE:close(Fd5),
+
+ ?line {ok, Fd6} = ?FILE_MODULE:open(Advise, [write]),
+ ?line ok = ?FILE_MODULE:advise(Fd6, 0, 0, no_reuse),
+ ?line ok = io:format(Fd6, "~s", [Line1]),
+ ?line ok = io:format(Fd6, "~s", [Line2]),
+ ?line ok = ?FILE_MODULE:close(Fd6),
+
+ ?line {ok, Fd7} = ?FILE_MODULE:open(Advise, [write]),
+ ?line {error, einval} = ?FILE_MODULE:advise(Fd7, 0, 0, bad_advise),
+ ?line ok = ?FILE_MODULE:close(Fd7),
+
+ %% test write without advise, then a read after an advise
+ ?line {ok, Fd8} = ?FILE_MODULE:open(Advise, [write]),
+ ?line ok = io:format(Fd8, "~s", [Line1]),
+ ?line ok = io:format(Fd8, "~s", [Line2]),
+ ?line ok = ?FILE_MODULE:close(Fd8),
+ ?line {ok, Fd9} = ?FILE_MODULE:open(Advise, [read]),
+ Offset = 0,
+ %% same as a 0 length in some implementations
+ Length = length(Line1) + length(Line2),
+ ?line ok = ?FILE_MODULE:advise(Fd9, Offset, Length, sequential),
+ ?line {ok, Line1} = ?FILE_MODULE:read_line(Fd9),
+ ?line {ok, Line2} = ?FILE_MODULE:read_line(Fd9),
+ ?line eof = ?FILE_MODULE:read_line(Fd9),
+ ?line ok = ?FILE_MODULE:close(Fd9),
+
+ ?line [] = flush(),
+ ?line test_server:timetrap_cancel(Dog),
+ ok.
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
diff --git a/lib/kernel/test/os_SUITE.erl b/lib/kernel/test/os_SUITE.erl
index 6a3534b094..ace9501d18 100644
--- a/lib/kernel/test/os_SUITE.erl
+++ b/lib/kernel/test/os_SUITE.erl
@@ -137,6 +137,13 @@ find_executable(Config) when is_list(Config) ->
?line find_exe(Abin, "my_ar", ".exe", Path),
?line find_exe(Abin, "my_ascii", ".com", Path),
?line find_exe(Abin, "my_adb", ".bat", Path),
+ %% OTP-3626 find names of executables given with extension
+ ?line find_exe(Abin, "my_ar.exe", "", Path),
+ ?line find_exe(Abin, "my_ascii.com", "", Path),
+ ?line find_exe(Abin, "my_adb.bat", "", Path),
+ ?line find_exe(Abin, "my_ar.EXE", "", Path),
+ ?line find_exe(Abin, "my_ascii.COM", "", Path),
+ ?line find_exe(Abin, "MY_ADB.BAT", "", Path),
%% Search for programs in Abin (second element in PATH).
?line find_exe(Abin, "my_ar", ".exe", Path),
diff --git a/lib/kernel/test/pg2_SUITE.erl b/lib/kernel/test/pg2_SUITE.erl
index 8eb1a7ca19..df28dcf447 100644
--- a/lib/kernel/test/pg2_SUITE.erl
+++ b/lib/kernel/test/pg2_SUITE.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2008-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%----------------------------------------------------------------
%% Purpose:Test Suite for the 'pg2' module.
@@ -26,8 +26,8 @@
-export([all/1, init_per_testcase/2, fin_per_testcase/2]).
--export([tickets/1,
- otp_7277/1, otp_8259/1,
+-export([tickets/1,
+ otp_7277/1, otp_8259/1, otp_8653/1,
compat/1, basic/1]).
% Default timetrap timeout (set in init_per_testcase).
@@ -37,7 +37,8 @@
-define(testcase, ?config(?TESTCASE, Config)).
%% Internal export.
--export([mk_part_node/3, part1/5, p_init/3, start_proc/1, sane/0]).
+-export([mk_part_node_and_group/3, part2/4,
+ mk_part_node/3, part1/5, p_init/3, start_proc/1, sane/0]).
init_per_testcase(Case, Config) ->
?line Dog = ?t:timetrap(?default_timeout),
@@ -48,11 +49,11 @@ fin_per_testcase(_Case, _Config) ->
test_server:timetrap_cancel(Dog),
ok.
-all(suite) ->
+all(suite) ->
[tickets].
tickets(suite) ->
- [otp_7277, otp_8259, compat, basic].
+ [otp_7277, otp_8259, otp_8653, compat, basic].
otp_7277(doc) ->
"OTP-7277. Bugfix leave().";
@@ -65,9 +66,9 @@ otp_7277(Config) when is_list(Config) ->
?line ok = pg2:leave(b, P),
?line true = exit(P, kill),
case {pg2:get_members(a), pg2:get_local_members(a)} of
- {[], []} ->
+ {[], []} ->
ok;
- _ ->
+ _ ->
timer:sleep(100),
?line [] = pg2:get_members(a),
?line [] = pg2:get_local_members(a)
@@ -79,6 +80,63 @@ otp_7277(Config) when is_list(Config) ->
-define(UNTIL(Seq), loop_until_true(fun() -> Seq end, Config)).
-define(UNTIL_LOOP, 300).
+otp_8653(suite) -> [];
+otp_8653(doc) ->
+ ["OTP-8259. Member was not removed after being killed."];
+otp_8653(Config) when is_list(Config) ->
+ Timeout = 15,
+ ?line Dog = test_server:timetrap({seconds,Timeout}),
+
+ ?line [A, B, C] = start_nodes([a, b, c], peer, Config),
+
+ ?line wait_for_ready_net(Config),
+
+ % make b and c connected, partitioned from node() and a
+ ?line rpc_cast(B, ?MODULE, part2, [Config, node(), A, C]),
+ ?line ?UNTIL(is_ready_partition(Config)),
+
+ % Connect to the other partition.
+ ?line pong = net_adm:ping(B),
+ timer:sleep(100),
+ ?line pong = net_adm:ping(C),
+ ?line _ = global:sync(),
+ ?line [A, B, C] = lists:sort(nodes()),
+
+ G = pg2_otp_8653,
+ ?line ?UNTIL(begin
+ GA = lists:sort(rpc:call(A, pg2, get_members, [G])),
+ GB = lists:sort(rpc:call(B, pg2, get_members, [G])),
+ GC = lists:sort(rpc:call(C, pg2, get_members, [G])),
+ GT = lists:sort(pg2:get_members(G)),
+ GA =:= GB andalso
+ GB =:= GC andalso
+ GC =:= GT andalso
+ 8 =:= length(GA)
+ end),
+ ?line ok = pg2:delete(G),
+ ?line stop_nodes([A,B,C]),
+ ?line test_server:timetrap_cancel(Dog),
+ ok.
+
+part2(Config, Main, A, C) ->
+ Function = mk_part_node_and_group,
+ case catch begin
+ make_partition(Config, [Main, A], [node(), C], Function)
+ end
+ of
+ ok -> ok
+ end.
+
+mk_part_node_and_group(File, MyPart0, Config) ->
+ touch(File, "start"), % debug
+ MyPart = lists:sort(MyPart0),
+ ?UNTIL(is_node_in_part(File, MyPart)),
+ G = pg2_otp_8653,
+ Pid = spawn(forever()),
+ ok = pg2:create(G),
+ _ = [ok = pg2:join(G, Pid) || _ <- [1,1]],
+ touch(File, "done").
+
otp_8259(suite) -> [];
otp_8259(doc) ->
["OTP-8259. Member was not removed after being killed."];
@@ -102,7 +160,7 @@ otp_8259(Config) when is_list(Config) ->
% make b and c connected, partitioned from node() and a
?line rpc_cast(B, ?MODULE, part1, [Config, node(), A, C, Name]),
?line ?UNTIL(is_ready_partition(Config)),
-
+
% Connect to the other partition.
% The resolver on node b will be called.
?line pong = net_adm:ping(B),
@@ -140,9 +198,9 @@ start_proc(Name) ->
p_init(Parent, Name, TestServer) ->
Resolve = fun(_Name, Pid1, Pid2) ->
%% The pid on node a will be chosen.
- [{_,Min}, {_,Max}] =
+ [{_,Min}, {_,Max}] =
lists:sort([{node(Pid1),Pid1}, {node(Pid2),Pid2}]),
- %% b is connected to test_server.
+ %% b is connected to test_server.
%% exit(Min, kill), % would ping a
rpc:cast(TestServer, erlang, exit, [Min, kill]),
Max
@@ -165,7 +223,7 @@ compat(Config) when is_list(Config) ->
true ->
Timeout = 15,
?line Dog = test_server:timetrap({seconds,Timeout}),
- Pid = spawn(forever()),
+ Pid = spawn(forever()),
G = a,
?line ok = pg2:create(G),
?line ok = pg2:join(G, Pid),
@@ -365,7 +423,7 @@ killit(N, P, Ps, Ns) ->
timer:sleep(100),
sane(Ns),
lists:keydelete(P, 1, Ps).
-
+
pr(Node, C) ->
_ = [?t:format("~p: ", [Node]) || Node =/= node()],
?t:format("do ~p~n", [C]).
@@ -412,27 +470,27 @@ sane(Ns) ->
wsane(Ns) ->
%% Same members on all nodes:
- {[_],gs} =
+ {[_],gs} =
{lists:usort([rpc:call(N, pg2, which_groups, []) || N <- Ns]),gs},
- _ = [{[_],ms,G} = {lists:usort([rpc:call(N, pg2, get_members, [G]) ||
+ _ = [{[_],ms,G} = {lists:usort([rpc:call(N, pg2, get_members, [G]) ||
N <- Ns]),ms,G} ||
G <- pg2:which_groups()],
%% The local members are a partitioning of the members:
- [begin
- LocalMembers =
+ [begin
+ LocalMembers =
lists:sort(lists:append(
- [rpc:call(N, pg2, get_local_members, [G]) ||
+ [rpc:call(N, pg2, get_local_members, [G]) ||
N <- Ns])),
{part, LocalMembers} = {part, lists:sort(pg2:get_members(G))}
end || G <- pg2:which_groups()],
%% The closest pid should run on the local node, if possible.
[[case rpc:call(N, pg2, get_closest_pid, [G]) of
Pid when is_pid(Pid), node(Pid) =:= N ->
- true =
+ true =
lists:member(Pid, rpc:call(N, pg2, get_local_members, [G]));
%% FIXME. Om annan nod: member, local = [].
_ -> [] = rpc:call(N, pg2, get_local_members, [G])
- end || N <- Ns]
+ end || N <- Ns]
|| G <- pg2:which_groups()].
%% Look inside the pg2_table.
@@ -482,9 +540,9 @@ start_node_rel(Name, Rel, How) ->
{RelList, ""}
end,
?line Pa = filename:dirname(code:which(?MODULE)),
- ?line Res = test_server:start_node(Name, How,
+ ?line Res = test_server:start_node(Name, How,
[{args,
- Compat ++
+ Compat ++
" -kernel net_setuptime 100 "
" -pa " ++ Pa},
{erl, Release}]),
@@ -575,29 +633,30 @@ get_known(Node) ->
case catch gen_server:call({global_name_server,Node},get_known,infinity) of
{'EXIT', _} ->
[list, without, nodenames];
- Known when is_list(Known) ->
+ Known when is_list(Known) ->
lists:sort([Node | Known])
end.
node_name(Name, Config) ->
U = "_",
{{Y,M,D}, {H,Min,S}} = calendar:now_to_local_time(now()),
- Date = io_lib:format("~4w_~2..0w_~2..0w__~2..0w_~2..0w_~2..0w",
+ Date = io_lib:format("~4w_~2..0w_~2..0w__~2..0w_~2..0w_~2..0w",
[Y,M,D, H,Min,S]),
L = lists:flatten(Date),
lists:concat([Name,U,?testcase,U,U,L]).
-%% this one runs on one node in Part2
-%% The partition is ready when is_ready_partition(Config) returns (true).
-%% this one runs on one node in Part2
+%% This one runs on one node in Part2.
%% The partition is ready when is_ready_partition(Config) returns (true).
make_partition(Config, Part1, Part2) ->
+ make_partition(Config, Part1, Part2, mk_part_node).
+
+make_partition(Config, Part1, Part2, Function) ->
Dir = ?config(priv_dir, Config),
- Ns = [begin
+ Ns = [begin
Name = lists:concat([atom_to_list(N),"_",msec(),".part"]),
File = filename:join([Dir, Name]),
file:delete(File),
- rpc_cast(N, ?MODULE, mk_part_node, [File, Part, Config], File),
+ rpc_cast(N, ?MODULE, Function, [File, Part, Config], File),
{N, File}
end || Part <- [Part1, Part2], N <- Part],
all_nodes_files(Ns, "done", Config),
@@ -614,10 +673,10 @@ mk_part_node(File, MyPart0, Config) ->
%% The calls to append_to_file are for debugging.
is_node_in_part(File, MyPart) ->
- lists:foreach(fun(N) ->
+ lists:foreach(fun(N) ->
_ = erlang:disconnect_node(N)
end, nodes() -- MyPart),
- case {(Known = get_known(node())) =:= MyPart,
+ case {(Known = get_known(node())) =:= MyPart,
(Nodes = lists:sort([node() | nodes()])) =:= MyPart} of
{true, true} ->
%% Make sure the resolvers have been terminated,
@@ -649,7 +708,7 @@ wait_for_ready_net(Nodes0, Config) ->
?t:format("wait_for_ready_net ~p~n", [Nodes]),
?UNTIL(begin
lists:all(fun(N) -> Nodes =:= get_known(N) end, Nodes) and
- lists:all(fun(N) ->
+ lists:all(fun(N) ->
LNs = rpc:call(N, erlang, nodes, []),
Nodes =:= lists:sort([N | LNs])
end, Nodes)
@@ -688,11 +747,11 @@ file_contents(File, ContentsList, Config) ->
file_contents(File, ContentsList, Config, no_log_file).
file_contents(File, ContentsList, Config, LogFile) ->
- Contents = list_to_binary(ContentsList),
+ Contents = list_to_binary(ContentsList),
Sz = size(Contents),
?UNTIL(begin
case file:read_file(File) of
- {ok, FileContents}=Reply ->
+ {ok, FileContents}=Reply ->
case catch split_binary(FileContents, Sz) of
{Contents,_} ->
true;
diff --git a/lib/kernel/test/prim_file_SUITE.erl b/lib/kernel/test/prim_file_SUITE.erl
index 6badbb5090..1688ec45ca 100644
--- a/lib/kernel/test/prim_file_SUITE.erl
+++ b/lib/kernel/test/prim_file_SUITE.erl
@@ -34,8 +34,8 @@
file_info_times_a/1, file_info_times_b/1,
file_write_file_info_a/1, file_write_file_info_b/1]).
-export([rename_a/1, rename_b/1,
- access/1, truncate/1, sync/1,
- read_write/1, pread_write/1, append/1]).
+ access/1, truncate/1, datasync/1, sync/1,
+ read_write/1, pread_write/1, append/1, exclusive/1]).
-export([errors/1, e_delete/1, e_rename/1, e_make_dir/1, e_del_dir/1]).
-export([compression/1, read_not_really_compressed/1,
@@ -48,6 +48,8 @@
symlinks_a/1, symlinks_b/1,
list_dir_limit/1]).
+-export([advise/1]).
+
-include("test_server.hrl").
-include_lib("kernel/include/file.hrl").
@@ -380,10 +382,10 @@ win_cur_dir_1(_Config, Handle) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-files(suite) -> [open,pos,file_info,truncate,sync].
+files(suite) -> [open,pos,file_info,truncate,sync,datasync,advise].
open(suite) -> [open1,modes,close,access,read_write,
- pread_write,append].
+ pread_write,append,exclusive].
open1(suite) -> [];
open1(doc) -> [];
@@ -608,6 +610,22 @@ append(Config) when is_list(Config) ->
?line test_server:timetrap_cancel(Dog),
ok.
+exclusive(suite) -> [];
+exclusive(doc) -> "Test exclusive access to a file.";
+exclusive(Config) when is_list(Config) ->
+ ?line Dog = test_server:timetrap(test_server:seconds(5)),
+ ?line RootDir = ?config(priv_dir,Config),
+ ?line NewDir = filename:join(RootDir,
+ atom_to_list(?MODULE)
+ ++"_exclusive"),
+ ?line ok = ?PRIM_FILE:make_dir(NewDir),
+ ?line Name = filename:join(NewDir, "ex_file.txt"),
+ ?line {ok,Fd} = ?PRIM_FILE:open(Name, [write, exclusive]),
+ ?line {error, eexist} = ?PRIM_FILE:open(Name, [write, exclusive]),
+ ?line ok = ?PRIM_FILE:close(Fd),
+ ?line test_server:timetrap_cancel(Dog),
+ ok.
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
pos(suite) -> [pos1,pos2].
@@ -1064,6 +1082,24 @@ truncate(Config) when is_list(Config) ->
ok.
+datasync(suite) -> [];
+datasync(doc) -> "Tests that ?PRIM_FILE:datasync/1 at least doesn't crash.";
+datasync(Config) when is_list(Config) ->
+ ?line Dog = test_server:timetrap(test_server:seconds(5)),
+ ?line PrivDir = ?config(priv_dir, Config),
+ ?line Sync = filename:join(PrivDir,
+ atom_to_list(?MODULE)
+ ++"_sync.fil"),
+
+ %% Raw open.
+ ?line {ok, Fd} = ?PRIM_FILE:open(Sync, [write]),
+ ?line ok = ?PRIM_FILE:datasync(Fd),
+ ?line ok = ?PRIM_FILE:close(Fd),
+
+ ?line test_server:timetrap_cancel(Dog),
+ ok.
+
+
sync(suite) -> [];
sync(doc) -> "Tests that ?PRIM_FILE:sync/1 at least doesn't crash.";
sync(Config) when is_list(Config) ->
@@ -1082,6 +1118,77 @@ sync(Config) when is_list(Config) ->
ok.
+advise(suite) -> [];
+advise(doc) -> "Tests that ?PRIM_FILE:advise/4 at least doesn't crash.";
+advise(Config) when is_list(Config) ->
+ ?line Dog = test_server:timetrap(test_server:seconds(5)),
+ ?line PrivDir = ?config(priv_dir, Config),
+ ?line Advise = filename:join(PrivDir,
+ atom_to_list(?MODULE)
+ ++"_advise.fil"),
+
+ Line1 = "Hello\n",
+ Line2 = "World!\n",
+
+ ?line {ok, Fd} = ?PRIM_FILE:open(Advise, [write]),
+ ?line ok = ?PRIM_FILE:advise(Fd, 0, 0, normal),
+ ?line ok = ?PRIM_FILE:write(Fd, Line1),
+ ?line ok = ?PRIM_FILE:write(Fd, Line2),
+ ?line ok = ?PRIM_FILE:close(Fd),
+
+ ?line {ok, Fd2} = ?PRIM_FILE:open(Advise, [write]),
+ ?line ok = ?PRIM_FILE:advise(Fd2, 0, 0, random),
+ ?line ok = ?PRIM_FILE:write(Fd2, Line1),
+ ?line ok = ?PRIM_FILE:write(Fd2, Line2),
+ ?line ok = ?PRIM_FILE:close(Fd2),
+
+ ?line {ok, Fd3} = ?PRIM_FILE:open(Advise, [write]),
+ ?line ok = ?PRIM_FILE:advise(Fd3, 0, 0, sequential),
+ ?line ok = ?PRIM_FILE:write(Fd3, Line1),
+ ?line ok = ?PRIM_FILE:write(Fd3, Line2),
+ ?line ok = ?PRIM_FILE:close(Fd3),
+
+ ?line {ok, Fd4} = ?PRIM_FILE:open(Advise, [write]),
+ ?line ok = ?PRIM_FILE:advise(Fd4, 0, 0, will_need),
+ ?line ok = ?PRIM_FILE:write(Fd4, Line1),
+ ?line ok = ?PRIM_FILE:write(Fd4, Line2),
+ ?line ok = ?PRIM_FILE:close(Fd4),
+
+ ?line {ok, Fd5} = ?PRIM_FILE:open(Advise, [write]),
+ ?line ok = ?PRIM_FILE:advise(Fd5, 0, 0, dont_need),
+ ?line ok = ?PRIM_FILE:write(Fd5, Line1),
+ ?line ok = ?PRIM_FILE:write(Fd5, Line2),
+ ?line ok = ?PRIM_FILE:close(Fd5),
+
+ ?line {ok, Fd6} = ?PRIM_FILE:open(Advise, [write]),
+ ?line ok = ?PRIM_FILE:advise(Fd6, 0, 0, no_reuse),
+ ?line ok = ?PRIM_FILE:write(Fd6, Line1),
+ ?line ok = ?PRIM_FILE:write(Fd6, Line2),
+ ?line ok = ?PRIM_FILE:close(Fd6),
+
+ ?line {ok, Fd7} = ?PRIM_FILE:open(Advise, [write]),
+ ?line {error, einval} = ?PRIM_FILE:advise(Fd7, 0, 0, bad_advise),
+ ?line ok = ?PRIM_FILE:close(Fd7),
+
+ %% test write without advise, then a read after an advise
+ ?line {ok, Fd8} = ?PRIM_FILE:open(Advise, [write]),
+ ?line ok = ?PRIM_FILE:write(Fd8, Line1),
+ ?line ok = ?PRIM_FILE:write(Fd8, Line2),
+ ?line ok = ?PRIM_FILE:close(Fd8),
+ ?line {ok, Fd9} = ?PRIM_FILE:open(Advise, [read]),
+ Offset = 0,
+ %% same as a 0 length in some implementations
+ Length = length(Line1) + length(Line2),
+ ?line ok = ?PRIM_FILE:advise(Fd9, Offset, Length, sequential),
+ ?line {ok, Line1} = ?PRIM_FILE:read_line(Fd9),
+ ?line {ok, Line2} = ?PRIM_FILE:read_line(Fd9),
+ ?line eof = ?PRIM_FILE:read_line(Fd9),
+ ?line ok = ?PRIM_FILE:close(Fd9),
+
+ ?line test_server:timetrap_cancel(Dog),
+ ok.
+
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
delete_a(suite) -> [];
diff --git a/lib/megaco/doc/src/Makefile b/lib/megaco/doc/src/Makefile
index 2355a1b8b9..4b3c117b20 100644
--- a/lib/megaco/doc/src/Makefile
+++ b/lib/megaco/doc/src/Makefile
@@ -125,6 +125,12 @@ DVIPS_FLAGS +=
$(HTMLDIR)/%.gif: %.gif
$(INSTALL_DATA) $< $@
+$(HTMLDIR)/%.jpg: %.jpg
+ $(INSTALL_DATA) $< $@
+
+$(HTMLDIR)/%.png: %.png
+ $(INSTALL_DATA) $< $@
+
ifdef DOCSUPPORT
docs: pdf html man
@@ -135,7 +141,7 @@ $(TOP_PDF_FILE): $(XML_FILES)
pdf: $(TOP_PDF_FILE)
-html: gifs $(HTML_REF_MAN_FILE)
+html: imgs $(HTML_REF_MAN_FILE)
clean clean_docs: clean_html clean_man
rm -f $(TOP_PDF_FILE) $(TOP_PDF_FILE:%.pdf=%.fo)
@@ -149,7 +155,7 @@ else
ifeq ($(DOCTYPE),ps)
docs: ps
else
-docs: html gifs man
+docs: html imgs man
endif
endif
@@ -157,7 +163,7 @@ pdf: $(TOP_PDF_FILE)
ps: $(TOP_PS_FILE)
-html: gifs $(HTML_FILES) $(TOP_HTML_FILES)
+html: imgs $(HTML_FILES) $(TOP_HTML_FILES)
mhtml: html $(HTML_REF3_FILES) $(HTML_CHAPTER_FILES)
@@ -182,7 +188,7 @@ clean_man:
clean_html:
rm -rf $(HTMLDIR)/*
-gifs: $(GIF_FILES:%=$(HTMLDIR)/%)
+imgs: $(IMG_FILES:%=$(HTMLDIR)/%)
man: $(MAN3_FILES)
@@ -208,7 +214,7 @@ info:
@echo "XML_REF3_FILES = $(XML_REF3_FILES)"
@echo "XML_CHAPTER_FILES = $(XML_CHAPTER_FILES)"
@echo ""
- @echo "GIF_FILES = $(GIF_FILES)"
+ @echo "IMG_FILES = $(IMG_FILES)"
@echo ""
@echo "TEX_FILES_USERS_GUIDE = $(TEX_FILES_USERS_GUIDE)"
@echo "TEX_FILES_REF_MAN = $(TEX_FILES_REF_MAN)"
@@ -258,7 +264,7 @@ release_docs_spec: ps
else
release_docs_spec: docs
$(INSTALL_DIR) $(RELSYSDIR)/doc/html
- $(INSTALL_DATA) $(GIF_FILES) $(EXTRA_FILES) $(HTML_FILES) \
+ $(INSTALL_DATA) $(IMG_FILES) $(EXTRA_FILES) $(HTML_FILES) \
$(RELSYSDIR)/doc/html
$(INSTALL_DATA) $(INFO_FILE) $(RELSYSDIR)
$(INSTALL_DIR) $(RELEASE_PATH)/man/man3
diff --git a/lib/megaco/doc/src/files.mk b/lib/megaco/doc/src/files.mk
index debc5d278d..efacb7e422 100644
--- a/lib/megaco/doc/src/files.mk
+++ b/lib/megaco/doc/src/files.mk
@@ -1,20 +1,20 @@
#-*-makefile-*- ; force emacs to enter makefile-mode
# %CopyrightBegin%
-#
-# Copyright Ericsson AB 2001-2009. All Rights Reserved.
-#
+#
+# Copyright Ericsson AB 2001-2010. All Rights Reserved.
+#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
# compliance with the License. You should have received a copy of the
# Erlang Public License along with this software. If not, it can be
# retrieved online at http://www.erlang.org/.
-#
+#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
# the License for the specific language governing rights and limitations
# under the License.
-#
+#
# %CopyrightEnd%
XML_APPLICATION_FILES = \
@@ -56,7 +56,7 @@ XML_CHAPTER_FILES = \
BOOK_FILES = book.xml
-GIF_FILES = \
+IMG_FILES = \
single_node_config.gif \
distr_node_config.gif \
megaco_sys_arch.gif \
@@ -70,4 +70,4 @@ GIF_FILES = \
MG_startup_call_flow.gif \
call_flow.gif \
call_flow_cont.gif \
- mstone1.gif
+ mstone1.jpg
diff --git a/lib/megaco/doc/src/megaco.xml b/lib/megaco/doc/src/megaco.xml
index 0fb9d5aac6..ae9e250965 100644
--- a/lib/megaco/doc/src/megaco.xml
+++ b/lib/megaco/doc/src/megaco.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>2000</year><year>2009</year>
+ <year>2000</year><year>2010</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -13,12 +13,12 @@
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at http://www.erlang.org/.
-
+
Software distributed under the License is distributed on an "AS IS"
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
-
+
</legalnotice>
<title>megaco</title>
@@ -40,6 +40,16 @@
<section>
<title>DATA TYPES</title>
<code type="none"><![CDATA[
+megaco_mid() = ip4Address() | ip6Address() |
+ domainName() | deviceName() |
+ mtpAddress()
+ip4Address() = #'IP4Address'{}
+ip6Address() = #'IP6Address'{}
+domainName() = #'DomainName'{}
+deviceName() = pathName()
+pathName() = ia5String(1..64)
+mtpAddress() = octetString(2..4)
+
action_request() = #'ActionRequest'{}
action_reply() = #'ActionReply'{}
error_desc() = #'ErrorDescriptor'{}
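The megaco_mid() entry added above documents the message identifier forms accepted by the API. A hedged sketch of typical MID terms (the names are made up; the record-based variants need the megaco message header file):

    %% Simplest forms: a deviceName() is just a pathName() string.
    MgMid  = {deviceName, "gateway_mg"},
    MgcMid = {deviceName, "controller_mgc"},
    %% ip4Address(), ip6Address(), domainName() and mtpAddress() wrap the
    %% corresponding ASN.1 records, e.g. #'DomainName'{} from
    %% megaco/include/megaco_message_v1.hrl.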
diff --git a/lib/megaco/doc/src/megaco_performance.xml b/lib/megaco/doc/src/megaco_performance.xml
index 72b5c156ba..eb3d852a19 100644
--- a/lib/megaco/doc/src/megaco_performance.xml
+++ b/lib/megaco/doc/src/megaco_performance.xml
@@ -50,9 +50,15 @@
of these configurations for each codec. The figures presented are
the average of all used messages.</p>
- <p>For comparison, also included are performance figures
- where the flex driver was built as <c>non-reentrant</c> flex
- (figures within parenthesis). </p>
+    <p>For comparison, two additional sets of figures are included.
+    The second figure in each time column was obtained with the megaco
+    (including the measurement software) and asn1 applications
+    hipe-compiled (per bin decode had some issues, so those figures
+    are not included). The third figure in each time column was obtained
+    with the flex driver built as <c>non-reentrant</c> flex and is only
+    valid for the text codecs that use the flex scanner. </p>
<table>
<row>
@@ -67,122 +73,122 @@
<row>
<cell align="left" valign="middle">pretty</cell>
<cell align="right" valign="middle">336</cell>
- <cell align="right" valign="middle">22</cell>
- <cell align="right" valign="middle">76</cell>
- <cell align="right" valign="middle">98</cell>
+ <cell align="right" valign="middle">20 / 13</cell>
+ <cell align="right" valign="middle">75 / 40</cell>
+ <cell align="right" valign="middle">95 / 53</cell>
</row>
<row>
<cell align="left" valign="middle">pretty [flex]</cell>
<cell align="right" valign="middle">336</cell>
- <cell align="right" valign="middle">22 (22)</cell>
- <cell align="right" valign="middle">41 (40)</cell>
- <cell align="right" valign="middle">63 (62)</cell>
+ <cell align="right" valign="middle">20 / 13 / 20</cell>
+ <cell align="right" valign="middle">39 / 33 / 38</cell>
+ <cell align="right" valign="middle">59 / 46 / 58</cell>
</row>
<!-- COMPACT -->
<row>
<cell align="left" valign="middle">compact</cell>
<cell align="right" valign="middle">181</cell>
- <cell align="right" valign="middle">19</cell>
- <cell align="right" valign="middle">63</cell>
- <cell align="right" valign="middle">82</cell>
+ <cell align="right" valign="middle">17 / 10</cell>
+ <cell align="right" valign="middle">62 / 35</cell>
+ <cell align="right" valign="middle">79 / 45</cell>
</row>
<row>
<cell align="left" valign="middle">compact [flex]</cell>
<cell align="right" valign="middle">181</cell>
- <cell align="right" valign="middle">19 (19)</cell>
- <cell align="right" valign="middle">38 (36)</cell>
- <cell align="right" valign="middle">57 (55)</cell>
+ <cell align="right" valign="middle">17 / 10 / 17</cell>
+ <cell align="right" valign="middle">37 / 31 / 36</cell>
+ <cell align="right" valign="middle">54 / 41 / 53</cell>
</row>
<!-- PER -->
<row>
<cell align="left" valign="middle">per bin</cell>
<cell align="right" valign="middle">91</cell>
- <cell align="right" valign="middle">63</cell>
- <cell align="right" valign="middle">69</cell>
- <cell align="right" valign="middle">132</cell>
+ <cell align="right" valign="middle">60 / 29</cell>
+ <cell align="right" valign="middle">64 / -</cell>
+ <cell align="right" valign="middle">124 / -</cell>
</row>
<row>
<cell align="left" valign="middle">per bin [driver]</cell>
<cell align="right" valign="middle">91</cell>
- <cell align="right" valign="middle">43</cell>
- <cell align="right" valign="middle">45</cell>
- <cell align="right" valign="middle">88</cell>
+ <cell align="right" valign="middle">39 / 24</cell>
+ <cell align="right" valign="middle">42 / 26</cell>
+ <cell align="right" valign="middle">81 / 50</cell>
</row>
<row>
<cell align="left" valign="middle">per bin [native]</cell>
<cell align="right" valign="middle">91</cell>
- <cell align="right" valign="middle">47</cell>
- <cell align="right" valign="middle">51</cell>
- <cell align="right" valign="middle">99</cell>
+ <cell align="right" valign="middle">45 / 21</cell>
+ <cell align="right" valign="middle">48 / -</cell>
+ <cell align="right" valign="middle">93 / -</cell>
</row>
<row>
<cell align="left" valign="middle">per bin [driver,native]</cell>
<cell align="right" valign="middle">91</cell>
- <cell align="right" valign="middle">26</cell>
- <cell align="right" valign="middle">29</cell>
- <cell align="right" valign="middle">55</cell>
+ <cell align="right" valign="middle">25 / 15</cell>
+ <cell align="right" valign="middle">27 / 18</cell>
+ <cell align="right" valign="middle">52 / 33</cell>
</row>
<!-- BER -->
<row>
<cell align="left" valign="middle">ber bin</cell>
<cell align="right" valign="middle">165</cell>
- <cell align="right" valign="middle">35</cell>
- <cell align="right" valign="middle">42</cell>
- <cell align="right" valign="middle">77</cell>
+ <cell align="right" valign="middle">32 / 19</cell>
+ <cell align="right" valign="middle">38 / 21</cell>
+ <cell align="right" valign="middle">70 / 40</cell>
</row>
<row>
<cell align="left" valign="middle">ber bin [driver]</cell>
<cell align="right" valign="middle">165</cell>
- <cell align="right" valign="middle">35</cell>
- <cell align="right" valign="middle">37</cell>
- <cell align="right" valign="middle">72</cell>
+ <cell align="right" valign="middle">32 / 19</cell>
+ <cell align="right" valign="middle">33 / 20</cell>
+ <cell align="right" valign="middle">65 / 39</cell>
</row>
<row>
<cell align="left" valign="middle">ber bin [native]</cell>
<cell align="right" valign="middle">165</cell>
- <cell align="right" valign="middle">19</cell>
- <cell align="right" valign="middle">26</cell>
- <cell align="right" valign="middle">45</cell>
+ <cell align="right" valign="middle">17 / 11</cell>
+ <cell align="right" valign="middle">25 / 13</cell>
+ <cell align="right" valign="middle">42 / 24</cell>
</row>
<row>
<cell align="left" valign="middle">ber bin [driver,native]</cell>
<cell align="right" valign="middle">165</cell>
- <cell align="right" valign="middle">19</cell>
- <cell align="right" valign="middle">20</cell>
- <cell align="right" valign="middle">39</cell>
+ <cell align="right" valign="middle">17 / 11</cell>
+ <cell align="right" valign="middle">17 / 12</cell>
+ <cell align="right" valign="middle">34 / 23</cell>
</row>
<!-- ERLANG -->
<row>
<cell align="left" valign="middle">erl_dist</cell>
<cell align="right" valign="middle">875</cell>
- <cell align="right" valign="middle">5</cell>
- <cell align="right" valign="middle">10</cell>
- <cell align="right" valign="middle">15</cell>
+ <cell align="right" valign="middle">5 / 5</cell>
+ <cell align="right" valign="middle">10 / 10</cell>
+ <cell align="right" valign="middle">15 / 15</cell>
</row>
<row>
<cell align="left" valign="middle">erl_dist [megaco_compressed]</cell>
<cell align="right" valign="middle">405</cell>
- <cell align="right" valign="middle">6</cell>
- <cell align="right" valign="middle">7</cell>
- <cell align="right" valign="middle">13</cell>
+ <cell align="right" valign="middle">6 / 4</cell>
+ <cell align="right" valign="middle">7 / 4</cell>
+ <cell align="right" valign="middle">13 / 8</cell>
</row>
<row>
<cell align="left" valign="middle">erl_dist [compressed]</cell>
<cell align="right" valign="middle">345</cell>
- <cell align="right" valign="middle">86</cell>
- <cell align="right" valign="middle">21</cell>
- <cell align="right" valign="middle">107</cell>
+ <cell align="right" valign="middle">47 / 47</cell>
+ <cell align="right" valign="middle">20 / 20</cell>
+ <cell align="right" valign="middle">67 / 67</cell>
</row>
<row>
<cell align="left" valign="middle">erl_dist [megaco_compressed,compressed]</cell>
<cell align="right" valign="middle">200</cell>
- <cell align="right" valign="middle">71</cell>
- <cell align="right" valign="middle">12</cell>
- <cell align="right" valign="middle">83</cell>
+ <cell align="right" valign="middle">34 / 33</cell>
+ <cell align="right" valign="middle">11 / 9</cell>
+ <cell align="right" valign="middle">45 / 42</cell>
</row>
<tcaption>Codec performance</tcaption>
@@ -201,8 +207,8 @@
<p>When running SMP erlang on a multi-core machine the "throughput"
is significantly higher. The mstone1 test is an extreme test,
but it shows what is gained by using the reentrant flex-scanner. </p>
- <image file="mstone1.gif">
- <icaption>MStone1 with mstone1.sh -d flex -s 8</icaption>
+ <image file="mstone1.jpg">
+ <icaption>MStone1 with mstone1.sh -d flex -s 4</icaption>
</image>
</section>
@@ -276,10 +282,10 @@ MEGACO/1 [124.124.124.222]
<section>
<title>Setup</title>
<p>The measurements has been performed on a
- Dell PowerEdge 1950iii with
- 2* Intel Xeon L5430 @ 2.66 GHz, with 8 GB memory and
- running SLES 10 SP2 x86_64, kernel 2.6.16.60-0.34-smp.
- Software versions was open source OTP R13B and megaco-3.11.</p>
+ HP xw4600 Workstation with
+      an Intel(R) Core(TM)2 Quad CPU Q9550 @ 2.83GHz, with 4 GB memory and
+      running Ubuntu 10.04 x86_64, kernel 2.6.32-22-generic.
+      Software versions were open source OTP R13B04 (megaco-3.14).</p>
</section>
<section>
@@ -302,7 +308,7 @@ MEGACO/1 [124.124.124.222]
to our fastest text encoder (compact). </p>
</item>
<item>
- <p>our fastest binary decoder (ber) is about 47% (44%) faster than our
+ <p>our fastest binary decoder (ber) is about 54% (61%) faster than our
fastest text decoder (compact). </p>
</item>
</list>
diff --git a/lib/megaco/doc/src/mstone1-s8flex.log b/lib/megaco/doc/src/mstone1-s8flex.log
deleted file mode 100644
index d9d28399f3..0000000000
--- a/lib/megaco/doc/src/mstone1-s8flex.log
+++ /dev/null
@@ -1,234 +0,0 @@
-
----------------------------------------------
-Factor 01
-
-erl -noshell -smp +S 8 -pa /ldisk/bmk/pgm/otp-r13b-m311p08-re/lib/erlang/lib/megaco-3.11/examples/meas -s megaco_codec_mstone1 start_flex time_test 01 -s init stop
-
-OS: unix-linux: 2.6.16
-System architecture: x86_64-unknown-linux-gnu
-OTP release: R13B
-System version: Erlang R13B (erts-5.7.1) [source] [64-bit] [smp:8:8] [rq:8] [async-threads:0] [hipe] [kernel-poll:false]
-Heap type: private
-Global heap size: 0
-Thread support: true
-Thread pool size: 0
-Process limit: 32768
-SMP support: true
-Num schedulers: 8
-Scheduler bindings: {0,1,4,5,2,3,6,7}
-Scheduler bind type: thread_no_node_processor_spread
-Cpu topology: [{processor,[{core,{logical,0}},{core,{logical,4}},{core,{logical,2}},{core,{logical,6}}]},{processor,[{core,{logical,1}},{core,{logical,5}},{core,{logical,3}},{core,{logical,7}}]}]
-Megaco version: megaco-3.11-p08
-ASN.1 version: 1.6.10
-
- * starting runners [16] ................ done
- * await runners ready ................ done
- * now snooze
- * release them
-
-16 runners
-Runner heap size data:
- Min: 75025
- Max: 1682835
- Avg: 582577
-Runner reductions data:
- Min: 927126711
- Max: 5292487523
- Avg: 1929935038
-
-MStone: 63283727
-
----------------------------------------------
-Factor 02
-
-erl -noshell -smp +S 8 -pa /ldisk/bmk/pgm/otp-r13b-m311p08-re/lib/erlang/lib/megaco-3.11/examples/meas -s megaco_codec_mstone1 start_flex time_test 02 -s init stop
-
-OS: unix-linux: 2.6.16
-System architecture: x86_64-unknown-linux-gnu
-OTP release: R13B
-System version: Erlang R13B (erts-5.7.1) [source] [64-bit] [smp:8:8] [rq:8] [async-threads:0] [hipe] [kernel-poll:false]
-Heap type: private
-Global heap size: 0
-Thread support: true
-Thread pool size: 0
-Process limit: 32768
-SMP support: true
-Num schedulers: 8
-Scheduler bindings: {0,1,4,5,2,3,6,7}
-Scheduler bind type: thread_no_node_processor_spread
-Cpu topology: [{processor,[{core,{logical,0}},{core,{logical,4}},{core,{logical,2}},{core,{logical,6}}]},{processor,[{core,{logical,1}},{core,{logical,5}},{core,{logical,3}},{core,{logical,7}}]}]
-Megaco version: megaco-3.11-p08
-ASN.1 version: 1.6.10
-
- * starting runners [32] ................................ done
- * await runners ready ................................ done
- * now snooze
- * release them
-
-32 runners
-Runner heap size data:
- Min: 75025
- Max: 1346269
- Avg: 388569
-Runner reductions data:
- Min: 645498054
- Max: 2774469009
- Avg: 943407719
-
-MStone: 61441342
-
----------------------------------------------
-Factor 04
-
-erl -noshell -smp +S 8 -pa /ldisk/bmk/pgm/otp-r13b-m311p08-re/lib/erlang/lib/megaco-3.11/examples/meas -s megaco_codec_mstone1 start_flex time_test 04 -s init stop
-
-OS: unix-linux: 2.6.16
-System architecture: x86_64-unknown-linux-gnu
-OTP release: R13B
-System version: Erlang R13B (erts-5.7.1) [source] [64-bit] [smp:8:8] [rq:8] [async-threads:0] [hipe] [kernel-poll:false]
-Heap type: private
-Global heap size: 0
-Thread support: true
-Thread pool size: 0
-Process limit: 32768
-SMP support: true
-Num schedulers: 8
-Scheduler bindings: {0,1,4,5,2,3,6,7}
-Scheduler bind type: thread_no_node_processor_spread
-Cpu topology: [{processor,[{core,{logical,0}},{core,{logical,4}},{core,{logical,2}},{core,{logical,6}}]},{processor,[{core,{logical,1}},{core,{logical,5}},{core,{logical,3}},{core,{logical,7}}]}]
-Megaco version: megaco-3.11-p08
-ASN.1 version: 1.6.10
-
- * starting runners [64] ................................................................ done
- * await runners ready ................................................................ done
- * now snooze
- * release them
-
-64 runners
-Runner heap size data:
- Min: 75025
- Max: 1682835
- Avg: 462690
-Runner reductions data:
- Min: 395464832
- Max: 916378232
- Avg: 507760636
-
-MStone: 66958216
-
----------------------------------------------
-Factor 08
-
-erl -noshell -smp +S 8 -pa /ldisk/bmk/pgm/otp-r13b-m311p08-re/lib/erlang/lib/megaco-3.11/examples/meas -s megaco_codec_mstone1 start_flex time_test 08 -s init stop
-
-OS: unix-linux: 2.6.16
-System architecture: x86_64-unknown-linux-gnu
-OTP release: R13B
-System version: Erlang R13B (erts-5.7.1) [source] [64-bit] [smp:8:8] [rq:8] [async-threads:0] [hipe] [kernel-poll:false]
-Heap type: private
-Global heap size: 0
-Thread support: true
-Thread pool size: 0
-Process limit: 32768
-SMP support: true
-Num schedulers: 8
-Scheduler bindings: {0,1,4,5,2,3,6,7}
-Scheduler bind type: thread_no_node_processor_spread
-Cpu topology: [{processor,[{core,{logical,0}},{core,{logical,4}},{core,{logical,2}},{core,{logical,6}}]},{processor,[{core,{logical,1}},{core,{logical,5}},{core,{logical,3}},{core,{logical,7}}]}]
-Megaco version: megaco-3.11-p08
-ASN.1 version: 1.6.10
-
- * starting runners [128] ................................................................................................................................ done
- * await runners ready ................................................................................................................................ done
- * now snooze
- * release them
-
-128 runners
-Runner heap size data:
- Min: 75025
- Max: 832040
- Avg: 173900
-Runner reductions data:
- Min: 236710819
- Max: 457961244
- Avg: 269562568
-
-MStone: 72098418
-
----------------------------------------------
-Factor 16
-
-erl -noshell -smp +S 8 -pa /ldisk/bmk/pgm/otp-r13b-m311p08-re/lib/erlang/lib/megaco-3.11/examples/meas -s megaco_codec_mstone1 start_flex time_test 16 -s init stop
-
-OS: unix-linux: 2.6.16
-System architecture: x86_64-unknown-linux-gnu
-OTP release: R13B
-System version: Erlang R13B (erts-5.7.1) [source] [64-bit] [smp:8:8] [rq:8] [async-threads:0] [hipe] [kernel-poll:false]
-Heap type: private
-Global heap size: 0
-Thread support: true
-Thread pool size: 0
-Process limit: 32768
-SMP support: true
-Num schedulers: 8
-Scheduler bindings: {0,1,4,5,2,3,6,7}
-Scheduler bind type: thread_no_node_processor_spread
-Cpu topology: [{processor,[{core,{logical,0}},{core,{logical,4}},{core,{logical,2}},{core,{logical,6}}]},{processor,[{core,{logical,1}},{core,{logical,5}},{core,{logical,3}},{core,{logical,7}}]}]
-Megaco version: megaco-3.11-p08
-ASN.1 version: 1.6.10
-
- * starting runners [256] ................................................................................................................................................................................................................................................................ done
- * await runners ready ................................................................................................................................................................................................................................................................ done
- * now snooze
- * release them
-
-256 runners
-Runner heap size data:
- Min: 75025
- Max: 317811
- Avg: 131652
-Runner reductions data:
- Min: 134104991
- Max: 163429204
- Avg: 142654707
-
-MStone: 77139535
-
----------------------------------------------
-Factor 32
-
-erl -noshell -smp +S 8 -pa /ldisk/bmk/pgm/otp-r13b-m311p08-re/lib/erlang/lib/megaco-3.11/examples/meas -s megaco_codec_mstone1 start_flex time_test 32 -s init stop
-
-OS: unix-linux: 2.6.16
-System architecture: x86_64-unknown-linux-gnu
-OTP release: R13B
-System version: Erlang R13B (erts-5.7.1) [source] [64-bit] [smp:8:8] [rq:8] [async-threads:0] [hipe] [kernel-poll:false]
-Heap type: private
-Global heap size: 0
-Thread support: true
-Thread pool size: 0
-Process limit: 32768
-SMP support: true
-Num schedulers: 8
-Scheduler bindings: {0,1,4,5,2,3,6,7}
-Scheduler bind type: thread_no_node_processor_spread
-Cpu topology: [{processor,[{core,{logical,0}},{core,{logical,4}},{core,{logical,2}},{core,{logical,6}}]},{processor,[{core,{logical,1}},{core,{logical,5}},{core,{logical,3}},{core,{logical,7}}]}]
-Megaco version: megaco-3.11-p08
-ASN.1 version: 1.6.10
-
- * starting runners [512] ................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................ done
- * await runners ready ................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................ done
- * now snooze
- * release them
-
-512 runners
-Runner heap size data:
- Min: 75025
- Max: 196418
- Avg: 107328
-Runner reductions data:
- Min: 71186547
- Max: 74170110
- Avg: 72380653
-
-MStone: 78820851
diff --git a/lib/megaco/doc/src/mstone1.gif b/lib/megaco/doc/src/mstone1.gif
deleted file mode 100644
index 54c9c5514c..0000000000
--- a/lib/megaco/doc/src/mstone1.gif
+++ /dev/null
Binary files differ
diff --git a/lib/megaco/doc/src/mstone1.jpg b/lib/megaco/doc/src/mstone1.jpg
index b417429a08..3edc65faf1 100644
--- a/lib/megaco/doc/src/mstone1.jpg
+++ b/lib/megaco/doc/src/mstone1.jpg
Binary files differ
diff --git a/lib/megaco/doc/src/mstone1.png b/lib/megaco/doc/src/mstone1.png
deleted file mode 100644
index 19af210abc..0000000000
--- a/lib/megaco/doc/src/mstone1.png
+++ /dev/null
Binary files differ
diff --git a/lib/megaco/doc/src/mstone1.ps b/lib/megaco/doc/src/mstone1.ps
deleted file mode 100644
index 6436a4eb43..0000000000
--- a/lib/megaco/doc/src/mstone1.ps
+++ /dev/null
@@ -1,1959 +0,0 @@
-%!PS-Adobe-3.0
-%%Creator: GIMP PostScript file plugin V 1.17 by Peter Kirchgessner
-%%Title: mstone1_html_m5496b992.ps
-%%CreationDate: Fri May 29 19:07:25 2009
-%%DocumentData: Clean7Bit
-%%LanguageLevel: 2
-%%Pages: 1
-%%BoundingBox: 14 14 476 247
-%%EndComments
-%%BeginProlog
-% Use own dictionary to avoid conflicts
-10 dict begin
-%%EndProlog
-%%Page: 1 1
-% Translate for offset
-14.173228346456694 14.173228346456694 translate
-% Translate to begin of first scanline
-0 231.99920747433686 translate
-460.99842519685041 -231.99920747433686 scale
-% Image geometry
-461 232 8
-% Transformation matrix
-[ 461 0 0 232 0 0 ]
-% Strings to hold RGB-samples per scanline
-/rstr 461 string def
-/gstr 461 string def
-/bstr 461 string def
-{currentfile /ASCII85Decode filter /RunLengthDecode filter rstr readstring pop}
-{currentfile /ASCII85Decode filter /RunLengthDecode filter gstr readstring pop}
-{currentfile /ASCII85Decode filter /RunLengthDecode filter bstr readstring pop}
-true 3
-%%BeginData: 120502 ASCII Bytes
-colorimage
-% [ASCII85-encoded raster scanline data omitted]
-k5SDNs8)cqpAb*knc&U.$0_Efp?`1<!;ZQmndM'D"XDHk*"4[RqY'shXY^#%PlLafMEq4nc^?0]
-1u/'/ruAa=*'MXCrs[:"2Xh6@s6fle']f;88cSDXs8DoWs*t~>
-hu=Gls!)ggr:-(+q$-Q.*rnZciW&rUr<<3#HEe%,rPo\lq=jqll18LAq"DES`;\%HNq`SHrrN&t
-rVuoss8MF/q#C?nh=psLqi"MGs(0mabZ+TBs764eh"6%KrVmE&q>^,f0WtH)25fU?s82Wls7ZKm
-g(jo5kPtDYXuF;bc8V'gY9D$HaYCQorr?WSr*M;YrVm'""8i,to`"k0r"U.P+0tq?s5tW#rrM9X
-s8V3\r6eJYq>V#urr3)e$M45qs%7iJs6<$gp=)Y:U&N7op@eO`s8V9^s7?8R6N@)]s8)a6Shg?n
-N;qlVV)86&S*L%Rr;Zff)WLPbs8V3\s8)Quq#C<is8W#tmJd+jqGDJ:s8Duarr3c/s8R::cMu?[
-J,''*n,ND(g&M*Ms7#sd!s/<QFT2;3KeMlps8V`f"BX+<M?$H#s)'jms8&H8^])G=M>mQ_f_'Cq
-8)j2O62:KF^+I@_as>1!!,:ots'SRus8Q(cqZ$@$pAb!hrr(pXJ,~>
-hu=Gls!)ggr:-(+q$-Q.*rnZciW&rUr<<3#HEe%,rPo\lq=jqll18LAq"DES`;\%HNq`SHrrN&t
-rVuoss8MF/q#C?nh=psLqi"MGs(0mabZ+TBs764eh"6%KrVmE&q>^,f0WtH)25fU?s82Wls7ZKm
-g(jo5kPtDYXuF;bc8V'gY9D$HaYCQorr?WSr*M;YrVm'""8i,to`"k0r"U.P+0tq?s5tW#rrM9X
-s8V3\r6eJYq>V#urr3)e$M45qs%7iJs6<$gp=)Y:U&N7op@eO`s8V9^s7?8R6N@)]s8)a6Shg?n
-N;qlVV)86&S*L%Rr;Zff)WLPbs8V3\s8)Quq#C<is8W#tmJd+jqGDJ:s8Duarr3c/s8R::cMu?[
-J,''*n,ND(g&M*Ms7#sd!s/<QFT2;3KeMlps8V`f"BX+<M?$H#s)'jms8&H8^])G=M>mQ_f_'Cq
-8)j2O62:KF^+I@_as>1!!,:ots'SRus8Q(cqZ$@$pAb!hrr(pXJ,~>
-hu=Gls!)ggr:-(+q$-Q.*rnZciW&rUr<<3#HEe%,rPo\lq=jqll18LAq"DES`;\%HNq`SHrrN&t
-rVuoss8MF/q#C?nh=psLqi"MGs(0mabZ+TBs764eh"6%KrVmE&q>^,f0WtH)25fU?s82Wls7ZKm
-g(jo5kPtDYXuF;bc8V'gY9D$HaYCQorr?WSr*M;YrVm'""8i,to`"k0r"U.P+0tq?s5tW#rrM9X
-s8V3\r6eJYq>V#urr3)e$M45qs%7iJs6<$gp=)Y:U&N7op@eO`s8V9^s7?8R6N@)]s8)a6Shg?n
-N;qlVV)86&S*L%Rr;Zff)WLPbs8V3\s8)Quq#C<is8W#tmJd+jqGDJ:s8Duarr3c/s8R::cMu?[
-J,''*n,ND(g&M*Ms7#sd!s/<QFT2;3KeMlps8V`f"BX+<M?$H#s)'jms8&H8^])G=M>mQ_f_'Cq
-8)j2O62:KF^+I@_as>1!!,:ots'SRus8Q(cqZ$@$pAb!hrr(pXJ,~>
-i;ZpV!<8Drq>9\2SG36as8TND%g5M.s8N)mrr5.-s8)bi70!8hq"t*c#l4KZ`;fi:gFN=$s8N,t
-s8Dutrr<#=.KBDCq#CBdq8EQds8Vid#64So&b,f+rr3>[%fcM.s7l3c-Ii%p"P4Ii$2+8s*:Npm
-s*k`&s7u]p6.>E)s3!1t6+Hspp8p1ErrE&u"8*3)o)AZ'r<)rsrr;lkqu?]pr7;p=s8R?rs6g^&
-s7Q0err<$rs8VNoqZ$![rt=f#s7H=ZWrK@ps8Du7!8?u:$NL#%oD\CPr9aO!$.f%Js7H0f!%,\^
-dO::Y"X1b_`$(]@q>]j^rrE)hs7cNm'CGer!<<*%!WWu9!<;9WrM:1ui;N[.m.^P[rr<!\!!!Er
-qZ$?js7Gmgs6]jUp](9^nc/@cs-aGes7RD(rrE)os8N)us#ol^gA(^=!<;Wi38aE/p?a[3!WVfl
-mg&CQs8E3"s7&%Vs8W&%2ZEi^lMpGI!8mbC!hKAas"VUpr;?TnjSs`~>
-i;ZpV!<8Drq>9\2SG36as8TND%g5M.s8N)mrr5.-s8)bi70!8hq"t*c#l4KZ`;fi:gFN=$s8N,t
-s8Dutrr<#=.KBDCq#CBdq8EQds8Vid#64So&b,f+rr3>[%fcM.s7l3c-Ii%p"P4Ii$2+8s*:Npm
-s*k`&s7u]p6.>E)s3!1t6+Hspp8p1ErrE&u"8*3)o)AZ'r<)rsrr;lkqu?]pr7;p=s8R?rs6g^&
-s7Q0err<$rs8VNoqZ$![rt=f#s7H=ZWrK@ps8Du7!8?u:$NL#%oD\CPr9aO!$.f%Js7H0f!%,\^
-dO::Y"X1b_`$(]@q>]j^rrE)hs7cNm'CGer!<<*%!WWu9!<;9WrM:1ui;N[.m.^P[rr<!\!!!Er
-qZ$?js7Gmgs6]jUp](9^nc/@cs-aGes7RD(rrE)os8N)us#ol^gA(^=!<;Wi38aE/p?a[3!WVfl
-mg&CQs8E3"s7&%Vs8W&%2ZEi^lMpGI!8mbC!hKAas"VUpr;?TnjSs`~>
-i;ZpV!<8Drq>9\2SG36as8TND%g5M.s8N)mrr5.-s8)bi70!8hq"t*c#l4KZ`;fi:gFN=$s8N,t
-s8Dutrr<#=.KBDCq#CBdq8EQds8Vid#64So&b,f+rr3>[%fcM.s7l3c-Ii%p"P4Ii$2+8s*:Npm
-s*k`&s7u]p6.>E)s3!1t6+Hspp8p1ErrE&u"8*3)o)AZ'r<)rsrr;lkqu?]pr7;p=s8R?rs6g^&
-s7Q0err<$rs8VNoqZ$![rt=f#s7H=ZWrK@ps8Du7!8?u:$NL#%oD\CPr9aO!$.f%Js7H0f!%,\^
-dO::Y"X1b_`$(]@q>]j^rrE)hs7cNm'CGer!<<*%!WWu9!<;9WrM:1ui;N[.m.^P[rr<!\!!!Er
-qZ$?js7Gmgs6]jUp](9^nc/@cs-aGes7RD(rrE)os8N)us#ol^gA(^=!<;Wi38aE/p?a[3!WVfl
-mg&CQs8E3"s7&%Vs8W&%2ZEi^lMpGI!8mbC!hKAas"VUpr;?TnjSs`~>
-i;ZRO!rha,VZ*Y'q#:?oq>1$WrNT!/K`2#Prq?EUpAb0Z'`[_(s8DQf"98B(!!!*$!#,/'s7??i
-s8Dutrr;l;1]$nHnFuq^^&YY=s7?3h')2D+s6]meqtC$iqYgNkr;Z@0r;['9"oeT&s8MlnruTZ)
-'bKO-s76?n&-r7F!:^KhqXjgf&H;A3jo>5T!;uk'r<)rsrp]U\s6Tdbs6]:rrn7A3s8N*!s6K^b
-q>]DNE:j/>mJ$YXmKijjpAaabs6q>Op%n]W!<<#k$iBl"s8VZinGgT1](Q*qs8VWc&`EKbqU6\Z
-!:]RUs4J1drtY2*(ubMpo`"mk$3:)/s8V?`rVultrr38t,anQ0r:p<jrVo=_l&%7PpVo^Equ?<^
-s8N`"qXFOb!!!N=oDK$rm38#!TH`n#$2"8lknEpgq>UHps8;j)nFHSZ!"&]3!!*$!s8VQa#lai)
-nc8[h)%HEAnbr=frW)uur;YkTDZ>J/s8;fp9=k!!rrDuXs*t~>
-i;ZRO!rha,VZ*Y'q#:?oq>1$WrNT!/K`2#Prq?EUpAb0Z'`[_(s8DQf"98B(!!!*$!#,/'s7??i
-s8Dutrr;l;1]$nHnFuq^^&YY=s7?3h')2D+s6]meqtC$iqYgNkr;Z@0r;['9"oeT&s8MlnruTZ)
-'bKO-s76?n&-r7F!:^KhqXjgf&H;A3jo>5T!;uk'r<)rsrp]U\s6Tdbs6]:rrn7A3s8N*!s6K^b
-q>]DNE:j/>mJ$YXmKijjpAaabs6q>Op%n]W!<<#k$iBl"s8VZinGgT1](Q*qs8VWc&`EKbqU6\Z
-!:]RUs4J1drtY2*(ubMpo`"mk$3:)/s8V?`rVultrr38t,anQ0r:p<jrVo=_l&%7PpVo^Equ?<^
-s8N`"qXFOb!!!N=oDK$rm38#!TH`n#$2"8lknEpgq>UHps8;j)nFHSZ!"&]3!!*$!s8VQa#lai)
-nc8[h)%HEAnbr=frW)uur;YkTDZ>J/s8;fp9=k!!rrDuXs*t~>
-i;ZRO!rha,VZ*Y'q#:?oq>1$WrNT!/K`2#Prq?EUpAb0Z'`[_(s8DQf"98B(!!!*$!#,/'s7??i
-s8Dutrr;l;1]$nHnFuq^^&YY=s7?3h')2D+s6]meqtC$iqYgNkr;Z@0r;['9"oeT&s8MlnruTZ)
-'bKO-s76?n&-r7F!:^KhqXjgf&H;A3jo>5T!;uk'r<)rsrp]U\s6Tdbs6]:rrn7A3s8N*!s6K^b
-q>]DNE:j/>mJ$YXmKijjpAaabs6q>Op%n]W!<<#k$iBl"s8VZinGgT1](Q*qs8VWc&`EKbqU6\Z
-!:]RUs4J1drtY2*(ubMpo`"mk$3:)/s8V?`rVultrr38t,anQ0r:p<jrVo=_l&%7PpVo^Equ?<^
-s8N`"qXFOb!!!N=oDK$rm38#!TH`n#$2"8lknEpgq>UHps8;j)nFHSZ!"&]3!!*$!s8VQa#lai)
-nc8[h)%HEAnbr=frW)uur;YkTDZ>J/s8;fp9=k!!rrDuXs*t~>
-hu@6fqYsn^r],f2pC$Qlr;ZZoo`)`;r;ccirt=o&s8)a"r;Qcsrr;fps8)frnG`Idp%JF^rW2rs
-rVuoss8C)%s8;QirV=&7VZ-Vis82WjqZ$<or;Qcls76$qs82lspAb-mpAY'qq>L*lq"k!i,P;$$
-<AiSg48AjU#Q"/es8Vrq"nqurr9=[irrE)os8N*!rr2pHr<)rsrr<#oqZ$Tfs8V<cs$L;rp\4si
-rVuors7Q9[#QO?Iec5@Bk8aL$)uKX8o`#-hs8W"65kb,l2ZN[Sp](!_s0)d1q#::5qt:!h+m]1*
-s8N*[email protected]"BoEY0ls7u]gs8;j*q"4NE-3*K7rVccqs7cQbs8NYkq>^KR
-(]471s8VmuR/[+$p&F[a!;lcrs8?dlr]Y<$rrDrqs8N)urtG)5oD&@c!VcWo#4MQgs8;Wi!;ZWo
-$3B\ss8!'"s8NPrrr3c)&afl"s7,ma`ZXe%I/X*FqC:.ps8:mVJ,~>
-hu@6fqYsn^r],f2pC$Qlr;ZZoo`)`;r;ccirt=o&s8)a"r;Qcsrr;fps8)frnG`Idp%JF^rW2rs
-rVuoss8C)%s8;QirV=&7VZ-Vis82WjqZ$<or;Qcls76$qs82lspAb-mpAY'qq>L*lq"k!i,P;$$
-<AiSg48AjU#Q"/es8Vrq"nqurr9=[irrE)os8N*!rr2pHr<)rsrr<#oqZ$Tfs8V<cs$L;rp\4si
-rVuors7Q9[#QO?Iec5@Bk8aL$)uKX8o`#-hs8W"65kb,l2ZN[Sp](!_s0)d1q#::5qt:!h+m]1*
-s8N*[email protected]"BoEY0ls7u]gs8;j*q"4NE-3*K7rVccqs7cQbs8NYkq>^KR
-(]471s8VmuR/[+$p&F[a!;lcrs8?dlr]Y<$rrDrqs8N)urtG)5oD&@c!VcWo#4MQgs8;Wi!;ZWo
-$3B\ss8!'"s8NPrrr3c)&afl"s7,ma`ZXe%I/X*FqC:.ps8:mVJ,~>
-hu@6fqYsn^r],f2pC$Qlr;ZZoo`)`;r;ccirt=o&s8)a"r;Qcsrr;fps8)frnG`Idp%JF^rW2rs
-rVuoss8C)%s8;QirV=&7VZ-Vis82WjqZ$<or;Qcls76$qs82lspAb-mpAY'qq>L*lq"k!i,P;$$
-<AiSg48AjU#Q"/es8Vrq"nqurr9=[irrE)os8N*!rr2pHr<)rsrr<#oqZ$Tfs8V<cs$L;rp\4si
-rVuors7Q9[#QO?Iec5@Bk8aL$)uKX8o`#-hs8W"65kb,l2ZN[Sp](!_s0)d1q#::5qt:!h+m]1*
-s8N*[email protected]"BoEY0ls7u]gs8;j*q"4NE-3*K7rVccqs7cQbs8NYkq>^KR
-(]471s8VmuR/[+$p&F[a!;lcrs8?dlr]Y<$rrDrqs8N)urtG)5oD&@c!VcWo#4MQgs8;Wi!;ZWo
-$3B\ss8!'"s8NPrrr3c)&afl"s7,ma`ZXe%I/X*FqC:.ps8:mVJ,~>
-iVuRLrrE*!c4-6IhZ*3Js8Dutp](9kiY;Csmf3;>]Dqa,Z7GtT!;lfrrW)uu/@,<Uq>^*ep&Fgf
-s7cQno_SUfjVRgop\Fif$G?05s"aTPs8VZhquH`r!rr2rrrW5s!<<)op/@des7u]frrMros8W#f
-s+gd-o%O@us8P$^rr2pPo`$SRn,NBj5PP0Xs763i!WW,mrr;uupAb$es8;fpqu?]nat*Jo!"o84
-!"')5s7cNm?J6(ms'UW]rVrNjs82irrX.T`#6"2okSn.3s"pbPs8W)unc,-\YlF_&s8N&urr6/k
-q>('@1\+qDq=Xd80DYPGpAap8%I<]as8;co!<3>us7H?kr;Qrjs7cT(rr30!s8Vfirr)it']T,l
-$gIusp]'a_oY_[/q#::<qYpQqo`tNsaoJaJhZ!NVs8;fp!rr;trW)ZlnG`Lgqu8@Pr;S>F$MON!
-rrW5ur;Qius8+jcq#BZi49#EVs8;iqs"]65cg^u/r=Ar$s8DunjSs`~>
-iVuRLrrE*!c4-6IhZ*3Js8Dutp](9kiY;Csmf3;>]Dqa,Z7GtT!;lfrrW)uu/@,<Uq>^*ep&Fgf
-s7cQno_SUfjVRgop\Fif$G?05s"aTPs8VZhquH`r!rr2rrrW5s!<<)op/@des7u]frrMros8W#f
-s+gd-o%O@us8P$^rr2pPo`$SRn,NBj5PP0Xs763i!WW,mrr;uupAb$es8;fpqu?]nat*Jo!"o84
-!"')5s7cNm?J6(ms'UW]rVrNjs82irrX.T`#6"2okSn.3s"pbPs8W)unc,-\YlF_&s8N&urr6/k
-q>('@1\+qDq=Xd80DYPGpAap8%I<]as8;co!<3>us7H?kr;Qrjs7cT(rr30!s8Vfirr)it']T,l
-$gIusp]'a_oY_[/q#::<qYpQqo`tNsaoJaJhZ!NVs8;fp!rr;trW)ZlnG`Lgqu8@Pr;S>F$MON!
-rrW5ur;Qius8+jcq#BZi49#EVs8;iqs"]65cg^u/r=Ar$s8DunjSs`~>
-iVuRLrrE*!c4-6IhZ*3Js8Dutp](9kiY;Csmf3;>]Dqa,Z7GtT!;lfrrW)uu/@,<Uq>^*ep&Fgf
-s7cQno_SUfjVRgop\Fif$G?05s"aTPs8VZhquH`r!rr2rrrW5s!<<)op/@des7u]frrMros8W#f
-s+gd-o%O@us8P$^rr2pPo`$SRn,NBj5PP0Xs763i!WW,mrr;uupAb$es8;fpqu?]nat*Jo!"o84
-!"')5s7cNm?J6(ms'UW]rVrNjs82irrX.T`#6"2okSn.3s"pbPs8W)unc,-\YlF_&s8N&urr6/k
-q>('@1\+qDq=Xd80DYPGpAap8%I<]as8;co!<3>us7H?kr;Qrjs7cT(rr30!s8Vfirr)it']T,l
-$gIusp]'a_oY_[/q#::<qYpQqo`tNsaoJaJhZ!NVs8;fp!rr;trW)ZlnG`Lgqu8@Pr;S>F$MON!
-rrW5ur;Qius8+jcq#BZi49#EVs8;iqs"]65cg^u/r=Ar$s8DunjSs`~>
-i;Yn5#Q4Q#*tUm@jSo\OD2u@1qr,"6UAl%`o)Hi4ci(g)b5_A?s7lKd!rr;B*jb_$L2$\fmJd[r
-s7>s^rrGR1rr3@j)795>s7ZKks7QBjs$-SancAUes7uceruJoTrVrlC\Gt3F;Y'ngs6BUZs3h7G
-s7cAY!87.V(qo_+N`l+s'AEK&-+j3W!:0[\quufnrrhE_)#4(/s*XYB?@_><^a#H>s8VBarrV]a
-s$`OBm`S"Lr:SMI^A#;IcN!_?oD`*\s'Jk-`^Kl^dUl2Hs7bp[qYuBfaT)59s8P"Us7--F(V'.f
-1kYhf'tO@m4,rq2q>1-kn[KQqf_G0uR0!*`s6Td`o`+pks6sYsrr;imqZ$Bis(h*'qK><Wq8,F=
-s8VZip\k-##*Rp8qUgcKlMgq[rVoLj:]L@`!W)irq>p0Zs8NH+s8MWjrVuo4(rGb2L/IsNs763\
-!W)irq>p0es1BSnrn8$orrr2ps8Dip[fA)]s7--5:[e8^rqtgVJ,~>
-i;Yn5#Q4Q#*tUm@jSo\OD2u@1qr,"6UAl%`o)Hi4ci(g)b5_A?s7lKd!rr;B*jb_$L2$\fmJd[r
-s7>s^rrGR1rr3@j)795>s7ZKks7QBjs$-SancAUes7uceruJoTrVrlC\Gt3F;Y'ngs6BUZs3h7G
-s7cAY!87.V(qo_+N`l+s'AEK&-+j3W!:0[\quufnrrhE_)#4(/s*XYB?@_><^a#H>s8VBarrV]a
-s$`OBm`S"Lr:SMI^A#;IcN!_?oD`*\s'Jk-`^Kl^dUl2Hs7bp[qYuBfaT)59s8P"Us7--F(V'.f
-1kYhf'tO@m4,rq2q>1-kn[KQqf_G0uR0!*`s6Td`o`+pks6sYsrr;imqZ$Bis(h*'qK><Wq8,F=
-s8VZip\k-##*Rp8qUgcKlMgq[rVoLj:]L@`!W)irq>p0Zs8NH+s8MWjrVuo4(rGb2L/IsNs763\
-!W)irq>p0es1BSnrn8$orrr2ps8Dip[fA)]s7--5:[e8^rqtgVJ,~>
-i;Yn5#Q4Q#*tUm@jSo\OD2u@1qr,"6UAl%`o)Hi4ci(g)b5_A?s7lKd!rr;B*jb_$L2$\fmJd[r
-s7>s^rrGR1rr3@j)795>s7ZKks7QBjs$-SancAUes7uceruJoTrVrlC\Gt3F;Y'ngs6BUZs3h7G
-s7cAY!87.V(qo_+N`l+s'AEK&-+j3W!:0[\quufnrrhE_)#4(/s*XYB?@_><^a#H>s8VBarrV]a
-s$`OBm`S"Lr:SMI^A#;IcN!_?oD`*\s'Jk-`^Kl^dUl2Hs7bp[qYuBfaT)59s8P"Us7--F(V'.f
-1kYhf'tO@m4,rq2q>1-kn[KQqf_G0uR0!*`s6Td`o`+pks6sYsrr;imqZ$Bis(h*'qK><Wq8,F=
-s8VZip\k-##*Rp8qUgcKlMgq[rVoLj:]L@`!W)irq>p0Zs8NH+s8MWjrVuo4(rGb2L/IsNs763\
-!W)irq>p0es1BSnrn8$orrr2ps8Dip[fA)]s7--5:[e8^rqtgVJ,~>
-iW!firrVHbrNuX@s8VEcs8SNf$kESHV#UI_"[rC]b=3"4d-L0##PA&rr;urpm_([r$!!kqs8VZj
-gAh3Os8;Wgs0P_rs7-Nt!!j#6!<<&up&F:OrrDTh!<<#rrrE)mQkqs[s6d<?)&#W\rt#&-CAS]8
-s6fp`s&1*3s/n^'!(aa!p!bT#3kt^jp^@,js6p$gqY:*jo`,F$!<E06n[2F:"Y$2Gs8;oslMh%f
-p]%j$%gXtTs7?9j_C>O3`;]f5q>^2]./_t&$nf,N!#pR`s7H$br;Zd,qu@E4p+?=BrVuo=/-7/T
-s7c!"0*E>KrVlosr;Q^iY&59h!&@X2rW)Wkq#(*iq#L9k!!a#7!<<)n,l.B<r;HZqqM61+15c,(
-p&G'drUo^;9cO3J/7-Etnc8^gs.0D#rVults8Dor!<3!&ncSOFs8N3#s"O2E;ZmM4Yl=t$qssae
-s8Dor!<;ogs0t>r1Xc'u!<<)pm/QeZ"JYYbs6sBqs8VuTs*t~>
-iW!firrVHbrNuX@s8VEcs8SNf$kESHV#UI_"[rC]b=3"4d-L0##PA&rr;urpm_([r$!!kqs8VZj
-gAh3Os8;Wgs0P_rs7-Nt!!j#6!<<&up&F:OrrDTh!<<#rrrE)mQkqs[s6d<?)&#W\rt#&-CAS]8
-s6fp`s&1*3s/n^'!(aa!p!bT#3kt^jp^@,js6p$gqY:*jo`,F$!<E06n[2F:"Y$2Gs8;oslMh%f
-p]%j$%gXtTs7?9j_C>O3`;]f5q>^2]./_t&$nf,N!#pR`s7H$br;Zd,qu@E4p+?=BrVuo=/-7/T
-s7c!"0*E>KrVlosr;Q^iY&59h!&@X2rW)Wkq#(*iq#L9k!!a#7!<<)n,l.B<r;HZqqM61+15c,(
-p&G'drUo^;9cO3J/7-Etnc8^gs.0D#rVults8Dor!<3!&ncSOFs8N3#s"O2E;ZmM4Yl=t$qssae
-s8Dor!<;ogs0t>r1Xc'u!<<)pm/QeZ"JYYbs6sBqs8VuTs*t~>
-iW!firrVHbrNuX@s8VEcs8SNf$kESHV#UI_"[rC]b=3"4d-L0##PA&rr;urpm_([r$!!kqs8VZj
-gAh3Os8;Wgs0P_rs7-Nt!!j#6!<<&up&F:OrrDTh!<<#rrrE)mQkqs[s6d<?)&#W\rt#&-CAS]8
-s6fp`s&1*3s/n^'!(aa!p!bT#3kt^jp^@,js6p$gqY:*jo`,F$!<E06n[2F:"Y$2Gs8;oslMh%f
-p]%j$%gXtTs7?9j_C>O3`;]f5q>^2]./_t&$nf,N!#pR`s7H$br;Zd,qu@E4p+?=BrVuo=/-7/T
-s7c!"0*E>KrVlosr;Q^iY&59h!&@X2rW)Wkq#(*iq#L9k!!a#7!<<)n,l.B<r;HZqqM61+15c,(
-p&G'drUo^;9cO3J/7-Etnc8^gs.0D#rVults8Dor!<3!&ncSOFs8N3#s"O2E;ZmM4Yl=t$qssae
-s8Dor!<;ogs0t>r1Xc'u!<<)pm/QeZ"JYYbs6sBqs8VuTs*t~>
-i;X>_s8V`gp]('^s8Vrqp&Fdcrs\o,s82-^s7cQjs7Z9frs.uks6ojbrq69j!qH9]rr3T*s8Vur
-s7lNlqu?<gqGZ/Kp&=tMrVuogs8DriqZ$Tos8VQes8N#trpTmdn,<1Rs8N&ls6fpeo`"^erqufk
-s8;oqs7H'bq>^<err3f1s8Voms7u]ks8)3\s8Duos8)chpAb$hru1D,oDARbqt^9lo_/=Qq#C9j
-s7H?kp\+UUr;Q^#q=OUbmJlhXrsnkos("jhs7,^\m.LDYrr)j"q>9d`qtg=(med%ali6\Yl1Y/W
-rVuWlqYpL$o)Jafrr;Wjqu$Hn!r;Wjqu7Ass7H?es75sOs8VN&)?9U6rVu`jrr3;rm/QPJs7cQn
-r;Q^&mf3:dnFu_Js8;iq+Su-8q!\4^rr2rsn,N:bs8)cqnc/X^pAb0Ps8VKds6Tab-h%'7rr2rs
-n,*.as8N&uq#C*ds7?9jqu;`uiW&oWiEbsQs7QEjr8dm.~>
-i;X>_s8V`gp]('^s8Vrqp&Fdcrs\o,s82-^s7cQjs7Z9frs.uks6ojbrq69j!qH9]rr3T*s8Vur
-s7lNlqu?<gqGZ/Kp&=tMrVuogs8DriqZ$Tos8VQes8N#trpTmdn,<1Rs8N&ls6fpeo`"^erqufk
-s8;oqs7H'bq>^<err3f1s8Voms7u]ks8)3\s8Duos8)chpAb$hru1D,oDARbqt^9lo_/=Qq#C9j
-s7H?kp\+UUr;Q^#q=OUbmJlhXrsnkos("jhs7,^\m.LDYrr)j"q>9d`qtg=(med%ali6\Yl1Y/W
-rVuWlqYpL$o)Jafrr;Wjqu$Hn!r;Wjqu7Ass7H?es75sOs8VN&)?9U6rVu`jrr3;rm/QPJs7cQn
-r;Q^&mf3:dnFu_Js8;iq+Su-8q!\4^rr2rsn,N:bs8)cqnc/X^pAb0Ps8VKds6Tab-h%'7rr2rs
-n,*.as8N&uq#C*ds7?9jqu;`uiW&oWiEbsQs7QEjr8dm.~>
-i;X>_s8V`gp]('^s8Vrqp&Fdcrs\o,s82-^s7cQjs7Z9frs.uks6ojbrq69j!qH9]rr3T*s8Vur
-s7lNlqu?<gqGZ/Kp&=tMrVuogs8DriqZ$Tos8VQes8N#trpTmdn,<1Rs8N&ls6fpeo`"^erqufk
-s8;oqs7H'bq>^<err3f1s8Voms7u]ks8)3\s8Duos8)chpAb$hru1D,oDARbqt^9lo_/=Qq#C9j
-s7H?kp\+UUr;Q^#q=OUbmJlhXrsnkos("jhs7,^\m.LDYrr)j"q>9d`qtg=(med%ali6\Yl1Y/W
-rVuWlqYpL$o)Jafrr;Wjqu$Hn!r;Wjqu7Ass7H?es75sOs8VN&)?9U6rVu`jrr3;rm/QPJs7cQn
-r;Q^&mf3:dnFu_Js8;iq+Su-8q!\4^rr2rsn,N:bs8)cqnc/X^pAb0Ps8VKds6Tab-h%'7rr2rs
-n,*.as8N&uq#C*ds7?9jqu;`uiW&oWiEbsQs7QEjr8dm.~>
-i;WrSs7H?^rr3]+s8Vlns8DutqssdTr:p<dmJlnWrr4#2s8V`hpAb'jr9F=^mJm.bs6BXaoDeLZ
-qu?WkoDS[shbO4Hs8VlomJm4WrVm5is7cNfs8VQfs6ose!;QNm$iL&&s69R`p](-jmJ[&$kPX`I
-o)JRds7lWon+QeMs8)9co)JF\r;R-'q#:6is8W#ms6Tab"RuHjs82fq%I=&js8Voprr<#ks8W&s
-rsSi+s6p!bs7lWfs7cKl)u9)A+Fhr3s68@=90rUUq>]XXrU0^cqXjgfnGW@eo)AY)q#CBdr;ZQl
-nGiOUs6fdas8)Hho)8Lcq$[6%rr<#mr:Bs`qtL$g&H2Y(s$X6kq#C9jo`+Uas8;lr$i9o'nc/Ld
-s6BXYoD\b(r;ZTms7lNYs7$'`s8VQfs6p!frVc`umI^GSrr3?)nc/@Qs8VQfs6]gc"7Q9in,<8+
-p\Og]s763hs7Z0dnFkWSI/j$Bnkn9Do`"mjp>c1'~>
-i;WrSs7H?^rr3]+s8Vlns8DutqssdTr:p<dmJlnWrr4#2s8V`hpAb'jr9F=^mJm.bs6BXaoDeLZ
-qu?WkoDS[shbO4Hs8VlomJm4WrVm5is7cNfs8VQfs6ose!;QNm$iL&&s69R`p](-jmJ[&$kPX`I
-o)JRds7lWon+QeMs8)9co)JF\r;R-'q#:6is8W#ms6Tab"RuHjs82fq%I=&js8Voprr<#ks8W&s
-rsSi+s6p!bs7lWfs7cKl)u9)A+Fhr3s68@=90rUUq>]XXrU0^cqXjgfnGW@eo)AY)q#CBdr;ZQl
-nGiOUs6fdas8)Hho)8Lcq$[6%rr<#mr:Bs`qtL$g&H2Y(s$X6kq#C9jo`+Uas8;lr$i9o'nc/Ld
-s6BXYoD\b(r;ZTms7lNYs7$'`s8VQfs6p!frVc`umI^GSrr3?)nc/@Qs8VQfs6]gc"7Q9in,<8+
-p\Og]s763hs7Z0dnFkWSI/j$Bnkn9Do`"mjp>c1'~>
-i;WrSs7H?^rr3]+s8Vlns8DutqssdTr:p<dmJlnWrr4#2s8V`hpAb'jr9F=^mJm.bs6BXaoDeLZ
-qu?WkoDS[shbO4Hs8VlomJm4WrVm5is7cNfs8VQfs6ose!;QNm$iL&&s69R`p](-jmJ[&$kPX`I
-o)JRds7lWon+QeMs8)9co)JF\r;R-'q#:6is8W#ms6Tab"RuHjs82fq%I=&js8Voprr<#ks8W&s
-rsSi+s6p!bs7lWfs7cKl)u9)A+Fhr3s68@=90rUUq>]XXrU0^cqXjgfnGW@eo)AY)q#CBdr;ZQl
-nGiOUs6fdas8)Hho)8Lcq$[6%rr<#mr:Bs`qtL$g&H2Y(s$X6kq#C9jo`+Uas8;lr$i9o'nc/Ld
-s6BXYoD\b(r;ZTms7lNYs7$'`s8VQfs6p!frVc`umI^GSrr3?)nc/@Qs8VQfs6]gc"7Q9in,<8+
-p\Og]s763hs7Z0dnFkWSI/j$Bnkn9Do`"mjp>c1'~>
-hu=5brVuosp%J+TrVu`ds8V?\s8Vt's8Muss7QB^s7cQiqu?]ds8VQfqtg3^s8VWas7cQjoD/Fd
-rqZT]s82ior:^0\s8N&^mJlt]mJm4[s7u]ds8N#toDeXdq>'^`s7u]nqu>mQs8)ZnqXOUUs7u]f
-rr3,hq>^EjrVmT+rr;ilqu?]ls8Vfmq>^3hrqcZkrr5afo`+:Xs6Ta_nc/XYs7$'gn,*.Ts8V]j
-irArVq"+O\s7uTipAb0[rVHKmmf1RE0,FTt+@_mPq#B[Vs7Q-dn+m"`o`+dfoDS%Us82fn#lj]"
-o(rCbm/?n_&,Gu"q>L?ir;ZEhs82fgrVlg,li6taq"OgHs8Vlgs7--grW)Kert4r#s8Vcls7lEi
-meZtRs7QElrquuas7cQer;RN-s8V]jrr2rhs8)ccs7QEfs8;o`rr2p8rVuoos7lWds8V]jrr2rh
-s8W&ps7H?bs7lQm&b5]]%TrQ*pAb0ks8Vrks8W)Ys*t~>
-hu=5brVuosp%J+TrVu`ds8V?\s8Vt's8Muss7QB^s7cQiqu?]ds8VQfqtg3^s8VWas7cQjoD/Fd
-rqZT]s82ior:^0\s8N&^mJlt]mJm4[s7u]ds8N#toDeXdq>'^`s7u]nqu>mQs8)ZnqXOUUs7u]f
-rr3,hq>^EjrVmT+rr;ilqu?]ls8Vfmq>^3hrqcZkrr5afo`+:Xs6Ta_nc/XYs7$'gn,*.Ts8V]j
-irArVq"+O\s7uTipAb0[rVHKmmf1RE0,FTt+@_mPq#B[Vs7Q-dn+m"`o`+dfoDS%Us82fn#lj]"
-o(rCbm/?n_&,Gu"q>L?ir;ZEhs82fgrVlg,li6taq"OgHs8Vlgs7--grW)Kert4r#s8Vcls7lEi
-meZtRs7QElrquuas7cQer;RN-s8V]jrr2rhs8)ccs7QEfs8;o`rr2p8rVuoos7lWds8V]jrr2rh
-s8W&ps7H?bs7lQm&b5]]%TrQ*pAb0ks8Vrks8W)Ys*t~>
-hu=5brVuosp%J+TrVu`ds8V?\s8Vt's8Muss7QB^s7cQiqu?]ds8VQfqtg3^s8VWas7cQjoD/Fd
-rqZT]s82ior:^0\s8N&^mJlt]mJm4[s7u]ds8N#toDeXdq>'^`s7u]nqu>mQs8)ZnqXOUUs7u]f
-rr3,hq>^EjrVmT+rr;ilqu?]ls8Vfmq>^3hrqcZkrr5afo`+:Xs6Ta_nc/XYs7$'gn,*.Ts8V]j
-irArVq"+O\s7uTipAb0[rVHKmmf1RE0,FTt+@_mPq#B[Vs7Q-dn+m"`o`+dfoDS%Us82fn#lj]"
-o(rCbm/?n_&,Gu"q>L?ir;ZEhs82fgrVlg,li6taq"OgHs8Vlgs7--grW)Kert4r#s8Vcls7lEi
-meZtRs7QElrquuas7cQer;RN-s8V]jrr2rhs8)ccs7QEfs8;o`rr2p8rVuoos7lWds8V]jrr2rh
-s8W&ps7H?bs7lQm&b5]]%TrQ*pAb0ks8Vrks8W)Ys*t~>
-iVs,Srr<#ms8Mus..mQ;qu?Wpp]'m]s8Dutqt^9lkl:JYq"FUbs82Efs7uWfs82inp&4mi!Vl?e
-rt+u$s8VZir;ZNks8)<dp&FFYs$cb`s7?9jhZ*KJs82]nq"t'js7u]ns8;Qfs7QElkl:VRq#Bsc
-p&F[\s6TIZq>^-fnc/OSs8Dusr;ZEhq>^Kbs7cQmrVlrnqtU*h!qPsWrr4&<o`+OSs8W)up](9_
-s7$'Zs7Q0es7cBiq>^6ip%\Od!9jF^#Pe?!q#CBnq#::&p&G'cs5j:\kPkMAs5s=\(]+1&s7u]b
-s8)cms8)cps6BXap%SLdq#::@o)J"Lo`+shs7u3bo`"Xcs60LLs8VEbs8;`nq==Rcl2UJWqu$K^
-rr3)us8W)trr`#qr:g'f+o1]qs6'FZq#C6gs7lKcs8UsUs7cQfr;Z0as7Pp^s82cort"Ppq#C6g
-s7lKkq>^0grVc`q(%M>!s7QElq>^9jrqufcs8W)rs8VuWs*t~>
-iVs,Srr<#ms8Mus..mQ;qu?Wpp]'m]s8Dutqt^9lkl:JYq"FUbs82Efs7uWfs82inp&4mi!Vl?e
-rt+u$s8VZir;ZNks8)<dp&FFYs$cb`s7?9jhZ*KJs82]nq"t'js7u]ns8;Qfs7QElkl:VRq#Bsc
-p&F[\s6TIZq>^-fnc/OSs8Dusr;ZEhq>^Kbs7cQmrVlrnqtU*h!qPsWrr4&<o`+OSs8W)up](9_
-s7$'Zs7Q0es7cBiq>^6ip%\Od!9jF^#Pe?!q#CBnq#::&p&G'cs5j:\kPkMAs5s=\(]+1&s7u]b
-s8)cms8)cps6BXap%SLdq#::@o)J"Lo`+shs7u3bo`"Xcs60LLs8VEbs8;`nq==Rcl2UJWqu$K^
-rr3)us8W)trr`#qr:g'f+o1]qs6'FZq#C6gs7lKcs8UsUs7cQfr;Z0as7Pp^s82cort"Ppq#C6g
-s7lKkq>^0grVc`q(%M>!s7QElq>^9jrqufcs8W)rs8VuWs*t~>
-iVs,Srr<#ms8Mus..mQ;qu?Wpp]'m]s8Dutqt^9lkl:JYq"FUbs82Efs7uWfs82inp&4mi!Vl?e
-rt+u$s8VZir;ZNks8)<dp&FFYs$cb`s7?9jhZ*KJs82]nq"t'js7u]ns8;Qfs7QElkl:VRq#Bsc
-p&F[\s6TIZq>^-fnc/OSs8Dusr;ZEhq>^Kbs7cQmrVlrnqtU*h!qPsWrr4&<o`+OSs8W)up](9_
-s7$'Zs7Q0es7cBiq>^6ip%\Od!9jF^#Pe?!q#CBnq#::&p&G'cs5j:\kPkMAs5s=\(]+1&s7u]b
-s8)cms8)cps6BXap%SLdq#::@o)J"Lo`+shs7u3bo`"Xcs60LLs8VEbs8;`nq==Rcl2UJWqu$K^
-rr3)us8W)trr`#qr:g'f+o1]qs6'FZq#C6gs7lKcs8UsUs7cQfr;Z0as7Pp^s82cort"Ppq#C6g
-s7lKkq>^0grVc`q(%M>!s7QElq>^9jrqufcs8W)rs8VuWs*t~>
-i;X\as8Duqs8DrsoDe1Ws7uTms7H?kq#C-hq#14/pAagcs7cQkrr;orpAb0is8Vrps8Vijrr3#q
-rVlg%q>^Hhr;ZHgrVmN%s8W)ss7?9is7H?bs8Vrqs8)`p$hsYss8)ZnrVuonp&4mmq#C!arVllp
-rr3>ss8)c`s8M`lqYU3j!q?6frVpj2s8;osoDeOar;Z`hs8N&uoDJLcq>U<lqu-Ejrr;cks82Tk
-p&Fjdrr<#os8Duas8V]gp]'d\p&Fdas7?9eq#C-es8)cqq#CBhs8Vfmp](3js8)<dq"asirr;fe
-s8N&umJm4_s8Vinqu?Whqu9"Ns7ZKmrr<#ss8Vigs82]nr;Zfrqu?]mqu?9fqu?]hs82imrVuHg
-s8)Egnc/7]p[\@`s763ir;$Bcs8Vrqs8)`p&,Q>+p](9ks763irqHHmqYgEon,<7oqZ$Tls8W)u
-p&>!fqYpm$s8)Whs7QEjr;Q^"p](9kr;PdWJ,~>
-i;X\as8Duqs8DrsoDe1Ws7uTms7H?kq#C-hq#14/pAagcs7cQkrr;orpAb0is8Vrps8Vijrr3#q
-rVlg%q>^Hhr;ZHgrVmN%s8W)ss7?9is7H?bs8Vrqs8)`p$hsYss8)ZnrVuonp&4mmq#C!arVllp
-rr3>ss8)c`s8M`lqYU3j!q?6frVpj2s8;osoDeOar;Z`hs8N&uoDJLcq>U<lqu-Ejrr;cks82Tk
-p&Fjdrr<#os8Duas8V]gp]'d\p&Fdas7?9eq#C-es8)cqq#CBhs8Vfmp](3js8)<dq"asirr;fe
-s8N&umJm4_s8Vinqu?Whqu9"Ns7ZKmrr<#ss8Vigs82]nr;Zfrqu?]mqu?9fqu?]hs82imrVuHg
-s8)Egnc/7]p[\@`s763ir;$Bcs8Vrqs8)`p&,Q>+p](9ks763irqHHmqYgEon,<7oqZ$Tls8W)u
-p&>!fqYpm$s8)Whs7QEjr;Q^"p](9kr;PdWJ,~>
-i;X\as8Duqs8DrsoDe1Ws7uTms7H?kq#C-hq#14/pAagcs7cQkrr;orpAb0is8Vrps8Vijrr3#q
-rVlg%q>^Hhr;ZHgrVmN%s8W)ss7?9is7H?bs8Vrqs8)`p$hsYss8)ZnrVuonp&4mmq#C!arVllp
-rr3>ss8)c`s8M`lqYU3j!q?6frVpj2s8;osoDeOar;Z`hs8N&uoDJLcq>U<lqu-Ejrr;cks82Tk
-p&Fjdrr<#os8Duas8V]gp]'d\p&Fdas7?9eq#C-es8)cqq#CBhs8Vfmp](3js8)<dq"asirr;fe
-s8N&umJm4_s8Vinqu?Whqu9"Ns7ZKmrr<#ss8Vigs82]nr;Zfrqu?]mqu?9fqu?]hs82imrVuHg
-s8)Egnc/7]p[\@`s763ir;$Bcs8Vrqs8)`p&,Q>+p](9ks763irqHHmqYgEon,<7oqZ$Tls8W)u
-p&>!fqYpm$s8)Whs7QEjr;Q^"p](9kr;PdWJ,~>
-JcF^/#QFZ$rVuoqp\b$qp\Y!grVuWkrs&?"s82Wlq#:9pqY:$grs\c%q#:<cs8W#ss8)]nrrMrl
-qu6lrrr;Qhs8DZk'DVUss8VZis7cQnirB&Ls8;corVca!pAb!hp@nReqYU:(q>C9mkl:\Os8VWf
-s7$'grdk+1s*t~>
-JcF^/#QFZ$rVuoqp\b$qp\Y!grVuWkrs&?"s82Wlq#:9pqY:$grs\c%q#:<cs8W#ss8)]nrrMrl
-qu6lrrr;Qhs8DZk'DVUss8VZis7cQnirB&Ls8;corVca!pAb!hp@nReqYU:(q>C9mkl:\Os8VWf
-s7$'grdk+1s*t~>
-JcF^/#QFZ$rVuoqp\b$qp\Y!grVuWkrs&?"s82Wlq#:9pqY:$grs\c%q#:<cs8W#ss8)]nrrMrl
-qu6lrrr;Qhs8DZk'DVUss8VZis7cQnirB&Ls8;corVca!pAb!hp@nReqYU:(q>C9mkl:\Os8VWf
-s7$'grdk+1s*t~>
-JcF[.$1Iomo(MnZqYU6jruM"/s8Vrqs7uEhrVH6fr;HWoqtg?mr9X%Tqu?9drr3#up&=slq"ajf
-!r)9Zrr32as8VEbs7Z3e%eB&fo`+FUrq$0in+-MQrr3B"s8N#ns8Vccs8MW`rrVrcpAY'po`+s`
-m/6kcq>:3bJcFd1J,~>
-JcF[.$1Iomo(MnZqYU6jruM"/s8Vrqs7uEhrVH6fr;HWoqtg?mr9X%Tqu?9drr3#up&=slq"ajf
-!r)9Zrr32as8VEbs7Z3e%eB&fo`+FUrq$0in+-MQrr3B"s8N#ns8Vccs8MW`rrVrcpAY'po`+s`
-m/6kcq>:3bJcFd1J,~>
-JcF[.$1Iomo(MnZqYU6jruM"/s8Vrqs7uEhrVH6fr;HWoqtg?mr9X%Tqu?9drr3#up&=slq"ajf
-!r)9Zrr32as8VEbs7Z3e%eB&fo`+FUrq$0in+-MQrr3B"s8N#ns8Vccs8MW`rrVrcpAY'po`+s`
-m/6kcq>:3bJcFd1J,~>
-JcF^/%/p4rnc/Oequ?]pq=ssh'Dhb*s8;oqq>^?ls8)Wms7u]pq>UC,rVlisqu?]fs8Vufq>^Ko
-q>:'es8Vs"s7?6irUfmb!WDckrrMchr;RE/s7QElo)JUep](9bs8Vris8;]ms82`o&,Z2&pA=mi
-p&+L_s7QEjoR[&&s*t~>
-JcF^/%/p4rnc/Oequ?]pq=ssh'Dhb*s8;oqq>^?ls8)Wms7u]pq>UC,rVlisqu?]fs8Vufq>^Ko
-q>:'es8Vs"s7?6irUfmb!WDckrrMchr;RE/s7QElo)JUep](9bs8Vris8;]ms82`o&,Z2&pA=mi
-p&+L_s7QEjoR[&&s*t~>
-JcF^/%/p4rnc/Oequ?]pq=ssh'Dhb*s8;oqq>^?ls8)Wms7u]pq>UC,rVlisqu?]fs8Vufq>^Ko
-q>:'es8Vs"s7?6irUfmb!WDckrrMchr;RE/s7QElo)JUep](9bs8Vris8;]ms82`o&,Z2&pA=mi
-p&+L_s7QEjoR[&&s*t~>
-JcF^/(%VD)qZ$Qpp\Xges8Dfmp[eFbli6e[rrV]hn,E>7o_8Ccqu-Hgs8VQfs7uKes7Q3bs8Dfc
-s8W)oqu$<gs7H9cs7cQmq#:Eps7-*g%.sT!oDe4Wrr<#ps7-*g"7H0fr;Q]trr;rlrseu'qu$<g
-s8W)us6K^^rr3/us8W#srdk+1s*t~>
-JcF^/(%VD)qZ$Qpp\Xges8Dfmp[eFbli6e[rrV]hn,E>7o_8Ccqu-Hgs8VQfs7uKes7Q3bs8Dfc
-s8W)oqu$<gs7H9cs7cQmq#:Eps7-*g%.sT!oDe4Wrr<#ps7-*g"7H0fr;Q]trr;rlrseu'qu$<g
-s8W)us6K^^rr3/us8W#srdk+1s*t~>
-JcF^/(%VD)qZ$Qpp\Xges8Dfmp[eFbli6e[rrV]hn,E>7o_8Ccqu-Hgs8VQfs7uKes7Q3bs8Dfc
-s8W)oqu$<gs7H9cs7cQmq#:Eps7-*g%.sT!oDe4Wrr<#ps7-*g"7H0fr;Q]trr;rlrseu'qu$<g
-s8W)us6K^^rr3/us8W#srdk+1s*t~>
-JcF[.#k@rpk5Y>Qs8N#t(&7\+s7--hp](9boDej^s8;`ns7c-art>>2s7$'_q>^Enqu?]kr;ZZo
-rr)itp\Fgg"nqTfmJleQrrq`gpAaa]rr2uirVlokrVlfsp&+gnmeHh^l2LMY!<2rs%eof!s82Hg
-s7lKgl2USXrr2ukJcFg2J,~>
-JcF[.#k@rpk5Y>Qs8N#t(&7\+s7--hp](9boDej^s8;`ns7c-art>>2s7$'_q>^Enqu?]kr;ZZo
-rr)itp\Fgg"nqTfmJleQrrq`gpAaa]rr2uirVlokrVlfsp&+gnmeHh^l2LMY!<2rs%eof!s82Hg
-s7lKgl2USXrr2ukJcFg2J,~>
-JcF[.#k@rpk5Y>Qs8N#t(&7\+s7--hp](9boDej^s8;`ns7c-art>>2s7$'_q>^Enqu?]kr;ZZo
-rr)itp\Fgg"nqTfmJleQrrq`gpAaa]rr2uirVlokrVlfsp&+gnmeHh^l2LMY!<2rs%eof!s82Hg
-s7lKgl2USXrr2ukJcFg2J,~>
-JcF^/48/^Js8Vuls82]er;Zfrs8V?RrrDBbs8;ojs8;ons8W)urU]j`o)&ISs8McXFo^7rs8Duq
-p\k'fqu-Ntp]1?or:U(%r;HZ\&c_Fts8Vhn*]"3&mIpMY!r)`crr2ujp](9ls82cp%eTens4'Rf
-"`;Naq#C?dJcFd1J,~>
-JcF^/48/^Js8Vuls82]er;Zfrs8V?RrrDBbs8;ojs8;ons8W)urU]j`o)&ISs8McXFo^7rs8Duq
-p\k'fqu-Ntp]1?or:U(%r;HZ\&c_Fts8Vhn*]"3&mIpMY!r)`crr2ujp](9ls82cp%eTens4'Rf
-"`;Naq#C?dJcFd1J,~>
-JcF^/48/^Js8Vuls82]er;Zfrs8V?RrrDBbs8;ojs8;ons8W)urU]j`o)&ISs8McXFo^7rs8Duq
-p\k'fqu-Ntp]1?or:U(%r;HZ\&c_Fts8Vhn*]"3&mIpMY!r)`crr2ujp](9ls82cp%eTens4'Rf
-"`;Naq#C?dJcFd1J,~>
-JcF^/-2dN;li-GSr:g6kn,E@Zs82`opCRAopAFsds8VTgmelPRs7uZnrsSi+irB#\7h5S!oCi1`
-!;ZWo"oSE!o`"pjrrW&mq=t!irVum9!;HEbqu?[9l2C\_p](9clMpn^s8)?Zs8VihrrDlorseo+
-rUg-hs8P0/lg\Larr3#moR[&&s*t~>
-JcF^/-2dN;li-GSr:g6kn,E@Zs82`opCRAopAFsds8VTgmelPRs7uZnrsSi+irB#\7h5S!oCi1`
-!;ZWo"oSE!o`"pjrrW&mq=t!irVum9!;HEbqu?[9l2C\_p](9clMpn^s8)?Zs8VihrrDlorseo+
-rUg-hs8P0/lg\Larr3#moR[&&s*t~>
-JcF^/-2dN;li-GSr:g6kn,E@Zs82`opCRAopAFsds8VTgmelPRs7uZnrsSi+irB#\7h5S!oCi1`
-!;ZWo"oSE!o`"pjrrW&mq=t!irVum9!;HEbqu?[9l2C\_p](9clMpn^s8)?Zs8VihrrDlorseo+
-rUg-hs8P0/lg\Larr3#moR[&&s*t~>
-JcF[.70Shd+M3OG(8Um.riJ-[/>)nA!WWc1nO!I@pA+RkQkUF6p](9Z5mogQpAb*e!<;fns7H0f
-s7`BG!%k)JrrAE$%^k^!rrrAh1CKlXrr<#s'EJ16!<N)tm7IOWlhUPL0]i8m.eNN9"o"lL!%k)G
-rsJr/p](.$nGiOds7_*EjSs`~>
-JcF[.70Shd+M3OG(8Um.riJ-[/>)nA!WWc1nO!I@pA+RkQkUF6p](9Z5mogQpAb*e!<;fns7H0f
-s7`BG!%k)JrrAE$%^k^!rrrAh1CKlXrr<#s'EJ16!<N)tm7IOWlhUPL0]i8m.eNN9"o"lL!%k)G
-rsJr/p](.$nGiOds7_*EjSs`~>
-JcF[.70Shd+M3OG(8Um.riJ-[/>)nA!WWc1nO!I@pA+RkQkUF6p](9Z5mogQpAb*e!<;fns7H0f
-s7`BG!%k)JrrAE$%^k^!rrrAh1CKlXrr<#s'EJ16!<N)tm7IOWlhUPL0]i8m.eNN9"o"lL!%k)G
-rsJr/p](.$nGiOds7_*EjSs`~>
-JcF[..fjAaQ4IQjSJV>+r#*$@ZY&_,!VucoBlsB/Du]M9EU];5rVu\0POZ/#rVlg4!<;rro_ngb
-oD]r@p:4Q/r;_W1Te#U2rrD]js$?PYqdcSnI/s6Gr;ZNk!<;s#qYsojrO?nJs80N!rD:6%r;Z]p
-s8N&poD]r@p:4Q/s8Vur;N:_9CgR/<s8W)kJcFg2J,~>
-JcF[..fjAaQ4IQjSJV>+r#*$@ZY&_,!VucoBlsB/Du]M9EU];5rVu\0POZ/#rVlg4!<;rro_ngb
-oD]r@p:4Q/r;_W1Te#U2rrD]js$?PYqdcSnI/s6Gr;ZNk!<;s#qYsojrO?nJs80N!rD:6%r;Z]p
-s8N&poD]r@p:4Q/s8Vur;N:_9CgR/<s8W)kJcFg2J,~>
-JcF[..fjAaQ4IQjSJV>+r#*$@ZY&_,!VucoBlsB/Du]M9EU];5rVu\0POZ/#rVlg4!<;rro_ngb
-oD]r@p:4Q/r;_W1Te#U2rrD]js$?PYqdcSnI/s6Gr;ZNk!<;s#qYsojrO?nJs80N!rD:6%r;Z]p
-s8N&poD]r@p:4Q/s8Vur;N:_9CgR/<s8W)kJcFg2J,~>
-JcF[.0`_7Pm/["_rrDuqq\P"Ks7lWf!<;cm)#4'e"98B$s7?6js7cNrp]'8"r:Kpe"T.lks&]-q
-s8*=Xl14lQrrMurncSplqYpNop%/.^q\o#)lkB<ks8)cj!qH9js6ps$o(!^or;-Fi=C^=is7cQa
-pAb-ls8*=Xl14lQoDe^fi#Mdt+m](%rqV-Fir=N~>
-JcF[.0`_7Pm/["_rrDuqq\P"Ks7lWf!<;cm)#4'e"98B$s7?6js7cNrp]'8"r:Kpe"T.lks&]-q
-s8*=Xl14lQrrMurncSplqYpNop%/.^q\o#)lkB<ks8)cj!qH9js6ps$o(!^or;-Fi=C^=is7cQa
-pAb-ls8*=Xl14lQoDe^fi#Mdt+m](%rqV-Fir=N~>
-JcF[.0`_7Pm/["_rrDuqq\P"Ks7lWf!<;cm)#4'e"98B$s7?6js7cNrp]'8"r:Kpe"T.lks&]-q
-s8*=Xl14lQrrMurncSplqYpNop%/.^q\o#)lkB<ks8)cj!qH9js6ps$o(!^or;-Fi=C^=is7cQa
-pAb-ls8*=Xl14lQoDe^fi#Mdt+m](%rqV-Fir=N~>
-JcF^/$2+o.s8Nhls82lrrt=44'K<T$rrDfnoF9p^rsJf&%0$89!<<)b"onW-!<<)nqZQou1AUY?
-s8VfTC*kdZr:L'ipAY-mr:Bsg!"9S/p\t9hmJd1ds8)WdrrE*!#QOf-!"&f.rr4;CciM>is8;ob
-rsf#/rVuTRC*kdZr;ZfdmV%I@pP)lEs8Vopqgne.s*t~>
-JcF^/$2+o.s8Nhls82lrrt=44'K<T$rrDfnoF9p^rsJf&%0$89!<<)b"onW-!<<)nqZQou1AUY?
-s8VfTC*kdZr:L'ipAY-mr:Bsg!"9S/p\t9hmJd1ds8)WdrrE*!#QOf-!"&f.rr4;CciM>is8;ob
-rsf#/rVuTRC*kdZr;ZfdmV%I@pP)lEs8Vopqgne.s*t~>
-JcF^/$2+o.s8Nhls82lrrt=44'K<T$rrDfnoF9p^rsJf&%0$89!<<)b"onW-!<<)nqZQou1AUY?
-s8VfTC*kdZr:L'ipAY-mr:Bsg!"9S/p\t9hmJd1ds8)WdrrE*!#QOf-!"&f.rr4;CciM>is8;ob
-rsf#/rVuTRC*kdZr;ZfdmV%I@pP)lEs8Vopqgne.s*t~>
-JcF[.9*#"cp':Wir!EB$me$MYYS6d2!<;lp)<h+^+Sl$;s7l?jq<e2+q#C$ds6KRX!rW)or;Zfr
-qZ$<ipUCP0rrDofrrDrqs8;Hbqu6U#pFPJ,li@(arr4eM!;uisq=G*qq"k$_m/-fe<Fc'rrq?B`
-rVuirqZ$<ipUCP0qtpEk$1e#orrE)js8VP=s5X-0~>
-JcF[.9*#"cp':Wir!EB$me$MYYS6d2!<;lp)<h+^+Sl$;s7l?jq<e2+q#C$ds6KRX!rW)or;Zfr
-qZ$<ipUCP0rrDofrrDrqs8;Hbqu6U#pFPJ,li@(arr4eM!;uisq=G*qq"k$_m/-fe<Fc'rrq?B`
-rVuirqZ$<ipUCP0qtpEk$1e#orrE)js8VP=s5X-0~>
-JcF[.9*#"cp':Wir!EB$me$MYYS6d2!<;lp)<h+^+Sl$;s7l?jq<e2+q#C$ds6KRX!rW)or;Zfr
-qZ$<ipUCP0rrDofrrDrqs8;Hbqu6U#pFPJ,li@(arr4eM!;uisq=G*qq"k$_m/-fe<Fc'rrq?B`
-rVuirqZ$<ipUCP0qtpEk$1e#orrE)js8VP=s5X-0~>
-JcF[.!!*#u0`_4QrX/5rs'j%=]aXr@1=c$n?B+`3AGu65m/QSYqZ$S"N:4]/o)8Ug"RQ0goDe7X
-rsiM>m'7u<nIts&q?m#trr)j?rr;ZT@C>fB&+9Jtqtp:#rV['&s)En[c=ZqQs0b8o8YQ.`"Si#l
-r:p9k"CeJ!Z6oSN%J'N_C&.Oa70!;Nqu?PEs5a31~>
-JcF[.!!*#u0`_4QrX/5rs'j%=]aXr@1=c$n?B+`3AGu65m/QSYqZ$S"N:4]/o)8Ug"RQ0goDe7X
-rsiM>m'7u<nIts&q?m#trr)j?rr;ZT@C>fB&+9Jtqtp:#rV['&s)En[c=ZqQs0b8o8YQ.`"Si#l
-r:p9k"CeJ!Z6oSN%J'N_C&.Oa70!;Nqu?PEs5a31~>
-JcF[.!!*#u0`_4QrX/5rs'j%=]aXr@1=c$n?B+`3AGu65m/QSYqZ$S"N:4]/o)8Ug"RQ0goDe7X
-rsiM>m'7u<nIts&q?m#trr)j?rr;ZT@C>fB&+9Jtqtp:#rV['&s)En[c=ZqQs0b8o8YQ.`"Si#l
-r:p9k"CeJ!Z6oSN%J'N_C&.Oa70!;Nqu?PEs5a31~>
-JcF^/9`,:rq>(Ttn*UP_j8Z/$%48ggrQHBWs62]U-2[]A!;ulq!<;ZgqHa.WK)blHoF:j#p\uiF
-p\jib0-DUPs8N)ls8N*!qt^-cs8Dor#Nn&HW<rV!r;Q^3!<2uus81be"W!X.naRmpjPMT[q#C$c
-rtkJ/pS]_f-Fs0LnGhn@D#a]1jn\QKpA':>j8XW~>
-JcF^/9`,:rq>(Ttn*UP_j8Z/$%48ggrQHBWs62]U-2[]A!;ulq!<;ZgqHa.WK)blHoF:j#p\uiF
-p\jib0-DUPs8N)ls8N*!qt^-cs8Dor#Nn&HW<rV!r;Q^3!<2uus81be"W!X.naRmpjPMT[q#C$c
-rtkJ/pS]_f-Fs0LnGhn@D#a]1jn\QKpA':>j8XW~>
-JcF^/9`,:rq>(Ttn*UP_j8Z/$%48ggrQHBWs62]U-2[]A!;ulq!<;ZgqHa.WK)blHoF:j#p\uiF
-p\jib0-DUPs8N)ls8N*!qt^-cs8Dor#Nn&HW<rV!r;Q^3!<2uus81be"W!X.naRmpjPMT[q#C$c
-rtkJ/pS]_f-Fs0LnGhn@D#a]1jn\QKpA':>j8XW~>
-JcF^/!:p-h!W)corrD9^rt"f&s8DuknGiO]q>^<gs8)`p%/BJpnc/XZs8VQfs7QBk)"djoq>^Kg
-s82iroCN"\p\4^fq#:0^s8Drs)>sL2s8VKds82Bes8Ducqu?]bs7c3dp](*hrtbA/nc/Rds7lTn
-s8;`ms82iroCN"\p\t1#o(N"]s8;osrVuBbJcFd1J,~>
-JcF^/!:p-h!W)corrD9^rt"f&s8DuknGiO]q>^<gs8)`p%/BJpnc/XZs8VQfs7QBk)"djoq>^Kg
-s82iroCN"\p\4^fq#:0^s8Drs)>sL2s8VKds82Bes8Ducqu?]bs7c3dp](*hrtbA/nc/Rds7lTn
-s8;`ms82iroCN"\p\t1#o(N"]s8;osrVuBbJcFd1J,~>
-JcF^/!:p-h!W)corrD9^rt"f&s8DuknGiO]q>^<gs8)`p%/BJpnc/XZs8VQfs7QBk)"djoq>^Kg
-s82iroCN"\p\4^fq#:0^s8Drs)>sL2s8VKds82Bes8Ducqu?]bs7c3dp](*hrtbA/nc/Rds7lTn
-s8;`ms82iroCN"\p\t1#o(N"]s8;osrVuBbJcFd1J,~>
-JcFX-!V?<hrtbJ2s82`os8N#ms8VHbs8V`ks8Dipo`"k/oDejbs82irq>^Kas7ZKhs7ZEkp&G'[
-s8VZgrrMWbrVlllrr<#rs8NE(s8W)ps7uZorVlg>p&>!jrVuTkq#CBjp&G'hrr)los6]jYs8Vck
-s6fpeo)AY!p]'a_s7ZKgs7uWnnGi66s5X-0~>
-JcFX-!V?<hrtbJ2s82`os8N#ms8VHbs8V`ks8Dipo`"k/oDejbs82irq>^Kas7ZKhs7ZEkp&G'[
-s8VZgrrMWbrVlllrr<#rs8NE(s8W)ps7uZorVlg>p&>!jrVuTkq#CBjp&G'hrr)los6]jYs8Vck
-s6fpeo)AY!p]'a_s7ZKgs7uWnnGi66s5X-0~>
-JcFX-!V?<hrtbJ2s82`os8N#ms8VHbs8V`ks8Dipo`"k/oDejbs82irq>^Kas7ZKhs7ZEkp&G'[
-s8VZgrrMWbrVlllrr<#rs8NE(s8W)ps7uZorVlg>p&>!jrVuTkq#CBjp&G'hrr)los6]jYs8Vck
-s6fpeo)AY!p]'a_s7ZKgs7uWnnGi66s5X-0~>
-l2LbZrr33!r;Zfks8Dor!rDrkrr33%s7lQmq>1'i%fZM$s8W)uoDeF^q"FadJcC<$JcGWIJ,~>
-l2LbZrr33!r;Zfks8Dor!rDrkrr33%s7lQmq>1'i%fZM$s8W)uoDeF^q"FadJcC<$JcGWIJ,~>
-l2LbZrr33!r;Zfks8Dor!rDrkrr33%s7lQmq>1'i%fZM$s8W)uoDeF^q"FadJcC<$JcGWIJ,~>
-kl1_]oD8@a+8l07nb`@^s8Vccr;Zfqs8Dutp\=dcs8Micp&G'jq#13noCdb8JcC<$r;V9~>
-kl1_]oD8@a+8l07nb`@^s8Vccr;Zfqs8Dutp\=dcs8Micp&G'jq#13noCdb8JcC<$r;V9~>
-kl1_]oD8@a+8l07nb`@^s8Vccr;Zfqs8Dutp\=dcs8Micp&G'jq#13noCdb8JcC<$r;V9~>
-l2Le]rVc`uq"+ObrVlosr;-EnpAY'srr<#trVuWXrr3#tqu6Ttr;-BfJcC<$JcGWIJ,~>
-l2Le]rVc`uq"+ObrVlosr;-EnpAY'srr<#trVuWXrr3#tqu6Ttr;-BfJcC<$JcGWIJ,~>
-l2Le]rVc`uq"+ObrVlosr;-EnpAY'srr<#trVuWXrr3#tqu6Ttr;-BfJcC<$JcGWIJ,~>
-l2Lnbs8Vokrr3]'rr;ohs8DlqmJ[(Sn,NF_rr;c^rr3N*s8Vuks8W#ss7Z'as7u>=s+13$s8;nI~>
-l2Lnbs8Vokrr3]'rr;ohs8DlqmJ[(Sn,NF_rr;c^rr3N*s8Vuks8W#ss7Z'as7u>=s+13$s8;nI~>
-l2Lnbs8Vokrr3]'rr;ohs8DlqmJ[(Sn,NF_rr;c^rr3N*s8Vuks8W#ss7Z'as7u>=s+13$s8;nI~>
-kPl+kq#(0kp&G'goDS^hqu??^rVm#ss7uWlrr3&lrVlcq!rW)trr<#sJcC<$JcGTHJ,~>
-kPl+kq#(0kp&G'goDS^hqu??^rVm#ss7uWlrr3&lrVlcq!rW)trr<#sJcC<$JcGTHJ,~>
-kPl+kq#(0kp&G'goDS^hqu??^rVm#ss7uWlrr3&lrVlcq!rW)trr<#sJcC<$JcGTHJ,~>
-l2Le^qu6Trp\b$j&,lP.o)8U\s8Vfms7?6hq#:9rnc/OepAY(!r:g6`qYL6er;ZVEs+13$s8;nI~>
-l2Le^qu6Trp\b$j&,lP.o)8U\s8Vfms7?6hq#:9rnc/OepAY(!r:g6`qYL6er;ZVEs+13$s8;nI~>
-l2Le^qu6Trp\b$j&,lP.o)8U\s8Vfms7?6hq#:9rnc/OepAY(!r:g6`qYL6er;ZVEs+13$s8;nI~>
-k5Q_)rr<#lp&FX`s7cQhs7Z<hs8DutqZ$0ep%&.[s8Dunr;ZNbrVlumqZ#u7s+13$s8;nI~>
-k5Q_)rr<#lp&FX`s7cQhs7Z<hs8DutqZ$0ep%&.[s8Dunr;ZNbrVlumqZ#u7s+13$s8;nI~>
-k5Q_)rr<#lp&FX`s7cQhs7Z<hs8DutqZ$0ep%&.[s8Dunr;ZNbrVlumqZ#u7s+13$s8;nI~>
-l2NC0s8VZis-XfMeGlm-(Uj@a(F-Njs1fWp`W*e''u0d[+<@ojZmug4oDc6F+LZk$JcD,;qYog\
-J,~>
-l2NC0s8VZis-XfMeGlm-(Uj@a(F-Njs1fWp`W*e''u0d[+<@ojZmug4oDc6F+LZk$JcC<$qu;0~>
-l2NC0s8VZis-XfMeGlm-(Uj@a(F-Njs1fWp`W*e''u0d[+<@ojZmug4oDc6F+LZk$JcC<$qu;0~>
-kl1\ZoD\bA%ahRUp-P1O7/o??WB1(V1pE]9s!snQ1B0J7[5.VFUS]gHpH,d[2uipUq>L6k"o82u
-rr2kIs+13LrrE&rquQEerr2?cJ,~>
-kl1\ZoD\bA%ahRUp-P1O7/o??WB1(V1pE]9s!snQ1B0J7[5.VFUS]gHpH,d[2uipUq>L6k"o82u
-rr2kIs+13FrrDfYs*t~>
-kl1\ZoD\bA%ahRUp-P1O7/o??WB1(V1pE]9s!snQ1B0J7[5.VFUS]gHpH,d[2uipUq>L6k"o82u
-rr2kIs+13FrrDo\s*t~>
-l2NL9s8;]mlP[^`$iD+2k8!J#q">Ens!$P(%e(55mhPj5n(nl\"97^,li.:Trs/Jtrr)j'q=O[Z
-qu6Nms8RZJJc)PG"T.iaq"t'g!r2comf.e~>
-l2NL9s8;]mlP[^`$iD+2k8!J#q">Ens!$P(%e(55mhPj5n(nl\"97^,li.:Trs/Jtrr)j'q=O[Z
-qu6Nms8RZJJc)MF!r2WhrVllqm/MS~>
-l2NL9s8;]mlP[^`$iD+2k8!J#q">Ens!$P(%e(55mhPj5n(nl\"97^,li.:Trs/Jtrr)j'q=O[Z
-qu6Nms8RZJJc)PG!ri)qk5Tr~>
-l2NU)s8Drps.0Bb$LA6%rrE'!p[8CfrW)ue"9/B$m0<7es8NGs%J0T$s8N-"q?luns8Vrkp\t6l
-JcC<$WW*;(r;QTerr2KfrpKf:~>
-l2NU)s8Drps.0Bb$LA6%rrE'!p[8CfrW)ue"9/B$m0<7es8NGs%J0T$s8N-"q?luns8Vrkp\t6l
-JcC<$V>gYls8V]Ws*t~>
-l2NU)s8Drps.0Bb$LA6%rrE'!p[8CfrW)ue"9/B$m0<7es8NGs%J0T$s8N-"q?luns8Vrkp\t6l
-JcC<$V>gYps8VcYs*t~>
-kl37.q>^?ln,Mndrr_WI%f[:DknNde&aoK(r"Abl)u:6+lk9<uqtL?frt"Df$i^/8lc--3ZEBq2
-\$`TIZ`\kdJ[DD`$)t/6\[:u.r;Q]`s*t~>
-kl37.q>^?ln,Mndrr_WI%f[:DknNde&aoK(r"Abl)u:6+lk9<uqtL?frt"Df$i^/8lc--3ZEBq2
-\$`TIZ`\kdJ[DD`#H=r4\[;&0rTsQ7~>
-kl37.q>^?ln,Mndrr_WI%f[:DknNde&aoK(r"Abl)u:6+lk9<uqtL?frt"Df$i^/8lc--3ZEBq2
-\$`TIZ`\kdJ[DD`"f\`2\[;"os*t~>
-l2LhPs7ZEk/aB<BqF%]f9E.#MO];AW6Ck#<s%/0S8,[email protected]]nkcH]1AgtKs8D`lrrpF:
-rr<#rJcC<$V>gVpnA4o"s8MZjJ,~>
-l2LhPs7ZEk/aB<BqF%]f9E.#MO];AW6Ck#<s%/0S8,[email protected]]nkcH]1AgtKs8D`lrrpF:
-rr<#rJcC<$V>g\qn%eu&li2J~>
-l2LhPs7ZEk/aB<BqF%]f9E.#MO];AW6Ck#<s%/0S8,[email protected]]nkcH]1AgtKs8D`lrrpF:
-rr<#rJcC<$VuQeq"8V>urTaE5~>
-kl3XAp[S:Q0Esl%s3(fr[f=G^/[k3L_^-&Grjt9(\c8o_*P_Wt"<tV[rPK-YbQ%,1s8W&ts891u
-rr2uoJcC<$V#LSq[f?(#qu?]qo`'F~>
-kl3XAp[S:Q0Esl%s3(fr[f=G^/[k3L_^-&Grjt9(\c8o_*P_Wt"<tV[rPK-YbQ%,1s8W&ts891u
-rr2uoJcC<$VuI&"rr2\urq66hmJh\~>
-kl3XAp[S:Q0Esl%s3(fr[f=G^/[k3L_^-&Grjt9(\c8o_*P_Wt"<tV[rPK-YbQ%,1s8W&ts891u
-rr2uoJcC<$W;d/%q"ss]Z2F4jm/MS~>
-kl2"Xs8V?\s7Z?is8;]m#l"B!nGi:Ws7?0g"76'dnc&Omp](0ks7#XZrs\i&r;Q`%s8W)up&Fi=
-s+13LrrDurrrTY/qYL6lrq-5@~>
-kl2"Xs8V?\s7Z?is8;]m#l"B!nGi:Ws7?0g"76'dnc&Omp](0ks7#XZrs\i&r;Q`%s8W)up&Fi=
-s+13Lrs&8rrr9;'p?Va/~>
-kl2"Xs8V?\s7Z?is8;]m#l"B!nGi:Ws7?0g"76'dnc&Omp](0ks7#XZrs\i&r;Q`%s8W)up&Fi=
-s+13MrsJ_tq"jijqt'^`rU0]9~>
-kl344s7ZKmoDedgs7lWor;ZTmrr;ims8W)urVuoprqlWnrVucps7QEhs8;omrr<#srr3<(s0;V(
-qu?TorIP!"s/#_sYQ+:ls8W)js*t~>
-kl344s7ZKmoDedgs7lWor;ZTmrr;ims8W)urVuoprqlWnrVucps7QEhs8;omrr<#srr3<(s0;V(
-qu?TorIP!"s/H#&rr)]mX8_YTs*t~>
-kl344s7ZKmoDedgs7lWor;ZTmrr;ims8W)urVuoprqlWnrVucps7QEhs8;omrr<#srr3<(s0;V(
-qu?TorIP!"s/Q)+rVQBaql'D[qu-K]s*t~>
-kl1bPs8VcjrttV4rr;TipAb0jp]('hrVuWhs6]jbp&G$irsAZ$s7lWoq=t!eq>UN&s8.BIJcDPG
-$N9u(pU1%rs8N&lrVllro`'F~>
-kl1bPs8VcjrttV4rr;TipAb0jp]('hrVuWhs6]jbp&G$irsAZ$s7lWoq=t!eq>UN&s8.BIJcDPG
-$N0l%p9akos8;ojrVllro`'F~>
-kl1bPs8VcjrttV4rr;TipAb0jp]('hrVuWhs6]jbp&G$irsAZ$s7lWoq=t!eq>UN&s8.BIJcDPG
-$MsSsoWnGgrVccirVllro`'F~>
-kPkYPs8VZhs8Va"rVuohs760hnbiFYrr3,mq#C3frVm;ss8V<_q#9jas7cQeq>UN&s8.BIJcDPG
-$N9u(s0Mb$s6fpbrr2uroDa=~>
-kPkYPs8VZhs8Va"rVuohs760hnbiFYrr3,mq#C3frVm;ss8V<_q#9jas7cQeq>UN&s8.BIJcDPG
-$N9u(s0Mb$s6fpbrr2uroDa=~>
-kPkYPs8VZhs8Va"rVuohs760hnbiFYrr3,mq#C3frVm;ss8V<_q#9jas7cQeq>UN&s8.BIJcDPG
-$N9u(s0Mb$s6fpbrr2uroDa=~>
-l2LkRs8;?brsSPns8Vlorr;ZkoDJUf%/Khks82iro`+Uas5j7[$2FDtrr<#os8VulrrTP,qgncu
-s/#bqrWW2sr361urr*&tmf3=_oDa=~>
-l2LkRs8;?brsSPns8Vlorr;ZkoDJUf%/Khks82iro`+Uas5j7[$2FDtrr<#os8VulrrTP,qgncu
-s.KAnZ2ae%rri5es8Vlcs*t~>
-l2LkRs8;?brsSPns8Vlorr;ZkoDJUf%/Khks82iro`+Uas5j7[$2FDtrr<#os8VulrrTP,qgncu
-s.KAlZi'h,qsOLapAOX`J,~>
-kl1YUrr2ugrr3Dqs8Vfbs8VQfpAap[rr<#d'*%D"s7u]cs8Vc`s7Pj\q"jm_p\t<$s8.BIJcDSH
-$NBqsnG0$[qu#sPrr3#pp%/36~>
-kl1YUrr2ugrr3Dqs8Vfbs8VQfpAap[rr<#d'*%D"s7u]cs8Vc`s7Pj\q"jm_p\t<$s8.BIJcDMF
-"9/&pXoA>$o^Mk[!VuBZs*t~>
-kl1YUrr2ugrr3Dqs8Vfbs8VQfpAap[rr<#d'*%D"s7u]cs8Vc`s7Pj\q"jm_p\t<$s8.BIJcDJE
-!r`/(rr3#no)AXjp[\:Ts*t~>
-kl:\]/H5JFRL9Y(s1KZs_#D:j*5V[TV]62ks/d[j_Yq7g'Y"+^*$`N(s1g$'`q]B0!jhq(JcC<$
-V>h#&q"<qHWq,o\r;6HmrUKo<~>
-kl:\]/H5JFRL9Y(s1KZs_#D:j*5V[TV]62ks/d[j_Yq7g'Y"+^*$`N(s1g$'`q]B0!jhq(JcC<$
-UAk\ss0MV%s8W#qs8;orrq-5@~>
-kl:\]/H5JFRL9Y(s1KZs_#D:j*5V[TV]62ks/d[j_Yq7g'Y"+^*$`N(s1g$'`q]B0!jhq(JcC<$
-T`5#'rVm*$rVu`mrVZ<fJ,~>
-kl1Y\rr4G=+j7n>n5#UR6N9H@VFKqP:SXONpcYFE4o\<LX@E4JT;"sIqbWcO9_eVhZiBoRs+13H
-rt,,$o'Z.anFcP9r:U*bs8Vugs*t~>
-kl1Y\rr4G=+j7n>n5#UR6N9H@VFKqP:SXONpcYFE4o\<LX@E4JT;"sIqbWcO9_eVhZiBoRs+13E
-rso&.bPM5;kl:AVp](6hrq6;A~>
-kl1Y\rr4G=+j7n>n5#UR6N9H@VFKqP:SXONpcYFE4o\<LX@E4JT;"sIqbWcO9_eVhZiBoRs+13C
-rrCRJrs.ojq#C$cqtB[^J,~>
-l2NC/s8V?`ruf+h0)l"HpBpU.l1,/TrsJ;b%0$P)q#L3lrT=@a!<;@!nG`d[r;cWm!jhq(JcC<$
-V>gPmq#CU2k5PDWq=F4XJ,~>
-l2NC/s8V?`ruf+h0)l"HpBpU.l1,/TrsJ;b%0$P)q#L3lrT=@a!<;@!nG`d[r;cWm!jhq(JcC<$
-V#LN!$3^_7!so#ElMpn[q!\7^p&BO~>
-l2NC/s8V?`ruf+h0)l"HpBpU.l1,/TrsJ;b%0$P)q#L3lrT=@a!<;@!nG`d[r;cWm!jhq(JcC<$
-V#Lr8)&!bs%L`X^mJm4\p?_\Ks*t~>
-kPm..s8VSc$4`*r&+KT#rrE)s!<;iqs8E&u$Le!#rsJ,m!:pisrW)un!<;[+q>($lZiBoRs7?9d
-rpg$cro*k[rqu]nrr)lsrr)cprlP0KrquZhq"OIUq>C0irl>$Lq#D6N+;uIN-33i9p&G'^oDa=~>
-kPm..s8VSc$4`*r&+KT#rrE)s!<;iqs8E&u$Le!#rsJ,m!:pisrW)un!<;[+q>($lZiBoRs8;om
-rnm_arr;utrr;utrr;utrm(NJrr;utrr;u`s8DrbrrE&srrE&CrsfPt3^5_o5<AuIr:U*io(2m3~>
-kPm..s8VSc$4`*r&+KT#rrE)s!<;iqs8E&u$Le!#rsJ,m!:pisrW)un!<;[+q>($lZiBoRs7u]m
-rS[\arVuirrVuirrVuirrR1`FrV6EYrTX@\rQP9P,#D<E=Bnm#%fcM!qtodZo`'F~>
-l2O!Hp&G'Zs!,1n/,fqFo*t^4k4T;br"A8_$iV(,pC$m0o&gD\$M3d#s8NE#r<WB"s7+16Za[38
-b-JC`\?<;tZMh'.ZN%6:[CEcY]"G\dZh^g(Z2_-0Zhq'-Z2:^:Z*UdEZaI-IZaI-IZaI-IZa02-
-&[/4DXKAt8Zb<`DVmNG5WMd#lZN%6S[Bm9I[Bm?O]WecLYe@BY]!f/WYIq<TYJIQU^9tAU[C-"B
-!O\s,!!!&t!"G@3Yd1jK]!o/W]<eEFiNick\ZiNGTs_B+Z4X&9'a"sH!"feCo(r:apA=adp&BO~>
-l2O!Hp&G'Zs!,1n/,fqFo*t^4k4T;br"A8_$iV(,pC$m0o&gD\$M3d#s8NE#r<WB"s7+16Za[38
-b-JC`\?<;pZ2q59riuL,rN61)Z2(`rZMV!-Z4+"DZ*LX?Z*LX?Z*LY)Z4XFN_R-SWY-PaNY->UF
-_Qg2JrNZ1(rj2I+(p^KV\?E0CXh([I[B[*CWjf7@Wk>LA\Zu.?rj;^5&?Z-C"WRaQ$3UF*a0W([
-Z*jS:"1bb:\`'n#Y.CmIYbJS9qQg[?#!k4A5Wq\'#64`#s7c*aJ,~>
-l2O!Hp&G'Zs!,1n/,fqFo*t^4k4T;br"A8_$iV(,pC$m0o&gD\$M3d#s8NE#r<WB"s7+16Za[38
-b-JC`\?<;tZN%95[^<LB[/[E3Yl:d,Xfeu*[f3Z6ZN%0+ZMq6.[LomNY->(5Y->(5Y->(5Y->.9
-o!Aq:`3unXXK]CM[(!u_`3ZZF[f3Z6ZMq*-XoYi8rNcI-s0=MiX/rG%[Ap^@Xg"n(Z`UL0\>ld@
-ZG+/j]XbGVZ`OBC&1nkG+VbKhca^?iY-P77Z+@?E^#?C)Ye7<QZD>"AqR$jK-#RIKH?4=@*<6'4
-q!\+Os*t~>
-kPm[Dli6JUfY7df;3naMs$EN[3;WGJWNSJT9V&.Ps$E$a5kI[>X#U.J8?.bGs7QElq"4Obs1eC%
-r;ZfmJcGKEs82rqr;6Hjs8Mcms8N#q"T/,or;?'a*rc3=s8N&ts8N&ts8N&rpYkT=s8VuorqZ9_
-q!7bRl2Lh_rVQQn"nquqqZ$KlrsA,nrr;cnmJZq\rVd?)p])<Y#7Vab/-,8$lMgMUrrr#nrq?-[
-iq!6@p$)JK!VGjWoaC0f'ESXB!!EZ0i:6gH!r)<cp&BO~>
-kPm[Dli6JUfY7df;3naMs$EN[3;WGJWNSJT9V&.Ps$E$a5kI[>X#U.J8?.bGs7QElq"4Obs1eC%
-r;ZfmL&_)Ms8Duq"T/&mr;OP4!Uog_rr`#ms8N#t!qlTnr;Hfurr;ugrr`/rqYpKo&G?&!pAXjd
-rquc\rqcWdrp0L\s8N&u'a-H^0KiB!7g&e[nc/7\q>1!YrVucQrrW0!pAP!kr;$@"#=^mR91DK=
-#P.WfrrDuhs*t~>
-kPm[Dli6JUfY7df;3naMs$EN[3;WGJWNSJT9V&.Ps$E$a5kI[>X#U.J8?.bGs7QElq"4Obs1eC%
-r;ZfmJcG]KrqulprU^'hrUBjXrUBgkn+ZeXqXjU]rrDcds8W&sqZ6Qjq>CEkqY:!fr=JMrp[e:T
-qtp3dkPP#Nnb_\Ns8W)urt5`0=\r[[AmuMTqt^9drV?3ao$.1F/9uQ(O,/C),Q@B2pA"O\o`'F~>
-l2NI:qu?]fs0MnKWW0RV(!?*f&Mq#us0`ONf)NBG!6"oF3!IM*`sX*Co`)`G+3"$Pqu6s"Y5/+t
-rr;ioM>R;MqYU9is8E6#qtg0aqY9sap\Xmb!r2Werqc-]')_Y)qu$?hqu$?hqu$<fq#:6_rr3f.
-s8MrppAa^]r;QWnr;QWnr;QWnr;QWiqu6U/qZ$'brq$-gmJcVMrq$-[rqZQmqYUs$pZVZ0)?L9H
-![%L4o(VtXnc&OlrqZBbo'52r#2J>b%LFF&qB$:h!#PeC!<<-2!:p'eo)JU]rq6;A~>
-l2NI:qu?]fs0MnKWW0RV(!?*f&Mq#us0`ONf)NBG!6"oF3!IM*`sX*Co`)`G+3"$Pqu6s"Y5/+t
-rr;ioM>R;KrqlQgs7lZkrqccpqu#gX!rVrnmJ@[qqtg3dqtg3dqtg3dr;Zcrr9aL_qt9pf!r2fc
-qYCBmqYU-dqYpBhr;ZZort>>,rpTjdo`+sZs7,pbo`+O_qY'pqqZT_a2F]qm63R5d"n;Qlrr;uR
-rs&E$67s]U55@DR#=LXE845^.'`\41n,NFdo`'F~>
-l2NI:qu?]fs0MnKWW0RV(!?*f&Mq#us0`ONf)NBG!6"oF3!IM*`sX*Co`)`G+3"$Pqu6s"Y5/+t
-rr;ioLAUuKp](3j!rr6!r;HBequ$ZtrVuipnbX-uqt^-bqt^-bqt^-bqtpEiqtK:Iq"XI[rrrDp
-qsF:Zr;6NirV-<brq-6a!r)Ndr;R<%s8Vlos7$'[qu?Bip&Fs_rs0KZD.nHOF(H6errD`CrrcOr
-6=*ai=:e[fEGgA_J9>QXs8V`Rq#10`s*t~>
-l2Lq`s8W&trVlg*k5,,Ks7u]prTjI_rVlg=p\k*[s8W)up&G$krVuK`s7Q9ds8N&uqtg9krVcc*
-rr3&qs8ITLs8N#qrVQWo#64]&rr;utrr2ZlrVl`pnc&%X!VQBdrrVoooBuVYrqcO0o^)MQr:^'^
-q=O@ToD&.Op\=IFq=O+Mq>U7Vq=sXQlMq8)$31&2'`d[kh=pR=q"s(Hq!mu#,Tn9R+s8'P+s8'P
-+s8'P+s8'P+s8'P+s8'P+s8'P*%i/fl2KiYkiV*kkiV*k!$2IS#71YT!:p-grrW,krq6;A~>
-l2Lq`s8W&trVlg*k5,,Ks7u]prTjI_rVlg=p\k*[s8W)up&G$krVuK`s7Q9ds8N&uqtg9krVcc*
-rr3&qs8IlTqu-WrrlG-1rXJo$rVuosrr)`nrq6<\rr;Qgq#;-)nGiLfq>^9jrVZ]ls8Vrps8DHe
-rUopb3<qB17mo^73s>T`lMpb]qu>jZs8N$S4[2+p5!M4q5!M4q5!M4q5!M4q5!M4q5!M4q5!M4q
-3))='rs/it4$lD-5Xu%Xs8W'!s82HgJ,~>
-l2Lq`s8W&trVlg*k5,,Ks7u]prTjI_rVlg=p\k*[s8W)up&G$krVuK`s7Q9ds8N&uqtg9krVcc*
-rr3&qs8IiSrr'e8!;l?`&FfGhq"FFVq"t$]qs4%NpA=^bq>:0ir;QfpoD\airq-3mp&G'jp\tUF
->'5@LMM5FXs8VrnrrDHbs!'m0<)ut!<)ut!<)ut!<)ut!<)ut!<)ut!<)ut!<)usp@SZRq%jl"&
-F*rCSC)m9Rq"OUaqt0o=~>
-kl21ms8)cqoD7tVs7-'fli6\Xrt4\hs8;oso)Jacs7c9ap&F[[qu6lms82irrqlWn#Hn%(p%&.\
-r/1LNs8W'1rVH<ap\+7NpA=jhrr;utrr;usqu-?ir;QfsrU]mdrpTjln,NF_qu?]irVllmnc&Xh
-qtg/6qt9j\qtp*So^hbEp$;;Cp##H7n+?;Eq"aa\p\"+E,60.m"Tn`.+93]A-n7J,1FPd6/1i@D
-p\Ogar;-?fr;-?fr;-?fr;-?fr;-?fr;-?fr;-?frquchq#($crqucnrqucj!#lO^$P<dc!;Gp]
-s7H6erq6;A~>
-kl21ms8)cqoD7tVs7-'fli6\Xrt4\hs8;oso)Jacs7c9ap&F[[qu6lms82irrqlWn#Hn%(p%&.\
-r/1ISrVZWm!ri/tq#:TurVlcprVlcbrW)oorq$-jrr)Ecs8NMorr2N\rr2Hdrqucbnc&Ibrr2rt
-#5eH!s8W&krr3&us7uZo"7?-gqu-QprqudT64I-P92\AW9*RRK3B0\a4>BYZ3'B>&rr2lqrVlcp
-rVlcprVlcprVlcprVlcprVlcprVlcprr3,tq>^Blq>Up-4#oPk7QE[>s6fmcoD\@]J,~>
-kl21ms8)cqoD7tVs7-'fli6\Xrt4\hs8;oso)Jacs7c9ap&F[[qu6lms82irrqlWn#Hn%(p%&.\
-r/(CUr;-9frq69qr;QWnr;QWmrql`lrW2rrr;?-c!WMiao+:WjjnSW>nGE+Nq=sUSjn/BHqY:!_
-rs&B#q>:'^oD&=cp@J;"="8Z'IZ'#7DB;bZ8OZ`?764X,6qU)-rrN#qpAYU;<*EmMEaW,is6fa[
-n,;kXJ,~>
-kPkYOs8VQdrs8B!s7cBis7l-`rrr2tp&G'brr2umrr3<!s8W#jp&Fddq>C6srr<#qp\)"Gs82g&
-rquTfp\"1Lp\smdqu63e%fZD*r;QWnr;QWnr;QWnnG`mls8W&tqu6Whnc/UVruqC=q"a^\q"a^Q
-p%e1Nq!RnKm.'Z3n+,]5l13p+nes;<r\4X3/Li++!!O#6!"TG;lf[g/hY$X7nFQ;Clh0'5!;lNj%
-g4(&,SM7;*WZ$9r;ZNkrq-5@~>
-kPkYOs8VQdrs8B!s7cBis7l-`rrr2tp&G'brr2umrr3<!s8W#jp&Fddq>C6srr<#qp\)"Is8W)u
-rVlfdrrE&trrE&fs8N#es8W';rVlcprVlcprVlcprVlTls8Duqs8Vf]rqZTjnc&Ugq#;3(s8W#q
-s7cQnr;Zfns8Vloq>^*es!g;pr\tZR4$,V*&iWKN1/U+k%KHA+s6BXarVHNn!rW)uir'&VpAOaa
-o(`.nrt.7Y8Noa13X#K^p\sdTs*t~>
-kPkYOs8VQdrs8B!s7cBis7l-`rrr2tp&G'brr2umrr3<!s8W#jp&Fddq>C6srr<#qp\)"Hs8;ig
-s8Dlrqu-Kcs8DopqZ-Tbruh46qY9p^qY9p^qY9p^qY9[YqtpEms8Vl`qt9[PoCVnWrU9ajqu?]n
-r;ZE[rrY2T8kT%Q$VO:q0jK$KCm&jA,lR`CpA"XfrSmMSo'c;Ap?VMI%fI8:>@_,X>>Pn2r:BdU
-o)F4~>
-kl2:\s8)?ep](3lmJm4Js8D9`s7QBk!r;lgrr3,pnGhqIrr3#uqu6U#o_\Udrr;]hqu6o,rVuon
-rUfl9rs/Q$r;$-^p%\4[s8N&rs8MTh0E2"Ns8N&ts8N&ts8N&rq=j[Yq"ORXq"ORXq"ORYqr7GG
-m.Bo;q"a=WrV6Ebrq$%2qtTp\p\=LXp[Iq@p\=CPp[mq>ng$+K.4[1k1+"I@n+ZhV)u'$rnE9iY%
-hf-P&etE6mdfi;p\=LWp%7G<pYc&On,<7dn+cqY!!`Sfs8Vurs7c0cJ,~>
-kl2:\s8)?ep](3lmJm4Js8D9`s7QBk!r;lgrr3,pnGhqIrr3#uqu6U#o_\Udrr;]hqu6o,rVuon
-rUflAs8;iss8McmrVcfsrr2Kgq>T=P%d*fkq#CBjs8VNer:p<al2Lt_s6p!fr;$?l%Jp)]7RB*s
-6RY8Z3WK*Rrs0/k5"\.59K<IZrrN)tqu6Zjrr:sV!9jC\!9j7X&H;e7%1<CH#m:e(rqlEgrq?!a
-J,~>
-kl2:\s8)?ep](3lmJm4Js8D9`s7QBk!r;lgrr3,pnGhqIrr3#uqu6U#o_\Udrr;]hqu6o,rVuon
-rUfl>s8Mujs8DfpqYg9krV$9jrVcWnq>gENrrD6Xrs&&oq!e"DrUKmfrU9am5uUQK85M0>6Uq(T
-s8)fpr;R$W??D'^Q],2lo)I\D!93tP!93hL&GlV>)BKk?*YfOZrV?'bs82HgJ,~>
-l2NC7s8W#sqZ$<hs8Vrds8Vfms8Dorq#C<mrqcZns8W#ss6BX]qZ$?jl2Ue[s7ZEk$iB_ss8B>'
-s8Vrqrdk+JrrN,srql?f"TJ>srr2!Y!WMlco`FmR"Sr*6!s%ifqtg3cq"a^\q"a^\q"a^\q"a^[
-pDEEU0/G+;.O[5/./!6!o^_YIo^VSEp%S4[p`&u$oBQB%&el>n&M4"Jo'lDKoCr(Uq!n%Mo).MI
-#Pe>ir:g6kp%n\!rqlK_jLa@>o]lGUs82ieoDa=~>
-l2NC7s8W#sqZ$<hs8Vrds8Vfms8Dorq#C<mrqcZns8W#ss6BX]qZ$?jl2Ue[s7ZEk$iB_ss8B>'
-s8VrqreUULrqHHhrq$0frW<-!ro*kbp(7B6"Vq:Q";:k2jSobf4#f;X5;G&a4#[-=q#C?mrrW0!
-rqQL)r!#GE4A%Xq8-Jkjr;Zfprr2p"qu?]piVs/Xs76*^s8Vccrs\VKpAb0_s82cfrp]pZs*t~>
-l2NC7s8W#sqZ$<hs8Vrds8Vfms8Dorq#C<mrqcZns8W#ss6BX]qZ$?jl2Ue[s7ZEk$iB_ss8B>'
-s8Vrqre1=OrVQKlr;clrrVucoqZ$HmrV-?krVcTpqYU0\rUBgsr#$%d*&&K`)']Rgs8VuYrs;:h
-:.A2O6;U0<p&>0oq>'mdrVm-H@pa/,G]%"(e,K[Js76*^s8VcbrrCpTrs&5tqYTscp%/36~>
-kPkVVs82cp#Oh]mq>^*eq>UBpr;HKl"8)Wgr;Q^&rVuors8)clrqQKm$iU#%rr2lqZ2FOqq>UDO
-rr;ouqu-NkrrDrqrrW3"r;Q]rrql`qrWE,rqu-Bj$NL,*r;Zfjs8W)tr;QcqrVllorVlues8VK]
-ruh42!%8]k*$#V',QRB+o^)5Co(DSHo_%hKo_%kLo_%nQ0a96gp%S+QnG`.iq"ORXq"ORXq"ORX
-rV-BhrqcfrqsOgd!!Vuhq=XXYrql`ls7cWhp=&X?p\XXTm]cBYpAF=XJ,~>
-kPkVVs82cp#Oh]mq>^*eq>UBpr;HKl"8)Wgr;Q^&rVuors8)clrqQKm$iU#%rr2lqZ2FOqq>UDJ
-rr)fqrW`#prr;utr;Q]rqYpKorVc`qs7uX*s8Dups8VZhrqcKjrqlZnq>L9upAP!js6]jdmIpPd
-"@[email protected]'$57.>h!;Z-arAjm=s#U6@62gf`r:'acrY5>1rr)if&H`@D('Y<Q#lal(qZ$E5rr`,q
-q7Z_+!ri/srr2QiJ,~>
-kPkVVs82cp#Oh]mq>^*eq>UBpr;HKl"8)Wgr;Q^&rVuors8)clrqQKm$iU#%rr2lqZ2FOqq>UDO
-rqud4rqcHbp@RYBo(;SLqZ$Top\Xg`q>1$er;ZZn&cDV)rV?Ehnb`4Xp\Xj_qYU!bqZufiqu?]`
-s8VK[rs9EF>$5lf@oQVOm/R)\s%Ebm8Ol388,rVgs7kgX$2b\M()%u3*tAk]rrW3"rQ,!@pA=a)
-r;ZfqnGe"~>
-l2Me&r;Q`rli6bEs76!`rr;Tirr<#ns8VQfp&"X]rVc`ujT#8Trr3<#s8Dutnb)bUrqucr[f$.+
-q=ojI"9/2pr;?Qpqu$KnrWE)qs7lEirqlcqq>UZsrV$'es8;iq#6+Z&q#CB[r;Quus8Vros7Z6f
-(@hGG+pJ&S%2ffZmG7-jnEoB/kOS*`q(*+1p@e7Sp\+@Tpt#63oCr%Nqss[bqZ6Wor;R2es8DNa
[email protected]'^^qu?]q1&UqDp\=LXp\=LXp\=LXp\=LXp\=LXp\=LXp\=LXp\=LXp\=LXp\=LXp\=LX
-p\=R`rWN/$nF?GCs*t~>
-l2Me&r;Q`rli6bEs76!`rr;Tirr<#ns8VQfp&"X]rVc`ujT#8Trr3<#s8Dutnb)bUrqucr[f$.+
-q=o^ErVdK/nb2t]s8Vrps8W#sp\t-irr;uhruM%9pA+agqu6Wos7u]mp&=sQs8Dusrq??joCi1P
-q>UEo('['#4$?#$7QLbRoDeXdrqHHfs8O`8qE+a>rr)rurr)j$iW&rVs8W&ds8N#trVuj*jT#8R
-s8VrqhuEHKr58O=Zh=%ls8N#rr;cihs*t~>
-l2Me&r;Q`rli6bEs76!`rr;Tirr<#ns8VQfp&"X]rVc`ujT#8Trr3<#s8Dutnb)bUrqucr[f$.+
-q=oXC"T.ufkj89>%Ia#js8N&kqtg-bqYgBbrs/K#pA+agq>:0f#57ohnGE%ArqZotqXXFVmI9o8
-q=t!i#p*N"D/joGE*FC\rrE&srrb>P8P)SR9E7i_rs%TbrVZ]oq<7hiirB#Ns8VurirAfQrr<#s
-rQG3FrVHKmZhX@^s*t~>
-l2NC8s7c-brs8Z1!<:7_'Z'gh%kG'Xs2$#sci:I>%D;_D.NcD)aors)s8BhG'$U=S$2aPoqYRSs
-rqlNjs,-dYrVH<drV$6prqZ?br;?Hls8N#p!WN,trr;rqrXAMqr9s[WqYC0\s8VimrrW&pr;Q^!
-r:p<lrV6C/h>eDn":>,2"!7Nt+[Ik+.lSD0-n"TSqXP6jp\=RZq=sd\q=sUWq=XROrU^!krVQKj
-rr)isrqQKnrVlisrQ5'Br:p6d\EX$D~>
-l2NC8s7c-brs8Z1!<:7_'Z'gh%kG'Xs2$#sci:I>%D;_D.NcD)aors)s8BhG'$U=S$2aPoqYRSs
-rqlNjs,$aTs8W)tr<W?"r:g!cs8W)srsSf*qu6QnrVlfrrr;fnrVmi5r;Z9do(MkWn,E@Ys8N&l
-qtU*crU]LVrUfs_r>,G$*()8G:d.9-9,.(Z=%YA681[Iq3:6_LqZ$Tns75a[r;R9+rVcWbqu$3c
-pAFmhs8Drss8;]ls8=_NrVZWlrVZWlrVZWlrVZWlrVZWlrVZWlrVZWlrVZWlrVZWlrVZWlrVZWl
-rVQEiqRHP)rr2lp!<2WjJ,~>
-l2NC8s7c-brs8Z1!<:7_'Z'gh%kG'Xs2$#sci:I>%D;_D.NcD)aors)s8BhG'$U=S$2aPoqYRSs
-rqlNjs,-gRrsSi&p\"1MmdKlCq#($h"o.ujq>C0hrrN,srVl]o)u]g:o)8"Lo_e%RqssX_qt9^X
-q=OCIkk=`;oD8.rs8Ful>^qc]IV<[Ss$gHs5t=a0=$o=<rrN-!oDSdgq>9mjq=4@\qu?Zprs&H!
-rVcWgq"snHr;-6`q=j^Zq=j^Zq=j^Zq=j^Zq=j^Zq=j^Zq=j^Zq=j^Zq=j^Zq=j^Zq=j^Zq>'aU
-q=V2plMlA~>
-l2NR5s8Vons6fp#8Gt65WD*%EV2nRTqa6pO4T@p2YtG$cVR8kS:TU0Xs6rdaSh^-<r;QZnrs-"4
-s8;Qdp&'^I"9/2pr;?Qpqu6U&o(;ePo'GW2qu6Tls8MrnrsJc'pA"7Vq=4:Xr;HU#rVl3`pAXaa
-p\t.FnG`@VpAY!gqu-Ejqu-<W!"]qQ!!!60-3,pkg#__eo]P`:n`oc=oCVYHoCVbNqY:B^qsjRU
-oDAIUs8W)orsST$s8DrskPtAXs8Kt:"T5t5rqc!]J,~>
-l2NR5s8Vons6fp#8Gt65WD*%EV2nRTqa6pO4T@p2YtG$cVR8kS:TU0Xs6rdaSh^-<r;QZnrs-"4
-s8;Qdp&'ODs8ET.rq-0gs75j]rVu`cq#CBmrr2`n!<2or"oA2ms8Vljrs8W(mf3%]pAapes!@=;
-s7?*es8N&ts8N&ts8E3`:JF5J90cD\:]Kh[s7u]err<#mnc&monGi1]p\+Ucli.Opr;6BUqYKaQ
-r8[hMrr;lpqZ$TpdJa(E"T,e0qYC'g!<)orp&BO~>
-l2NR5s8Vons6fp#8Gt65WD*%EV2nRTqa6pO4T@p2YtG$cVR8kS:TU0Xs6rdaSh^-<r;QZnrs-"4
-s8;Qdp&'LC#lFJnmeZeWo(W+_"8r/us8DiprVZZqrVQTurVufqs8Mfn#4VZgs7lWjrr3,os8Vij
-pAYOF@Y0JmMhlG#,%1HErVllro`#3qq"aa_rVuokq>^'b!rM]`rV$9d%f6(erVc?]rS[SDqtg$^
-qYC9jqY&D1#kn,lqY%>op\4%SJ,~>
-kl3=(rVuo[s8P-ts!R+($N1\=kna!j'CY](q%NJk*;gN0kR.=krpBs^rse2a$NKu#rr`&dYPeA!
-!W2kRrVl]qrVlcq%fH@lq"=4JmdKZ9kkX];rr3'!rVc`lrVliq%/fu!q=!SBp%%kPpZ;>J2tHb6
-q!e(Qo]t\pkMk[h'dY(N,:+Q\-:Rt\!!WE'!tu:ImI^)>lhBcAq!7_Nq"a^\q"a^\q=jdkp&+CY
-nc&OZrrE&tr;ciqrrE&mrs&>qqu?]qrVZZArs&5os8TM(rp0T7~>
-kl3=(rVuo[s8P-ts!R+($N1\=kna!j'CY](q%NJk*;gN0kR.=krpBs^rse2a$NKu#rr`&dYPeA!
-!W2kNrVl]tqu$<imJ6bdp&FXRs8;for;uusrqcQrrr<#no)&Femem(fqu?Nmq>C70p](6ms"m#.
-5sdk(5s\TU8hrq,3CQM##6+W,rVHQes8VZVrs&<!pAajdrU]perVcZoqu?Kqo_\Ucrql^"p\=Ub
-s8Drpr;lipdej@Er;Z`"pA+^errN-!rVlKiJ,~>
-kl3=(rVuo[s8P-ts!R+($N1\=kna!j'CY](q%NJk*;gN0kR.=krpBs^rse2a$NKu#rr`&dYPeA!
-!W2kRrVc`rrVZNns7H-e#laktnbr:Zqtp?l"oJ,mqu-KkrrDllrrD`Zrt@t,='8U-='8R?*,f;?
-MLC)$?6&kB"8r3!q!A"aq#C$co)8.T!;lZn&cMUuq=s[UoC;GIqYKdTpA=a_q9o$Eq=XR\poO&Z
-qu-Hms8DTiJ,~>
-l2NsBr;Zcrs7*<[s7QHjquZluq!e^krrE)e"TJK%nHARhs8N>o&G5i&s8N-"q@!,rs8Vicr;Zf*
-q"=@SrV;$E"98;R"o/-##i>CVqu6`sr;?Km&H;V)lM1,@oCD#0nE9B/o&0N<%ajk9m5$+8-6FQB
-,ShQcqW&LWiW'Z($j7%B)\E)EkPP)Nr9XFUrql]^q>:!f!VcKirrDTPrs8Q&qWn1\q>]dYrrDuq
-rltKCr;Zcs]D_d.rpg#=~>
-l2NsBr;Zcrs7*<[s7QHjquZluq!e^krrE)e"TJK%nHARhs8N>o&G5i&s8N-"q@!,rs8Vicr;Zf*
-q"=@SrV;3JrVd<*rr:pg!sAf9"pkG9)!Lqu!<2ip!<2lq%IsJuqZ$Tgr;ZNkr;Z9crsdres8>;D
-2aT_u.mu9]q#:ck#VnV:92e,L1^j?PrVm)hs7ZKjs6fmarVZ`oqu6Wq!VcKirrDWhrseu-rr;ut
-rr;utrr;usq#(KnrqYs]s82i]r;Qcpp\ufDs8N&ts8N&ts8N&ts8N&ts8N&ts8N&ts8N&ts8N&t
-s8N&ts8N&ts8N&trVm#t[f61'r;Zcqo`'F~>
-l2NsBr;Zcrs7*<[s7QHjquZluq!e^krrE)e"TJK%nHARhs8N>o&G5i&s8N-"q@!,rs8Vicr;Zf*
-q"=@SrV;0Irr)ir$gf,U+=8]e+r:n=nc&OjrVQQlrVluuqu-KlrrDfdrrE#srrDKdrs;Li7T`c"
-6sWSlp\tU?<,I>KQ@*sW*<5R-rrVckqYpKpnG`FcquH`lrtbA)qtg0alM18RrVQQjrVQQjrVQQj
-rV-=%q>9gDo^hMGjnelOrVcTer@@pHs8Durs8Durs8Durs8Durs8Durs8Durs8Durs8Durs8Dur
-s8Durs8Dor#6">&q"O[ar;Qisqu-3fJ,~>
-kl3jEs7lW`s"_XprtYCr(\&79p_EE#+lWG6lPolq&cW7.jW4@$s7@?!rsIui#lal(m)ulG\@8'X
-^U:8PYe#[ss0_j76FF/'U:(%C*Yf>&(F^R/Z_FJ(ZaI-FZE^U<ZaI-GZE^U9_iLt&TWt8tWMu=:
-,oJcc'gEZd.&*]9Wgp9(Xfeh#qQ1U=V?SRa$ig86$R>_U_6:/LXi[QQYeR<]q7$F5qQq!H[^`]C
-Z`h'KZEpjCZEpjCZEpjCZMLp-Z4O:MU8tc7[\p72WkPa@Xg5G7YPYR[ZE^^?ZE^^?ZE^^?ZE^^?
-ZE^^?ZE^^?ZE^^?ZE^^?ZE^^?ZE^^?ZE^^?ZE^[5Zc8m@rr`8ur;Q6dJ,~>
-kl3jEs7lW`s"_XprtYCr(\&79p_EE#+lWG6lPolq&cW7.jW4@$s7@?!rsIui#lal(m)ulG\@8'X
-^U:8PYe#[srNR*AZ*CU7_ZMtd5X@Uo6okRhX0/\4Z2Ls1Za-mArNcI0+3^.c]s>Pa`P0'i5WUMo
-1Fc?Y4[$9g[^`<La1AmpY55^I\Y]d'4@M:r68^t4Z,!HJZDkg>[B.!>`3fBK!4)I-'t1NZZ_t+3
-]X"iJZE^^?ZE^^?ZE^_4YlM$-Yn+FBYJ.oj[C<EA]s4fF[_):?1:"6lZa-pCZa-pCZa-pCZa-pC
-Za-pCZa-pCZa-pCZa-pCZa-pCZa-pCZa-mFXgQ-Arr2fps8MZjJ,~>
-kl3jEs7lW`s"_XprtYCr(\&79p_EE#+lWG6lPolq&cW7.jW4@$s7@?!rsIui#lal(m)ulG\@8'X
-^U:8PYe#[s!3lI*6a3l)]=6?4?"n.sC0+P3(<j(hYct:7YdCdG['6^;Yd1XE\'q(lb.uEDeBcIV
-;+j,^6p"sH:/8mV`P]"#d)3f?\,*u[_QFebApefOH!OSeYe-[0XJj.9[]R->_6O!Ds/lF)Y-7],
-'st<TZ)+\+]!/EEYd1L=Yd1L=Yd1M4ZN%9G[^WfX]>Le]`5oj)\?;^>YH=t8](`Kf['I'E['I'E
-['I'E['I'E['I'E['I'E['I'E['I'E['I'E['I'E['I'E['[EK[)8I0qYU0is8)fpp&BO~>
-l2O!HqYU<mrO#K%s7nsOX>p5BT;jsPpb8bD5lWU1\h3qMUouWT6_UD6s7JCXUc/8Gs8W#lrVZN'
-p\"I`s7lNErtGD3rr;utrr;utrr;utrqlWns8Moq"oeQ%r;6KPs7?6crtYG#n,OIE#R1_N*u=qH
-nG)nYrquZhrqQKirqZTh+7_9=2^1"".4?K%i:6<enF?#3nE]N-o_&4Up\t!frqcNnrVuTlrVllr
-rr3)ss8W#cs8W&rrrDo]rt58.p\XXSp[.MFqu?QmrquNfqu"e>"T8,qqn2n-"9/5rrpg#=~>
-l2O!HqYU<mrO#K%s7nsOX>p5BT;jsPpb8bD5lWU1\h3qMUouWT6_UD4rq/:XVE"VLs8W&ns8W)4
-qtL*is7lNUrXJo,s8N&ts8N&ts8N&tlMph^nc/Lco)A^hnc/Ufs8Dus%fcA)"[GC;5t4(.3=#T`
-n,Emq69%Lk68CV`3(``As5s=\%Jg&%s82ims7Q0eqZ$Tjr=o;6"Uk\K#R:\;s82irqYpBjo_na_
-qu6Hl#6+Z%s8Mrlrr)rsq!\7\rWiK#s8;coqYpKo"T/2us80k:"oePu\GlI's8W)js*t~>
-l2O!HqYU<mrO#K%s7nsOX>p5BT;jsPpb8bD5lWU1\h3qMUouWT6_UD5s7\U_V)SACqtp0\r;Z`/
-qY'mfs7lHSrser,rVuirrVuirrVuirnbrFd$2so(qtU$bs8W&rrs8T'rVuirrVuins76-Ws8Doo
-rs9o^EH->RBkLEfm/IAb6W--E5Y4L9=oSF$pA"Xkrr;`hs8;Tj(^Cd"2)dWM,9-scp%7tLqtp?a
-rVH0`qYL3drq6?erqcWks8DuorVllpnc/XfoD]$orV?Bks89k9"SfD$qYU0i!r`#pp&BO~>
-l2Le`p&=tNqDQUus7=eL+3OT)%OAj`s25osa8`h!'Z0je+;D`n^Dn<>oDciH&&J8Cs8VHbqXF6W
-q"+CSrr2otr9F:arVuoqpAYj+r;QWnr;QWnr;QWnr;QN`o^i+Tqu6lms7>mSqu6-c!rW)oqu6Zo
-rVm)sr:p*br8dYNs8)fqrr3l8qtKR[&J,6N!!*B@!9`J7o(D84nE00+.K(Y7.P!&$0^SJrp[7hN
-q>'p`qt^-bqt^-bqt^-drpg!prqlK_o()GIqu4S7rVl`m!<2rsq#8V>#6+T$qmc\*rr<#tnGe"~>
-l2Le`p&=tMqDQUus7=eL+3OT)%OAj`s25osa8`h!'Z0je+;D`n^Dn66mJb0F'?:(Ms8VKds82i%
-s8;olrr3*"pA=deqZ-E^r!*-!r;Q`ks8N!$s8N&ts8MHd$haPps7uTiq>^6ip%JFbrr)cuqu6?h
-o_najr;QHirWiDts82`ns6'1Ws8Mrp%fZM.#!#(@845a37gT.lrr3/qq>^<ks8>(f5!;%k4?iT@
-p&FgErrE&gs8Drbrs&K&rr;utrh'2orr;l)s8Dfo!<2TiJ,~>
-l2Le`p&=tTqDQUus7=eL+3OT)%OAj`s25osa8`h!'Z0je+;D`n^Dn9;nc-fR'Z0_;o^gu6q=Xcj
-s7u]erVcWlpAFmgrrDuerso#&rVQ?drVHKirVQQjrUKdarsSJrq>KdKmI1#Lqt0mf#lFStrVQQj
-rVQKj"ShibqXFI[!r2W`rqZZnrVZZqme$PZrr)j',$f8;K7\Q"GVJjk#$"Jp:/">Sr^m.i!<(@G
-s8Dups8Dorq>L<iqYc]Z"T,V*qu-Bk!<)QiJ,~>
-kPkh^q"=[[s8)ckrr2uirr3?&s8W#gs82irrpTjd#6"Q$s7QE[rr3r1q=j[^pAapfs6B@Ll`K^G
-p$hkPs8N&urr;l`rs&H!p&G'kqWe(br:Tgas7Q'`s8VclrrVrfr;-HhrVZZspAa^[s8W'2k5P8W
-nG`%Yr;QWnr;QTgp\F^cr$_C.m/S(2#64`+#m11Z0IJ7t/MI5M+<g(9lKdd'md]l.p\=L[o_e<3
-s3UcLrr;l)s8D9`J,~>
-kPkh^q"=[[s8)ckrr2uirr3?&s8W#gs82irrpTjd)#aI6s7QE[rr)feqYL-iq>^0gs7--hrk\U6%
-/U#!s8)]loDA%Kqtfm\#QO_uo`+siq<%\`r;-6gp\4[_qY^'es8;ckr;I,qrpKdbrVl]nr;HZ\r
-r3&ls7c'`(Ae%<84#p984lN9#k^VF2)RK^(HOK7q#:Tgs8W)uq>^2?s3UcLrr;l)s8D9`J,~>
-kPkh^q"=[[s8)ckrr2uirr3?&s8W#gs82irrpTjd#6"Q$s7QE[rr3?#rr;rqo(M>>p?MYW!5ea8
-$iBtus82irq>^6fs76'nqYg9co)8Oap\FdYqu6U'qXsg`oB>E0qYg*`rr;oms82ims8;or!rr8u
-rVI#lqs4.VqYU'bq>:3Zrr3&os8)3a'I[m?JV&f=Li>6O@STHe;d2"X<`f(r!;ZTn!r`/oJcF*s
-"oeQ!\,ZEms*t~>
-l2Lk_rVucnrr`5ks8Dor$3'\ts7lWop]'j`rt+_us8M`fr;Zfrr;Zfqr;??grrDT`!"/Mcrr)Qj
-o`+sdqt'Rfq>C9mpAb$gkl21hp](9hn,E@erpT[_s7c?arr`8uqtpBg(%hM)nc&OSrq69ip&=ab
-rqHHes8MNertGA2rVuirq=<tDo(;VJo'PQ>!"Su.*<cQh!%[=.p%.qMo'u50n*]W4n*oo>p%\.F
-r;?5=s3CWJrr;l)s8D9`J,~>
-l2Lk_rVucnrr`5ks8Dor$3'\ts7lWop]'j`rt+_us8M`fr;ZfppAP!is8Vrorrhur#6bP8#Rh4P
-"97fhqXaXQr;?9Yrs&K&s7ZKirV?Horr)Hd#6"AkrVccmrVm0"s8DoiqZ$Qoo`$$-s8VZis6]j[
-s8Vfmq>UEfs7H?gnGiOdrVZWlrVZWgrsf;j9fbj?7m0]Y6hp]YqY^?noCr7fqXOUaon!.grrrE%
-qmZV(li2J~>
-l2Lk_rVucnrr`5ks8Dor$3'\ts7lWop]'j`rsJ;os8M`fr;Zfqq#(.-p\Xg`qt:C5)Bfn1',_Gk
-s763iqZ$Ejs7-'frr_upqu,aY"8VKWo`"mjqYpL&o^VJEp@S"NpA"O`q>V9/s8V`ks6p!_s8Vlo
-qu?]dp?VGCk4\NFo)S^_rsC8_Jp!!4KQM]!?gRdpp&G'bJcF$q"oeQ!\,ZEms*t~>
-kl1h]s7H?dpAP"+nGiO_s7lWorr;rms8;okqu?Wps82cp3;NURs7u]oqYpHnp[e@Z!$!+*(D%K$
-(]a-qnGN:Xr:KRUo^_YFo^_YFo^_YFo^_YIpA4^_r;-3d!<2He$i^2"o(2bUoCDVTrqZTorr2`m
-q#C$sq=s[Uq=j[Oq=s^Vq=ss]!:Kd_!;63d0_d">,pt)i.46AG!$MCH!!*HA(BEmrr;Q*\jnJK)
-p\":Rp\4IXq>9aOs8Vurs8ITLd/O:Ks80;*rTjK6~>
-kl1h]s7H?dpAP"+nGiO_s7lWorr;rms8;okqu?Wps82cp+o209rq6<kqu?WpoC;h[!\ll-5rq1i
-1)']^o`+sas7c6Vrrr?"s8Dusp\t6loDB-ts8VZhs8W&ts8Dutrpp'`rsSf'r;HQkrVccrrVc`q
-$MON"rr)lsqu$<]r;Qcmq>UOX4$#G%3>OY=-5'9A8PDZF6pCnUrVulds6K^bl1b2_p@/+^qYU9j
-JcF*s"oeQ!\,ZEms*t~>
-kl1h]s7H?dpAP"+nGiO_s7lWorr;rms8;okqu?Wps82cp,PhB<rUg-irVucnnFHVZ(JnXZBjb=K
-9.']=rVuoos8N&urpfpGrr`5nk4\WN!qt^MrVHQk!<)Ee!;lTl!W)?arrDiirtA%&:J48M8P;uK
-21KRsMM?P"@j:mO"o&&ks8Vifrs/Ajs8VlhqtksEd/O:Ks80;*rTjK6~>
-kPlUqs8DWjlKnQGs8W&mq=t!irr;rms8;okqu?Wps82cp$2OW"s7ZKlqYpKns!.11!$`6`#nA'm
-'EHeXq"aLVo_%kKq"ag_qt^-bqt^-bqt^3f"o.ueqtoaEq\eSrs7bpNp%J+Pna6)NroWkDo(;VL
-q"agdqu7K(b4YDkkOnK:me-5;nFcAAmd93!lQdtU-3*?5nF?MM)Y3=Z!"L%M"9K>_*WZ!7rqcZp
-p@nL_n,3"WrVQWmJcEgk"oeQ!\,ZEms*t~>
-kPlUqs8DWjlKnQGs8W&mq=t!irr;rms8;okqu?Wps82cp$2OW"s7ZKmr;Z`prXAN#"[>((4$QD%
-1_]6TrrDr]rrrB$q>^Kbl2M:_s8MZfs8Mlkrr;lfrr;H\s8N!.rVufJs8Vcbs8W#ps8Vflr;Qos
-q>WE<q)S3L(D/Z)3CQ2'83@1Xs8)Bes7cBis7$'erVQNmrrrE"rr2clJcF*s"oeQ!\,ZEms*t~>
-kPlUqs8DWjlKnQGs8W&mq=t!irr;rms8;okqu?Wps82cp$Mj`#rUg-iqu?QlrVm0QDK9fOMj&I"
-48o0_q#0s`qu#j]"9&9"p$)JYo)JFPrr<#srr3)hpAadXs8MusrrD$WrrE&grrH&$qG[Gmp\tdX
->^U74F)l)!)uos8o`+shrr2ukrVulrs8;oq#5e5mqY'a`JcF-t"oeQ!\,ZEms*t~>
-l2Le_rr2rtrVZ[)oD/FcrqHBkq#CBgs7?3h.e3H:rq?0cs8W&mrr2lqs8;ons82?e)&<tl!"',N
-!:B@Eq"=+KnEKK7oCWUena6,HoD&.Vq=sd\q=ssbo(rOes6g*d!%A'>s82K[o(;SIp\=LYqY^9i
-rVH3XiUZI.kOS3/l1,`K1+!e_)CR'Yq!0'imdK`=pA"FVoBcMu'-.Z'(CDntr;Z*_s7?9jlMpn^
-q#:Nrq"OggrIP!prrrE%qmZV(li2J~>
-l2Le_rr2rtrVZ[)oD/FcrqHBkq#CBgs7?3h$M"&orq?0cs8W&mr;RN,rq?<ir<GAM8jG6u3BIBD
-s7cQnqtpC+rr;utrr;utrr;utrr2cfs8VrVrsS)r"U>YK$k`dK#3ts_s8Moos8N)hrr3N's8Vrk
-s!gB&2_@-I5XIL(s8W)qr=oGs2b65)4t9,+s8M'WqX=F`lMgh\rVuiq#Q+>go)AX]rdk*rrrrE%
-qmZV(li2J~>
-l2Le_rr2rtrVZ[)oD/FcrqHBkq#CBgs7?3h'(Po"rq?0cs8W&krVulsrV-9brr39ZDh4+:MiE.-
-0`:qQp](3gn,ECbir9Jdj968>-6+'R*Yek>rU0[crqQL"2c*:?6rm&d?;p7k'cC"QCM@Zn6X('#
-rS[JFm/R+Ps8Dcmr;lforqcurp@%GGq"":[JcF-t"oeQ!\,ZEms*t~>
-kl2"gs763ilMpeOs7Q9h$N'l'r:Bscs8W)err4>Drr<#ks6fpdr:0UYqu?Qns7l!^)A)rP$j@7b
-!$NjE3'/S]l0@g+&FfAepuD/=o(2YNq"ORXq"aI[&+oo%!Ytq@!!!'(('=O9o`"mjrY#/)q"=:M
-naZ,:nF?&d.P2i",Q9:r+nGX##5%?Vp$256q=Og`qu6EkpAbEqo`+mis7ZHl!<)Nh!W2hHs3L]K
-rr;l)s8D9`J,~>
-kl2"gs763ilMpeOs7Q9h$N'l'r:Bscs8W)err33$rr<#ks6fmd!;6?g(]X4-rqlTu6:3b':dIN@
-$9("(5!q.+qX"4bmf31]rosFolhgVe5<_:l3Bo\l$2sbdrp]pk-osLH68SU)$Sh\\rr<#rs8Vlo
-p@S@brr2j4rr3!(&-N:D"rIRLr9sUUrr2Kgs8;Zirqc]nrVHTmrVlil!<.QLd/O:Ks80;*rTjK6~>
-kl2"gs763ilMpeOs7Q9h$N'l'r:Bscs8W)err33$rr<#ks6fmd#57oiq>0[\rr3K]Fb"muOaVOr
-.o/l.8kqV9rrD]arrDc`mh4FEoFG5ACh@?sBjD8An)4'@rUTsl2+L8':I+nP!'^>\"oSE#q>^3^
-rrN)qrq[W5rs',X'd"D8//&Kjlh9W;q!\1Yq=OIVq"XUYrq?EgrVH]gp&"]=s3^iMrr;l)s8D9`
-J,~>
-kPnc_s8Vo\4:*)*bo7P9s2b`r_"RfH*?3'!]d4B0rkfim_uIIk*Q[UF`>B6.s8W&nnGjCA!<<-)
-&/P?6p%e+Ppa@UI+=JWf-n6Vp-n6Vp-n6Vp.jlf&2"C/3pC?urrr;utrr;utrr;utrr)j)q#Cj6
-!"'&5%LiF6rVuos/,];>pFe-M.k3"s.Om"Bme-5@o^MJFo^hYHp[n@Rq#($[qYU-dqYU-drUp0s
-p@J%GmG7F)s8Moq!;6'c#QFc&s8)]oo7?qgrrrE%qmZV(li2J~>
-kPmX?s8Vo\4:*)*bo7P9s2b`r_"RfH*?3'!]d4B0rkfim_uIIk*QdgNaVki2rr)`orrtb\4[;J'
-6Tt\QruC\1s"cZ'1d!l^4?GYe4?GYe4?GYe4>SfW55Y9@s8W'-p]Nl_4'#BD<C$c_rr2`nr]L*A
-s#LAar;Zfos8;`n!r`/nrr2ump](9ms8Doo)#+".qYL6hs7$'as7cKhr;Q`ro_e^drr;lp#Q=]$
-s7uTmnq$hfrrrE%qmZV(li2J~>
-kPmX?s8Vo\4:*)*bo7P9s2b`r_"RfH*?3'!]d4B0rkfim_uIIk+3jEWar(]&q"ajfs!;N4H$t6g
-Eb-Kmrte+!>YS3t<E)st<E)st<E)st<DYnC7S!.O%fQ2!q"jd^q"jd^q"jd^rqHus&8@&EOH=RB
-BG1%6q#1'h#t7?U92&)U9LhPCrs&>snbE"YnbDq`qtg*]o`"Fbo^M\VrVQU)r;6$Vo(;bTqsa@T
-pA4X^qZd#rrV-0en:CVdrrrE%qmZV(li2J~>
-l2NC4s8)9cs&Qi5s8G<VXu?;ASuFdNq_YOO49$h,[5.bEVmS;W6)1;3rq&1WVE4_V/bnf8&f(uh
-!"T_e!:om^rquZgp%7qSrr;utrr;utrr;utrr;okp[R`01+FaL00q0=-7C2h-7C2e1Fja?mca00
-j7**P!WrK4!!!K/mk5b0-7C8k.P*1Gq"t!grr;upq"a^\q"a^\q"a^\q"agdjSoD_q==(KqtpEn
-rq$0irr2fqrdk*srs&H$s8B;#qWn03~>
-l2NC4s8)9cs&Qi5s8G<VXu?;ASuFdNq_YOO49$h,[5.bEVmS;W6)1;4rqAO_V`4PR#n&ge8PCm*
-4@954rrr;r5!1YXq)SI9*B?/@3]T5Z7m&d2s7u]ppAYml8Ou9G6TRdDs$AL=48q;*s8W',rVlcp
-rVlcprVlcprVc`qrVZQprVlfls8N#ps8Muus8ITL`;^&?rr;r'qYKOXJ,~>
-l2NpCs8)9cs&Qi5s8G<VXu?;ASuFdNq_YOO49$h,[5.bEVmS;W6)1A;s8G6lU+l62q#::6=)iSF
-GCFRM,PV3KrV66aqY9p^qY9p^qY9pcrVlsi76Nd064?:X;H$Il;H$Ii?WL#"rtI&$It)ctGB.gK
-s%u-Y9MA&M7n#eurrr;pq"F@Orp^-_pA"O`qu6ZprqHfrrVuorqtTs`qu-JEs2Y-DrVliqZhjOa
-s*t~>
-kl40Ms7c3b'(,_lrs%rW*W?cJp([,u,38b6n.kul(&ng5hA?1os6L]irW_Qb#ljr$o(2o'*<Hf_
-+rha+n+QYVr;69_o_/4Srt52&o()P;p\=LXp\=LXp\=FNq<]Qmj6[j>.N]]f,6.iH"onW9$3gJK
-lg*s+nF?&>o_%nNq>:'erVZQaq#L<^r;Z`hrsSf'q!mhFq#(0lrVZZj"T/,ss8DoorVQWjrV-Eh
-qgn[nqum#qs80@ks*t~>
-kl3jDs7c3b'(,_lrs%rW*W?cJp([,u,38b6n.kul(&ng5hA?1os6LZhrX%fg#lXc#q>Ugi9fGI(
-90G<=q"jses8;rtn,<Ffqu?-PrtPD*s"Ql)2E"5h#=:sU6oA+I6jGF?rri?"rVZW^quHWcqZ$Eo
-r;6Ekqu?Nkqu?QorVlfsrVZTlrUp*brIOmoqum#qs80@ks*t~>
-kl3dBs7c3b'(,_lrs%rW*W?cJp([,u,38b6n.kul(&ng5hA?1os6_*'rsn>m!:fOGnc'2"A6Etl
-H"L2!rr<#r!<2rs!;#gK!;$0h!;PjZ&Q3(F<D[.",BJHjHu>(-FX'?Irri?!qYL-Krrr>qq"OIS
-rUU0bp\=U_r;R3%p\4IZr;ZcoqY0a\rVZWnqYpQpJc*so"TJ8tqmktkJ,~>
-kl3F6nc/Ld###Msrs\r)!<3&nmg/sm!<;KirrE)d#5/3"rsJ<+oD\pmrW)ll$MaZ$r878L!X.N^
-rVlotr:'U`q%*8pn+lVAme6>Ip%J(Pp%A1R%/9>ep[RqNn+ZAE!"o21(]aU:#QXMSo_JF_qtp3a
-pA"CTo^q_Fp&+F]!WN#gqZQ`iq"aabqu$BlqZZfjo_&"WrVllqrqQTmrr2rrrr)cnqu?Hmqu20H
-dJjFLqt@Q"qY0@VJ,~>
-kl3sEnc/Ld###Msrs\r)!<3&nmg/sm!<;KirrE)d#5/3"rsJ<+oD\ghrW3&r$i'`$s6L+"%hB$V
-$3peKlMpk^rVcWorU9b<rqQNlo(W+^rVlcprVlcprVlcprVlips8Vrqs7u]nrt/'u7R^3I6UqI_
-li$_Ys8@NKJcGNF#6"Gm\GuF"m/MS~>
-kl3gAnc/Ld###Msrs\r)!<3&nmg/sm!<;KirrE)d#5/3"rsJ<+oD]'qrs&N""mk^Jo\p,b(`O2)
-'+PHdm,e6MrVuoor9""i.t`V5LQmaLH6`Ias8W)t^]4?2!<)fps8.BI_#FW;qt@Q"qY0@VJ,~>
-l2O!Ds82`opBT3n!<3&\r"B#Es616iq[r8m%/^b4mMl!9i9V*V!;lQtq#:iend>Eis80jCR@9nB
-Xe)/qV8\u&[0*eBZ*El(#G86-Vmre2_"b5hZ*U^AZ*U^<Y.UdEVR3_0Yakb'!!E?'!<E0#Y,oON
-]=PS`\$iWGXfSP%Vl-W&o!S%p!jJc*rMojuqlTk!!O/p0[0a1DY-"h1ZaI3Ir367.r3?7*!O8t^
-[*c5`Z+\>RrTaE5~>
-l2O<Ms82`opBT3n!<3&\r"B#Es616iq[r8m%/^b4mMl!9i9V*V!;lKmoDT9cp'guos8C9ZXgZ'U
-`3Z\fY0*9@Za-j?Yd(L?o!J^u[CNBPYdDCE\0/>l\[]2[\[/`\ZG!EO]WelB"@GOK83oa:2$UOr
-aL8MU[f<f>\@&cS\,Nl;\+-g*[IUd'[fX"I\,3T9\$i`QrNuR3s0hs8rj;U2!4;O/J[Ee2"L5Y`
-T`+0UJ,~>
-l2P,ds82`opBT3n!<3&\r"B#Es616iq[r8m%/^b4mMl!9i9V*V!;lU!q>V/tpBgWZoC'Q,X1?-U
-^9"?LWQ(C8\@o\ra2uB:\\#Da\\#Da\\#Da\\#DN\\55`[(+6R]_T/V[^a8_^p(Jd[_K!`EI3Cl
-I"HlW*Pf;4\,NcD\%0,`]=knm^VIXu[e$g)Yk,%"[(F-Q^B;0a]tOEWrOr6G!P,Z<Z3dnJ]=khe
-['6dCrO;j9qmcX9!P#Rh[*c5`Z+\>RrTaE5~>
-l2NF7s8Mfnn5ZX7*</=(Y!DeCViasYqEgaN56"35Yt+ahU9$iO<3)Wcs79$cS1aU9rs[TYp%7pW
-o_\O`o)AXQrs/Q$q>C$cq=a[\,5Cj%o^_YFo^_bDn+H)@pZhDDo*HKT*#^+/*$t[]mc4!6rqQKg
-s8LdQ!VuEeo`"O_p\ssfq#UBlr;Qosq=sd`rVufp!<;inJcF*s#5S2k[f?:)m/MS~>
-l2NF7s8Mfnn5ZX7*</=(Y!DeCViasYqEgaN56"35Yt+ahU9$iO<3)QXr:*UdTeu`Irs%3Wrr<#*
-r;Q]rpAFpnrr<#trposmrVuors8;oqqYL-jrqQL7r;6Nis8Vlos8!Kt6UO%/01\Y@s8)9arVZTl
-r;?Nhs7-'grU]perpKgcrqZQordk*ars&;spU:,"rp9Z8~>
-l2NdAs8Mfnn5ZX7*</=(Y!DeCViasYqEgaN56"35Yt+ahU9$iO<3)TZrUa'pUbqi@p@Zl2rVuo+
-rr2otp\Y!jrTO7]rVHNorqZHss7c9fp&G'err3BR@VB:YBi&Y\)?9a9p\t<nqtpBhnb_nP!;l0`
-s8)fpqYpTnqLS[^rs&;spU:,"rp9Z8~>
-kPm1,s8)cD*u:1Bb8))/s2bm*[djC8,o+l-]0$Y8s03sm^]2I[--#ub_@$dkqu-Nos82iq$G$36
-r;Zcjlhp\[li.1cq#C!ds8)Tl!WMokp^6Teq!@eEp\=LJs7cR%!U':Mq"aa_qu$Hmg&D-Qq>'p_
-s7llrr;?Qnr;QTn"T8)kqu20H])MiAs8;3_J,~>
-kPm^;s8)cD*u:1Bb8))/s2bm*[djC8,o+l-]0$Y8s03sm^]2I[+hdgO_[mO,s8W)us7c?hs0qq#
-qZ$Tjn+m"Sr!<9#qY:*_s8VrcrrrB$s7cQirr3JurW!'*!s\o7$NpUos8DTirr2rt*WH*<s8N&t
-s8N&ts8N#qr;6Ehr;6Ehr;6Ehr;6H]s8W'"rVlfms8W&rrrE%Ls2+d;\GuKms*t~>
-kPn$Ds8)cD*u:1Bb8))/s2bm*[djC8,o+l-]0$Y8s03sm^]2I[+Lq1C`=j'7s8;]hqY'jes181(
-rVuoonGiIaqY9^TpAOO]#5\,po`+sfq#C3h(&.\*nbr+Xs8Vur#8nNq'G)-+)%5[&rtkY2qY'XT
-o_/.YrVQQjrVQQjrVQQjrVQ0^!;l3a"8hrkrVQTsrVHBhrVuTiJcF!p!kA:.li2J~>
-l2Ltarr<#ns8)`p"nqQfqZ$*ars8B!s7$'`pAaa^rr_WfrUKgcs8)upl0\KBq#13no=Ou$#6"Js
-q>'pcl2Lb[mJdFfpAasgs8DcmrsJPnq>0j]nG*%`q;;2\rr;utrr;utrr;utroa:`rqlTjJcEF`
-#5e5qppC"sli2J~>
-l2Ltarr<#ns8)`p"nqQfqZ$*ars8B!s7$'`pAaa^ru:>)rUKperUp3hs8VZ^s75aYo_SFKYl4P"
-rr)$[!<2ut!;Z?gq#1Woo)A=]rqZ6bnGN:c$2s`#rr;fkn,E:`rq6:"rr;utrr;utrr;utroO1Z
-rW)oqrWN2trVlforr`8ur;Q]qp&9OBdJjFJq>U/rrVPp\J,~>
-l2Ltarr<#ns8)`p"nqQfqZ$*ars8B!s7$'`pAaa^ru:>)rUKpbo]u;Ks8Voks7Q$aqZ$TcXRu5]
-qu-Hm"oJ/ko_/.PqZ-Qnr;cTcrV6C"p[eFYs8VrfqWR_MrrVlbmJ?k_rVl]lqZ$L%s8Durs8Dur
-s8DurnGa='qY9p`[email protected]"Xa`rrr;pq"t$gr;6Njr:g<hrIP!srs&ArrqNl!
-qs494~>
-l2M1fs6fpequ?Ths8N&srr3f1s8N&pr;Zfrs8Dutqu6Kjs8Dups8Vflrrr>toD/:\qYpQ,rVm,r
-r;6<equ$HmrtbV3rqucnrqucnrqucnrr2lrs7lWorTaC_rVZ[#q!n1[s7c-an,NFeo)Aair:Bdc
-rosFbrqucpJcE=]"8o_0rp0T7~>
-l2M1fs6fpequ?Ths8N&srr4SGs8N&pr;Zfrs8Dutqu6Kjs8Dups8Vcjq>^Bmqu?]orr)Whp%nQh
-rV?Hlp?2Ger;Z]hrr;forr;utrr;usrVHElq>L4)qt0g^r9jLYq"k!hq"k$gn,2qXrr)iqf`1mK
-rVulrrVc`prquirr;Zfrs8;uts7?5@s3CWHr42k,li2J~>
-l2M1fs6fpequ?Ths8N&srr4#7s8N&pr;Zfrs8Dutqu6Kjs8Dups8VW]nG)k[rqucrrVca"X7PiU
-qt0gd"o\>pq"X^Vq[`K!qt'd`q"t$erVQQjrVcQl!;ufq')qY(p\*\?rVlfrs8)cqo]>f>r;@!"
-rVQQjrVQQjrVb^T!r2K_rqQNirqHoqo^qbIqu-Egq"jmdr;Qrtq"Xa`rVZQmqY^*hqYc!Fci4+F
-\c;Zps*t~>
-q>Uj"r;ZEhq#1-jqu?$^rsAT$s8Drqs7lBbrr3&os8Drs&,c2%q>^Enp\Y!Xs8DWjrr)j'rVQWl
-s8VlgrqcHj!k/..rr;lpr;R3)s8N&ts8N&ts8N&tgA_0PrVllsh#HsEkPkP]JcE=]"oeQ!\,ZEm
-s*t~>
-q>Uj"r;ZEhq#1-jqu?$^rsAT$s8Drqs7lBbrr3&os8Drs&,c2%q>^Enp\Y!Xs8DWjrr)j+rVQTg
-s8Vois82irrr2oq"9,V*qVqM_rr;utrr;utrr;utli-_[q#1<orlkE@rWN9#s8W)ns8W)urrE%L
-s24j?rr;l)s8D9`J,~>
-q>Uj"r;ZEhq#1-jqu?$^rsAT$s8Drqs7lBbrr3&os8Drs&,c2%q>^Enp\Y!Xs8DWjrr)j'rV??_
-rVucks8Dcn"L7jurVcTmlhq4krVuirrVuirrVuibrr`5sqYU*g"8hrlrVZ[$rVuirrVuirrSRVV
-rVQKfrVZNnrVufoqu?Tm!<;]iJcF*s"oeQ!\,ZEms*t~>
-p\tj!s7H-es6p!Xs7#a^r:^-iqY^?or:g-h#4hcnnbiFYrr3,mq#C3frVm;ss8V<_q#9jas7cQe
-q>UN&s8.BIJcDMF"oeQ!\,ZEms*t~>
-p\tj!s7H-es6p!Xs7#a^r:^-iqY^?or:g-h#4hcnnbiFYrr3,mq#C3frVm;ss8V<_q#9jas7cQe
-q>UN&s8.BIJcDMF"oeQ!\,ZEms*t~>
-p\tj!s7H-es6p!Xs7#a^r:^-iqY^?or:g-h#4hcnnbiFYrr3,mq#C3frVm;ss8V<_q#9jas7cQe
-q>UN&s8.BIJcDMF"oeQ!\,ZEms*t~>
-q>Ud!oDeRbs8MEcp&+h'o)JO]s8DQhp%eC\s82<cq>^'arr3E&r:0gas8V`kp&G'Rrr3<"qu?Zq
-s7u]pqt^6nZiBoRs+13FrrrE%qmZV(li2J~>
-q>Ud!oDeRbs8MEcp&+h'o)JO]s8DQhp%eC\s82<cq>^'arr3E&r:0gas8V`kp&G'Rrr3<"qu?Zq
-s7u]pqt^6nZiBoRs+13FrrrE%qmZV(li2J~>
-q>Ud!oDeRbs8MEcp&+h'o)JO]s8DQhp%eC\s82<cq>^'arr3E&r:0gas8V`kp&G'Rrr3<"qu?Zq
-s7u]pqt^6nZiBoRs+13FrrrE%qmZV(li2J~>
-pAZ!*s80]#%KAEas8MBbqZ$Hmo`+seq#CBgrr32pn,N+]q"4Rcs6^O"mf3=_s763ip%/4Vn,N1Z
-qtTpc!jhq(JcC<$U]1Mss80;*rTjK6~>
-pAZ!*s80]#%KAEas8MBbqZ$Hmo`+seq#CBgrr32pn,N+]q"4Rcs6^O"mf3=_s763ip%/4Vn,N1Z
-qtTpc!jhq(JcC<$U]1Mss80;*rTjK6~>
-pAZ!*s80]#%KAEas8MBbqZ$Hmo`+seq#CBgrr32pn,N+]q"4Rcs6^O"mf3=_s763ip%/4Vn,N1Z
-qtTpc!jhq(JcC<$U]1Mss80;*rTjK6~>
-q#;'+s8Vrq-)ptB!<;lks8Dutnb2eTrW"8Jm^`ZEbl>X"*5V[TV]62ks/d[j_Yq7g'Y"+^*$`N(
-s1g$'`q]B0!jhq(JcC<$U]1Mss80;*rTjK6~>
-q#;'+s8Vrq-)ptB!<;lks8Dutnb2eTrW"8Jm^`ZEbl>X"*5V[TV]62ks/d[j_Yq7g'Y"+^*$`N(
-s1g$'`q]B0!jhq(JcC<$U]1Mss80;*rTjK6~>
-q#;'+s8Vrq-)ptB!<;lks8Dutnb2eTrW"8Jm^`ZEbl>X"*5V[TV]62ks/d[j_Yq7g'Y"+^*$`N(
-s1g$'`q]B0!jhq(JcC<$U]1Mss80;*rTjK6~>
-q>UftoDe4XrtEc[h\Q4k#lO8cr;ZZorrE&u,[email protected](0R&3@>s%fD^7JK$ASj!*LSubE]2P6^9
-nG?%ORnWVW!jhq(JcC<$U]1Mss80;*rTjK6~>
-q>UftoDe4XrtEc[h\Q4k#lO8cr;ZZorrE&u,[email protected](0R&3@>s%fD^7JK$ASj!*LSubE]2P6^9
-nG?%ORnWVW!jhq(JcC<$U]1Mss80;*rTjK6~>
-q>UftoDe4XrtEc[h\Q4k#lO8cr;ZZorrE&u,[email protected](0R&3@>s%fD^7JK$ASj!*LSubE]2P6^9
-nG?%ORnWVW!jhq(JcC<$U]1Mss80;*rTjK6~>
-p\tX"s8)E0*WRLunGN+]s"4!Fs8Morqu?0_*o?E;n.b-X$2=H,na?nd#5.clq#^NX"oeT&lP/jg
-#j_Ehq#:E%s8.BIJcDMF"oeQ!\,ZEms*t~>
-p\tX"s8)E0*WRLunGN+]s"4!Fs8Morqu?0_*o?E;n.b-X$2=H,na?nd#5.clq#^NX"oeT&lP/jg
-#j_Ehq#:E%s8.BIJcDMF"oeQ!\,ZEms*t~>
-p\tX"s8)E0*WRLunGN+]s"4!Fs8Morqu?0_*o?E;n.b-X$2=H,na?nd#5.clq#^NX"oeT&lP/jg
-#j_Ehq#:E%s8.BIJcDMF"oeQ!\,ZEms*t~>
-q>W\Yr:U*es8VinoDejgrr;cns7--_rtYnZWrE(nrrE'!s82lsq#UNp!WEGprs8W3m/I(W'));)
-s7cTooG.2trrTP,qgncus.fStrr;l)s8D9`J,~>
-q>W\Yr:U*es8VinoDejgrr;cns7--_rtYnZWrE(nrrE'!s82lsq#UNp!WEGprs8W3m/I(W'));)
-s7cTooG.2trrTP,qgncus.fStrr;l)s8D9`J,~>
-q>W\Yr:U*es8VinoDejgrr;cns7--_rtYnZWrE(nrrE'!s82lsq#UNp!WEGprs8W3m/I(W'));)
-s7cTooG.2trrTP,qgncus.fStrr;l)s8D9`J,~>
-q#:Kqs7--Zrr5I^s82irq#C<hs7lWor;X5>rZ1Cl&-!:)p(%-#''\imrYjqn$iCP+k6q:soC3In
-rsAK!#lX]$n]7o?\$;aOZF%*NY.&tfJ[4gO#Ip\<X1#I?[DB-QUrTd=]Dqiqs*t~>
-q#:Kqs7--Zrr5I^s82irq#C<hs7lWor;X5>rZ1Cl&-!:)p(%-#''\imrYjqn$iCP+k6q:soC3In
-rsAK!#lX]$n]7o?\$;aOZF%*NY.&tfJ[4gO#Ip\<X1#I?[DB-QUrTd=]Dqiqs*t~>
-q#:Kqs7--Zrr5I^s82irq#C<hs7lWor;X5>rZ1Cl&-!:)p(%-#''\imrYjqn$iCP+k6q:soC3In
-rsAK!#lX]$n]7o?\$;aOZF%*NY.&tfJ[4gO#Ip\<X1#I?[DB-QUrTd=]Dqiqs*t~>
-q#:Baq#::aloYFO!<<&ns7u]hs7lEili47%q+Qs]3r_OBW&XYL;5p`^nP#LQ7fPf@]Kl*]SZ=aM
-mmsI?:&jnds7l6bs8Tk0o_eahq18Qss7$'grVum!p&FQrrrqE^]CYpnm/MS~>
-q#:Baq#::aloYFO!<<&ns7u]hs7lEili47%q+Qs]3r_OBW&XYL;5p`^nP#LQ7fPf@]Kl*]SZ=aM
-mmsI?:&jnds7l6bs8Tk0o_eahq18Qss7$'grVum!p&FQrrrqE^]CYpnm/MS~>
-q#:Baq#::aloYFO!<<&ns7u]hs7lEili47%q+Qs]3r_OBW&XYL;5p`^nP#LQ7fPf@]Kl*]SZ=aM
-mmsI?:&jnds7l6bs8Tk0o_eahq18Qss7$'grVum!p&FQrrrqE^]CYpnm/MS~>
-q#:d#rqufr!6bE:s8W#rrr4Y<qZ$T[3WKf0s0+!fbOige1V3Vd[Ls,%s1CJp`;d5)"j?qd$R43p
-s1oTq`VB?-rs8P*q>^Kos8)_GqgnY7qZlQhs6TdWs6%5q#6+Z&qR?J$li2J~>
-q#:d#rqufr!6bE:s8W#rrr4Y<qZ$T[3WKf0s0+!fbOige1V3Vd[Ls,%s1CJp`;d5)"j?qd$R43p
-s1oTq`VB?-rs8P*q>^Kos8)_GqgnY7qZlQhs6TdWs6%5q#6+Z&qR?J$li2J~>
-q#:d#rqufr!6bE:s8W#rrr4Y<qZ$T[3WKf0s0+!fbOige1V3Vd[Ls,%s1CJp`;d5)"j?qd$R43p
-s1oTq`VB?-rs8P*q>^Kos8)_GqgnY7qZlQhs6TdWs6%5q#6+Z&qR?J$li2J~>
-q>Vc1s8VZirJ.0Hs6]jdrV$3_q#CBms6K^bo'QJWq=jphrTjI_rVlg=p\k*[s8W)up&G$krVuK`
-s7Q9ds8N&uqtg9krVcc*rr3&qs8ITLJcG3=!;lcq!Ufp$rs&5tqOd`dq<\-3~>
-q>Vc1s8VZirJ.0Hs6]jdrV$3_q#CBms6K^bo'QJWq=jphrTjI_rVlg=p\k*[s8W)up&G$krVuK`
-s7Q9ds8N&uqtg9krVcc*rr3&qs8ITLJcG3=!;lcq!Ufp$rs&5tqOd`dq<\-3~>
-q>Vc1s8VZirJ.0Hs6]jdrV$3_q#CBms6K^bo'QJWq=jphrTjI_rVlg=p\k*[s8W)up&G$krVuK`
-s7Q9ds8N&uqtg9krVcc*rr3&qs8ITLJcG3=!;lcq!Ufp$rs&5tqOd`dq<\-3~>
-q#;H6o)JLT#QOi-#Pn5ks82irq!\7Ys8Vins8N&uo`+Xart4\hs8;oso)Jacs7c9ap&F[[qu6lm
-s82irrqlWn#Hn%(p%&.\r.4iurpfsms7$'Ys8Vlf_#=Q<mf10"p%dtSJ,~>
-q#;H6o)JLT#QOi-#Pn5ks82irq!\7Ys8Vins8N&uo`+Xart4\hs8;oso)Jacs7c9ap&F[[qu6lm
-s82irrqlWn#Hn%(p%&.\r.4iurpfsms7$'Ys8Vlf_#=Q<mf10"p%dtSJ,~>
-q#;H6o)JLT#QOi-#Pn5ks82irq!\7Ys8Vins8N&uo`+Xart4\hs8;oso)Jacs7c9ap&F[[qu6lm
-s82irrqlWn#Hn%(p%&.\r.4iurpfsms7$'Ys8Vlf_#=Q<mf10"p%dtSJ,~>
-q#:Ees7cHk!:'L^s7?9j&,lOus8)coq>^Hks8N&nnc&Olq#C$es7QBk!;HKm$2=K"r:]g`p]('e
-rs&K&s82Qa[=S@/s6'C`r;ZW.rs&>is6m#gq<S'2~>
-q#:Ees7cHk!:'L^s7?9j&,lOus8)coq>^Hks8N&nnc&Olq#C$es7QBk!;HKm$2=K"r:]g`p]('e
-rs&K&s82Qa[=S@/s6'C`r;ZW.rs&>is6m#gq<S'2~>
-q#:Ees7cHk!:'L^s7?9j&,lOus8)coq>^Hks8N&nnc&Olq#C$es7QBk!;HKm$2=K"r:]g`p]('e
-rs&K&s82Qa[=S@/s6'C`r;ZW.rs&>is6m#gq<S'2~>
-q>V3(s7--bo`+FYs8VTgs8)cos8;coqYpL%r:p<bo)JaUs8VckrrW#ro`"jnp@/+Mo)AXirql]p
-#P@olrr2rkqtpBuZMjh'q>Ks\JcC<$nc&g^s7ZKm!;+#*"SD]os7bjZJ,~>
-q>V3(s7--bo`+FYs8VTgs8)cos8;coqYpL%r:p<bo)JaUs8VckrrW#ro`"jnp@/+Mo)AXirql]p
-#P@olrr2rkqtpBuZMjh'q>Ks\JcC<$nc&g^s7ZKm!;+#*"SD]os7bjZJ,~>
-q>V3(s7--bo`+FYs8VTgs8)cos8;coqYpL%r:p<bo)JaUs8VckrrW#ro`"jnp@/+Mo)AXirql]p
-#P@olrr2rkqtpBuZMjh'q>Ks\JcC<$nc&g^s7ZKm!;+#*"SD]os7bjZJ,~>
-q>Up&s8Vlis2-8h-b]Q[qYgF&qYg9jo`+sbs7ZEkj8T)Yr>P_2s8DusqZ$Nos8;oslMpbXs7lW\
-s8Vrqp&=q#s82Wjs8TS-s8MciqY^?2rr`8urr14C!<2lq"8r&nr9XFcrqu]ndJj:Iqu$Bls8;lr
-!WN#crW3&uj8T2[qu6Tp#6+8pru%7F_#FT9s8O10+TDBCqt^-gnc++~>
-q>Up&s8Vlis2-8h-b]Q[qYgF&qYg9jo`+sbs7ZEkj8T)Yr>P_2s8DusqZ$Nos8;oslMpbXs7lTW
-rr2cop\k*uqtU*hr3Q>$s82cprVl0`!<2rsrVlZn%K?D,s8N&ts8N&ts8N#rr;ciorrE&srSdbG
-rrW,qrVQTprr)ffrmh&Crr2p"rquZkrr3'!rVb+C!r`#orVm6'rr;Zimf7>-oDHN+#laVspAeq.
-p@S7^s8MZjJ,~>
-q>Up&s8Vlis2-8h-b]Q[qYgF&qYg9jo`+sbs7ZEkj8T)Yr>P_2s8DusqZ$Nos8;oslMpbXs7lWY
-s8VrqpAOprq=a[`rNuP's8)Qk!<)ln$iTu$qu-Ejqu-EjqtU-Lrr`/pqYT%I!<)Nd!<)0^r;HTo
-ir/9ErVaS4#lX8fl2YZ$m/+["%JBA[!*fNoo)8UcqYL0]s*t~>
-q>UNos5EtW0,_.lW#bp<nGi:`rV-?lr;ZQhs8Vopq>C6lqt^6krp]sfpAa^\s8VfmrVuKho_ndq
-rr<#smelkXrqud%_>jQ4o^qPFr;?Tprlb<DqtU!WdJs4F!W;rqrrrDro(;\UrVlrss8Dor!;?9h
-"oe>jp&"aarrrAss8Vurp\k?drr)imr;HWert4\knc/V4s8V?Yrr<#onb2eTrVQZkp\P!iqu-Hr
-s8)cqqs==sq<.JOr;ZT_s"hs<eNEm$rqZQbpA"X5rsV6Hf%!:js75XAq>TsVs*t~>
-q>UNos5EtW3#T*uW#bp<nGi:`rV-?lr;ZQhs8Vopq>C6lqt^6krp]sfpAa^\s8VfmrVuKho`+mc
-qt9pfs7?0g"oSAuqmHA#rs&H%s8DimrU0^cr;ccpr<3,urr2lqrYPV6rVcZmr;HQlr;HQlr;HQj
-qY9g[qu6Tp#PnArp@n@Qr8[bUrq$-lrq?![qu7<+oBlACr:p3dqtoj[q>:'erVlEg"TJ8ts8;Tj
-!;-9j!WE&srr2irr;HR0r;6EirUU!cq"=^Yrq-6ho(i=brr)iprrE#js8Dp"s7uZnqX=D"qt0IZ
-qYpH`r;ZB`!+JB,!;H$`qZ$3^q#Ab@&H2>'>t7Wim.gV\rVuoerVlKiJ,~>
-q>UNos5EtW3Z5="W#bp<nGi:`rV-?lr;ZQhs8Vopq>C6lqt^6krp]sfpAa^\s8VfmrVuKho`+pi
-s7lTlr9sOXrqcioqtp<#rquors8;co')VCpp@e:Tq"FLVq"FLVq"FLXrVHNj!<(sX"8)'NlMUY^
-r8R_WrV6!X!;l]os8Dor"T/5qs8;iq!;HKm#lFJnq"jshs8Droqt^Kko_SFXqY0ahlhL5Lo_AFU
-rs8W(qY'[ap]($brrE&rrrE&os8DrprrE&`rtYG2q#CBis8(jB!*D6S!;,sarr;cjao;nMo\fd,
-+#!]Yp&4LEnbW.Ss*t~>
-q>V$$oDeCUr>)[4aTVY>rr2`lrr5+Sp[/"-!rp(X*Q.ok+Y:A)s3(lm])Tl"$GHJN)AU^#_B0rJ
-r;X>?)U@sOq>'scqu?]&s8Vf\o_&"Wqu6ZqbPqeDq"=Oar;5"Ds8N#qrVm'!o^MDCkl1SgpAajd
-rVulss82]n"TJ2eo_eRc(]474qu?]qs8MurrVlcprVHQor;Z6`s8Drqrri2ts8W)trt+o'o`"kC
-R5"[8q"t*kqt0dbrW2omq#19krqQTlr8[eWqtpBt.C\('hVKWsrt"]Og>N#2aYj+gnauhWs7Z*b
-J,~>
-q>V$$oDeCUr>)[4aTVY>rr2`lrr4tOp[/"-!rp(X*Q.ok+Y:A)s3(lm])Tl"$GHJN)AU^#_B0f;
-n,0O()UnQ_s8MunpA"N\pAOmcrVm#uqYL-hmf3(]rVccqnbiXgqY9m`rr)j#rq,XJqYT:NrVcit
-rqHEprqH*^r;RGtr;$-Oqt0m^q=jgcp&"O]r;HWfrtbJ2s8)`prVuiqrVlcprVlWms8Dueqtg?m
-rY>5(qtg*`rVZ0as8Vcm9*"nis7u?^qu6]pqsj[nrr)clqXO@TlhC2Gq$$HPmJm4crq6<k"oqOc
-5s&TWrqZWcrqcHcde=(ChZ.4M8Q8=_qYgErp\XCWp&BO~>
-q>V$$oDeCUr>)[4aTVY>rr2`lrr5"Pp[/"-!rp(X*Q.ok+Y:A)s3(lm])Tl"$GHJN)AU^#_B0iB
-pA_Q2(sV[Lq"XUWoCi$Yq"j[SrVHNrqXjFTmeZq[!<(sX"7tmCo@s9Tqtg3dqtg3dqtg3dqtg3f
-rri;tqu?0brrr)qp](-iqu6ftqY9j_rVulqrVHinqu-6drVQQhq$6TiqtU'Sp\jpf&c;P,rqu`p
-s8N&soB60E!;-3`rrN,tpAb'hs82fl"S2?_mIp,C&bG5RoCVbIo^D&%!&c5Q&jHBqoDn7WoZH_7
-n^IP"#o5*T!;cQ`m-X<5s*t~>
-p\tNsq>^9"0)up+rr4kSs8Vfjs8;osoKrWQ9T-)NoKT4>7K<3NWiA>Z4.<9KqFmTh4mPS;RlCBI
-6F!.Dqu$6frr)is^&J$6p&+^^q>Us&q>0p`q>0p`q>0p`q>1*ds8)`ms8;rsqu7<.rVlcprVlcp
-rVlcprVlcnr;HWo$N0bjrq6<kr;?Qmir8rWs8Muqr;7c8r;[email protected]:Rq#:9jpuD2;
-rr2rtrqc?[p&=Xa!WMohrrDWerrDroq#C-kr;QQkrtG,+s6K\MfuV5Rs7H9is7Q0es8;Qi"oe/a
-o_8=FrtU]@S!:=ORO>\\s8VlX_Cq.>WKX3Nq4uH9%%f5Ian,2gd]F5^rnHrBJ,~>
-p\tNsq>^9"0)up+rr4eQs8Vfjs8;osoKrWQ9T-)NoKT4>7K<3NWiA>Z4.<9KqFmTh4mPS;R4eF7
-4KtG?s8W&rrt58/Z2=M!q#C<fqtg*_qYL-go)ARerr2rr!;ufm,5V38rr)iprr)iprr)iprr)ir
-s8Dilr;Zfjs7?3fr;Zfrir&iQ!r;]hrVH]pqu$?jquZ`mrql^!p&4@Zs82`o"7-!Wrr2frrVlfr
-!;lWg#Pe5or;??]p\=UflLk&Irqc]nr;Q`ps8N)rrVmT%o_n@Y!+nW+!<;ujoBH&Ms7c-Zrqlcj
-qYL9lr;R0(qtTmKmdB,tl/h=(mhkERh.g/4>]F%h!+\8o8jZQl!*<<><bZ"<B@:B(BF8?L<'EEF
-@0$9+h>R3Eq#0mcJ,~>
-p\tNsq>^9"0)up+rr4kSs8Vfjs8;osoKrWQ9T-)NoKT4>7K<3NWiA>Z4.<9KqFmTh4mPS;RPFjA
-5-CG:r;HBfqu-HuZi0dsnb`1ZrVm#tq"4:Yo)A^erqZQjs8Dip!WDoeq#^9\o`"jurUT@8o^r1`
-rq,jYj8JEG%K6+rpA"Obr;ZWns7Q?hrs/8tr;Zfqqtoj^!<)Zl!;63gqYpNp!<)lr'_V7pdEqqa
-/P6$.mI9c+gZ\G/pA4dg!;cQks83E(q=jXTnEfMsiT&SFh::*Je/6Z^`()^`6X2oG!&4p5)$:gF
-!'<>?3_`&c9+Fl#=>23=*_^&DrU\+tp@A66~>
-p\tBks8V]brr3;sl2U2<s8V`ks8MpNJT;(p#5Rfortaep$1.[!lg>#X!<;3^rrr&S&,uq#q]GY2
-$hO9'rqcKhrquK]pTX5g!<2Tf"T/#iq>'scs8;orn,ERjq=sjbrqufrq?6]fp%eU@s8Dp"s8N#q
-qu-?gq[W/lm.gAHrUf%Cp%A(Pr;HWsrqlWfrrDucrs&>so`+pjrr)j#o)&Ieq"O^d"ZX]]s4]Tm
-rs#*`Y`QMjV=UJaWk,k=rru-=p%mV!6G`[.-EH#!iP.#Fs7Yp]J,~>
-p\tBks8V]brr3;sl2U2<s8V`ks8MpOJT;(p#5Rfortaep$1.[!lg>#X!<;3^rrr&S&,uq#q&Aqu
-#OhNss8;fos8MfdpT45jrr<!%rVlcorr)iqc2[hCrVluqqZ$QQrs/Q%rVZWlrVZTl!rW#qpAYEl
-s7QElr;ZHOrVlrss8W&s"98B!r;6?nqY^'arVQZjqY1!c"nqTWnEg/Nrtk8's8Vferr2cjnGuKK
-<_*5gr:0=Ms8W&l!V#RQpC[6!rr)clq=jUSnn)'B8TRsBo4.f.*,P<Dn*f*"p%8;Z9L1X7!:]IH
-lgXB1n*of8n*n]m&b52f6W@8a;ulXio`+mXq=jj\s*t~>
-p\tBks8V]brr3;sl2U2<s8V`ks8MpGJT;(p#5Rfortaep$1.[!lg>#X!<;3^rrr&S&,uq#qAf2&
-#k%KorVQEir<i5ior\)fq>'mar;HWtrVQHgiVsPfq"jd^q"jd^q"jd^q"jXLn,*+a$MsDeqYpNp
-p?V,BdJj7BpAX[mq#16mr;ZEfq!n4Tl2(D[rSRW"rVuWlrVuirrVuieq>^Efp&FpalGrrH%5K:2
-h!!e\hp^$3rp9aKjnJ0Amg\[NlKIBjiDiHS2e#0Go1'B]85[gbdFcOhc,BZ)+VFbj!<1FLcdU@i
-b0pjUjo?e_2DIl*s8V<MmHX9BJ,~>
-q#:?\r;QfmqYpLgrr<#bq#CB^q#@ZVp&t'rs6'mX!<<'+s8N*!rW)s'qu6]s!rr9#o*,*m!WW06
-s69X_qu?ZhpA=a_\ao1`rVuWjq"OFQp%\C\r:Bmhr;-3fpC$ZdoC;>>mdBK0nbVkV%K#nlo_%tT
-qu$HhrqksYpu)#CqZm&rqtTdSp\Xph&c;M#q>C9ks8N&uiq2s3q>1*grrN,qqY:*i"nM<_o`"jc
-rs&E$s5a4[r9jRi38<]ts7OMnq>UW=VQ-r"QMREbVlQkuVkg#WR>Qk"rrDo_rrDlkrrta&i8$d#
-oYLP5;7<p\2#mLNrr2p!q>1*`s*t~>
-q#:?\r;QfmqYpLSrr<#bq#CB^q#@ZVp&t'rs6'mX!<<'+s8N*!rW)s'qu6]s!rr9#o*,*m!<<'1
-r8n"Uq>^KkqYpL+_u96(r;ZKirVlisrr)clq=F@^q>:-gs8Mfn!;uHb!;uir$2sbtqtp?frr;Nf
-rrE&Vr:g'jqYU0grr3#uqu6BmqZ$Qos8W!0pAY*Urr;cnr;-6ap\4CVqu$?hr;Zd$oC`%Ss8N#o
-r=&H!rSRSPp\jp`q"a^aq>C./rVZQckQ#6V7lNS.!:]aMmHF65b_9S@E+N#F@hE0W?=%#J@Us(X
-AG5fdqrmqQoCsI$h=:"'q"!eAm.p,N6!.[uo\TE?p$);?p\=LXp\=LWde4"Aqt17n6>-AjoD\di
-rW2imo`'F~>
-q#:?\r;QfmqYpLSrr<#bq#CB^q#@ZVp&t'rs6'mX!<<'+s8N*!rW)s'qu6]s!rr9#o*,*m!<)j,
-rT=1Xr;Zfmqu6U%_YN]op\OFTp@nL\!;l-_!<)lrqu%0)q>:!bq>:!bq>:!bq>9gJkk>#U$N9et
-qu?]gqX!P?i;4#_qu-Ejqu-Ejqu-Ejp[J1J"7>UMq>L=)qt01?lhLA>s8Vrqq=jUWqt0pgpAt9f
-p\uoGs8V3\s8N&np%S.Rp%S.Uq"ja]q"j[OfDmf5'+5U3!9*.qcagp8Wae@b:.$f55n?=R9MS>Z
-;,^Ij<9NQ)m+Usr(Y\6:j7Dg0n_W?]!&$T(!9)>mmHNEniQCHrnb_eU/fdOaoD&@Zn*]l>s*t~>
-q#<MSs6fpY!<<)_6hgQZq>^Kbp](9`7*kK;rrD3O+9)fCgB7?Q$1I-rjUgtB)Y4F-jWX=@g[bdP
-o+LHg-3!o[m)6-6Yb7/nZ+%?VYe%-CX/W2(ZF.15\c9/=[f<`CZa-g>YHG%0XK/M3o=$B][Bd$@
-ZF.6S^8J3?Wj&S'[C*HN[^EQO[^EQO[^EQO[^EQO[^EQO[^EQO[^ERB[K!]5\,<cj[B-I9]Yh_+
-^7r0>SAW1]*R)$qb,r.HZ*h-T^;.S$]sY)MZF.-M\uWibbKRQ=`VI@V^Vmq?Xi8,tbl#ccbT=mM
-dF5dtI#0Pac27P@qp;o)D47&oX-U*?j5]+[lh][ekje97mc`]de]cL[ZbX#E[^ETS\%&oW\%&oW
-\%&oW][OR+\?*<b\\5_n0#+Hh.DEC"Z+mZR^p^YZ[C3KO[^`iX\@K,[\@K,[\@K,[\@K,[\@K,[
-\@K,[\@K,[\@K,[\@K,[\@K,[\@K,[[^r`S5-R0Lrs8Pcqu?<fp\"IWs*t~>
-q#=Fms6fpY!<<)_6hgQZq>^Kbp](9`7*kK;rrD3O+9)fCgB7?Q$1I-rjUgtB)Y4F-jWX=@g[bjR
-o+C6^+T;6;lc?NI]!%sX]=YP[Xh20W]tM%h\$i^7[/[Q6[f<i:\,s4P])K<+]!o,U[^EQO[^EQO
-[^EQO\%92^\$i]O[()j7[B[9LXgkjK\$icS\$icS\$icS\$icS\$icS\$icS\$icSqmZL3r3[]X
-]tM"bZ*1@9TY7\)^9aa<[\oqCZb`cT\$`TKZ*:F:[JmZ7[L'@9]WeuYVm<M+qPaatX/E^sXJi8$
-r2K[q*iZ6IZ_)VQFUMhW68qJ%:i$,+F`M\L>&ob"U7e<^qP/J3S!f_8StVpVVRNh0]XXrOYHG"1
-Xfee/Xfee/Xfee,[ALUPV5C;b['ZP-B1uV3WOAq2\Zi9MYd(F;Yd(F:YHP+4Y-5"3Y-5"3Y-5"3
-Y-5"3Y-5"3Y-5"3Y-5"3Y-5"3Y-5"3Y-5"3Y-5"4\u(MgA,u8ms7?9jp$r'4~>
-q#<MSs6fpY!<<)_6hgQZq>^Kbp](9`7*kK;rrD3O+9)fCgB7?Q$1I-rjUgtB)Y4F-jWX=@g[baM
-nIOp\,l[fWnB&,N\ZDRN]"5>UW3`\2Z*LaFrONBK]">Pc]">Pc]">QM]D]>>\0JGl[^NQO['m?M
-['m?M['m?MYGA&&]u%Us[^rEM_7-eDQa,PY!4;@'*jMcGWiN8-\@\lb];)s9RK0=ZS].kLX1>UC
-Z*LgLrkJKH%D0-Y['?1.Vm*7iUnOIXU'RBeTX]uXTqnCXTH9\uW2Pr#<Ghe.,ngG&)`(Lp6=X.o
-8iBXiKS4r3N;A6WLPh+ROcu&tS=?UXT<kYlZE^[@Z*U^AZ*U^AZ*U^AZ+[<S_QC5ZXgbU.!+n\p
-!2d61Y._*H]sP)PZF$pEZF$pEZF$pEZF$pEZF$pEZF$pEZF$pEZF$pEZF$pEZF$pEZF$pEZF$pE
-ZF$pE[)/Va!($\LnGi+Tn`BK8s*t~>
-q>Uirr;Zff%1iL>$E3aus"F3Js7lWj!W`H1!(a0)0DJ#0XBG,o8"m"Gs$``h2ZH:AY<;hDX0"\[
-r_Su\9)/Dc#kRHXp%5NWn+ck]#lFAenauVSrquBb!<2Ng!<2Tc!;uirs8;lr"o\K#r;?*2s8Vch
-rrW2cq>UC$2PM;ns8Duhm.gMSrrE&]rrf7%WMc]oWW/psV?NluV3I:[rVm3I]]e\]gpnO,s8:jU
-!<)Bd&GH._nb`+]s82Bes!FE^s8Drss89q;&GGo!s![pIrq5UPrqH'Tkkk&QJ,~>
-q>Uirr;Zff%1iL>$E3aus"sQOs7lWj!W`H1!(a0)0DJ#0XBG,o8"m"Gs$``h2ZH:AY<;hDX05"g
-s%o#V6h1*QrqH*brs$IBp\4CXp\Fghrpg!jrVQKiqtgBirqc`mr:'abrWrH!qsXFYrr2KfrZ2%<
-s8N&ts8N&ts8N&ts8N&ts8N&ts8N&ts8N&trVufps8Duqrqc]prr3Z-p%.kOqsj[apZ;Hc!;H'S
-s7l<ertYM0q=s^Zqu-KkqYBmZoCLu/nEB<-p[/7QmUU'E@:B.Cs'bq:(hIDl<`O[ooCMJPAlL]Y
-6"0imroNqJp\Fg`"7u'Xq"j^cp$r%N!qGmSrq6Kir:]g_rq^L-qYC!`qYC!`qYC!`q;qPCs8MN^
-q>L!_l2Y>pp[7qOqsj=Tq>1!bqtg3dqYC!`qYC!`qYC!`qYC!`qYC!`qYC!`qYC!`qYC!`qYC!`
-qYC!`qYC!`qYC!`r;Q]nl2gGLs8Vurs82fqr:L#>~>
-q>Uirr;Zff%1iL>$E3aus"F3Js7lWj!W`H1!(a0)0DJ#0XBG,o8"m"Gs$``h2ZH:AY<;hDX0+e\
-rD&]W9)/Dc!W)QirsQdFqY^6ipA4RZqYU3j%f6)!qtg3dqtg3dqtg3go`#X(s8DfhqY9p^qY9p^
-qY9p^qY9^Xr;Qfpn,<7gp\OU^`r?DEk5\`enaH#JqtL*i!<)Zl$iBYdjPS,+c,7Q@chPrndaLc`
-91MYO9c-Z):-UsW[GgK9!(/FQ-RD4^n+PZ+h;Ii&p]L-Xq"X^[!;6<^rUgEhp%\CUo(r4QrrVln
-q"t'rr;Z9eB`Rbt_>b#Em*5U]l2CYZp[mhDs8DTiJ,~>
-pAY?ks7u`qs6ose4n8RGs8;ojs8VZirrE)B((eIb_BBZ4s8'eS+M%Nl&KJsn_]Kc5p<=Nk]DMN;
-,TOl)qYpNoqtTmVU@nN_rtbG'p%\Fas8Monqu$?hqu$?hqu$?hrp]sZqZ-WprrE&srs'kMs82fb
-s8C(>!<)os"Y)ae^p5QVrs$$1XerV*WVNIoUopcbrt>>2.88OIc7Aq]q>^Kos8W&nr;Q]rrn.5d
-nabu6s8;omq=t!i)?9U6p\Opiq>^H9rt+htq>\50s7tpRoDS^\o()\Ns*t~>
-pAY?ks7u`qs6ose2=^_?s8;ojs8VZirrE)B((eIb_BBZ4s8'eS+M%Nl&KJsn_]Kc5p<=Nm_#OGI
-+rA&loD/C`rs,k0rqlHbo(W%]s8;Ee!;uck!;ZWj!;ZTi!;l<d!WDrqr@7^>pB9dYq#0XYoDS[e
-rr)iprr)iprr)iprr)iprr)iprr)iprr)iprr)lsrWW9"r;HQkrqdi9s8D]_mdBQ8n,)hN!!$G%
-?NBW]q"spdrVZNep@\(Mrq?Bb$M<o[n^r1uBO>[`pgOJ7BP$GkmHj0;m/?;OmelMsnbrLd!!#tg
-?3'oqs82Wbrr)]dq>C3hqu$EjrVZWlqtg<es7uZj#Q+5lr;6Egr:0Y"o`+p`r:'R_s7Q3[!;Z-^
-o_8=_pAOmbr;ccDq[WT(s76)irr<#ms7H0fq==Q9~>
-pAY?ks7u`qs6ose1\(M=s8;ojs8VZirrE)B((eIb_BBZ4s8'eS+M%Nl&KJsn_]Kc5p<=Nj])2H:
-,oao)qtg<mZMa_'q"F^^!;l`p&,l4spA"@VpA"@VpA"@VqtC'irU0O_qY^?qoBcP>rr3&qs75+J%
-f?5%rVQQjrVQQjrVQQjrqHNjrVZ[/qsj^e;EIYSjP]Lunal;>n+$&DrVI*$p@Ib>k2F6h8P;9C4
-\#6<#=M<[ccsqih"0>5jno#C!!#2=8c\#8rVuicqrdt\r;Q`qrp]pfrV?Hsr;ZWoo^'?ms6'cSk
-k>&Qs6oLMn+-L/~>
-q>UH_rr3#rr;?R,p\Y!_s8VurrVuKhs82Tfs7-'f)Z'C3p%n^_s7c6do_SU`s7cQnqZ$EkoDA@\
-rr2ulrr33#o(`%N]D;:&s8;*\!;l]o!<2uq!<2ut!rN#op&>'hr;Q]t/"IsbrrV`jqY:'orVuTj
-s7baW!;ufq!<2Wj#<Blgs7b3p9)S\kSY)UNrh^%$X0&G&Vkg#WOfmI=$hsGks8P'h0)th@p&=sk
-r:'acrr<#nrr;ofqZ[!!s7l?frVllsqu6Zoo(gZ0"TJH$`;fi9r;Qitqu69gJ,~>
-q>UH_rr3#rr;?R,p\Y!_s8VurrVuKhs82Tfs7-'f0DbVHp%n^_s7c6do_SU`s7cQnqZ$ElqZ$Qn
-rr)c`qY^?ipAY*g^\7Nto^VSLrVlg:rqlKcqu?WprVlfrrr;utrr;usq>'maqssX^rqcX"r;$6]
-qssXYqYMuErVlcmr:oj`@K?6%rq-3_rVcZmr;HQlr;-?_qYg$ar;HQlr;HQlr;HQlr;HQmq>Vf:
-s8)copAOm]na,K#jlu4)mdC-W:eX/M@K>ETjn&%YCM@HoAnCpOs(24B#\[Y!i:Z^8m.^&D"SMEZ
-p%A:Ws7cQgs7luur;Z`mjT&fkr;Q]tr;-?jrX8](rVlcprVlcprVlfprr2rrr;Q'_!<2rs$2jYs
-s8W)qq"am's8W!(ppC"us8MunqYU3]s*t~>
-q>UH_rr3#rr;?R,p\Y!_s8VurrVuKhs82Tfs7-'f)Z'C3p%n^_s7c6do_SU`s7cQnqZ$EjoDAC\
-rr2umrVm&qr;ZN.rVQTsqtTs`rVmE-q"a^\p\O[]q"jd^q"jmcrsJN#rVuipqtp6drU'UmqWlo:
-6icTMs7u]jpAY-lmJeU2oCVYHoCVYAoC2ADlh'T&g"G-8i8WeWf$FCK,97CC493.@`m`=+r^ISo
-92SDQ6UXI<:K1FrHJA&`qXOFUoCN(WqsaUkp@6u>>la*WptYlLrUTjcrV?HtrVH6ZnalOnqu6Tu
-qsUHSq>UBrrV6*_o)F4~>
-pAYumlZDFF(]_bZs8;ols8)]gs82ijp](!Urr3&ls7Q?j"oJ>orVucort>>*s7uZns7c']qu$?i
-r:p<lpAY*lrWDu,s7H0fs8M`l!;?0e''0)js8N&rqYBs]p\+I`s7H?gp&>$frr3-U=No1#0`M(P
-psT0[VLb87Xg5@>Vk93F/&Td%r;Ys!b=8,/M5K\?"8r2lroO1[rr;ltq<S%ZrrjDBs8;]imf*Ii
-qtTs^q>L3jr;HTqr;6*]"oSE"q"XjerrDuprrN&no(^Z/!W2forr^"8rVl0`J,~>
-pAYumlZDFF(]_bZs8;ols8)]gs82ijp](!Urr3&ls7Q?j"oJ>orVucors8Vus7uZns7c-as8W$%
-qXa[anbrIdrsAP0qrdbKn+lk[rr3B*qt^'br:g-eqYKRTr!E8uqVM2Gs8N&tqYq]9qW@YAqY'dZ
-p\+@Tp\+@Xl1t/>!!$A:<rgkJqrmnPqY1*Zo)ACco(_nJp]:6hrVQWk2Yle9q=s94>\[k[?X7#L
-BkqbiE)]:Z9MeGj!)@?*Dt<JhnF,i6md9H1n*ol<o_.VHqt0jZnGW:^s8;fns8N#t#4VZfp[eI[
-rVmN.q>'mcs8N&ts8N&ts8N&ts8M]krVl-_!ri)prr3'!r;FD1%fZA'ppL,"s8Dikq>'pdoDa=~>
-pAYumlZDFF(]_bZs8;ols8)]gs82ijp](!Urr3&ls7Q?j"oJ>orVucort>>*s7uZns7c$[qYg9j
-rVHQop\t0rrVQ8ps7-$e"9&#gqY^?trV6?iqtodWrVum)rSmkQrVZQgq=saap'(9ls7cQmpAY[%
-lML;4!!"o=2#tnprTX1Sp]9gRrqHQcq"=4Q47qq(lKINslKINhlKINfHV.429i"P^;c-7c!'VG7
-!#R"[email protected]\air2dhW=4kqqM,WoDeX_p&ORMnbCo>%K6(uq"jd^q"jd^q"jdb
-qZ$HmrVlisrC$PZq"41Mq"X^\qY9p^qY9p^qY9p^qY9p^qY9p^qY9p^qY9p^qY9p^qY9p^qY9p^
-qY9p^qY9p^qY9p^qY9p^qY9pcrr_u!qYgEn"9&)kqXXZ:~>
-q>V3+s5j8W_#IB#]cR4Mrr<#nrVuosp&4mpmf3:\s8Voort"`!s5X.Equ?9[pAb0Ns8;iq)tj-s
-rr2corqH-ds8N&ds/c8#pA"1Qn+c\Rq>U6prr;uts8DWjs8E0!rpg$`q#:?nn,EVZf]i/&o?p>(
-rro6_Xe)tpn>-,\rMBOk"d-*aX/NQ&rsJZ's!k\Hs5_D)3pZeMq#C!YqtU!Xp]13bs8Dp$s8Dut
-rr;rjrtbV6rr;utrr;utrr;utrqlNeqtp<jrosFor:osXq>C6kq=sa\r;69ar5/I>r;6Nk[f#su
-r9aN7~>
-q>V3+s5j8W_#IB#]cR4Mrr<#nrVuosp&4mpmf3:\s8Voort"`!s5X.Equ?9[pAb0Ns8;iq(\dt!
-s8VrprUo^[rqZHVrhKJlp%nF_q>1*srV?$\qXOFRqYL!gqYpHn!qcNgq>VT8p%S4XqYL!ar;6Eh
-r;6<]naGiF>!b>39N2#Ylga!'qX+UWj^_8*?$0QGA--7MA,Tm:@3J<]=^tiYcL1;to'u8Aq#'jc
-quBu1;E@`S!:94Ho(MkSrV-Hgq"jsd#Q4Q!rqHHfp[/"Zrq??sq=sdNq==L_r;-TkoCVqErtYP2
-qtg-aqu-Qos8N&ts8N&ts8N&ti;Y_7s8N&ts8N&ts8N&ts8N&ts8N&ts8N&ts8N&ts8N&ts8N&t
-s8N&ts8N&ts8N&urr2f)s8N#t"o\AsrVlfgs*t~>
-q>V3+s5j8W_#IB#]cR4Mrr<#nrVuosp&4mpmf3:\s8Voort"`!s5X.Equ?9[pAb0Ns8;lr'`IV!
-mJ[(_s8Vris8DZaiUiT6s8Drs"oJ8loCVnXrr`9#s8DZk#lOPrq"j=OnbE"T#l+AnrVHQms82Zm
-&c;4jn)sa=*YT)54TO[2n_)Ojcj.t=7Pmk)=$lII&P5bq;Gg=h;GfS_92net^u=SUm3V,Wj4XJg
-2('/'6icB:naH&=n*TT6oChhDm-O-0oCVhSp&Fjcmf*:co(rggp]'jbq"j^Sn+6>Brs8Mnn+$#A
-pA4a_rs\l+rVuirrVuirrVuifs8W#tqu6OUs8Durs8Durs8Durs8Durs8Durs8Durs8Durs8Dur
-s8Durs8Durs8Durs8Durs8Durs8Durs8Durr;Qfp[J^%,rVQHgo)F4~>
-q#;Q2s8N)hl3R1K$N'l$s7u]os7lWop\4^Us8VBas8)cqnbN%]!VuBarrqWdq>0sfrr*,pr:9mf
-rq66i&(LXZq=O1<mI^2OrquZgq>UBorUp0lqt]gCrstKh/\C!,s8V`G[k3`/T;DC`!N33PrsJ\s
-p\+VB_T(HF/+`f:rWr8dr;?Hiq"X[Vp]^Kkrr;utqu.$%rqucqqZ$0erqQ*_q#Bm`ir98]q=sa\
-qu$Biq>^3iq8*(=q=j^XYkRhbp\=X`nGe"~>
-q#;Q2s8N)hl3R1K$N'l$s7u]os7lWop\4^Us8VBas8)cqnbN%]!VuBarrhQcq>1!ert5),oD\aa
-mJZt\qn`4+rVuorq#;!*qY9d[r;-<fr;6Ehr;6HjrV$6js83/rp%.bEnF,l;rqHTcq"OR[pC[)[
-BlF&W><#5J6WI4g@VBOjqI^%EDsZrVn)*O'mfDkCqWn.G$g-a>kjS9Cq=j^^rr2p-o_&8f4'5ql
-n+,T.qYU3fr;cimrrN#frVloqrSdbYnb_DErVm0%qtg0dqY'[^lMpn`s8Don!<)Kfn,Ejrs8N&t
-s8N&ts8N&ts3goFrrN)0q#:?noDa=~>
-q#;Q2s8N)hl3R1K$N'l$s7u]os7lWop\4^Us8VBas8)cqnbN%]!VuBartjo!q=s[[rVccms7cQn
-q!.YHna3RJnbi@c"o@iXmIU,NrrE&krr_u`kk+iFqX4IPlMg/Qk3_O&q<S[L@ql!94q\nY)EL=f
->[h#5qF(WV:!1`#c+V<kblQ;KpYcD6iSi\XpA=a`rV-iqnCuXs3>t+b!;$'WqX=Fbrq5XX!WDfa
-p&aOSkPkM]qZQWaoCi.OrsJYpn+#r>o_/(WrVHO&rVuirrVuirrVuirrU0\JrVuirrVuirrVuir
-rVuirrVuirrVuirrVuirrVuirrVuirrVuirrVuirrVuirrVuirrVuirrVuirrVQTp]DV[2rVQQl
-o)F4~>
-q>V6-s8DF;\oV`gdnTlBqu?]prr;lqs82fq-Mld&p\44Qp&Fmfs6BFRs7Q!`mJleFs7ZKmq=ssh
-jo>>>qt^-drVm9)s8O2@s82BKlL+EErsAW#m/HGPrp0RKqYom^"KA;i^6JJm%%s6\s8OCMaQVg!
-^*Lc!$iB\ghYdER1runm+TDE@rWr2rrr;uqqt^-^rrE#crr`2pqt^'b%K-,$s8VienaYo:p\4L^
-rrW)mr.G"[r:fgTn>u9Qo'u5=nb)_UoDa=~>
-q>V6-s8DF;\oV`gdnTlBqu?]prr;lqs82fq,5U@"p\44Qp&Fmfs6BFRs7Q!`mJleFs7ZKmq=ssg
-jT#8Br;R*%r;,^Pm/ZSPrUp0qnc/+Ys6]jQp\u!-rU9OSna5W(j5AhNiT'@cBP(Lt;u1,=C4kCB
-qZ'nm;GfMj!;QQanb;nR"7#XNmeunMo)[email protected]&Dp?q_Ss8Dlorr2p.n,N@Y!,,AB!:]d[
-qYg0err2lr!<)cl#Q+Apqt9g]r:9gUrrN)hq#:Epr;Q3c!WMuqq?-WmrVi)^!5%LqJ,~>
-q>V6-s8DF;\oV`gdnTlBqu?]prr;lqs82fq,5U@"p\44Qp&Fmfs6BFRs7Q!`mJleFs7ZKmq=smb
-hu3N:r;R*"q"!>+gAogko)8S/p?h#'kjA$;lhg)HrTO4CrVQQjrVQQioC),9jR)j5rp^9Xe'c60
-8jHfDqF_ArDU.Y3!'(u@*tCa?j6bjcq<S4AmH<R/mIKKBlgXcC#P%9Xq"FLPqYpL*rVQKjrVkpN
-m,7q@6<aH_eGfOGnb<OZq#16frr)`jq"X^_lMh_"qtg$YoC;;:mI0cDqY9p^qY9p^qY9p^qtg<e
-rq??drVAnVqY9p^qY9p^qY9p^qY9p^qY9p^qY9p^qY9p^qY9p^qY9p^qY9p^qY9p^qY9p^qY9p^
-qY9p^qY9p^qYL*gs8Th3s8Muds*t~>
-q#:]ls7s;3-g1M6g@kOG$1RupqXOL`li-e\rrW&rrr2p)o`+p\s7lW[s8V!TrrqWdqsXR]rr35j
-rql0ZjSAZMrs1-T9E5%iht6@#rrDlorsbNXVkKrdXg5:AZEL+3o)B*fs8O<I]tDH9s7#s[rs&H%
-s7lQmpuhYunc/Xas8DfjrVuosqY0XTiqi]T4dI&es763fpA=jgrr`8uqtp0gs8;ormf*=cq>L3h
-r;?Qnrsnedg[ah-p\Fggs8MT[qr[qYrW2rri;X5bs8N&ts8N&ts8N&td/OUTq=aRTZ2++fo'l):
-o_J(XJ,~>
-q#:]ls7s;3-g1M6g@kOG$1RupqXOL`li-e\rrW&rrr2p)o`+p\s7lW[s8V!Ts%_eXqsXR]s8N#a
-s8Vopmf34]oD87V!+Q0#qu6'ap\t'apA"L^rVQ?cnau_Tqtg3dqtg3dqtf4Fp[*!*D.-dW>$+j,
-=_D;flKdoin+uG_o(`7Zs7--k;GhE6rU^'gp\Y6fr;?$Ur9s.QrVQTl%JfthrVcHhr;ZfrrVlfp
-rs\5dp[J;i!;cQVrVlWjrr)j*rVZTlqu$?hqss=Mo_n[YquH]crs&2rs8VWhrr)lsrr)lgrWrK!
-q"aa^qu$EWs8W)urV-?lrkAC5rj_b'!ri/tp&BO~>
-q#:]ls7s;3-g1M6g@kOG$1RupqXOL`li-e\rrW&rrr2p)o`+p\s7lW[s8V!Ts"!=5qsXR]rVQKW
-rVuZmmJZkQlgO3(!(R%EmJ62Mo'tl"gu%/Um-X--jluO.#jq!*lK[AC<,)>?raH1E?!'^%cd:1R
-eG@W+gZS+kn_!mB-mqXfjPoh%oDA@`#5A/to(W1TkPkYTs8VuorrW2trr)j'kje0(!)N[cqX!SK
-s8)iqr9aOTr<E#nqsrkIq>C6qqtTgWqs47grVH6ZnaGl3n+cAKs8)cjrq?Bes8'P."oA#is8Th4
-s8Mucs*t~>
-q>UNhs7H9i48o3Zr;ZT_o)JX^qu?]R49,Ynp;8<i]CWlL(XN-Taql/>rP9I!`9tAU'ZC$g(E1<n
-s1&sW^%_@%q>UBt,F$?a74n!.%_8(0WiN/#WiN.g^S$gejSoVeq=t!i3OL=9rV6?js8)Ttm/HAN
-rq69inbi@arqm-"r:Bg[nbMJFq>:'h!qcEirVm-=s8VWhrp9X`lMgk[nG`Lfnc&dgs8MK_rr)j!
-q=XCSl2USYm/I@hq>($is8Mrq]DhlFrr3-#q"47Wnc++~>
-q>UNhs7H9i2uWdVr;ZT_o)JX^qu?]R49,Ynp;8<i]CWlL(XN-Taql/>rP9I!`9tAU'ZC$g(E'sb
-rjs3hbPqM^p&+[K!)Zic!!)]goCqb?kN;!on*fYjlfm]oE)ZRA'39Kg<GU^emIp>Oqu$ElrUK^Y
-nc&=anG;qmp[RnTs7uBV!,V`3rr;rkm.pMU#OVQYs8Vfms6BOfrr;fos8)cfrVllsrqmT2o`"jg
-qY'R^r;#UWs7$'crVZQjr;Q]crrDiarr<#rrr;osr;6@!pA=@Ys7u]pq=jX\r;Z<cqYp<jnbiRh
-rVZWgs8Ms*rr)iprr)iprr)iprm1NJs8MukZMF=qr;Qlsq"Xg\s*t~>
-q>UNhs7H9i=T/:"r;ZT_o)JX^qu?]R49,Ynp;8<i]CWlL(XN-Taql/>rP9I!`9tAU'ZC$g(Dja\
-rO<mcbQ%V>m.'Go!'WY!!!)`kq>0I9f@/4#gtpl'gtLN3@79rj#ZON?=%m)Wki)O+rp0gUn*9Q:
-m/HVWlh^5cp@S+Zs7>I.!*90dq#CBkn,3%]"n2K[s8VlVrrr5us7u]drVuorq>U`ro)S"7h"Ld>
-rr3-"qtg0dr;?`prVQQkqu6Nop@A1Mrr_liqXF@]"8;-LqTAj<rO`(4q"=Uc_#FB6qZ-T`s*t~>
-q#:]ps8W)hs7cNks7Q?j4T5<[p\b'Os1o-C7%0oCs$3QY55P:,Y*i&]33B#?s";*_2?,h:UJ1Rb
-6'S`3nb2t^rVn4@_;F#"XtBYQ%A0T(['-F$TUNQdX/`_srs&Apo^VSNq#:9onFuVU!U0F[rrjGC
-s82Whrr;fl"nMTco)/+IquH`qr;ZX#p%e1Rq!n+Oq>Bja"oJ?"r;Q]arrDrqrrW&krnIGRrVHHl
-"8MQaqsj[drqcNnrqu]nm/ICkqt^'crr2imr4Dt/_YsK9rqZH\s*t~>
-q#:]ps8W)hs7cNks7Q?j:]:=np\b'Os1o-C7%0oCs$3QY55P:,Y*i&]33B#?s";*_2?,h:U.G"V
-6'\uCqu?]ql1P*iA5P^";?61Nnmu6:?!h&QAl`tQ@:4$'lM9ZMlK@O!nFH;Hr;$?mp\t-mn+QSV
-&cD:qrU9dcmIUDQqu?]kp\+R]rrr,rqtC'akl1hcs82irqu$I*rqu]kr;?HjrVH0_r:L$hr;Q]r
-rqu]orV6Bmr;Q]sqtTX[!<2los8;lnrq['#q>9[Ys8W)sp\":Xr;ZBe!<2`mqu6Wo&cDV*rVZWl
-rVZWlrVZWmrr)firrE&drAOTPr;HQlr;HQlr;HQlr;HQlr;HQlr;HQlr;HQlr;HQlr;HQlr;HQl
-r;HQlrVZQcYk\"krr)j!qtKmap&BO~>
-q#:]ps8W)hs7cNks7Q?j:]:=np\b'Os1o-C7%0oCs$3QY55P:,Y*i&]33B#?s";*_2?,h:TLJGL
-6'o5Jr;QTehWOrA;E67#70)cAo4)!(;cHe%>=iEt<)[8Hh>5n8i8s(ck3VF#lL"!-n,DhYo`+jg
-rrMchq#:^"s8Vclqrd;GnbD_U"o&&pq>^<Trs/N&qZ$Tls8N#ts8Dcn#Q"Dmip?4+qu6U!rVH<_
-q"XU[!;?Eg!VZ-SqZ-T`rri)nqYL0frr`&bn,%_:$MsMqs1\O5rVQKjn,In~>
-q>V--rVud&!!*-$!s&8srr;rso`+jgs#BGS(&nd3oaUj8jn0>^rtOtp#lPb*miDB;l1Y5Y&Fnro
-rq[N%lPTNrlMpl8Y,g1Gj4)>O315TZrrN,srqc]pn,NFequ?]qrVtgT!<;lor;?ToXoAD#r;Zco
-!<2KfqYgNqroX7Zrr<#trk&11]);U.rp]r<~>
-q>V--rVud&!!*-$!s&8srr;rso`+jgs#BGS(&nd3oaUj8jn0>^rtOtp#lPb*miDB;l1Y5Y&Fo$"
-s7mDojV\!slMU2Q?qFO%6V1T^!,cEJp&t'^p\FXSq#:'lqYU3hrp0Rcrqu]mrr`2rqu$Ems82lr
-ci<Y:qZ$EkgAh$Krr)corVluur;HTls7?6\rtPJ4rr;utrr;utrr;utrr;utrr;ugs#^5[rr;ut
-rr;utrr;utrr;utrr;utrr;utrr;utrr;utrr;utrr;utrr;utrr;utrr;utrr;utrr;us#5nJs
-[/Kt&r;Qlur;HWfs*t~>
-q>V--rVud&!!*-$!s&8srr;rso`+jgs$?(\(&nd3oaUj8jn0>^rtOtp#lPb*miDB;l1Y5Y&Fnoj
-q"5Emmiqf3kk=0>;`mT3,:l2F!+&jrn+$#Aq"jdZo(2qUqtU!`!;l-_"o\;mq"js&rrr;rq"jmd
-rVuor!;kXOmJm1aqYpZrqYL/BrsAW#s8KP.rr;rqmf.e~>
-q#="Xs7Xl<A]=TIrr2rtq>^?lrTsRVrs8\-nc8^i!<3'!rs&2s!;QQoqZ-Zr!;d!#q@*B(s8E)e
-rri<#!WMckoDehRf[A[;gr)U3s+13Hrs&E#q7-J'rp9Z8~>
-q#=.\s7Xl<A]=TIrr2rtq>^?lrTsRVrs8\-nc8^i!<3'!rs&2s!;QQoqZ-Zr!;d!#q@*B(s8NH!
-rrDTc!<2ipm.o`CE)TD)=]YUmnG2t\r;uuurdk*#s0)G,r;QN%s8Dr`s*t~>
-q#=4^s7Xl<A]=TIrr2rtq>^?lrTsRVrs8\-nc8^i!<3'!rs&2s!;QQoqZ-Zr!;d!#q@*B(s8;r_
-qZ?Zp$ig5)jQtIu@mVn(4$!Amh!4G'qgncus/Z/(r;QN%s8Dr`s*t~>
-q>XOqs82iqs0jm1LuA='oDeC]s7lWjp&FEX+9*5Riu72%lf&rbrtb.e*rcN:o+:p0n)"oM&,PT%
-r:qN2q%rbnoDYo?k;g)qa@SVE`RD-*\[f2Y[^EMo[Xkll[KX%B]?$WBlMlA~>
-q>XOqs82iqs0jm1LuA='oDeC]s7lWjp&FEX+9*5Riu72%lf&rbrtb.e*rcN:o+:p0n)"oM&,PZ.
-s7dGtmhGTnrr8^pXoMI%;ZHfbS@PH'[^EQO[^W_s[Xkll[KX%B]?$WBlMlA~>
-q>XOqs82iqs0jm1LuA='oDeC]s7lWjp&FEX+9*5Riu72%lf&rbrtb.e*rcN:o+:p0n)"oM&,PQ#
-qXts'qAoV4q"BlASH),H56(\:MQ3)HZ*LaF[^<Dm[Xkll[KX%B]?$WBlMlA~>
-q>Uius8VZip](%s"r->$s"jZSr;6Nor;Zf5*q2@^\OZZP\Z%aarD9K&0CrG3XZHJSOhXKc8"ueF
-qtig\Ud+bLnFHPX!]-rAqu6ZorVH]nq"X]:qgnXMqZm/rp[u)srVGm\J,~>
-q>Uius8VZip](%s"r->$s"FBOr;6Nor;Zf5*q2@^\OZZP\Z%aarD9K&0CrG3XZHJSOhXKc8"ukM
-rV&FDRQUNJ%K,qi!+#Z\nb2hRs82`nqZHcprVV6DJbubM#QOSnost,$qX"64~>
-q>Uius8VZip](%s"r->$s$H_br;6Nor;Zf5*q2@^\OZZP\Z%aarD9K&0CrG3XZHJSOhXKc8"uhH
-qY3@PUI5(]q=4"Ak2QG=!7'Win*]uFp\ssjp\+@WJbt#qZMOn,q"OHls8Df\s*t~>
-p&@;Is0XU6MYHl9s7lWks8Vi^rriQ?ZM;im#K$Mc&Ke[cs.gD=ci:F%)8kjO/c`H^cl3nFr;X\`
-(s_O;naZAErr39Fs8W)ts7l-]r;HWpquH_Is+13Trr`6"r4;.mJ,~>
-p&@,Ds0XU6MYHl9s7lWks8Vi^rriQ?ZM;im#K$Mc&Ke[cs.gD=ci:F%)8kjO/c`H^cl3tLrVa>I%
-F"GBrsSMuq!8"QpZqSRr;)!EJcD\K"9&8t]'96F~>
-p&@\Ts0XU6MYHl9s7lWks8Vi^rriQ?ZM;im#K$Mc&Ke[cs.gD=ci:F%)8kjO/c`H^cl3qLs8Tk[
-'@64DqtTgEmHrp6n+#N0qtp3grrrAuq"FL]JcC<$Z2Xq)s89Ims*t~>
-q#;Vss8Tl*ABFlOoDe^fp%A@Grr<#ls7QEis6fperVlinp]'pTrVm!!p](-fs8Vd:s7ZBPs8W#s
-s82KZl.bt3qZ"Y:rU9ORp%S4Vr;HQkq>1#?rIOpQrVlg"rNuFrrp9Z8~>
-q#;Vss8Tl*ABFlOoDe^fp%A@Grr<#ls7QEis6fperVlinp]'pTrVm!!p](-fs8Vcus7cQUrVQ'\
-rr)j&o)Ja]r4;RsoDJUirr)forrE%LrIOpQrVlg"rNuFrrp9Z8~>
-q#;Vss8Tl*ABFlOoDe^fp%A@Grr<#ls7QEis6fperVlinp]'pTrVm!!p](-fs8Vd-s7u]\s8Vfm
-s8DikoA]K;lh7m]q"":]"oJ)go_/05rIOpQrVlg"rNuFrrp9Z8~>
-pAY0b"TAB,!WW<#qtg?mpAY(:mJZtZo`+g_s8VfmpAb-ls7u]ds7ZK_s8VNerp]a`s7ZKkrql`q
-s8!B+p@RnDnG*"KYke%co%E[$qu6Tp"9/2prdk*#s0D\)qZ?fr\*<pC~>
-pAY0b"TAB,!WW<#qtg?mpAY(BmJZtZo`+g_s8VfmpAb-ls7u]ds7ZK_s8VNerp]a`s7ZKls8Vrl
-qsjCZqYpT`[/L".mJln[s8ITLJcDhOs8)ltrO;%kJ,~>
-pAY0b"TAB,!WW<#qtg?mpAY(8mJZtZo`+g_s8VfmpAb-ls7u]ds7ZK_s8VNerp]a`s7Z<h$hEob
-q"jj_q>L9Y[Jg+,mf3._rrrAuq"FL]JcC<$ZN't%!rr5.l2Q8~>
-q#:s*s82ios8V`kp&G!es7cQnnG`G)qZ$Tns8Vrqs8D`ms8;osrVuons8W&sq#16mpBLZlrpKUV
-rr)j.r:0FOp$hkUVtJs7nac5Grr)j!rqlTlJcC<$YQ"b&[J]gtm/MS~>
-q#:s*s82ios8V`kp&G!es7cQnnG`G)qZ$Tns8Vrqs8D`ms8;osrVuons8W&sq#16mpC.)rs7QEd
-r;HTks8W&srrr9!s0;Upqu?ZoJcC<$WrE5![J]gtm/MS~>
-q#:s*s82ios8V`kp&G!es7cQnnG`G)qZ$Tns8Vrqs8D`ms8;osrVuons8W&sq#16mpB1His82cp%
-K#qsqXaR^s82irZN'FhrrDrqq>gJFs+13Rrri5,r:p3Vs*t~>
-kl3.%s7--hp]'gaq#Bpbp&Fdds82fqr;Zfls8W)up](6ms7Q*cpAFIUpAY!i%K?8!o_JLaYkRqe
-o(`+Ys8Vusrdk*#s0D\)rWN9!ZhjOas*t~>
-kl3:)s7--hp]'gaq#Bpbp&Fdds82fqr;Zfls8W)up](6ms7Q*cpAb!hqu?ZprVQTo"o\8prqs5(
-rrE&trri;tqYU5Bs+13Qs8W'$s895"qWn03~>
-kl2sus7--hp]'gaq#Bpbp&Fdds82fqr;Zfls8W)up](6ms7Q*cpA=miqZZ`jqu-Eirr3,.q>L9g
-q>^HmJcC<$Z2ak'"TSD+qYKOXJ,~>
-l2LbQrVllnrr2uYrr482s8DWjrVlfks763`s763imf3:Uq>^Kgs82TcpAFsarr;ioqu6o-rVl`g
-r:U!brrW2trdk*#s/uA'\c;Wos*t~>
-l2LbQrVllnrr2uYrr3i&s8DWjrVlfks763`s763imf3:Uq>^KgrVmB,s8MTbqt9gbrr)fnrNcG&
-$2a_opA=[]qY^>Ds+13NrrTb2rTjK6~>
-l2LbQrVllnrr2uYrr3i&s8DWjrVlfks763`s763imf3:Uq>^KgqYp]gpA"4Qqu6o(o_A4Sq==<3
-s+13HrrTb2rTjK6~>
-kl1hcs6B.SlMgekq>9jbs7$'apAadQrVluupAb$ers8Aos7c?Jq>9m\s8W#t^%D=+rqubHs+13U
-rri2lqtIP`s*t~>
-kl1hcs6B.SlMgekq>9jbs7$'apAadQrVluupAb$ertbA(s7lW[s8Vlorr)cmqu$?co('*crr2p&
-rquWgq"jmdJcC<$YQ"b%pA=Tml2Q8~>
-kl1hcs6B.SlMgekq>9jbs7$'apAadQrVluupAb$ers8Aos8)c_s8Vilr;lrtrVm3#Y4M;Xq"X^_
-rIP!"s/Q)%qY'g\[d!gB~>
-kl1YErr4JG1]S,[qpuYr\Gsba*kVIPUD46bs0O*i`9t/h!4i-W*#lopqm@X\aS5N1"M+R4qt9aa
-!WN%Krdk'Rrr2p#rjVn(rp9Z8~>
-kl1YErr4YL1]S,[qpuYr\Gsba*kVIPUD46bs0O*i`9t/h!4i-W*$*3%s19Wl`q00+p\Xjeq@9P$
-qt0IZqtg-aq"adarIOs!rilD$rri>1rql]]s*t~>
-kl1YErr4SJ1]S,[qpuYr\Gsba*kVIPUD46bs0O*i`9t/h!4i-W*$!6's19Tja7TE3rVZ[&Vt0EF
-l1k#Jr;?Qks+10#rj)P&rri>1rql]]s*t~>
-kPm.-s8;`Zs0F$O2P@W<s"1aZ4S/JHW4=VQ>EGpIs%/<e8c(uNV*Y.T2kc]urqZQo_>aH9q>^GF
-s+13Ks8Vs!s805'rp0T7~>
-kPmU:s8;`Zs0F$O2P@W<s"1aZ4S/JHW4=VQ>EGpIs%/<e8c(uNV*Y.V4/\c4s8MfeqY^3er3Q:u
-s8)`ps8;oo"8r,srdk*#s0;V(qZQrr[Jp0ks*t~>
-kPm+,s8;`Zs0F$O2P@W<s"1aZ4S/JHW4=VQ>EGpIs%/<e8c(uNV*Y.U3iAZ6rr2utrVlosZi9e&
-o_\LarIP!"s/Z2"qZQrr[Jp0ks*t~>
-kl1\\qY^@AlllBAm-keX)X?9$s8Ni'k9'^0o(!@l$LZabmg]'Y'`[tGn*UYZqYL3l[Jp10qYBj^
-qu6Tp!WN"JqgnXKqZd*!s80;*rTjK6~>
-kl1\\qY^@KlllBAm-keX)X?9$s8Ni'k9'^0o(!@l$LZabmg]'W%fQ/@p%B-uqYpHlrr<#qWqHAj
-rs8Q&qtg*_q>'l<qgnXKqZd*!s80;*rTjK6~>
-kl1\\qY^@?lllBAm-keX)X?9$s8Ni'k9'^0o(!@l$LZabmg]'X%fQ)=p%TC$rsJ`%poaGms8N&p
-rquZmrIOisqm$#&s8Dup\,ZEms*t~>
-kl3p<s8VZis7^YtrrDTh!<3'!rrr)q!;HKnqZ-Zr!;cs"q@!<'s8NT&rrMWa!;Q$[rqH$\qu">/
-oChkJme?PVJcC<$WrE;&r:d]#q"="RJ,~>
-kl3I/s8VZis7^YtrrDTh!<3'!rrr)q!;HKnqZ-Zr!;cs"q@!<'s8E/er;lTk"98)ps7l9drs$11
-oDS^hrr2rprdk*#s02M-rquN"s7l9Rs*t~>
-kl3[5s8VZis7^YtrrDTh!<3'!rrr)q!;HKnqZ-Zr!;cs"q@!<'s8<&ar;l]n#QOf's8)<]q"r#0
-qu6TqrIP!"s/5l$rquN"s7l9Rs*t~>
-kPmO8s8VclQWsIb&,G`'r>"Dc'DDG>n+-e`*pE&4r"&)f!ril'm2Z0+)rolpo_/1M`N68LY,'4D
-riH<uYH4q5r3Ls[J[2Pd"LP8;`hVeuJ,~>
-kPmjAs8VclQWsIb&,G`'r>"Dc'DDG>n+-e`*pE&4r"&)f!ril'kn<^c)s?H1rVcceag&:dZ_PUI
-[_'5Z]="uNZ%933ZE:D8[&gXSUZqf/~>
-kPm+,s8VclQWsIb&,G`'r>"Dc'DDG>n+-e`*pE&4r"&)f!ril'kRmL`)sZc9rr3T'b,qhNXf9j_
-]=bh_\[8`LZMh"YZ@T<dZ37P9[)Sm*s*t~>
-l2NF7s7ZKfs+Y.cqYrsWSk&`HX/%oTs!b4D:\clIUHec9[APt`5b"cIqYrjG[l3pTrX#q2qY^9g
-qu$HmJcC<$WrE8%s80;*rTjK6~>
-l2NF7s7ZKfs+Y.cqYrsWSk&`HX/%oTs!b4D:\clIUHec9[APt`5b"]CpAI:B\2sH]rrTP+qgncu
-s.fStrr;l)s8D9`J,~>
-l2NF7s7ZKfs+Y.cqYrsWSk&`HX/%oTs!b4D:\clIUHec9[APt`5b"`Cp&77D\i]cds8Mrs[=S@/
-s.TGrrr;l)s8D9`J,~>
-kl37(s8W)n+92cLpWF-s_XtbW(s`-TbS_;9s2Q<1_<8KG*khTt'c4[bs100ca8#Z9ZiBoRs+13F
-rrrE%qmZV(li2J~>
-kl37(s8W)n+92cLpWF-s_XtbW(s`-TbS_;9s2Q<1_<8KG*khTt'c4[bs100ca8#Z9ZiBoRs+13F
-rrrE%qmZV(li2J~>
-kl37(s8W)n+92cLpWF-s_XtbW(s`-TbS_;9s2Q<1_<8KG*khTt'c4[bs100ca8#Z9ZiBoRs+13F
-rrrE%qmZV(li2J~>
-l2M4js8Dffs7cQfp](6fmJ[%lo`+p\s8)c^s8UpRrrqZep$)MQrr3/gs8VfmlhUP^ZiBoRs+13F
-rrrE%qmZV(li2J~>
-l2M4js8Dffs7cQfp](6fmJ[%lo`+p\s8)c^s8UpRrrqZep$)MQrr3/gs8VfmlhUP^ZiBoRs+13F
-rrrE%qmZV(li2J~>
-l2M4js8Dffs7cQfp](6fmJ[%lo`+p\s8)c^s8UpRrrqZep$)MQrr3/gs8VfmlhUP^ZiBoRs+13F
-rrrE%qmZV(li2J~>
-l2N=)s7lWomf3%Xs7?'dqZ$TkpAasfs6BIYs7>XXli6bNs7?9jp@J=ajT#8ApAY3#s8.BIJcDMF
-"oeQ!\,ZEms*t~>
-l2N=)s7lWomf3%Xs7?'dqZ$TkpAasfs6BIYs7>XXli6bNs7?9jp@J=ajT#8ApAY3#s8.BIJcDMF
-"oeQ!\,ZEms*t~>
-l2N=)s7lWomf3%Xs7?'dqZ$TkpAasfs6BIYs7>XXli6bNs7?9jp@J=ajT#8ApAY3#s8.BIJcDMF
-"oeQ!\,ZEms*t~>
-kl2:hrqHHmoD/.\p&G'jq=jphnbN+_&,lP-q>^Kls8DipmJlnZqYgEqq#Bs]rrTP,qgncus.fSt
-rr;l)s8D9`J,~>
-kl2:hrqHHmoD/.\p&G'jq=jphnbN+_&,lP-q>^Kls8DipmJlnZqYgEqq#Bs]rrTP,qgncus.fSt
-rr;l)s8D9`J,~>
-kl2:hrqHHmoD/.\p&G'jq=jphnbN+_&,lP-q>^Kls8DipmJlnZqYgEqq#Bs]rrTP,qgncus.fSt
-rr;l)s8D9`J,~>
-l2Lq_rr<#sq#:9qp&G']rr2umrr3N#qu>XTlMU\Sp%JFckl:JWrs/8tn,NFas8Mio!jhq(JcC<$
-U]1Mss80;*rTjK6~>
-l2Lq_rr<#sq#:9qp&G']rr2umrr3N#qu>XTlMU\Sp%JFckl:JWrs/8tn,NFas8Mio!jhq(JcC<$
-U]1Mss80;*rTjK6~>
-l2Lq_rr<#sq#:9qp&G']rr2umrr3N#qu>XTlMU\Sp%JFckl:JWrs/8tn,NFas8Mio!jhq(JcC<$
-U]1Mss80;*rTjK6~>
-kPl(^s8Vfmme?SXrVuiaq#C!brrr5urr<#jrVm,ms7lNis8VEarr`2tqt9aa!jhq(JcC<$U]1Ms
-s80;*rTjK6~>
-kPl(^s8Vfmme?SXrVuiaq#C!brrr5urr<#jrVm,ms7lNis8VEarr`2tqt9aa!jhq(JcC<$U]1Ms
-s80;*rTjK6~>
-kPl(^s8Vfmme?SXrVuiaq#C!brrr5urr<#jrVm,ms7lNis8VEarr`2tqt9aa!jhq(JcC<$U]1Ms
-s80;*rTjK6~>
-kl1hYs8MQgq>L=9nc/Xfs8Vlls7Q3fo`+UYs7lNlp&Fm^s8Vros7cQkr;Q]ro(i:eZiBoRs+13F
-rrrE%qmZV(li2J~>
-kl1hYs8MQgq>L=9nc/Xfs8Vlls7Q3fo`+UYs7lNlp&Fm^s8Vros7cQkr;Q]ro(i:eZiBoRs+13F
-rrrE%qmZV(li2J~>
-kl1hYs8MQgq>L=9nc/Xfs8Vlls7Q3fo`+UYs7lNlp&Fm^s8Vros7cQkr;Q]ro(i:eZiBoRs+13F
-rrrE%qmZV(li2J~>
-kPm78qu?WfI/s<Daqbl3rPB]m`;f\T)'H<d^)%O/s25fr])9Ph'>OSI]HeE6s8W&rrr3<(s0D\)
-qtg?mrIP!"s/,eur42k,li2J~>
-kPm78qu?WfI/s<Daqbl3rPB]m`;f\T)'H<d^)%O/s25fr])9Ph'>OSI]HeE6s8W&rrr3<(s0D\)
-qtg?mrIP!"s/,eur42k,li2J~>
-kPm78qu?WfI/s<Daqbl3rPB]m`;f\T)'H<d^)%O/s25fr])9Ph'>OSI]HeE6s8W&rrr3<(s0D\)
-qtg?mrIP!"s/,eur42k,li2J~>
-l2NF.s7lWkrr@TSs6rU[Z9&"TTWD;es$rca2uc+BXZ6;NT!L?T0s7TGpARmTV+(%UrsSc%r;Q`-
-s8W&ns8@NKJcD\K#5e5qppC"sli2J~>
-l2NF.s7lWkrr@TSs6rU[Z9&"TTWD;es$rca2uc+BXZ6;NT!L?T0s7TGpARmTV+(%UrsSc%r;Q`-
-s8W&ns8@NKJcD\K#5e5qppC"sli2J~>
-l2NF.s7lWkrr@TSs6rU[Z9&"TTWD;es$rca2uc+BXZ6;NT!L?T0s7TGpARmTV+(%UrsSc%r;Q`-
-s8W&ns8@NKJcD\K#5e5qppC"sli2J~>
-kl3j6oDedds8N)mrsmcK(B4j:gB7?Q$1I0qkmd=D*V0d/koT^Ch=D!Rndk![+oh*1s8W&ts88qi
-qZ$Kmp&0IAJcDSH!kA:.li2J~>
-kl3j6oDedds8N)mrsmcK(B4j:gB7?Q$1I0qkmd=D*V0d/koT^Ch=D!Rndk![+oh*1s8W&ts88qi
-qZ$Kmp&0IAJcDSH!kA:.li2J~>
-kl3j6oDedds8N)mrsmcK(B4j:gB7?Q$1I0qkmd=D*V0d/koT^Ch=D!Rndk![+oh*1s8W&ts88qi
-qZ$Kmp&0IAJcDSH!kA:.li2J~>
-l2LbQrVnPCs6TgdrrE*!"8i0!rsJf+!<<'!rs/N&"9/N(rrD]qs8N*!rt,.l!;cTms8D`lrrBt7
-rrDtJs+13Jrs&;spU:,"rp9Z8~>
-l2LbQrVnPCs6TgdrrE*!"8i0!rsJf+!<<'!rs/N&"9/N(rrD]qs8N*!rt,.l!;cTms8D`lrrBt7
-rrDtJs+13Jrs&;spU:,"rp9Z8~>
-l2LbQrVnPCs6TgdrrE*!"8i0!rsJf+!<<'!rs/N&"9/N(rrD]qs8N*!rt,.l!;cTms8D`lrrBt7
-rrDtJs+13Jrs&;spU:,"rp9Z8~>
-kl37/rVu*]p&>B_s!$Xp$2bS%q[)Wh$L@-drrW5b!<38siY)8!lMDUppBgBf$i^/8lc--3ZEBJ4
-ZEC=9Ye>UpJ[DA_"L5Y`T`+0UJ,~>
-kl37/rVu*]p&>B_s!$Xp$2bS%q[)Wh$L@-drrW5b!<38siY)8!lMDUppBgBf$i^/8lc--3ZEBJ4
-ZEC=9Ye>UpJ[DA_"L5Y`T`+0UJ,~>
-kl37/rVu*]p&>B_s!$Xp$2bS%q[)Wh$L@-drrW5b!<38siY)8!lMDUppBgBf$i^/8lc--3ZEBJ4
-ZEC=9Ye>UpJ[DA_"L5Y`T`+0UJ,~>
-l2NU9s82cprUp0js78RJSk8rHVO'aOqEh3^6hN^1Yu(<kUTd)J8uS79s8P<hTfiAOs8Vrkrr35C
-s8)Wks7lMCs+13Krs&H!p:1/!p[%p1~>
-l2NU9s82cprUp0js78RJSk8rHVO'aOqEh3^6hN^1Yu(<kUTd)J8uS79s8P<hTfiAOs8Vrkrr35C
-s8)Wks7lMCs+13Krs&H!p:1/!p[%p1~>
-l2NU9s82cprUp0js78RJSk8rHVO'aOqEh3^6hN^1Yu(<kUTd)J8uS79s8P<hTfiAOs8Vrkrr35C
-s8)Wks7lMCs+13Krs&H!p:1/!p[%p1~>
-l2NL;s8Vuds7lQno`)<K-aWie+Y:A)s3(lk]`$+u%_D\N*=pg"`u?,Co`)?2)U\<Trr)j!q=LZ^
-rVlosqLSQqr2KSsrqcZl\`s-E~>
-l2NL;s8Vuds7lQno`)<K-aWie+Y:A)s3(lk]`$+u%_D\N*=pg"`u?,Co`)?2)U\<Trr)j!q=LZ^
-rVlosqLSQqr2KSsrqcZl\`s-E~>
-l2NL;s8Vuds7lQno`)<K-aWie+Y:A)s3(lk]`$+u%_D\N*=pg"`u?,Co`)?2)U\<Trr)j!q=LZ^
-rVlosqLSQqr2KSsrqcZl\`s-E~>
-kl2_#qu?]as7H?hqu?$_s7H?gq>UEnnGiI_s7c-bs7ZHl&GcA%rr2fpp\k-jnGiOdq>L9l#.+@0
-qss^_JcC<$W;d)#rr;r'qYKOXJ,~>
-kl2_#qu?]as7H?hqu?$_s7H?gq>UEnnGiI_s7c-bs7ZHl&GcA%rr2fpp\k-jnGiOdq>L9l#.+@0
-qss^_JcC<$W;d)#rr;r'qYKOXJ,~>
-kl2_#qu?]as7H?hqu?$_s7H?gq>UEnnGiI_s7c-bs7ZHl&GcA%rr2fpp\k-jnGiOdq>L9l#.+@0
-qss^_JcC<$W;d)#rr;r'qYKOXJ,~>
-^Ae<5s8VukrrTP,qgncus.fStrr;l)s8D9`J,~>
-^Ae<5s8VukrrTP,qgncus.fStrr;l)s8D9`J,~>
-^Ae<5s8VukrrTP,qgncus.fStrr;l)s8D9`J,~>
-_>ac5s8MrrnGiL`rrTP,qgncus.fStrr;l)s8D9`J,~>
-_>ac5s8MrrnGiL`rrTP,qgncus.fStrr;l)s8D9`J,~>
-_>ac5s8MrrnGiL`rrTP,qgncus.fStrr;l)s8D9`J,~>
-_#FW2s8N&os7Z9g!jhq(JcC<$U]1Mss80;*rTjK6~>
-_#FW2s8N&os7Z9g!jhq(JcC<$U]1Mss80;*rTjK6~>
-_#FW2s8N&os7Z9g!jhq(JcC<$U]1Mss80;*rTjK6~>
-_#FW7s8Vurs7Z9g!jhq(JcC<$U]1Mss80;*rTjK6~>
-_#FW7s8Vurs7Z9g!jhq(JcC<$U]1Mss80;*rTjK6~>
-_#FW7s8Vurs7Z9g!jhq(JcC<$U]1Mss80;*rTjK6~>
-_>a`(s7?9jr;HEj!jhq(JcC<$U]1Mss80;*rTjK6~>
-_>a`(s7?9jr;HEj!jhq(JcC<$U]1Mss80;*rTjK6~>
-_>a`(s7?9jr;HEj!jhq(JcC<$U]1Mss80;*rTjK6~>
-^Ae>R*$)ilq>UN&s8.BIJcDMF"oeQ!\,ZEms*t~>
-^Ae>R*$)ilq>UN&s8.BIJcDMF"oeQ!\,ZEms*t~>
-^Ae>R*$)ilq>UN&s8.BIJcDMF"oeQ!\,ZEms*t~>
-_>a`1s5uVCRRd/Q!jhq(JcC<$U]1Mss80;*rTjK6~>
-_>a`1s5uVCRRd/Q!jhq(JcC<$U]1Mss80;*rTjK6~>
-_>a`1s5uVCRRd/Q!jhq(JcC<$U]1Mss80;*rTjK6~>
-_#FT-rsA2p!;QQqZiBoRs+13FrrrE%qmZV(li2J~>
-_#FT-rsA2p!;QQqZiBoRs+13FrrrE%qmZV(li2J~>
-_#FT-rsA2p!;QQqZiBoRs+13FrrrE%qmZV(li2J~>
-_>b>Hs7uitp]^]js8W#lrr;r+rVuiis8Vr1rr`&rs7jS5"8MorpqHb4rr;rns8W&6rs/N%s8W)q
-s80Y4#58)rrPAC*li2J~>
-_>b>Hs7uitp]^]js8W#lrr;r+rVuiis8Vr1rr`&rs7jS5"8MorpqHb4rr;rns8W&6rs/N%s8W)q
-s80Y4#58)rrPAC*li2J~>
-_>b>Hs7uitp]^]js8W#lrr;r+rVuiis8Vr1rr`&rs7jS5"8MorpqHb4rr;rns8W&6rs/N%s8W)q
-s80Y4#58)rrPAC*li2J~>
-_#FT8rsJ#j!<3!-m)ulG\@8KOW4BOJXgl3Q#H5;AZF-jC\&ko\\$`BHZEq3B_6O<QYd_'G\>QgP
-Z3S"FYcb[<^oY>Ws02X=Vu#]YJ,~>
-_#FT8rsJ#j!<3!-m)ulG\@8KOW4BOJXgl3Q#H5;AZF-jC\&ko\\$`BHZEq3B_6O<QYd_'G\>QgP
-Z3S"FYcb[<^oY>Ws02X=Vu#]YJ,~>
-_#FT8rsJ#j!<3!-m)ulG\@8KOW4BOJXgl3Q#H5;AZF-jC\&ko\\$`BHZEq3B_6O<QYd_'G\>QgP
-Z3S"FYcb[<^oY>Ws02X=Vu#]YJ,~>
-_>b>LnbYhCY<i3cs8Vicr;Zf;r;$BfrV?H/rs-11rqZTlqYA85#Pn5os7uZg]>4FNq>C9cp;H^A
-rs/Q"rVHH,q=hW'"SK>'s7bm[J,~>
-_>b>LnbYhCY<i3cs8Vicr;Zf;r;$BfrV?H/rs-11rqZTlqYA85#Pn5os7uZg]>4FNq>C9cp;H^A
-rs/Q"rVHH,q=hW'"SK>'s7bm[J,~>
-_>b>LnbYhCY<i3cs8Vicr;Zf;r;$BfrV?H/rs-11rqZTlqYA85#Pn5os7uZg]>4FNq>C9cp;H^A
-rs/Q"rVHH,q=hW'"SK>'s7bm[J,~>
-^]+N1`[qeMoD8Cb#5I_aqu?]jrr9h5"0DP&rr)l=rVc`urr<#"_>X<3!rfS,_>X<3!j)D$_>OW2
-r;Zeur;ZTZs*t~>
-^]+N1`[qeMoD8Cb#5I_aqu?]jrr9h5"0DP&rr)l=rVc`urr<#"_>X<3!rfS,_>X<3!j)D$_>OW2
-r;Zeur;ZTZs*t~>
-^]+N1`[qeMoD8Cb#5I_aqu?]jrr9h5"0DP&rr)l=rVc`urr<#"_>X<3!rfS,_>X<3!j)D$_>OW2
-r;Zeur;ZTZs*t~>
-_>ao5rVu]fs763cr;QZnrs-74q>U-dp@c?&#.+@0n+Z\Wa8ZABqY0IYs0KQA#5\,js8Vu"^]4?+
-!rDr'^&J91s/Q,!o'HC,~>
-_>ao5rVu]fs763cr;QZnrs-74q>U-dp@c?&#.+@0n+Z\Wa8ZABqY0IYs0KQA#5\,js8Vu"^]4?+
-!rDr'^&J91s/Q,!o'HC,~>
-_>ao5rVu]fs763cr;QZnrs-74q>U-dp@c?&#.+@0n+Z\Wa8ZABqY0IYs0KQA#5\,js8Vu"^]4?+
-!rDr'^&J91s/Q,!o'HC,~>
-^]+K0s8Vrqp\t0rqt^'aqmHG'"8r3!rPAI8\,?:*rVt"=rVm!!s89@Brquctqn<$GrW3&urr3&7
-r;XV4"S_l_s/G8_J,~>
-^]+K0s8Vrqp\t0rqt^'aqmHG'"8r3!rPAI8\,?:*rVt"=rVm!!s89@Brquctqn<$GrW3&urr3&7
-r;XV4"S_l_s/G8_J,~>
-^]+K0s8Vrqp\t0rqt^'aqmHG'"8r3!rPAI8\,?:*rVt"=rVm!!s89@Brquctqn<$GrW3&urr3&7
-r;XV4"S_l_s/G8_J,~>
-_>al@s76-gs8;cjqu6Nn#H@S"rU^'hqSE15_!V!srrD`6rrD`jrrW&a^qp$Ur;ZZfs7+>(_>ac:
-qu?]b`pio>rrU%/q!7s1~>
-_>al@s76-gs8;cjqu6Nn#H@S"rU^'hqSE15_!V!srrD`6rrD`jrrW&a^qp$Ur;ZZfs7+>(_>ac:
-qu?]b`pio>rrU%/q!7s1~>
-_>al@s76-gs8;cjqu6Nn#H@S"rU^'hqSE15_!V!srrD`6rrD`jrrW&a^qp$Ur;ZZfs7+>(_>ac:
-qu?]b`pio>rrU%/q!7s1~>
-_#FQ8s8Vchrr3<'r;HWorVlfrn,E=fp]&)/!;ZWo"8qups2k9?rrMrnrr2uo_>X]8s8Mios763+
-rW3&prr3&ms8Tq7#QOPurp]sfqX"64~>
-_#FQ8s8Vchrr3<'r;HWorVlfrn,E=fp]&)/!;ZWo"8qups2k9?rrMrnrr2uo_>X]8s8Mios763+
-rW3&prr3&ms8Tq7#QOPurp]sfqX"64~>
-_#FQ8s8Vchrr3<'r;HWorVlfrn,E=fp]&)/!;ZWo"8qups2k9?rrMrnrr2uo_>X]8s8Mios763+
-rW3&prr3&ms8Tq7#QOPurp]sfqX"64~>
-\,QO+q>:-j#5/#joDejfaSuM;s82`fs8Voorr`/us8L%<"8ViooY1>/r;Zfgs7X;/"TJ2rq"t$i
-"oA9!q#Bj.rsSi#rqucgs8W#sq>0FWJ,~>
-\,QO+q>:-j#5/#joDejfaSuM;s82`fs8Voorr`/us8L%<"8ViooY1>/r;Zfgs7X;/"TJ2rq"t$i
-"oA9!q#Bj.rsSi#rqucgs8W#sq>0FWJ,~>
-\,QO+q>:-j#5/#joDejfaSuM;s82`fs8Voorr`/us8L%<"8ViooY1>/r;Zfgs7X;/"TJ2rq"t$i
-"oA9!q#Bj.rsSi#rqucgs8W#sq>0FWJ,~>
-\c2^!r;HWtp&FXRrVlolrQ5'@nGiCbrs%rls8;fpqo8X@q>^Hos8)cl_>a]7qu?*am_8]-qZ$Tk
-m/Q_Rr:0dd!V?-7rs&E$qYgHjq#:9sirB&Us7bm[J,~>
-\c2^!r;HWtp&FXRrVlolrQ5'@nGiCbrs%rls8;fpqo8X@q>^Hos8)cl_>a]7qu?*am_8]-qZ$Tk
-m/Q_Rr:0dd!V?-7rs&E$qYgHjq#:9sirB&Us7bm[J,~>
-\c2^!r;HWtp&FXRrVlolrQ5'@nGiCbrs%rls8;fpqo8X@q>^Hos8)cl_>a]7qu?*am_8]-qZ$Tk
-m/Q_Rr:0dd!V?-7rs&E$qYgHjq#:9sirB&Us7bm[J,~>
-\,QU(qt'jWrr3)so`+j0rrr<"p[/"Qr;Qous8W&qaSuJ9s8VlmrqjJ1!;QQn!W23!rt"i!s6p!f
-nG`Ifo`+RYs8:4C%.X5prr;rfs8Vrgs7kp[J,~>
-\,QU(qt'jWrr3)so`+j0rrr<"p[/"Qr;Qous8W&qaSuJ9s8VlmrqjJ1!;QQn!W23!rt"i!s6p!f
-nG`Ifo`+RYs8:4C%.X5prr;rfs8Vrgs7kp[J,~>
-\,QU(qt'jWrr3)so`+j0rrr<"p[/"Qr;Qous8W&qaSuJ9s8VlmrqjJ1!;QQn!W23!rt"i!s6p!f
-nG`Ifo`+RYs8:4C%.X5prr;rfs8Vrgs7kp[J,~>
-\c2[%rVm#fs)SCsrr3&ls8L.?!<2ut"Ss&;&XrXt!W2f9rs&2so_ngJ#JpEErUg,o!#)NNrsngQ
-$NKkS4UE5-s69R`o[*U<q>UC%mJles"p2(#.Kgcpm/MS~>
-\c2[%rVm#fs)SCsrr3&ls8L.?!<2ut"Ss&;&XrXt!W2f9rs&2so_ngJ#JpEErUg,o!#)NNrsngQ
-$NKkS4UE5-s69R`o[*U<q>UC%mJles"p2(#.Kgcpm/MS~>
-\c2[%rVm#fs)SCsrr3&ls8L.?!<2ut"Ss&;&XrXt!W2f9rs&2so_ngJ#JpEErUg,o!#)NNrsngQ
-$NKkS4UE5-s69R`o[*U<q>UC%mJles"p2(#.Kgcpm/MS~>
-\,Qd&s7u[#K+%_ZpAY'lm`>D;p$r(Zs8TDDs7H?gs8L+>#O29Ws8Upu#J^9BrX@)l%J@R;$3Yt]
-s8-'Ho)8FRrr2uid/OXPs6fmdm.UJX_\<(Gq614ms*t~>
-\,Qd&s7u[#K+%_ZpAY'lm`>D;p$r(Zs8TDDs7H?gs8L+>#O29Ws8Upu#J^9BrX@)l%J@R;$3Yt]
-s8-'Ho)8FRrr2uid/OXPs6fmdm.UJX_\<(Gq614ms*t~>
-\,Qd&s7u[#K+%_ZpAY'lm`>D;p$r(Zs8TDDs7H?gs8L+>#O29Ws8Upu#J^9BrX@)l%J@R;$3Yt]
-s8-'Ho)8FRrr2uid/OXPs6fmdm.UJX_\<(Gq614ms*t~>
-\c2s5s8Mlos5a(Xrr3#uo>gk2rr)j&p&F(fs7ZKkrPnjAr;$'XRt1^Yrs&?"q@VQ0*l%^aq#gQp%
-K#qqs8W#es8:4C%f5htoD\dcr3@F?s7uR9m/MS~>
-\c2s5s8Mlos5a(Xrr3#uo>gk2rr)j&p&F(fs7ZKkrPnjAr;$'XRt1^Yrs&?"q@VQ0*l%^aq#gQp%
-K#qqs8W#es8:4C%f5htoD\dcr3@F?s7uR9m/MS~>
-\c2s5s8Mlos5a(Xrr3#uo>gk2rr)j&p&F(fs7ZKkrPnjAr;$'XRt1^Yrs&?"q@VQ0*l%^aq#gQp%
-K#qqs8W#es8:4C%f5htoD\dcr3@F?s7uR9m/MS~>
-\,Qp:s8MlprsSYpq>^KnrlG*En,N+]q=0?)rVlrqs80q<#Pn5rp#m+d!5\[?r:Bra!W]+i_>b&/
-rrE)k&j6i#l2:SUs8VT9rt#)#s8W)dp%8:uU\XrhFFifYJ,~>
-\,Qp:s8MlprsSYpq>^KnrlG*En,N+]q=0?)rVlrqs80q<#Pn5rp#m+d!5\[?r:Bra!W]+i_>b&/
-rrE)k&j6i#l2:SUs8VT9rt#)#s8W)dp%8:uU\XrhFFifYJ,~>
-\,Qp:s8MlprsSYpq>^KnrlG*En,N+]q=0?)rVlrqs80q<#Pn5rp#m+d!5\[?r:Bra!W]+i_>b&/
-rrE)k&j6i#l2:SUs8VT9rt#)#s8W)dp%8:uU\XrhFFifYJ,~>
-\GuU+s8NE!s7-0ds8W#rqT/[Ir;ZHis8JBds7?9cs8)cmao;VCs8MmObl7d[rri0<^r[M/rsT#(
-rrU?i'*&"*s7l?8rso#-rq-6jp](8b*<5i(>jME?~>
-\GuU+s8NE!s7-0ds8W#rqT/[Ir;ZHis8JBds7?9cs8)cmao;VCs8MmObl7d[rri0<^r[M/rsT#(
-rrU?i'*&"*s7l?8rso#-rq-6jp](8b*<5i(>jME?~>
-\GuU+s8NE!s7-0ds8W#rqT/[Ir;ZHis8JBds7?9cs8)cmao;VCs8MmObl7d[rri0<^r[M/rsT#(
-rrU?i'*&"*s7l?8rso#-rq-6jp](8b*<5i(>jME?~>
-Z2Y".s82Khs7+21&,cD's88Eds82ils8Vrqp<!=Fp](3l!!!]5%IsJps7$'^a8Z>A&^\K3r5&C@
-q>UHps#dX8*;9F/c2S=Os7?9jp](!b_%cg6@/g)js*t~>
-Z2Y".s82Khs7+21&,cD's88Eds82ils8Vrqp<!=Fp](3l!!!]5%IsJps7$'^a8Z>A&^\K3r5&C@
-q>UHps#dX8*;9F/c2S=Os7?9jp](!b_%cg6@/g)js*t~>
-Z2Y".s82Khs7+21&,cD's88Eds82ils8Vrqp<!=Fp](3l!!!]5%IsJps7$'^a8Z>A&^\K3r5&C@
-q>UHps#dX8*;9F/c2S=Os7?9jp](!b_%cg6@/g)js*t~>
-Z2Y"Cl2UeZq!cB)%fQG-k5PGb!!MTel2UVRa8ZSErTjL`lNQhYqu?]ba8ZA=r;Vm#$(\j2%fHJ/
-jP)9eUAt)is82ipdJjaSs8W&hs5"Xp$H2`?!!E;gs*t~>
-Z2Y"Cl2UeZq!cB)%fQG-k5PGb!!MTel2UVRa8ZSErTjL`lNQhYqu?]ba8ZA=r;Vm#$(\j2%fHJ/
-jP)9eUAt)is82ipdJjaSs8W&hs5"Xp$H2`?!!E;gs*t~>
-Z2Y"Cl2UeZq!cB)%fQG-k5PGb!!MTel2UVRa8ZSErTjL`lNQhYqu?]ba8ZA=r;Vm#$(\j2%fHJ/
-jP)9eUAt)is82ipdJjaSs8W&hs5"Xp$H2`?!!E;gs*t~>
-Z2Y'ls7ZKes8Vr;s8W!!p&G']rVm&ts8Dipn]Ce;rVuops6Td`s7?3h!WMl9rs/N&q#:<noCB`t
-&,cJ-qu?]cs8N&js8;`nrmC`Qqu?Tos7cQnrr;inrrD]Xs*t~>
-Z2Y'ls7ZKes8Vr;s8W!!p&G']rVm&ts8Dipn]Ce;rVuops6Td`s7?3h!WMl9rs/N&q#:<noCB`t
-&,cJ-qu?]cs8N&js8;`nrmC`Qqu?Tos7cQnrr;inrrD]Xs*t~>
-Z2Y'ls7ZKes8Vr;s8W!!p&G']rVm&ts8Dipn]Ce;rVuops6Td`s7?3h!WMl9rs/N&q#:<noCB`t
-&,cJ-qu?]cs8N&js8;`nrmC`Qqu?Tos7cQnrr;inrrD]Xs*t~>
-Yl=q*rVufqs8L+>%.F5pnc.qRp%n@]s7FA3&,?1is8VuerqZ6eqs4:^qoJd>jo>AF^Ae`?rVQNk
-s7uQlqu?]gs8W)Ers\/is7c*Us8Vrjs7?!Ns*t~>
-Yl=q*rVufqs8L+>%.F5pnc.qRp%n@]s7FA3&,?1is8VuerqZ6eqs4:^qoJd>jo>AF^Ae`?rVQNk
-s7uQlqu?]gs8W)Ers\/is7c*Us8Vrjs7?!Ns*t~>
-Yl=q*rVufqs8L+>%.F5pnc.qRp%n@]s7FA3&,?1is8VuerqZ6eqs4:^qoJd>jo>AF^Ae`?rVQNk
-s7uQlqu?]gs8W)Ers\/is7c*Us8Vrjs7?!Ns*t~>
-Z2XjrnG`Fgq=MZ+$gRcds8Vlor9"%Zou6q=nc&Ofr;ZforVufnaSuMCs8DornGN*rrsSW%li7"\
-s8Vrqs7Xh>s8;os#jM!]s8D'Zs6o4PJ,~>
-Z2XjrnG`Fgq=MZ+$gRcds8Vlor9"%Zou6q=nc&Ofr;ZforVufnaSuMCs8DornGN*rrsSW%li7"\
-s8Vrqs7Xh>s8;os#jM!]s8D'Zs6o4PJ,~>
-Z2XjrnG`Fgq=MZ+$gRcds8Vlor9"%Zou6q=nc&Ofr;ZforVufnaSuMCs8DornGN*rrsSW%li7"\
-s8Vrqs7Xh>s8;os#jM!]s8D'Zs6o4PJ,~>
-YQ"b&mf3=^aSue?s7cQnnbi4^m/R+^pAas0rrDHcrsA>tp[J4Rs8VrmaSuMAs8;iqqu?Z3rs/)n
-s8W&pq=O[d"8;cpnB_+Ep\k-lqt^*\s8Mcms5s:Hs*t~>
-YQ"b&mf3=^aSue?s7cQnnbi4^m/R+^pAas0rrDHcrsA>tp[J4Rs8VrmaSuMAs8;iqqu?Z3rs/)n
-s8W&pq=O[d"8;cpnB_+Ep\k-lqt^*\s8Mcms5s:Hs*t~>
-YQ"b&mf3=^aSue?s7cQnnbi4^m/R+^pAas0rrDHcrsA>tp[J4Rs8VrmaSuMAs8;iqqu?Z3rs/)n
-s8W&pq=O[d"8;cpnB_+Ep\k-lqt^*\s8Mcms5s:Hs*t~>
-Z2Xmns8;lr!;sn;"T&/ks82cp"o[ras8Vo7rsJ\is8Vlms8Vrhq8WF<pAa^^s7O,+!W)HdrrrAr
-p&Fjcd/OUMs8V?`s763im/6\Zs7kp[J,~>
-Z2Xmns8;lr!;sn;"T&/ks82cp"o[ras8Vo7rsJ\is8Vlms8Vrhq8WF<pAa^^s7O,+!W)HdrrrAr
-p&Fjcd/OUMs8V?`s763im/6\Zs7kp[J,~>
-Z2Xmns8;lr!;sn;"T&/ks82cp"o[ras8Vo7rsJ\is8Vlms8Vrhq8WF<pAa^^s7O,+!W)HdrrrAr
-p&Fjcd/OUMs8V?`s763im/6\Zs7kp[J,~>
-Z2Xn(s7ZEk!<("=%JTc"o`+s`s8;osrV$!+rsnZ#rr;lps8V]js82iroZ7%9r;Zfls8Kh6&,?2!
-s8VZ_s8N&urVuHgq9]-Bo)8Rf!<)lr"T&/hs7>UWJ,~>
-Z2Xn(s7ZEk!<("=%JTc"o`+s`s8;osrV$!+rsnZ#rr;lps8V]js82iroZ7%9r;Zfls8Kh6&,?2!
-s8VZ_s8N&urVuHgq9]-Bo)8Rf!<)lr"T&/hs7>UWJ,~>
-Z2Xn(s7ZEk!<("=%JTc"o`+s`s8;osrV$!+rsnZ#rr;lps8V]js82iroZ7%9r;Zfls8Kh6&,?2!
-s8VZ_s8N&urVuHgq9]-Bo)8Rf!<)lr"T&/hs7>UWJ,~>
-JcD\K$i^,(rqQ?is7cQfqYpL&rqufbs8Vclq>^9frVm*#s8;Qis6kO=Zi>O~>
-JcD\K$i^,(rqQ?is7cQfqYpL&rqufbs8Vclq>^9frVm*#s8;Qis6kO=Zi>O~>
-JcD\K$i^,(rqQ?is7cQfqYpL&rqufbs8Vclq>^9frVm*#s8;Qis6kO=Zi>O~>
-JcD_L"8)Nkqu6U'nG*"ZqZ$<io)JadrVmB'qX=IMs7bs]s763irUtgBZN#F~>
-JcD_L"8)Nkqu6U'nG*"ZqZ$<io)JadrVmB'qX=IMs7bs]s763irUtgBZN#F~>
-JcD_L"8)Nkqu6U'nG*"ZqZ$<io)JadrVmB'qX=IMs7bs]s763irUtgBZN#F~>
-JcD\K#PJ,skP5&Vq#::#o_.tXp[nL^q>]j]rr`,to'u_Z#6"T$s7$'eJcE+WJ,~>
-JcD\K#PJ,skP5&Vq#::#o_.tXp[nL^q>]j]rr`,to'u_Z#6"T$s7$'eJcE+WJ,~>
-JcD\K#PJ,skP5&Vq#::#o_.tXp[nL^q>]j]rr`,to'u_Z#6"T$s7$'eJcE+WJ,~>
-JcD_L%IF,nrq731!!W#pnc/:\rt4`"s7lWbq=FXPs8VfhpAb$UJcDtSJ,~>
-JcD_L%IF,nrq731!!W#pnc/:\rt4`"s7lWbq=FXPs8VfhpAb$UJcDtSJ,~>
-JcD_L%IF,nrq731!!W#pnc/:\rt4`"s7lWbq=FXPs8VfhpAb$UJcDtSJ,~>
-JcDYJ(B"(0!<)ros7`Y[qZ$!UqYL6Xs8VZnrr3B#s6BX_o`+pbs8DYBs0VfV~>
-JcDYJ(B"(0!<)ros7`Y[qZ$!UqYL6Xs8VZnrr3B#s6BX_o`+pbs8DYBs0VfV~>
-JcDYJ(B"(0!<)ros7`Y[qZ$!UqYL6Xs8VZnrr3B#s6BX_o`+pbs8DYBs0VfV~>
-JcD\K%IX/op'^Tks8W#X+WmHXrtN=!%3G$N!W_f!)%smerr?X.s8VM<s0M`U~>
-JcD\K%IX/op'^Tks8W#X+WmHXrtN=!%3G$N!W_f!)%smerr?X.s8VM<s0M`U~>
-JcD\K%IX/op'^Tks8W#X+WmHXrtN=!%3G$N!W_f!)%smerr?X.s8VM<s0M`U~>
-JcDYJ,Q.-4$NL2-q>^3hj8f&KruR9cs7cNns8OOQW&XD@!TX4FpOW@Ms*t~>
-JcDYJ,Q.-4$NL2-q>^3hj8f&KruR9cs7cNns8OOQW&XD@!TX4FpOW@Ms*t~>
-JcDYJ,Q.-4$NL2-q>^3hj8f&KruR9cs7cNns8OOQW&XD@!TX4FpOW@Ms*t~>
-JcD_Ls8)ouqYL9kruD$,-OBeQs7H]rs7lWg"8_usqu-TqrrE)qs8VkFs0M`U~>
-JcD_Ls8)ouqYL9kruD$,-OBeQs7H]rs7lWg"8_usqu-TqrrE)qs8VkFs0M`U~>
-JcD_Ls8)ouqYL9kruD$,-OBeQs7H]rs7lWg"8_usqu-TqrrE)qs8VkFs0M`U~>
-JcDYJ"8)We!<3!=p&>01rrDikrud@%s7-*ro`$&DYVGtR!<;cmqtksEZi>O~>
-JcDYJ"8)We!<3!=p&>01rrDikrud@%s7-*ro`$&DYVGtR!<;cmqtksEZi>O~>
-JcDYJ"8)We!<3!=p&>01rrDikrud@%s7-*ro`$&DYVGtR!<;cmqtksEZi>O~>
-JcD_L!:^!f,P)ZLnGi7\Sdtl&q!J+-,6\YYs.2mR[1!_Rs8EQ/s4mYSq18RQs*t~>
-JcD_L!:^!f,P)ZLnGi7\Sdtl&q!J+-,6\YYs.2mR[1!_Rs8EQ/s4mYSq18RQs*t~>
-JcD_L!:^!f,P)ZLnGi7\Sdtl&q!J+-,6\YYs.2mR[1!_Rs8EQ/s4mYSq18RQs*t~>
-JcD\K-1g[,p\"LbrVulsp%n^]s8Dorq#16Wq>^Kis8VinqXaR_s7Z&8s0M`U~>
-JcD\K-1g[,p\"LbrVulsp%n^]s8Dorq#16Wq>^Kis8VinqXaR_s7Z&8s0M`U~>
-JcD\K-1g[,p\"LbrVulsp%n^]s8Dorq#16Wq>^Kis8VinqXaR_s7Z&8s0M`U~>
-JcDYJ!W)WkrsSc"s8V-Zp&FjVs7H<j&F9Arq>('cq#C-gs8Vops6tU>Zi>O~>
-JcDYJ!W)WkrsSc"s8V-Zp&FjVs7H<j&F9Arq>('cq#C-gs8Vops6tU>Zi>O~>
-JcDYJ!W)WkrsSc"s8V-Zp&FjVs7H<j&F9Arq>('cq#C-gs8Vops6tU>Zi>O~>
-JcD_L$M+5sqYfmXqZ$6crr2ulrVlrmqYC-j!V?9hrsAT#s6]gbmf3=^JcE+WJ,~>
-JcD_L$M+5sqYfmXqZ$6crr2ulrVlrmqYC-j!V?9hrsAT#s6]gbmf3=^JcE+WJ,~>
-JcD_L$M+5sqYfmXqZ$6crr2ulrVlrmqYC-j!V?9hrsAT#s6]gbmf3=^JcE+WJ,~>
-JcD\K"o/#qq>^!aruLn7m/R(bq=s1Rs8Dorp&G'Ym/R%arr;`fs763eJcE+WJ,~>
-JcD\K"o/#qq>^!aruLn7m/R(bq=s1Rs8Dorp&G'Ym/R%arr;`fs763eJcE+WJ,~>
-JcD\K"o/#qq>^!aruLn7m/R(bq=s1Rs8Dorp&G'Ym/R%arr;`fs763eJcE+WJ,~>
-JcD_L%fZM.qu>XTqY1$gs8V`irVm,js8;oos6fmbrs/,plMpVYs87HJZi>O~>
-JcD_L%fZM.qu>XTqY1$gs8V`irVm,js8;oos6fmbrs/,plMpVYs87HJZi>O~>
-JcD_L%fZM.qu>XTqY1$gs8V`irVm,js8;oos6fmbrs/,plMpVYs87HJZi>O~>
-JcF^/s8Miorr1RMrr*T%rr;Kfp&G'is8MThs8)ces8;iprt"o)s7H?`s8VrqqZ$T_s8%<H[/YX~>
-JcE"Ts8Mlp'D2>)nGi1\rV-<hmJ["Yrq$-cqu6U+q#CBds7H?kqZ$Els6]j_JcE+WJ,~>
-JcFR+r;HTorr(LL'D)8(o)JIas82irnc/Xds7?9fr;Q^,q#CBds7H?kqZ$Els6]j_JcE+WJ,~>
-JcFa0"9.ujrql^8qY]s_oD\des8Vros82ioq>^?bs8Dipp%\Odp\t0mpAP!kpAY'pqXjgemJZt^
-rqQKtqt:!es8V]irt4l&qtL-crVuops8Vloqu?Wnrr3N*rVQWiq>^Kfs7ZK_s8VflrsJc*q#:<n
-rVufqq>UBonq$hrs*t~>
-JcF^/-2RZArquTks8W#sq>^-frql`qqYgHks8;]mqt:!fqu??arr;`lrrDckrrDclrWN#gs8VWc
-s!7UBpA+I[p%eISnG`.[rpg$fs7?9fp]($es8Vurs7lWks8Doqrt"u)qu?Hes8VclpAa[_s7ZHl
-$NC)#rr<#ss8;omrr2uhJcFO*J,~>
-JcFR++o([(qYU*gp\jUUq>('jqYgHks8;]mqt:!fqu??arr;`lrrDckrs8>urVcQas8VZ[ru:e-
-s8)cqo)8LdoDe^^s7lQms82irq#C6krVc`q&,Q8%s7lEis7QEcs7--hpAY(!rr;cms8W&tr;ZTl
-rrDV@s4mX)~>
-JcFa0"TJ;qq"t'j#5J&mo(2YUrr4)-s8Vops8;osp&G'equ?]ns7ZHlpAap[q>L!ds7,j_rs8Q%
-l21AUr;Q]prs\Z%s5X.Zqu?]fqu?TlrrN&mr;S>2s7lHis8Vcks75d]rVuZ`s8VNdqYL6dqt]g_
-mf3%Us8W#sr;Z]pr;ZQcJcFI(J,~>
-JcFX-$MjGqrVlKes82`nrVn22s8Vops8;osp&G'equ?]ns7ZHlpAap[q>L!ds6oRYrr;ormJ6br
-rp]jai;EECrVl?\rV6?krr<#rq#(.CkPt>Rrr<#krr;Q\s8Dumo)JaXrqcKkp\XdWs6fp]p](9k
-s8;ops8;olp4<7ts*t~>
-JcFX-2#I"ApA".Ns8)QfqY'^_mJm4^s8W#ss7QElq"t*kqu?Bhs7ZKfo_JIYs8VTZrVuoqs6f[^
-'DVV-kPtSZs8V`fs7uZmr;Zfpq#(.CkPt>Rrr<#krr;Q\s8Dumo)JaXrqcKkp\XdWs6fp]p](9k
-s8;ops8;olp4<7ts*t~>
-JcF^/!VlWms!%:=s8;]^q"t'br;ZZos8;ihs8D-\s7?9gmf2eVs7?-frr2p.n,MqVs8W)hq=ikG
-o_eXdrVm<*o]Yc=kj\96r:U!brr3u7s60L_p](-Ys8Vopp&Fpfs8VWgs8Drns8W&krVluqs8Vfl
-rsSDtnc/4Us6p!fmJd+b!W)M@s4mX)~>
-JcF^/#5S8urqQ'\rr2uprr3l1q>^?ls8;ihs8D-\s7?9gmf2eVs7?-frr2p+n,MqVrr2c`q=iqL
-q#(0lrr<!(rVQWjs8Vrqp\t3mrZ(_5kl:\Ws826as7u]fs82cps7-*grVlZns8DZirr`)ss7ZHl
-$hF>fs7?$cn,NFTrr2ouqY#L?h#Dm~>
-JcF^/-i<rBqXX"Hqu?]os8Vubo)8Ics8;ihs8D-\s7?9gmf2eVs7?-frr2p+n,MqVs8Vudq=inK
-p\=aqp@nUYrVuTkpAb-kruCk7kl:\Ws826as7u]fs82cps7-*grVlZns8DZirr`)ss7ZHl$hF>f
-s7?$cn,NFTrr2ouqY#L?h#Dm~>
-JcF[.!VZNlrs/Dkq""+Oq=FRb$iL&)qu?]ks7Gj]rr2p(qt'ddq>^KaqtpBm"7cEgr;Q`rrW<#s
-rqud,r9`56qsNb7p$i"Np\+Xdrr4#,p]($fs8DEdmJm4Js7#^]p%n^Qs7c9\s7H3foDS\#nFZ,J
-li6eQs7u]bs7QEjrIP"%s*t~>
-JcF[.#kIfhrqcHas8;lr!;6<j$iL&)qu?]ks7Gj]rr2p(qt'ddq>^KaqtpBm"S)NfqYp9is8E-!
-s8Vols!RC;qu?Nmr;Q`rme?bVrr;rcs6]jdjo=iCs7Q6gl2UMPp&F[]rq$*g&Ff>Zs6K^\o`+ae
-nc/:^rV_<Ig&HR~>
-JcF^/#Q=,apA"@Up\b%&o_SF_s8;osqu?]ks7Gj]rr2p(qt'ddq>^KaqtpBm"7cEfqtg?mrVZNn
-qW\"U$2aJon,<"\q>^EmruLP%s7lTnrU9dRs8V3\nFchSqZ#g[p\4@\o_\XZrVmGuo^2\Es7u<e
-q>^!bp&G!hJcFF'J,~>
-JcFR+/,]GFo^;/;mHsiOnc/OTrqcZpnbi1[s8V`[s7--hrVuoppAa^`q#16moD\amlMpkMq"aq%
-jRW?Jn*095s8V]kMYmAQpAP!j!Vu?drrW3"nGE4do_SIb"7-!equ6U1r:p<lqu6Wgs8W)dq#BOW
-nc/O^s71a@h#Dm~>
-JcF^/0E1hIqtL-jrVZ]qqtC'hm/R"OrqcZpnbi1[s8V`[s7--hrVuoppAa^`q#16moD\^llMpnP
-qYpKsrr)fnrr3#smf!.lr;HX+Q2gjapAP!j!Vu?drrW3"nGE4do_SIb"7-!equ6U1r:p<lqu6Wg
-s8W)dq#BOWnc/O^s71a@h#Dm~>
-JcFa0#6"Dhq"=7Wq>VW:l2CPJrqcZpnbi1[s8V`[s7--hrVuoppAa^`q#16moD\aolMpnMp\k!f
-q@iYtl1+B,o(MkOqYU*qOoPF_pAP!j!Vu?drrW3"nGE4do_SIb"7-!equ6U1r:p<lqu6Wgs8W)d
-q#BOWnc/O^s71a@h#Dm~>
-JcF^/!<)lr!WLRF!!)3]rrDurrrM]`rr4#<s8Dutmf34ZlNHGOs6]j\q#BUYs7lWis7u`pp\Fh:
-q#C@Fro2i8s8;Ef,i\_%s8W&rqu?]`q>^Ejr;Zfhp&":Zs8)chrVmf)s7QElr<Duqo(2nZs7Yj[
-s8;osq!nFbq>,[Bg])d~>
-JcF^/%JKYtr;ZfP!t,\D!se/krW)lqrrM]`rr48Cs8Dutmf34ZlNHGOs6]j\q#BUYs7ZHfs7u`q
-q#CBnrr)cmrr4;0!<;uds7b[S$o%#I!WW2urVQWpmJ6e\qu$Koo_&+Os8VrqpAP"0n,N(\s8</q
-s75d]r;ZKXs8W#ss7l-bs7uMBs4dR(~>
-JcFa0%f>bcp@eIbjUr[b*#TRdrr2urrr3#ip&=t6rr;rss6fpbpZhtGs8VKdp\=dQs8Vopq>^9k
-rU]p_q#0n7oBbi)!:St(pA!kE!\EX:"98E"rVQWpmJ6e\qu$Koo_&+Os8VrqpAP"0n,N(\s8</q
-s75d]r;ZKXs8W#ss7l-bs7uMBs4dR(~>
-JcFU,0_b/5nc0@A)Bf+Tr9a@cDZ@!m+M@Hb&2:6coEBRJi"l@nquD0F4octcq#GsYf`1pNpAY1]
-W>PR54o>:eK_YWI\-+Rmp[8T=!9F1Zr:g8On\HRa[/[?N$cW/?!/D$K!!!K.#^?;2!@ln'!.G1*
-rr<Q0s8VqHs4mX)~>
-JcF^/rr+VFoDeh(1,q3S$ig.jqZ^s<Z7@'1pV@CpXo@qrHO8UH!!)osIK)J2-f"LtIh:94rW)rt
-:&b+hr;6Bho)MJbs8Mi`li.3-NW0%Z\-+Rmp[8T=!9F1Zr:g8On\HRa[/[?N$cW/?!/D$K!!!K.
-#^?;2!@ln'!.G1*rr<Q0s8VqHs4mX)~>
-JcFa02#I(Aq""+Xs!^]E=&pLEs7#skDZ@!m+M@Hb&2:6coEBRJi"l@nquD0F4octcq#H![gAh-P
-;#C+ap@S"KoC(o*!)`gamdp/BqZ(>hrrW51">[:Wmga[EjT#5Wp]-<D_']f$s0*LO`W,Z4LCNMK
-!"Jr6GQ0c+.bst&IL"O*!"T)0s8%<Hh#Dm~>
-JcF[.2ZECKq<[JK.L[aH!:0I[r<;BU0s\bNs!GUg3rf-ZT>a@o"98$!iq39N_(,HbT#O%lrrrH"
-q#:FNb/tt+rt#2#*9R>",e^!,s8N(qcoh4)s!S!#ruo+n3:8Z;djG+p!2mk'rs\kr!oEtUs4Jao
-$(K:1p%o!ns8;bFs4mX)~>
-JcF^/>Q4Hjs8Vrq##$dG3s>E[rqlr_o.dPi0)m98Z9&$a!M@>%o`P6e#NGCUs1p2b!1^tmqYpa!
-r;ZfrrVZTjq=Xe^;?6Xln`TB@#Q,n7!WF@XUbDcJ!2\%)qu6UD"4mJq^;;kt28.Hcs8N(sa$K_6
-rUBsGs8DuN-MRn:cpdX)#QFc$qgne&s*t~>
-JcFa0"9&)fnGW@j,&^P'>p0+G8d4DL0s\bNs!GUg3rf-ZT>a@o"98$!iq39N_(,HbT>s:prr`/l
-qY'XToCMM>joALi!:9+@nG)h[pa#;0r?T(P0E;%PV9h@%rr4AKf)Ho-_Dps@^!e>.rrAt;62qAl
-nH.SIrVtOtp^*G:7J6N_rr;onJcFO*J,~>
-JcF[.s8FeI2Bi\4*tSl'!$3pLqZHll"98E-q[NT+#l+6"s7-Bf!rW''s8S?*'F=C6s7cTorrE&u
-:@eGbqi?2_*N[!.S.UX>YQ+V'mu;r"oaLK^)#rk,s7??l!!*$!s8N'!$j-G6"oo5(rsA_u!<3W!
-rr_upRj&+Ao+(g#!<;rsrVuZiJcFO*J,~>
-JcF[.2#dOP6T$>*7QNIq$SO\"qucum"98E-q[NT+#l+6"s7-Bf!rW''s8S?*'F=C6rq6?lrrE&u
-:@A,[mX]1l!+@fjAH6XYC#eq!ooF_*oaLK^)#rk,s7??l!!*$!s8N'!$j-G6"oo5(rsA_u!<3W!
-rr_upRj&+Ao+(g#!<;rsrVuZiJcFO*J,~>
-JcF^/J,T<Fs%GgJ0l:B,B.6DM5Q:icp&k?q#l>)3!!rAr"on,tp&b0l#QOgh*Y\nR!rr&ts8N)r
-qYK=Io]:=,@fT_&BjLdM@WUo.$1j12!qcuon/22j#ljMtrr<'!!<<'!!"8r/#6k/>nG`gpo`5"'
-n,EL`s-k2<"nN6(rs&Q(quH]qq"oXBh#Dm~>
-JcFa0s8G"LqtTIEjo>c2%3PZ6o_ACcs8Oghs6ot:](5n$quHQl!<3E%rrE)h(<PnCrr`<$&G#K)
-p@S:Zrr2urrr3'UdD@%'s#L/ZlQ-60*P8Bip\t6nrrE#os6g9on1)iYp\u8Ns8W#s!:g'jo`4@Y
-!<<!&eF3bD!<;cooCidipjrJ!s*t~>
-JcF[.%/9f%q#C@"1.sPq"o\H#F9)@@0u3hXs!bPMs8Vusq>LBo$Mj]%s7$lHli@%frr*K"oagch
-qtL*br9jFWp]+T!!;cB\jR2aKs61C$rZ/VP)#+%1s8N)tqZ$!js6qMcp%SJ,_Z0Z6rrDTh!qcQ[
-rrE)t#Lr5KrrE)n!V?$rp\9=>gAc[~>
-JcF[.!;QKl"XmMo?Y^nbs)\8@s"V=hn,FiJo)Jaf!;ZTorsJT%!<;R)am9$-"98B6o(<IanFlAF
-n+>`4mcXXa>la0Tna6&B!WD:("TKLSX;L^3!<<'!rVHQ_$30KEdIm86*Q%jVr;QcerrVinm/I(c
-r<LjA#lao)pAsm[&,5jMs4[L'~>
-JcFa0%K?5!q=jC<lMr4O$n;8VmO%o5s5q6=);P#=*Z!,hrrE)s%KE(ZnIYa#9)o8-e-,gO!;-;\
-7fE>erql]s15d%Is#^;]s.Bkp_uq1.s7u`frrDoqmf*:en%fhOjnlt9'A`We!<<'!s/\TW%dO'i
-!"/ej&HDb1s8ScZs7,o9s4mX)~>
-JcFR+2uW@J#"_6=9a1RopAY-mk/82Vh"]JB(=;FJ!<;s+s.D:?&HDc'!!s+a"TJB#o`(COs8N&t
-p\k"[q"Xn[#QOhpmHXZRs8A8fo>CbRci<hAo`"pfs6fmes6mc@&*<],*#%0,rrE*!!<9,fn.+a`
-:B1b&kRddo!<<(m6N?TOJcFO*J,~>
-JcFI("X7_q<c'&Zs)nDBk/82Vh"]JB(=;FJ!<;s+s.D:?&HDc'!!s+a"TSK%o`(=Jqtg0an+HDJ
-oB#6;7KDoIlgX<<!<)kb#kZ%<+4'u`!;-9kqZ$!`!<;N((_>a*`#KHHrVlltrrE)#6gtTNs%`V&
-!9b!orrE*!TgJeLq18S$s*t~>
-JcFa0#QFAjo^q\GmeZtdnb)_Drr4#6s5CEdk5Y1pr;Zfrs8W&ns8Vfiqu?]qp]'a_pAY'qrqu<d
-s8Drs#lXSrq>WSFp[.t[!qZ<arVlurs5WhOrt5),s763ir;Q`rqt-f`s8Vtprr35ls8ViZs8W)s
-rsJ>sqY'ses7H'cnq$hos*t~>
-JcF^/%/g/&rVHQo%0d"L$jHY1!:9^b*r,co[f>LipVm(1s8N&urV?KnpA=aes8McmnG`(Zrr4&=
-oDejerr)fdp&Fg[)=RUsrqH3Ys7c?cpAXpgj7`HO&c)J,o)Jafrr<#qoV_Tds8/bors/#ms7bjZ
-s8Mus$M+5npAb'jo_8CVJcFF'J,~>
-JcFI("X+p2/g_P:rrDKdruLn7iO8dKs7aM1s8W)us8Dcns7Z?es8W)ms7$'_rVm#tnGW7Wrq[Du
-m.'6,'ArECo_/(IqXaO[q>^<kj7`HO&c)J,o)Jafrr<#qoV_Tds8/bors/#ms7bjZs8Mus$M+5n
-pAb'jo_8CVJcFF'J,~>
-JcFa0*<,j2kPY2Iq![b4mdBW<k4elEo`+m`s8Vins6K[a"8DiqpAY(!fDkgArVu]cr;ZKhrs/E"
-r;QZcs8N#t$hO2lp%e7Tqu?]qo)/Lpp%\Ods7Q<ep\*bKrr`8ss8)`p)#F%)o`+XRs8VQ^s8W&r
-s8V?Vs8V?`qtKse!;;!Dg])d~>
-JcFX-#OMKip](0kq#14%med%Ro`+m`s8Vins6K[a"8DiqpAY(&fDkgArVu]cr;ZEgrr<#rrVm#k
-s8)`mrr3#jm/I"hrr)Wlq>^KorX/>nrr<#kr;66^k5PA_rqcZkrr3i3q"s^`p@&%]n+Zk^rVccr
-l1P)Vl2UYTqYpQhJcFL)J,~>
-JcFX-"7Z?jqYC.#nbiFVo`+m`s8Vins6K[a"8DiqpAY(!fDkgArVu]cr;ZHgrrDrqrt>8!qtL!a
-qXX:DjnAKEp%\7Wnc&CorVuQcrr<#kr;66^k5PA_rqcZkrr3i3q"s^`p@&%]n+Zk^rVccrl1P)V
-l2UYTqYpQhJcFL)J,~>
-JcFa0"TIKZrqHEl+S5*sp##99rr;lqnc/X]s7ZHls8Mues8W&os7--ds82cp!rN#mrr4>9lMpkT
-q>C0is7--dr;69hs82H]r;-Ejnbhn?s8W&rs7cQls8;lr#lF>qp@81_pZ_YV!;$3i!VH9grs88s
-r:p<lrr;Tgrs8MqrV?<is7uJAs4mX)~>
-JcF^/%-dflp\t3mp\=dgm/$_](]47&s8V`kpAY*lrr)Bes8Dfonc/Ldqu-O&qu?Hirqu9Ns8Vil
-rVluirqH?frt>80qu?]ks75ITs8Dorp](3lr;Q^%qtC'`nc/X`l2CV^oD\ajo_\Xf#k\/pq#CBn
-s7?3h#lF>oq>1-kq>#UAh#Dm~>
-JcF^/-ggs6p&"XbpA=mio)/Obqt^9^s8V`kpAY*lrr)Bes8Dfonc/Ldqu-O&qu?Hjs8DKQs8Vfj
-rVmQ$s7lHfp\Oa`p\"FVq>0UXmH+6Er<<5qs8Duqrr39$pAajVs8ViXrVllhrr3#kqu6U$o`+ja
-s8W)uoDS[pqtC!aqZ$Tkq18S$s*t~>
-JcFa0!ri,srr3r9s8W#qp\b$cq#CBXs8VEbs7uB_s8VZis7H6grsA5ls7c6emJm4Zrr2ugrr4PK
-n,)_Sr;Q]qs8)Qjs8V?`rr;umo_\IZq"FRas8)0`q#C0iq"jgdqu?]jrr3Don,NCenF6>Tnc/UW
-rVmK!s7cQgmJm(^s8W&ts6p!frIP"(s*t~>
-JcFU,*W5m-pAXmer;Zflo`"mSs8VEbs7uB_s8VZis7H6grsnSqs7c6emJm4Zs8N#brr2p!oDeX`
-rsSMfoDJRFr;-HnrV?Bk&cD\/qX4CYs7u]iqYC0gs8Vimrs\;`s8N&fnGE7Us8MKcrt4c#p]($U
-s82cps8Dutn,NFdJcFO*J,~>
-JcFX-*rYm+l0\38q=spbmca9>kl:\Ks8Vogp](9as8V`hrr3GtqZ$<`s6]jdp&G'jm/?qco`+aa
-rsSYpp](9Rs7uWjp[\:Z')25%s8)0`q#C0iq"jgdqu?]jrr3Don,NCenF6>Tnc/UWrVmK!s7cQg
-mJm(^s8W&ts6p!frIP"(s*t~>
-JcF^/"Sr&ns8Dor"8`&mr;Q^;qZ$Tjs8Vurs8;lgs8V`ks82cks8VNes7u]pqu?<frsAW%q#:3_
-r;-6frr2os!;HKm%/U#'oDJLMq!n+XrVZWo'Dqgus8W#ps7ZKis7cNmrqlQlqu6U4qu?WprqZEj
-s8;ogs8VTgpAb0ks7?9]pOWA!s*t~>
-JcF^/;YpFhrq-0fp](!fq>C9mrV?Knq#CBks8W#ro`+s`s8VupqZ$T`s8Vops82igrql]krV6Em
-p](6krr<#srV?<XrVHB\rr;ips7?9is8W&qrVmQ.s6p!fr;?Tgs82ijrr;upqZ$HlrttY5rVulm
-qZ$Tns7?9jnGi4^s8Duhs75o8s4dR(~>
-JcF^/3r]0RqWmbEme6/HoCVbJo(E%_q#CBks8W#ro`+s`s8VupqZ$T`s8Vops82igs8)]krV6Em
-q#CBnqYpQerr3E!s8DQdqWn%NqYgBjrVmQ.s6p!fr;?Tgs82ijrr;upqZ$HlrttY5rVulmqZ$Tn
-s7?9jnGi4^s8Duhs75o8s4dR(~>
-JcDeNr;Q<frVlfoJcDPGJ,~>
-JcFR+rVkLMs8MZj!WN&prdk*=s*t~>
-JcFU,!<)imrVc`m!<(%>qYc!FV#Pr~>
-JcC<$JcE+WJ,~>
-JcC<$JcE+WJ,~>
-JcC<$JcE+WJ,~>
-JcC<$JcE+WJ,~>
-JcC<$JcE+WJ,~>
-JcC<$JcE+WJ,~>
-JcC<$JcE+WJ,~>
-JcC<$JcE+WJ,~>
-JcC<$JcE+WJ,~>
-JcC<$JcE+WJ,~>
-JcC<$JcE+WJ,~>
-JcC<$JcE+WJ,~>
-JcC<$JcE+WJ,~>
-JcC<$JcE+WJ,~>
-JcC<$JcE+WJ,~>
-JcC<$JcE+WJ,~>
-JcC<$JcE+WJ,~>
-JcC<$JcE+WJ,~>
-JcC<$JcE+WJ,~>
-JcC<$JcE+WJ,~>
-JcC<$JcE+WJ,~>
-%%EndData
-showpage
-%%Trailer
-end
-%%EOF
diff --git a/lib/megaco/doc/src/notes.xml b/lib/megaco/doc/src/notes.xml
index ab17dd50ca..af6e87b56b 100644
--- a/lib/megaco/doc/src/notes.xml
+++ b/lib/megaco/doc/src/notes.xml
@@ -36,6 +36,70 @@
section is the version number of Megaco.</p>
<section>
+ <title>Megaco 3.14.1.1</title>
+
+ <p>Version 3.14.1.1 supports code replacement in runtime from/to
+ version 3.14.1, 3.14, 3.13, 3.12 and 3.11.3.</p>
+
+ <section>
+ <title>Improvements and new features</title>
+
+<!--
+ <p>-</p>
+-->
+
+ <list type="bulleted">
+ <item>
+ <p>Updated the
+ <seealso marker="megaco_performance">performance</seealso>
+ chapter. </p>
+ <p>Own Id: OTP-8696</p>
+ </item>
+
+ </list>
+
+ </section>
+
+ <section>
+ <title>Fixed bugs and malfunctions</title>
+ <p>-</p>
+
+<!--
+ <list type="bulleted">
+ <item>
+          <p>A race condition could, during high load, cause both the
+          original and a resent message to be processed and delivered
+          as two separate messages to the user. </p>
+          <p>Note that this solution only protects against multiple
+          reply deliveries! </p>
+ <p>Own Id: OTP-8529</p>
+ <p>Aux Id: Seq 10915</p>
+ </item>
+
+ <item>
+ <p>Fix shared libraries installation. </p>
+ <p>The flex shared lib(s) were incorrectly installed as data
+ files. </p>
+ <p>Peter Lemenkov</p>
+ <p>Own Id: OTP-8627</p>
+ </item>
+
+ <item>
+ <p>Eliminated a possible race condition while creating
+ pending counters. </p>
+ <p>Own Id: OTP-8634</p>
+ <p>Aux Id: Seq 11579</p>
+ </item>
+
+ </list>
+-->
+
+ </section>
+
+ </section> <!-- 3.14.1.1 -->
+
+
+ <section>
<title>Megaco 3.14.1</title>
<p>Version 3.14.1 supports code replacement in runtime from/to
@@ -66,6 +130,16 @@
<list type="bulleted">
<item>
+	  <p>A race condition could, during high load, cause both the
+	  original and a resent message to be processed and delivered
+	  as two separate messages to the user. </p>
+	  <p>Note that this solution only protects against multiple
+	  reply deliveries! </p>
+ <p>Own Id: OTP-8529</p>
+ <p>Aux Id: Seq 10915</p>
+ </item>
+
+ <item>
<p>Fix shared libraries installation. </p>
<p>The flex shared lib(s) were incorrectly installed as data
files. </p>
@@ -73,6 +147,13 @@
<p>Own Id: OTP-8627</p>
</item>
+ <item>
+ <p>Eliminated a possible race condition while creating
+ pending counters. </p>
+ <p>Own Id: OTP-8634</p>
+ <p>Aux Id: Seq 11579</p>
+ </item>
+
</list>
</section>
@@ -125,7 +206,7 @@
<item>
<p>Callbacks, when the callback module is unknown (undefined),
results in warning messages. </p>
- <p>A raise condition scenario. As part of a cancelation operation,
+ <p>A race condition scenario. As part of a cancelation operation,
replies with waiting acknowledgements is cancelled. This includes
informing the user (via a call to the handle_trans_ack callback
function). It is possible that at this point the connection data
@@ -657,13 +738,16 @@
<list type="bulleted">
<item>
- <p>Unexpected <seealso marker="megaco_user#unexpected_trans">handle_unexpected_reply</seealso> callbacks. </p>
- <p>The <seealso marker="megaco_user">megaco_user</seealso> callback function
+ <p>Unexpected
+ <seealso marker="megaco_user#unexpected_trans">handle_unexpected_reply</seealso>
+ callbacks. </p>
+ <p>The <seealso marker="megaco_user">megaco_user</seealso> callback
+ function
<seealso marker="megaco_user#unexpected_trans">handle_unexpected_reply</seealso>
could during high load be called with unexpected values for the Trans
- argument, such as an <c>TransactionReply</c> where <c>transactionResult</c>
- had the value <c>{error, timeout}</c>. This was a result of a raise condition
- and has now been fixed. </p>
+	    argument, such as a <c>TransactionReply</c> where
+ <c>transactionResult</c> had the value <c>{error, timeout}</c>.
+ This was a result of a race condition and has now been fixed. </p>
<p>Own Id: OTP-7926</p>
<p>Aux Id: Seq 11255</p>
</item>
@@ -858,416 +942,6 @@
</section>
</section> <!-- 3.10 -->
-
- <section>
- <title>Megaco 3.9.4</title>
-
- <p>Version 3.9.4 supports code replacement in runtime from/to
- version 3.9.3, 3.9.2, 3.9.1.1, 3.9.1, 3.9, 3.8.2, 3.8.1 and 3.8 except
- when using any of the drivers (flex for text or asn1 for binary).</p>
-
- <section>
- <title>Improvements and new features</title>
- <p>-</p>
-
-<!--
- <list type="bulleted">
- <item>
- <p>Miscellaneous dialyzer related and test case cleanup. </p>
- <p>Own Id: OTP-7614</p>
- </item>
-
- </list>
--->
- </section>
-
- <section>
- <title>Fixed bugs and malfunctions</title>
-<!--
- <p>-</p>
--->
-
- <list type="bulleted">
- <item>
- <p>Segmenting a reply failed (with a badmatch) if the message
- did not actually need to be segmented (e.g. was within the
- size limit,
- <seealso marker="megaco#ui_max_pdu_size">max_pdu_size</seealso>). </p>
- <p>Own Id: OTP-7733</p>
- <p>Aux Id: Seq 11168</p>
- </item>
-
- <item>
- <p>Improve the error handling of megaco_tcp for received
- messages. </p>
- <p>Own Id: OTP-7728</p>
- </item>
-
- </list>
-
- </section>
-
- <section>
- <title>Incompatibilities</title>
- <p>-</p>
-
-<!--
- <list type="bulleted">
- <item>
- <p>For those implementing their own codec's, the new megaco_encoder
- behaviour will require three more functions. See above for more
- info. </p>
- <p>Own Id: OTP-7168</p>
- <p>Aux Id: Seq 10867</p>
- </item>
-
- </list>
--->
-
- </section>
- </section> <!-- 3.9.3.1 -->
-
-
- <section>
- <title>Megaco 3.9.3</title>
-
- <p>Version 3.9.3 supports code replacement in runtime from/to
- version 3.9.2, 3.9.1.1, 3.9.1, 3.9, 3.8.2, 3.8.1 and 3.8 except
- when using any of the drivers (flex for text or asn1 for binary).</p>
-
- <section>
- <title>Improvements and new features</title>
- <p>-</p>
-
-<!--
- <list type="bulleted">
- <item>
- <p>Miscellaneous dialyzer related and test case cleanup. </p>
- <p>Own Id: OTP-7614</p>
- </item>
-
- </list>
--->
- </section>
-
- <section>
- <title>Fixed bugs and malfunctions</title>
-<!--
- <p>-</p>
--->
-
- <list type="bulleted">
- <item>
- <p>Memory leak in the flex scanner. There was a memory
- leak in the flex scanner function handling
- Property Parameters. </p>
- <p>Own Id: OTP-7700</p>
- <p>Aux Id: Seq 11126</p>
- </item>
-
- </list>
-
- </section>
-
- <section>
- <title>Incompatibilities</title>
- <p>-</p>
-
-<!--
- <list type="bulleted">
- <item>
- <p>For those implementing their own codec's, the new megaco_encoder
- behaviour will require three more functions. See above for more
- info. </p>
- <p>Own Id: OTP-7168</p>
- <p>Aux Id: Seq 10867</p>
- </item>
-
- </list>
--->
-
- </section>
- </section> <!-- 3.9.3 -->
-
-
- <section>
- <title>Megaco 3.9.2</title>
-
- <p>Version 3.9.2 supports code replacement in runtime from/to
- version 3.9.1.1, 3.9.1, 3.9, 3.8.2, 3.8.1 and 3.8 except
- when using any of the drivers (flex for text or asn1 for binary).</p>
-
- <section>
- <title>Improvements and new features</title>
- <p>-</p>
-
-<!--
- <list type="bulleted">
- <item>
- <p>Miscellaneous dialyzer related and test case cleanup. </p>
- <p>Own Id: OTP-7614</p>
- </item>
-
- </list>
--->
- </section>
-
- <section>
- <title>Fixed bugs and malfunctions</title>
-<!--
- <p>-</p>
--->
-
- <list type="bulleted">
- <item>
- <p>The text encoders (v1, v2, v3, ...) all failed to
- properly encode the DigitMapDescriptor. </p>
- <p>Own Id: OTP-7671</p>
- <p>Aux Id: Seq 11113</p>
- </item>
-
- <item>
- <p>The mini decoder some time incorrectly identifies
- plain text as tokens. </p>
- <p>Own Id: OTP-7672</p>
- <p>Aux Id: Seq 11103</p>
- </item>
-
- </list>
-
- </section>
-
- <section>
- <title>Incompatibilities</title>
- <p>-</p>
-
-<!--
- <list type="bulleted">
- <item>
- <p>For those implementing their own codec's, the new megaco_encoder
- behaviour will require three more functions. See above for more
- info. </p>
- <p>Own Id: OTP-7168</p>
- <p>Aux Id: Seq 10867</p>
- </item>
-
- </list>
--->
-
- </section>
- </section> <!-- 3.9.2 -->
-
-
- <section>
- <title>Megaco 3.9.1.1</title>
-
- <p>Version 3.9.1.1 supports code replacement in runtime from/to
- version 3.9.1, 3.9, 3.8.2, 3.8.1 and 3.8 except
- when using any of the drivers (flex for text or asn1 for binary).</p>
-
- <section>
- <title>Improvements and new features</title>
-<!--
- <p>-</p>
--->
-
- <list type="bulleted">
- <item>
- <p>Miscellaneous dialyzer related and test case cleanup. </p>
- <p>Own Id: OTP-7614</p>
- </item>
-
- </list>
- </section>
-
- <section>
- <title>Fixed bugs and malfunctions</title>
- <p>-</p>
-
-<!--
- <list type="bulleted">
- <item>
- <p>[text] The flex scanner did not allow an empty quotedString
- in propertyParm. </p>
- <p>Own Id: OTP-7573</p>
- <p>Aux Id: Seq 11062</p>
- </item>
-
- <item>
- <p>[text] Unable to decode a version 2 message with a
- topologyTriple containing an (optional) eventStream. </p>
- <p>Own Id: OTP-7576</p>
- <p>Aux Id: Seq 11066</p>
- </item>
-
- </list>
--->
-
- </section>
-
- <section>
- <title>Incompatibilities</title>
- <p>-</p>
-
-<!--
- <list type="bulleted">
- <item>
- <p>For those implementing their own codec's, the new megaco_encoder
- behaviour will require three more functions. See above for more
- info. </p>
- <p>Own Id: OTP-7168</p>
- <p>Aux Id: Seq 10867</p>
- </item>
-
- </list>
--->
-
- </section>
- </section> <!-- 3.9.1.1 -->
-
-
- <section>
- <title>Megaco 3.9.1</title>
-
- <p>Version 3.9.1 supports code replacement in runtime from/to
- version 3.9, 3.8.2, 3.8.1 and 3.8 except
- when using any of the drivers (flex for text or asn1 for binary).</p>
-
- <section>
- <title>Improvements and new features</title>
- <p>-</p>
-
-<!--
- <list type="bulleted">
- <item>
- <p>[text] The text codec(s) has been optimized. The parsing of
- "property parameters" has been moved to the scanner(s). Which means
- that when decoding messages containing property parameters, using
- the flex scanner, decode time(s) will be reduced. The reduction
- depends on the message, but can be as large as 25%. </p>
- <p>Own Id: OTP-7431</p>
- </item>
-
- </list>
--->
- </section>
-
- <section>
- <title>Fixed bugs and malfunctions</title>
-<!--
- <p>-</p>
--->
-
- <list type="bulleted">
- <item>
- <p>[text] The flex scanner did not allow an empty quotedString
- in propertyParm. </p>
- <p>Own Id: OTP-7573</p>
- <p>Aux Id: Seq 11062</p>
- </item>
-
- <item>
- <p>[text] Unable to decode a version 2 message with a
- topologyTriple containing an (optional) eventStream. </p>
- <p>Own Id: OTP-7576</p>
- <p>Aux Id: Seq 11066</p>
- </item>
-
- </list>
-
- </section>
-
- <section>
- <title>Incompatibilities</title>
- <p>-</p>
-
-<!--
- <list type="bulleted">
- <item>
- <p>For those implementing their own codec's, the new megaco_encoder
- behaviour will require three more functions. See above for more
- info. </p>
- <p>Own Id: OTP-7168</p>
- <p>Aux Id: Seq 10867</p>
- </item>
-
- </list>
--->
-
- </section>
- </section> <!-- 3.9.1 -->
-
-
- <section>
- <title>Megaco 3.9</title>
-
- <p>Version 3.9 supports code replacement in runtime from/to
- version 3.8.2, 3.8.1 and 3.8 except
- when using any of the drivers (flex for text or asn1 for binary).</p>
-
- <section>
- <title>Improvements and new features</title>
-<!--
- <p>-</p>
--->
-
- <list type="bulleted">
- <item>
- <p>[text] The text codec(s) has been optimized. The parsing of
- "property parameters" has been moved to the scanner(s). Which means
- that when decoding messages containing property parameters, using
- the flex scanner, decode time(s) will be reduced. The reduction
- depends on the message, but can be as large as 25%. </p>
- <p>Own Id: OTP-7431</p>
- </item>
-
- </list>
- </section>
-
- <section>
- <title>Fixed bugs and malfunctions</title>
- <p>-</p>
-
-<!--
- <list type="bulleted">
- <item>
- <p>If a TransactionRequest arrives while a user is
- connecting (is in the callback function
- handle_connect as a result of a megaco:connect call),
- megaco responds with a pending message and then drops
- the request.</p>
- <p>These messages will now be silently dropped, forcing the
- other side to resend. </p>
- <p>Own Id: OTP-7192</p>
- <p>Aux Id: Seq 10884</p>
- </item>
-
- </list>
--->
-
- </section>
-
- <section>
- <title>Incompatibilities</title>
- <p>-</p>
-
-<!--
- <list type="bulleted">
- <item>
- <p>For those implementing their own codec's, the new megaco_encoder
- behaviour will require three more functions. See above for more
- info. </p>
- <p>Own Id: OTP-7168</p>
- <p>Aux Id: Seq 10867</p>
- </item>
-
- </list>
--->
-
- </section>
- </section> <!-- 3.9 -->
-
-
<!-- section>
<title>Release notes history</title>
<p>For information about older versions see
diff --git a/lib/megaco/doc/src/notes_history.xml b/lib/megaco/doc/src/notes_history.xml
index 640b62230f..220ed4bbb1 100644
--- a/lib/megaco/doc/src/notes_history.xml
+++ b/lib/megaco/doc/src/notes_history.xml
@@ -33,6 +33,415 @@
</header>
<section>
+ <title>Megaco 3.9.4</title>
+
+ <p>Version 3.9.4 supports code replacement in runtime from/to
+ version 3.9.3, 3.9.2, 3.9.1.1, 3.9.1, 3.9, 3.8.2, 3.8.1 and 3.8 except
+ when using any of the drivers (flex for text or asn1 for binary).</p>
+
+ <section>
+ <title>Improvements and new features</title>
+ <p>-</p>
+
+<!--
+ <list type="bulleted">
+ <item>
+ <p>Miscellaneous dialyzer related and test case cleanup. </p>
+ <p>Own Id: OTP-7614</p>
+ </item>
+
+ </list>
+-->
+ </section>
+
+ <section>
+ <title>Fixed bugs and malfunctions</title>
+<!--
+ <p>-</p>
+-->
+
+ <list type="bulleted">
+ <item>
+ <p>Segmenting a reply failed (with a badmatch) if the message
+ did not actually need to be segmented (e.g. was within the
+ size limit,
+ <seealso marker="megaco#ui_max_pdu_size">max_pdu_size</seealso>). </p>
+ <p>Own Id: OTP-7733</p>
+ <p>Aux Id: Seq 11168</p>
+ </item>
+
+ <item>
+ <p>Improve the error handling of megaco_tcp for received
+ messages. </p>
+ <p>Own Id: OTP-7728</p>
+ </item>
+
+ </list>
+
+ </section>
+
+ <section>
+ <title>Incompatibilities</title>
+ <p>-</p>
+
+<!--
+ <list type="bulleted">
+ <item>
+ <p>For those implementing their own codec's, the new megaco_encoder
+ behaviour will require three more functions. See above for more
+ info. </p>
+ <p>Own Id: OTP-7168</p>
+ <p>Aux Id: Seq 10867</p>
+ </item>
+
+ </list>
+-->
+
+ </section>
+ </section> <!-- 3.9.3.1 -->
+
+
+ <section>
+ <title>Megaco 3.9.3</title>
+
+ <p>Version 3.9.3 supports code replacement in runtime from/to
+ version 3.9.2, 3.9.1.1, 3.9.1, 3.9, 3.8.2, 3.8.1 and 3.8 except
+ when using any of the drivers (flex for text or asn1 for binary).</p>
+
+ <section>
+ <title>Improvements and new features</title>
+ <p>-</p>
+
+<!--
+ <list type="bulleted">
+ <item>
+ <p>Miscellaneous dialyzer related and test case cleanup. </p>
+ <p>Own Id: OTP-7614</p>
+ </item>
+
+ </list>
+-->
+ </section>
+
+ <section>
+ <title>Fixed bugs and malfunctions</title>
+<!--
+ <p>-</p>
+-->
+
+ <list type="bulleted">
+ <item>
+ <p>Memory leak in the flex scanner. There was a memory
+ leak in the flex scanner function handling
+ Property Parameters. </p>
+ <p>Own Id: OTP-7700</p>
+ <p>Aux Id: Seq 11126</p>
+ </item>
+
+ </list>
+
+ </section>
+
+ <section>
+ <title>Incompatibilities</title>
+ <p>-</p>
+
+<!--
+ <list type="bulleted">
+ <item>
+ <p>For those implementing their own codec's, the new megaco_encoder
+ behaviour will require three more functions. See above for more
+ info. </p>
+ <p>Own Id: OTP-7168</p>
+ <p>Aux Id: Seq 10867</p>
+ </item>
+
+ </list>
+-->
+
+ </section>
+ </section> <!-- 3.9.3 -->
+
+
+ <section>
+ <title>Megaco 3.9.2</title>
+
+ <p>Version 3.9.2 supports code replacement in runtime from/to
+ version 3.9.1.1, 3.9.1, 3.9, 3.8.2, 3.8.1 and 3.8 except
+ when using any of the drivers (flex for text or asn1 for binary).</p>
+
+ <section>
+ <title>Improvements and new features</title>
+ <p>-</p>
+
+<!--
+ <list type="bulleted">
+ <item>
+ <p>Miscellaneous dialyzer related and test case cleanup. </p>
+ <p>Own Id: OTP-7614</p>
+ </item>
+
+ </list>
+-->
+ </section>
+
+ <section>
+ <title>Fixed bugs and malfunctions</title>
+<!--
+ <p>-</p>
+-->
+
+ <list type="bulleted">
+ <item>
+ <p>The text encoders (v1, v2, v3, ...) all failed to
+ properly encode the DigitMapDescriptor. </p>
+ <p>Own Id: OTP-7671</p>
+ <p>Aux Id: Seq 11113</p>
+ </item>
+
+ <item>
+ <p>The mini decoder some time incorrectly identifies
+ plain text as tokens. </p>
+ <p>Own Id: OTP-7672</p>
+ <p>Aux Id: Seq 11103</p>
+ </item>
+
+ </list>
+
+ </section>
+
+ <section>
+ <title>Incompatibilities</title>
+ <p>-</p>
+
+<!--
+ <list type="bulleted">
+ <item>
+ <p>For those implementing their own codec's, the new megaco_encoder
+ behaviour will require three more functions. See above for more
+ info. </p>
+ <p>Own Id: OTP-7168</p>
+ <p>Aux Id: Seq 10867</p>
+ </item>
+
+ </list>
+-->
+
+ </section>
+ </section> <!-- 3.9.2 -->
+
+
+ <section>
+ <title>Megaco 3.9.1.1</title>
+
+ <p>Version 3.9.1.1 supports code replacement in runtime from/to
+ version 3.9.1, 3.9, 3.8.2, 3.8.1 and 3.8 except
+ when using any of the drivers (flex for text or asn1 for binary).</p>
+
+ <section>
+ <title>Improvements and new features</title>
+<!--
+ <p>-</p>
+-->
+
+ <list type="bulleted">
+ <item>
+ <p>Miscellaneous dialyzer related and test case cleanup. </p>
+ <p>Own Id: OTP-7614</p>
+ </item>
+
+ </list>
+ </section>
+
+ <section>
+ <title>Fixed bugs and malfunctions</title>
+ <p>-</p>
+
+<!--
+ <list type="bulleted">
+ <item>
+ <p>[text] The flex scanner did not allow an empty quotedString
+ in propertyParm. </p>
+ <p>Own Id: OTP-7573</p>
+ <p>Aux Id: Seq 11062</p>
+ </item>
+
+ <item>
+ <p>[text] Unable to decode a version 2 message with a
+ topologyTriple containing an (optional) eventStream. </p>
+ <p>Own Id: OTP-7576</p>
+ <p>Aux Id: Seq 11066</p>
+ </item>
+
+ </list>
+-->
+
+ </section>
+
+ <section>
+ <title>Incompatibilities</title>
+ <p>-</p>
+
+<!--
+ <list type="bulleted">
+ <item>
+ <p>For those implementing their own codec's, the new megaco_encoder
+ behaviour will require three more functions. See above for more
+ info. </p>
+ <p>Own Id: OTP-7168</p>
+ <p>Aux Id: Seq 10867</p>
+ </item>
+
+ </list>
+-->
+
+ </section>
+ </section> <!-- 3.9.1.1 -->
+
+
+ <section>
+ <title>Megaco 3.9.1</title>
+
+ <p>Version 3.9.1 supports code replacement in runtime from/to
+ version 3.9, 3.8.2, 3.8.1 and 3.8 except
+ when using any of the drivers (flex for text or asn1 for binary).</p>
+
+ <section>
+ <title>Improvements and new features</title>
+ <p>-</p>
+
+<!--
+ <list type="bulleted">
+ <item>
+ <p>[text] The text codec(s) has been optimized. The parsing of
+ "property parameters" has been moved to the scanner(s). Which means
+ that when decoding messages containing property parameters, using
+ the flex scanner, decode time(s) will be reduced. The reduction
+ depends on the message, but can be as large as 25%. </p>
+ <p>Own Id: OTP-7431</p>
+ </item>
+
+ </list>
+-->
+ </section>
+
+ <section>
+ <title>Fixed bugs and malfunctions</title>
+<!--
+ <p>-</p>
+-->
+
+ <list type="bulleted">
+ <item>
+ <p>[text] The flex scanner did not allow an empty quotedString
+ in propertyParm. </p>
+ <p>Own Id: OTP-7573</p>
+ <p>Aux Id: Seq 11062</p>
+ </item>
+
+ <item>
+ <p>[text] Unable to decode a version 2 message with a
+ topologyTriple containing an (optional) eventStream. </p>
+ <p>Own Id: OTP-7576</p>
+ <p>Aux Id: Seq 11066</p>
+ </item>
+
+ </list>
+
+ </section>
+
+ <section>
+ <title>Incompatibilities</title>
+ <p>-</p>
+
+<!--
+ <list type="bulleted">
+ <item>
+ <p>For those implementing their own codec's, the new megaco_encoder
+ behaviour will require three more functions. See above for more
+ info. </p>
+ <p>Own Id: OTP-7168</p>
+ <p>Aux Id: Seq 10867</p>
+ </item>
+
+ </list>
+-->
+
+ </section>
+ </section> <!-- 3.9.1 -->
+
+
+ <section>
+ <title>Megaco 3.9</title>
+
+ <p>Version 3.9 supports code replacement in runtime from/to
+ version 3.8.2, 3.8.1 and 3.8 except
+ when using any of the drivers (flex for text or asn1 for binary).</p>
+
+ <section>
+ <title>Improvements and new features</title>
+<!--
+ <p>-</p>
+-->
+
+ <list type="bulleted">
+ <item>
+ <p>[text] The text codec(s) has been optimized. The parsing of
+ "property parameters" has been moved to the scanner(s). Which means
+ that when decoding messages containing property parameters, using
+ the flex scanner, decode time(s) will be reduced. The reduction
+ depends on the message, but can be as large as 25%. </p>
+ <p>Own Id: OTP-7431</p>
+ </item>
+
+ </list>
+ </section>
+
+ <section>
+ <title>Fixed bugs and malfunctions</title>
+ <p>-</p>
+
+<!--
+ <list type="bulleted">
+ <item>
+ <p>If a TransactionRequest arrives while a user is
+ connecting (is in the callback function
+ handle_connect as a result of a megaco:connect call),
+ megaco responds with a pending message and then drops
+ the request.</p>
+ <p>These messages will now be silently dropped, forcing the
+ other side to resend. </p>
+ <p>Own Id: OTP-7192</p>
+ <p>Aux Id: Seq 10884</p>
+ </item>
+
+ </list>
+-->
+
+ </section>
+
+ <section>
+ <title>Incompatibilities</title>
+ <p>-</p>
+
+<!--
+ <list type="bulleted">
+ <item>
+ <p>For those implementing their own codec's, the new megaco_encoder
+ behaviour will require three more functions. See above for more
+ info. </p>
+ <p>Own Id: OTP-7168</p>
+ <p>Aux Id: Seq 10867</p>
+ </item>
+
+ </list>
+-->
+
+ </section>
+ </section> <!-- 3.9 -->
+
+
+ <section>
<title>Megaco 3.8.2</title>
<p>Version 3.8.2 supports code replacement in runtime from/to
@@ -901,7 +1310,7 @@
<list>
<item>
<p>When timers expire while a connection cancel
- (megaco:cancel) is in progress, there is a raise
+ (megaco:cancel) is in progress, there is a race
condition possibility. This has been eliminated. </p>
<p>Own Id: OTP-6921</p>
<p>Aux Id: Seq 10450</p>
@@ -1166,7 +1575,7 @@
<list type="bulleted">
<item>
<p>When replies arrive during a call to megaco:cancel
- there is a raise condition possibility. This has been
+ there is a race condition possibility. This has been
eliminated. </p>
<p>Own Id: OTP-6276</p>
<p>Aux Id: Seq 10450</p>
diff --git a/lib/megaco/src/app/megaco.appup.src b/lib/megaco/src/app/megaco.appup.src
index 5df31f2923..d904e8ab33 100644
--- a/lib/megaco/src/app/megaco.appup.src
+++ b/lib/megaco/src/app/megaco.appup.src
@@ -127,19 +127,29 @@
%% |
%% v
%% 3.14.1
+%% |
+%% v
+%% 3.14.1.1
%%
%%
{"%VSN%",
[
+ {"3.14.1",
+ [
+ ]
+ },
{"3.14",
[
+ {load_module, megaco_messenger, soft_purge, soft_purge, [megaco_monitor]},
+ {update, megaco_monitor, soft, soft_purge, soft_purge, []},
{update, megaco_config, soft, soft_purge, soft_purge, []}
]
},
{"3.13",
[
- {load_module, megaco_messenger, soft_purge, soft_purge, []},
+ {load_module, megaco_messenger, soft_purge, soft_purge, [megaco_monitor]},
{load_module, megaco_filter, soft_purge, soft_purge, []},
+ {update, megaco_monitor, soft, soft_purge, soft_purge, []},
{update, megaco_config, soft, soft_purge, soft_purge, []},
{update, megaco_flex_scanner_handler, {advanced, downgrade_to_pre_3_13_1},
soft_purge, soft_purge, []}
@@ -171,15 +181,22 @@
}
],
[
+ {"3.14.1",
+ [
+ ]
+ },
{"3.14",
[
+ {load_module, megaco_messenger, soft_purge, soft_purge, [megaco_monitor]},
+ {update, megaco_monitor, soft, soft_purge, soft_purge, []},
{update, megaco_config, soft, soft_purge, soft_purge, []}
]
},
{"3.13",
[
- {load_module, megaco_messenger, soft_purge, soft_purge, []},
+ {load_module, megaco_messenger, soft_purge, soft_purge, [megaco_monitor]},
{load_module, megaco_filter, soft_purge, soft_purge, []},
+ {update, megaco_monitor, soft, soft_purge, soft_purge, []},
{update, megaco_config, soft, soft_purge, soft_purge, []},
{update, megaco_flex_scanner_handler, {advanced, upgrade_from_pre_3_13_1},
soft_purge, soft_purge, []}
diff --git a/lib/megaco/src/app/megaco_internal.hrl b/lib/megaco/src/app/megaco_internal.hrl
index adbaacacef..2c124e9060 100644
--- a/lib/megaco/src/app/megaco_internal.hrl
+++ b/lib/megaco/src/app/megaco_internal.hrl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1999-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1999-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -139,6 +139,22 @@
[?APPLICATION, ?MODULE, self()|A]))).
+-define(megaco_ereport(Label, Report),
+ ?megaco_report(error_report, Label, Report)).
+
+-define(megaco_wreport(Label, Report),
+ ?megaco_report(warning_report, Label, Report)).
+
+-define(megaco_ireport(Label, Report),
+ ?megaco_report(info_report, Label, Report)).
+
+-define(megaco_report(Func, Label, Report),
+ (catch error_logger:Func([{label, Label},
+ {application, ?APPLICATION},
+ {module, ?MODULE},
+ {process, self()} | Report]))).
+
+
%%%----------------------------------------------------------------------
%%% Default (ignore) value of the Extra argument to the
%%% megaco:receive_message/5 and process_received_message functions/5.
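A hedged sketch of how the report macros added to megaco_internal.hrl above could be used. The module name, label and report contents are made up for illustration, and the include directive assumes the internal header is on the include path:

    -module(megaco_report_example).   %% hypothetical module name
    -include("megaco_internal.hrl").  %% assumes the internal header is reachable
    -export([warn_pending_limit/2]).

    %% ?megaco_wreport/2 expands (per the macro above) to
    %% error_logger:warning_report([{label, Label}, {application, megaco},
    %%                              {module, ?MODULE}, {process, self()} | Report]).
    warn_pending_limit(TransId, Count) ->
        ?megaco_wreport("pending limit almost reached",
                        [{transaction_id, TransId}, {pending_count, Count}]).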
diff --git a/lib/megaco/src/engine/megaco_config.erl b/lib/megaco/src/engine/megaco_config.erl
index 0445f10838..6805db790d 100644
--- a/lib/megaco/src/engine/megaco_config.erl
+++ b/lib/megaco/src/engine/megaco_config.erl
@@ -628,31 +628,19 @@ incr_counter(Item, Incr) ->
end
catch
error:_ ->
+	    %% Counter does not exist, so try to create it
try
begin
cre_counter(Item, Incr)
end
catch
exit:_ ->
- %% Ok, some other process got there before us,
- %% so try again
+		    %% This is a race condition:
+		    %% when we tried to update the counter above, it
+		    %% did not exist, but now it does...
ets:update_counter(megaco_config, Item, Incr)
end
end.
-%% incr_counter(Item, Incr) ->
-%% case (catch ets:update_counter(megaco_config, Item, Incr)) of
-%% {'EXIT', _} ->
-%% case (catch cre_counter(Item, Incr)) of
-%% {'EXIT', _} ->
-%% %% Ok, some other process got there before us,
-%% %% so try again
-%% ets:update_counter(megaco_config, Item, Incr);
-%% NewVal ->
-%% NewVal
-%% end;
-%% NewVal ->
-%% NewVal
-%% end.
cre_counter(Item, Initial) ->
case whereis(?SERVER) =:= self() of
@@ -660,8 +648,8 @@ cre_counter(Item, Initial) ->
case call({cre_counter, Item, Initial}) of
{ok, Value} ->
Value;
- Error ->
- exit(Error)
+ {error, Reason} ->
+ exit({failed_creating_counter, Item, Initial, Reason})
end;
true ->
%% Check that the counter does not already exists
@@ -671,7 +659,7 @@ cre_counter(Item, Initial) ->
ets:insert(megaco_config, {Item, Initial}),
{ok, Initial};
[_] ->
- %% Ouch, now what?
+		    %% Possibly a race condition
{error, already_exists}
end
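The rewritten incr_counter/2 above replaces a catch-based create-or-update with try/catch and a retry on the create path. A minimal standalone sketch of the same create-or-increment pattern, using a plain ets table and ets:insert_new/2 instead of the megaco_config server call (table, key and function names are examples only):

    incr(Tab, Key, Incr) ->
        try
            ets:update_counter(Tab, Key, Incr)
        catch
            error:badarg ->
                %% The counter does not exist yet; try to create it.
                case ets:insert_new(Tab, {Key, Incr}) of
                    true ->
                        Incr;
                    false ->
                        %% Race: another process created it in between,
                        %% so fall back to a plain update.
                        ets:update_counter(Tab, Key, Incr)
                end
        end.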
diff --git a/lib/megaco/src/engine/megaco_messenger.erl b/lib/megaco/src/engine/megaco_messenger.erl
index 5756e8e896..5fad29931b 100644
--- a/lib/megaco/src/engine/megaco_messenger.erl
+++ b/lib/megaco/src/engine/megaco_messenger.erl
@@ -1541,30 +1541,6 @@ check_pending_limit(Limit, Direction, TransId) ->
aborted
end.
-%% check_pending_limit(infinity, _, _) ->
-%% {ok, 0};
-%% check_pending_limit(Limit, Direction, TransId) ->
-%% ?rt2("check pending limit", [Direction, Limit, TransId]),
-%% case (catch megaco_config:get_pending_counter(Direction, TransId)) of
-%% {'EXIT', _} ->
-%% %% This function is only called when we "know" the
-%% %% counter to exist. So, the only reason that this
-%% %% would happen is of the counter has been removed.
-%% %% This only happen if the pending limit has been
-%% %% reached. In any case, this is basically the same
-%% %% as aborted!
-%% ?rt2("check pending limit - exit", []),
-%% aborted;
-%% Val when Val =< Limit ->
-%% %% Since we have no intention to increment here, it
-%% %% is ok to be _at_ the limit
-%% ?rt2("check pending limit - ok", [Val]),
-%% {ok, Val};
-%% _Val ->
-%% ?rt2("check pending limit - aborted", [_Val]),
-%% aborted
-%% end.
-
check_and_maybe_incr_pending_limit(infinity, _, _) ->
ok;
@@ -1572,59 +1548,42 @@ check_and_maybe_incr_pending_limit(Limit, Direction, TransId) ->
%%
%% We need this kind of test to detect when we _pass_ the limit
%%
- ?rt2("check and maybe incr pending limit", [Direction, Limit, TransId]),
+ ?rt2("check and maybe incr pending limit", [{direction, Direction},
+ {transaction_id, TransId},
+ {counter_limit, Limit}]),
try megaco_config:get_pending_counter(Direction, TransId) of
Val when Val > Limit ->
- ?rt2("check and maybe incr - aborted", [Direction, Val, Limit]),
+ ?rt2("check and maybe incr - aborted", [{counter_value, Val}]),
aborted; % Already passed the limit
Val ->
- ?rt2("check and maybe incr - incr", [Direction, Val, Limit]),
+ ?rt2("check and maybe incr - incr", [{counter_value, Val}]),
megaco_config:incr_pending_counter(Direction, TransId),
if
Val < Limit ->
ok; % Still within the limit
true ->
?rt2("check and maybe incr - error",
- [Direction, Val, Limit]),
+ [{counter_value, Val}]),
error % Passed the limit
end
catch
_:_ ->
%% Has not been created yet (connect).
- megaco_config:cre_pending_counter(Direction, TransId, 1),
- ok
+	    %% Try to create it, but beware of a possible race condition
+ try
+ begin
+ megaco_config:cre_pending_counter(Direction, TransId, 1),
+ ok
+ end
+ catch
+ _:_ ->
+		    %% Ouch, race condition, increment instead...
+ megaco_config:incr_pending_counter(Direction, TransId),
+ ok
+ end
end.
-%% check_and_maybe_incr_pending_limit(infinity, _, _) ->
-%% ok;
-%% check_and_maybe_incr_pending_limit(Limit, Direction, TransId) ->
-%% %%
-%% %% We need this kind of test to detect when we _pass_ the limit
-%% %%
-%% ?rt2("check and maybe incr pending limit", [Direction, Limit, TransId]),
-%% case (catch megaco_config:get_pending_counter(Direction, TransId)) of
-%% {'EXIT', _} ->
-%% %% Has not been created yet (connect).
-%% megaco_config:cre_pending_counter(Direction, TransId, 1),
-%% ok;
-%% Val when Val > Limit ->
-%% ?rt2("check and maybe incr - aborted", [Direction, Val, Limit]),
-%% aborted; % Already passed the limit
-%% Val ->
-%% ?rt2("check and maybe incr - incr", [Direction, Val, Limit]),
-%% megaco_config:incr_pending_counter(Direction, TransId),
-%% if
-%% Val < Limit ->
-%% ok; % Still within the limit
-%% true ->
-%% ?rt2("check and maybe incr - error",
-%% [Direction, Val, Limit]),
-%% error % Passed the limit
-%% end
-%% end.
-
-
%% BUGBUG BUGBUG BUGBUG
%%
%% Do we know that the Rep is still valid? A previous transaction
@@ -2648,33 +2607,84 @@ handle_reply(
handle_reply(#conn_data{conn_handle = CH} = CD, T, Extra) ->
TransId = to_local_trans_id(CD),
?rt2("handle reply", [T, TransId]),
- case megaco_monitor:lookup_request(TransId) of
- [Req] when (is_record(Req, request) andalso
- (CD#conn_data.cancel =:= true)) ->
+ case {megaco_monitor:request_lockcnt_inc(TransId),
+ megaco_monitor:lookup_request(TransId)} of
+ {_Cnt, [Req]} when (is_record(Req, request) andalso
+ (CD#conn_data.cancel =:= true)) ->
?TC_AWAIT_REPLY_EVENT(true),
+ ?report_trace(CD, "trans reply - cancel(1)", [T]),
do_handle_reply_cancel(CD, Req, T);
- [#request{remote_mid = RMid} = Req] when ((RMid =:= preliminary_mid) orelse
- (RMid =:= CH#megaco_conn_handle.remote_mid)) ->
+ {Cnt, [#request{remote_mid = RMid} = Req]} when
+ ((Cnt =:= 1) andalso
+ ((RMid =:= preliminary_mid) orelse
+ (RMid =:= CH#megaco_conn_handle.remote_mid))) ->
+ ?TC_AWAIT_REPLY_EVENT(false),
+	    %% Just in case conn_data got updated after our lookup
+ %% but before we looked up the request record, we
+ %% check the cancel field again.
+ case megaco_config:conn_info(CD, cancel) of
+ true ->
+ ?report_trace(CD, "trans reply - cancel(2)", [T]),
+ megaco_monitor:request_lockcnt_del(TransId),
+ do_handle_reply_cancel(CD, Req, T);
+ false ->
+ ?report_trace(CD, "trans reply", [T]),
+ do_handle_reply(CD, Req, TransId, T, Extra)
+ end;
+
+ {Cnt, [#request{remote_mid = RMid} = _Req]} when
+ (is_integer(Cnt) andalso
+ ((RMid =:= preliminary_mid) orelse
+ (RMid =:= CH#megaco_conn_handle.remote_mid))) ->
+ ?TC_AWAIT_REPLY_EVENT(false),
+ %% Ok, someone got there before me, now what?
+	    %% This is a plain old race condition
+	    ?report_important(CD, "trans reply - race condition",
+ [T, {request_lockcnt, Cnt}]),
+ megaco_monitor:request_lockcnt_dec(TransId);
+
+ %% no counter
+ {_Cnt, [#request{remote_mid = RMid} = Req]} when
+ ((RMid =:= preliminary_mid) orelse
+ (RMid =:= CH#megaco_conn_handle.remote_mid)) ->
?TC_AWAIT_REPLY_EVENT(false),
+	    %% The counter does not exist.
+	    %% This can only mean a code upgrade race condition.
+	    %% That is, this request record was created before
+	    %% this feature (the counters) was introduced.
+	    %% The simplest solution is to behave exactly as
+	    %% before, that is, pass it along and leave it to the
+	    %% user to figure out.
+
%% Just in case conn_data got update after our lookup
%% but before we looked up the request record, we
%% check the cancel field again.
+ ?report_verbose(CD, "trans reply - old style", [T]),
case megaco_config:conn_info(CD, cancel) of
true ->
+ megaco_monitor:request_lockcnt_del(TransId),
do_handle_reply_cancel(CD, Req, T);
false ->
do_handle_reply(CD, Req, TransId, T, Extra)
end;
- [#request{user_mod = UserMod,
- user_args = UserArgs,
- reply_action = Action,
- reply_data = UserData,
- remote_mid = RMid}] ->
+ {Cnt, [#request{user_mod = UserMod,
+ user_args = UserArgs,
+ reply_action = Action,
+ reply_data = UserData,
+ remote_mid = RMid}]} ->
?report_trace(CD,
"received trans reply with invalid remote mid",
- [T, RMid]),
+ [{transaction, T},
+ {remote_mid, RMid},
+ {request_lockcnt, Cnt}]),
+ if
+ is_integer(Cnt) ->
+ megaco_monitor:request_lockcnt_dec(TransId);
+ true ->
+ ok
+ end,
WrongMid = CH#megaco_conn_handle.remote_mid,
T2 = transform_transaction_reply_enc(CD#conn_data.protocol_version,
T),
@@ -2685,7 +2695,15 @@ handle_reply(#conn_data{conn_handle = CH} = CD, T, Extra) ->
reply_data = UserData},
return_reply(CD2, TransId, UserReply, Extra);
- [] ->
+ {Cnt, []} when is_integer(Cnt) ->
+ ?TC_AWAIT_REPLY_EVENT(undefined),
+ ?report_trace(CD, "trans reply (no receiver)",
+ [T, {request_lockcnt, Cnt}]),
+ megaco_monitor:request_lockcnt_dec(TransId),
+ return_unexpected_trans(CD, T, Extra);
+
+ %% No counter
+ {_Cnt, []} ->
?TC_AWAIT_REPLY_EVENT(undefined),
?report_trace(CD, "trans reply (no receiver)", [T]),
return_unexpected_trans(CD, T, Extra)
@@ -2716,6 +2734,7 @@ do_handle_reply(CD,
%% This is the first reply (maybe of many)
megaco_monitor:delete_request(TransId),
+ megaco_monitor:request_lockcnt_del(TransId),
megaco_monitor:cancel_apply_after(Ref), % OTP-4843
megaco_config:del_pending_counter(recv, TransId), % OTP-7189
@@ -3739,6 +3758,11 @@ insert_requests(ConnData, ConnHandle,
insert_request(ConnData, ConnHandle, TransId,
Action, Data, InitTimer, LongTimer) ->
+    %% We don't check the result of the lock-counter creation because
+ %% the only way it could already exist is if the transaction-id
+ %% range has wrapped and an old counter was not deleted.
+ megaco_monitor:request_lockcnt_cre(TransId),
+
#megaco_conn_handle{remote_mid = RemoteMid} = ConnHandle,
#conn_data{protocol_version = Version,
user_mod = UserMod,
@@ -4323,6 +4347,7 @@ cancel_request(ConnData, Req, Reason) ->
cancel_request2(ConnData, TransId, UserReply) ->
megaco_monitor:delete_request(TransId),
+ megaco_monitor:request_lockcnt_del(TransId),
megaco_config:del_pending_counter(recv, TransId), % OTP-7189
Serial = TransId#trans_id.serial,
ConnData2 = ConnData#conn_data{serial = Serial},
@@ -4380,29 +4405,67 @@ receive_reply_remote(ConnData, UserReply) ->
receive_reply_remote(ConnData, UserReply, Extra) ->
TransId = to_local_trans_id(ConnData),
- case (catch megaco_monitor:lookup_request(TransId)) of
- [#request{timer_ref = {_Type, Ref}} = Req] -> %% OTP-4843
+ case {megaco_monitor:request_lockcnt_inc(TransId),
+ (catch megaco_monitor:lookup_request(TransId))} of
+ {Cnt, [Req]} when (Cnt =:= 1) andalso is_record(Req, request) ->
%% Don't care about Req and Rep version diff
- megaco_monitor:delete_request(TransId),
- megaco_monitor:cancel_apply_after(Ref), % OTP-4843
- megaco_config:del_pending_counter(recv, TransId), % OTP-7189
-
- UserMod = Req#request.user_mod,
- UserArgs = Req#request.user_args,
- Action = Req#request.reply_action,
- UserData = Req#request.reply_data,
- ConnData2 = ConnData#conn_data{user_mod = UserMod,
- user_args = UserArgs,
- reply_action = Action,
- reply_data = UserData},
- return_reply(ConnData2, TransId, UserReply, Extra);
-
+ do_receive_reply_remote(ConnData, TransId, Req, UserReply, Extra);
+
+ {Cnt, [Req]} when is_integer(Cnt) andalso is_record(Req, request) ->
+	    %% Another process is accessing it; handle as unexpected
+	    %% (so it has a possibility to be logged).
+ ?report_important(ConnData, "trans reply (no receiver)",
+ [{user_reply, UserReply},
+ {request_lockcnt, Cnt}]),
+ megaco_monitor:request_lockcnt_dec(TransId),
+ return_unexpected_trans_reply(ConnData, TransId, UserReply, Extra);
+
+ %% no counter
+ {_Cnt, [Req]} when is_record(Req, request) ->
+ %% The counter does not exist.
+	    %% This can only mean a code upgrade race condition.
+	    %% That is, this request record was created before
+	    %% this feature (the counters) was introduced.
+	    %% The simplest solution to this is to behave exactly as
+	    %% before, that is, pass it along and leave it to the
+	    %% user to figure out.
+ ?report_trace(ConnData,
+ "remote reply - "
+			  "code upgrade race condition",
+ [{user_reply, UserReply}]),
+ do_receive_reply_remote(ConnData, TransId, Req, UserReply, Extra);
+
+ {Cnt, _} when is_integer(Cnt) ->
+ ?report_trace(ConnData, "trans reply (no receiver)",
+ [{user_reply, UserReply}, {request_lockcnt, Cnt}]),
+ megaco_monitor:request_lockcnt_dec(TransId),
+ return_unexpected_trans_reply(ConnData, TransId, UserReply, Extra);
+
_ ->
?report_trace(ConnData, "remote reply (no receiver)",
- [UserReply]),
+ [{user_reply, UserReply}]),
return_unexpected_trans_reply(ConnData, TransId, UserReply, Extra)
end.
+do_receive_reply_remote(ConnData, TransId,
+ #request{timer_ref = {_Type, Ref},
+ user_mod = UserMod,
+ user_args = UserArgs,
+ reply_action = Action,
+ reply_data = UserData} = _Req,
+ UserReply, Extra) ->
+ megaco_monitor:delete_request(TransId),
+ megaco_monitor:request_lockcnt_del(TransId),
+ megaco_monitor:cancel_apply_after(Ref), % OTP-4843
+ megaco_config:del_pending_counter(recv, TransId), % OTP-7189
+
+ ConnData2 = ConnData#conn_data{user_mod = UserMod,
+ user_args = UserArgs,
+ reply_action = Action,
+ reply_data = UserData},
+ return_reply(ConnData2, TransId, UserReply, Extra).
+
+
cancel_reply(ConnData, #reply{state = waiting_for_ack,
user_mod = UserMod,
user_args = UserArgs} = Rep, Reason) ->
diff --git a/lib/megaco/src/engine/megaco_monitor.erl b/lib/megaco/src/engine/megaco_monitor.erl
index f95a20cf58..29275371be 100644
--- a/lib/megaco/src/engine/megaco_monitor.erl
+++ b/lib/megaco/src/engine/megaco_monitor.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2000-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2000-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -51,6 +51,11 @@
update_request_field/3, update_request_fields/2,
delete_request/1,
+ request_lockcnt_cre/1,
+ request_lockcnt_del/1,
+ request_lockcnt_inc/1,
+ request_lockcnt_dec/1,
+
lookup_reply/1,
lookup_reply_field/2,
match_replies/1,
@@ -115,6 +120,24 @@ update_request_fields(Key, NewFields) when is_list(NewFields) ->
delete_request(Key) ->
ets:delete(megaco_requests, Key).
+
+request_lockcnt_cre(TransId) ->
+ Key = {TransId, lockcnt},
+ ets:insert_new(megaco_requests, {Key, 1}).
+
+request_lockcnt_del(TransId) ->
+ Key = {TransId, lockcnt},
+ ets:delete(megaco_requests, Key).
+
+request_lockcnt_inc(TransId) ->
+ Key = {TransId, lockcnt},
+ (catch ets:update_counter(megaco_requests, Key, 1)).
+
+request_lockcnt_dec(TransId) ->
+ Key = {TransId, lockcnt},
+ (catch ets:update_counter(megaco_requests, Key, -1)).
+
+
lookup_reply(Key) ->
ets:lookup(megaco_replies, Key).
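The four request_lockcnt_* functions added above implement a simple ownership counter for a request record, built on ets:insert_new/2 and ets:update_counter/3. A hedged sketch of the protocol as megaco_messenger uses it; do_work/1 and old_style/1 are hypothetical placeholders for the real reply handling:

    handle(TransId) ->
        case megaco_monitor:request_lockcnt_inc(TransId) of
            1 ->
                %% We were first: process the reply, then drop the counter.
                do_work(TransId),
                megaco_monitor:request_lockcnt_del(TransId);
            Cnt when is_integer(Cnt) ->
                %% Another process got there before us; undo our increment.
                megaco_monitor:request_lockcnt_dec(TransId);
            {'EXIT', _} ->
                %% No counter exists, e.g. a request record created before
                %% the upgrade that introduced the counters.
                old_style(TransId)
        end.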
diff --git a/lib/megaco/vsn.mk b/lib/megaco/vsn.mk
index cf5957460d..efb46253aa 100644
--- a/lib/megaco/vsn.mk
+++ b/lib/megaco/vsn.mk
@@ -18,11 +18,13 @@
# %CopyrightEnd%
APPLICATION = megaco
-MEGACO_VSN = 3.14.1
+MEGACO_VSN = 3.14.1.1
PRE_VSN =
APP_VSN = "$(APPLICATION)-$(MEGACO_VSN)$(PRE_VSN)"
-TICKETS = OTP-8561 OTP-8627
+TICKETS = OTP-8696
+
+TICKETS_3_14_1 = OTP-8529 OTP-8561 OTP-8627 OTP-8634
TICKETS_3_14 = OTP-8317 OTP-8323 OTP-8328 OTP-8362 OTP-8403
diff --git a/lib/mnesia/doc/src/notes.xml b/lib/mnesia/doc/src/notes.xml
index 66242398d9..b0bead0ba0 100644
--- a/lib/mnesia/doc/src/notes.xml
+++ b/lib/mnesia/doc/src/notes.xml
@@ -37,6 +37,22 @@
bugfixes for every release of Mnesia. Each release of Mnesia
thus constitutes one section in this document. The title of each
section is the version number of Mnesia.</p>
+
+ <section><title>Mnesia 4.4.14</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Added mnesia:subscribe(activity) contributed by Bernard
+ Duggan.</p>
+ <p>
+ Own Id: OTP-8519</p>
+ </item>
+ </list>
+ </section>
+
+ </section>
<section><title>Mnesia 4.4.13</title>
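A hedged sketch of using the activity subscription mentioned in the release note above. The exact event term is not shown in this diff, so the {mnesia_activity_event, _} shape below is an assumption:

    watch_activities() ->
        {ok, _Node} = mnesia:subscribe(activity),
        activity_loop().

    activity_loop() ->
        receive
            %% Assumed event shape; consult the mnesia documentation for
            %% the exact term delivered for activity events.
            {mnesia_activity_event, Event} ->
                io:format("activity event: ~p~n", [Event]),
                activity_loop()
        end.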
diff --git a/lib/mnesia/examples/mnesia_meter.erl b/lib/mnesia/examples/mnesia_meter.erl
index ea74d8691b..68094c4431 100644
--- a/lib/mnesia/examples/mnesia_meter.erl
+++ b/lib/mnesia/examples/mnesia_meter.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -407,7 +407,7 @@ run(Nodes, Config, FunOverhead) ->
stop(Nodes),
Res.
-run_meter(M, Nodes, FunOverhead) when record(M, meter) ->
+run_meter(M, Nodes, FunOverhead) when is_record(M, meter) ->
io:format(".", []),
case catch init_records(M#meter.init, ?TIMES) of
{atomic, ok} ->
diff --git a/lib/mnesia/src/mnesia.appup.src b/lib/mnesia/src/mnesia.appup.src
index b3b9297db2..47c9bf9979 100644
--- a/lib/mnesia/src/mnesia.appup.src
+++ b/lib/mnesia/src/mnesia.appup.src
@@ -1,69 +1,7 @@
%% -*- erlang -*-
{"%VSN%",
- [
- {"4.4.12",
- [
- {update, mnesia, soft, soft_purge, soft_purge, []},
- {update, mnesia_loader, soft, soft_purge, soft_purge, []},
- {update, mnesia_monitor, soft, soft_purge, soft_purge, []},
- {update, mnesia_tm, soft, soft_purge, soft_purge, []}
- ]
- },
- {"4.4.11",
- [
- {update, mnesia, soft, soft_purge, soft_purge, []},
- {update, mnesia_loader, soft, soft_purge, soft_purge, []},
- {update, mnesia_monitor, soft, soft_purge, soft_purge, []},
- {update, mnesia_tm, soft, soft_purge, soft_purge, []},
- {update, mnesia_locker, soft, soft_purge, soft_purge, []},
- {update, mnesia_controller, soft, soft_purge, soft_purge, []}
- ]
- },
- {"4.4.10",
- [
- {update, mnesia, soft, soft_purge, soft_purge, []},
- {update, mnesia_loader, soft, soft_purge, soft_purge, []},
- {update, mnesia_monitor, soft, soft_purge, soft_purge, []},
- {update, mnesia_tm, soft, soft_purge, soft_purge, []},
- {update, mnesia_locker, soft, soft_purge, soft_purge, []},
- {update, mnesia_controller, soft, soft_purge, soft_purge, []}
- ]
- },
- {"4.4.9", [{restart_application, mnesia}]},
- {"4.4.8", [{restart_application, mnesia}]},
- {"4.4.7", [{restart_application, mnesia}]}
+ [
],
[
- {"4.4.12",
- [
- {update, mnesia, soft, soft_purge, soft_purge, []},
- {update, mnesia_loader, soft, soft_purge, soft_purge, []},
- {update, mnesia_monitor, soft, soft_purge, soft_purge, []},
- {update, mnesia_tm, soft, soft_purge, soft_purge, []}
- ]
- },
- {"4.4.11",
- [
- {update, mnesia, soft, soft_purge, soft_purge, []},
- {update, mnesia_loader, soft, soft_purge, soft_purge, []},
- {update, mnesia_monitor, soft, soft_purge, soft_purge, []},
- {update, mnesia_tm, soft, soft_purge, soft_purge, []},
- {update, mnesia_locker, soft, soft_purge, soft_purge, []},
- {update, mnesia_controller, soft, soft_purge, soft_purge, []}
- ]
- },
- {"4.4.10",
- [
- {update, mnesia, soft, soft_purge, soft_purge, []},
- {update, mnesia_loader, soft, soft_purge, soft_purge, []},
- {update, mnesia_monitor, soft, soft_purge, soft_purge, []},
- {update, mnesia_tm, soft, soft_purge, soft_purge, []},
- {update, mnesia_locker, soft, soft_purge, soft_purge, []},
- {update, mnesia_controller, soft, soft_purge, soft_purge, []}
- ]
- },
- {"4.4.9", [{restart_application, mnesia}]},
- {"4.4.8", [{restart_application, mnesia}]},
- {"4.4.7", [{restart_application, mnesia}]}
]
}.
diff --git a/lib/mnesia/src/mnesia_controller.erl b/lib/mnesia/src/mnesia_controller.erl
index 9bc480e619..0298b382a6 100644
--- a/lib/mnesia/src/mnesia_controller.erl
+++ b/lib/mnesia/src/mnesia_controller.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -52,6 +52,7 @@
async_dump_log/1,
sync_dump_log/1,
connect_nodes/1,
+ connect_nodes/2,
wait_for_schema_commit_lock/0,
release_schema_commit_lock/0,
create_table/1,
@@ -94,7 +95,7 @@
load_and_reply/2,
send_and_reply/2,
wait_for_tables_init/2,
- connect_nodes2/2
+ connect_nodes2/3
]).
-import(mnesia_lib, [set/2, add/2]).
@@ -420,12 +421,15 @@ try_schedule_late_disc_load(Tabs, Reason, MsgTag) ->
[[Tabs, Reason, MsgTag], AbortReason])
end.
-connect_nodes(Ns) ->
+connect_nodes(Ns) ->
+ connect_nodes(Ns, fun default_merge/1).
+
+connect_nodes(Ns, UserFun) ->
case mnesia:system_info(is_running) of
no ->
{error, {node_not_running, node()}};
yes ->
- Pid = spawn_link(?MODULE,connect_nodes2,[self(),Ns]),
+ Pid = spawn_link(?MODULE,connect_nodes2,[self(),Ns, UserFun]),
receive
{?MODULE, Pid, Res, New} ->
case Res of
@@ -443,7 +447,7 @@ connect_nodes(Ns) ->
end
end.
-connect_nodes2(Father, Ns) ->
+connect_nodes2(Father, Ns, UserFun) ->
Current = val({current, db_nodes}),
abcast([node()|Ns], {merging_schema, node()}),
{NewC, OldC} = mnesia_recover:connect_nodes(Ns),
@@ -451,7 +455,7 @@ connect_nodes2(Father, Ns) ->
New1 = mnesia_lib:intersect(Ns, Connected),
New = New1 -- Current,
process_flag(trap_exit, true),
- Res = try_merge_schema(New),
+ Res = try_merge_schema(New, UserFun),
Msg = {schema_is_merged, [], late_merge, []},
multicall([node()|Ns], Msg),
After = val({current, db_nodes}),
@@ -465,7 +469,7 @@ connect_nodes2(Father, Ns) ->
merge_schema() ->
AllNodes = mnesia_lib:all_nodes(),
- case try_merge_schema(AllNodes) of
+ case try_merge_schema(AllNodes, fun default_merge/1) of
ok ->
schema_is_merged();
{aborted, {throw, Str}} when is_list(Str) ->
@@ -474,8 +478,11 @@ merge_schema() ->
fatal("Failed to merge schema: ~p~n", [Else])
end.
-try_merge_schema(Nodes) ->
- case mnesia_schema:merge_schema() of
+default_merge(F) ->
+ F([]).
+
+try_merge_schema(Nodes, UserFun) ->
+ case mnesia_schema:merge_schema(UserFun) of
{atomic, not_merged} ->
%% No more nodes that we need to merge the schema with
ok;
@@ -488,11 +495,11 @@ try_merge_schema(Nodes) ->
im_running(OldFriends, NewFriends),
im_running(NewFriends, OldFriends),
- try_merge_schema(Nodes);
+ try_merge_schema(Nodes, UserFun);
{atomic, {"Cannot get cstructs", Node, Reason}} ->
dbg_out("Cannot get cstructs, Node ~p ~p~n", [Node, Reason]),
timer:sleep(1000), % Avoid a endless loop look alike
- try_merge_schema(Nodes);
+ try_merge_schema(Nodes, UserFun);
Other ->
Other
end.
@@ -1842,17 +1849,20 @@ reply(ReplyTo, Reply) ->
add_worker(Worker = #dump_log{}, State) ->
InitBy = Worker#dump_log.initiated_by,
Queue = State#state.dumper_queue,
- case lists:keymember(InitBy, #dump_log.initiated_by, Queue) of
- true when Worker#dump_log.opt_reply_to == undefined ->
- %% The same threshold has been exceeded again,
- %% before we have had the possibility to
- %% process the older one.
- DetectedBy = {dump_log, InitBy},
- Event = {mnesia_overload, DetectedBy},
- mnesia_lib:report_system_event(Event);
- _ ->
- ignore
- end,
+ Status =
+ case lists:keymember(InitBy, #dump_log.initiated_by, Queue) of
+ true when Worker#dump_log.opt_reply_to == undefined ->
+ %% The same threshold has been exceeded again,
+ %% before we have had the possibility to
+ %% process the older one.
+ DetectedBy = {dump_log, InitBy},
+ Event = {mnesia_overload, DetectedBy},
+ mnesia_lib:report_system_event(Event),
+ true;
+ _ ->
+ false
+ end,
+ mnesia_recover:log_dump_overload(Status),
Queue2 = Queue ++ [Worker],
State2 = State#state{dumper_queue = Queue2},
opt_start_worker(State2);
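connect_nodes/2 above lets the caller wrap the schema merge in a user-supplied fun; the default wrapper (default_merge/1) simply applies the merge fun to an empty list. A hedged sketch of a custom wrapper that only adds logging around the merge (the meaning of the list argument is not shown in this diff, so the default empty list is kept):

    connect_with_logging(Nodes) ->
        UserFun = fun(MergeFun) ->
                          error_logger:info_msg("merging schema with ~p~n", [Nodes]),
                          MergeFun([])
                  end,
        mnesia_controller:connect_nodes(Nodes, UserFun).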
diff --git a/lib/mnesia/src/mnesia_lib.erl b/lib/mnesia/src/mnesia_lib.erl
index dba808e66e..3da3dd2f5c 100644
--- a/lib/mnesia/src/mnesia_lib.erl
+++ b/lib/mnesia/src/mnesia_lib.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -113,6 +113,9 @@
mkcore/1,
not_active_here/1,
other_val/2,
+ overload_read/0,
+ overload_read/1,
+ overload_set/2,
pad_name/3,
random_time/2,
read_counter/1,
@@ -551,6 +554,33 @@ cs_to_nodes(Cs) ->
Cs#cstruct.disc_only_copies ++
Cs#cstruct.disc_copies ++
Cs#cstruct.ram_copies.
+
+overload_types() ->
+ [mnesia_tm, mnesia_dump_log].
+
+valid_overload_type(T) ->
+ case lists:member(T, overload_types()) of
+ false ->
+ erlang:error(bad_type);
+ true ->
+ true
+ end.
+
+overload_set(Type, Bool) when is_boolean(Bool) ->
+ valid_overload_type(Type),
+ set({overload, Type}, Bool).
+
+overload_read() ->
+ [{T, overload_read(T)} || T <- overload_types()].
+
+overload_read(T) ->
+ case ?catch_val({overload, T}) of
+ {'EXIT',_} ->
+ valid_overload_type(T),
+ false;
+ Flag when is_boolean(Flag) ->
+ Flag
+ end.
dist_coredump() ->
dist_coredump(all_nodes()).
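
The functions added above turn those overload reports into state that can be polled: overload_set/2 stores a boolean per type and overload_read/0,1 returns it, calling erlang:error(bad_type) for anything outside overload_types/0. A small shell sketch (not part of the patch; the values shown assume no overload has been reported yet):

    1> mnesia_lib:overload_read().
    [{mnesia_tm,false},{mnesia_dump_log,false}]
    2> mnesia_lib:overload_read(mnesia_dump_log).
    false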
diff --git a/lib/mnesia/src/mnesia_recover.erl b/lib/mnesia/src/mnesia_recover.erl
index 6c53c2e752..0ca7bf3f7f 100644
--- a/lib/mnesia/src/mnesia_recover.erl
+++ b/lib/mnesia/src/mnesia_recover.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -36,6 +36,7 @@
incr_trans_tid_serial/0,
init/0,
log_decision/1,
+ log_dump_overload/1,
log_master_nodes/3,
log_mnesia_down/1,
log_mnesia_up/1,
@@ -70,6 +71,7 @@
unclear_decision,
unclear_waitfor,
tm_queue_len = 0,
+ log_dump_overload = false,
initiated = false,
early_msgs = []
}).
@@ -277,6 +279,9 @@ mnesia_down(Node) ->
cast({mnesia_down, Node})
end.
+log_dump_overload(Flag) when is_boolean(Flag) ->
+ cast({log_dump_overload, Flag}).
+
log_master_nodes(Args, UseDir, IsRunning) ->
if
IsRunning == yes ->
@@ -818,6 +823,12 @@ handle_cast({announce_all, Nodes}, State) ->
announce_all(Nodes),
{noreply, State};
+handle_cast({log_dump_overload, Flag}, State) when is_boolean(Flag) ->
+ Prev = State#state.log_dump_overload,
+ Overload = Prev orelse Flag,
+ mnesia_lib:overload_set(mnesia_dump_log, Overload),
+ {noreply, State#state{log_dump_overload = Flag}};
+
handle_cast(Msg, State) ->
error("~p got unexpected cast: ~p~n", [?MODULE, Msg]),
{noreply, State}.
@@ -851,12 +862,14 @@ handle_info(check_overload, S) ->
Len > Threshold, Prev > Threshold ->
What = {mnesia_tm, message_queue_len, [Prev, Len]},
mnesia_lib:report_system_event({mnesia_overload, What}),
+ mnesia_lib:overload_set(mnesia_tm, true),
{noreply, S#state{tm_queue_len = 0}};
Len > Threshold ->
{noreply, S#state{tm_queue_len = Len}};
true ->
+ mnesia_lib:overload_set(mnesia_tm, false),
{noreply, S#state{tm_queue_len = 0}}
end;
undefined ->
@@ -905,7 +918,23 @@ terminate(Reason, State) ->
%% Purpose: Upgrade process when its code is to be changed
%% Returns: {ok, NewState}
%%----------------------------------------------------------------------
-code_change(_OldVsn, State, _Extra) ->
+code_change(_OldVsn, {state,
+ Supervisor,
+ Unclear_pid,
+ Unclear_decision,
+ Unclear_waitfor,
+ Tm_queue_len,
+ Initiated,
+ Early_msgs
+ }, _Extra) ->
+ {ok, #state{supervisor = Supervisor,
+ unclear_pid = Unclear_pid,
+ unclear_decision = Unclear_decision,
+ unclear_waitfor = Unclear_waitfor,
+ tm_queue_len = Tm_queue_len,
+ initiated = Initiated,
+ early_msgs = Early_msgs}};
+code_change(_OldVsn, #state{} = State, _Extra) ->
{ok, State}.
%%%----------------------------------------------------------------------
diff --git a/lib/mnesia/src/mnesia_schema.erl b/lib/mnesia/src/mnesia_schema.erl
index 354431a296..17e570b881 100644
--- a/lib/mnesia/src/mnesia_schema.erl
+++ b/lib/mnesia/src/mnesia_schema.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -62,6 +62,7 @@
list2cs/1,
lock_schema/0,
merge_schema/0,
+ merge_schema/1,
move_table/3,
opt_create_dir/2,
prepare_commit/3,
@@ -2650,10 +2651,16 @@ make_dump_tables([]) ->
%% Merge the local schema with the schema on other nodes
merge_schema() ->
- schema_transaction(fun() -> do_merge_schema() end).
+ schema_transaction(fun() -> do_merge_schema([]) end).
+
+merge_schema(UserFun) ->
+ schema_transaction(fun() -> UserFun(fun(Arg) -> do_merge_schema(Arg) end) end).
-do_merge_schema() ->
+
+do_merge_schema(LockTabs0) ->
{_Mod, Tid, Ts} = get_tid_ts_and_lock(schema, write),
+ LockTabs = [{T, tab_to_nodes(T)} || T <- LockTabs0],
+ [get_tid_ts_and_lock(T,write) || {T,_} <- LockTabs],
Connected = val(recover_nodes),
Running = val({current, db_nodes}),
Store = Ts#tidstore.store,
@@ -2665,9 +2672,12 @@ do_merge_schema() ->
mnesia:abort({bad_commit, {missing_lock, Miss}})
end,
case Connected -- Running of
- [Node | _] ->
+ [Node | _] = OtherNodes ->
%% Time for a schema merging party!
mnesia_locker:wlock_no_exist(Tid, Store, schema, [Node]),
+ [mnesia_locker:wlock_no_exist(
+ Tid, Store, T, mnesia_lib:intersect(Ns, OtherNodes))
+ || {T,Ns} <- LockTabs],
case rpc:call(Node, mnesia_controller, get_cstructs, []) of
{cstructs, Cstructs, RemoteRunning1} ->
LockedAlready = Running ++ [Node],
@@ -2681,6 +2691,9 @@ do_merge_schema() ->
end,
NeedsLock = RemoteRunning -- LockedAlready,
mnesia_locker:wlock_no_exist(Tid, Store, schema, NeedsLock),
+ [mnesia_locker:wlock_no_exist(Tid, Store, T,
+ mnesia_lib:intersect(Ns,NeedsLock))
+ || {T,Ns} <- LockTabs],
{value, SchemaCs} =
lists:keysearch(schema, #cstruct.name, Cstructs),
@@ -2714,6 +2727,10 @@ do_merge_schema() ->
not_merged
end.
+tab_to_nodes(Tab) when is_atom(Tab) ->
+ Cs = val({Tab, cstruct}),
+ mnesia_lib:cs_to_nodes(Cs).
+
make_merge_schema(Node, [Cs | Cstructs]) ->
Ops = do_make_merge_schema(Node, Cs),
Ops ++ make_merge_schema(Node, Cstructs);
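
The new merge_schema/1 above inverts control: the caller supplies a fun that receives the actual merge as a continuation and invokes it with the extra tables to write-lock for the duration of the schema merge; default_merge/1 and connect_nodes2/3 earlier in this patch thread such a fun down via try_merge_schema/2, and the default locks nothing extra. A hedged sketch of the contract, with my_tab as a placeholder table name:

    %% Not part of the patch: the shape of a UserFun handed to merge_schema/1.
    %% DoMerge is bound to fun(Arg) -> do_merge_schema(Arg) end, so the list
    %% passed here becomes LockTabs0 and those tables are write-locked on the
    %% newly connected nodes while the schemas are merged.
    UserFun = fun(DoMerge) -> DoMerge([my_tab]) end,
    mnesia_schema:merge_schema(UserFun).

The return value mirrors merge_schema/0, i.e. {atomic, Result} or {aborted, Reason} from the surrounding schema transaction.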
diff --git a/lib/mnesia/test/Makefile b/lib/mnesia/test/Makefile
new file mode 100644
index 0000000000..a4f32e3f78
--- /dev/null
+++ b/lib/mnesia/test/Makefile
@@ -0,0 +1,118 @@
+#
+# %CopyrightBegin%
+#
+# Copyright Ericsson AB 1996-2009. All Rights Reserved.
+#
+# The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved online at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# %CopyrightEnd%
+#
+include $(ERL_TOP)/make/target.mk
+include $(ERL_TOP)/make/$(TARGET)/otp.mk
+
+# ----------------------------------------------------
+# Target Specs
+# ----------------------------------------------------
+
+MODULES= \
+ mt \
+ mnesia_SUITE \
+ mnesia_test_lib \
+ mnesia_install_test \
+ mnesia_registry_test \
+ mnesia_config_test \
+ mnesia_frag_test \
+ mnesia_inconsistent_database_test \
+ mnesia_config_backup \
+ mnesia_config_event \
+ mnesia_examples_test \
+ mnesia_nice_coverage_test \
+ mnesia_evil_coverage_test \
+ mnesia_evil_backup \
+ mnesia_trans_access_test \
+ mnesia_dirty_access_test \
+ mnesia_atomicity_test \
+ mnesia_consistency_test \
+ mnesia_isolation_test \
+ mnesia_durability_test \
+ mnesia_recovery_test \
+ mnesia_qlc_test \
+ mnesia_schema_recovery_test \
+ mnesia_measure_test \
+ mnesia_cost \
+ mnesia_dbn_meters
+
+MnesiaExamplesDir := ../examples
+
+ExampleModules = \
+ company \
+ company_o \
+ bup \
+ mnesia_meter \
+ mnesia_tpcb
+ExamplesHrl = \
+ company.hrl \
+ company_o.hrl
+
+ERL_FILES= $(MODULES:%=%.erl) $(ExampleModules:%=$(MnesiaExamplesDir)/%.erl)
+
+HRL_FILES= mnesia_test_lib.hrl $(ExamplesHrl:%=$(MnesiaExamplesDir)/%)
+
+TARGET_FILES= \
+ $(MODULES:%=$(EBIN)/%.$(EMULATOR)) $(ExampleModules:%=$(EBIN)/%.$(EMULATOR))
+
+INSTALL_PROGS= $(TARGET_FILES)
+
+# ----------------------------------------------------
+# Release directory specification
+# ----------------------------------------------------
+RELSYSDIR = $(RELEASE_PATH)/mnesia_test
+
+# ----------------------------------------------------
+# FLAGS
+# ----------------------------------------------------
+#ERL_COMPILE_FLAGS +=
+
+EBIN = .
+
+# ----------------------------------------------------
+# Targets
+# ----------------------------------------------------
+
+tests debug opt: $(TARGET_FILES)
+
+$(EBIN)/%.beam: $(MnesiaExamplesDir)/%.erl
+ $(ERLC) -bbeam $(ERL_COMPILE_FLAGS) -o$(EBIN) $<
+
+clean:
+ rm -f $(TARGET_FILES)
+ rm -f core
+
+docs:
+
+# ----------------------------------------------------
+# Release Target
+# ----------------------------------------------------
+include $(ERL_TOP)/make/otp_release_targets.mk
+
+release_spec: opt
+
+release_tests_spec: opt
+ $(INSTALL_DIR) $(RELSYSDIR)
+ $(INSTALL_DATA) mnesia.spec mnesia.spec.vxworks $(ERL_FILES) $(HRL_FILES) $(RELSYSDIR)
+ $(INSTALL_PROGRAM) mt $(INSTALL_PROGS) $(RELSYSDIR)
+# chmod -f -R u+w $(RELSYSDIR)
+# @tar cf - *_SUITE_data | (cd $(RELSYSDIR); tar xf -)
+
+release_docs_spec:
+
+
diff --git a/lib/mnesia/test/README b/lib/mnesia/test/README
new file mode 100644
index 0000000000..e0ced7399d
--- /dev/null
+++ b/lib/mnesia/test/README
@@ -0,0 +1,107 @@
+This directory contains the test suite of Mnesia.
+Compile it with "erl -make".
+
+Test cases are identified with a {Mod, Fun} tuple that maps
+to a function Mod:Fun(Config), where the test case hopefully
+is implemented. The test suite is organized in a hierarchy
+with {mnesia_SUITE, all} as the top.
+
+The module called mt implements various convenience functions
+to ease the execution of test cases. It also provides
+aliases for some test cases. For example, the atom Mod is an
+alias for {Mod, all}, the atom all for {mnesia_SUITE, all},
+evil for mnesia_evil_coverage_test, etc.
+
+ mt:struct(TestCase)
+
+ Displays the test case structure from TestCase
+ and downwards in the hierarchy. E.g. mt:struct(all)
+ will display the entire test suite.
+
+ mt:t(TestCase), mt:t(TestCase, Config)
+
+ Runs a single test case or a hierarchy of test cases.
+ mt:t(silly) is a good starter, but you may also
+ try mt:t(all) directly if you feel lucky.
+
+ The identity of the last run test case and its outcome
+ are stored on file. mt:t() will re-run the last test case.
+
+ The Config argument contains various configuration
+ parameters for the test cases, such as which nodes
+ are available for running the test suite. The default
+ settings should be enough for most purposes. Use mt:read_config()
+ to get the current default settings and change them with
+ mt:write_config(Config).
+
+ mt:doc(TestCase)
+
+ Generates HTML documentation for the test suite.
+
+In order to be able to run the test suite, the Erlang node must
+be started with the distribution enabled and the code path must
+be set to the mnesia/ebin, mnesia/examples, and mnesia/test
+directories. E.g. the following would do:
+
+ erl -sname a -pa $top/examples -pa $top/test -pa $top/ebin
+
+where $top is the path to the Mnesia installation. Many test
+cases need 2 or 3 nodes. The node names may explicitly be
+stated as test suite configuration parameters, but by default
+the extra node names are generated. In this example the names
+will be: a, a1 and a2. It is enough to start the first node
+manually; the extra nodes will be started automatically if
+necessary.
+
+The attached UNIX shell script mt does not work on all
+platforms, but it may be used as a source of inspiration. It
+starts three Erlang nodes, each in its own xterm. The main xterm
+(a@localhost) logs all output in the Erlang shell to a
+file. The file is piped through grep to easily find failed
+test cases (i.e. test cases that encountered an error).
+
+During development we want to be able to run the test cases
+in the debugger. This demands a little bit of preparation:
+
+ - Start the necessary number of nodes (normally 3).
+ This may either be done by running the mt script or
+ by starting the main node and then invoking mt:start_nodes()
+ to start the extra nodes with slave.
+
+ - Ensure that the nodes are connected. The easiest way to do
+ this is by invoking mt:ping().
+
+ - Load all files that need to be interpreted. This is typically
+ all Mnesia files plus the test case. By invoking mnesia:ni()
+ and mnesia:ni([TestModule]) the necessary modules will be
+ loaded on all CONNECTED nodes.
+
+The test case execution is supervised in order to ensure that no test
+case exceeds its maximum time limit, which by default is 5 minutes.
+When the limit is reached, the running test case gets aborted and the
+test server runs the next test case in line. This behaviour is useful
+when running the entire test suite during the night, but it is really
+annoying during debugging.
+
+ Use the "erl -mnesia_test_timeout" flag to disable the test case
+ time limit mechanism.
+
+Some mechanisms in Mnesia are almost impossible to test with a
+white box technique. In order to be able to write predictable
+test cases which test the same thing every time they are run,
+Mnesia has been instrumented with debug functions. These may be
+controlled from a test program. For example, to verify that the
+commit protocols work, it is essential to be able to kill
+Mnesia in the most critical situations. Normally Mnesia is
+compiled with the debug functions disabled, which means that
+test cases that require this functionality will be skipped. The
+mnesia:ni() functions mentioned above ensure that the interpreted
+code is instrumented with Mnesia's debug functionality. The
+mnesia:nc() functions compile Mnesia with the debug setting
+enabled.
+
+Happy bug hunting!
+
+ Hakan Mattsson <[email protected]>
+
+
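
A condensed session assembled from the README above (not part of the patch; the node name a and $top are the README's own placeholders):

    $ erl -sname a -pa $top/examples -pa $top/test -pa $top/ebin
    1> mt:struct(all).   %% show the whole test case hierarchy
    2> mt:t(silly).      %% the smallest meaningful test case
    3> mt:t().           %% re-run the last test case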
diff --git a/lib/mnesia/test/mnesia.spec b/lib/mnesia/test/mnesia.spec
new file mode 100644
index 0000000000..596f8b917d
--- /dev/null
+++ b/lib/mnesia/test/mnesia.spec
@@ -0,0 +1,23 @@
+{topcase, {dir, "../mnesia_test"}}.
+{require_nodenames, 2}.
+{skip, {mnesia_measure_test, ram_meter, "Takes too long"}}.
+{skip, {mnesia_measure_test, disc_meter, "Takes too long"}}.
+{skip, {mnesia_measure_test, disc_only_meter, "Takes too long"}}.
+{skip, {mnesia_measure_test, cost, "Takes too long"}}.
+{skip, {mnesia_measure_test, dbn_meters, "Takes too long"}}.
+{skip, {mnesia_measure_test, tpcb, "Takes too long"}}.
+{skip, {mnesia_measure_test, prediction, "Not yet implemented"}}.
+{skip, {mnesia_measure_test, consumption, "Not yet implemented"}}.
+{skip, {mnesia_measure_test, scalability, "Not yet implemented"}}.
+{skip, {mnesia_measure_test, tpcb, "Takes too much time and memory"}}.
+{skip, {mnesia_measure_test, measure_all_api_functions, "Not yet implemented"}}.
+{skip, {mnesia_measure_test, mnemosyne_vs_mnesia_kernel, "Not yet implemented"}}.
+{skip, {mnesia_examples_test, company, "Not yet implemented"}}.
+{skip, {mnesia_config_test, ignore_fallback_at_startup, "Not yet implemented"}}.
+{skip, {mnesia_evil_backup, local_backup_checkpoint, "Not yet implemented"}}.
+{skip, {mnesia_config_test, max_wait_for_decision, "Not yet implemented"}}.
+{skip, {mnesia_recovery_test, after_full_disc_partition, "Not yet implemented"}}.
+{skip, {mnesia_recovery_test, system_upgrade, "Not yet implemented"}}.
+{skip, {mnesia_consistency_test, consistency_after_change_table_copy_type, "Not yet implemented"}}.
+{skip, {mnesia_consistency_test, consistency_after_transform_table, "Not yet implemented"}}.
+{skip, {mnesia_consistency_test, consistency_after_rename_of_node, "Not yet implemented"}}.
diff --git a/lib/mnesia/test/mnesia.spec.vxworks b/lib/mnesia/test/mnesia.spec.vxworks
new file mode 100644
index 0000000000..11c01ea3fe
--- /dev/null
+++ b/lib/mnesia/test/mnesia.spec.vxworks
@@ -0,0 +1,362 @@
+{topcase, {dir, "../mnesia_test"}}.
+{require_nodenames, 3}.
+{diskless, true}.
+{skip, {mnesia_measure_test, all, "Too heavy"}}.
+%{mnesia_install_test, silly_durability} 'IMPL'
+%{mnesia_install_test, silly_move} 'IMPL'
+{skip, {mnesia_install_test, silly_upgrade, "Uses disk"}}.
+%{mnesia_install_test, conflict} 'IMPL'
+%{mnesia_install_test, dist} 'IMPL'
+{skip, {mnesia_examples_test, all, "Uses disk"}}.
+{skip, {mnesia_nice_coverage_test, all, "Uses disk"}}.
+
+%{mnesia_evil_coverage_test, system_info} 'IMPL'
+%{mnesia_evil_coverage_test, table_info} 'IMPL'
+%{mnesia_evil_coverage_test, error_description} 'IMPL'
+{skip, {mnesia_evil_coverage_test, db_node_lifecycle, "Uses disk"}}.
+{skip, {mnesia_evil_coverage_test, local_content, "Uses disk"}}.
+%{mnesia_evil_coverage_test, start_and_stop} 'IMPL'
+%{mnesia_evil_coverage_test, transaction} 'IMPL'
+{skip, {mnesia_evil_coverage_test, checkpoint, "Uses disk"}}.
+{skip, {mnesia_evil_backup, backup, "Uses disk"}}.
+{skip, {mnesia_evil_backup, global_backup_checkpoint, "Uses disk"}}.
+{skip, {mnesia_evil_backup, incremental_backup_checkpoint, "Uses disk"}}.
+{skip, {mnesia_evil_backup, local_backup_checkpoint, "Uses disk"}}.
+{skip, {mnesia_evil_backup, selective_backup_checkpoint, "Uses disk"}}.
+{skip, {mnesia_evil_backup, restore_errors, "Uses disk"}}.
+{skip, {mnesia_evil_backup, restore_clear, "Uses disk"}}.
+{skip, {mnesia_evil_backup, restore_keep, "Uses disk"}}.
+{skip, {mnesia_evil_backup, restore_recreate, "Uses disk"}}.
+{skip, {mnesia_evil_backup, traverse_backup, "Uses disk"}}.
+{skip, {mnesia_evil_backup, install_fallback, "Uses disk"}}.
+{skip, {mnesia_evil_backup, uninstall_fallback, "Uses disk"}}.
+{skip, {mnesia_evil_backup, local_fallback, "Uses disk"}}.
+%{mnesia_evil_coverage_test, table_lifecycle} 'IMPL'
+{skip, {mnesia_evil_coverage_test, replica_management, "Uses disk"}}.
+%{mnesia_evil_coverage_test, change_table_access_mode} 'IMPL'
+%{mnesia_evil_coverage_test, change_table_load_order} 'IMPL'
+{skip, {mnesia_evil_coverage_test, set_master_nodes, "Uses disk"}}.
+{skip, {mnesia_evil_coverage_test, offline_set_master_nodes, "Uses disk"}}.
+{skip, {mnesia_evil_coverage_test, replica_location, "Uses disk"}}.
+%{mnesia_evil_coverage_test, add_table_index_ram} 'IMPL'
+{skip, {mnesia_trans_access_test, add_table_index_disc, "Uses disc"}}.
+{skip, {mnesia_trans_access_test, add_table_index_disc_only, "Uses disc"}}.
+%{mnesia_evil_coverage_test, create_live_table_index_ram} 'IMPL'
+{skip, {mnesia_trans_access_test, create_live_table_index_disc, "Uses disc"}}.
+{skip, {mnesia_trans_access_test, create_live_table_index_disc_only, "Uses disc"}}.
+%{mnesia_evil_coverage_test, del_table_index_ram} 'IMPL'
+{skip, {mnesia_trans_access_test, del_table_index_disc, "Uses disc"}}.
+{skip, {mnesia_trans_access_test, del_table_index_disc_only, "Uses disc"}}.
+{skip, {mnesia_trans_access_test, idx_schema_changes_ram, "Uses disk"}}.
+{skip, {mnesia_trans_access_test, idx_schema_changes_disc, "Uses disc"}}.
+{skip, {mnesia_trans_access_test, idx_schema_changes_disc_only, "Uses disc"}}.
+%{mnesia_dirty_access_test, dirty_write_ram} 'IMPL'
+
+{skip, {mnesia_dirty_access_test, dirty_write_disc, "Uses disc"}}.
+{skip, {mnesia_dirty_access_test, dirty_write_disc_only, "Uses disc"}}.
+%{mnesia_dirty_access_test, dirty_read_ram} 'IMPL'
+{skip, {mnesia_dirty_access_test, dirty_read_disc, "Uses disc"}}.
+{skip, {mnesia_dirty_access_test, dirty_read_disc_only, "Uses disc"}}.
+%{mnesia_dirty_access_test, dirty_update_counter_ram} 'IMPL'
+{skip, {mnesia_dirty_access_test, dirty_update_counter_disc, "Uses disc"}}.
+{skip, {mnesia_dirty_access_test, dirty_update_counter_disc_only, "Uses disc"}}.
+%{mnesia_dirty_access_test, dirty_delete_ram} 'IMPL'
+{skip, {mnesia_dirty_access_test, dirty_delete_disc, "Uses disc"}}.
+{skip, {mnesia_dirty_access_test, dirty_delete_disc_only, "Uses disc"}}.
+%{mnesia_dirty_access_test, dirty_delete_object_ram} 'IMPL'
+{skip, {mnesia_dirty_access_test, dirty_delete_object_disc, "Uses disc"}}.
+{skip, {mnesia_dirty_access_test, dirty_delete_object_disc_only, "Uses disc"}}.
+%{mnesia_dirty_access_test, dirty_match_object_ram} 'IMPL'
+{skip, {mnesia_dirty_access_test, dirty_match_object_disc, "Uses disc"}}.
+{skip, {mnesia_dirty_access_test, dirty_match_object_disc_only, "Uses disc"}}.
+%{mnesia_dirty_access_test, dirty_index_match_object_ram} 'IMPL'
+{skip, {mnesia_dirty_access_test, dirty_index_match_object_disc, "Uses disc"}}.
+{skip, {mnesia_dirty_access_test, dirty_index_match_object_disc_only, "Uses disc"}}.
+%{mnesia_dirty_access_test, dirty_index_read_ram} 'IMPL'
+{skip, {mnesia_dirty_access_test, dirty_index_read_disc, "Uses disc"}}.
+{skip, {mnesia_dirty_access_test, dirty_index_read_disc_only, "Uses disc"}}.
+%{mnesia_dirty_access_test, dirty_index_update_set_ram} 'IMPL'
+{skip, {mnesia_dirty_access_test, dirty_index_update_set_disc, "Uses disc"}}.
+{skip, {mnesia_dirty_access_test, dirty_index_update_set_disc_only, "Uses disc"}}.
+%{mnesia_dirty_access_test, dirty_index_update_bag_ram} 'IMPL'
+{skip, {mnesia_dirty_access_test, dirty_index_update_bag_disc, "Uses disc"}}.
+{skip, {mnesia_dirty_access_test, dirty_index_update_bag_disc_only, "Uses disc"}}.
+%{mnesia_dirty_access_test, dirty_iter_ram} 'IMPL'
+{skip, {mnesia_dirty_access_test, dirty_iter_disc, "Uses disc"}}.
+{skip, {mnesia_dirty_access_test, dirty_iter_disc_only, "Uses disc"}}.
+{skip, {mnesia_dirty_access_test, admin_tests, "Uses disk"}}.
+
+%{mnesia_trans_access_test, write} 'IMPL'
+%{mnesia_trans_access_test, read} 'IMPL'
+%{mnesia_trans_access_test, wread} 'IMPL'
+%{mnesia_trans_access_test, delete} 'IMPL'
+%{mnesia_trans_access_test, delete_object} 'IMPL'
+%{mnesia_trans_access_test, match_object} 'IMPL'
+%{mnesia_trans_access_test, all_keys} 'IMPL'
+%{mnesia_trans_access_test, index_match_object} 'IMPL'
+%{mnesia_trans_access_test, index_read} 'IMPL'
+%{mnesia_trans_access_test, index_update_set} 'IMPL'
+%{mnesia_trans_access_test, index_update_bag} 'IMPL'
+{skip, {mnesia_evil_coverage_test, dump_tables, "Uses disk"}}.
+{skip, {mnesia_evil_coverage_test, dump_log, "Uses disk"}}.
+%{mnesia_evil_coverage_test, wait_for_tables} 'IMPL'
+{skip, {mnesia_evil_coverage_test, force_load_table, "Uses disk"}}.
+%{mnesia_evil_coverage_test, user_properties} 'IMPL'
+%{mnesia_evil_coverage_test, record_name_dirty_access_ram} 'IMPL'
+{skip, {mnesia_evil_coverage_test, record_name_dirty_access_disc, "Uses disc"}}.
+{skip, {mnesia_evil_coverage_test, record_name_dirty_access_disc_only, "Uses disc"}}.
+%{mnesia_evil_coverage_test, snmp_open_table} 'IMPL'
+%{mnesia_evil_coverage_test, snmp_close_table} 'IMPL'
+%{mnesia_evil_coverage_test, snmp_get_next_index} 'IMPL'
+%{mnesia_evil_coverage_test, snmp_get_row} 'IMPL'
+%{mnesia_evil_coverage_test, snmp_get_mnesia_key} 'IMPL'
+%{mnesia_evil_coverage_test, snmp_update_counter} 'IMPL'
+%{mnesia_evil_coverage_test, info} 'IMPL'
+%{mnesia_evil_coverage_test, schema_0} 'IMPL'
+%{mnesia_evil_coverage_test, schema_1} 'IMPL'
+%{mnesia_evil_coverage_test, view_0} 'IMPL'
+{skip, {mnesia_evil_coverage_test, view_1, "Uses disk"}}.
+{skip, {mnesia_evil_coverage_test, view_2, "Uses disk"}}.
+%{mnesia_evil_coverage_test, lkill} 'IMPL'
+%{mnesia_evil_coverage_test, kill} 'IMPL'
+
+%{mnesia_config_test, access_module} 'IMPL'
+%{mnesia_config_test, auto_repair} 'IMPL'
+{skip, {mnesia_config_test, backup_module, "Uses disk"}}.
+{skip, {mnesia_config_test, dynamic_connect, "Uses disk"}}.
+%{mnesia_config_test, debug} 'IMPL'
+%{mnesia_config_test, dir} 'IMPL'
+{skip, {mnesia_config_test, dump_log_load_regulation, "Uses disk"}}.
+{skip, {mnesia_config_test, dump_log_time_threshold, "Uses disk"}}.
+{skip, {mnesia_config_test, dump_log_write_threshold, "Uses disk"}}.
+{skip, {mnesia_config_test, dump_log_update_in_place, "Uses disk"}}.
+{skip, {mnesia_config_test, embedded_mnemosyne, "Uses Mnemosyne"}}.
+%{mnesia_config_test, event_module} 'IMPL'
+{skip, {mnesia_config_test, ignore_fallback_at_startup, "Not Yet impl"}}.
+%{mnesia_config_test, inconsistent_database} 'IMPL'
+{skip, {mnesia_config_test, max_wait_for_decision, "Not Yet impl"}}.
+{skip, {mnesia_config_test, start_one_disc_full_then_one_disc_less, "Uses disc"}}.
+{skip, {mnesia_config_test, start_first_one_disc_less_then_one_disc_full, "Uses disc"}}.
+%%{skip, {mnesia_config_test, start_first_one_disc_less_then_two_more_disc_less, "Uses disc"}}.
+{skip, {mnesia_config_test, schema_location_and_extra_db_nodes_combinations, "Uses disk"}}.
+{skip, {mnesia_config_test, table_load_to_disc_less_nodes, "Uses disc"}}.
+{skip, {mnesia_config_test, schema_merge, "Uses Disc"}}.
+%{mnesia_config_test, unknown_config} 'IMPL'
+%{mnesia_registry_test, good_dump} 'IMPL'
+%{mnesia_registry_test, bad_dump} 'IMPL'
+
+%{mnesia_atomicity_test, explicit_abort_in_middle_of_trans} 'IMPL'
+%{mnesia_atomicity_test, runtime_error_in_middle_of_trans} 'IMPL'
+%{mnesia_atomicity_test, kill_self_in_middle_of_trans} 'IMPL'
+%{mnesia_atomicity_test, throw_in_middle_of_trans} 'IMPL'
+%{mnesia_atomicity_test, mnesia_down_during_infinite_trans} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_sw_rt} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_sw_wt} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_wr_r} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_sw_sw} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_sw_w} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_sw_wr} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_wr_wt} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_wr_sw} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_wr_w} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_r_sw} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_r_w} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_r_wt} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_rt_sw} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_rt_w} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_rt_wt} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_wt_r} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_wt_w} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_wt_rt} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_wt_wt} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_wt_wr} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_wt_sw} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_w_wr} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_w_sw} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_w_r} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_w_w} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_w_rt} 'IMPL'
+%{mnesia_atomicity_test, lock_waiter_w_wt} 'IMPL'
+%{mnesia_atomicity_test, restart_r_one} 'IMPL'
+%{mnesia_atomicity_test, restart_w_one} 'IMPL'
+%{mnesia_atomicity_test, restart_rt_one} 'IMPL'
+%{mnesia_atomicity_test, restart_wt_one} 'IMPL'
+%{mnesia_atomicity_test, restart_wr_one} 'IMPL'
+%{mnesia_atomicity_test, restart_sw_one} 'IMPL'
+%{mnesia_atomicity_test, restart_r_two} 'IMPL'
+%{mnesia_atomicity_test, restart_w_two} 'IMPL'
+%{mnesia_atomicity_test, restart_rt_two} 'IMPL'
+%{mnesia_atomicity_test, restart_wt_two} 'IMPL'
+%{mnesia_atomicity_test, restart_wr_two} 'IMPL'
+%{mnesia_atomicity_test, restart_sw_two} 'IMPL'
+
+%{mnesia_isolation_test, no_conflict} 'IMPL'
+%{mnesia_isolation_test, simple_queue_conflict} 'IMPL'
+%{mnesia_isolation_test, advanced_queue_conflict} 'IMPL'
+%{mnesia_isolation_test, simple_deadlock_conflict} 'IMPL'
+%{mnesia_isolation_test, advanced_deadlock_conflict} 'IMPL'
+%{mnesia_isolation_test, lock_burst} 'IMPL'
+%{mnesia_isolation_test, basic_sticky_functionality} 'IMPL'
+%{mnesia_isolation_test, create_table} 'IMPL'
+%{mnesia_isolation_test, delete_table} 'IMPL'
+%{mnesia_isolation_test, move_table_copy} 'IMPL'
+%{mnesia_isolation_test, add_table_index} 'IMPL'
+%{mnesia_isolation_test, del_table_index} 'IMPL'
+%{mnesia_isolation_test, transform_table} 'IMPL'
+%{mnesia_isolation_test, snmp_open_table} 'IMPL'
+%{mnesia_isolation_test, snmp_close_table} 'IMPL'
+{skip, {mnesia_isolation_test, change_table_copy_type, "Uses disk"}}.
+%{mnesia_isolation_test, change_table_access} 'IMPL'
+%{mnesia_isolation_test, add_table_copy} 'IMPL'
+%{mnesia_isolation_test, del_table_copy} 'IMPL'
+{skip, {mnesia_isolation_test, dump_tables, "Uses disk"}}.
+{skip, {mnesia_isolation_test, extra_admin_tests, "Uses disk"}}.
+%{mnesia_isolation_test, del_table_copy_1} 'IMPL'
+%{mnesia_isolation_test, del_table_copy_2} 'IMPL'
+%{mnesia_isolation_test, del_table_copy_3} 'IMPL'
+%{mnesia_isolation_test, add_table_copy_1} 'IMPL'
+%{mnesia_isolation_test, add_table_copy_2} 'IMPL'
+%{mnesia_isolation_test, add_table_copy_3} 'IMPL'
+%{mnesia_isolation_test, add_table_copy_4} 'IMPL'
+%{mnesia_isolation_test, move_table_copy_1} 'IMPL'
+%{mnesia_isolation_test, move_table_copy_2} 'IMPL'
+%{mnesia_isolation_test, move_table_copy_3} 'IMPL'
+%{mnesia_isolation_test, move_table_copy_4} 'IMPL'
+%{mnesia_isolation_test, dirty_updates_visible_direct} 'IMPL'
+%{mnesia_isolation_test, dirty_reads_regardless_of_trans} 'IMPL'
+%{mnesia_isolation_test, trans_update_invisibible_outside_trans} 'IMPL'
+%{mnesia_isolation_test, trans_update_visible_inside_trans} 'IMPL'
+%{mnesia_isolation_test, write_shadows} 'IMPL'
+%{mnesia_isolation_test, delete_shadows} 'IMPL'
+%{mnesia_isolation_test, write_delete_shadows_bag} 'IMPL'
+
+{skip, {mnesia_durability_test, all, "Uses disk "}}.
+%{mnesia_durability_test, load_local_contents_directly} 'IMPL'
+%{mnesia_durability_test, load_directly_when_all_are_ram_copiesA} 'IMPL'
+%{mnesia_durability_test, load_directly_when_all_are_ram_copiesB} 'IMPL'
+%{skip, {mnesia_durability_test, late_load_when_all_are_ram_copies_on_ram_nodes1, "Uses disk schema"}}.
+%{skip, {mnesia_durability_test, late_load_when_all_are_ram_copies_on_ram_nodes2, "Uses disk schema"}}.
+%{skip, {mnesia_durability_test, load_when_last_replica_becomes_available, "Uses disk"}}.
+%{skip, {mnesia_durability_test, load_when_we_have_down_from_all_other_replica_nodes, "Uses disk"}}.
+%{skip, {mnesia_durability_test, late_load_transforms_into_disc_load, "Uses disc"}}.
+%{mnesia_durability_test, late_load_leads_to_hanging} 'IMPL'
+%{mnesia_durability_test, force_load_when_nobody_intents_to_load} 'IMPL'
+%{mnesia_durability_test, force_load_when_someone_has_decided_to_load} 'IMPL'
+%{mnesia_durability_test, force_load_when_someone_else_already_has_loaded} 'IMPL'
+%{mnesia_durability_test, force_load_when_we_has_loaded} 'IMPL'
+%{mnesia_durability_test, force_load_on_a_non_local_table} 'IMPL'
+%{mnesia_durability_test, force_load_when_the_table_does_not_exist} 'IMPL'
+%{mnesia_durability_test, master_nodes} 'IMPL'
+%{mnesia_durability_test, master_on_non_local_tables} 'IMPL'
+%{mnesia_durability_test, remote_force_load_with_local_master_node} 'IMPL'
+%{mnesia_durability_test, dump_ram_copies} 'IMPL'
+%{skip, {mnesia_durability_test, dump_disc_copies, "Uses disc"}}.
+%{skip, {mnesia_durability_test, dump_disc_only, "Uses disc"}}.
+%{skip, {mnesia_durability_test, durability_of_disc_copies, "Uses disc"}}.
+%{skip, {mnesia_durability_test, durability_of_disc_only_copies, "Uses disc"}}.
+
+{skip, {mnesia_recovery_test, mnesia_down, "Uses Disk"}}.
+%{mnesia_recovery_test, no_master_2} 'IMPL'
+%{mnesia_recovery_test, no_master_3} 'IMPL'
+%{mnesia_recovery_test, one_master_2} 'IMPL'
+%{mnesia_recovery_test, one_master_3} 'IMPL'
+%{mnesia_recovery_test, two_master_2} 'IMPL'
+%{mnesia_recovery_test, two_master_3} 'IMPL'
+%{mnesia_recovery_test, all_master_2} 'IMPL'
+%{mnesia_recovery_test, all_master_3} 'IMPL'
+{skip, {mnesia_recovery_test, mnesia_down_during_startup_disk_ram, "Uses disk"}}.
+%{mnesia_recovery_test, mnesia_down_during_startup_init_ram} 'IMPL'
+{skip, {mnesia_recovery_test, mnesia_down_during_startup_init_disc, "Uses disc"}}.
+{skip, {mnesia_recovery_test, mnesia_down_during_startup_init_disc_only, "Uses disc"}}.
+%{mnesia_recovery_test, mnesia_down_during_startup_tm_ram} 'IMPL'
+{skip, {mnesia_recovery_test, mnesia_down_during_startup_tm_disc, "Uses disc"}}.
+{skip, {mnesia_recovery_test, mnesia_down_during_startup_tm_disc_only, "Uses disc"}}.
+%{mnesia_recovery_test, explicit_stop_during_snmp} 'IMPL'
+
+{skip, {mnesia_recovery_test, schema_trans, "Uses Disk, needs disk log"}}.
+{skip, {mnesia_recovery_test, async_dirty, "Uses disc"}}.
+{skip, {mnesia_recovery_test, sync_dirty, "Uses disc"}}.
+{skip, {mnesia_recovery_test, sym_trans, "Uses disc"}}.
+{skip, {mnesia_recovery_test, asym_trans, "Uses disc"}}.
+
+{skip, {mnesia_recovery_test, after_full_disc_partition, "Not Yet impl"}}.
+{skip, {mnesia_recovery_test, after_corrupt_files, "Uses disk"}}.
+
+%{mnesia_evil_coverage_test, subscriptions} 'IMPL'
+%{mnesia_evil_coverage_test, nested_trans_both_ok} 'IMPL'
+%{mnesia_evil_coverage_test, nested_trans_child_dies} 'IMPL'
+%{mnesia_evil_coverage_test, nested_trans_parent_dies} 'IMPL'
+%{mnesia_evil_coverage_test, nested_trans_both_dies} 'IMPL'
+%{mnesia_evil_coverage_test, mix_of_trans_sync_dirty} 'IMPL'
+%{mnesia_evil_coverage_test, mix_of_trans_async_dirty} 'IMPL'
+%{mnesia_evil_coverage_test, mix_of_trans_ets} 'IMPL'
+
+{skip, {mnesia_recovery_test, disc_less, "Uses disc (on the other nodes)"}}.
+{skip, {mnesia_recovery_test, system_upgrade, "Not Yet impl"}}.
+%{mnesia_consistency_test, consistency_after_restart_1_ram} 'IMPL'
+{skip, {mnesia_consistency_test, consistency_after_restart_1_disc, "Uses disc"}}.
+{skip, {mnesia_consistency_test, consistency_after_restart_1_disc_only, "Uses disc"}}.
+%{mnesia_consistency_test, consistency_after_restart_2_ram} 'IMPL'
+{skip, {mnesia_consistency_test, consistency_after_restart_2_disc, "Uses disc"}}.
+{skip, {mnesia_consistency_test, consistency_after_restart_2_disc_only, "Uses disc"}}.
+{skip, {mnesia_consistency_test, consistency_after_dump_tables_1_ram, "Uses disk"}}.
+{skip, {mnesia_consistency_test, consistency_after_dump_tables_2_ram, "Uses disk"}}.
+%{mnesia_consistency_test, consistency_after_add_replica_2_ram} 'IMPL'
+{skip, {mnesia_consistency_test, consistency_after_add_replica_2_disc, "Uses disc"}}.
+{skip, {mnesia_consistency_test, consistency_after_add_replica_2_disc_only, "Uses disc"}}.
+%{mnesia_consistency_test, consistency_after_add_replica_3_ram} 'IMPL'
+{skip, {mnesia_consistency_test, consistency_after_add_replica_3_disc, "Uses disc"}}.
+{skip, {mnesia_consistency_test, consistency_after_add_replica_3_disc_only, "Uses disc"}}.
+%{mnesia_consistency_test, consistency_after_del_replica_2_ram} 'IMPL'
+{skip, {mnesia_consistency_test, consistency_after_del_replica_2_disc, "Uses disc"}}.
+{skip, {mnesia_consistency_test, consistency_after_del_replica_2_disc_only, "Uses disc"}}.
+%{mnesia_consistency_test, consistency_after_del_replica_3_ram} 'IMPL'
+{skip, {mnesia_consistency_test, consistency_after_del_replica_3_disc, "Uses disc"}}.
+{skip, {mnesia_consistency_test, consistency_after_del_replica_3_disc_only, "Uses disc"}}.
+%{mnesia_consistency_test, consistency_after_move_replica_2_ram} 'IMPL'
+{skip, {mnesia_consistency_test, consistency_after_move_replica_2_disc, "Uses disc"}}.
+{skip, {mnesia_consistency_test, consistency_after_move_replica_2_disc_only, "Uses disc"}}.
+%{mnesia_consistency_test, consistency_after_move_replica_3_ram} 'IMPL'
+{skip, {mnesia_consistency_test, consistency_after_move_replica_3_disc, "Uses disc"}}.
+{skip, {mnesia_consistency_test, consistency_after_move_replica_3_disc_only, "Uses disc"}}.
+{skip, {mnesia_consistency_test, consistency_after_transform_table, "Not yet implemented"}}.
+{skip, {mnesia_consistency_test, consistency_after_change_table_copy_type, "Not yet implemented"}}.
+{skip, {mnesia_consistency_test, consistency_after_fallback_2_ram, "Uses disk"}}.
+{skip, {mnesia_consistency_test, consistency_after_fallback_2_disc, "Uses disc"}}.
+{skip, {mnesia_consistency_test, consistency_after_fallback_2_disc_only, "Uses disc"}}.
+{skip, {mnesia_consistency_test, consistency_after_fallback_3_ram, "Uses disk"}}.
+{skip, {mnesia_consistency_test, consistency_after_fallback_3_disc, "Uses disc"}}.
+{skip, {mnesia_consistency_test, consistency_after_fallback_3_disc_only, "Uses disc"}}.
+{skip, {mnesia_consistency_test, consistency_after_restore_clear_ram, "Uses disk"}}.
+{skip, {mnesia_consistency_test, consistency_after_restore_clear_disc, "Uses disc"}}.
+{skip, {mnesia_consistency_test, consistency_after_restore_clear_disc_only, "Uses disc"}}.
+{skip, {mnesia_consistency_test, consistency_after_restore_recreate_ram, "Uses disk"}}.
+{skip, {mnesia_consistency_test, consistency_after_restore_recreate_disc, "Uses disc"}}.
+{skip, {mnesia_consistency_test, consistency_after_restore_recreate_disc_only, "Uses disc"}}.
+{skip, {mnesia_consistency_test, consistency_after_rename_of_node, "Not yet implemented"}}.
+{skip, {mnesia_consistency_test, updates_during_checkpoint_activation, "Uses disk"}}.
+%{skip, {mnesia_consistency_test, updates_during_checkpoint_activation_2_disc, "Uses disc"}}.
+%{skip, {mnesia_consistency_test, updates_during_checkpoint_activation_2_disc_only, "Uses disc"}}.
+%%{mnesia_consistency_test, updates_during_checkpoint_activation_3_ram} 'IMPL'
+%{skip, {mnesia_consistency_test, updates_during_checkpoint_activation_3_disc, "Uses disc"}}.
+%{skip, {mnesia_consistency_test, updates_during_checkpoint_activation_3_disc_only, "Uses disc"}}.
+{skip, {mnesia_consistency_test, updates_during_checkpoint_iteration, "Uses disk"}}.
+%{skip, {mnesia_consistency_test, updates_during_checkpoint_iteration_2_disc, "Uses disc"}}.
+%{skip, {mnesia_consistency_test, updates_during_checkpoint_iteration_2_disc_only, "Uses disc"}}.
+{skip, {mnesia_consistency_test, load_table_with_activated_checkpoint_ram, "Uses disk"}}.
+{skip, {mnesia_consistency_test, load_table_with_activated_checkpoint_disc, "Uses disc"}}.
+{skip, {mnesia_consistency_test, load_table_with_activated_checkpoint_disc_only, "Uses disc"}}.
+{skip, {mnesia_consistency_test, add_table_copy_to_table_with_activated_checkpoint_ram, "Uses disk"}}.
+{skip, {mnesia_consistency_test, add_table_copy_to_table_with_activated_checkpoint_disc, "Uses disc"}}.
+{skip, {mnesia_consistency_test, add_table_copy_to_table_with_activated_checkpoint_disc_only, "Uses disc"}}.
+{skip, {mnesia_consistency_test, inst_fallback_process_dies, "Uses disk"}}.
+{skip, {mnesia_consistency_test, fatal_when_inconsistency, "Uses disk"}}.
+{skip, {mnesia_consistency_test, after_delete, "Uses disk"}}.
+{skip, {mnesia_consistency_test, mnesia_down_during_backup_causes_switch, "Uses disk"}}.
+{skip, {mnesia_consistency_test, mnesia_down_during_backup_causes_abort, "Uses disk"}}.
+%{mnesia_consistency_test, cause_switch_after} 'IMPL'
+%{mnesia_consistency_test, cause_abort_before} 'IMPL'
+%{mnesia_consistency_test, cause_abort_after} 'IMPL'
+%{mnesia_consistency_test, change_schema_before} 'IMPL'
+%{mnesia_consistency_test, change_schema_after} 'IMPL'
+
diff --git a/lib/mnesia/test/mnesia_SUITE.erl b/lib/mnesia/test/mnesia_SUITE.erl
new file mode 100644
index 0000000000..b28deaf330
--- /dev/null
+++ b/lib/mnesia/test/mnesia_SUITE.erl
@@ -0,0 +1,203 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(mnesia_SUITE).
+-author('[email protected]').
+-compile([export_all]).
+-include("mnesia_test_lib.hrl").
+
+init_per_testcase(Func, Conf) ->
+ mnesia_test_lib:init_per_testcase(Func, Conf).
+
+fin_per_testcase(Func, Conf) ->
+ mnesia_test_lib:fin_per_testcase(Func, Conf).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+all(doc) ->
+ ["Verify that Mnesia really is a distributed real-time DBMS",
+ "This is the test suite of the Mnesia DBMS. The test suite",
+ "covers many aspects of usage and is indended to be developed",
+ "incrementally. The test suite is divided into a hierarchy of test",
+ "suites where the leafs actually implements the test cases.",
+ "The intention of each test case and sub test suite can be",
+ "read in comments where they are implemented or in worst cases",
+ "from their long mnemonic names. ",
+ "",
+ "The most simple test case of them all is called 'silly'",
+ "and is useful to run now and then, e.g. when some new fatal",
+ "bug has been introduced. It may be run even if Mnesia is in",
+ "such a bad shape that the test machinery cannot be used.",
+ "NB! Invoke the function directly with mnesia_SUITE:silly()",
+ "and do not involve the normal test machinery."];
+all(suite) ->
+ [
+ light,
+ medium,
+ heavy,
+ clean_up_suite
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+silly() ->
+ mnesia_install_test:silly().
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+light(doc) ->
+ ["The 'light' test suite runs a selected set of test suites and is",
+ "intended to be the smallest test suite that is meaningful",
+ "to run. It starts with an installation test (which in essence is the",
+ "'silly' test case) and then it covers all functions in the API in",
+ "various depths. All configuration parameters and examples are also",
+ "covered."];
+light(suite) ->
+ [
+ install,
+ nice,
+ evil,
+ {mnesia_frag_test, light},
+ qlc,
+ registry,
+ config,
+ examples
+ ].
+
+install(suite) ->
+ [{mnesia_install_test, all}].
+
+nice(suite) ->
+ [{mnesia_nice_coverage_test, all}].
+
+evil(suite) ->
+ [{mnesia_evil_coverage_test, all}].
+
+qlc(suite) ->
+ [{mnesia_qlc_test, all}].
+
+registry(suite) ->
+ [{mnesia_registry_test, all}].
+
+config(suite) ->
+ [{mnesia_config_test, all}].
+
+examples(suite) ->
+ [{mnesia_examples_test, all}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+medium(doc) ->
+ ["The 'medium' test suite verfies the ACID (atomicity, consistency",
+ "isolation and durability) properties and various recovery scenarios",
+ "These tests may take quite while to run."];
+medium(suite) ->
+ [
+ install,
+ atomicity,
+ isolation,
+ durability,
+ recovery,
+ consistency,
+ {mnesia_frag_test, medium}
+ ].
+
+atomicity(suite) ->
+ [{mnesia_atomicity_test, all}].
+
+isolation(suite) ->
+ [{mnesia_isolation_test, all}].
+
+durability(suite) ->
+ [{mnesia_durability_test, all}].
+
+recovery(suite) ->
+ [{mnesia_recovery_test, all}].
+
+consistency(suite) ->
+ [{mnesia_consistency_test, all}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+heavy(doc) ->
+ ["The 'heavy' test suite runs some resource consuming tests and",
+ "benchmarks"];
+heavy(suite) ->
+ [measure].
+
+measure(suite) ->
+ [{mnesia_measure_test, all}].
+
+prediction(suite) ->
+ [{mnesia_measure_test, prediction}].
+
+fairness(suite) ->
+ [{mnesia_measure_test, fairness}].
+
+benchmarks(suite) ->
+ [{mnesia_measure_test, benchmarks}].
+
+consumption(suite) ->
+ [{mnesia_measure_test, consumption}].
+
+scalability(suite) ->
+ [{mnesia_measure_test, scalability}].
+
+
+clean_up_suite(doc) -> ["Not a test case; only kills Mnesia and the nodes that were"
+ " started during the tests"];
+clean_up_suite(suite) ->
+ [];
+clean_up_suite(Config) when is_list(Config)->
+ mnesia:kill(),
+ Slaves = mnesia_test_lib:lookup_config(nodenames, Config),
+ Nodes = lists:delete(node(), Slaves),
+ rpc:multicall(Nodes, erlang, halt, []),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+otp_r4b(doc) ->
+ ["This test suite is an extract of the grand Mnesia suite",
+ "it contains OTP R4B specific test cases"];
+otp_r4b(suite) ->
+ [
+ {mnesia_config_test, access_module},
+ {mnesia_config_test, dump_log_load_regulation},
+ {mnesia_config_test, embedded_mnemosyne},
+ {mnesia_config_test, ignore_fallback_at_startup},
+ {mnesia_config_test, max_wait_for_decision},
+ {mnesia_consistency_test, consistency_after_restore},
+ {mnesia_evil_backup, restore},
+ {mnesia_evil_coverage_test, offline_set_master_nodes},
+ {mnesia_evil_coverage_test, record_name},
+ {mnesia_evil_coverage_test, user_properties},
+ {mnesia_registry_test, all},
+ otp_2363
+ ].
+
+otp_2363(doc) ->
+ ["Index on disc only tables"];
+otp_2363(suite) ->
+ [
+ {mnesia_dirty_access_test, dirty_index_match_object_disc_only},
+ {mnesia_dirty_access_test,dirty_index_read_disc_only},
+ {mnesia_dirty_access_test,dirty_index_update_bag_disc_only},
+ {mnesia_dirty_access_test,dirty_index_update_set_disc_only},
+ {mnesia_evil_coverage_test, create_live_table_index_disc_only}
+ ].
+
+
+
diff --git a/lib/mnesia/test/mnesia_atomicity_test.erl b/lib/mnesia/test/mnesia_atomicity_test.erl
new file mode 100644
index 0000000000..645c203a91
--- /dev/null
+++ b/lib/mnesia/test/mnesia_atomicity_test.erl
@@ -0,0 +1,839 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(mnesia_atomicity_test).
+-author('[email protected]').
+-author('[email protected]').
+-compile([export_all]).
+-include("mnesia_test_lib.hrl").
+
+init_per_testcase(Func, Conf) ->
+ mnesia_test_lib:init_per_testcase(Func, Conf).
+
+fin_per_testcase(Func, Conf) ->
+ mnesia_test_lib:fin_per_testcase(Func, Conf).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+all(doc) ->
+ ["Verify atomicity of transactions",
+ "Verify that transactions are atomic, i.e. either all operations",
+ "in a transaction will be performed or none of them. It must be",
+ "assured that no partitially completed operations leaves any",
+ "effects in the database."];
+all(suite) ->
+ [
+ explicit_abort_in_middle_of_trans,
+ runtime_error_in_middle_of_trans,
+ kill_self_in_middle_of_trans,
+ throw_in_middle_of_trans,
+ mnesia_down_in_middle_of_trans
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+explicit_abort_in_middle_of_trans(suite) -> [];
+explicit_abort_in_middle_of_trans(Config) when is_list(Config) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = explicit_abort_in_middle_of_trans,
+
+ Rec1A = {Tab, 1, a},
+ Rec1B = {Tab, 1, b},
+
+ ?match({atomic, ok}, mnesia:create_table([{name, Tab},
+ {ram_copies, [Node1]}])),
+ %% Start a transaction on one node
+ {success, [A]} = ?start_activities([Node1]),
+
+ %% store an object in the Tab - first transaction
+ ?start_transactions([A]),
+ A ! fun() ->
+ mnesia:write(Rec1A) % returns ok when successful
+ end,
+ ?match_receive({A, ok}),
+ A ! end_trans,
+ ?match_receive({A, {atomic, end_trans}}),
+
+ %% second transaction: store some new objects and abort before the
+ %% transaction is finished -> the new changes should be invisible
+ ?start_transactions([A]),
+ A ! fun() ->
+ mnesia:write(Rec1B),
+ exit(abort_by_purpose) %does that stop the process A ???
+ end,
+ ?match_receive({A, {aborted, abort_by_purpose}}),
+
+
+ %?match_receive({A, {'EXIT', Pid, normal}}), % A died and sends EXIT
+
+
+ %% Start a second transaction process, after the first failed
+ {success, [B]} = ?start_activities([Node1]),
+
+ %% check whether the interrupted transaction had no influence on the db
+ ?start_transactions([B]),
+ B ! fun() ->
+ ?match([Rec1A], mnesia:read({Tab, 1})),
+ ok
+ end,
+ ?match_receive({B, ok}),
+ B ! end_trans,
+ ?match_receive({B, {atomic, end_trans}}),
+
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+runtime_error_in_middle_of_trans(suite) -> [];
+runtime_error_in_middle_of_trans(Config) when is_list(Config) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = runtime_error_in_middle_of_trans,
+
+ Rec1A = {Tab, 1, a},
+ Rec1B = {Tab, 1, b},
+ Rec1C = {Tab, 1, c},
+
+ ?match({atomic, ok}, mnesia:create_table([{name, Tab},
+ {ram_copies, [Node1]}])),
+ %% Start a transaction on one node
+ {success, [A]} = ?start_activities([Node1]),
+
+ %% store an object in the Tab - first transaction
+ ?start_transactions([A]),
+ A ! fun() ->
+ mnesia:write(Rec1A) % returns ok when successful
+ end,
+ ?match_receive({A, ok}),
+ A ! end_trans,
+ ?match_receive({A, {atomic, end_trans}}),
+
+ %% second transaction: store some new objects and abort before the
+ %% transaction is finished -> the new changes should be invisible
+ ?start_transactions([A]),
+ A ! fun() ->
+ mnesia:write(Rec1B),
+ erlang:error(foo), % that should provoke a runtime error
+ mnesia:write(Rec1C)
+ end,
+ ?match_receive({A, {aborted, _Reason}}),
+
+ %?match_receive({A, {'EXIT', Msg1}), % A died and sends EXIT
+
+
+ %% Start a second transaction process, after the first failed
+ {success, [B]} = ?start_activities([Node1]),
+
+ %% check whether the interrupted transaction had no influence on the db
+ ?start_transactions([B]),
+ B ! fun() ->
+ ?match([Rec1A], mnesia:read({Tab, 1})),
+ ok
+ end,
+ ?match_receive({B, ok}),
+ B ! end_trans,
+ ?match_receive({B, {atomic, end_trans}}),
+
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+kill_self_in_middle_of_trans(suite) -> [];
+kill_self_in_middle_of_trans(Config) when is_list(Config) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = kill_self_in_middle_of_trans,
+
+ Rec1A = {Tab, 1, a},
+ Rec1B = {Tab, 1, b},
+ Rec1C = {Tab, 1, c},
+
+ ?match({atomic, ok}, mnesia:create_table([{name, Tab},
+ {ram_copies, [Node1]}])),
+ %% Start a transaction on one node
+ {success, [A]} = ?start_activities([Node1]),
+
+ %% store an object in the Tab - first transaction
+ ?start_transactions([A]),
+ A ! fun() ->
+ mnesia:write(Rec1A) % returns ok when successful
+ end,
+ ?match_receive({A, ok}),
+ A ! end_trans,
+ ?match_receive({A, {atomic, end_trans}}),
+
+ %% second transaction: store some new objects and abort before the
+ %% transaction is finished -> the new changes should be invisible
+ ?start_transactions([A]),
+ A ! fun() ->
+ mnesia:write(Rec1B),
+ exit(self(), kill), % that should kill the process itself
+ % - poor guy !
+ mnesia:write(Rec1C)
+ end,
+ %%
+ %% exit(.., kill) : the transaction can't trap this error - thus no
+ %% proper result can be sent by the test server
+
+ % ?match_receive({A, {aborted, Reason}}),
+
+ ?match_receive({'EXIT', _Pid, killed}), % A is killed and sends EXIT
+
+ %% Start a second transaction process, after the first failed
+ {success, [B]} = ?start_activities([Node1]),
+
+ %% check whether the interrupted transaction had no influence on the db
+ ?start_transactions([B]),
+ B ! fun() ->
+ ?match([Rec1A], mnesia:read({Tab, 1})),
+ ok
+ end,
+ ?match_receive({B, ok}),
+ B ! end_trans,
+ ?match_receive({B, {atomic, end_trans}}),
+
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+throw_in_middle_of_trans(suite) -> [];
+throw_in_middle_of_trans(Config) when is_list(Config) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = throw_in_middle_of_trans,
+
+ Rec1A = {Tab, 1, a},
+ Rec1B = {Tab, 1, b},
+ Rec1C = {Tab, 1, c},
+
+ ?match({atomic, ok}, mnesia:create_table([{name, Tab},
+ {ram_copies, [Node1]}])),
+ %% Start a transaction on one node
+ {success, [A]} = ?start_activities([Node1]),
+
+ %% store an object in the Tab - first transaction
+ ?start_transactions([A]),
+ A ! fun() ->
+ mnesia:write(Rec1A) % returns ok when successful
+ end,
+ ?match_receive({A, ok}),
+ A ! end_trans,
+ ?match_receive({A, {atomic, end_trans}}),
+
+ %% second transaction: store some new objects and abort before the
+ %% transaction is finished -> the new changes should be invisible
+ ?start_transactions([A]),
+ A ! fun() ->
+ mnesia:write(Rec1B),
+ throw(exit_transactian_by_a_throw),
+ mnesia:write(Rec1C)
+ end,
+ ?match_receive({A, {aborted, {throw, exit_transactian_by_a_throw}}}),
+ % A ! end_trans, % is A still alive ?
+ % ?match_receive({A, {atomic, end_trans}}), % {'EXIT', Pid, normal}
+
+ %?match_receive({A, {'EXIT', Pid, normal}}), % A died and sends EXIT
+
+ %% Start a second transaction process, after the first failed
+ {success, [B]} = ?start_activities([Node1]),
+
+ %% check whether the interrupted transaction had no influence on the db
+ ?start_transactions([B]),
+ B ! fun() ->
+ ?match([Rec1A], mnesia:read({Tab, 1})),
+ ok
+ end,
+ ?match_receive({B, ok}),
+ B ! end_trans,
+ ?match_receive({B, {atomic, end_trans}}),
+
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+mnesia_down_in_middle_of_trans(suite) ->
+ [
+ mnesia_down_during_infinite_trans,
+ lock_waiter,
+ restart_check
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+mnesia_down_during_infinite_trans(suite) -> [];
+mnesia_down_during_infinite_trans(Config) when is_list(Config) ->
+ [Node1, Node2] = ?acquire_nodes(2, Config),
+ Tab = mnesia_down_during_infinite_trans,
+
+ ?match({atomic, ok},
+ mnesia:create_table([{name, Tab}, {ram_copies, [Node1, Node2]}])),
+ %% Start a transaction on one node
+ {success, [A2, A1]} = ?start_activities([Node2, Node1]),
+ %% Start order of the transactions is important
+ %% We also need to sync the tid counter
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write({Tab, 1, test_ok}) end)),
+ mnesia_test_lib:start_sync_transactions([A2, A1]),
+
+ %% Obtain a write lock and wait forever
+ RecA = {Tab, 1, test_not_ok},
+ A1 ! fun() -> mnesia:write(RecA) end,
+ ?match_receive({A1, ok}),
+
+ A1 ! fun() -> process_flag(trap_exit, true), timer:sleep(infinity) end,
+ ?match_receive(timeout),
+
+ %% Try to get read lock, but gets queued
+ A2 ! fun() -> mnesia:read({Tab, 1}) end,
+ ?match_receive(timeout),
+
+ %% Kill Mnesia on other node
+ mnesia_test_lib:kill_mnesia([Node1]),
+
+ %% Second transaction gets the read lock
+ ?match_receive({A2, [{Tab, 1, test_ok}]}),
+ exit(A1, kill), % Needed since we trap exit
+
+ ?verify_mnesia([Node2], [Node1]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+lock_waiter(doc) ->
+ ["The purpose of this test case is to test the following situation:",
+ "process B locks an object, process A accesses that object as",
+ "well, but A has to wait for the lock to be released. Then",
+ "mnesia of B goes down. Question: will A get the lock ?",
+ "important: the transaction of A is the oldest one !!! (= a little tricky)",
+ "",
+ "several different access operations shall be tested",
+ "rt = read_lock_table, wt = write_lock_table, r = read,",
+ "sw = s_write, w = write, wr = wread"];
+lock_waiter(suite) ->
+ [
+ lock_waiter_sw_r,
+ lock_waiter_sw_rt,
+ lock_waiter_sw_wt,
+ lock_waiter_wr_r,
+ lock_waiter_srw_r,
+ lock_waiter_sw_sw,
+ lock_waiter_sw_w,
+ lock_waiter_sw_wr,
+ lock_waiter_sw_srw,
+ lock_waiter_wr_wt,
+ lock_waiter_srw_wt,
+ lock_waiter_wr_sw,
+ lock_waiter_srw_sw,
+ lock_waiter_wr_w,
+ lock_waiter_srw_w,
+ lock_waiter_r_sw,
+ lock_waiter_r_w,
+ lock_waiter_r_wt,
+ lock_waiter_rt_sw,
+ lock_waiter_rt_w,
+ lock_waiter_rt_wt,
+ lock_waiter_wr_wr,
+ lock_waiter_srw_srw,
+ lock_waiter_wt_r,
+ lock_waiter_wt_w,
+ lock_waiter_wt_rt,
+ lock_waiter_wt_wt,
+ lock_waiter_wt_wr,
+ lock_waiter_wt_srw,
+ lock_waiter_wt_sw,
+ lock_waiter_w_wr,
+ lock_waiter_w_srw,
+ lock_waiter_w_sw,
+ lock_waiter_w_r,
+ lock_waiter_w_w,
+ lock_waiter_w_rt,
+ lock_waiter_w_wt
+ ].
+
+lock_waiter_sw_r(suite) -> [];
+lock_waiter_sw_r(Config) when is_list(Config) ->
+ start_lock_waiter(sw, r, Config).
+
+lock_waiter_sw_rt(suite) -> [];
+lock_waiter_sw_rt(Config) when is_list(Config) ->
+ start_lock_waiter(sw, rt, Config).
+
+lock_waiter_sw_wt(suite) -> [];
+lock_waiter_sw_wt(Config) when is_list(Config) ->
+ start_lock_waiter(sw, wt,Config).
+
+lock_waiter_wr_r(suite) -> [];
+lock_waiter_wr_r(Config) when is_list(Config) ->
+ start_lock_waiter(wr, r, Config).
+
+lock_waiter_srw_r(suite) -> [];
+lock_waiter_srw_r(Config) when is_list(Config) ->
+ start_lock_waiter(srw, r, Config).
+
+lock_waiter_sw_sw(suite) -> [];
+lock_waiter_sw_sw(Config) when is_list(Config) ->
+ start_lock_waiter(sw, sw,Config).
+
+lock_waiter_srw_srw(suite) -> [];
+lock_waiter_srw_srw(Config) when is_list(Config) ->
+ start_lock_waiter(srw, srw,Config).
+
+lock_waiter_wr_wr(suite) -> [];
+lock_waiter_wr_wr(Config) when is_list(Config) ->
+ start_lock_waiter(wr, wr,Config).
+
+lock_waiter_sw_w(suite) -> [];
+lock_waiter_sw_w(Config) when is_list(Config) ->
+ start_lock_waiter(sw, w,Config).
+
+lock_waiter_sw_wr(suite) -> [];
+lock_waiter_sw_wr(Config) when is_list(Config) ->
+ start_lock_waiter(sw, wr,Config).
+
+lock_waiter_sw_srw(suite) -> [];
+lock_waiter_sw_srw(Config) when is_list(Config) ->
+ start_lock_waiter(sw, srw,Config).
+
+lock_waiter_wr_wt(suite) -> [];
+lock_waiter_wr_wt(Config) when is_list(Config) ->
+ start_lock_waiter(wr, wt,Config).
+
+lock_waiter_srw_wt(suite) -> [];
+lock_waiter_srw_wt(Config) when is_list(Config) ->
+ start_lock_waiter(srw, wt,Config).
+
+lock_waiter_wr_sw(suite) -> [];
+lock_waiter_wr_sw(Config) when is_list(Config) ->
+ start_lock_waiter(wr, sw,Config).
+
+lock_waiter_srw_sw(suite) -> [];
+lock_waiter_srw_sw(Config) when is_list(Config) ->
+ start_lock_waiter(srw, sw,Config).
+
+lock_waiter_wr_w(suite) -> [];
+lock_waiter_wr_w(Config) when is_list(Config) ->
+ start_lock_waiter(wr, w,Config).
+
+lock_waiter_srw_w(suite) -> [];
+lock_waiter_srw_w(Config) when is_list(Config) ->
+ start_lock_waiter(srw, w,Config).
+
+lock_waiter_r_sw(suite) -> [];
+lock_waiter_r_sw(Config) when is_list(Config) ->
+ start_lock_waiter(r, sw,Config).
+
+lock_waiter_r_w(suite) -> [];
+lock_waiter_r_w(Config) when is_list(Config) ->
+ start_lock_waiter(r, w,Config).
+
+lock_waiter_r_wt(suite) -> [];
+lock_waiter_r_wt(Config) when is_list(Config) ->
+ start_lock_waiter(r, wt,Config).
+
+lock_waiter_rt_sw(suite) -> [];
+lock_waiter_rt_sw(Config) when is_list(Config) ->
+ start_lock_waiter(rt, sw,Config).
+
+lock_waiter_rt_w(suite) -> [];
+lock_waiter_rt_w(Config) when is_list(Config) ->
+ start_lock_waiter(rt, w,Config).
+
+lock_waiter_rt_wt(suite) -> [];
+lock_waiter_rt_wt(Config) when is_list(Config) ->
+ start_lock_waiter(rt, wt,Config).
+
+lock_waiter_wt_r(suite) -> [];
+lock_waiter_wt_r(Config) when is_list(Config) ->
+ start_lock_waiter(wt, r,Config).
+
+lock_waiter_wt_w(suite) -> [];
+lock_waiter_wt_w(Config) when is_list(Config) ->
+ start_lock_waiter(wt, w,Config).
+
+lock_waiter_wt_rt(suite) -> [];
+lock_waiter_wt_rt(Config) when is_list(Config) ->
+ start_lock_waiter(wt, rt,Config).
+
+lock_waiter_wt_wt(suite) -> [];
+lock_waiter_wt_wt(Config) when is_list(Config) ->
+ start_lock_waiter(wt, wt,Config).
+
+lock_waiter_wt_wr(suite) -> [];
+lock_waiter_wt_wr(Config) when is_list(Config) ->
+ start_lock_waiter(wt, wr,Config).
+
+lock_waiter_wt_srw(suite) -> [];
+lock_waiter_wt_srw(Config) when is_list(Config) ->
+ start_lock_waiter(wt, srw,Config).
+
+lock_waiter_wt_sw(suite) -> [];
+lock_waiter_wt_sw(Config) when is_list(Config) ->
+ start_lock_waiter(wt, sw,Config).
+
+lock_waiter_w_wr(suite) -> [];
+lock_waiter_w_wr(Config) when is_list(Config) ->
+ start_lock_waiter(w, wr, Config).
+
+lock_waiter_w_srw(suite) -> [];
+lock_waiter_w_srw(Config) when is_list(Config) ->
+ start_lock_waiter(w, srw, Config).
+
+lock_waiter_w_sw(suite) -> [];
+lock_waiter_w_sw(Config) when is_list(Config) ->
+ start_lock_waiter(w, sw, Config).
+
+lock_waiter_w_r(suite) -> [];
+lock_waiter_w_r(Config) when is_list(Config) ->
+ start_lock_waiter(w, r, Config).
+
+lock_waiter_w_w(suite) -> [];
+lock_waiter_w_w(Config) when is_list(Config) ->
+ start_lock_waiter(w, w, Config).
+
+lock_waiter_w_rt(suite) -> [];
+lock_waiter_w_rt(Config) when is_list(Config) ->
+ start_lock_waiter(w, rt, Config).
+
+lock_waiter_w_wt(suite) -> [];
+lock_waiter_w_wt(Config) when is_list(Config) ->
+ start_lock_waiter(w, wt, Config).
+
+start_lock_waiter(BlockOpA, BlockOpB, Config) ->
+ [N1, N2] = Nodes = ?acquire_nodes(2, Config),
+
+ TabName = mk_tab_name(lock_waiter_),
+ ?match({atomic, ok}, mnesia:create_table(TabName,
+ [{ram_copies, [N1, N2]}])),
+
+ %% initialize the table with object {1, c} - when there
+ %% is a read transaction, the read will find that value
+ ?match({atomic, ok}, mnesia:sync_transaction(fun() -> mnesia:write({TabName, 1, c}) end)),
+ rpc:call(N2, ?MODULE, sync_tid_release, []),
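+ %% sync_tid_release/0 (defined at the end of this file) calls
+ %% sys:get_status/1 on mnesia_tm and mnesia_locker, which acts as a
+ %% synchronization point with those processes on N2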
+
+ Tester = self(),
+ Fun_A = fun() ->
+ NewCounter = incr_restart_counter(),
+ if
+ NewCounter == 1 ->
+ Tester ! go_ahead_test,
+ receive go_ahead -> ok end;
+ true -> ok
+ end,
+ lock_waiter_fun(BlockOpA, TabName, a),
+ NewCounter
+ end,
+
+ %% It is not possible to just spawn the transaction, because
+ %% its result shall be evaluated (it is delivered via the exit reason)
+ A = spawn_link(N1, ?MODULE, perform_restarted_transaction, [Fun_A]),
+
+ ?match(ok, receive go_ahead_test -> ok after 10000 -> timeout end),
+
+ mnesia_test_lib:sync_trans_tid_serial([N1, N2]),
+
+ Fun_B = fun() ->
+ lock_waiter_fun(BlockOpB, TabName, b),
+ A ! go_ahead,
+ wait(infinity)
+ end,
+
+ B = spawn_link(N2, mnesia, transaction, [Fun_B, 100]),
+
+ io:format("waiting for A (~p on ~p) to be in the queue ~n", [A, [N1, N2]]),
+ wait_for_a(A, [N1, N2]),
+
+ io:format("Queus ~p~n",
+ [[{N,rpc:call(N, mnesia, system_info, [lock_queue])} || N <- Nodes]]),
+
+ KillNode = node(B),
+ io:format("A was in the queue, time to kill Mnesia on B's node (~p on ~p)~n",
+ [B, KillNode]),
+
+ mnesia_test_lib:kill_mnesia([KillNode]), % kill mnesia of fun B
+
+ %% Read ops do not need to be restarted
+ ExpectedCounter =
+ if
+ BlockOpA == sw, BlockOpB == w -> 1;
+ BlockOpA == sw, BlockOpB == wt -> 1;
+ BlockOpA == sw, BlockOpB == wr -> 1;
+ BlockOpA == srw, BlockOpB == w -> 1;
+ BlockOpA == srw, BlockOpB == wt -> 1;
+ BlockOpA == srw, BlockOpB == wr -> 1;
+ BlockOpA == r, BlockOpB /= sw -> 1;
+ BlockOpA == rt, BlockOpB /= sw -> 1;
+ true -> 2
+ end,
+ ?match_multi_receive([{'EXIT', A, {atomic, ExpectedCounter}},
+ {'EXIT', B, killed}]),
+
+ %% The expected result depends on the transaction of
+ %% fun A - when it does not change the object in the
+ %% table (e.g. it is a read) then the predefined
+ %% value {TabName, 1, c} is expected to be the result here
+ ExpectedResult =
+ case BlockOpA of
+ w -> {TabName, 1, a};
+ sw -> {TabName, 1, a};
+ _all_other -> {TabName, 1, c}
+ end,
+
+ ?match({atomic, [ExpectedResult]},
+ mnesia:transaction(fun() -> mnesia:read({TabName, 1}) end, 100)),
+ ?verify_mnesia([N1], [N2]).
+
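+%% Build a unique table name by appending a timestamp to Prefix,
+%% so that repeated test runs do not reuse old table names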
+mk_tab_name(Prefix) ->
+ {Mega, Sec, Micro} = erlang:now(),
+ list_to_atom(lists:concat([Prefix , Mega, '_', Sec, '_', Micro])).
+
+lock_waiter_fun(Op, TabName, Val) ->
+ case Op of
+ rt -> mnesia:read_lock_table(TabName);
+ wt -> mnesia:write_lock_table(TabName);
+ r -> mnesia:read({TabName, 1});
+ w -> mnesia:write({TabName, 1, Val});
+ wr -> mnesia:wread({TabName, 1});
+ srw -> mnesia:read(TabName, 1, sticky_write);
+ sw -> mnesia:s_write({TabName, 1, Val})
+ end.
+
+wait_for_a(Pid, Nodes) ->
+ wait_for_a(Pid, Nodes, 5).
+
+wait_for_a(_P, _N, 0) ->
+ ?error("Timeout while waiting for lock on a~n", []);
+
+wait_for_a(Pid, Nodes, Count) ->
+ %% io:format("WAIT_FOR_A ~p ON ~w ~n", [Pid, Nodes]),
+ List = [rpc:call(N, mnesia, system_info, [lock_queue]) || N <- Nodes],
+ Q = lists:append(List),
+ check_q(Pid, Q, Nodes, Count).
+
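+%% Accept both formats of mnesia:system_info(lock_queue) entries and
+%% return ok as soon as Pid is found waiting in the queue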
+check_q(Pid, [{{_Oid,_Tid}, _Op, Pid, _WFT} | _Tail], _N, _Count) ->
+ ok;
+check_q(Pid, [{_Oid, _Op, Pid, _Tid, _WFT} | _Tail], _N, _Count) ->
+ ok;
+check_q(Pid, [_ | Tail], N, Count) ->
+ check_q(Pid, Tail, N, Count);
+check_q(Pid, [], N, Count) ->
+ timer:sleep(500),
+ wait_for_a(Pid, N, Count - 1).
+
+perform_restarted_transaction(Fun_Trans) ->
+ %% The result of the transaction shall be:
+ %% - undefined (if the transaction was never executed)
+ %% - Times (the number of times that the transaction has been executed)
+
+ Result = mnesia:transaction(Fun_Trans, 100),
+ exit(Result).
+
+%% Returns new val
+incr_restart_counter() ->
+ NewCount =
+ case get(count_restart_of_transaction) of
+ undefined -> 1;
+ OldCount -> OldCount + 1
+ end,
+ put(count_restart_of_transaction, NewCount),
+ NewCount.
+
+wait(Mseconds) ->
+ receive
+ after Mseconds -> ok
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+restart_check(doc) ->
+ [
+ "Test case: 'A' performs a transaction on a table which",
+ "is replicated only on node B. During that transaction",
+ "Mnesia on node B is killed. The transaction of A should",
+ "be stopped, since there is no further replica.",
+ "rt = read_lock_table, wt = write_lock_table, r = read,",
+ "sw = s_write, w = write, wr = wread"];
+restart_check(suite) ->
+ [
+ restart_r_one,
+ restart_w_one,
+ restart_rt_one,
+ restart_wt_one,
+ restart_wr_one,
+ restart_sw_one,
+ restart_r_two,
+ restart_w_two,
+ restart_rt_two,
+ restart_wt_two,
+ restart_wr_two,
+ restart_sw_two
+ ].
+
+restart_r_one(suite) -> [];
+restart_r_one(Config) when is_list(Config) ->
+ start_restart_check(r, one, Config).
+
+restart_w_one(suite) -> [];
+restart_w_one(Config) when is_list(Config) ->
+ start_restart_check(w, one, Config).
+
+restart_rt_one(suite) -> [];
+restart_rt_one(Config) when is_list(Config) ->
+ start_restart_check(rt, one, Config).
+
+restart_wt_one(suite) -> [];
+restart_wt_one(Config) when is_list(Config) ->
+ start_restart_check(wt, one, Config).
+
+restart_wr_one(suite) -> [];
+restart_wr_one(Config) when is_list(Config) ->
+ start_restart_check(wr, one, Config).
+
+restart_sw_one(suite) -> [];
+restart_sw_one(Config) when is_list(Config) ->
+ start_restart_check(sw, one, Config).
+
+restart_r_two(suite) -> [];
+restart_r_two(Config) when is_list(Config) ->
+ start_restart_check(r, two, Config).
+
+restart_w_two(suite) -> [];
+restart_w_two(Config) when is_list(Config) ->
+ start_restart_check(w, two, Config).
+
+restart_rt_two(suite) -> [];
+restart_rt_two(Config) when is_list(Config) ->
+ start_restart_check(rt, two, Config).
+
+restart_wt_two(suite) -> [];
+restart_wt_two(Config) when is_list(Config) ->
+ start_restart_check(wt, two, Config).
+
+restart_wr_two(suite) -> [];
+restart_wr_two(Config) when is_list(Config) ->
+ start_restart_check(wr, two, Config).
+
+restart_sw_two(suite) -> [];
+restart_sw_two(Config) when is_list(Config) ->
+ start_restart_check(sw, two, Config).
+
+start_restart_check(RestartOp, ReplicaNeed, Config) ->
+ [N1, N2, N3] = Nodes = ?acquire_nodes(3, Config),
+
+ {TabName, _TabNodes} = create_restart_table(ReplicaNeed, Nodes),
+
+ %% initialize the table with object {1, c} - when there
+ %% is a read transaction, the read will find that value
+ ?match({atomic, ok}, mnesia:sync_transaction(fun() -> mnesia:write({TabName, 1, c}) end)),
+
+ %% Really sync tid_release
+ rpc:multicall([N2,N3], ?MODULE, sync_tid_release, []),
+ Coord = self(),
+
+ Fun_A = fun() ->
+ NewCounter = incr_restart_counter(),
+ case NewCounter of
+ 1 ->
+ mnesia:write({TabName, 1, d}),
+ %% send a message to the test proc
+ Coord ! {self(),fun_a_is_blocked},
+ receive go_ahead -> ok end;
+ _ ->
+ %% the fun will NOT be blocked here
+ restart_fun_A(RestartOp, TabName)
+ end,
+ NewCounter
+ end,
+
+ A = spawn_link(N1, ?MODULE, perform_restarted_transaction, [Fun_A]),
+ ?match_receive({A,fun_a_is_blocked}),
+
+ %% Mnesia shall be killed on the node that A is
+ %% reading the information from
+ kill_where_to_read(TabName, N1, [N2, N3]),
+
+ %% Wait some time to let Mnesia go down and spread the news around.
+ %% Fun A shall be able to finish its job before being restarted
+ wait(500),
+ A ! go_ahead,
+
+ %% The sticky write does not work on remote nodes
+ ExpectedMsg =
+ case RestartOp of
+ sw when ReplicaNeed == two ->
+ {'EXIT',A,{aborted, {not_local, TabName}}};
+ _all_other ->
+ case ReplicaNeed of
+ one ->
+ {'EXIT',A,{aborted, {no_exists, TabName}}};
+ two ->
+ {'EXIT',A,{atomic, 2}}
+ end
+ end,
+
+ ?match_receive(ExpectedMsg),
+
+ %% Now Mnesia has to be started again on the killed node
+ %% because the next test suite will need it
+ ?match([], mnesia_test_lib:start_mnesia(Nodes, [TabName])),
+
+
+ %% The expected result depends on the transaction of
+ %% fun A - when it does not change the object in the
+ %% table (e.g. it is a read) then the predefined
+ %% value {TabName, 1, c} is expected to be the result here
+
+ ExpectedResult =
+ case ReplicaNeed of
+ one ->
+ [];
+ two ->
+ case RestartOp of
+ w -> [{TabName, 1, a}];
+ _ -> [{TabName, 1, c}]
+ end
+ end,
+
+ ?match({atomic, ExpectedResult},
+ mnesia:transaction(fun() -> mnesia:read({TabName, 1}) end,100)),
+ ?verify_mnesia(Nodes, []).
+
+create_restart_table(ReplicaNeed, [_N1, N2, N3]) ->
+ TabNodes =
+ case ReplicaNeed of
+ one -> [N2];
+ two -> [N2, N3]
+ end,
+ TabName = mk_tab_name(restart_check_),
+ ?match({atomic, ok}, mnesia:create_table(TabName, [{ram_copies, TabNodes}])),
+ {TabName, TabNodes}.
+
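+%% The operation performed by fun A when it is restarted; similar to
+%% lock_waiter_fun/3 but always writes the value a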
+restart_fun_A(Op, TabName) ->
+ case Op of
+ rt -> mnesia:read_lock_table(TabName);
+ wt -> mnesia:write_lock_table(TabName);
+ r -> mnesia:read({TabName, 1});
+ w -> mnesia:write({TabName, 1, a});
+ wr -> mnesia:wread({TabName, 1});
+ sw -> mnesia:s_write({TabName, 1, a})
+ end.
+
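+%% Kill Mnesia on the node that N1 currently reads TabName from,
+%% or on all candidate nodes if where_to_read is unexpected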
+kill_where_to_read(TabName, N1, Nodes) ->
+ Read = rpc:call(N1,mnesia,table_info, [TabName, where_to_read]),
+ case lists:member(Read, Nodes) of
+ true ->
+ mnesia_test_lib:kill_mnesia([Read]);
+ false ->
+ ?error("Fault while killing Mnesia: ~p~n", [Read]),
+ mnesia_test_lib:kill_mnesia(Nodes)
+ end.
+
+sync_tid_release() ->
+ sys:get_status(whereis(mnesia_tm)),
+ sys:get_status(whereis(mnesia_locker)),
+ ok.
+
diff --git a/lib/mnesia/test/mnesia_config_backup.erl b/lib/mnesia/test/mnesia_config_backup.erl
new file mode 100644
index 0000000000..a33ec6ac5c
--- /dev/null
+++ b/lib/mnesia/test/mnesia_config_backup.erl
@@ -0,0 +1,105 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(mnesia_config_backup).
+-author('[email protected]').
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% This module is used for testing the backup module config parameter.
+%%
+%% This module is an impostor for the mnesia_backup module.
+%%
+%%
+%% Original doc below:
+%%
+%% This module contains one implementation of callback functions
+%% used by Mnesia at backup and restore. The user may however
+%% write an own module with the same interface as mnesia_backup and
+%% configure Mnesia so that the alternate module performs the actual
+%% accesses to the backup media. This means that the user may put
+%% the backup on media that Mnesia does not know about, possibly
+%% on hosts where Erlang is not running.
+%%
+%% The OpaqueData argument is never interpreted by other parts of
+%% Mnesia. It is the property of this module. Alternate implementations
+%% of this module may have different interpretations of OpaqueData.
+%% The OpaqueData argument given to open_write/1 and open_read/1
+%% are forwarded directly from the user.
+%%
+%% All functions must return {ok, NewOpaqueData} or {error, Reason}.
+%%
+%% The NewOpaqueData arguments returned by backup callback functions will
+%% be given as input when the next backup callback function is invoked.
+%% If any return value does not match {ok, _} the backup will be aborted.
+%%
+%% The NewOpaqueData arguments returned by restore callback functions will
+%% be given as input when the next restore callback function is invoked.
+%% If any return value does not match {ok, _} the restore will be aborted.
+%%
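+%% A minimal usage sketch (for illustration only; it mirrors the
+%% backup_module/1 case in mnesia_config_test):
+%%
+%%   ok = mnesia:start([{backup_module, mnesia_config_backup}]),
+%%   ok = mnesia:backup("some.BUP"),
+%%   ok = mnesia:install_fallback("some.BUP").
+%%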
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+-export([
+ open_write/1, write/2, commit_write/1, abort_write/1,
+ open_read/1, read/1, close_read/1
+ ]).
+
+-record(backup, {name, mode, items}).
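+%% name  - the backup name given by the caller (used as a file name)
+%% mode  - write | read | closed
+%% items - the backup terms, kept in reverse order while writing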
+
+open_write(Name) ->
+ file:delete(Name),
+ {ok, #backup{name = Name, mode = write, items = []}}.
+
+write(Opaque, Item) when Opaque#backup.mode == write ->
+ %% Build the list in reverse order
+ {ok, Opaque#backup{items = [Item | Opaque#backup.items]}}.
+
+commit_write(Opaque) when Opaque#backup.mode == write ->
+ Bin = term_to_binary(Opaque#backup.items),
+ case file:write_file(Opaque#backup.name, Bin) of
+ ok ->
+ {ok, Opaque#backup{mode = closed, items = []}};
+ {error, Reason} ->
+ {error, {commit_write, Reason}}
+ end.
+
+abort_write(Opaque) ->
+ {ok, Opaque#backup{mode = closed, items = []}}.
+
+open_read(Name) ->
+ case file:read_file(Name) of
+ {ok, Bin} ->
+ ReverseList = binary_to_term(Bin),
+ List = lists:reverse(ReverseList),
+ {ok, #backup{name = Name, mode = read, items = List}};
+ {error, Reason} ->
+ {error, {open_read, Reason}}
+ end.
+
+read(Opaque) when Opaque#backup.mode == read ->
+ case Opaque#backup.items of
+ [Head | Tail] ->
+ {ok, Opaque#backup{items = Tail}, Head};
+ [] ->
+ {ok, Opaque#backup{mode = closed}, []}
+ end.
+
+close_read(Opaque) ->
+ {ok, Opaque#backup{mode = closed, items = []}}.
diff --git a/lib/mnesia/test/mnesia_config_event.erl b/lib/mnesia/test/mnesia_config_event.erl
new file mode 100644
index 0000000000..6c1dea7ed5
--- /dev/null
+++ b/lib/mnesia/test/mnesia_config_event.erl
@@ -0,0 +1,74 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(mnesia_config_event).
+-author('[email protected]').
+
+-behaviour(gen_event).
+
+%%
+%% This module was stolen from Mnesia
+%%
+
+
+%% gen_event callback interface
+-export([init/1, handle_event/2, handle_call/2, handle_info/2,
+ terminate/2, code_change/3]).
+
+
+init(_Args) ->
+ {ok, []}.
+
+handle_event(Msg, State) ->
+ handle_any_event(Msg, State).
+
+handle_info(Msg, State) ->
+ handle_any_event(Msg, State).
+
+
+handle_call(Msg, State) ->
+ handle_any_event(Msg, State).
+
+
+%% The main...
+
+handle_any_event({get_log, Pid}, State) ->
+ Pid ! {log, State},
+ {ok, State};
+handle_any_event(Msg, State) ->
+ io:format("Got event: ~p~n", [Msg]),
+ {ok, [Msg | State]}.
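+
+%% The accumulated log can be fetched by a test case like this
+%% (see event_module/1 in mnesia_config_test):
+%%
+%%   mnesia_event ! {get_log, self()},
+%%   receive {log, Log} -> Log after 10000 -> [timeout] end.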
+
+%%-----------------------------------------------------------------
+%% terminate(Reason, State) ->
+%% AnyVal
+%%-----------------------------------------------------------------
+
+terminate(_Reason, _State) ->
+ ok.
+
+%%----------------------------------------------------------------------
+%% Func: code_change/3
+%% Purpose: Upgrade process when its code is to be changed
+%% Returns: {ok, NewState}
+%%----------------------------------------------------------------------
+code_change(_OldVsn, _State, _Extra) ->
+ exit(not_supported).
+
diff --git a/lib/mnesia/test/mnesia_config_test.erl b/lib/mnesia/test/mnesia_config_test.erl
new file mode 100644
index 0000000000..7b62c63a62
--- /dev/null
+++ b/lib/mnesia/test/mnesia_config_test.erl
@@ -0,0 +1,1466 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(mnesia_config_test).
+-author('[email protected]').
+
+-include("mnesia_test_lib.hrl").
+
+-record(test_table,{i,a1,a2,a3}).
+-record(test_table2,{i, b}).
+
+-export([
+ all/1,
+ access_module/1,
+ auto_repair/1,
+ backup_module/1,
+ debug/1,
+ dir/1,
+ dump_log_load_regulation/1,
+ dump_log_thresholds/1,
+ dump_log_update_in_place/1,
+ embedded_mnemosyne/1,
+ event_module/1,
+ ignore_fallback_at_startup/1,
+ inconsistent_database/1,
+ max_wait_for_decision/1,
+ send_compressed/1,
+
+ app_test/1,
+ schema_config/1,
+ schema_merge/1,
+ unknown_config/1,
+
+ dump_log_time_threshold/1,
+ dump_log_write_threshold/1,
+
+ start_one_disc_full_then_one_disc_less/1,
+ start_first_one_disc_less_then_one_disc_full/1,
+ start_first_one_disc_less_then_two_more_disc_less/1,
+ schema_location_and_extra_db_nodes_combinations/1,
+ table_load_to_disc_less_nodes/1,
+ dynamic_connect/1,
+ dynamic_basic/1,
+ dynamic_ext/1,
+ dynamic_bad/1,
+
+ init_per_testcase/2,
+ fin_per_testcase/2,
+ c_nodes/0
+ ]).
+
+-export([check_logs/1]).
+
+-define(init(N, Config),
+ mnesia_test_lib:prepare_test_case([{init_test_case, [mnesia]},
+ delete_schema,
+ {reload_appls, [mnesia]}],
+ N, Config, ?FILE, ?LINE)).
+-define(acquire(N, Config),
+ mnesia_test_lib:prepare_test_case([{init_test_case, [mnesia]},
+ delete_schema,
+ {reload_appls, [mnesia]},
+ create_schema,
+ {start_appls, [mnesia]}],
+ N, Config, ?FILE, ?LINE)).
+-define(acquire_schema(N, Config),
+ mnesia_test_lib:prepare_test_case([{init_test_case, [mnesia]},
+ delete_schema,
+ {reload_appls, [mnesia]},
+ create_schema],
+ N, Config, ?FILE, ?LINE)).
+-define(cleanup(N, Config),
+ mnesia_test_lib:prepare_test_case([{reload_appls, [mnesia]}],
+ N, Config, ?FILE, ?LINE)).
+-define(trans(Fun),
+ ?match({atomic, ok}, mnesia:transaction(Fun))).
+
+init_per_testcase(Func, Conf) ->
+ mnesia_test_lib:init_per_testcase(Func, Conf).
+
+fin_per_testcase(Func, Conf) ->
+ mnesia_test_lib:fin_per_testcase(Func, Conf).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+
+all(doc) ->
+ [
+ "Test all configuration parameters",
+ "Perform an exhaustive test of all the various parameters that",
+ "may be used to configure the Mnesia application.",
+ "",
+ "Hint: Check out the unofficial function mnesia:start/1.",
+ " But be careful to cleanup all configuration parameters",
+ " afterwards since the rest of the test suite may rely on",
+ " these default configurations. Perhaps it is best to run",
+ " these tests in a separate node which is dropped afterwards.",
+ "Are really all configuration parameters covered?"];
+
+all(suite) ->
+ [
+ access_module,
+ auto_repair,
+ backup_module,
+ debug,
+ dir,
+ dump_log_load_regulation,
+ dump_log_thresholds,
+ dump_log_update_in_place,
+ embedded_mnemosyne,
+ event_module,
+ ignore_fallback_at_startup,
+ inconsistent_database,
+ max_wait_for_decision,
+ send_compressed,
+
+ app_test,
+ schema_config,
+ unknown_config
+ ].
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+access_module(doc) ->
+ ["Replace the activity access module with another module and ",
+ "use it to read and write to some alternate table storage"];
+access_module(suite) -> [];
+access_module(Config) when is_list(Config) ->
+ Nodes = ?acquire_schema(1, Config),
+ ?match(ok, mnesia:start([{access_module, mnesia_frag}])),
+
+ ?match(mnesia_frag, mnesia:system_info(access_module)),
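+ %% With mnesia_frag as the default access module, the mnesia:activity
+ %% calls in access_tab/2 below that do not name a module explicitly
+ %% are handled by mnesia_frag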
+
+ access_tab(ram_copies, Nodes),
+ case mnesia_test_lib:diskless(Config) of
+ true -> skip;
+ false ->
+ access_tab(disc_copies, Nodes)
+ , access_tab(disc_only_copies, Nodes)
+ end,
+
+ ?verify_mnesia(Nodes, []),
+ ?cleanup(1, Config).
+
+access_tab(Storage, Nodes) ->
+ Tab = list_to_atom(lists:concat([access_tab_, Storage])),
+ RecName = some_access,
+ Attr = val,
+ TabDef = [{Storage, Nodes},
+ {type, bag},
+ {index, [Attr]},
+ {record_name, RecName}],
+ ?match({atomic,ok}, mnesia:create_table(Tab, TabDef)),
+
+ Activity = fun(Kind) ->
+ A = [Kind, Tab, RecName, Attr, Nodes],
+ io:format("kind: ~w, storage: ~w~n", [Kind, Storage]),
+ mnesia:activity(Kind, fun do_access/5, A)
+ end,
+ ModActivity = fun(Kind, M) ->
+ io:format("kind: ~w, storage: ~w. module: ~w~n",
+ [Kind, Storage, M]),
+ A = [Kind, Tab, RecName, Attr, Nodes],
+ mnesia:activity(Kind, fun do_access/5, A, M)
+ end,
+ ?match(ok, Activity(transaction)),
+ ?match(ok, Activity({transaction, 47})),
+ ?match(ok, ModActivity(transaction, mnesia)),
+ ?match(ok, ModActivity(transaction, mnesia_frag)),
+
+ ?match(ok, Activity(async_dirty)),
+ ?match(ok, Activity(sync_dirty)),
+ case Storage of
+ ram_copies ->
+ ?match(ok, Activity(ets));
+ _ ->
+ ignore
+ end.
+
+do_access(Kind, Tab, RecName, Attr, Nodes) ->
+ Tens = lists:sort([{RecName, 1, 10}, {RecName, 3, 10}]),
+ {OptNodes, OptTens} =
+ case Kind of
+ transaction -> {Nodes, Tens};
+ {transaction, _} -> {Nodes, Tens};
+ async_dirty -> {[], Tens};
+ sync_dirty -> {[], Tens};
+ ets -> {[], []}
+ end,
+ ?match(RecName, mnesia:table_info(Tab, record_name)),
+
+ ?match(ok, mnesia:write(Tab, {RecName, 1, 10}, write)),
+ ?match(ok, mnesia:write(Tab, {RecName, 2, 20}, sticky_write)),
+ ?match(ok, mnesia:write(Tab, {RecName, 2, 21}, sticky_write)),
+ ?match(ok, mnesia:write(Tab, {RecName, 2, 22}, write)),
+ ?match(ok, mnesia:write(Tab, {RecName, 3, 10}, write)),
+
+ Twos = [{RecName, 2, 20}, {RecName, 2, 21}, {RecName, 2, 22}],
+ ?match(Twos, lists:sort(mnesia:read(Tab, 2, read))),
+
+ ?match(ok, mnesia:delete_object(Tab, {RecName, 2, 21}, sticky_write)),
+
+ TenPat = {RecName, '_', 10},
+ ?match(Tens, lists:sort(mnesia:match_object(Tab, TenPat, read))),
+ ?match(OptTens, lists:sort(mnesia:index_match_object(Tab, TenPat, Attr, read) )),
+ ?match(OptTens, lists:sort(mnesia:index_read(Tab, 10, Attr))),
+ Keys = [1, 2, 3],
+ ?match(Keys, lists:sort(mnesia:all_keys(Tab))),
+
+ First = mnesia:first(Tab),
+ Mid = mnesia:next(Tab, First),
+ Last = mnesia:next(Tab, Mid),
+ ?match('$end_of_table', mnesia:next(Tab, Last)),
+ ?match(Keys, lists:sort([First,Mid,Last])),
+
+ %% For set and bag tables, last and prev work like first and next
+ First2 = mnesia:last(Tab),
+ Mid2 = mnesia:prev(Tab, First2),
+ Last2 = mnesia:prev(Tab, Mid2),
+ ?match('$end_of_table', mnesia:prev(Tab, Last2)),
+ ?match(Keys, lists:sort([First2,Mid2,Last2])),
+
+ ?match([ok, ok, ok], [mnesia:delete(Tab, K, write) || K <- Keys]),
+ W = wild_pattern,
+ ?match([], mnesia:match_object(Tab, mnesia:table_info(Tab, W), read)),
+ ?log("Safe fixed ~p~n", [catch ets:info(Tab, safe_fixed)]),
+ ?log("Fixed ~p ~n", [catch ets:info(Tab, fixed)]),
+
+ ?match(OptNodes, mnesia:lock({global, some_lock_item, Nodes}, write)),
+ ?match(OptNodes, mnesia:lock({global, some_lock_item, Nodes}, read)),
+ ?match(OptNodes, mnesia:lock({table, Tab}, read)),
+ ?match(OptNodes, mnesia:lock({table, Tab}, write)),
+
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+auto_repair(doc) ->
+ ["Try the auto_repair mechanism on the various disk_logs and dets files.",
+ "",
+ "The case tests both normal values of the parameter, and also",
+ "one crazy value.",
+ "The test of the real auto_repair functionality is made in the",
+ "dets suite"
+ ];
+auto_repair(suite) -> [];
+auto_repair(Config) when is_list(Config) ->
+ ?init(1, Config),
+ ?match(ok, mnesia:start()), % Check default true
+ ?match(true, mnesia:system_info(auto_repair)),
+ ?match(stopped, mnesia:stop()),
+ ?match(ok, mnesia:start([{auto_repair, true}])),
+ ?match(true, mnesia:system_info(auto_repair)),
+ ?match(stopped, mnesia:stop()),
+ ?match(ok, mnesia:start([{auto_repair, false}])),
+ ?match(false, mnesia:system_info(auto_repair)),
+ ?match(stopped, mnesia:stop()),
+ ?match({error, {bad_type, auto_repair, your_mama}},
+ mnesia:start([{auto_repair, your_mama}])),
+ ?match(stopped, mnesia:stop()),
+ ?cleanup(1, Config),
+ ok.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+backup_module(doc) ->
+ ["Replace the backup module with another module and use it to",
+ "read and write to an alternate backup media, e.g stored in",
+ "the internal state of a simple process."];
+backup_module(suite) -> [];
+backup_module(Config) when is_list(Config) ->
+ Nodes = ?acquire_schema(1, Config),
+ ?match(ok, mnesia:start([{backup_module, mnesia_config_backup}])),
+ ?match({atomic,ok},
+ mnesia:create_table(test_table,
+ [{disc_copies, Nodes},
+ {attributes,
+ record_info(fields,test_table)}])),
+
+ ?match({atomic,ok},
+ mnesia:create_table(test_table2,
+ [{disc_copies, Nodes},
+ {attributes,
+ record_info(fields,test_table2)}])),
+ %% Write in test table
+ ?trans(fun() -> mnesia:write(#test_table{i=1}) end),
+ ?trans(fun() -> mnesia:write(#test_table{i=2}) end),
+
+ %% Write in test table 2
+ ?trans(fun() -> mnesia:write(#test_table2{i=3}) end),
+ ?trans(fun() -> mnesia:write(#test_table2{i=4}) end),
+ mnesia_test_lib:sync_tables(Nodes, [test_table, test_table2]),
+
+ File = whow,
+ %% Now make a backup
+ ?match(ok, mnesia:backup(File)),
+
+ ?match(ok, mnesia:install_fallback(File)),
+
+ %% Now add things
+ ?trans(fun() -> mnesia:write(#test_table{i=2.5}) end),
+ ?trans(fun() -> mnesia:write(#test_table2{i=3.5}) end),
+
+ mnesia_test_lib:kill_mnesia(Nodes),
+ receive after 2000 -> ok end,
+ ?match([], mnesia_test_lib:start_mnesia(Nodes, [test_table, test_table2])),
+
+ %% Now check newly started tables
+ ?match({atomic, [1,2]},
+ mnesia:transaction(fun() -> lists:sort(mnesia:all_keys(test_table)) end)),
+ ?match({atomic, [3,4]},
+ mnesia:transaction(fun() -> lists:sort(mnesia:all_keys(test_table2)) end)),
+
+ file:delete(File),
+ ?verify_mnesia(Nodes, []),
+ ?cleanup(1, Config),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+debug(doc) ->
+ ["Try out the four debug levels and ensure that the",
+ "expected events are generated."];
+debug(suite) -> [];
+debug(Config) when is_list(Config) ->
+ Nodes = ?init(1, Config),
+ case application:get_env(mnesia,debug) of
+ undefined ->
+ ?match(none, mnesia:system_info(debug));
+ {ok, false} ->
+ ?match(none, mnesia:system_info(debug));
+ {ok, true} ->
+ ?match(debug, mnesia:system_info(debug));
+ {ok, Env} ->
+ ?match(Env, mnesia:system_info(debug))
+ end,
+
+ ?match(ok, mnesia:start([{debug, verbose}])),
+ ?match(verbose, mnesia:system_info(debug)),
+ mnesia_test_lib:kill_mnesia(Nodes),
+ receive after 2000 -> ok end,
+
+ ?match(ok, mnesia:start([{debug, debug}])),
+ ?match(debug, mnesia:system_info(debug)),
+ mnesia_test_lib:kill_mnesia(Nodes),
+ receive after 2000 -> ok end,
+
+ ?match(ok, mnesia:start([{debug, trace}])),
+ ?match(trace, mnesia:system_info(debug)),
+ mnesia_test_lib:kill_mnesia(Nodes),
+ receive after 2000 -> ok end,
+
+ ?match(ok, mnesia:start([{debug, true}])),
+ ?match(debug, mnesia:system_info(debug)),
+ mnesia_test_lib:kill_mnesia(Nodes),
+ receive after 2000 -> ok end,
+
+ ?match(ok, mnesia:start([{debug, false}])),
+ ?match(none, mnesia:system_info(debug)),
+
+ ?verify_mnesia(Nodes, []),
+ ?cleanup(1, Config),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+dir(doc) ->
+ ["Try to use alternate Mnesia directories"];
+dir(suite) -> [];
+dir(Config) when is_list(Config) ->
+ Nodes = ?init(1, Config),
+
+ ?match(ok, mnesia:start([{dir, tuff}])),
+ Dir = filename:join([element(2, file:get_cwd()), "tuff"]),
+ ?match(Dir, mnesia:system_info(directory)),
+ mnesia_test_lib:kill_mnesia(Nodes),
+
+ ?cleanup(1, Config),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+dump_log_update_in_place(doc) ->
+ ["Change the update in place policy for the transaction log dumper."];
+dump_log_update_in_place(suite) -> [];
+dump_log_update_in_place(Config) when is_list(Config) ->
+ Nodes = ?acquire(1, Config),
+ ?match(true, mnesia:system_info(dump_log_update_in_place)),
+ ?match({atomic,ok},
+ mnesia:create_table(test_table,
+ [{disc_copies, Nodes},
+ {attributes,
+ record_info(fields,test_table)}])),
+
+ mnesia_test_lib:kill_mnesia(Nodes),
+ receive after 2000 -> ok end,
+
+ ?match(ok, mnesia:start([{dump_log_update_in_place, false}])),
+ ?match(false, mnesia:system_info(dump_log_update_in_place)),
+
+ mnesia_test_lib:sync_tables(Nodes, [schema, test_table]),
+
+ %% Now provoke some log dumps
+
+ L = lists:map(
+ fun(Num) ->
+ %% Write something on one end ...
+ mnesia:transaction(
+ fun() ->
+ mnesia:write(#test_table{i=Num}) end
+ ) end,
+ lists:seq(1, 110)),
+
+ L2 = lists:duplicate(110, {atomic, ok}),
+
+ %% If this fails then some of the 110 writes above failed
+ ?match(true, L==L2),
+ if L==L2 -> ok;
+ true ->
+ ?verbose("***** List1 len: ~p, List2 len: ~p~n",
+ [length(L), length(L2)]),
+ ?verbose("L: ~p~nL2:~p~n", [L, L2])
+ end,
+
+ %% If we still can write, then Mnesia is probably alive
+ ?trans(fun() -> mnesia:write(#test_table{i=115}) end),
+
+ ?verify_mnesia(Nodes, []),
+ ?cleanup(1, Config),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+dump_log_thresholds(doc) ->
+ ["Elaborate with various values of the dump log thresholds and how",
+ "they affects each others. Both the dump_log_time_threshold and the",
+ "dump_log_write_threshold must be covered. Do also check that both",
+ "kinds of overload events are generated as expected.",
+ "",
+ "Logs are checked by first doing whatever has to be done to trigger ",
+ "a dump, and then stopping Mnesia and then look in the ",
+ "data files and see that the correct amount of transactions ",
+ "have been done."];
+dump_log_thresholds(suite) ->
+ [
+ dump_log_time_threshold,
+ dump_log_write_threshold
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+dump_log_write_threshold(doc)->
+ ["This test case must be rewritten.",
+ "Dump logs are tested by doing transactions, then killing Mnesia and ",
+ "then examining the table data files and see if they are correct.",
+ "The test_table is used as a counter, test_table. is stepped once ",
+ "for each transaction."];
+dump_log_write_threshold(suite)->[];
+dump_log_write_threshold(Config) when is_list(Config) ->
+ [N1] = ?acquire_schema(1, Config),
+
+ Threshold = 3,
+ ?match(ok,mnesia:start([{dump_log_write_threshold, Threshold}])),
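+ %% With dump_log_write_threshold = 3 the transaction log is dumped
+ %% automatically after every third logged transaction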
+
+ ?match({atomic,ok},
+ mnesia:create_table(test_table,
+ [{disc_copies, [N1]},
+ {attributes,
+ record_info(fields,test_table)}])),
+ ?match(dumped, mnesia:dump_log()),
+
+ ?match(ok, do_trans(2)), % Shall not have dumped
+ check_logs(0),
+
+ ?match(ok, do_trans(Threshold - 2)), % Trigger a dump
+ receive after 1000 -> ok end,
+ check_logs(Threshold),
+
+
+ ?match(ok, do_trans(Threshold - 1)),
+ ?match(dumped, mnesia:dump_log()), %% This should trigger ets2dcd dump
+ check_logs(0), %% and leave no dcl file
+
+ ?match(stopped, mnesia:stop()),
+
+ %% Check bad threshold value
+ ?match({error,{bad_type,dump_log_write_threshold,0}},
+ mnesia:start([{dump_log_write_threshold,0}])),
+
+ ?verify_mnesia([], [N1]),
+ ?cleanup(1, Config),
+ ok.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+dump_log_time_threshold(doc)->
+ ["See doc on above."];
+dump_log_time_threshold(suite)->[];
+dump_log_time_threshold(Config) when is_list(Config) ->
+ Nodes = ?acquire_schema(1, Config),
+ Time = 4000,
+
+ %% Check bad threshold value
+ ?match({error,{bad_type,dump_log_time_threshold,0}},
+ mnesia:start([{dump_log_time_threshold,0}])),
+
+
+ ?match(ok,mnesia:start([{dump_log_write_threshold,100},
+ {dump_log_time_threshold, Time}])),
+
+ ?match({atomic,ok},mnesia:create_table(test_table,
+ [{disc_copies, Nodes},
+ {attributes,
+ record_info(fields,
+ test_table)}])),
+
+ %% Check that nothing is dumped when within time threshold
+ ?match(ok, do_trans(1)),
+ check_logs(0),
+
+ ?match(Time, mnesia:system_info(dump_log_time_threshold)),
+
+ %% Check that things get dumped when time threshold exceeded
+ ?match(ok, do_trans(5)),
+ receive after Time+2000 -> ok end,
+ check_logs(6),
+
+ ?verify_mnesia([node()], []),
+ ?cleanup(1, Config),
+ ok.
+
+%%%%%%%%
+%%
+%% Help functions for dump log
+
+%% Do a transaction N times
+do_trans(0) -> ok;
+do_trans(N) ->
+ Fun = fun() ->
+ XX=incr(),
+ mnesia:write(#test_table{i=XX})
+ end,
+ {atomic, ok} = mnesia:transaction(Fun),
+ do_trans(N-1).
+
+%% An increasing number
+incr() ->
+ case get(bloody_counter) of
+ undefined -> put(bloody_counter, 2), 1;
+ Num -> put(bloody_counter, Num+1)
+ end.
+
+%%
+%% Check that the correct number of transactions have been recorded.
+%%-record(test_table,{i,a1,a2,a3}).
+check_logs(N) ->
+ File = mnesia_lib:tab2dcl(test_table),
+ Args = [{file, File}, {name, testing}, {repair, true}, {mode, read_only}],
+
+ if N == 0 ->
+ ?match(false, mnesia_lib:exists(File));
+ true ->
+ ?match(true, mnesia_lib:exists(File)),
+ ?match({ok, _Log}, disk_log:open(Args)),
+
+ {Cont, Terms} = disk_log:chunk(testing, start),
+ ?match(eof, disk_log:chunk(testing, Cont)),
+ %%?verbose("N: ~p, L: ~p~n", [N, L]),
+ disk_log:close(testing),
+
+ %% Correct number of records in file
+ ?match({N, N}, {N, length(Terms) -1 }) %% Ignore Header
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+dump_log_load_regulation(doc) ->
+ ["Test the load regulation of the dumper"];
+dump_log_load_regulation(suite) ->
+ [];
+dump_log_load_regulation(Config) when is_list(Config) ->
+ Nodes = ?acquire_nodes(1, Config),
+ Param = dump_log_load_regulation,
+
+ %% Normal
+ NoReg = false,
+ ?match(NoReg, mnesia:system_info(Param)),
+ ?match([], mnesia_test_lib:stop_mnesia(Nodes)),
+
+ %% Bad
+ Bad = arne_anka,
+ ?match({error, {bad_type, Param, Bad}},
+ mnesia:start([{Param, Bad}])),
+
+ %% Regulation activated
+ Reg = true,
+ ?match(ok,mnesia:start([{Param, Reg}])),
+ ?match(Reg, mnesia:system_info(Param)),
+
+ Args =
+ [{db_nodes, Nodes},
+ {driver_nodes, Nodes},
+ {replica_nodes, Nodes},
+ {n_drivers_per_node, 5},
+ {n_branches, length(Nodes) * 10},
+ {n_accounts_per_branch, 5},
+ {replica_type, disc_copies},
+ {stop_after, timer:seconds(30)},
+ {report_interval, timer:seconds(10)},
+ {use_running_mnesia, true},
+ {reuse_history_id, true}],
+
+ ?match({ok, _}, mnesia_tpcb:start(Args)),
+
+ ?verify_mnesia(Nodes, []),
+ ?cleanup(1, Config),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+embedded_mnemosyne(doc) ->
+ ["Start Mnemosyne as an embedded part of Mnesia",
+ "on some of the nodes"];
+embedded_mnemosyne(suite) ->
+ [];
+embedded_mnemosyne(Config) when is_list(Config) ->
+ Nodes = ?acquire_nodes(1, Config),
+ Param = embedded_mnemosyne,
+
+ %% Normal
+ NoMnem = false,
+ ?match(NoMnem, mnesia:system_info(Param)),
+ ?match(undefined, whereis(mnemosyne_catalog)),
+ ?match([], mnesia_test_lib:stop_mnesia(Nodes)),
+
+ %% Bad
+ Bad = arne_anka,
+ ?match({error, {bad_type, Param, Bad}},
+ mnesia:start([{Param, Bad}])),
+
+ case code:priv_dir(mnemosyne) of
+ {error, _} -> %% No mnemosyne on later systems
+ ok;
+ _ ->
+ %% Mnemosyne as embedded application
+ Mnem = true,
+ ?match(undefined, whereis(mnemosyne_catalog)),
+ ?match(ok,mnesia:start([{Param, Mnem}])),
+ ?match(Mnem, mnesia:system_info(Param)),
+ ?match(Pid when is_pid(Pid), whereis(mnemosyne_catalog)),
+ ?match([], mnesia_test_lib:stop_mnesia(Nodes)),
+ ?match(undefined, whereis(mnemosyne_catalog))
+ end,
+ ?verify_mnesia([], Nodes),
+ ?cleanup(1, Config),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+ignore_fallback_at_startup(doc) ->
+ ["Start Mnesia without rollback of the database to the fallback. ",
+ "Once Mnesia has been (re)started the installed fallback should",
+ "be handled as a normal active fallback.",
+ "Install a customized event module which disables the termination",
+ "of Mnesia when mnesia_down occurrs with an active fallback."].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+max_wait_for_decision(doc) ->
+ ["Provoke Mnesia to make a forced decision of the outome",
+ "of a heavy weight transaction."].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+send_compressed(doc) -> [];
+send_compressed(suite) -> [];
+send_compressed(Config) ->
+ [N1,N2] = Nodes = ?acquire_nodes(2, Config),
+ ?match({atomic,ok}, mnesia:create_table(t0, [{ram_copies,[N1,N2]}])),
+ ?match({atomic,ok}, mnesia:create_table(t1, [{disc_copies,[N1,N2]}])),
+ ?match({atomic,ok}, mnesia:create_table(t2, [{disc_only_copies,[N1,N2]}])),
+
+ Max = 1000,
+ Create = fun(Tab) -> [mnesia:write({Tab, N, {N, "FILLER-123490878345asdasd"}})
+ || N <- lists:seq(1, Max)],
+ ok
+ end,
+
+ ?match([], mnesia_test_lib:kill_mnesia([N2])),
+
+ ?match([], mnesia_test_lib:kill_mnesia([N1])),
+ ?match(ok, mnesia:start([{send_compressed, 9}])),
+ ?match(ok, mnesia:wait_for_tables([t0,t1,t2], 5000)),
+
+ ?match({atomic, ok}, mnesia:transaction(Create, [t0])),
+ ?match({atomic, ok}, mnesia:transaction(Create, [t1])),
+ ?match({atomic, ok}, mnesia:transaction(Create, [t2])),
+
+ ?match([], mnesia_test_lib:start_mnesia([N2], [t0,t1,t2])),
+
+ Verify = fun(Tab) ->
+ [ [{Tab,N,{N,_}}] = mnesia:read(Tab, N) || N <- lists:seq(1, Max)],
+ ok
+ end,
+ ?match({atomic, ok}, rpc:call(N1, mnesia, transaction, [Verify, [t0]])),
+ ?match({atomic, ok}, rpc:call(N1, mnesia, transaction, [Verify, [t1]])),
+ ?match({atomic, ok}, rpc:call(N1, mnesia, transaction, [Verify, [t2]])),
+
+ ?match({atomic, ok}, rpc:call(N2, mnesia, transaction, [Verify, [t0]])),
+ ?match({atomic, ok}, rpc:call(N2, mnesia, transaction, [Verify, [t1]])),
+ ?match({atomic, ok}, rpc:call(N2, mnesia, transaction, [Verify, [t2]])),
+
+ ?verify_mnesia(Nodes, []),
+ ?cleanup(1, Config),
+ ok.
+
+app_test(doc) -> [];
+app_test(suite) -> [];
+app_test(_Config) ->
+ ?match(ok,test_server:app_test(mnesia)),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+event_module(doc) ->
+ ["Replace the event module with another module and use it as",
+ "receiver of the various system and table events. Provoke",
+ "coverage of all kinds of events."];
+event_module(suite) -> [];
+event_module(Config) when is_list(Config) ->
+ Filter = fun({mnesia_system_event,{mnesia_info, _, _}}) -> false;
+ (_) -> true
+ end,
+
+ [_N1, N2]=Nodes=?acquire_schema(2, Config),
+
+ Def = case mnesia_test_lib:diskless(Config) of
+ true -> [{event_module, mnesia_config_event},
+ {extra_db_nodes, Nodes}];
+ false ->
+ [{event_module, mnesia_config_event}]
+ end,
+
+ ?match({[ok, ok], []}, rpc:multicall(Nodes, mnesia, start, [Def])),
+ receive after 1000 -> ok end,
+ mnesia_event ! {get_log, self()},
+ DebugLog1 = receive
+ {log, L1} -> L1
+ after 10000 -> [timeout]
+ end,
+ ?match([{mnesia_system_event,{mnesia_up,N2}}],
+ lists:filter(Filter, DebugLog1)),
+ mnesia_test_lib:kill_mnesia([N2]),
+ receive after 2000 -> ok end,
+
+ ?match({[ok], []}, rpc:multicall([N2], mnesia, start, [])),
+
+ receive after 1000 -> ok end,
+ mnesia_event ! {get_log, self()},
+ DebugLog = receive
+ {log, L} -> L
+ after 10000 -> [timeout]
+ end,
+ ?match([{mnesia_system_event,{mnesia_up,N2}},
+ {mnesia_system_event,{mnesia_down,N2}},
+ {mnesia_system_event,{mnesia_up, N2}}],
+ lists:filter(Filter, DebugLog)),
+ ?verify_mnesia(Nodes, []),
+ ?cleanup(1, Config),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+schema_config(doc) ->
+ ["Try many configurations with various schema_location's with and",
+ "without explicit extra_db_nodes. Do also provoke various schema merge",
+ "situations. Most of the other test suites focusses on tests where the",
+ "schema is residing on disc. Now it is time to perform an exhaustive",
+ "elaboration with various disc less configurations."];
+schema_config(suite) ->
+ [
+ start_one_disc_full_then_one_disc_less,
+ start_first_one_disc_less_then_one_disc_full,
+ start_first_one_disc_less_then_two_more_disc_less,
+ schema_location_and_extra_db_nodes_combinations,
+ table_load_to_disc_less_nodes,
+ schema_merge,
+ dynamic_connect
+ ].
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+start_one_disc_full_then_one_disc_less(doc)->
+ ["Start a disk node and then a disk less one. Distribute some",
+ "tables between them."];
+start_one_disc_full_then_one_disc_less(suite) -> [];
+start_one_disc_full_then_one_disc_less(Config) when is_list(Config) ->
+ [N1, N2] = ?init(2, Config),
+ ?match(ok, mnesia:create_schema([N1])),
+ ?match([], mnesia_test_lib:start_mnesia([N1])),
+
+ ?match({atomic, ok}, mnesia:add_table_copy(schema, N2, ram_copies)),
+
+ ?match(ok, rpc:call(N2, mnesia, start, [[{schema_location, ram},
+ {extra_db_nodes, [N1]}]])),
+ mnesia_test_lib:sync_tables([N1, N2], [schema]),
+
+ %% Now create some tables
+ ?match({atomic,ok},
+ mnesia:create_table(test_table,
+ [{ram_copies, [N1, N2]},
+ {attributes,
+ record_info(fields,test_table)}])),
+
+ ?match({atomic,ok},
+ rpc:call(
+ N2, mnesia,create_table, [test_table2,
+ [{ram_copies, [N1, N2]},
+ {attributes,
+ record_info(fields,test_table2)}]])),
+
+ %% Write something on one end ...
+ Rec = #test_table{i=55},
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(Rec) end)),
+
+ %% ... and read it in the other
+ ?match({atomic, [Rec]},
+ rpc:call(N2, mnesia, transaction,
+ [fun() -> mnesia:read({test_table, 55}) end])),
+
+
+ %% Then do the same but start at the other end
+ Rec2 = #test_table2{i=155},
+ ?match({atomic, ok},
+ rpc:call(N2, mnesia, transaction,
+ [fun() ->
+ mnesia:write(Rec2) end
+ ])),
+
+ ?match({atomic, [Rec2]},
+ mnesia:transaction(fun() -> mnesia:read({test_table2, 155}) end)),
+
+ ?verify_mnesia([N1, N2], []),
+ ?cleanup(2, Config),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+start_first_one_disc_less_then_one_disc_full(doc)->
+ ["no_doc"];
+start_first_one_disc_less_then_one_disc_full(suite) -> [];
+start_first_one_disc_less_then_one_disc_full(Config) when is_list(Config) ->
+ [N1, N2] = Nodes = ?init(2, Config),
+ ?match(ok, mnesia:create_schema([N1])),
+ ?match([], mnesia_test_lib:start_mnesia([N1])),
+
+ ?match({atomic, ok}, mnesia:add_table_copy(schema, N2, ram_copies)),
+
+ ?match(ok, rpc:call(N2, mnesia, start, [[{schema_location, ram},
+ {extra_db_nodes, Nodes}]])),
+
+ mnesia_test_lib:sync_tables([N1, N2], [schema]),
+
+ mnesia_test_lib:kill_mnesia(Nodes),
+ receive after 2000 -> ok end,
+ ?match([], mnesia_test_lib:start_mnesia(Nodes)),
+
+ mnesia_test_lib:sync_tables([N1, N2], [schema]),
+
+ %% Now create some tables
+ ?match({atomic,ok},
+ rpc:call(
+ N1, mnesia,create_table, [test_table,
+ [%%{disc_copies, [node()]},
+ {ram_copies, [N1, N2]},
+ {attributes,
+ record_info(fields,test_table)}]])),
+ mnesia_test_lib:sync_tables([N1, N2], [test_table]),
+
+ ?match({atomic,ok},
+ rpc:call(
+ N2, mnesia,create_table, [test_table2,
+ [%%{disc_copies, [node()]},
+ {ram_copies, [N1, N2]},
+ {attributes,
+ record_info(fields,test_table2)}]])),
+
+ mnesia_test_lib:sync_tables([N1, N2], [test_table, test_table2]),
+
+ %% Assure tables loaded
+ ?match({[ok, ok], []},
+ rpc:multicall([N1, N2], mnesia, wait_for_tables,
+ [[schema, test_table, test_table2], 10000])),
+
+ %% Write something on one end ...
+ Rec = #test_table{i=55},
+ ?match({atomic, ok},
+ rpc:call(N1, mnesia, transaction,
+ [fun() -> mnesia:write(Rec) end])),
+
+ %% ... and read it in the other
+ ?match({atomic, [Rec]},
+ rpc:call(N2, mnesia, transaction,
+ [fun() -> mnesia:read({test_table, 55}) end])),
+
+ %% Then do the same but start at the other end
+ Rec2 = #test_table2{i=155},
+ ?match({atomic, ok},
+ rpc:call(N2, mnesia, transaction,
+ [fun() ->
+ mnesia:write(Rec2) end
+ ])),
+
+ ?match({atomic, [Rec2]},
+ rpc:call(N1, mnesia, transaction,
+ [fun() -> mnesia:read({test_table2, 155}) end])),
+
+ ?verify_mnesia(Nodes, []),
+ ?cleanup(1, Config),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+start_first_one_disc_less_then_two_more_disc_less(doc)->
+ ["no doc"];
+start_first_one_disc_less_then_two_more_disc_less(suite) -> [];
+start_first_one_disc_less_then_two_more_disc_less(Config) when is_list(Config) ->
+ Nodes = [N1, N2, N3] = ?init(3, Config),
+
+ ?match(ok, rpc:call(N1, mnesia, start, [[{schema_location, ram}]])),
+
+ %% Really should use test_lib:mnesia_start for these ones but ...
+ ?match({atomic, ok},
+ rpc:call(N1, mnesia,add_table_copy, [schema, N2, ram_copies])),
+ ?match({atomic, ok},
+ rpc:call(N1, mnesia,add_table_copy, [schema, N3, ram_copies])),
+
+ ?match(ok, rpc:call(N2, mnesia, start, [[{schema_location, ram},
+ {extra_db_nodes, [N1]}]])),
+ ?match(ok, rpc:call(N3, mnesia, start, [[{schema_location, ram},
+ {extra_db_nodes, [N1, N2]}]])),
+
+ %% Now create some tables
+ ?match({atomic,ok},
+ rpc:call(
+ N1, mnesia,create_table, [test_table,
+ [%%{disc_copies, [node()]},
+ {ram_copies, [N1, N2, N3]},
+ {attributes,
+ record_info(fields,test_table)}]])),
+
+ %% Assure tables loaded
+ ?match({[ok, ok, ok], []},
+ rpc:multicall([N1, N2, N3], mnesia, wait_for_tables,
+ [[test_table], 1000])),
+
+ %% Write something on one end ...
+ ?match({atomic, ok},
+ rpc:call(N1, mnesia, transaction,
+ [fun() -> mnesia:write(#test_table{i=44}) end])),
+
+ %% Force synchronicity
+ ?match({atomic, ok},
+ rpc:call(N1, mnesia, transaction,
+ [fun() -> mnesia:write_lock_table(test_table) end])),
+
+ %% ... and read it in the others
+ ?match({[{atomic, [{test_table, 44, _, _, _}]},
+ {atomic, [{test_table, 44, _, _, _}]}], []},
+ rpc:multicall([N2, N3], mnesia, transaction,
+ [fun() -> mnesia:read({test_table, 44}) end])),
+
+ %% Then do the other way around
+ ?match({atomic, ok},
+ rpc:call(N3, mnesia, transaction,
+ [fun() -> mnesia:write(#test_table{i=33}) end])),
+ %% Force synchronicity
+ ?match({atomic, ok},
+ rpc:call(N3, mnesia, transaction,
+ [fun() -> mnesia:write_lock_table(test_table) end])),
+
+ ?match({[{atomic, [{test_table, 44, _, _, _}]},
+ {atomic, [{test_table, 44, _, _, _}]}], []},
+ rpc:multicall([N1, N2], mnesia, transaction,
+ [fun() -> mnesia:read({test_table, 44}) end])),
+
+ mnesia_test_lib:reload_appls([mnesia], Nodes),
+ ok.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+schema_location_and_extra_db_nodes_combinations(doc)->
+ ["Test schema loaction and extra_db_nodes combinations."];
+schema_location_and_extra_db_nodes_combinations(suite) -> [];
+schema_location_and_extra_db_nodes_combinations(Config) when is_list(Config) ->
+ [N1, N2] = Nodes = ?init(2, Config),
+ ?match(ok, mnesia:create_schema([N1])),
+ ?match([], mnesia_test_lib:start_mnesia([N1])),
+
+ %% Really should use test_lib:mnesia_start for these ones but ...
+ ?match({atomic, ok},
+ rpc:call(N1, mnesia,add_table_copy, [schema, N2, ram_copies])),
+
+ ?match(ok, rpc:call(N2, mnesia, start, [[{schema_location, ram},
+ {extra_db_nodes, [N1]}]])),
+
+ %% Assure tables loaded
+ ?match({[ok, ok], []},
+ rpc:multicall([N1, N2], mnesia, wait_for_tables,
+ [[schema], 10000])),
+
+ ?verify_mnesia(Nodes, []),
+ ?cleanup(2, Config),
+ ok.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+table_load_to_disc_less_nodes(doc)->
+ ["Load tables to disc less nodes"];
+table_load_to_disc_less_nodes(suite) -> [];
+table_load_to_disc_less_nodes(Config) when is_list(Config) ->
+ [N1, N2] = ?init(2, Config),
+
+ ?match(ok, rpc:call(N1, mnesia, start, [[{schema_location, ram}]])),
+
+ %% Really should use test_lib:mnesia_start for these ones but ...
+ ?match({atomic, ok},
+ rpc:call(N1, mnesia,add_table_copy, [schema, N2, ram_copies])),
+
+ ?match(ok, rpc:call(N2, mnesia, start, [[{schema_location, ram},
+ {extra_db_nodes, [N1]}]])),
+
+ %% Now create some tables
+ ?match({atomic,ok},
+ rpc:call(
+ N1, mnesia,create_table, [test_table,
+ [%%{disc_copies, [node()]},
+ {ram_copies, [N1, N2]},
+ {attributes,
+ record_info(fields,test_table)}]])),
+
+ %% Assure tables loaded
+ ?match({[ok, ok], []},
+ rpc:multicall([N1, N2], mnesia, wait_for_tables,
+ [[test_table], 1000])),
+
+ %% Write something on one end ...
+ ?match({atomic, ok},
+ rpc:call(N1, mnesia, transaction,
+ [fun() -> mnesia:write(#test_table{i=44}) end])),
+
+ %% Force synchronicity
+ ?match({atomic, ok},
+ rpc:call(N1, mnesia, transaction,
+ [fun() -> mnesia:write_lock_table(test_table) end])),
+
+ %% ... and read it in the others
+ ?match({atomic, [{test_table, 44, _, _, _}]},
+ rpc:call(N2, mnesia, transaction,
+ [fun() -> mnesia:read({test_table, 44}) end])),
+
+ ?cleanup(2, Config),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+schema_merge(doc) ->
+ ["Provoke various schema merge situations.",
+ "Perform various schema updates while some nodes are down,",
+ "stop the started nodes, start the stopped nodes and perform",
+ "schema updates. Now we have a situation were some of the table",
+ "definitions have been changed on two or more nodes independently",
+ "of each other and when Mnesia on the nodes tries to connect",
+ "to each other at restart the schema will be merged.",
+ "Do also try to provoke schema merge situations were the",
+ "schema cannot be merged."];
+
+schema_merge(suite) -> [];
+
+schema_merge(Config) when is_list(Config) ->
+ [N1, N2]=Nodes=?acquire(2,Config),
+
+ mnesia_test_lib:kill_mnesia([N2]),
+ receive after 1000 -> ok end,
+
+ Storage = mnesia_test_lib:storage_type(disc_copies, Config),
+ ?match({atomic,ok},
+ rpc:call(
+ N1, mnesia,create_table,
+ [test_table,
+ [{Storage, [N1]},
+ {attributes,
+ record_info(fields,test_table)}]])),
+
+ ?match({atomic, ok},
+ rpc:call(N1, mnesia, transaction,
+ [fun() -> mnesia:write(#test_table{i=44}) end])),
+
+ mnesia_test_lib:kill_mnesia([N1]),
+ receive after 2000 -> ok end,
+ %% Can't use std start because it waits for schema
+ ?match(ok, rpc:call(N2, mnesia, start, [])),
+
+ ?match({atomic,ok},
+ rpc:call(
+ N2, mnesia,create_table,
+ [test_table2,
+ [{Storage, [N2]},
+ {attributes,
+ record_info(fields,test_table2)}]])),
+
+ receive after 5000 -> ok end,
+
+ ?match({atomic, ok},
+ rpc:call(N2, mnesia, transaction,
+ [fun() -> mnesia:write(#test_table2{i=33}) end])),
+
+ %% Can't use std start because it waits for schema
+ ?match(ok, rpc:call(N1, mnesia, start, [])),
+
+ %% Assure tables loaded
+ ?match({[ok, ok], []},
+ rpc:multicall([N1, N2], mnesia, wait_for_tables,
+ [[schema, test_table, test_table2], 10000])),
+
+ %% ... and read it in the others
+ ?match({[{atomic, [{test_table, 44, _, _, _}]},
+ {atomic, [{test_table, 44, _, _, _}]}], []},
+ rpc:multicall([N1, N2], mnesia, transaction,
+ [fun() -> mnesia:read({test_table, 44}) end])),
+
+ ?match({[{atomic, [{test_table2, 33, _}]},
+ {atomic, [{test_table2, 33, _}]}], []},
+ rpc:multicall([N1, N2], mnesia, transaction,
+ [fun() -> mnesia:read({test_table2, 33}) end])),
+
+ ?verify_mnesia(Nodes, []),
+ ?cleanup(2, Config),
+ ok.
+
+
+-define(connect(Nodes), mnesia:change_config(extra_db_nodes, Nodes)).
+-define(rpc_connect(From, Nodes),
+ rpc:call(From, mnesia, change_config, [extra_db_nodes, Nodes])).
+
+
+sort({ok, NS}) ->
+ {ok, lists:sort(NS)};
+sort(Ns) when is_tuple(Ns) ->
+ Ns;
+sort(NS) when is_list(NS) ->
+ lists:sort(NS).
+
+
+dynamic_connect(doc) ->
+ ["Test the new functionality where we start mnesia first and then "
+ "connect to the other mnesia nodes"];
+dynamic_connect(suite) ->
+ [
+ dynamic_basic,
+ dynamic_ext,
+ dynamic_bad
+ ].
+
+
+dynamic_basic(suite) -> [];
+dynamic_basic(Config) when is_list(Config) ->
+ Nodes = [N1, N2, N3] = ?acquire_nodes(3, Config),
+ SNs = lists:sort(Nodes),
+
+ ?match({atomic, ok}, mnesia:create_table(tab1, [{ram_copies, Nodes--[N1]}, {disc_copies, [N1]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab2, [{disc_copies, Nodes}])),
+
+ ?match({ok, SNs}, sort(?rpc_connect(N1, Nodes))), %% What shall happen?
+ ?match({ok, []}, sort(?rpc_connect(N1, [nonode@nothosted]))), %% What shall happen?
+
+ ?match([], mnesia_test_lib:kill_mnesia([N2])),
+ ?match(ok, mnesia:delete_schema([N2])),
+
+ ?match(ok, mnesia:dirty_write({tab1, 1, 1})),
+ ?match(ok, mnesia:dirty_write({tab2, 1, 1})),
+
+ ?match(ok, rpc:call(N2, mnesia, start, [[{extra_db_nodes, [N1]}]])),
+ ?match(ok, rpc:call(N2, mnesia, wait_for_tables, [[tab1,tab2],5000])),
+ io:format("Here ~p ~n",[?LINE]),
+ check_storage(N2, N1, [N3]),
+ ?match(SNs, sort(rpc:call(N1, mnesia, system_info, [running_db_nodes]))),
+ ?match(SNs, sort(rpc:call(N2, mnesia, system_info, [running_db_nodes]))),
+
+ ?match([], mnesia_test_lib:kill_mnesia([N3])),
+ ?match(ok, mnesia:delete_schema([N3])),
+
+ io:format("T1 ~p ~n",[rpc:call(N3,?MODULE,c_nodes,[])]),
+ ?match(ok, rpc:call(N3, mnesia, start, [])),
+ io:format("T2 ~p ~n",[rpc:call(N3,?MODULE,c_nodes,[])]),
+ timer:sleep(2000),
+ io:format("T3 ~p ~n",[rpc:call(N3,?MODULE,c_nodes,[])]),
+ ?match({ok, [N1]}, sort(?rpc_connect(N3, [N1]))),
+ io:format("T4 ~p ~n",[rpc:call(N3,?MODULE,c_nodes,[])]),
+ ?match(ok, rpc:call(N3, mnesia, wait_for_tables, [[tab1,tab2],5000])),
+ io:format("Here ~p ~n",[?LINE]),
+ check_storage(N3, N1, [N2]),
+ ?match(SNs, sort(rpc:call(N1, mnesia, system_info, [running_db_nodes]))),
+ ?match(SNs, sort(rpc:call(N2, mnesia, system_info, [running_db_nodes]))),
+
+ ?match([], mnesia_test_lib:kill_mnesia([N3])),
+ ?match(ok, mnesia:delete_schema([N3])),
+
+ ?match(ok, rpc:call(N3, mnesia, start, [])),
+ ?match({ok, [N3]}, sort(?rpc_connect(N1, [N3]))),
+ ?match(ok, rpc:call(N3, mnesia, wait_for_tables, [[tab1,tab2],5000])),
+ io:format("Here ~p ~n",[?LINE]),
+ check_storage(N3, N1, [N2]),
+ ?match(SNs, sort(rpc:call(N1, mnesia, system_info, [running_db_nodes]))),
+ ?match(SNs, sort(rpc:call(N2, mnesia, system_info, [running_db_nodes]))),
+
+ mnesia_test_lib:kill_mnesia([N2]),
+ ?match(ok, mnesia:delete_schema([N2])),
+ ?match({atomic, ok}, mnesia:del_table_copy(schema, N2)),
+
+ %% Ok, we have now removed the references to node N2 from the other nodes;
+ %% Mnesia should come up on N2 now.
+ ?match({atomic, ok}, mnesia:add_table_copy(tab1, N2, ram_copies)),
+
+ ?match(ok, rpc:call(N2, mnesia, start, [])),
+ ?match({ok, _}, sort(?rpc_connect(N2, [N3]))),
+
+ ?match(SNs, sort(rpc:call(N1, mnesia, system_info, [running_db_nodes]))),
+ ?match(SNs, sort(rpc:call(N2, mnesia, system_info, [running_db_nodes]))),
+ ?match(SNs, sort(rpc:call(N3, mnesia, system_info, [running_db_nodes]))),
+
+ ?match(ok, rpc:call(N2, mnesia, wait_for_tables, [[tab1], 1000])),
+ ?match([{tab1, 1, 1}], rpc:call(N2, mnesia, dirty_read, [tab1, 1])),
+
+ mnesia_test_lib:kill_mnesia([N2]),
+
+ %%% SYNC!!!
+ timer:sleep(1000),
+
+ ?match([N3,N1], sort(rpc:call(N1, mnesia, system_info, [running_db_nodes]))),
+ ?match([N3,N1], sort(rpc:call(N3, mnesia, system_info, [running_db_nodes]))),
+
+ ?match(ok, rpc:call(N2, mnesia, start, [])),
+ ?match({ok, _}, sort(?rpc_connect(N3, [N2]))),
+
+ ?match(SNs, sort(rpc:call(N1, mnesia, system_info, [running_db_nodes]))),
+ ?match(SNs, sort(rpc:call(N2, mnesia, system_info, [running_db_nodes]))),
+ ?match(SNs, sort(rpc:call(N3, mnesia, system_info, [running_db_nodes]))),
+
+ ?match(ok, rpc:call(N2, mnesia, wait_for_tables, [[tab1], 1000])),
+ ?match([{tab1, 1, 1}], rpc:call(N2, mnesia, dirty_read, [tab1, 1])),
+
+ ?verify_mnesia(Nodes, []),
+%% ?cleanup(3, Config).
+ ok.
+
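+%% Debug helper: the current db_nodes and recover_nodes as seen by
+%% mnesia_lib on the node where it is called.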
+c_nodes() ->
+ {mnesia_lib:val({current, db_nodes}),mnesia_lib:val(recover_nodes)}.
+
+
+dynamic_ext(suite) -> [];
+dynamic_ext(Config) when is_list(Config) ->
+ Ns = [N1,N2] = ?acquire_nodes(2, Config),
+ SNs = lists:sort([N1,N2]),
+
+ ?match({atomic, ok}, mnesia:create_table(tab0, [{disc_copies, [N1,N2]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab1, [{ram_copies, [N2]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab2, [{disc_copies, [N2]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab3, [{disc_only_copies, [N2]}])),
+
+ mnesia_test_lib:kill_mnesia([N2]),
+ ?match(ok, mnesia:delete_schema([N2])),
+ ?match(ok, rpc:call(N2, mnesia, start, [[{extra_db_nodes, [N1]}]])),
+
+ ?match(SNs, sort(rpc:call(N1, mnesia, system_info, [running_db_nodes]))),
+ ?match(SNs, sort(rpc:call(N2, mnesia, system_info, [running_db_nodes]))),
+
+ ?match(ok, rpc:call(N2, mnesia, wait_for_tables, [[tab0,tab1,tab2,tab3], 2000])),
+
+ Check = fun({Tab,Storage}) ->
+ ?match(Storage, rpc:call(N2, mnesia, table_info, [Tab, storage_type])),
+ ?match([{N2,Storage}],
+ lists:sort(rpc:call(N2, mnesia, table_info, [Tab, where_to_commit])))
+ end,
+ [Check(Test) || Test <- [{tab1, ram_copies},{tab2, disc_copies},{tab3, disc_only_copies}]],
+
+ T = now(),
+ ?match(ok, mnesia:dirty_write({tab0, 42, T})),
+ ?match(ok, mnesia:dirty_write({tab1, 42, T})),
+ ?match(ok, mnesia:dirty_write({tab2, 42, T})),
+ ?match(ok, mnesia:dirty_write({tab3, 42, T})),
+
+ ?match(stopped, rpc:call(N2, mnesia, stop, [])),
+ ?match(ok, rpc:call(N2, mnesia, start, [])),
+ ?match(SNs, sort(rpc:call(N2, mnesia, system_info, [running_db_nodes]))),
+ ?match(ok, mnesia:wait_for_tables([tab0,tab1,tab2,tab3], 10000)),
+ ?match(ok, rpc:call(N2, mnesia, wait_for_tables, [[tab1,tab2,tab3], 100])),
+ ?match([], mnesia:dirty_read({tab1, 41})),
+ ?match([{tab2,42,T}], mnesia:dirty_read({tab2, 42})),
+ ?match([{tab3,42,T}], mnesia:dirty_read({tab3, 42})),
+
+ mnesia_test_lib:kill_mnesia([N2]),
+ ?match(ok, mnesia:delete_schema([N2])),
+
+ ?match(stopped, rpc:call(N1, mnesia, stop, [])),
+
+ ?match(ok, rpc:call(N2, mnesia, start, [[{extra_db_nodes,[N1,N2]}]])),
+ ?match({timeout,[tab0]}, rpc:call(N2, mnesia, wait_for_tables, [[tab0], 500])),
+
+ ?match(ok, rpc:call(N1, mnesia, start, [[{extra_db_nodes, [N1,N2]}]])),
+ ?match(ok, rpc:call(N1, mnesia, wait_for_tables, [[tab0], 1500])),
+ ?match(ok, rpc:call(N2, mnesia, wait_for_tables, [[tab0], 1500])),
+ ?match([{tab0,42,T}], mnesia:dirty_read({tab0, 42})),
+ ?match([{tab0,42,T}], rpc:call(N2, mnesia,dirty_read,[{tab0,42}])),
+
+ ?match(stopped, rpc:call(N1, mnesia, stop, [])),
+ mnesia_test_lib:kill_mnesia([N2]),
+ ?match(ok, mnesia:delete_schema([N2])),
+ ?match(ok, rpc:call(N1, mnesia, start, [[{extra_db_nodes, [N1,N2]}]])),
+ ?match({timeout,[tab0]}, rpc:call(N1, mnesia, wait_for_tables, [[tab0], 500])),
+
+ ?match(ok, rpc:call(N2, mnesia, start, [[{extra_db_nodes,[N1,N2]}]])),
+ ?match(ok, rpc:call(N1, mnesia, wait_for_tables, [[tab0], 1500])),
+ ?match(ok, rpc:call(N2, mnesia, wait_for_tables, [[tab0], 1500])),
+ ?match([{tab0,42,T}], mnesia:dirty_read({tab0, 42})),
+ ?match([{tab0,42,T}], rpc:call(N2,mnesia,dirty_read,[{tab0,42}])),
+
+ ?verify_mnesia(Ns, []),
+ ok.
+
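+%% Check that the dynamically connected node (Me) got a disc based schema,
+%% that its storage and where_to_commit info for tab2 is consistent with the
+%% original node (Orig), and that its data survives a restart while the
+%% other nodes are down.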
+check_storage(Me, Orig, Other) ->
+ io:format("Nodes ~p ~p ~p~n",[Me,Orig,Other]),
+ rpc:multicall(Other, sys, status, [mnesia_locker]),
+ rpc:call(Me, sys, status, [mnesia_locker]),
+ rpc:call(Orig, sys, status, [mnesia_locker]),
+ rpc:multicall(Other, sys, status, [mnesia_controller]),
+ rpc:call(Me, sys, status, [mnesia_controller]),
+ rpc:call(Orig, sys, status, [mnesia_controller]),
+ %% Verify disc_copies
+ W2C = lists:sort([{Node,disc_copies} || Node <- [Me,Orig|Other]]),
+ W2W = lists:sort([Me,Orig|Other]),
+ ?match(disc_copies, rpc:call(Orig, mnesia, table_info, [schema, storage_type])),
+ ?match(disc_copies, rpc:call(Me, mnesia, table_info, [schema, storage_type])),
+ ?match(W2C, lists:sort(rpc:call(Orig, mnesia, table_info, [schema, where_to_commit]))),
+ ?match(W2C, lists:sort(rpc:call(Me, mnesia, table_info, [schema, where_to_commit]))),
+
+ ?match(disc_copies, rpc:call(Orig, mnesia, table_info, [tab2, storage_type])),
+ ?match(disc_copies, rpc:call(Me, mnesia, table_info, [tab2, storage_type])),
+ ?match(W2W, lists:sort(rpc:call(Me, mnesia, table_info, [tab2, where_to_write]))),
+ ?match(Me, rpc:call(Me, mnesia, table_info, [tab2, where_to_read])),
+
+ ?match(W2C, lists:sort(rpc:call(Orig, mnesia, table_info, [tab2, where_to_commit]))),
+ ?match(W2C, lists:sort(rpc:call(Me, mnesia, table_info, [tab2, where_to_commit]))),
+
+ ?match([{tab1,1,1}], mnesia:dirty_read(tab1,1)),
+ ?match([{tab2,1,1}], mnesia:dirty_read(tab2,1)),
+ ?match([{tab1,1,1}], rpc:call(Me, mnesia, dirty_read, [tab1,1])),
+ ?match([{tab2,1,1}], rpc:call(Me, mnesia, dirty_read, [tab2,1])),
+
+ ?match(true, rpc:call(Me, mnesia_monitor, use_dir, [])),
+ ?match(disc_copies, rpc:call(Me, mnesia_lib, val, [{schema, storage_type}])),
+
+ mnesia_test_lib:kill_mnesia([Orig]),
+ mnesia_test_lib:kill_mnesia(Other),
+ T = now(),
+ ?match(ok, rpc:call(Me, mnesia, dirty_write, [{tab2, 42, T}])),
+ ?match(stopped, rpc:call(Me, mnesia, stop, [])),
+ ?match(ok, rpc:call(Me, mnesia, start, [])),
+ ?match([], mnesia_test_lib:start_mnesia([Orig|Other], [tab1,tab2])),
+ ?match([{tab2,42,T}], rpc:call(Me, mnesia, dirty_read, [{tab2, 42}])),
+ ?match([{tab2,42,T}], rpc:call(Orig, mnesia, dirty_read, [{tab2, 42}])),
+
+ ?match([{tab1,1,1}], mnesia:dirty_read(tab1,1)),
+ ?match([{tab2,1,1}], mnesia:dirty_read(tab2,1)),
+ ?match([{tab1,1,1}], rpc:call(Me, mnesia, dirty_read, [tab1,1])),
+ ?match([{tab2,1,1}], rpc:call(Me, mnesia, dirty_read, [tab2,1])),
+ ok.
+
+
+dynamic_bad(suite) -> [];
+dynamic_bad(Config) when is_list(Config) ->
+ Ns = [N1, N2, N3] = ?acquire_nodes(3, Config),
+ SNs = lists:sort([N2,N3]),
+
+ ?match({atomic, ok}, mnesia:change_table_copy_type(schema, N2, ram_copies)),
+ ?match({atomic, ok}, mnesia:change_table_copy_type(schema, N3, ram_copies)),
+ ?match({atomic, ok}, mnesia:create_table(tab1, [{ram_copies, Ns -- [N1]},
+ {disc_copies, [N1]}])),
+ ?match(ok, mnesia:dirty_write({tab1, 1, 1})),
+
+ mnesia_test_lib:kill_mnesia(Ns),
+ ?match({[ok, ok], []}, rpc:multicall(Ns -- [N1], mnesia, start, [])),
+ ?match({ok, [N2]}, ?rpc_connect(N3, [N2])),
+ ?match(SNs, sort(rpc:call(N2, mnesia, system_info, [running_db_nodes]))),
+ ?match(SNs, sort(rpc:call(N3, mnesia, system_info, [running_db_nodes]))),
+ ?match({badrpc, {'EXIT', {aborted, {no_exists, _, _}}}},
+ rpc:call(N2, mnesia, table_info, [tab1, where_to_read])),
+
+ ?match(ok, mnesia:start()),
+ ?match(ok, rpc:call(N2, mnesia, wait_for_tables, [[tab1], 1000])),
+ ?match(N2, rpc:call(N2, mnesia, table_info, [tab1, where_to_read])),
+ ?match([{tab1, 1, 1}], rpc:call(N2, mnesia, dirty_read, [tab1, 1])),
+
+ mnesia_test_lib:kill_mnesia(Ns),
+ ?match({[ok, ok], []}, rpc:multicall(Ns -- [N1], mnesia, start, [])),
+ ?match({ok, [N2]}, ?rpc_connect(N3, [N2])),
+ % Make a merge conflict
+ ?match({atomic, ok}, rpc:call(N3, mnesia, create_table, [tab1, []])),
+
+ io:format("We expect a mnesia crash here~n", []),
+ ?match({error,{_, _}}, mnesia:start()),
+
+ ?verify_mnesia(Ns -- [N1], []),
+ ok.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+unknown_config(doc) ->
+ ["Try some unknown configuration parameters and see that expected",
+ "things happens."];
+unknown_config(suite)-> [];
+unknown_config(Config) when is_list(Config) ->
+ ?init(1, Config),
+ %% NOTE: case 1 & 2 below do not respond the same
+ ?match({error, Res} when element(1, Res) == bad_type,
+ mnesia:start([{undefined_config,[]}])),
+ %% The case below does not work, but the "correct" behaviour would be
+ %% to have case 1 above behave like the one below.
+
+ %% in mnesia-1.3 {error,{bad_type,{[],undefined_config}}}
+ ?match({error, Res} when element(1, Res) == bad_type,
+ mnesia:start([{[],undefined_config}])),
+ ?cleanup(1, Config),
+ ok.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+inconsistent_database(doc) ->
+ ["Replace the event module with another module and use it as",
+ "receiver of the various system and table events. Provoke",
+ "coverage of all kinds of events."];
+inconsistent_database(suite) -> [];
+inconsistent_database(Config) when is_list(Config) ->
+ Nodes = mnesia_test_lib:prepare_test_case([{init_test_case, [mnesia]}],
+ 2, Config, ?FILE, ?LINE),
+ KillAfter = length(Nodes) * timer:minutes(5),
+ ?acquire_schema(2, Config ++ [{tc_timeout, KillAfter}]),
+
+ Ok = [ok || _N <- Nodes],
+ StartArgs = [{event_module, mnesia_inconsistent_database_test}],
+ ?match({Ok, []}, rpc:multicall(Nodes, mnesia, start, [StartArgs])),
+ ?match([], mnesia_test_lib:kill_mnesia(Nodes)),
+
+ ?match(ok, mnesia_meter:go(ram_copies, Nodes)),
+
+ mnesia_test_lib:reload_appls([mnesia], Nodes),
+ ok.
+
diff --git a/lib/mnesia/test/mnesia_consistency_test.erl b/lib/mnesia/test/mnesia_consistency_test.erl
new file mode 100644
index 0000000000..ffe8ab7ac3
--- /dev/null
+++ b/lib/mnesia/test/mnesia_consistency_test.erl
@@ -0,0 +1,1612 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(mnesia_consistency_test).
+-author('[email protected]').
+-compile([export_all]).
+
+-include("mnesia_test_lib.hrl").
+
+init_per_testcase(Func, Conf) ->
+ mnesia_test_lib:init_per_testcase(Func, Conf).
+
+fin_per_testcase(Func, Conf) ->
+ mnesia_test_lib:fin_per_testcase(Func, Conf).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+all(doc) ->
+ ["Verify transaction consistency",
+ "Consistency is the property of the application that requires any",
+ "execution of the transaction to take the database from one",
+ "consistent state to another. Verify that the database is",
+ "consistent at any point in time.",
+ "Verify for various configurations.",
+ " Verify for both set and bag"];
+all(suite) ->
+ [
+ consistency_after_restart,
+ consistency_after_dump_tables,
+ consistency_after_add_replica,
+ consistency_after_del_replica,
+ consistency_after_move_replica,
+ consistency_after_transform_table,
+ consistency_after_change_table_copy_type,
+ consistency_after_fallback,
+ consistency_after_restore,
+ consistency_after_rename_of_node,
+ checkpoint_retainer_consistency,
+ backup_consistency
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%
+% stolen from mnesia_tpcb.erl:
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Account record, total size must be at least 100 bytes
+
+-define(ACCOUNT_FILLER,
+ {123456789012345678901234567890123456789012345678901234567890,
+ 123456789012345678901234567890123456789012345678901234567890,
+ 123456789012345678901234567890123456789012345678901234}).
+
+-record(account,
+ {
+ id = 0, %% Unique account id
+ branch_id = 0, %% Branch where the account is held
+ balance = 0, %% Account balance
+ filler = ?ACCOUNT_FILLER %% Gap filler to ensure size >= 100 bytes
+ }).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Branch record, total size must be at least 100 bytes
+
+-define(BRANCH_FILLER,
+ {123456789012345678901234567890123456789012345678901234567890,
+ 123456789012345678901234567890123456789012345678901234567890,
+ 123456789012345678901234567890123456789012345678901234567890}).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Teller record, total size must be at least 100 bytes
+
+-define(TELLER_FILLER,
+ {123456789012345678901234567890123456789012345678901234567890,
+ 123456789012345678901234567890123456789012345678901234567890,
+ 1234567890123456789012345678901234567890123456789012345678}).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% History record, total size must be at least 50 bytes
+
+-define(HISTORY_FILLER, 1234567890).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+-record(tab_config,
+ {
+ db_nodes = [node()],
+ replica_nodes = [node()],
+ replica_type = ram_copies,
+ use_running_mnesia = false,
+ n_branches = 1,
+ n_tellers_per_branch = 10, %% Must be 10
+ n_accounts_per_branch = 100000, %% Must be 100000
+ branch_filler = ?BRANCH_FILLER,
+ account_filler = ?ACCOUNT_FILLER,
+ teller_filler = ?TELLER_FILLER
+ }).
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%
+% stolen from mnesia_tpcb.erl:
+
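+%% Build a record from a property list: each field is taken from List when
+%% present, otherwise from the corresponding position in DefaultTuple.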
+list2rec(List, Fields, DefaultTuple) ->
+ [Name|Defaults] = tuple_to_list(DefaultTuple),
+ List2 = list2rec(List, Fields, Defaults, []),
+ list_to_tuple([Name] ++ List2).
+
+list2rec(_List, [], [], Acc) ->
+ Acc;
+list2rec(List, [F|Fields], [D|Defaults], Acc) ->
+ {Val, List2} =
+ case lists:keysearch(F, 1, List) of
+ false ->
+ {D, List};
+ {value, {F, NewVal}} ->
+ {NewVal, lists:keydelete(F, 1, List)}
+ end,
+ list2rec(List2, Fields, Defaults, Acc ++ [Val]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
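+%% TPC-B configuration used by the consistency tests; drivers are started
+%% on all nodes except those listed in NoDriverNodes.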
+tpcb_config(ReplicaType, _NodeConfig, Nodes, NoDriverNodes) ->
+ [{n_branches, 10},
+ {n_drivers_per_node, 10},
+ {replica_nodes, Nodes},
+ {driver_nodes, Nodes -- NoDriverNodes},
+ {use_running_mnesia, true},
+ {report_interval, infinity},
+ {n_accounts_per_branch, 100},
+ {replica_type, ReplicaType},
+ {reuse_history_id, true}].
+
+%% Stolen from mnesia_tpcb:dist
+tpcb_config_dist(ReplicaType, _NodeConfig, Nodes, _Config) ->
+ [{db_nodes, Nodes},
+ {driver_nodes, Nodes},
+ {replica_nodes, Nodes},
+ {n_drivers_per_node, 10},
+ {n_branches, 1},
+ {use_running_mnesia, true},
+ {n_accounts_per_branch, 10},
+ {replica_type, ReplicaType},
+ {stop_after, timer:minutes(15)},
+ {report_interval, timer:seconds(10)},
+ {reuse_history_id, true}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%
+% stolen from mnesia_recovery_test.erl:
+
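+%% Collect the expected {Pid, Msg} messages in ListOfMsgs; unexpected
+%% messages are logged and an error is reported after 3 minutes of silence.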
+receive_messages([]) -> [];
+receive_messages(ListOfMsgs) ->
+ receive
+ {Pid, Msg} ->
+ case lists:member(Msg, ListOfMsgs) of
+ false ->
+ ?warning("I (~p) have received unexpected msg~n ~p ~n",
+ [self(),{Pid, Msg}]),
+ receive_messages(ListOfMsgs);
+ true ->
+ ?verbose("I (~p) got msg ~p from ~p ~n", [self(),Msg, Pid]),
+ [{Pid, Msg} | receive_messages(ListOfMsgs -- [Msg])]
+ end;
+ Else -> ?warning("Recevied unexpected Msg~n ~p ~n", [Else])
+ after timer:minutes(3) ->
+ ?error("Timeout in receive msgs while waiting for ~p~n",
+ [ListOfMsgs])
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+consistency_after_restart(suite) ->
+ [
+ consistency_after_restart_1_ram,
+ consistency_after_restart_1_disc,
+ consistency_after_restart_1_disc_only,
+ consistency_after_restart_2_ram,
+ consistency_after_restart_2_disc,
+ consistency_after_restart_2_disc_only
+ ].
+
+consistency_after_restart_1_ram(suite) -> [];
+consistency_after_restart_1_ram(Config) when is_list(Config) ->
+ consistency_after_restart(ram_copies, 2, Config).
+
+consistency_after_restart_1_disc(suite) -> [];
+consistency_after_restart_1_disc(Config) when is_list(Config) ->
+ consistency_after_restart(disc_copies, 2, Config).
+
+consistency_after_restart_1_disc_only(suite) -> [];
+consistency_after_restart_1_disc_only(Config) when is_list(Config) ->
+ consistency_after_restart(disc_only_copies, 2, Config).
+
+consistency_after_restart_2_ram(suite) -> [];
+consistency_after_restart_2_ram(Config) when is_list(Config) ->
+ consistency_after_restart(ram_copies, 3, Config).
+
+consistency_after_restart_2_disc(suite) -> [];
+consistency_after_restart_2_disc(Config) when is_list(Config) ->
+ consistency_after_restart(disc_copies, 3, Config).
+
+consistency_after_restart_2_disc_only(suite) -> [];
+consistency_after_restart_2_disc_only(Config) when is_list(Config) ->
+ consistency_after_restart(disc_only_copies, 3, Config).
+
+consistency_after_restart(ReplicaType, NodeConfig, Config) ->
+ [Node1 | _] = Nodes = ?acquire_nodes(NodeConfig, Config),
+ {success, [A]} = ?start_activities([Node1]),
+ ?log("consistency_after_restart with ~p on ~p~n",
+ [ReplicaType, Nodes]),
+ TpcbConfig = tpcb_config(ReplicaType, NodeConfig, Nodes, [Node1]),
+ mnesia_tpcb:init(TpcbConfig),
+ A ! fun () -> mnesia_tpcb:run(TpcbConfig) end,
+ timer:sleep(timer:seconds(10)),
+ mnesia_test_lib:kill_mnesia([Node1]),
+ %% Start and wait for tables to be loaded on all nodes
+ timer:sleep(timer:seconds(3)),
+ ?match([], mnesia_test_lib:start_mnesia(Nodes,[account,branch,teller, history])),
+ mnesia_tpcb:stop(),
+ ?match(ok, mnesia_tpcb:verify_tabs()),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+consistency_after_dump_tables(suite) ->
+ [
+ consistency_after_dump_tables_1_ram,
+ consistency_after_dump_tables_2_ram
+ ].
+
+consistency_after_dump_tables_1_ram(suite) -> [];
+consistency_after_dump_tables_1_ram(Config) when is_list(Config) ->
+ consistency_after_dump_tables(ram_copies, 1, Config).
+
+consistency_after_dump_tables_2_ram(suite) -> [];
+consistency_after_dump_tables_2_ram(Config) when is_list(Config) ->
+ consistency_after_dump_tables(ram_copies, 2, Config).
+
+consistency_after_dump_tables(ReplicaType, NodeConfig, Config) ->
+ [Node1 | _] = Nodes = ?acquire_nodes(NodeConfig, Config),
+ {success, [A]} = ?start_activities([Node1]),
+ ?log("consistency_after_dump_tables with ~p on ~p~n",
+ [ReplicaType, Nodes]),
+ TpcbConfig = tpcb_config(ReplicaType, NodeConfig, Nodes, []),
+ mnesia_tpcb:init(TpcbConfig),
+ A ! fun() -> mnesia_tpcb:run(TpcbConfig) end,
+ timer:sleep(timer:seconds(10)),
+ ?match({atomic, ok}, rpc:call(Node1, mnesia, dump_tables,
+ [[branch, teller, account, history]])),
+ mnesia_tpcb:stop(),
+ ?match(ok, mnesia_tpcb:verify_tabs()),
+
+ mnesia_test_lib:kill_mnesia(Nodes),
+ timer:sleep(timer:seconds(1)),
+ ?match([], mnesia_test_lib:start_mnesia(Nodes,[account, branch,
+ teller, history])),
+ mnesia_tpcb:stop(),
+ ?match(ok, mnesia_tpcb:verify_tabs()),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+consistency_after_add_replica(suite) ->
+ [
+ consistency_after_add_replica_2_ram,
+ consistency_after_add_replica_2_disc,
+ consistency_after_add_replica_2_disc_only,
+ consistency_after_add_replica_3_ram,
+ consistency_after_add_replica_3_disc,
+ consistency_after_add_replica_3_disc_only
+ ].
+
+consistency_after_add_replica_2_ram(suite) -> [];
+consistency_after_add_replica_2_ram(Config) when is_list(Config) ->
+ consistency_after_add_replica(ram_copies, 2, Config).
+
+consistency_after_add_replica_2_disc(suite) -> [];
+consistency_after_add_replica_2_disc(Config) when is_list(Config) ->
+ consistency_after_add_replica(disc_copies, 2, Config).
+
+consistency_after_add_replica_2_disc_only(suite) -> [];
+consistency_after_add_replica_2_disc_only(Config) when is_list(Config) ->
+ consistency_after_add_replica(disc_only_copies, 2, Config).
+
+consistency_after_add_replica_3_ram(suite) -> [];
+consistency_after_add_replica_3_ram(Config) when is_list(Config) ->
+ consistency_after_add_replica(ram_copies, 3, Config).
+
+consistency_after_add_replica_3_disc(suite) -> [];
+consistency_after_add_replica_3_disc(Config) when is_list(Config) ->
+ consistency_after_add_replica(disc_copies, 3, Config).
+
+consistency_after_add_replica_3_disc_only(suite) -> [];
+consistency_after_add_replica_3_disc_only(Config) when is_list(Config) ->
+ consistency_after_add_replica(disc_only_copies, 3, Config).
+
+consistency_after_add_replica(ReplicaType, NodeConfig, Config) ->
+ Nodes0 = ?acquire_nodes(NodeConfig, Config),
+ AddNode = lists:last(Nodes0),
+ Nodes = Nodes0 -- [AddNode],
+ Node1 = hd(Nodes),
+ {success, [A]} = ?start_activities([Node1]),
+ ?log("consistency_after_add_replica with ~p on ~p~n",
+ [ReplicaType, Nodes0]),
+ TpcbConfig = tpcb_config(ReplicaType, NodeConfig, Nodes, []),
+ mnesia_tpcb:init(TpcbConfig),
+ A ! fun () -> mnesia_tpcb:run(TpcbConfig) end,
+ timer:sleep(timer:seconds(10)),
+ ?match({atomic, ok}, mnesia:add_table_copy(account, AddNode, ReplicaType)),
+ mnesia_tpcb:stop(),
+ ?match(ok, mnesia_tpcb:verify_tabs()),
+ ?verify_mnesia(Nodes0, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+consistency_after_del_replica(suite) ->
+ [
+ consistency_after_del_replica_2_ram,
+ consistency_after_del_replica_2_disc,
+ consistency_after_del_replica_2_disc_only,
+ consistency_after_del_replica_3_ram,
+ consistency_after_del_replica_3_disc,
+ consistency_after_del_replica_3_disc_only
+ ].
+
+consistency_after_del_replica_2_ram(suite) -> [];
+consistency_after_del_replica_2_ram(Config) when is_list(Config) ->
+ consistency_after_del_replica(ram_copies, 2, Config).
+
+consistency_after_del_replica_2_disc(suite) -> [];
+consistency_after_del_replica_2_disc(Config) when is_list(Config) ->
+ consistency_after_del_replica(disc_copies, 2, Config).
+
+consistency_after_del_replica_2_disc_only(suite) -> [];
+consistency_after_del_replica_2_disc_only(Config) when is_list(Config) ->
+ consistency_after_del_replica(disc_only_copies, 2, Config).
+
+consistency_after_del_replica_3_ram(suite) -> [];
+consistency_after_del_replica_3_ram(Config) when is_list(Config) ->
+ consistency_after_del_replica(ram_copies, 3, Config).
+
+consistency_after_del_replica_3_disc(suite) -> [];
+consistency_after_del_replica_3_disc(Config) when is_list(Config) ->
+ consistency_after_del_replica(disc_copies, 3, Config).
+
+consistency_after_del_replica_3_disc_only(suite) -> [];
+consistency_after_del_replica_3_disc_only(Config) when is_list(Config) ->
+ consistency_after_del_replica(disc_only_copies, 3, Config).
+
+consistency_after_del_replica(ReplicaType, NodeConfig, Config) ->
+ Nodes = ?acquire_nodes(NodeConfig, Config),
+ Node1 = hd(Nodes),
+ Node2 = lists:last(Nodes),
+ {success, [A]} = ?start_activities([Node1]),
+ ?log("consistency_after_del_replica with ~p on ~p~n",
+ [ReplicaType, Nodes]),
+ TpcbConfig = tpcb_config(ReplicaType, NodeConfig, Nodes, []),
+ mnesia_tpcb:init(TpcbConfig),
+ A ! fun () -> mnesia_tpcb:run(TpcbConfig) end,
+ timer:sleep(timer:seconds(10)),
+ ?match({atomic, ok}, mnesia:del_table_copy(account, Node2)),
+ mnesia_tpcb:stop(),
+ ?match(ok, mnesia_tpcb:verify_tabs()),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+consistency_after_move_replica(suite) ->
+ [
+ consistency_after_move_replica_2_ram,
+ consistency_after_move_replica_2_disc,
+ consistency_after_move_replica_2_disc_only,
+ consistency_after_move_replica_3_ram,
+ consistency_after_move_replica_3_disc,
+ consistency_after_move_replica_3_disc_only
+ ].
+
+consistency_after_move_replica_2_ram(suite) -> [];
+consistency_after_move_replica_2_ram(Config) when is_list(Config) ->
+ consistency_after_move_replica(ram_copies, 2, Config).
+
+consistency_after_move_replica_2_disc(suite) -> [];
+consistency_after_move_replica_2_disc(Config) when is_list(Config) ->
+ consistency_after_move_replica(disc_copies, 2, Config).
+
+consistency_after_move_replica_2_disc_only(suite) -> [];
+consistency_after_move_replica_2_disc_only(Config) when is_list(Config) ->
+ consistency_after_move_replica(disc_only_copies, 2, Config).
+
+consistency_after_move_replica_3_ram(suite) -> [];
+consistency_after_move_replica_3_ram(Config) when is_list(Config) ->
+ consistency_after_move_replica(ram_copies, 3, Config).
+
+consistency_after_move_replica_3_disc(suite) -> [];
+consistency_after_move_replica_3_disc(Config) when is_list(Config) ->
+ consistency_after_move_replica(disc_copies, 3, Config).
+
+consistency_after_move_replica_3_disc_only(suite) -> [];
+consistency_after_move_replica_3_disc_only(Config) when is_list(Config) ->
+ consistency_after_move_replica(disc_only_copies, 3, Config).
+
+consistency_after_move_replica(ReplicaType, NodeConfig, Config) ->
+ Nodes = ?acquire_nodes(NodeConfig, Config ++ [{tc_timeout, timer:minutes(10)}]),
+ Node1 = hd(Nodes),
+ Node2 = lists:last(Nodes),
+ {success, [A]} = ?start_activities([Node1]),
+ ?log("consistency_after_move_replica with ~p on ~p~n",
+ [ReplicaType, Nodes]),
+ TpcbConfig = tpcb_config(ReplicaType, NodeConfig, Nodes -- [Node2], []),
+ mnesia_tpcb:init(TpcbConfig),
+ A ! fun () -> mnesia_tpcb:run(TpcbConfig) end,
+ timer:sleep(timer:seconds(10)),
+ ?match({atomic, ok}, mnesia:move_table_copy(account, Node1, Node2)),
+ ?log("First move completed from node ~p to ~p ~n", [Node1, Node2]),
+ ?match({atomic, ok}, mnesia:move_table_copy(account, Node2, Node1)),
+ mnesia_tpcb:stop(),
+ ?match(ok, mnesia_tpcb:verify_tabs()),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+consistency_after_transform_table(doc) ->
+ ["Check that the database is consistent after transform_table.",
+ " While applications are updating the involved tables. "];
+
+consistency_after_transform_table(suite) ->
+ [
+ consistency_after_transform_table_ram,
+ consistency_after_transform_table_disc,
+ consistency_after_transform_table_disc_only
+ ].
+
+
+consistency_after_transform_table_ram(suite) -> [];
+consistency_after_transform_table_ram(Config) when is_list(Config) ->
+ consistency_after_transform_table(ram_copies, Config).
+
+consistency_after_transform_table_disc(suite) -> [];
+consistency_after_transform_table_disc(Config) when is_list(Config) ->
+ consistency_after_transform_table(disc_copies, Config).
+
+consistency_after_transform_table_disc_only(suite) -> [];
+consistency_after_transform_table_disc_only(Config) when is_list(Config) ->
+ consistency_after_transform_table(disc_only_copies, Config).
+
+consistency_after_transform_table(Type, Config) ->
+ Nodes = [N1, N2,_N3] = ?acquire_nodes(3, Config),
+
+ ?match({atomic, ok}, mnesia:create_table(tab1, [{index, [3]}, {Type, [N1]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab2, [{index, [3]}, {Type, [N1,N2]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab3, [{index, [3]}, {Type, Nodes}])),
+ ?match({atomic, ok}, mnesia:create_table(empty, [{index, [3]},{Type, Nodes}])),
+
+ Tabs = lists:sort([tab1, tab2, tab3, empty]),
+
+ [[mnesia:dirty_write({Tab, N, N}) || N <- lists:seq(1,10)] ||
+ Tab <- Tabs -- [empty, tab4]],
+ mnesia:dump_log(),
+
+ Ok = lists:duplicate(4, {atomic, ok}),
+ ?match(Ok, [mnesia:transform_table(Tab, fun({T, N, N}) -> {T, N, N, ok} end,
+ [k,a,n]) || Tab <- Tabs]),
+ [?match([k,a,n], mnesia:table_info(Tab, attributes)) || Tab <- Tabs],
+
+ Filter = fun(Tab) -> mnesia:foldl(fun(A, Acc) when size(A) == 3 -> [A|Acc];
+ (A, Acc) when size(A) == 4 -> Acc
+ end, [], Tab)
+ end,
+
+ ?match([[],[],[],[]], [element(2,mnesia:transaction(Filter, [Tab])) || Tab <- Tabs]),
+
+ mnesia_test_lib:kill_mnesia(Nodes),
+ mnesia_test_lib:start_mnesia(Nodes, Tabs),
+
+ ?match([Tabs, Tabs, Tabs],
+ [lists:sort(rpc:call(Node, mnesia,system_info, [tables]) -- [schema]) || Node <- Nodes]),
+
+ ?match([[],[],[],[]], [element(2,mnesia:transaction(Filter, [Tab])) || Tab <- Tabs]),
+ [?match([k,a,n], mnesia:table_info(Tab, attributes)) || Tab <- Tabs],
+
+ ?verify_mnesia(Nodes, []).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+consistency_after_change_table_copy_type(doc) ->
+ ["Check that the database is consistent after change of copy type.",
+ " While applications are updating the involved tables. "].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+consistency_after_fallback(doc) ->
+ ["Check that installed fallbacks are consistent. Check this by starting ",
+ "some nodes, run tpcb on them, take a backup at any time, install it ",
+ "as a fallback, kill all nodes, start mnesia again and check for ",
+ "any inconsistencies"];
+consistency_after_fallback(suite) ->
+ [
+ consistency_after_fallback_2_ram,
+ consistency_after_fallback_2_disc,
+ consistency_after_fallback_2_disc_only,
+ consistency_after_fallback_3_ram,
+ consistency_after_fallback_3_disc
+ , consistency_after_fallback_3_disc_only
+ ].
+
+consistency_after_fallback_2_ram(suite) -> [];
+consistency_after_fallback_2_ram(Config) when is_list(Config) ->
+ consistency_after_fallback(ram_copies, 2, Config).
+
+consistency_after_fallback_2_disc(suite) -> [];
+consistency_after_fallback_2_disc(Config) when is_list(Config) ->
+ consistency_after_fallback(disc_copies, 2, Config).
+
+consistency_after_fallback_2_disc_only(suite) -> [];
+consistency_after_fallback_2_disc_only(Config) when is_list(Config) ->
+ consistency_after_fallback(disc_only_copies, 2, Config).
+
+consistency_after_fallback_3_ram(suite) -> [];
+consistency_after_fallback_3_ram(Config) when is_list(Config) ->
+ consistency_after_fallback(ram_copies, 3, Config).
+
+consistency_after_fallback_3_disc(suite) -> [];
+consistency_after_fallback_3_disc(Config) when is_list(Config) ->
+ consistency_after_fallback(disc_copies, 3, Config).
+
+consistency_after_fallback_3_disc_only(suite) -> [];
+consistency_after_fallback_3_disc_only(Config) when is_list(Config) ->
+ consistency_after_fallback(disc_only_copies, 3, Config).
+
+consistency_after_fallback(ReplicaType, NodeConfig, Config) ->
+ %%?verbose("Starting consistency_after_fallback2 at ~p~n", [self()]),
+ Delay = 5,
+ Nodes = ?acquire_nodes(NodeConfig, [{tc_timeout, timer:minutes(10)} | Config]),
+ Node1 = hd(Nodes),
+ %%?verbose("Mnesia info: ~p~n", [mnesia:info()]),
+
+ {success, [A]} = ?start_activities([Node1]),
+ ?log("consistency_after_fallback with ~p on ~p~n",
+ [ReplicaType, Nodes]),
+ TpcbConfig = tpcb_config(ReplicaType, NodeConfig, Nodes, []),
+ mnesia_tpcb:init(TpcbConfig),
+ A ! fun () -> mnesia_tpcb:run(TpcbConfig) end,
+ timer:sleep(timer:seconds(Delay)),
+
+ %% Make a backup
+ ?verbose("Doing backup~n", []),
+ ?match(ok, mnesia:backup(consistency_after_fallback2)),
+
+ %% Install the backup as a fallback
+ ?verbose("Doing fallback~n", []),
+ ?match(ok, mnesia:install_fallback(consistency_after_fallback2)),
+ timer:sleep(timer:seconds(Delay)),
+
+ %% Stop tpcb
+ ?verbose("Stopping TPC-B~n", []),
+ mnesia_tpcb:stop(),
+ ?match(ok, mnesia_tpcb:verify_tabs()),
+
+ %% Stop and then start mnesia and check table consistency
+ %%?verbose("Restarting Mnesia~n", []),
+ mnesia_test_lib:kill_mnesia(Nodes),
+ mnesia_test_lib:start_mnesia(Nodes,[account,branch,teller,history]),
+
+ ?match(ok, mnesia_tpcb:verify_tabs()),
+ if
+ ReplicaType == ram_copies ->
+ %% Test that change_table_copy_type works, i.e. that no account.dcd file exists.
+ ?match({atomic, ok}, mnesia:change_table_copy_type(account, node(), disc_copies));
+ true ->
+ ignore
+ end,
+ file:delete(consistency_after_fallback2),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+consistency_after_restore(doc) ->
+ ["Verify consistency after restore operations."];
+
+consistency_after_restore(suite) ->
+ [
+ consistency_after_restore_clear_ram,
+ consistency_after_restore_clear_disc,
+ consistency_after_restore_clear_disc_only,
+ consistency_after_restore_recreate_ram,
+ consistency_after_restore_recreate_disc,
+ consistency_after_restore_recreate_disc_only
+ ].
+
+consistency_after_restore_clear_ram(suite) -> [];
+consistency_after_restore_clear_ram(Config) when is_list(Config) ->
+ consistency_after_restore(ram_copies, clear_tables, Config).
+
+consistency_after_restore_clear_disc(suite) -> [];
+consistency_after_restore_clear_disc(Config) when is_list(Config) ->
+ consistency_after_restore(disc_copies, clear_tables, Config).
+
+consistency_after_restore_clear_disc_only(suite) -> [];
+consistency_after_restore_clear_disc_only(Config) when is_list(Config) ->
+ consistency_after_restore(disc_only_copies, clear_tables, Config).
+
+consistency_after_restore_recreate_ram(suite) -> [];
+consistency_after_restore_recreate_ram(Config) when is_list(Config) ->
+ consistency_after_restore(ram_copies, recreate_tables, Config).
+
+consistency_after_restore_recreate_disc(suite) -> [];
+consistency_after_restore_recreate_disc(Config) when is_list(Config) ->
+ consistency_after_restore(disc_copies, recreate_tables, Config).
+
+consistency_after_restore_recreate_disc_only(suite) -> [];
+consistency_after_restore_recreate_disc_only(Config) when is_list(Config) ->
+ consistency_after_restore(disc_only_copies, recreate_tables, Config).
+
+consistency_after_restore(ReplicaType, Op, Config) ->
+ Delay = 1,
+ Nodes = ?acquire_nodes(3, [{tc_timeout, timer:minutes(10)} | Config]),
+ [Node1, Node2, _Node3] = Nodes,
+ File = "cons_backup_restore",
+
+ ?log("consistency_after_restore with ~p on ~p~n",
+ [ReplicaType, Nodes]),
+ Tabs = [carA, carB, carC, carD],
+
+ ?match({atomic, ok}, mnesia:create_table(carA, [{ReplicaType, Nodes}])),
+ ?match({atomic, ok}, mnesia:create_table(carB, [{ReplicaType, Nodes -- [Node1]}])),
+ ?match({atomic, ok}, mnesia:create_table(carC, [{ReplicaType, Nodes -- [Node2]}])),
+ ?match({atomic, ok}, mnesia:create_table(carD, [{ReplicaType, [Node2]}])),
+
+ NList = lists:seq(0, 20),
+ [lists:foreach(fun(E) -> ok = mnesia:dirty_write({Tab, E, 1}) end, NList) ||
+ Tab <- Tabs],
+
+ {ok, Name, _} = mnesia:activate_checkpoint([{max, [schema | Tabs]},
+ {ram_overrides_dump, true}]),
+ ?verbose("Doing backup~n", []),
+ ?match(ok, mnesia:backup_checkpoint(Name, File)),
+ ?match(ok, mnesia:deactivate_checkpoint(Name)),
+
+ [lists:foreach(fun(E) -> ok = mnesia:dirty_write({Tab, E, 2}) end, NList) ||
+ Tab <- Tabs],
+
+ Pids1 = [{'EXIT', spawn_link(?MODULE, change_tab, [self(), carA, Op]), ok} || _ <- lists:seq(1, 5)],
+ Pids2 = [{'EXIT', spawn_link(?MODULE, change_tab, [self(), carB, Op]), ok} || _ <- lists:seq(1, 5)],
+ Pids3 = [{'EXIT', spawn_link(?MODULE, change_tab, [self(), carC, Op]), ok} || _ <- lists:seq(1, 5)],
+ Pids4 = [{'EXIT', spawn_link(?MODULE, change_tab, [self(), carD, Op]), ok} || _ <- lists:seq(1, 5)],
+
+ AllPids = Pids1 ++ Pids2 ++ Pids3 ++ Pids4,
+
+ Restore = fun(F, Args) ->
+ case mnesia:restore(F, Args) of
+ {atomic, List} -> lists:sort(List);
+ Else -> Else
+ end
+ end,
+
+ timer:sleep(timer:seconds(Delay)), %% Let changers grab locks
+ ?verbose("Doing restore~n", []),
+ ?match(Tabs, Restore(File, [{default_op, Op}])),
+
+ timer:sleep(timer:seconds(Delay)), %% Let them die
+
+ ?match_multi_receive(AllPids),
+
+ case ?match(ok, restore_verify_tabs(Tabs)) of
+ {success, ok} ->
+ file:delete(File);
+ _ ->
+ {T, M, S} = time(),
+ File2 = ?flat_format("consistency_error~w~w~w.BUP", [T, M, S]),
+ file:rename(File, File2)
+ end,
+ ?verify_mnesia(Nodes, []).
+
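+%% Keep updating a random key in Tab until the restore has brought its value
+%% back to 1 (the value from the backup), then exit with reason ok.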
+change_tab(Father, Tab, Test) ->
+ Key = random:uniform(20),
+ Update = fun() ->
+ case mnesia:read({Tab, Key}) of
+ [{Tab, Key, 1}] ->
+ quit;
+ [{Tab, Key, _N}] ->
+ mnesia:write({Tab, Key, 3})
+ end
+ end,
+ case mnesia:transaction(Update) of
+ {atomic, quit} ->
+ exit(ok);
+ {aborted, {no_exists, Tab}} when Test == recreate_tables ->%% I'll allow this
+ change_tab(Father, Tab, Test);
+ {atomic, ok} ->
+ change_tab(Father, Tab, Test)
+ end.
+
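+%% After the restore every record in the given tables must have the value 1
+%% from the backup; any other record makes the fold return a list of
+%% offending records and the match fail.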
+restore_verify_tabs([Tab | R]) ->
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:foldl(fun({_, _, 1}, ok) ->
+ ok;
+ (Else, Acc) ->
+ [Else|Acc]
+ end, ok, Tab)
+ end)),
+ restore_verify_tabs(R);
+restore_verify_tabs([]) ->
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+consistency_after_rename_of_node(doc) ->
+ ["Skipped because it is an unimportant case."].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+checkpoint_retainer_consistency(doc) ->
+ ["Verify that the contents of a checkpoint retainer has the expected",
+ "contents in various situations."];
+checkpoint_retainer_consistency(suite) ->
+ [
+ updates_during_checkpoint_activation,
+ updates_during_checkpoint_iteration,
+ load_table_with_activated_checkpoint,
+ add_table_copy_to_table_with_activated_checkpoint
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+updates_during_checkpoint_activation(doc) ->
+ ["Perform updates while the checkpoint getting activated",
+ "and verify that all checkpoint retainers associated with",
+ "different replicas of the same table really has the same",
+ "contents."];
+updates_during_checkpoint_activation(suite) ->
+ [
+ updates_during_checkpoint_activation_2_ram,
+ updates_during_checkpoint_activation_2_disc,
+ updates_during_checkpoint_activation_2_disc_only,
+ updates_during_checkpoint_activation_3_ram,
+ updates_during_checkpoint_activation_3_disc
+ , updates_during_checkpoint_activation_3_disc_only
+ ].
+
+updates_during_checkpoint_activation_2_ram(suite) -> [];
+updates_during_checkpoint_activation_2_ram(Config) when is_list(Config) ->
+ updates_during_checkpoint_activation(ram_copies, 2, Config).
+
+updates_during_checkpoint_activation_2_disc(suite) -> [];
+updates_during_checkpoint_activation_2_disc(Config) when is_list(Config) ->
+ updates_during_checkpoint_activation(disc_copies, 2, Config).
+
+updates_during_checkpoint_activation_2_disc_only(suite) -> [];
+updates_during_checkpoint_activation_2_disc_only(Config) when is_list(Config) ->
+ updates_during_checkpoint_activation(disc_only_copies, 2, Config).
+
+updates_during_checkpoint_activation_3_ram(suite) -> [];
+updates_during_checkpoint_activation_3_ram(Config) when is_list(Config) ->
+ updates_during_checkpoint_activation(ram_copies, 3, Config).
+
+updates_during_checkpoint_activation_3_disc(suite) -> [];
+updates_during_checkpoint_activation_3_disc(Config) when is_list(Config) ->
+ updates_during_checkpoint_activation(disc_copies, 3, Config).
+
+updates_during_checkpoint_activation_3_disc_only(suite) -> [];
+updates_during_checkpoint_activation_3_disc_only(Config) when is_list(Config) ->
+ updates_during_checkpoint_activation(disc_only_copies, 3, Config).
+
+updates_during_checkpoint_activation(ReplicaType,NodeConfig,Config) ->
+ %%?verbose("updates_during_checkpoint_activation2 at ~p~n", [self()]),
+ Delay = 5,
+ Nodes = ?acquire_nodes(NodeConfig, Config),
+ Node1 = hd(Nodes),
+ %%?verbose("Mnesia info: ~p~n", [mnesia:info()]),
+
+ {success, [A]} = ?start_activities([Node1]),
+ ?log("consistency_after_fallback with ~p on ~p~n",
+ [ReplicaType, Nodes]),
+ TpcbConfig = tpcb_config_dist(ReplicaType, NodeConfig, Nodes, Config),
+ %%TpcbConfig = tpcb_config(ReplicaType, NodeConfig, Nodes),
+ mnesia_tpcb:init(TpcbConfig),
+ A ! fun () -> mnesia_tpcb:run(TpcbConfig) end,
+ timer:sleep(timer:seconds(Delay)),
+
+ {ok, CPName, _NodeList} =
+ mnesia:activate_checkpoint([{max, mnesia:system_info(tables)}]),
+ timer:sleep(timer:seconds(Delay)),
+
+ %% Stop tpcb
+ ?verbose("Stopping TPC-B~n", []),
+ mnesia_tpcb:stop(),
+ ?match(ok, mnesia_tpcb:verify_tabs()),
+
+ ?match(ok, mnesia:backup_checkpoint(CPName,
+ updates_during_checkpoint_activation2)),
+ timer:sleep(timer:seconds(Delay)),
+
+ ?match(ok, mnesia:install_fallback(updates_during_checkpoint_activation2)),
+
+ %% Stop and then start mnesia and check table consistency
+ %%?verbose("Restarting Mnesia~n", []),
+ mnesia_test_lib:kill_mnesia(Nodes),
+ file:delete(updates_during_checkpoint_activation2),
+ mnesia_test_lib:start_mnesia(Nodes,[account,branch,teller, history]),
+
+ ?match(ok, mnesia_tpcb:verify_tabs()),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+updates_during_checkpoint_iteration(doc) ->
+ ["Perform updates while someone is iterating over a checkpoint",
+ "and verify that the iterator really finds the expected data",
+ "regardless of ongoing upates."];
+
+updates_during_checkpoint_iteration(suite) ->
+ [
+ updates_during_checkpoint_iteration_2_ram,
+ updates_during_checkpoint_iteration_2_disc
+ , updates_during_checkpoint_iteration_2_disc_only
+ ].
+
+updates_during_checkpoint_iteration_2_ram(suite) -> [];
+updates_during_checkpoint_iteration_2_ram(Config) when is_list(Config) ->
+ updates_during_checkpoint_iteration(ram_copies, 2, Config).
+
+updates_during_checkpoint_iteration_2_disc(suite) -> [];
+updates_during_checkpoint_iteration_2_disc(Config) when is_list(Config) ->
+ updates_during_checkpoint_iteration(disc_copies, 2, Config).
+
+updates_during_checkpoint_iteration_2_disc_only(suite) -> [];
+updates_during_checkpoint_iteration_2_disc_only(Config) when is_list(Config) ->
+ updates_during_checkpoint_iteration(disc_only_copies, 2, Config).
+
+updates_during_checkpoint_iteration(ReplicaType,NodeConfig,Config) ->
+ %?verbose("updates_during_checkpoint_iteration2 at ~p~n", [self()]),
+ Delay = 5,
+ Nodes = ?acquire_nodes(NodeConfig, Config),
+ Node1 = hd(Nodes),
+ %?verbose("Mnesia info: ~p~n", [mnesia:info()]),
+ File = updates_during_checkpoint_iteration2,
+ {success, [A]} = ?start_activities([Node1]),
+ ?log("updates_during_checkpoint_iteration with ~p on ~p~n",
+ [ReplicaType, Nodes]),
+ TpcbConfig = tpcb_config_dist(ReplicaType, NodeConfig, Nodes, Config),
+ %%TpcbConfig = tpcb_config(ReplicaType, NodeConfig, Nodes),
+ TpcbConfigRec = list2rec(TpcbConfig,
+ record_info(fields,tab_config),
+ #tab_config{}),
+ mnesia_tpcb:init(TpcbConfig),
+ ?match(ok, mnesia_tpcb:verify_tabs()),
+
+ {ok, CPName, _NodeList} =
+ mnesia:activate_checkpoint([{max, mnesia:system_info(tables)},
+ {ram_overrides_dump,true}]),
+ A ! fun () -> mnesia:backup_checkpoint(CPName, File) end,
+
+ do_changes_during_backup(TpcbConfigRec),
+
+ ?match_receive({A,ok}),
+
+ timer:sleep(timer:seconds(Delay)),
+ ?match(ok, mnesia:install_fallback(File)),
+ timer:sleep(timer:seconds(Delay)),
+
+ ?match({error,{"Bad balance",_,_}}, mnesia_tpcb:verify_tabs()),
+
+ mnesia_test_lib:kill_mnesia(Nodes),
+ mnesia_test_lib:start_mnesia(Nodes,[account,branch,teller, history]),
+
+ ?match(ok, mnesia_tpcb:verify_tabs()),
+
+ ?match(ok, file:delete(File)),
+ ?verify_mnesia(Nodes, []).
+
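+%% Dirty-write a known balance (4711) into every account while the backup of
+%% the checkpoint is in progress; the checkpoint retainer should shield the
+%% backup from these later updates.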
+do_changes_during_backup(TpcbConfig) ->
+ loop_branches(TpcbConfig#tab_config.n_branches,
+ TpcbConfig#tab_config.n_accounts_per_branch).
+
+loop_branches(N_br,N_acc) when N_br >= 1 ->
+ loop_accounts(N_br,N_acc),
+ loop_branches(N_br-1,N_acc);
+loop_branches(_,_) -> done.
+
+loop_accounts(N_br, N_acc) when N_acc >= 1 ->
+ A = #account{id=N_acc, branch_id=N_br, balance = 4711},
+ ok = mnesia:dirty_write(A),
+ loop_accounts(N_br, N_acc-1);
+
+loop_accounts(_,_) -> done.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+load_table_with_activated_checkpoint(doc) ->
+ ["Load a table with a checkpoint attached to it and verify that the",
+ "newly loaded replica also gets a checkpoint retainer attached to it",
+ "and that it is consistent with the original retainer."];
+
+load_table_with_activated_checkpoint(suite) ->
+ [
+ load_table_with_activated_checkpoint_ram,
+ load_table_with_activated_checkpoint_disc,
+ load_table_with_activated_checkpoint_disc_only
+ ].
+
+load_table_with_activated_checkpoint_ram(suite) -> [];
+load_table_with_activated_checkpoint_ram(Config) when is_list(Config) ->
+ load_table_with_activated_checkpoint(ram_copies, Config).
+
+load_table_with_activated_checkpoint_disc(suite) -> [];
+load_table_with_activated_checkpoint_disc(Config) when is_list(Config) ->
+ load_table_with_activated_checkpoint(disc_copies, Config).
+
+load_table_with_activated_checkpoint_disc_only(suite) -> [];
+load_table_with_activated_checkpoint_disc_only(Config) when is_list(Config) ->
+ load_table_with_activated_checkpoint(disc_only_copies, Config).
+
+load_table_with_activated_checkpoint(Type, Config) ->
+ Nodes = ?acquire_nodes(2, Config),
+ Node1 = hd(Nodes),
+ Tab = load_test,
+ Def = [{attributes, [key, value]},
+ {Type, Nodes}], %% ??? important that RAM ???
+
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ ?match(ok, mnesia:dirty_write({Tab, 1, 4711})),
+ ?match(ok, mnesia:dirty_write({Tab, 2, 42})),
+ ?match(ok, mnesia:dirty_write({Tab, 3, 256})),
+
+ timer:sleep(timer:seconds(1)),
+
+ {ok, CPName, _NodeList} =
+ mnesia:activate_checkpoint([{max, mnesia:system_info(tables)},
+ {ram_overrides_dump,true}]),
+
+ mnesia_test_lib:stop_mnesia([Node1]),
+ mnesia_test_lib:start_mnesia([Node1],[Tab]),
+ %%--- check whether the checkpoint is attached to both replicas
+ {success, [A,B]} = ?start_activities(Nodes),
+
+ A ! fun () ->
+ mnesia:table_info(Tab,checkpoints)
+ end,
+ ?match_receive({A,[CPName]}),
+
+ B ! fun () ->
+ mnesia:table_info(Tab,checkpoints)
+ end,
+ ?match_receive({B,[CPName]}),
+
+ %%--- check whether both retainers are consistent
+ ?match(ok, mnesia:dirty_write({Tab, 1, 815})),
+ A ! fun () ->
+ mnesia:backup_checkpoint(CPName, load_table_a)
+ end,
+ ?match_receive({A,ok}),
+ B ! fun () ->
+ mnesia:backup_checkpoint(CPName, load_table_b)
+ end,
+ ?match_receive({B,ok}),
+
+ Mod = mnesia_backup, %% Assume local files
+ List_a = view(load_table_a, Mod),
+ List_b = view(load_table_b, Mod),
+
+ ?match(List_a, List_b),
+
+ ?match(ok,file:delete(load_table_a)),
+ ?match(ok,file:delete(load_table_b)),
+ ?verify_mnesia(Nodes, []).
+
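+%% Read all items from a backup and return them sorted, with the cookie in
+%% schema entries replaced so that backups taken on different replicas can
+%% be compared for equality.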
+view(Source, Mod) ->
+ View = fun(Item, Acc) ->
+ ?verbose("tab - item : ~p ~n",[Item]),
+ case Item of
+ {schema, Tab, Cs} -> %% Remove cookie information
+ NewCs = lists:keyreplace(cookie, 1, Cs,
+ {cookie, skip_cookie}),
+ Item2 = {schema, Tab, NewCs},
+ {[Item], [Item2|Acc]};
+ _ ->
+ {[Item], [Item|Acc]}
+ end
+ end,
+ {ok,TabList} =
+ mnesia:traverse_backup(Source, Mod, dummy, read_only, View, []),
+ lists:sort(TabList).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+add_table_copy_to_table_with_activated_checkpoint(doc) ->
+ ["Add a replica to a table with a checkpoint attached to it",
+ "and verify that the new replica also gets a checkpoint",
+ "retainer attached to it and that it is consistent with the",
+ "original retainer."];
+
+add_table_copy_to_table_with_activated_checkpoint(suite) ->
+ [
+ add_table_copy_to_table_with_activated_checkpoint_ram,
+ add_table_copy_to_table_with_activated_checkpoint_disc,
+ add_table_copy_to_table_with_activated_checkpoint_disc_only
+ ].
+
+add_table_copy_to_table_with_activated_checkpoint_ram(suite) -> [];
+add_table_copy_to_table_with_activated_checkpoint_ram(Config) when is_list(Config) ->
+ add_table_copy_to_table_with_activated_checkpoint(ram_copies, Config).
+
+add_table_copy_to_table_with_activated_checkpoint_disc(suite) -> [];
+add_table_copy_to_table_with_activated_checkpoint_disc(Config) when is_list(Config) ->
+ add_table_copy_to_table_with_activated_checkpoint(disc_copies, Config).
+
+add_table_copy_to_table_with_activated_checkpoint_disc_only(suite) -> [];
+add_table_copy_to_table_with_activated_checkpoint_disc_only(Config) when is_list(Config) ->
+ add_table_copy_to_table_with_activated_checkpoint(disc_only_copies, Config).
+
+add_table_copy_to_table_with_activated_checkpoint(Type,Config) ->
+ Nodes = ?acquire_nodes(2, Config),
+ %?verbose("NODES = ~p ~n",[Nodes]),
+ [Node1,Node2] = Nodes,
+
+ Tab = add_test,
+ Def = [{attributes, [key, value]},
+ {Type, [Node1]}], %% ??? important that RAM ???
+
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ ?match(ok, mnesia:dirty_write({Tab, 1, 4711})),
+ ?match(ok, mnesia:dirty_write({Tab, 2, 42})),
+ ?match(ok, mnesia:dirty_write({Tab, 3, 256})),
+
+ {ok, CPName, _NodeList} =
+ mnesia:activate_checkpoint([{max, mnesia:system_info(tables)},
+ {ram_overrides_dump,true}]),
+
+ ?match({atomic,ok},mnesia:add_table_copy(Tab,Node2,ram_copies)),
+
+ %%--- check whether the checkpoint is attached to both replicas
+ {success, [A,B]} = ?start_activities(Nodes),
+
+ A ! fun () ->
+ mnesia:table_info(Tab,checkpoints)
+ end,
+ ?match_receive({A,[CPName]}),
+
+ B ! fun () ->
+ mnesia:table_info(Tab,checkpoints)
+ end,
+ ?match_receive({B,[CPName]}),
+
+ %%--- check whether both retainers are consistent
+
+ ?match(ok, mnesia:dirty_write({Tab, 1, 815})),
+ ?match(ok, mnesia:dirty_write({Tab, 2, 815})),
+
+ A ! fun () ->
+ mnesia:backup_checkpoint(CPName, add_table_a)
+ end,
+ ?match_receive({A,ok}),
+ B ! fun () ->
+ mnesia:backup_checkpoint(CPName, add_table_b)
+ end,
+ ?match_receive({B,ok}),
+
+ Mod = mnesia_backup, %% Assume local files
+
+ List_a = view(add_table_a, Mod),
+ List_b = view(add_table_b, Mod),
+
+ ?match(List_a, List_b),
+
+ ?match(ok,file:delete(add_table_a)),
+ ?match(ok, file:delete(add_table_b)),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+backup_consistency(suite) ->
+ [
+ interupted_install_fallback,
+ interupted_uninstall_fallback,
+ mnesia_down_during_backup_causes_switch,
+ mnesia_down_during_backup_causes_abort,
+ schema_transactions_during_backup
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+interupted_install_fallback(doc) ->
+ ["Verify that a interrupted install_fallback really",
+ "is performed on all nodes or none"];
+
+interupted_install_fallback(suite) ->
+ [
+ inst_fallback_process_dies,
+ fatal_when_inconsistency
+ ].
+
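+%% Kill the process that called mnesia:install_fallback/1 while the fallback
+%% receivers are waiting in pre_swap; the fallback is still expected to be
+%% installed, so the backed up data must be present on all nodes after a
+%% restart.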
+inst_fallback_process_dies(suite) ->
+ [];
+inst_fallback_process_dies(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ {success, [A,_B,_C]} = ?start_activities(Nodes),
+
+ TestPid = self(),
+ DebugId = {mnesia_bup, fallback_receiver_loop, pre_swap},
+ DebugFun =
+ fun(PrevContext, _EvalContext) ->
+ ?verbose("fallback_receiver_loop - pre_swap pid ~p #~p~n",
+ [self(),PrevContext]),
+ TestPid ! {self(),fallback_preswap},
+ case receive_messages([fallback_continue]) of
+ [{TestPid,fallback_continue}] ->
+ ?deactivate_debug_fun(DebugId),
+ PrevContext+1
+ end
+ end,
+ ?activate_debug_fun(DebugId, DebugFun, 1),
+
+ Tab = install_table,
+ Def = [{attributes, [key, value]}, {disc_copies, Nodes}],
+
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+
+ ?match(ok, mnesia:dirty_write({Tab, 1, 4711})),
+ ?match(ok, mnesia:dirty_write({Tab, 2, 42})),
+ ?match(ok, mnesia:dirty_write({Tab, 3, 256})),
+
+ {ok, CPName, _NodeList} =
+ mnesia:activate_checkpoint([{max, mnesia:system_info(tables)},
+ {ram_overrides_dump,true}]),
+
+ ?match(ok, mnesia:backup_checkpoint(CPName, install_backup)),
+
+ A ! fun() -> mnesia:install_fallback(install_backup) end,
+ [{AnsPid,fallback_preswap}] = receive_messages([fallback_preswap]),
+ exit(A, kill),
+ AnsPid ! {self(), fallback_continue},
+ ?match_receive({'EXIT', A, killed}),
+ timer:sleep(2000), %% Wait till fallback is installed everywhere
+
+ mnesia_test_lib:kill_mnesia(Nodes),
+ ?verbose("~n---->Mnesia is stopped everywhere<-----~n", []),
+ ?match([], mnesia_test_lib:start_mnesia(Nodes,[Tab])),
+
+ check_data(Nodes, Tab),
+ ?match(ok, file:delete(install_backup)),
+ ?verify_mnesia(Nodes, []).
+
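+%% The three records from the backup must be readable on every node.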
+check_data([N1 | R], Tab) ->
+ ?match([{Tab, 1, 4711}], rpc:call(N1, mnesia, dirty_read, [{Tab, 1}])),
+ ?match([{Tab, 2, 42}], rpc:call(N1, mnesia, dirty_read, [{Tab, 2}])),
+ ?match([{Tab, 3, 256}], rpc:call(N1, mnesia, dirty_read, [{Tab, 3}])),
+ check_data(R, Tab);
+check_data([], _Tab) ->
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
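+%% Kill the local fallback receiver itself during install_fallback; the
+%% installation must fail, two of the three nodes refuse to restart, and a
+%% new install_fallback afterwards must succeed.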
+fatal_when_inconsistency(suite) ->
+ [];
+fatal_when_inconsistency(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+
+ [Node1, Node2, Node3] = Nodes =
+ ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ {success, [A,_B,_C]} = ?start_activities(Nodes),
+
+ TestPid = self(),
+ DebugId = {mnesia_bup, fallback_receiver_loop, pre_swap},
+ DebugFun =
+ fun(PrevContext, _EvalContext) ->
+ ?verbose("fallback_receiver_loop - pre_swap pid ~p #~p~n",
+ [self(),PrevContext]),
+ TestPid ! {self(),fallback_preswap},
+ case receive_messages([fallback_continue]) of
+ [{TestPid,fallback_continue}] ->
+ ?deactivate_debug_fun(DebugId),
+ PrevContext+1
+ end
+ end,
+ ?activate_debug_fun(DebugId, DebugFun, 1),
+
+ Tab = install_table,
+ Def = [{attributes, [key, value]}, {disc_copies, Nodes}],
+
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+
+ ?match(ok, mnesia:dirty_write({Tab, 1, 4711})),
+ ?match(ok, mnesia:dirty_write({Tab, 2, 42})),
+ ?match(ok, mnesia:dirty_write({Tab, 3, 256})),
+
+ {ok, CPName, _NodeList} =
+ mnesia:activate_checkpoint([{max, mnesia:system_info(tables)},
+ {ram_overrides_dump,true}]),
+
+ ?match(ok, mnesia:backup_checkpoint(CPName, install_backup)),
+ ?match(ok, mnesia:dirty_write({Tab, 2, 42424242})),
+
+ A ! fun() ->
+ mnesia:install_fallback(install_backup)
+ end,
+
+ [{AnsPid,fallback_preswap}] = receive_messages([fallback_preswap]),
+ exit(AnsPid, kill), %% Kill the install-fallback receiver on the local node
+ AnsPid ! {self(), fallback_continue},
+ ?deactivate_debug_fun(DebugId),
+
+ ?match_receive({A,{error,{"Cannot install fallback",
+ {'EXIT',AnsPid,killed}}}}),
+ mnesia_test_lib:kill_mnesia(Nodes),
+ ?verbose("EXPECTING FATAL from 2 nodes WITH CORE DUMP~n", []),
+
+ ?match([], mnesia_test_lib:start_mnesia([Node1],[])),
+ is_running(Node1, yes),
+ ?match([{Node2, mnesia, _}], mnesia_test_lib:start_mnesia([Node2],[])),
+ is_running(Node2, no),
+ ?match([{Node3, mnesia, _}], mnesia_test_lib:start_mnesia([Node3],[])),
+ is_running(Node3, no),
+ mnesia_test_lib:kill_mnesia(Nodes),
+
+ ?match(ok, mnesia:install_fallback(install_backup)),
+ mnesia_test_lib:start_mnesia(Nodes,[Tab]),
+
+ check_data(Nodes, Tab),
+
+ ?match(ok,file:delete(install_backup)),
+ ?verify_mnesia(Nodes, []).
+
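+%% Poll the node once a second until mnesia:system_info(is_running)
+%% returns the expected value.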
+is_running(Node, Shouldbe) ->
+ timer:sleep(1000),
+ Running = rpc:call(Node, mnesia, system_info, [is_running]),
+ case Running of
+ Shouldbe -> ok;
+ _ -> is_running(Node, Shouldbe)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+interupted_uninstall_fallback(doc) ->
+ ["Verify that a interrupted uninstall_fallback really",
+ "is performed on all nodes or none"];
+interupted_uninstall_fallback(suite) ->
+ [
+ after_delete
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+after_delete(doc) ->
+ ["interrupt the uninstall after deletion of ",
+ "fallback files - there shall be no fallback"];
+after_delete(suite) -> [];
+after_delete(Config) when is_list(Config) ->
+ do_uninstall(Config, post_delete).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%
+
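+%% Install a fallback on all nodes and then uninstall it again, with a
+%% debug hook that suspends mnesia_bup:uninstall_fallback2 at DebugPoint
+%% on the node running the uninstall. While it is suspended, Mnesia is
+%% killed on a second node. After restart no node may deliver the backed
+%% up records, i.e. the fallback must be gone everywhere.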
+do_uninstall(Config,DebugPoint) ->
+ ?is_debug_compiled,
+
+ Nodes = ?acquire_nodes(3, Config),
+ %%?verbose("NODES = ~p ~n",[Nodes]),
+
+ {success, [P1,P2,P3]} = ?start_activities(Nodes),
+
+ NP1 = node(P1),
+ NP2 = node(P2),
+
+ {A,B,C} = case node() of
+ NP1 ->
+ %%?verbose("first case ~n"),
+ {P3,P2,P1};
+ NP2 ->
+ %%?verbose("second case ~n"),
+ {P3, P1, P2};
+ _ ->
+ { P1, P2, P3}
+ end,
+
+ Node1 = node(A),
+ Node2 = node(B),
+ Node3 = node(C),
+
+ ?verbose(" A pid:~p node:~p ~n",[A,Node1]),
+ ?verbose(" B pid:~p node:~p ~n",[B,Node2]),
+ ?verbose(" C pid:~p node:~p ~n",[C,Node3]),
+
+
+ TestPid = self(),
+ %%?verbose("TestPid : ~p~n",[TestPid]),
+ DebugId = {mnesia_bup, uninstall_fallback2, DebugPoint},
+ DebugFun = fun(PrevContext, _EvalContext) ->
+ ?verbose("uninstall_fallback pid ~p #~p~n"
+ ,[self(),PrevContext]),
+ TestPid ! {self(),uninstall_predelete},
+ case receive_messages([uninstall_continue]) of
+ [{TestPid,uninstall_continue}] ->
+ ?deactivate_debug_fun(DebugId),
+ %%?verbose("uninstall_fallback continues~n"),
+ PrevContext+1
+ end
+ end,
+ ?remote_activate_debug_fun(Node1,DebugId, DebugFun, 1),
+
+ Tab = install_table,
+ Def = [{attributes, [key, value]},
+ {ram_copies, Nodes}], %% necessary to test different types ???
+
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+
+ ?match(ok, mnesia:dirty_write({Tab, 1, 4711})),
+ ?match(ok, mnesia:dirty_write({Tab, 2, 42})),
+ ?match(ok, mnesia:dirty_write({Tab, 3, 256})),
+
+ {ok, CPName, _NodeList} =
+ mnesia:activate_checkpoint([{max, mnesia:system_info(tables)},
+ {ram_overrides_dump,true}]),
+
+ ?match(ok, mnesia:backup_checkpoint(CPName,install_backup)),
+ timer:sleep(timer:seconds(1)),
+
+ A ! fun () ->
+ mnesia:install_fallback(install_backup)
+ end,
+ ?match_receive({A,ok}),
+
+ A ! fun () ->
+ mnesia:uninstall_fallback()
+ end,
+ %%
+ %% catch the debug entry in mnesia and kill one Mnesia node
+ %%
+
+
+ [{AnsPid,uninstall_predelete}] = receive_messages([uninstall_predelete]),
+
+ ?verbose("AnsPid : ~p~n",[AnsPid]),
+
+ mnesia_test_lib:kill_mnesia([Node2]),
+ timer:sleep(timer:seconds(1)),
+
+ AnsPid ! {self(),uninstall_continue},
+
+ ?match_receive({A,ok}),
+
+ mnesia_test_lib:kill_mnesia(Nodes) ,
+ mnesia_test_lib:start_mnesia(Nodes,[Tab]),
+
+ A ! fun () ->
+ R1 = mnesia:dirty_read({Tab,1}),
+ R2 = mnesia:dirty_read({Tab,2}),
+ R3 = mnesia:dirty_read({Tab,3}),
+ {R1,R2,R3}
+ end,
+ ?match_receive({ A, {[],[],[]} }),
+
+ B ! fun () ->
+ R1 = mnesia:dirty_read({Tab,1}),
+ R2 = mnesia:dirty_read({Tab,2}),
+ R3 = mnesia:dirty_read({Tab,3}),
+ {R1,R2,R3}
+ end,
+ ?match_receive({ B, {[],[],[]} }),
+
+ C ! fun () ->
+ R1 = mnesia:dirty_read({Tab,1}),
+ R2 = mnesia:dirty_read({Tab,2}),
+ R3 = mnesia:dirty_read({Tab,3}),
+ {R1,R2,R3}
+ end,
+ ?match_receive({ C, {[],[],[]} }),
+
+ ?match(ok,file:delete(install_backup)),
+ ?verify_mnesia(Nodes, []).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+mnesia_down_during_backup_causes_switch(doc) ->
+ ["Verify that an ongoing backup is not disturbed",
+ "even if the node hosting the replica that currently",
+ "is being backup'ed is stopped. The backup utility",
+ "is expected to switch over to another replica and",
+ "fulfill the backup."];
+mnesia_down_during_backup_causes_switch(suite) ->
+ [
+ cause_switch_before,
+ cause_switch_after
+ ].
+
+%%%%%%%%%%%%%%%
+
+cause_switch_before(doc) ->
+ ["interrupt the backup before iterating the retainer"];
+cause_switch_before(suite) -> [];
+cause_switch_before(Config) when is_list(Config) ->
+ do_something_during_backup(cause_switch,pre,Config).
+
+%%%%%%%%%%%%%%%
+
+cause_switch_after(doc) ->
+ ["interrupt the backup after iterating the retainer"];
+cause_switch_after(suite) -> [];
+cause_switch_after(Config) when is_list(Config) ->
+ do_something_during_backup(cause_switch,post,Config).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+mnesia_down_during_backup_causes_abort(doc) ->
+ ["Verify that an ongoing backup is aborted nicely",
+ "without leaving any backup file if the last replica",
+ "of a table becomes unavailable due to a node down",
+ "or some crash."];
+mnesia_down_during_backup_causes_abort(suite) ->
+ [
+ cause_abort_before,
+ cause_abort_after
+ ].
+
+%%%%%%%%%%%%%%%%%%
+
+cause_abort_before(doc) ->
+ ["interrupt the backup before iterating the retainer"];
+
+cause_abort_before(suite) -> [];
+cause_abort_before(Config) when is_list(Config) ->
+ do_something_during_backup(cause_abort,pre,Config).
+
+%%%%%%%%%%%%%%%%%%
+
+cause_abort_after(doc) ->
+ ["interrupt the backup after iterating the retainer"];
+
+cause_abort_after(suite) -> [];
+cause_abort_after(Config) when is_list(Config) ->
+ do_something_during_backup(cause_abort,post,Config).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+schema_transactions_during_backup(doc) ->
+ ["Verify that an schema transactions does not",
+ "affect an ongoing backup."];
+schema_transactions_during_backup(suite) ->
+ [
+ change_schema_before,
+ change_schema_after
+ ].
+
+%%%%%%%%%%%%%
+
+change_schema_before(doc) ->
+ ["interrupt the backup before iterating the retainer"];
+change_schema_before(suite) -> [];
+change_schema_before(Config) when is_list(Config) ->
+ do_something_during_backup(change_schema,pre,Config).
+
+%%%%%%%%%%%%%%%%
+
+change_schema_after(doc) ->
+ ["interrupt the backup after iterating the retainer"];
+change_schema_after(suite) -> [];
+change_schema_after(Config) when is_list(Config) ->
+ do_something_during_backup(change_schema,post,Config).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
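+%% Common body for the cause_switch/cause_abort/change_schema cases.
+%% A debug hook in mnesia_log:tab_copier blocks the backup (it waits for
+%% a global lock held by the test process) while the test either kills
+%% the node being read from, kills all replica nodes, or performs a
+%% schema transaction. Releasing the lock lets the backup continue, and
+%% the expected outcome (switch, abort or unaffected backup) is verified.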
+do_something_during_backup(Action,DebugPoint,Config) ->
+ ?is_debug_compiled,
+
+ Nodes = ?acquire_nodes(3, Config),
+
+ {success, [A,B,C]} = ?start_activities(Nodes),
+
+ Node1 = node(A),
+ Node2 = node(B),
+ Node3 = node(C),
+
+ TestPid = self(),
+ %%?verbose("TestPid : ~p~n",[TestPid]),
+
+ Tab = interrupt_table,
+ Bak = interrupt_backup,
+ Def = [{attributes, [key, value]},
+ {ram_copies, [Node2,Node3]}],
+ %% necessary to test different types ???
+
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+
+
+
+ DebugId = {mnesia_log, tab_copier, DebugPoint},
+ DebugFun = fun(PrevContext, EvalContext) ->
+ ?verbose("interrupt backup pid ~p #~p ~n context ~p ~n"
+ ,[self(),PrevContext,EvalContext]),
+ TestPid ! {self(),interrupt_backup_pre},
+ global:set_lock({{lock_for_backup, Tab}, self()},
+ Nodes,
+ infinity),
+
+ %%?verbose("interrupt backup - continues ~n"),
+ ?deactivate_debug_fun(DebugId),
+ PrevContext+1
+ end,
+ ?remote_activate_debug_fun(Node1,DebugId, DebugFun, 1),
+
+ ?match(ok, mnesia:dirty_write({Tab, 1, 4711})),
+ ?match(ok, mnesia:dirty_write({Tab, 2, 42})),
+ ?match(ok, mnesia:dirty_write({Tab, 3, 256})),
+
+ {ok, CPName, _NodeList} =
+ mnesia:activate_checkpoint([{max, mnesia:system_info(tables)},
+ {ram_overrides_dump,true}]),
+
+ A ! fun () ->
+ %%?verbose("node: ~p pid: ~p ~n",[node(),self()]),
+ mnesia:table_info(Tab,where_to_read)
+ end,
+
+ ReadNode_a = receive { A, ReadNode_a_tmp } -> ReadNode_a_tmp end,
+ ?verbose("ReadNode ~p ~n",[ReadNode_a]),
+
+ global:set_lock({{lock_for_backup, Tab}, self()}, Nodes, infinity),
+
+ A ! fun () -> %% A shall perform the backup, so the test proc is
+ %% able to do further actions in between
+ mnesia:backup_checkpoint(CPName, Bak)
+ end,
+
+ %% catch the debug function of mnesia, stop the backup process
+ %% kill the node ReadNode_a and continue the backup process
+ %% As there is a second replica of the table, the backup shall continue
+
+ case receive_messages([interrupt_backup_pre]) of
+ [{_AnsPid,interrupt_backup_pre}] -> ok
+ end,
+
+ case Action of
+ cause_switch ->
+ mnesia_test_lib:kill_mnesia([ReadNode_a]),
+ timer:sleep(timer:seconds(1));
+ cause_abort ->
+ mnesia_test_lib:kill_mnesia([Node2,Node3]),
+ timer:sleep(timer:seconds(1));
+ change_schema ->
+ Tab2 = second_interrupt_table,
+ Def2 = [{attributes, [key, value]},
+ {ram_copies, Nodes}],
+
+ ?match({atomic, ok}, mnesia:create_table(Tab2, Def2))
+ end,
+
+ %% AnsPid ! {self(),interrupt_backup_continue},
+ global:del_lock({{lock_for_backup, Tab}, self()}, Nodes),
+
+ case Action of
+ cause_abort ->
+
+ %% answer of A when finishing the backup
+ ?match_receive({A,{error, _}}),
+
+ ?match({error,{"Cannot install fallback",_}},
+ mnesia:install_fallback(Bak));
+ _ -> %% cause_switch, change_schema
+
+ ?match_receive({A,ok}), %% answer of A when finishing the backup
+
+ %% send a fun to that node where mnesia is still running
+ WritePid = case ReadNode_a of
+ Node2 -> C; %% node(C) == Node3
+ Node3 -> B
+ end,
+ WritePid ! fun () ->
+ ?match(ok, mnesia:dirty_write({Tab, 1, 815})),
+ ?match(ok, mnesia:dirty_write({Tab, 2, 816})),
+ ok
+ end,
+ ?match_receive({ WritePid, ok }),
+ ?match(ok, mnesia:install_fallback(Bak))
+ end,
+
+ %% Stop and then start mnesia and check table consistency
+ %%?verbose("Restarting Mnesia~n", []),
+ mnesia_test_lib:kill_mnesia(Nodes),
+ mnesia_test_lib:start_mnesia(Nodes,[Tab]),
+
+ case Action of
+ cause_switch ->
+ %% the backup should exist
+ cross_check_tables([A,B,C],Tab,{[{Tab,1,4711}],
+ [{Tab,2,42}],
+ [{Tab,3,256}] }),
+ ?match(ok,file:delete(Bak));
+ cause_abort ->
+ %% the backup should NOT exist
+ cross_check_tables([A,B,C],Tab,{[],[],[]}),
+ %% file does not exist
+ ?match({error, _},file:delete(Bak));
+ change_schema ->
+ %% the backup should exist
+ cross_check_tables([A,B,C],Tab,{[{Tab,1,4711}],
+ [{Tab,2,42}],
+ [{Tab,3,256}] }),
+ ?match(ok,file:delete(Bak))
+ end,
+ ?verify_mnesia(Nodes, []).
+
+%% check the contents of the table
+cross_check_tables([],_tab,_elements) -> ok;
+cross_check_tables([Pid|Rest],Tab,{Val1,Val2,Val3}) ->
+ Pid ! fun () ->
+ R1 = mnesia:dirty_read({Tab,1}),
+ R2 = mnesia:dirty_read({Tab,2}),
+ R3 = mnesia:dirty_read({Tab,3}),
+ {R1,R2,R3}
+ end,
+ ?match_receive({ Pid, {Val1, Val2, Val3 } }),
+ cross_check_tables(Rest,Tab,{Val1,Val2,Val3} ).
diff --git a/lib/mnesia/test/mnesia_cost.erl b/lib/mnesia/test/mnesia_cost.erl
new file mode 100644
index 0000000000..54cb2b3064
--- /dev/null
+++ b/lib/mnesia/test/mnesia_cost.erl
@@ -0,0 +1,222 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(mnesia_cost).
+-compile(export_all).
+
+%% This code exercises the mnesia system and produces a bunch
+%% of measurements on what various things cost
+
+-define(TIMES, 1000). %% set to at least 1000 when running for real !!
+
+%% This is the record we perform all ops on in this test
+
+-record(item, {a = 1234,
+ b = foobar,
+ c = "1.2.3.4",
+ d = {'Lennart', 'Hyland'},
+ e = true
+ }).
+
+go() ->
+ go([node() | nodes()]).
+
+go(Nodes) when hd(Nodes) == node() ->
+ {ok, Out} = file:open("MNESIA_COST", write),
+ put(out, Out),
+
+ rpc:multicall(Nodes, mnesia, lkill, []),
+ ok = mnesia:delete_schema(Nodes),
+ ok = mnesia:create_schema(Nodes),
+ rpc:multicall(Nodes, mnesia, start, []),
+ TabDef = [{attributes, record_info(fields, item)}],
+ {atomic, ok} = mnesia:create_table(item, TabDef),
+
+ round("single ram copy", "no index"),
+ {atomic, ok} = mnesia:add_table_index(item, #item.e),
+ round("single ram copy", "One index"),
+
+ {atomic, ok} = mnesia:add_table_index(item, #item.c),
+ round("single ram copy", "Two indexes"),
+
+ {atomic, ok} = mnesia:del_table_index(item, #item.e),
+ {atomic, ok} = mnesia:del_table_index(item, #item.c),
+
+ {atomic, ok} = mnesia:change_table_copy_type(item, node(), disc_copies),
+ round("single disc copy", "no index"),
+
+ {atomic, ok} = mnesia:change_table_copy_type(item, node(), ram_copies),
+
+ case length(Nodes) of
+ Len when Len < 2 ->
+ format("<WARNING> replication skipped. Too few nodes.", []);
+ _Len ->
+ N2 = lists:nth(2, Nodes),
+ {atomic, ok} = mnesia:add_table_copy(item, N2, ram_copies),
+ round("2 replicated ram copy", "no index")
+ end,
+ file:close(Out),
+ erase(out),
+ ok.
+
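+%% One measurement round: time a plain write, a read, a read+write,
+%% a wread+write and a match_object+writes transaction for the given
+%% replication scheme and index configuration.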
+round(Replication, Index) ->
+ run(Replication, Index, [write],
+ fun() -> mnesia:write(#item{}) end),
+
+
+ run(Replication, Index, [read],
+ fun() -> mnesia:read({item, 1234}) end),
+
+ run(Replication, Index, [read, write],
+ fun() -> mnesia:read({item, 1234}),
+ mnesia:write(#item{}) end),
+
+ run(Replication, Index, [wread, write],
+ fun() -> mnesia:wread({item, 1234}),
+ mnesia:write(#item{}) end),
+
+
+ run(Replication, Index, [match, write, write, write],
+ fun() -> mnesia:match_object({item, 1, '_', '_', '_', true}),
+ mnesia:write(#item{a =1}),
+ mnesia:write(#item{a =2}),
+ mnesia:write(#item{a =3}) end).
+
+
+format(F, As) ->
+ io:format(get(out), F, As).
+
+run(What, OtherInfo, Ops, F) ->
+ run(t, What, OtherInfo, Ops, F).
+
+run(How, What, OtherInfo, Ops, F) ->
+ T1 = erlang:now(),
+ statistics(runtime),
+ do_times(How, ?TIMES, F),
+ {_, RunTime} = statistics(runtime),
+ T2 = erlang:now(),
+ RealTime = subtr(T1, T2),
+ report(How, What, OtherInfo, Ops, RunTime, RealTime).
+
+report(t, What, OtherInfo, Ops, RunTime, RealTime) ->
+ format("~s, ~s, transaction call ", [What, OtherInfo]),
+ format("Ops is ", []),
+ lists:foreach(fun(Op) -> format("~w-", [Op]) end, Ops),
+
+ format("~n ~w/~w Millisecs/Trans ~w/~w MilliSecs/Operation ~n~n",
+ [RunTime/?TIMES,
+ RealTime/?TIMES,
+ RunTime/(?TIMES*length(Ops)),
+ RealTime/(?TIMES*length(Ops))]);
+
+report(dirty, What, OtherInfo, Ops, RunTime, RealTime) ->
+ format("~s, ~s, dirty calls ", [What, OtherInfo]),
+ format("Ops is ", []),
+ lists:foreach(fun(Op) -> format("~w-", [Op]) end, Ops),
+
+ format("~n ~w/~w Millisecs/Bunch ~w/~w MilliSecs/Operation ~n~n",
+ [RunTime/?TIMES,
+ RealTime/?TIMES,
+ RunTime/(?TIMES*length(Ops)),
+ RealTime/(?TIMES*length(Ops))]).
+
+
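+%% Difference between two erlang:now() timestamps in milliseconds.
+%% Each {MegaSecs, Secs, MicroSecs} triple is flattened to microseconds
+%% before subtracting, e.g. subtr({1300,0,0}, {1300,2,500000}) -> 2500.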
+subtr(Before, After) ->
+ E =(element(1,After)*1000000000000
+ +element(2,After)*1000000+element(3,After)) -
+ (element(1,Before)*1000000000000
+ +element(2,Before)*1000000+element(3,Before)),
+ E div 1000.
+
+do_times(t, I, F) ->
+ do_trans_times(I, F);
+do_times(dirty, I, F) ->
+ do_dirty(I, F).
+
+do_trans_times(I, F) when I /= 0 ->
+ {atomic, _} = mnesia:transaction(F),
+ do_trans_times(I-1, F);
+do_trans_times(_,_) -> ok.
+
+do_dirty(I, F) when I /= 0 ->
+ F(),
+ do_dirty(I-1, F);
+do_dirty(_,_) -> ok.
+
+
+
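+%% Measure how long it takes to load 400 tables, both from local disc
+%% and over the network, with 1 and 4 table loaders respectively.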
+table_load([N1,N2| _ ] = Ns) ->
+ Nodes = [N1,N2],
+ rpc:multicall(Ns, mnesia, lkill, []),
+ ok = mnesia:delete_schema(Ns),
+ ok = mnesia:create_schema(Nodes),
+ rpc:multicall(Nodes, mnesia, start, []),
+ TabDef = [{disc_copies,[N1]},{ram_copies,[N2]},
+ {attributes,record_info(fields,item)},{record_name,item}],
+ Tabs = [list_to_atom("tab" ++ integer_to_list(I)) || I <- lists:seq(1,400)],
+
+ [mnesia:create_table(Tab,TabDef) || Tab <- Tabs],
+
+%% InitTab = fun(Tab) ->
+%% mnesia:write_lock_table(Tab),
+%% InitRec = fun(Key) -> mnesia:write(Tab,#item{a=Key},write) end,
+%% lists:foreach(InitRec, lists:seq(1,100))
+%% end,
+%%
+%% {Time,{atomic,ok}} = timer:tc(mnesia,transaction, [fun() ->lists:foreach(InitTab, Tabs) end]),
+ mnesia:dump_log(),
+%% io:format("Init took ~p msec ~n", [Time/1000]),
+ rpc:call(N2, mnesia, stop, []), timer:sleep(1000),
+ mnesia:stop(), timer:sleep(500),
+ %% Warmup
+ ok = mnesia:start([{no_table_loaders, 1}]),
+ timer:tc(mnesia, wait_for_tables, [Tabs, infinity]),
+ mnesia:dump_log(),
+ rpc:call(N2, mnesia, dump_log, []),
+ io:format("Initialized ~n",[]),
+
+ mnesia:stop(), timer:sleep(1000),
+ ok = mnesia:start([{no_table_loaders, 1}]),
+ {T1, ok} = timer:tc(mnesia, wait_for_tables, [Tabs, infinity]),
+ io:format("Loading from disc with 1 loader ~p msec~n",[T1/1000]),
+ mnesia:stop(), timer:sleep(1000),
+ ok = mnesia:start([{no_table_loaders, 4}]),
+ {T2, ok} = timer:tc(mnesia, wait_for_tables, [Tabs, infinity]),
+ io:format("Loading from disc with 4 loader ~p msec~n",[T2/1000]),
+
+ %% Warmup
+ rpc:call(N2, ?MODULE, remote_load, [Tabs,4]),
+ io:format("Initialized ~n",[]),
+
+
+ T3 = rpc:call(N2, ?MODULE, remote_load, [Tabs,1]),
+ io:format("Loading from net with 1 loader ~p msec~n",[T3/1000]),
+
+ T4 = rpc:call(N2, ?MODULE, remote_load, [Tabs,4]),
+ io:format("Loading from net with 4 loader ~p msec~n",[T4/1000]),
+
+ ok.
+
+remote_load(Tabs,Loaders) ->
+ ok = mnesia:start([{no_table_loaders, Loaders}]),
+%% io:format("~p ~n", [mnesia_controller:get_info(500)]),
+ {Time, ok} = timer:tc(mnesia, wait_for_tables, [Tabs, infinity]),
+ timer:sleep(1000), mnesia:stop(), timer:sleep(1000),
+ Time.
diff --git a/lib/mnesia/test/mnesia_dbn_meters.erl b/lib/mnesia/test/mnesia_dbn_meters.erl
new file mode 100644
index 0000000000..feaf90ee75
--- /dev/null
+++ b/lib/mnesia/test/mnesia_dbn_meters.erl
@@ -0,0 +1,242 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(mnesia_dbn_meters).
+-export([
+ start/0,
+ local_start/0,
+ distr_start/1,
+ start/3
+ ]).
+
+-record(simple,{key,val=0}).
+-define(key,1).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Configuration and start
+
+start() ->
+ local_start(),
+ distr_start(nodes()).
+
+local_start() ->
+ start(one_ram_only,[node()],some_meters()),
+ start(one_disc_only,[node()],some_meters()).
+
+distr_start([]) ->
+ local_only;
+distr_start(OtherNodes) when is_list(OtherNodes) ->
+ start(ram_and_ram,[node()|OtherNodes],some_meters()),
+ start(disc_and_disc,[node()|OtherNodes],some_meters()).
+
+start(Config,Nodes,Meters) ->
+ Attrs = record_info(fields,simple),
+ Schema = [{name,simple},{type,set},{attributes,Attrs}] ++ config(Config,Nodes),
+ L = '====================',
+ io:format("~n~p dbn_meters: ~p ~p~nSchema = ~p.~n~n",[L,Config,L,Schema]),
+ ok = mnesia:delete_schema(Nodes),
+ ok = mnesia:create_schema(Nodes),
+ rpc:multicall(Nodes, mnesia, start, []),
+ {atomic,_} = mnesia:create_table(Schema),
+ lists:foreach(fun report_meter/1,Meters),
+ {atomic, ok} = mnesia:delete_table(simple),
+ rpc:multicall(Nodes, mnesia, stop, []),
+ ok.
+
+config(one_ram_only,[Single|_]) ->
+ [{ram_copies,[Single]}];
+config(ram_and_ram,[Master|[Slave|_]]) ->
+ [{ram_copies,[Master,Slave]}];
+config(one_disc_only,[Single|_]) ->
+ [{disc_copies,[Single]}];
+config(disc_and_disc,[Master|[Slave|_]]) ->
+ [{disc_copies,[Master,Slave]}];
+config(Config,Nodes) ->
+ io:format("<ERROR> Config ~p not supported or too few nodes ~p given~n",[Config,Nodes]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% The various DBN meters
+some_meters() ->
+ [create,
+ open_safe_read,
+ open_dirty_read,
+ get_int,
+ open_update,
+ put_int,
+ put_int_and_copy,
+ dirty_put_int_and_copy,
+ start_trans,
+ commit_one_update,
+ delete,
+ dirty_delete
+ ].
+
+report_meter(Meter) ->
+ Times = 100,
+ Micros = repeat_meter(Meter,{atomic,{0,ignore}},Times) div Times,
+ io:format("\t~-30w ~-10w micro seconds (mean of ~p repetitions)~n",[Meter,Micros,Times]).
+
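+%% Run the meter Times times and accumulate the elapsed microseconds.
+%% Each meter returns {atomic, {Micros, Result}}; aborted or crashed
+%% meters are reported and counted as zero.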
+repeat_meter(_Meter,{atomic,{Micros,_Result}},0) ->
+ Micros;
+repeat_meter(Meter,{atomic,{Micros,_Result}},Times) when Times > 0 ->
+ repeat_meter(Meter,catch meter(Meter),Times-1) + Micros;
+repeat_meter(Meter,{aborted,Reason},Times) when Times > 0 ->
+ io:format("<ERROR>\t~-20w\t,aborted, because ~p~n",[Meter,Reason]),
+ 0;
+repeat_meter(Meter,{'EXIT',Reason},Times) when Times > 0 ->
+ io:format("<ERROR>\t~-20w\tcrashed, because ~p~n",[Meter,Reason]),
+ 0.
+
+meter(create) ->
+ Key = 1,
+ mnesia:transaction(fun() -> mnesia:delete({simple,Key}) end),
+ Fun = fun() ->
+ BeforeT = erlang:now(),
+ R = mnesia:write(#simple{key=Key}),
+ AfterT = erlang:now(),
+ elapsed_time(BeforeT,AfterT,R)
+ end,
+ mnesia:transaction(Fun);
+
+meter(open_safe_read) ->
+ Key = 2,
+ mnesia:transaction(fun() -> mnesia:write(#simple{key=Key}) end),
+ Fun = fun() ->
+ BeforeT = erlang:now(),
+ R = mnesia:read({simple,Key}),
+ AfterT = erlang:now(),
+ elapsed_time(BeforeT,AfterT,R)
+ end,
+ mnesia:transaction(Fun);
+
+meter(open_dirty_read) ->
+ Key = 21,
+ mnesia:transaction(fun() -> mnesia:write(#simple{key=Key}) end),
+ Fun = fun() ->
+ BeforeT = erlang:now(),
+ R = mnesia:dirty_read({simple,Key}),
+ AfterT = erlang:now(),
+ elapsed_time(BeforeT,AfterT,R)
+ end,
+ mnesia:transaction(Fun);
+
+meter(get_int) ->
+ Key = 3,
+ mnesia:transaction(fun() -> mnesia:write(#simple{key=Key}) end),
+ Fun = fun() ->
+ [Simple] = mnesia:read({simple,Key}),
+ BeforeT = erlang:now(),
+ Int = Simple#simple.val,
+ AfterT = erlang:now(),
+ elapsed_time(BeforeT,AfterT,Int)
+ end,
+ mnesia:transaction(Fun);
+
+meter(open_update) ->
+ Key = 3,
+ mnesia:transaction(fun() -> mnesia:write(#simple{key=Key}) end),
+ Fun = fun() ->
+ BeforeT = erlang:now(),
+ R = mnesia:wread({simple,Key}),
+ AfterT = erlang:now(),
+ elapsed_time(BeforeT,AfterT,R)
+ end,
+ mnesia:transaction(Fun);
+
+meter(put_int) ->
+ Key = 4,
+ mnesia:transaction(fun() -> mnesia:write(#simple{key=Key}) end),
+ Fun = fun() ->
+ [Simple] = mnesia:wread({simple,Key}),
+ BeforeT = erlang:now(),
+ R = Simple#simple{val=7},
+ AfterT = erlang:now(),
+ elapsed_time(BeforeT,AfterT,R)
+ end,
+ mnesia:transaction(Fun);
+
+meter(put_int_and_copy) ->
+ Key = 5,
+ mnesia:transaction(fun() -> mnesia:write(#simple{key=Key}) end),
+ Fun = fun() ->
+ [Simple] = mnesia:wread({simple,Key}),
+ BeforeT = erlang:now(),
+ Simple2 = Simple#simple{val=17},
+ R = mnesia:write(Simple2),
+ AfterT = erlang:now(),
+ elapsed_time(BeforeT,AfterT,R)
+ end,
+ mnesia:transaction(Fun);
+
+meter(dirty_put_int_and_copy) ->
+ Key = 55,
+ mnesia:dirty_write(#simple{key=Key}),
+ [Simple] = mnesia:dirty_read({simple,Key}),
+ BeforeT = erlang:now(),
+ Simple2 = Simple#simple{val=17},
+ R = mnesia:dirty_write(Simple2),
+ AfterT = erlang:now(),
+ {atomic,elapsed_time(BeforeT,AfterT,R)};
+
+meter(start_trans) ->
+ BeforeT = erlang:now(),
+ {atomic,AfterT} = mnesia:transaction(fun() -> erlang:now() end),
+ {atomic,elapsed_time(BeforeT,AfterT,ok)};
+
+meter(commit_one_update) ->
+ Key = 6,
+ mnesia:transaction(fun() -> mnesia:write(#simple{key=Key}) end),
+ Fun = fun() ->
+ [Simple] = mnesia:wread({simple,Key}),
+ Simple2 = Simple#simple{val=27},
+ _R = mnesia:write(Simple2),
+ erlang:now()
+ end,
+ {atomic,BeforeT} = mnesia:transaction(Fun),
+ AfterT = erlang:now(),
+ {atomic,elapsed_time(BeforeT,AfterT,ok)};
+
+meter(delete) ->
+ Key = 7,
+ mnesia:transaction(fun() -> mnesia:write(#simple{key=Key}) end),
+ Fun = fun() ->
+ BeforeT = erlang:now(),
+ R = mnesia:delete({simple,Key}),
+ AfterT = erlang:now(),
+ elapsed_time(BeforeT,AfterT,R)
+ end,
+ mnesia:transaction(Fun);
+
+meter(dirty_delete) ->
+ Key = 75,
+ mnesia:dirty_write(#simple{key=Key}),
+ BeforeT = erlang:now(),
+ R = mnesia:dirty_delete({simple,Key}),
+ AfterT = erlang:now(),
+ {atomic, elapsed_time(BeforeT,AfterT,R)}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Calculate the elapsed time
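+%% Both timestamps are erlang:now() triples {MegaSecs, Secs, MicroSecs};
+%% the result is {ElapsedMicroSecs, Result},
+%% e.g. elapsed_time({0,0,0}, {0,1,250}, ok) -> {1000250, ok}.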
+elapsed_time(BeforeT,AfterT,Result) ->
+ {(element(1,AfterT)*1000000000000
+ +element(2,AfterT)*1000000+element(3,AfterT)) -
+ (element(1,BeforeT)*1000000000000
+ +element(2,BeforeT)*1000000+element(3,BeforeT)),Result}.
diff --git a/lib/mnesia/test/mnesia_dirty_access_test.erl b/lib/mnesia/test/mnesia_dirty_access_test.erl
new file mode 100644
index 0000000000..5f9f2a9733
--- /dev/null
+++ b/lib/mnesia/test/mnesia_dirty_access_test.erl
@@ -0,0 +1,927 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(mnesia_dirty_access_test).
+-author('[email protected]').
+-compile([export_all]).
+-include("mnesia_test_lib.hrl").
+
+init_per_testcase(Func, Conf) ->
+ mnesia_test_lib:init_per_testcase(Func, Conf).
+
+fin_per_testcase(Func, Conf) ->
+ mnesia_test_lib:fin_per_testcase(Func, Conf).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+all(doc) ->
+ ["Evil dirty access, regardless of transaction scope.",
+ "Invoke all functions in the API and try to cover all legal uses",
+ "cases as well the illegal dito. This is a complement to the",
+ "other more explicit test cases."];
+all(suite) ->
+ [
+ dirty_write,
+ dirty_read,
+ dirty_update_counter,
+ dirty_delete,
+ dirty_delete_object,
+ dirty_match_object,
+ dirty_index,
+ dirty_iter,
+ admin_tests
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Write records dirty
+
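+%% Each storage specific case below (ram, disc, disc_only) delegates to
+%% dirty_write/2 with the corresponding storage type.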
+dirty_write(suite) ->
+ [
+ dirty_write_ram,
+ dirty_write_disc,
+ dirty_write_disc_only
+ ].
+
+dirty_write_ram(suite) -> [];
+dirty_write_ram(Config) when is_list(Config) ->
+ dirty_write(Config, ram_copies).
+
+dirty_write_disc(suite) -> [];
+dirty_write_disc(Config) when is_list(Config) ->
+ dirty_write(Config, disc_copies).
+
+dirty_write_disc_only(suite) -> [];
+dirty_write_disc_only(Config) when is_list(Config) ->
+ dirty_write(Config, disc_only_copies).
+
+dirty_write(Config, Storage) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = dirty_write,
+ Def = [{attributes, [k, v]}, {Storage, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+
+ ?match({'EXIT', _}, mnesia:dirty_write([])),
+ ?match({'EXIT', _}, mnesia:dirty_write({Tab, 2})),
+ ?match({'EXIT', _}, mnesia:dirty_write({foo, 2})),
+ ?match(ok, mnesia:dirty_write({Tab, 1, 2})),
+
+ ?match({atomic, ok}, mnesia:transaction(fun() ->
+ mnesia:dirty_write({Tab, 1, 2}) end)),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Read records dirty
+
+dirty_read(suite) ->
+ [
+ dirty_read_ram,
+ dirty_read_disc,
+ dirty_read_disc_only
+ ].
+
+dirty_read_ram(suite) -> [];
+dirty_read_ram(Config) when is_list(Config) ->
+ dirty_read(Config, ram_copies).
+
+dirty_read_disc(suite) -> [];
+dirty_read_disc(Config) when is_list(Config) ->
+ dirty_read(Config, disc_copies).
+
+dirty_read_disc_only(suite) -> [];
+dirty_read_disc_only(Config) when is_list(Config) ->
+ dirty_read(Config, disc_only_copies).
+
+dirty_read(Config, Storage) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = dirty_read,
+ Def = [{type, bag}, {attributes, [k, v]}, {Storage, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+
+ ?match({'EXIT', _}, mnesia:dirty_read([])),
+ ?match({'EXIT', _}, mnesia:dirty_read({Tab})),
+ ?match({'EXIT', _}, mnesia:dirty_read({Tab, 1, 2})),
+ ?match([], mnesia:dirty_read({Tab, 1})),
+ ?match(ok, mnesia:dirty_write({Tab, 1, 2})),
+ ?match([{Tab, 1, 2}], mnesia:dirty_read({Tab, 1})),
+ ?match(ok, mnesia:dirty_write({Tab, 1, 3})),
+ ?match([{Tab, 1, 2}, {Tab, 1, 3}], mnesia:dirty_read({Tab, 1})),
+
+ ?match({atomic, [{Tab, 1, 2}, {Tab, 1, 3}]},
+ mnesia:transaction(fun() -> mnesia:dirty_read({Tab, 1}) end)),
+
+ ?match(false, mnesia:async_dirty(fun() -> mnesia:is_transaction() end)),
+ ?match(false, mnesia:sync_dirty(fun() -> mnesia:is_transaction() end)),
+ ?match(false, mnesia:ets(fun() -> mnesia:is_transaction() end)),
+ ?match(false, mnesia:activity(async_dirty, fun() -> mnesia:is_transaction() end)),
+ ?match(false, mnesia:activity(sync_dirty, fun() -> mnesia:is_transaction() end)),
+ ?match(false, mnesia:activity(ets, fun() -> mnesia:is_transaction() end)),
+
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Update counter record dirty
+
+dirty_update_counter(suite) ->
+ [
+ dirty_update_counter_ram,
+ dirty_update_counter_disc,
+ dirty_update_counter_disc_only
+ ].
+
+dirty_update_counter_ram(suite) -> [];
+dirty_update_counter_ram(Config) when is_list(Config) ->
+ dirty_update_counter(Config, ram_copies).
+
+dirty_update_counter_disc(suite) -> [];
+dirty_update_counter_disc(Config) when is_list(Config) ->
+ dirty_update_counter(Config, disc_copies).
+
+dirty_update_counter_disc_only(suite) -> [];
+dirty_update_counter_disc_only(Config) when is_list(Config) ->
+ dirty_update_counter(Config, disc_only_copies).
+
+dirty_update_counter(Config, Storage) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = dirty_update_counter,
+ Def = [{attributes, [k, v]}, {Storage, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ ?match(ok, mnesia:dirty_write({Tab, 1, 2})),
+
+ ?match({'EXIT', _}, mnesia:dirty_update_counter({Tab, 1}, [])),
+ ?match({'EXIT', _}, mnesia:dirty_update_counter({Tab}, 3)),
+ ?match({'EXIT', _}, mnesia:dirty_update_counter({foo, 1}, 3)),
+ ?match(5, mnesia:dirty_update_counter({Tab, 1}, 3)),
+ ?match([{Tab, 1, 5}], mnesia:dirty_read({Tab, 1})),
+
+ ?match({atomic, 8}, mnesia:transaction(fun() ->
+ mnesia:dirty_update_counter({Tab, 1}, 3) end)),
+
+ ?match(1, mnesia:dirty_update_counter({Tab, foo}, 1)),
+ ?match([{Tab, foo,1}], mnesia:dirty_read({Tab,foo})),
+
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Delete record dirty
+
+dirty_delete(suite) ->
+ [
+ dirty_delete_ram,
+ dirty_delete_disc,
+ dirty_delete_disc_only
+ ].
+
+dirty_delete_ram(suite) -> [];
+dirty_delete_ram(Config) when is_list(Config) ->
+ dirty_delete(Config, ram_copies).
+
+dirty_delete_disc(suite) -> [];
+dirty_delete_disc(Config) when is_list(Config) ->
+ dirty_delete(Config, disc_copies).
+
+dirty_delete_disc_only(suite) -> [];
+dirty_delete_disc_only(Config) when is_list(Config) ->
+ dirty_delete(Config, disc_only_copies).
+
+dirty_delete(Config, Storage) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = dirty_delete,
+ Def = [{type, bag}, {attributes, [k, v]}, {Storage, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+
+ ?match({'EXIT', _}, mnesia:dirty_delete([])),
+ ?match({'EXIT', _}, mnesia:dirty_delete({Tab})),
+ ?match({'EXIT', _}, mnesia:dirty_delete({Tab, 1, 2})),
+ ?match(ok, mnesia:dirty_delete({Tab, 1})),
+ ?match(ok, mnesia:dirty_write({Tab, 1, 2})),
+ ?match(ok, mnesia:dirty_delete({Tab, 1})),
+ ?match(ok, mnesia:dirty_write({Tab, 1, 2})),
+ ?match(ok, mnesia:dirty_write({Tab, 1, 2})),
+ ?match(ok, mnesia:dirty_delete({Tab, 1})),
+
+ ?match(ok, mnesia:dirty_write({Tab, 1, 2})),
+ ?match({atomic, ok}, mnesia:transaction(fun() ->
+ mnesia:dirty_delete({Tab, 1}) end)),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Delete matching record dirty
+
+dirty_delete_object(suite) ->
+ [
+ dirty_delete_object_ram,
+ dirty_delete_object_disc,
+ dirty_delete_object_disc_only
+ ].
+
+dirty_delete_object_ram(suite) -> [];
+dirty_delete_object_ram(Config) when is_list(Config) ->
+ dirty_delete_object(Config, ram_copies).
+
+dirty_delete_object_disc(suite) -> [];
+dirty_delete_object_disc(Config) when is_list(Config) ->
+ dirty_delete_object(Config, disc_copies).
+
+dirty_delete_object_disc_only(suite) -> [];
+dirty_delete_object_disc_only(Config) when is_list(Config) ->
+ dirty_delete_object(Config, disc_only_copies).
+
+dirty_delete_object(Config, Storage) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = dirty_delete_object,
+ Def = [{type, bag}, {attributes, [k, v]}, {Storage, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+
+ OneRec = {Tab, 1, 2},
+ ?match({'EXIT', _}, mnesia:dirty_delete_object([])),
+ ?match({'EXIT', _}, mnesia:dirty_delete_object({Tab})),
+ ?match({'EXIT', _}, mnesia:dirty_delete_object({Tab, 1})),
+ ?match(ok, mnesia:dirty_delete_object(OneRec)),
+ ?match(ok, mnesia:dirty_write(OneRec)),
+ ?match(ok, mnesia:dirty_delete_object(OneRec)),
+ ?match(ok, mnesia:dirty_write(OneRec)),
+ ?match(ok, mnesia:dirty_write(OneRec)),
+ ?match(ok, mnesia:dirty_delete_object(OneRec)),
+
+ ?match(ok, mnesia:dirty_write(OneRec)),
+ ?match({atomic, ok}, mnesia:transaction(fun() ->
+ mnesia:dirty_delete_object(OneRec) end)),
+
+ ?match({'EXIT', {aborted, {bad_type, Tab, _}}}, mnesia:dirty_delete_object(Tab, {Tab, {['_']}, 21})),
+ ?match({'EXIT', {aborted, {bad_type, Tab, _}}}, mnesia:dirty_delete_object(Tab, {Tab, {['$5']}, 21})),
+
+ ?verify_mnesia(Nodes, []).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Read matching records dirty
+
+dirty_match_object(suite) ->
+ [
+ dirty_match_object_ram,
+ dirty_match_object_disc,
+ dirty_match_object_disc_only
+ ].
+
+dirty_match_object_ram(suite) -> [];
+dirty_match_object_ram(Config) when is_list(Config) ->
+ dirty_match_object(Config, ram_copies).
+
+dirty_match_object_disc(suite) -> [];
+dirty_match_object_disc(Config) when is_list(Config) ->
+ dirty_match_object(Config, disc_copies).
+
+dirty_match_object_disc_only(suite) -> [];
+dirty_match_object_disc_only(Config) when is_list(Config) ->
+ dirty_match_object(Config, disc_only_copies).
+
+dirty_match_object(Config, Storage) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = dirty_match,
+ Def = [{attributes, [k, v]}, {Storage, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+
+ OneRec = {Tab, 1, 2},
+ OnePat = {Tab, '$1', 2},
+ ?match([], mnesia:dirty_match_object(OnePat)),
+ ?match(ok, mnesia:dirty_write(OneRec)),
+ ?match([OneRec], mnesia:dirty_match_object(OnePat)),
+ ?match({atomic, [OneRec]}, mnesia:transaction(fun() ->
+ mnesia:dirty_match_object(OnePat) end)),
+
+ ?match({'EXIT', _}, mnesia:dirty_match_object({foo, '$1', 2})),
+ ?match({'EXIT', _}, mnesia:dirty_match_object({[], '$1', 2})),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+dirty_index(suite) ->
+ [
+ dirty_index_match_object,
+ dirty_index_read,
+ dirty_index_update
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Dirty read matching records by using an index
+
+dirty_index_match_object(suite) ->
+ [
+ dirty_index_match_object_ram,
+ dirty_index_match_object_disc,
+ dirty_index_match_object_disc_only
+ ].
+
+dirty_index_match_object_ram(suite) -> [];
+dirty_index_match_object_ram(Config) when is_list(Config) ->
+ dirty_index_match_object(Config, ram_copies).
+
+dirty_index_match_object_disc(suite) -> [];
+dirty_index_match_object_disc(Config) when is_list(Config) ->
+ dirty_index_match_object(Config, disc_copies).
+
+dirty_index_match_object_disc_only(suite) -> [];
+dirty_index_match_object_disc_only(Config) when is_list(Config) ->
+ dirty_index_match_object(Config, disc_only_copies).
+
+dirty_index_match_object(Config, Storage) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = dirty_index_match_object,
+ ValPos = 3,
+ BadValPos = ValPos + 1,
+ Def = [{attributes, [k, v]}, {Storage, [Node1]}, {index, [ValPos]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+
+ ?match([], mnesia:dirty_index_match_object({Tab, '$1', 2}, ValPos)),
+ OneRec = {Tab, 1, 2},
+ ?match(ok, mnesia:dirty_write(OneRec)),
+
+ ?match([OneRec], mnesia:dirty_index_match_object({Tab, '$1', 2}, ValPos)),
+ ?match({'EXIT', _}, mnesia:dirty_index_match_object({Tab, '$1', 2}, BadValPos)),
+ ?match({'EXIT', _}, mnesia:dirty_index_match_object({foo, '$1', 2}, ValPos)),
+ ?match({'EXIT', _}, mnesia:dirty_index_match_object({[], '$1', 2}, ValPos)),
+ ?match({atomic, [OneRec]}, mnesia:transaction(fun() ->
+ mnesia:dirty_index_match_object({Tab, '$1', 2}, ValPos) end)),
+
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Read records by using an index
+
+dirty_index_read(suite) ->
+ [
+ dirty_index_read_ram,
+ dirty_index_read_disc,
+ dirty_index_read_disc_only
+ ].
+
+dirty_index_read_ram(suite) -> [];
+dirty_index_read_ram(Config) when is_list(Config) ->
+ dirty_index_read(Config, ram_copies).
+
+dirty_index_read_disc(suite) -> [];
+dirty_index_read_disc(Config) when is_list(Config) ->
+ dirty_index_read(Config, disc_copies).
+
+dirty_index_read_disc_only(suite) -> [];
+dirty_index_read_disc_only(Config) when is_list(Config) ->
+ dirty_index_read(Config, disc_only_copies).
+
+dirty_index_read(Config, Storage) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = dirty_index_read,
+ ValPos = 3,
+ BadValPos = ValPos + 1,
+ Def = [{type, set},
+ {attributes, [k, v]},
+ {Storage, [Node1]},
+ {index, [ValPos]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+
+ OneRec = {Tab, 1, 2},
+ ?match([], mnesia:dirty_index_read(Tab, 2, ValPos)),
+ ?match(ok, mnesia:dirty_write(OneRec)),
+ ?match([OneRec], mnesia:dirty_index_read(Tab, 2, ValPos)),
+ ?match({atomic, [OneRec]},
+ mnesia:transaction(fun() -> mnesia:dirty_index_read(Tab, 2, ValPos) end)),
+ ?match(42, mnesia:dirty_update_counter({Tab, 1}, 40)),
+ ?match([{Tab,1,42}], mnesia:dirty_read({Tab, 1})),
+ ?match([], mnesia:dirty_index_read(Tab, 2, ValPos)),
+ ?match([{Tab, 1, 42}], mnesia:dirty_index_read(Tab, 42, ValPos)),
+
+ ?match({'EXIT', _}, mnesia:dirty_index_read(Tab, 2, BadValPos)),
+ ?match({'EXIT', _}, mnesia:dirty_index_read(foo, 2, ValPos)),
+ ?match({'EXIT', _}, mnesia:dirty_index_read([], 2, ValPos)),
+
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+dirty_index_update(suite) ->
+ [
+ dirty_index_update_set_ram,
+ dirty_index_update_set_disc,
+ dirty_index_update_set_disc_only,
+ dirty_index_update_bag_ram,
+ dirty_index_update_bag_disc,
+ dirty_index_update_bag_disc_only
+ ];
+dirty_index_update(doc) ->
+ ["See Ticket OTP-2083, verifies that a table with a index is "
+ "update in the correct way i.e. the index finds the correct "
+ "records after a update"].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+dirty_index_update_set_ram(suite) -> [];
+dirty_index_update_set_ram(Config) when is_list(Config) ->
+ dirty_index_update_set(Config, ram_copies).
+
+dirty_index_update_set_disc(suite) -> [];
+dirty_index_update_set_disc(Config) when is_list(Config) ->
+ dirty_index_update_set(Config, disc_copies).
+
+dirty_index_update_set_disc_only(suite) -> [];
+dirty_index_update_set_disc_only(Config) when is_list(Config) ->
+ dirty_index_update_set(Config, disc_only_copies).
+
+dirty_index_update_set(Config, Storage) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = index_test,
+ ValPos = v1,
+ ValPos2 = v3,
+ Def = [{attributes, [k, v1, v2, v3]},
+ {Storage, [Node1]},
+ {index, [ValPos]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+
+ Pat1 = {Tab, '$1', 2, '$2', '$3'},
+ Pat2 = {Tab, '$1', '$2', '$3', '$4'},
+
+ Rec1 = {Tab, 1, 2, 3, 4},
+ Rec2 = {Tab, 2, 2, 13, 14},
+ Rec3 = {Tab, 1, 12, 13, 14},
+ Rec4 = {Tab, 4, 2, 13, 14},
+
+ ?match([], mnesia:dirty_index_read(Tab, 2, ValPos)),
+ ?match(ok, mnesia:dirty_write(Rec1)),
+ ?match([Rec1], mnesia:dirty_index_read(Tab, 2, ValPos)),
+
+ ?match(ok, mnesia:dirty_write(Rec2)),
+ R1 = mnesia:dirty_index_read(Tab, 2, ValPos),
+ ?match([Rec1, Rec2], lists:sort(R1)),
+
+ ?match(ok, mnesia:dirty_write(Rec3)),
+ R2 = mnesia:dirty_index_read(Tab, 2, ValPos),
+ ?match([Rec2], lists:sort(R2)),
+ ?match([Rec2], mnesia:dirty_index_match_object(Pat1, ValPos)),
+
+ {atomic, R3} = mnesia:transaction(fun() -> mnesia:match_object(Pat2) end),
+ ?match([Rec3, Rec2], lists:sort(R3)),
+
+ ?match(ok, mnesia:dirty_write(Rec4)),
+ R4 = mnesia:dirty_index_read(Tab, 2, ValPos),
+ ?match([Rec2, Rec4], lists:sort(R4)),
+
+ ?match(ok, mnesia:dirty_delete({Tab, 4})),
+ ?match([Rec2], mnesia:dirty_index_read(Tab, 2, ValPos)),
+
+ ?match({atomic, ok}, mnesia:del_table_index(Tab, ValPos)),
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write(Rec4) end)),
+ ?match({atomic, ok}, mnesia:add_table_index(Tab, ValPos)),
+ ?match({atomic, ok}, mnesia:add_table_index(Tab, ValPos2)),
+
+ R5 = mnesia:dirty_match_object(Pat2),
+ ?match([Rec3, Rec2, Rec4], lists:sort(R5)),
+
+ R6 = mnesia:dirty_index_read(Tab, 2, ValPos),
+ ?match([Rec2, Rec4], lists:sort(R6)),
+ ?match([], mnesia:dirty_index_read(Tab, 4, ValPos2)),
+ R7 = mnesia:dirty_index_read(Tab, 14, ValPos2),
+ ?match([Rec3, Rec2, Rec4], lists:sort(R7)),
+
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write(Rec1) end)),
+ R8 = mnesia:dirty_index_read(Tab, 2, ValPos),
+ ?match([Rec1, Rec2, Rec4], lists:sort(R8)),
+ ?match([Rec1], mnesia:dirty_index_read(Tab, 4, ValPos2)),
+ R9 = mnesia:dirty_index_read(Tab, 14, ValPos2),
+ ?match([Rec2, Rec4], lists:sort(R9)),
+
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:delete_object(Rec2) end)),
+ R10 = mnesia:dirty_index_read(Tab, 2, ValPos),
+ ?match([Rec1, Rec4], lists:sort(R10)),
+ ?match([Rec1], mnesia:dirty_index_read(Tab, 4, ValPos2)),
+ ?match([Rec4], mnesia:dirty_index_read(Tab, 14, ValPos2)),
+
+ ?match(ok, mnesia:dirty_delete({Tab, 4})),
+ R11 = mnesia:dirty_index_read(Tab, 2, ValPos),
+ ?match([Rec1], lists:sort(R11)),
+ ?match([Rec1], mnesia:dirty_index_read(Tab, 4, ValPos2)),
+ ?match([], mnesia:dirty_index_read(Tab, 14, ValPos2)),
+
+ ?verify_mnesia(Nodes, []).
+
+dirty_index_update_bag_ram(suite) -> [];
+dirty_index_update_bag_ram(Config)when is_list(Config) ->
+ dirty_index_update_bag(Config, ram_copies).
+
+dirty_index_update_bag_disc(suite) -> [];
+dirty_index_update_bag_disc(Config)when is_list(Config) ->
+ dirty_index_update_bag(Config, disc_copies).
+
+dirty_index_update_bag_disc_only(suite) -> [];
+dirty_index_update_bag_disc_only(Config)when is_list(Config) ->
+ dirty_index_update_bag(Config, disc_only_copies).
+
+dirty_index_update_bag(Config, Storage) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = index_test,
+ ValPos = v1,
+ ValPos2 = v3,
+ Def = [{type, bag},
+ {attributes, [k, v1, v2, v3]},
+ {Storage, [Node1]},
+ {index, [ValPos]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+
+ Pat1 = {Tab, '$1', 2, '$2', '$3'},
+ Pat2 = {Tab, '$1', '$2', '$3', '$4'},
+
+ Rec1 = {Tab, 1, 2, 3, 4},
+ Rec2 = {Tab, 2, 2, 13, 14},
+ Rec3 = {Tab, 1, 12, 13, 14},
+ Rec4 = {Tab, 4, 2, 13, 4},
+ Rec5 = {Tab, 1, 2, 234, 14},
+
+ %% Simple Index
+ ?match([], mnesia:dirty_index_read(Tab, 2, ValPos)),
+ ?match(ok, mnesia:dirty_write(Rec1)),
+ ?match([Rec1], mnesia:dirty_index_read(Tab, 2, ValPos)),
+
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write(Rec2) end)),
+ R1 = mnesia:dirty_index_read(Tab, 2, ValPos),
+ ?match([Rec1, Rec2], lists:sort(R1)),
+
+ ?match(ok, mnesia:dirty_write(Rec3)),
+ R2 = mnesia:dirty_index_read(Tab, 2, ValPos),
+ ?match([Rec1, Rec2], lists:sort(R2)),
+
+ R3 = mnesia:dirty_index_match_object(Pat1, ValPos),
+ ?match([Rec1, Rec2], lists:sort(R3)),
+
+ R4 = mnesia:dirty_match_object(Pat2),
+ ?match([Rec1, Rec3, Rec2], lists:sort(R4)),
+
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write(Rec4) end)),
+ R5 = mnesia:dirty_index_read(Tab, 2, ValPos),
+ ?match([Rec1, Rec2, Rec4], lists:sort(R5)),
+
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:delete({Tab, 4}) end)),
+ R6 = mnesia:dirty_index_read(Tab, 2, ValPos),
+ ?match([Rec1, Rec2], lists:sort(R6)),
+
+ ?match(ok, mnesia:dirty_delete_object(Rec1)),
+ ?match([Rec2], mnesia:dirty_index_read(Tab, 2, ValPos)),
+ R7 = mnesia:dirty_match_object(Pat2),
+ ?match([Rec3, Rec2], lists:sort(R7)),
+
+    %% Two indexes
+ ?match({atomic, ok}, mnesia:del_table_index(Tab, ValPos)),
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write(Rec1) end)),
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write(Rec4) end)),
+ ?match({atomic, ok}, mnesia:add_table_index(Tab, ValPos)),
+ ?match({atomic, ok}, mnesia:add_table_index(Tab, ValPos2)),
+
+ R8 = mnesia:dirty_index_read(Tab, 2, ValPos),
+ ?match([Rec1, Rec2, Rec4], lists:sort(R8)),
+
+ R9 = mnesia:dirty_index_read(Tab, 4, ValPos2),
+ ?match([Rec1, Rec4], lists:sort(R9)),
+ R10 = mnesia:dirty_index_read(Tab, 14, ValPos2),
+ ?match([Rec3, Rec2], lists:sort(R10)),
+
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write(Rec5) end)),
+ R11 = mnesia:dirty_index_read(Tab, 2, ValPos),
+ ?match([Rec1, Rec5, Rec2, Rec4], lists:sort(R11)),
+ R12 = mnesia:dirty_index_read(Tab, 4, ValPos2),
+ ?match([Rec1, Rec4], lists:sort(R12)),
+ R13 = mnesia:dirty_index_read(Tab, 14, ValPos2),
+ ?match([Rec5, Rec3, Rec2], lists:sort(R13)),
+
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:delete_object(Rec1) end)),
+ R14 = mnesia:dirty_index_read(Tab, 2, ValPos),
+ ?match([Rec5, Rec2, Rec4], lists:sort(R14)),
+ ?match([Rec4], mnesia:dirty_index_read(Tab, 4, ValPos2)),
+ R15 = mnesia:dirty_index_read(Tab, 14, ValPos2),
+ ?match([Rec5, Rec3, Rec2], lists:sort(R15)),
+
+ ?match(ok, mnesia:dirty_delete_object(Rec5)),
+ R16 = mnesia:dirty_index_read(Tab, 2, ValPos),
+ ?match([Rec2, Rec4], lists:sort(R16)),
+ ?match([Rec4], mnesia:dirty_index_read(Tab, 4, ValPos2)),
+ R17 = mnesia:dirty_index_read(Tab, 14, ValPos2),
+ ?match([Rec3, Rec2], lists:sort(R17)),
+
+ ?match(ok, mnesia:dirty_write(Rec1)),
+ ?match(ok, mnesia:dirty_delete({Tab, 1})),
+ R18 = mnesia:dirty_index_read(Tab, 2, ValPos),
+ ?match([Rec2, Rec4], lists:sort(R18)),
+ ?match([Rec4], mnesia:dirty_index_read(Tab, 4, ValPos2)),
+ R19 = mnesia:dirty_index_read(Tab, 14, ValPos2),
+ ?match([Rec2], lists:sort(R19)),
+
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Dirty iteration
+%% dirty_slot, dirty_first, dirty_next
+
+dirty_iter(suite) ->
+ [
+ dirty_iter_ram,
+ dirty_iter_disc,
+ dirty_iter_disc_only
+ ].
+
+dirty_iter_ram(suite) -> [];
+dirty_iter_ram(Config) when is_list(Config) ->
+ dirty_iter(Config, ram_copies).
+
+dirty_iter_disc(suite) -> [];
+dirty_iter_disc(Config) when is_list(Config) ->
+ dirty_iter(Config, disc_copies).
+
+dirty_iter_disc_only(suite) -> [];
+dirty_iter_disc_only(Config) when is_list(Config) ->
+ dirty_iter(Config, disc_only_copies).
+
+dirty_iter(Config, Storage) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = dirty_iter,
+ Def = [{type, bag}, {attributes, [k, v]}, {Storage, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+
+ ?match([], all_slots(Tab)),
+ ?match([], all_nexts(Tab)),
+
+ Keys = lists:seq(1, 5),
+ Records = [{Tab, A, B} || A <- Keys, B <- lists:seq(1, 2)],
+ lists:foreach(fun(Rec) -> ?match(ok, mnesia:dirty_write(Rec)) end, Records),
+
+ SortedRecords = lists:sort(Records),
+ ?match(SortedRecords, lists:sort(all_slots(Tab))),
+ ?match(Keys, lists:sort(all_nexts(Tab))),
+
+ ?match({'EXIT', _}, mnesia:dirty_first(foo)),
+ ?match({'EXIT', _}, mnesia:dirty_next(foo, foo)),
+ ?match({'EXIT', _}, mnesia:dirty_slot(foo, 0)),
+ ?match({'EXIT', _}, mnesia:dirty_slot(foo, [])),
+ ?match({atomic, Keys},
+ mnesia:transaction(fun() -> lists:sort(all_nexts(Tab)) end)),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% Returns a list of all records in the table
+all_slots(Tab) ->
+ all_slots(Tab, [], 0).
+
+all_slots(_Tab, '$end_of_table', _) ->
+ [];
+all_slots(Tab, PrevRecords, PrevSlot) ->
+ Records = mnesia:dirty_slot(Tab, PrevSlot),
+ PrevRecords ++ all_slots(Tab, Records, PrevSlot + 1).
+
+%% Returns a list of all keys in table
+
+all_nexts(Tab) ->
+ FirstKey = mnesia:dirty_first(Tab),
+ all_nexts(Tab, FirstKey).
+
+all_nexts(_Tab, '$end_of_table') ->
+ [];
+all_nexts(Tab, PrevKey) ->
+ Key = mnesia:dirty_next(Tab, PrevKey),
+ [PrevKey] ++ all_nexts(Tab, Key).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+admin_tests(doc) ->
+ ["Verifies that dirty operations work during schema operations"];
+
+admin_tests(suite) ->
+ [del_table_copy_1,
+ del_table_copy_2,
+ del_table_copy_3,
+ add_table_copy_1,
+ add_table_copy_2,
+ add_table_copy_3,
+ add_table_copy_4,
+ move_table_copy_1,
+ move_table_copy_2,
+ move_table_copy_3,
+ move_table_copy_4].
+
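+%% Worker spawned on each node by the admin tests. It loops with
+%% mnesia:sync_dirty, reading {Tab, Key} and incrementing the counter
+%% field, until it receives {Pid, quit}; it then replies with the number
+%% of successful updates.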
+update_trans(Tab, Key, Acc) ->
+ Update =
+ fun() ->
+ Res = (catch mnesia:read({Tab, Key})),
+ case Res of
+ [{Tab, Key, Extra, Acc}] ->
+ mnesia:write({Tab,Key,Extra, Acc+1});
+ Val ->
+ {read, Val, {acc, Acc}}
+ end
+ end,
+ receive
+ {Pid, quit} -> Pid ! {self(), Acc}
+ after
+ 3 ->
+ case catch mnesia:sync_dirty(Update) of
+ ok ->
+ update_trans(Tab, Key, Acc+1);
+ Else ->
+ ?error("Dirty Operation failed on ~p (update no ~p) with ~p~n"
+ "Info w2read ~p w2write ~p w2commit ~p storage ~p ~n",
+ [node(),
+ Acc,
+ Else,
+ mnesia:table_info(Tab, where_to_read),
+ mnesia:table_info(Tab, where_to_write),
+ mnesia:table_info(Tab, where_to_commit),
+ mnesia:table_info(Tab, storage_type)])
+ end
+ end.
+
+del_table_copy_1(suite) -> [];
+del_table_copy_1(Config) when is_list(Config) ->
+ [_Node1, Node2, _Node3] = Nodes = ?acquire_nodes(3, Config),
+ del_table(Node2, Node2, Nodes). %Called on same Node as deleted
+del_table_copy_2(suite) -> [];
+del_table_copy_2(Config) when is_list(Config) ->
+ [Node1, Node2, _Node3] = Nodes = ?acquire_nodes(3, Config),
+ del_table(Node1, Node2, Nodes). %Called from other Node
+del_table_copy_3(suite) -> [];
+del_table_copy_3(Config) when is_list(Config) ->
+ [_Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+    del_table(Node3, Node2, Nodes). %Called from a node without the table
+
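+%% Spawn one updater per node, delete the table copy from DelNode while
+%% the updaters keep writing, then stop the updaters and verify that all
+%% nodes agree on the final counter values.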
+del_table(CallFrom, DelNode, [Node1, Node2, Node3]) ->
+ Tab = schema_ops,
+ Def = [{disc_only_copies, [Node1]}, {ram_copies, [Node2]},
+ {attributes, [key, attr1, attr2]}],
+ ?log("Test case removing table from ~w, with ~w~n", [DelNode, Def]),
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ insert(Tab, 1000),
+
+ Pid1 = spawn_link(Node1, ?MODULE, update_trans, [Tab, 1, 0]),
+ Pid2 = spawn_link(Node2, ?MODULE, update_trans, [Tab, 2, 0]),
+ Pid3 = spawn_link(Node3, ?MODULE, update_trans, [Tab, 3, 0]),
+
+
+ dbg:tracer(process, {fun(Msg,_) -> tracer(Msg) end, void}),
+ %% dbg:n(Node2),
+ %% dbg:n(Node3),
+ %% dbg:tp('_', []),
+ %% dbg:tpl(dets, [timestamp]),
+ dbg:p(Pid1, [m,c,timestamp]),
+
+ ?match({atomic, ok},
+ rpc:call(CallFrom, mnesia, del_table_copy, [Tab, DelNode])),
+
+ Pid1 ! {self(), quit}, R1 =
+ receive {Pid1, Res1} -> Res1
+ after
+ 5000 -> io:format("~p~n",[process_info(Pid1)]),error
+ end,
+ Pid2 ! {self(), quit}, R2 =
+ receive {Pid2, Res2} -> Res2
+ after
+ 5000 -> error
+ end,
+ Pid3 ! {self(), quit}, R3 =
+ receive {Pid3, Res3} -> Res3
+ after
+ 5000 -> error
+ end,
+ verify_oids(Tab, Node1, Node2, Node3, R1, R2, R3),
+ ?verify_mnesia([Node1, Node2, Node3], []).
+
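+%% Print the dbg trace messages (with their timestamps) produced by the
+%% updater that is traced above.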
+tracer({trace_ts, _, send, Msg, Pid, {_,S,Ms}}) ->
+ io:format("~p:~p ~p >> ~w ~n",[S,Ms,Pid,Msg]);
+tracer({trace_ts, _, 'receive', Msg, {_,S,Ms}}) ->
+ io:format("~p:~p << ~w ~n",[S,Ms,Msg]);
+
+
+tracer(Msg) ->
+ io:format("UMsg ~p ~n",[Msg]),
+ ok.
+
+
+
+add_table_copy_1(suite) -> [];
+add_table_copy_1(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Def = [{ram_copies, [Node1, Node2]},
+ {attributes, [key, attr1, attr2]}],
+ add_table(Node1, Node3, Nodes, Def).
+%% Not much different from case 1, but there is a suspicion of a bug;
+%% it should behave exactly the same, we are just checking the internal ordering
+add_table_copy_2(suite) -> [];
+add_table_copy_2(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Def = [{ram_copies, [Node1, Node2]},
+ {attributes, [key, attr1, attr2]}],
+ add_table(Node2, Node3, Nodes, Def).
+add_table_copy_3(suite) -> [];
+add_table_copy_3(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Def = [{ram_copies, [Node1, Node2]},
+ {attributes, [key, attr1, attr2]}],
+ add_table(Node3, Node3, Nodes, Def).
+add_table_copy_4(suite) -> [];
+add_table_copy_4(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Def = [{disc_only_copies, [Node1]},
+ {attributes, [key, attr1, attr2]}],
+ add_table(Node2, Node3, Nodes, Def).
+
+add_table(CallFrom, AddNode, [Node1, Node2, Node3], Def) ->
+ ?log("Test case adding table at ~w, with ~w~n", [AddNode, Def]),
+ Tab = schema_ops,
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ insert(Tab, 1002),
+
+ Pid1 = spawn_link(Node1, ?MODULE, update_trans, [Tab, 1, 0]),
+ Pid2 = spawn_link(Node2, ?MODULE, update_trans, [Tab, 2, 0]),
+ Pid3 = spawn_link(Node3, ?MODULE, update_trans, [Tab, 3, 0]),
+
+ ?match({atomic, ok}, rpc:call(CallFrom, mnesia, add_table_copy,
+ [Tab, AddNode, ram_copies])),
+ Pid1 ! {self(), quit}, R1 = receive {Pid1, Res1} -> Res1 after 5000 -> error end,
+ Pid2 ! {self(), quit}, R2 = receive {Pid2, Res2} -> Res2 after 5000 -> error end,
+ Pid3 ! {self(), quit}, R3 = receive {Pid3, Res3} -> Res3 after 5000 -> error end,
+ verify_oids(Tab, Node1, Node2, Node3, R1, R2, R3),
+ ?verify_mnesia([Node1, Node2, Node3], []).
+
+move_table_copy_1(suite) -> [];
+move_table_copy_1(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Def = [{ram_copies, [Node1, Node2]},
+ {attributes, [key, attr1, attr2]}],
+ move_table(Node1, Node1, Node3, Nodes, Def).
+move_table_copy_2(suite) -> [];
+move_table_copy_2(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Def = [{ram_copies, [Node1, Node2]},
+ {attributes, [key, attr1, attr2]}],
+ move_table(Node2, Node1, Node3, Nodes, Def).
+move_table_copy_3(suite) -> [];
+move_table_copy_3(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Def = [{ram_copies, [Node1, Node2]},
+ {attributes, [key, attr1, attr2]}],
+ move_table(Node3, Node1, Node3, Nodes, Def).
+move_table_copy_4(suite) -> [];
+move_table_copy_4(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Def = [{ram_copies, [Node1]},
+ {attributes, [key, attr1, attr2]}],
+ move_table(Node2, Node1, Node3, Nodes, Def).
+
+move_table(CallFrom, FromNode, ToNode, [Node1, Node2, Node3], Def) ->
+ ?log("Test case move table from ~w to ~w, with ~w~n", [FromNode, ToNode, Def]),
+ Tab = schema_ops,
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ insert(Tab, 1002),
+
+ Pid1 = spawn_link(Node1, ?MODULE, update_trans, [Tab, 1, 0]),
+ Pid2 = spawn_link(Node2, ?MODULE, update_trans, [Tab, 2, 0]),
+ Pid3 = spawn_link(Node3, ?MODULE, update_trans, [Tab, 3, 0]),
+
+ ?match({atomic, ok}, rpc:call(CallFrom, mnesia, move_table_copy,
+ [Tab, FromNode, ToNode])),
+ Pid1 ! {self(), quit},
+ R1 = receive {Pid1, Res1} -> Res1 after 5000 -> ?error("timeout pid1~n", []) end,
+ Pid2 ! {self(), quit},
+ R2 = receive {Pid2, Res2} -> Res2 after 5000 -> ?error("timeout pid2~n", []) end,
+ Pid3 ! {self(), quit},
+ R3 = receive {Pid3, Res3} -> Res3 after 5000 -> ?error("timeout pid3~n", []) end,
+ verify_oids(Tab, Node1, Node2, Node3, R1, R2, R3),
+ ?verify_mnesia([Node1, Node2, Node3], []).
+
+% Verify consistency between the different nodes
+% Due to limitations in the current dirty_ops this can go wrong from time to time!
+verify_oids(Tab, N1, N2, N3, R1, R2, R3) ->
+ io:format("DEBUG 1=>~p 2=>~p 3=>~p~n", [R1,R2,R3]),
+ ?match([{_, _, _, R1}], rpc:call(N1, mnesia, dirty_read, [{Tab, 1}])),
+ ?match([{_, _, _, R1}], rpc:call(N2, mnesia, dirty_read, [{Tab, 1}])),
+ ?match([{_, _, _, R1}], rpc:call(N3, mnesia, dirty_read, [{Tab, 1}])),
+ ?match([{_, _, _, R2}], rpc:call(N1, mnesia, dirty_read, [{Tab, 2}])),
+ ?match([{_, _, _, R2}], rpc:call(N2, mnesia, dirty_read, [{Tab, 2}])),
+ ?match([{_, _, _, R2}], rpc:call(N3, mnesia, dirty_read, [{Tab, 2}])),
+ ?match([{_, _, _, R3}], rpc:call(N1, mnesia, dirty_read, [{Tab, 3}])),
+ ?match([{_, _, _, R3}], rpc:call(N2, mnesia, dirty_read, [{Tab, 3}])),
+ ?match([{_, _, _, R3}], rpc:call(N3, mnesia, dirty_read, [{Tab, 3}])).
+
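+%% Populate Tab with keys N..1, one record {Tab, Key, Key, 0} per key; the
+%% is_transaction() match asserts that sync_dirty runs outside a transaction.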
+insert(_Tab, 0) -> ok;
+insert(Tab, N) when N > 0 ->
+ ok = mnesia:sync_dirty(fun() -> false = mnesia:is_transaction(), mnesia:write({Tab, N, N, 0}) end),
+ insert(Tab, N-1).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
diff --git a/lib/mnesia/test/mnesia_durability_test.erl b/lib/mnesia/test/mnesia_durability_test.erl
new file mode 100644
index 0000000000..b917b0ca40
--- /dev/null
+++ b/lib/mnesia/test/mnesia_durability_test.erl
@@ -0,0 +1,1470 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(mnesia_durability_test).
+-author('[email protected]').
+-compile([export_all]).
+-include("mnesia_test_lib.hrl").
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+init_per_testcase(Func, Conf) ->
+ mnesia_test_lib:init_per_testcase(Func, Conf).
+
+fin_per_testcase(Func, Conf) ->
+ mnesia_test_lib:fin_per_testcase(Func, Conf).
+
+-record(test_rec,{key,val}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+all(doc) ->
+ ["Verify durability",
+ "Verify that the effects of committed transactions are durable.",
+     "The content of the tables must be restored at startup."];
+all(suite) ->
+ [
+ load_tables,
+ durability_of_dump_tables,
+ durability_of_disc_copies,
+ durability_of_disc_only_copies
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+load_tables(doc) ->
+ ["Try to provoke all kinds of table load scenarios."];
+load_tables(suite) ->
+ [
+ load_latest_data,
+ load_local_contents_directly,
+ load_directly_when_all_are_ram_copiesA,
+ load_directly_when_all_are_ram_copiesB,
+ late_load_when_all_are_ram_copies_on_ram_nodes,
+ load_when_last_replica_becomes_available,
+ load_when_we_have_down_from_all_other_replica_nodes,
+ late_load_transforms_into_disc_load,
+ late_load_leads_to_hanging,
+ force_load_when_nobody_intents_to_load,
+ force_load_when_someone_has_decided_to_load,
+ force_load_when_someone_else_already_has_loaded,
+ force_load_when_we_has_loaded,
+ force_load_on_a_non_local_table,
+ force_load_when_the_table_does_not_exist,
+ load_tables_with_master_tables
+ ].
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+load_latest_data(doc) ->
+ ["Base functionality, verify that the latest data is loaded"];
+load_latest_data(suite) -> [];
+load_latest_data(Config) when is_list(Config) ->
+ [N1,N2,N3] = Nodes = ?acquire_nodes(3, Config),
+ %%Create a replicated local table
+ ?match({atomic,ok}, mnesia:create_table(t0, [{disc_copies,[N1,N2]}])),
+ ?match({atomic,ok}, mnesia:create_table(t1, [{disc_copies,[N1,N2]}])),
+ ?match({atomic,ok}, mnesia:create_table(t2, [{disc_copies,[N1,N2]}])),
+ ?match({atomic,ok}, mnesia:create_table(t3, [{disc_copies,[N1,N2]}])),
+ ?match({atomic,ok}, mnesia:create_table(t4, [{disc_copies,[N1,N2]}])),
+ ?match({atomic,ok}, mnesia:create_table(t5, [{disc_copies,[N1,N2]}])),
+ Rec1 = {t1, test, ok},
+ Rec2 = {t1, test, 2},
+
+ ?match([], mnesia_test_lib:kill_mnesia([N1])),
+ ?match(ok, rpc:call(N2, mnesia, dirty_write, [Rec2])),
+ ?match([], mnesia_test_lib:kill_mnesia([N2])),
+ ?match([], mnesia_test_lib:kill_mnesia([N3])),
+
+ ?match([], mnesia_test_lib:start_mnesia([N1], [])),
+ %% Should wait for N2
+ ?match({timeout, [t1]}, rpc:call(N1, mnesia, wait_for_tables, [[t1], 3000])),
+ ?match([], mnesia_test_lib:start_mnesia([N3], [])),
+ ?match({timeout, [t1]}, rpc:call(N1, mnesia, wait_for_tables, [[t1], 3000])),
+
+
+ ?match([], mnesia_test_lib:start_mnesia([N2], [])),
+ ?match(ok, rpc:call(N2, mnesia, wait_for_tables, [[t1], 3000])),
+ ?match(ok, rpc:call(N1, mnesia, wait_for_tables, [[t1], 3000])),
+ %% We should find the record
+ ?match([Rec2], rpc:call(N1, mnesia, dirty_read, [t1, test])),
+ ?match([Rec2], rpc:call(N2, mnesia, dirty_read, [t1, test])),
+
+ %% ok, lets switch order
+ ?match(ok, mnesia:dirty_delete_object(Rec1)),
+ ?match(ok, mnesia:dirty_delete_object(Rec2)),
+ %% redo
+
+ ?match([], mnesia_test_lib:kill_mnesia([N2])),
+ ?match(ok, mnesia:dirty_write(Rec1)),
+ ?match([], mnesia_test_lib:kill_mnesia([N1])),
+ ?match([], mnesia_test_lib:kill_mnesia([N3])),
+
+ ?match([], mnesia_test_lib:start_mnesia([N2], [])),
+ %% Should wait for N1
+ ?match({timeout, [t1]}, rpc:call(N2, mnesia, wait_for_tables, [[t1], 2000])),
+ ?match([], mnesia_test_lib:start_mnesia([N3], [])),
+ ?match({timeout, [t1]}, rpc:call(N2, mnesia, wait_for_tables, [[t1], 2000])),
+ ?match([], mnesia_test_lib:start_mnesia([N1], [])),
+ ?match(ok, rpc:call(N2, mnesia, wait_for_tables, [[t1], 1000])),
+ ?match(ok, rpc:call(N1, mnesia, wait_for_tables, [[t1], 1000])),
+ %% We should find the record
+ ?match([Rec1], rpc:call(N1, mnesia, dirty_read, [t1, test])),
+ ?match([Rec1], rpc:call(N2, mnesia, dirty_read, [t1, test])),
+
+ ?verify_mnesia(Nodes, []).
+
+
+load_local_contents_directly(doc) ->
+ ["Local contents shall always be loaded. Check this by having a local ",
+     "table on two nodes N1, N2, stopping N1 before N2, and then verifying ",
+ "that N1 can start without N2 being started."];
+load_local_contents_directly(suite) -> [];
+load_local_contents_directly(Config) when is_list(Config) ->
+ [N1, N2] = Nodes = ?acquire_nodes(2, Config),
+ %%Create a replicated local table
+ ?match({atomic,ok},
+ mnesia:create_table(test_rec,
+ [{local_content,true},
+ {disc_copies,Nodes},
+ {attributes,record_info(fields,test_rec)}]
+ ) ),
+ %%Verify that it has local contents.
+ ?match( true, mnesia:table_info(test_rec,local_content) ),
+ %%Helper Funs
+ Write_one = fun(Value) -> mnesia:write(#test_rec{key=1,val=Value}) end,
+ Read_one = fun(Key) -> mnesia:read( {test_rec, Key}) end,
+    %%Write a value on N1 that we may test against later
+ ?match({atomic,ok},
+ rpc:call( N1, mnesia, transaction, [Write_one,[11]] ) ),
+ %%Stop Mnesia on N1
+ %?match([], mnesia_test_lib:stop_mnesia([N1])),
+ ?match([], mnesia_test_lib:kill_mnesia([N1])),
+
+ %%Write a value on N2, same key but a different value
+ ?match({atomic,ok},
+ rpc:call( N2, mnesia, transaction, [Write_one,[22]] ) ),
+ %%Stop Mnesia on N2
+ %?match([], mnesia_test_lib:stop_mnesia([N2])),
+ ?match([], mnesia_test_lib:kill_mnesia([N2])),
+
+ %%Restart Mnesia on N1 verify that we can read from it without
+ %%starting Mnesia on N2.
+ ?match(ok, rpc:call(N1, mnesia, start, [])),
+ ?match(ok, rpc:call(N1, mnesia, wait_for_tables, [[test_rec], 30000])),
+ %%Read back the value
+ ?match( {atomic,[#test_rec{key=1,val=11}]},
+ rpc:call(N1, mnesia, transaction, [Read_one,[1]] ) ),
+ %%Restart Mnesia on N2 and verify the contents there.
+ ?match(ok, rpc:call(N2, mnesia, start, [])),
+ ?match(ok, rpc:call(N2, mnesia, wait_for_tables, [[test_rec], 30000])),
+ ?match( {atomic,[#test_rec{key=1,val=22}]},
+ rpc:call(N2, mnesia, transaction, [Read_one,[1]] ) ),
+    %%Check that the start of Mnesia on N2 did not affect the contents on N1
+ ?match( {atomic,[#test_rec{key=1,val=11}]},
+ rpc:call(N1, mnesia, transaction, [Read_one,[1]] ) ),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+load_directly_when_all_are_ram_copiesA(doc) ->
+ ["Tables that are RAM copies only shall also be loaded directly. ",
+     "1. N1 and N2 have RAM copies of a table; stop N1 before N2. ",
+     "2. When N1 starts, it shall have access to the table ",
+ " without having to start N2" ];
+load_directly_when_all_are_ram_copiesA(suite) -> [];
+load_directly_when_all_are_ram_copiesA(Config) when is_list(Config) ->
+ [N1, N2] = Nodes = ?acquire_nodes(2, Config),
+
+ ?match({atomic,ok},
+ mnesia:create_table(test_rec,
+ [{ram_copies,Nodes},
+ {attributes,record_info(fields,test_rec)}]
+ ) ),
+ ?match( Nodes, mnesia:table_info(test_rec,ram_copies) ),
+ ?match( [], mnesia:table_info(test_rec,disc_copies) ),
+ ?match( [], mnesia:table_info(test_rec,disc_only_copies) ),
+ Write_one = fun(Value) -> mnesia:write(#test_rec{key=2,val=Value}) end,
+ Read_one = fun() -> mnesia:read({test_rec,2}) end,
+    %%Write a value on N1 that we may test against later
+ ?match({atomic,ok},
+ rpc:call( N1, mnesia, transaction, [Write_one,[11]] ) ),
+ %%Stop Mnesia on N1
+ ?match([], mnesia_test_lib:kill_mnesia([N1])),
+ %%Write a value and check result (on N2; not possible on N1
+ %%since Mnesia is stopped there).
+ ?match({atomic,ok}, rpc:call(N2,mnesia,transaction,[Write_one,[22]]) ),
+ ?match({atomic,[#test_rec{key=2,val=22}]},
+ rpc:call(N2,mnesia,transaction,[Read_one]) ),
+ %%Stop Mnesia on N2
+ ?match([], mnesia_test_lib:kill_mnesia([N2])),
+ %%Restart Mnesia on N1 verify that we can access test_rec from
+ %%N1 without starting Mnesia on N2.
+ ?match(ok, rpc:call(N1, mnesia, start, [])),
+ ?match(ok, rpc:call(N1, mnesia, wait_for_tables, [[test_rec], 30000])),
+ ?match({atomic,[]}, rpc:call(N1,mnesia,transaction,[Read_one])),
+ ?match({atomic,ok}, rpc:call(N1,mnesia,transaction,[Write_one,[33]])),
+ ?match({atomic,[#test_rec{key=2,val=33}]},
+ rpc:call(N1,mnesia,transaction,[Read_one])),
+ %%Restart Mnesia on N2 and verify the contents there.
+ ?match([], mnesia_test_lib:start_mnesia([N2], [test_rec])),
+ ?match( {atomic,[#test_rec{key=2,val=33}]},
+ rpc:call(N2, mnesia, transaction, [Read_one] ) ),
+    %%Check that the start of Mnesia on N2 did not affect the contents on N1
+ ?match( {atomic,[#test_rec{key=2,val=33}]},
+ rpc:call(N1, mnesia, transaction, [Read_one] ) ),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+load_directly_when_all_are_ram_copiesB(doc) ->
+    ["Tables that are RAM copies only shall be loaded from a replica ",
+     "when possible. ",
+     "1. N1 and N2 have RAM copies of a table; stop N1 before N2.",
+ "2. Now start N2 first and then N1, N1 shall then load the table ",
+ " from N2."];
+load_directly_when_all_are_ram_copiesB(suite) -> [];
+load_directly_when_all_are_ram_copiesB(Config) when is_list(Config) ->
+ [N1, N2] = Nodes = ?acquire_nodes(2, Config),
+ ?match({atomic,ok},
+ mnesia:create_table(test_rec,
+ [{ram_copies,Nodes},
+ {attributes,record_info(fields,test_rec)}]
+ ) ),
+ ?match( Nodes, mnesia:table_info(test_rec,ram_copies) ),
+ ?match( [], mnesia:table_info(test_rec,disc_copies) ),
+ ?match( [], mnesia:table_info(test_rec,disc_only_copies) ),
+ Write_one = fun(Value) -> mnesia:write(#test_rec{key=3,val=Value}) end,
+ Read_one = fun() -> mnesia:read( {test_rec, 3}) end,
+    %%Write a value on N1 that we may test against later
+ ?match({atomic,ok},
+ rpc:call( N1, mnesia, transaction, [Write_one,[11]] ) ),
+ ?match({atomic,[#test_rec{key=3,val=11}]},
+ rpc:call(N2,mnesia,transaction,[Read_one]) ),
+ %%Stop Mnesia on N1
+ ?match([], mnesia_test_lib:kill_mnesia([N1])),
+ %%Write a value and check result (on N2; not possible on N1
+ %%since Mnesia is stopped there).
+ ?match({atomic,ok}, rpc:call(N2,mnesia,transaction,[Write_one,[22]]) ),
+ ?match({atomic,[#test_rec{key=3,val=22}]},
+ rpc:call(N2,mnesia,transaction,[Read_one]) ),
+ %%Stop Mnesia on N2
+ ?match([], mnesia_test_lib:kill_mnesia([N2])),
+ %%Restart Mnesia on N2 verify that we can access test_rec from
+ %%N2 without starting Mnesia on N1.
+ ?match(ok, rpc:call(N2, mnesia, start, [])),
+ ?match(ok, rpc:call(N2, mnesia, wait_for_tables, [[test_rec], 30000])),
+ ?match({atomic,[]}, rpc:call(N2,mnesia,transaction,[Read_one])),
+ ?match({atomic,ok}, rpc:call(N2,mnesia,transaction,[Write_one,[33]])),
+ ?match({atomic,[#test_rec{key=3,val=33}]},
+ rpc:call(N2,mnesia,transaction,[Read_one])),
+ %%Restart Mnesia on N1 and verify the contents there.
+ ?match([], mnesia_test_lib:start_mnesia([N1], [test_rec])),
+ ?match( {atomic,[#test_rec{key=3,val=33}]},
+ rpc:call(N1,mnesia,transaction,[Read_one])),
+    %%Check that the start of Mnesia on N1 did not affect the contents on N2
+ ?match( {atomic,[#test_rec{key=3,val=33}]},
+ rpc:call(N2,mnesia,transaction,[Read_one])),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+late_load_when_all_are_ram_copies_on_ram_nodes(doc) ->
+    ["Load of ram_copies tables when all replicas reside on disc-less nodes"];
+late_load_when_all_are_ram_copies_on_ram_nodes(suite) ->
+ [
+ late_load_when_all_are_ram_copies_on_ram_nodes1,
+ late_load_when_all_are_ram_copies_on_ram_nodes2
+ ].
+
+late_load_when_all_are_ram_copies_on_ram_nodes1(suite) -> [];
+late_load_when_all_are_ram_copies_on_ram_nodes1(Config) when is_list(Config) ->
+ [N1, N2] = mnesia_test_lib:prepare_test_case([{init_test_case, [mnesia]},
+ delete_schema,
+ {reload_appls, [mnesia]}],
+ 2, Config, ?FILE, ?LINE),
+ Res = late_load_when_all_are_ram_copies_on_ram_nodes(N1, [N2], Config),
+ mnesia_test_lib:prepare_test_case([{reload_appls, [mnesia]}],
+ 2, Config, ?FILE, ?LINE),
+ Res.
+
+late_load_when_all_are_ram_copies_on_ram_nodes2(suite) -> [];
+late_load_when_all_are_ram_copies_on_ram_nodes2(Config) when is_list(Config) ->
+ [N1, N2, N3] = mnesia_test_lib:prepare_test_case([{init_test_case, [mnesia]},
+ delete_schema,
+ {reload_appls, [mnesia]}],
+ 3, Config, ?FILE, ?LINE),
+ Res = late_load_when_all_are_ram_copies_on_ram_nodes(N1, [N2, N3], Config),
+ mnesia_test_lib:prepare_test_case([{reload_appls, [mnesia]}],
+ 3, Config, ?FILE, ?LINE),
+ Res.
+
+late_load_when_all_are_ram_copies_on_ram_nodes(DiscNode, RamNs, _Config)
+ when DiscNode == node() ->
+ ?match(ok, mnesia:create_schema([DiscNode])),
+ ?match(ok, mnesia:start()),
+ Nodes = [DiscNode | RamNs],
+ Extra = [{extra_db_nodes, Nodes}],
+ Ok = [ok || _ <- RamNs],
+ ?match({Ok, []}, rpc:multicall(RamNs, mnesia, start, [Extra])),
+ ?match([], wait_until_running(Nodes)),
+
+ LastRam = lists:last(RamNs),
+ %% ?match({atomic, ok},
+ %% mnesia:add_table_copy(schema, LastRam, ram_copies)),
+ Def = [{ram_copies, RamNs}, {attributes, record_info(fields, test_rec)}],
+ ?match({atomic,ok}, mnesia:create_table(test_rec, Def)),
+ ?verify_mnesia(Nodes, []),
+ ?match([], mnesia_test_lib:stop_mnesia(RamNs)),
+ ?match(stopped, mnesia:stop()),
+ ?match(ok, mnesia:start()),
+
+ Rec1 = #test_rec{key=3, val=33},
+ Rec2 = #test_rec{key=4, val=44},
+
+ FirstRam = hd(RamNs),
+ ?match(ok, rpc:call(FirstRam, mnesia, start, [Extra])),
+ ?match(ok, rpc:call(FirstRam, mnesia, wait_for_tables,
+ [[test_rec], 30000])),
+ ?match(ok, rpc:call(FirstRam, mnesia, dirty_write,[Rec1])),
+ ?match(ok, mnesia:wait_for_tables([test_rec], 30000)),
+ mnesia:dirty_write(Rec2),
+
+ if
+ FirstRam /= LastRam ->
+ ?match(ok, rpc:call(LastRam, mnesia, start, [Extra])),
+ ?match(ok, rpc:call(LastRam, mnesia, wait_for_tables,
+ [[test_rec], 30000]));
+ true ->
+ ignore
+ end,
+ ?match([Rec1], rpc:call(LastRam, mnesia, dirty_read, [{test_rec, 3}])),
+ ?match([Rec2], rpc:call(LastRam, mnesia, dirty_read, [{test_rec, 4}])),
+ ?verify_mnesia(Nodes, []).
+
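+%% Poll mnesia:system_info(running_db_nodes) once per second, at most Times
+%% attempts; returns [] when all Nodes are running, otherwise the nodes that
+%% never showed up.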
+wait_until_running(Nodes) ->
+ wait_until_running(Nodes, 30).
+
+wait_until_running(Nodes, Times) when Times > 0->
+ Alive = mnesia:system_info(running_db_nodes),
+ case Nodes -- Alive of
+ [] ->
+ [];
+ Remaining ->
+ timer:sleep(timer:seconds(1)),
+ wait_until_running(Remaining, Times - 1)
+ end;
+wait_until_running(Nodes, _) ->
+ Nodes.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+load_when_last_replica_becomes_available(doc) ->
+ ["Check that when all Mnesia nodes die at the same instant, then the ",
+ "replicated table shall be accessible when the last node is started ",
+ "again.",
+ "Checked by cheating. Start Mnesia on N1, N2, N3. Have a table ",
+ "replicated on disc on all three nodes, fill in some transactions, ",
+     "install a fallback. Restart Mnesia on all nodes. ",
+ "This is the cheat and it simulates that all nodes died at the same ",
+ "time. Check that the table is only accessible after the last node ",
+ "has come up."];
+load_when_last_replica_becomes_available(suite) -> [];
+load_when_last_replica_becomes_available(Config) when is_list(Config) ->
+ [N1, N2, N3] = Nodes = ?acquire_nodes(3, Config),
+ ?match({atomic,ok},
+ mnesia:create_table(test_rec,
+ [{disc_copies,Nodes},
+ {attributes,record_info(fields,test_rec)}]
+ ) ),
+ ?match( [], mnesia:table_info(test_rec,ram_copies) ),
+ ?match( Nodes, mnesia:table_info(test_rec,disc_copies) ),
+ ?match( [], mnesia:table_info(test_rec,disc_only_copies) ),
+ Write_one = fun(Key,Val)->mnesia:write(#test_rec{key=Key,val=Val}) end,
+ Read_one = fun(Key) ->mnesia:read( {test_rec, Key}) end,
+ %%Write one value from each node.
+ ?match({atomic,ok},rpc:call(N1,mnesia,transaction,[Write_one,[1,11]])),
+ ?match({atomic,ok},rpc:call(N2,mnesia,transaction,[Write_one,[2,22]])),
+ ?match({atomic,ok},rpc:call(N3,mnesia,transaction,[Write_one,[3,33]])),
+ %%Check the values
+ ?match({atomic,[#test_rec{key=1,val=11}]},
+ rpc:call(N2,mnesia,transaction,[Read_one,[1]]) ),
+ ?match({atomic,[#test_rec{key=2,val=22}]},
+ rpc:call(N3,mnesia,transaction,[Read_one,[2]]) ),
+ ?match({atomic,[#test_rec{key=3,val=33}]},
+ rpc:call(N1,mnesia,transaction,[Read_one,[3]]) ),
+
+ ?match(ok, mnesia:backup("test_last_replica")),
+ ?match(ok, mnesia:install_fallback("test_last_replica")),
+ file:delete("test_last_replica"),
+ %%Stop Mnesia on all three nodes
+ ?match([], mnesia_test_lib:kill_mnesia(Nodes)),
+
+ %%Start Mnesia on one node, make sure that test_rec is not available
+ ?match(ok, rpc:call(N2, mnesia, start, [])),
+ ?match({timeout,[test_rec]},
+ rpc:call(N2, mnesia, wait_for_tables, [[test_rec], 10000])),
+ ?match(ok, rpc:call(N1, mnesia, start, [])),
+ ?match({timeout,[test_rec]},
+ rpc:call(N1, mnesia, wait_for_tables, [[test_rec], 10000])),
+ %%Start the third node
+ ?match(ok, rpc:call(N3, mnesia, start, [])),
+ %%Make sure that the table is loaded everywhere
+ ?match(ok, rpc:call(N3, mnesia, wait_for_tables, [[test_rec], 30000])),
+ ?match(ok, rpc:call(N2, mnesia, wait_for_tables, [[test_rec], 30000])),
+ ?match(ok, rpc:call(N1, mnesia, wait_for_tables, [[test_rec], 30000])),
+
+ %%Check the values
+ ?match({atomic,[#test_rec{key=1,val=11}]},
+ rpc:call(N2,mnesia,transaction,[Read_one,[1]]) ),
+ ?match({atomic,[#test_rec{key=2,val=22}]},
+ rpc:call(N3,mnesia,transaction,[Read_one,[2]]) ),
+ ?match({atomic,[#test_rec{key=3,val=33}]},
+ rpc:call(N1,mnesia,transaction,[Read_one,[3]]) ),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+load_when_we_have_down_from_all_other_replica_nodes(doc) ->
+ ["The table can be loaded if this node was the last one surviving. ",
+ "Check this by having N1, N2, N3 and a table replicated on all those ",
+ "nodes. Then kill them in the N1, N2, N3 order. Then start N3 and ",
+ "verify that the table is available with correct contents."];
+load_when_we_have_down_from_all_other_replica_nodes(suite) -> [];
+load_when_we_have_down_from_all_other_replica_nodes(Config) when is_list(Config) ->
+ [N1, N2, N3] = Nodes = ?acquire_nodes(3, Config),
+ ?match({atomic,ok},
+ mnesia:create_table(test_rec,
+ [{disc_copies,Nodes},
+ {attributes,record_info(fields,test_rec)}]
+ ) ),
+ ?match( [], mnesia:table_info(test_rec,ram_copies) ),
+ ?match( Nodes, mnesia:table_info(test_rec,disc_copies) ),
+ ?match( [], mnesia:table_info(test_rec,disc_only_copies) ),
+ Write_one = fun(Key,Val)->mnesia:write(#test_rec{key=Key,val=Val}) end,
+ Read_one = fun(Key) ->mnesia:read( {test_rec, Key}) end,
+ %%Write one value from each node.
+ ?match({atomic,ok},rpc:call(N1,mnesia,transaction,[Write_one,[1,111]])),
+ ?match({atomic,ok},rpc:call(N2,mnesia,transaction,[Write_one,[2,222]])),
+ ?match({atomic,ok},rpc:call(N3,mnesia,transaction,[Write_one,[3,333]])),
+ %%Check the values
+ ?match({atomic,[#test_rec{key=1,val=111}]},
+ rpc:call(N2,mnesia,transaction,[Read_one,[1]]) ),
+ ?match({atomic,[#test_rec{key=2,val=222}]},
+ rpc:call(N3,mnesia,transaction,[Read_one,[2]]) ),
+ ?match({atomic,[#test_rec{key=3,val=333}]},
+ rpc:call(N1,mnesia,transaction,[Read_one,[3]]) ),
+ %%Stop Mnesia on all three nodes
+ ?match([], mnesia_test_lib:kill_mnesia([N1])),
+ ?match({atomic,ok},rpc:call(N2,mnesia,transaction,[Write_one,[22,22]])),
+ ?match([], mnesia_test_lib:kill_mnesia([N2])),
+ ?match({atomic,ok},rpc:call(N3,mnesia,transaction,[Write_one,[33,33]])),
+ ?match([], mnesia_test_lib:kill_mnesia([N3])),
+    ?verbose("Mnesia stopped on all three nodes.~n",[]),
+
+ %%Start Mnesia on N3; wait for 'test_rec' table to load
+ ?match(ok, rpc:call(N3, mnesia, start, [])),
+ ?match(ok, rpc:call(N3, mnesia, wait_for_tables, [[test_rec], 30000])),
+
+ %%Check the values
+ ?match({atomic,[#test_rec{key=1,val=111}]},
+ rpc:call(N3,mnesia,transaction,[Read_one,[1]]) ),
+ ?match({atomic,[#test_rec{key=2,val=222}]},
+ rpc:call(N3,mnesia,transaction,[Read_one,[2]]) ),
+ ?match({atomic,[#test_rec{key=3,val=333}]},
+ rpc:call(N3,mnesia,transaction,[Read_one,[3]]) ),
+ ?match({atomic,[#test_rec{key=22,val=22}]},
+ rpc:call(N3,mnesia,transaction,[Read_one,[22]]) ),
+ ?match({atomic,[#test_rec{key=33,val=33}]},
+ rpc:call(N3,mnesia,transaction,[Read_one,[33]]) ),
+ ?verify_mnesia([N3], [N1, N2]).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+late_load_transforms_into_disc_load(doc) ->
+ ["Difficult case that needs instrumentation of Mnesia.",
+ "A table is force loaded, and Mnesia decides to load it from another ",
+     "Mnesia node because it is available there. The other Mnesia node then ",
+     "dies in mid-copy, which shall make the first Mnesia node really ",
+ "force load from disc.",
+ "Check this by starting N1 and N2 and replicating a table between ",
+ "them. Then kill N1 before N2. The idea is to start N2 first, then ",
+ "N1 and then do a force load on N1. This force load will load from ",
+ "N2 BUT N2 must be killed after the decision to load from it has ",
+ "been made. tricky."];
+
+late_load_transforms_into_disc_load(suite) -> [];
+late_load_transforms_into_disc_load(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+
+ {success, [A, B]} = ?start_activities(Nodes),
+
+ ?match(Node1, node(A)),
+ ?match(Node2, node(B)),
+
+ Tab = late_load_table,
+ Def = [{attributes, [key, value]},
+ {disc_copies, Nodes}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ ?match(ok, mnesia:dirty_write({Tab, 111, 4711})),
+ ?match(ok, mnesia:dirty_write({Tab, 222, 42})),
+
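+    %% Instrument mnesia_loader:do_get_network_copy on Node1: when the network
+    %% copy from Node2 starts, the debug fun kills Mnesia on Node2 mid-copy so
+    %% that Node1 has to fall back to a real disc load after the force load.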
+ TestPid = self(),
+ DebugId = {mnesia_loader, do_get_network_copy},
+ DebugFun = fun(PrevContext, EvalContext) ->
+ ?verbose("interrupt late load, pid ~p #~p ~n context ~p ~n",
+ [self(),PrevContext,EvalContext]),
+
+ mnesia_test_lib:kill_mnesia([Node2]),
+ TestPid ! {self(),debug_fun_was_called},
+
+ ?verbose("interrupt late_log - continues ~n",[]),
+ ?deactivate_debug_fun(DebugId),
+ PrevContext+1
+ end,
+ ?remote_activate_debug_fun(Node1,DebugId, DebugFun, 1),
+
+ %% kill mnesia on node1
+ mnesia_test_lib:kill_mnesia([Node1]),
+ %% wait a while, so that mnesia is really down
+ timer:sleep(timer:seconds(1)),
+
+ ?match(ok, rpc:call(Node2, mnesia, dirty_write, [{Tab, 222, 815}])),
+
+ %% start Mnesia on node1
+ ?match(ok,mnesia:start()),
+ ?match(yes, mnesia:force_load_table(Tab)),
+ ?match(ok, mnesia:wait_for_tables([Tab],timer:seconds(30))),
+
+ receive_messages([debug_fun_was_called]),
+
+ check_tables([A],[{Tab,111},{Tab,222}],[[{Tab,111,4711}],[{Tab,222,42}]]),
+ ?verify_mnesia([Node1], [Node2]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+late_load_leads_to_hanging(doc) ->
+ ["Difficult case that needs instrumentation of Mnesia.",
+ "A table is loaded, and Mnesia decides to load it from another ",
+ "Mnesia node because it has the latest copy there. ",
+ "The other Mnesia node then ",
+     "dies in mid-copy, which shall make the first Mnesia node not ",
+     "force load from disc but wait for the other node to come up again. ",
+ "Check this by starting N1 and N2 and replicating a table between ",
+ "them. Then kill N1 before N2. The idea is to start N2 first, then ",
+ "N1. This load will load from ",
+ "N2 BUT N2 must be killed after the decision to load from it has ",
+ "been made. tricky."];
+
+late_load_leads_to_hanging(suite) -> [];
+late_load_leads_to_hanging(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+
+ Tab = late_load_table,
+ Def = [{attributes, [key, value]},
+ {disc_copies, Nodes}],
+
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ ?match(ok, mnesia:dirty_write({Tab, 111, 4711})),
+ ?match(ok, mnesia:dirty_write({Tab, 222, 42})),
+
+ DebugId = {mnesia_loader, do_get_network_copy},
+ DebugFun = fun(PrevContext, EvalContext) ->
+ ?verbose("interrupt late load, pid ~p #~p ~n context ~p ~n",
+ [self(), PrevContext, EvalContext]),
+ mnesia_test_lib:kill_mnesia([Node2]),
+ ?verbose("interrupt late load - continues ~n",[]),
+ ?deactivate_debug_fun(DebugId),
+ PrevContext+1
+ end,
+
+ ?remote_activate_debug_fun(Node1,DebugId, DebugFun, 1),
+ mnesia_test_lib:kill_mnesia([Node1]),
+ %% wait a while, so that mnesia is really down
+ timer:sleep(timer:seconds(1)),
+
+ ?match(ok, rpc:call(Node2, mnesia, dirty_write, [{Tab, 333, 666}])),
+
+ %% start Mnesia on node1
+ ?match(ok, mnesia:start()),
+
+ ?match({timeout, [Tab]}, mnesia:wait_for_tables([Tab], timer:seconds(2))),
+
+ ?match({'EXIT', {aborted, _}}, mnesia:dirty_read({Tab, 222})),
+ %% mnesia on node1 is waiting for node2 coming up
+
+ ?match(ok, rpc:call(Node2, mnesia, start, [])),
+ ?match(ok, mnesia:wait_for_tables([Tab], timer:seconds(30))),
+ ?match([{Tab, 333, 666}], mnesia:dirty_read({Tab, 333})),
+ ?verify_mnesia([Node2, Node1], []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+force_load_when_nobody_intents_to_load(doc) ->
+    ["Normal force load. Start N1, N2, kill in N1, N2 order. Start N1, do ",
+ "force load. Did it work?"];
+force_load_when_nobody_intents_to_load(suite) -> [];
+force_load_when_nobody_intents_to_load(Config) when is_list(Config) ->
+ [N1, N2] = Nodes = ?acquire_nodes(2, Config),
+ Table = test_rec,
+ Trec1a = #test_rec{key=1,val=111},
+ Trec1b = #test_rec{key=1,val=333},
+ Trec2a = #test_rec{key=2,val=222},
+ Trec3a = #test_rec{key=3,val=333},
+ Trec3b = #test_rec{key=3,val=666},
+
+ ?match({atomic,ok}, rpc:call(N1, mnesia,create_table,
+ [Table,
+ [{disc_copies,Nodes},
+ {attributes,record_info(fields,test_rec)}
+ ] ] ) ),
+ ?match( [], mnesia:table_info(Table,ram_copies) ),
+ ?match( Nodes, mnesia:table_info(Table,disc_copies) ),
+ ?match( [], mnesia:table_info(Table,disc_only_copies) ),
+ Write_one = fun(Rec) -> mnesia:write(Rec) end,
+ Read_one = fun(Key) -> mnesia:read({Table, Key}) end,
+ %%Write one value
+ ?match({atomic,ok},rpc:call(N1,mnesia,transaction,[Write_one,[Trec1a]])),
+ %%Check it
+ ?match({atomic,[Trec1a]},rpc:call(N2,mnesia,transaction,[Read_one,[1]]) ),
+ %%Shut down mnesia on N1
+ ?match([], mnesia_test_lib:stop_mnesia([N1])),
+ %%Write and check value while N1 is down
+ ?match({atomic,ok},rpc:call(N2,mnesia,transaction,[Write_one,[Trec1b]])),
+ ?match({atomic,ok},rpc:call(N2,mnesia,transaction,[Write_one,[Trec2a]])),
+ ?match({atomic,ok},rpc:call(N2,mnesia,transaction,[Write_one,[Trec3a]])),
+ ?match({aborted,{node_not_running,N1}},
+ rpc:call(N1,mnesia,transaction,[Read_one,[2]]) ),
+ ?match({atomic,[Trec1b]},rpc:call(N2,mnesia,transaction,[Read_one,[1]]) ),
+ ?match({atomic,[Trec2a]},rpc:call(N2,mnesia,transaction,[Read_one,[2]]) ),
+ ?match({atomic,[Trec3a]},rpc:call(N2,mnesia,transaction,[Read_one,[3]]) ),
+ %%Shut down Mnesia on N2
+ ?match([], mnesia_test_lib:stop_mnesia([N2])),
+
+ %%Restart Mnesia on N1
+ ?match(ok, rpc:call(N1, mnesia, start, [])),
+ %%Check that table is not available (waiting for N2)
+ ?match({timeout,[Table]},
+ rpc:call(N1, mnesia, wait_for_tables, [[Table], 3000])),
+
+ %%Force load on N1
+ ?match(yes,rpc:call(N1,mnesia,force_load_table,[Table])),
+ %%Check values
+ ?match({atomic,[Trec1a]},rpc:call(N1,mnesia,transaction,[Read_one,[1]]) ),
+ ?match({atomic,[]}, rpc:call(N1,mnesia,transaction,[Read_one,[2]]) ),
+ ?match({atomic,[]}, rpc:call(N1,mnesia,transaction,[Read_one,[3]]) ),
+ %%Write a value for key=3
+ ?match({atomic,ok},rpc:call(N1,mnesia,transaction,[Write_one,[Trec3b]])),
+
+ %%Restart N2 and check values
+ ?match(ok, rpc:call(N2, mnesia, start, [])),
+ ?match(ok, rpc:call(N2, mnesia, wait_for_tables, [[Table], 30000])),
+
+ ?match({atomic,[Trec1a]},rpc:call(N1,mnesia,transaction,[Read_one,[1]]) ),
+ ?match({atomic,[Trec1a]},rpc:call(N2,mnesia,transaction,[Read_one,[1]]) ),
+
+ ?match({atomic,[]},rpc:call(N1,mnesia,transaction,[Read_one,[2]]) ),
+ ?match({atomic,[]},rpc:call(N2,mnesia,transaction,[Read_one,[2]]) ),
+
+ ?match({atomic,[Trec3b]},rpc:call(N1,mnesia,transaction,[Read_one,[3]]) ),
+ ?match({atomic,[Trec3b]},rpc:call(N2,mnesia,transaction,[Read_one,[3]]) ),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+force_load_when_someone_has_decided_to_load(doc) ->
+ ["Difficult case that needs instrumentation of Mnesia.",
+ "Start N1 and N2, replicate table, kill in N1, N2 order. Start N2 ",
+ "and start N1 before N2 has really loaded the table but after N2 has ",
+ "decided to load it."];
+
+force_load_when_someone_has_decided_to_load(suite) -> [];
+force_load_when_someone_has_decided_to_load(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+ {success, [A, B]} = ?start_activities(Nodes),
+ ?match(Node1, node(A)), %% Just to check :)
+ ?match(Node2, node(B)),
+
+ Tab = late_load_table,
+ Def = [{attributes, [key, value]}, {disc_copies, Nodes}],
+
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ ?match(ok, mnesia:dirty_write({Tab, 111, 4711})),
+ ?match(ok, mnesia:dirty_write({Tab, 222, 42})),
+
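+    %% Instrument mnesia_controller:late_disc_load on Node2: the debug fun
+    %% parks Node2 in the middle of its late disc load (the fun_in_postion /
+    %% continue handshake below) so that Node1 can be started and issue its
+    %% force_load while Node2 has decided to load but not yet finished.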
+ Self = self(),
+ DebugId = {mnesia_controller, late_disc_load},
+ DebugFun = fun(PrevContext, EvalContext) ->
+		   ?verbose("interrupt late disc load, pid ~p #~p ~n context ~p ~n",
+			    [self(),PrevContext,EvalContext]),
+ Self ! {self(), fun_in_postion},
+ wait_for_signal(),
+ ?verbose("interrupt late disc load - continues ~n",[]),
+ ?deactivate_debug_fun(DebugId),
+ PrevContext+1
+ end,
+
+ %% kill mnesia on node1
+ mnesia_test_lib:kill_mnesia([Node1]),
+ %% wait a while, so that mnesia is really down
+ timer:sleep(timer:seconds(1)),
+
+ ?match(ok, rpc:call(Node2, mnesia, dirty_write, [{Tab, 222, 815}])),
+ %% kill mnesia on node2
+ mnesia_test_lib:kill_mnesia([Node2]),
+ %% wait a while, so that mnesia is really down
+ timer:sleep(timer:seconds(1)),
+
+ ?remote_activate_debug_fun(Node2,DebugId, DebugFun, 1),
+
+ B ! fun() -> mnesia:start() end,
+ [{Mnesia_Pid, fun_in_postion}] = receive_messages([fun_in_postion]),
+
+ %% start Mnesia on node1
+ A ! fun() -> mnesia:start() end,
+ ?match_receive(timeout),
+% Got some problem with this testcase when we modified mnesia init
+% These test cases are very implementation dependent!
+% A ! fun() -> mnesia:wait_for_tables([Tab], 3000) end,
+% ?match_receive({A, {timeout, [Tab]}}),
+ A ! fun() -> mnesia:force_load_table(Tab) end,
+ ?match_receive(timeout),
+
+ Mnesia_Pid ! continue,
+ ?match_receive({B, ok}),
+ ?match_receive({A, ok}),
+ ?match_receive({A, yes}),
+
+ B ! fun() -> mnesia:wait_for_tables([Tab], 10000) end,
+ ?match_receive({B, ok}),
+ ?match(ok, mnesia:wait_for_tables([Tab], timer:seconds(30))),
+ ?match([{Tab, 222, 815}], mnesia:dirty_read({Tab, 222})),
+ ?verify_mnesia(Nodes, []).
+
+wait_for_signal() ->
+ receive
+ continue -> ok
+ %% Don't eat any other mnesia internal msg's
+ after
+ timer:minutes(2) -> ?error("Timedout in wait_for_signal~n", [])
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+force_load_when_someone_else_already_has_loaded(doc) ->
+ ["Normal case. Do a force load when somebody else has loaded the table. ",
+     "Start N1, N2, kill in N1, N2 order. Start N2, load the table, start N1, ",
+     "force load. Did it work? (i.e. did N1 load the table from N2, as that ",
+ "one is the latest version and it is available on N2)"];
+
+force_load_when_someone_else_already_has_loaded(suite) -> [];
+force_load_when_someone_else_already_has_loaded(Config) when is_list(Config) ->
+ [N1, N2] = Nodes = ?acquire_nodes(2, Config),
+ Table = test_rec,
+ Trec1 = #test_rec{key=1,val=111},
+ Trec2 = #test_rec{key=1,val=222},
+
+ ?match({atomic,ok}, rpc:call(N1, mnesia,create_table,
+ [Table,
+ [{disc_copies,Nodes},
+ {attributes,record_info(fields,test_rec)}
+ ] ] ) ),
+ ?match( [], mnesia:table_info(Table,ram_copies) ),
+ ?match( Nodes, mnesia:table_info(Table,disc_copies) ),
+ ?match( [], mnesia:table_info(Table,disc_only_copies) ),
+ Write_one = fun(Rec) -> mnesia:write(Rec) end,
+ Read_one = fun(Key) -> mnesia:read({Table, Key}) end,
+ %%Write one value
+ ?match({atomic,ok},rpc:call(N1,mnesia,transaction,[Write_one,[Trec1]])),
+ %%Check it
+ ?match({atomic,[Trec1]},rpc:call(N2,mnesia,transaction,[Read_one,[1]]) ),
+ %%Shut down mnesia
+ ?match([], mnesia_test_lib:stop_mnesia([N1])),
+ timer:sleep(500),
+ ?match([], mnesia_test_lib:stop_mnesia([N2])),
+ %%Restart Mnesia on N2;wait for tables to load
+ ?match(ok, rpc:call(N2, mnesia, start, [])),
+ ?match(ok, rpc:call(N2, mnesia, wait_for_tables, [[test_rec], 30000])),
+ %%Write one value
+ ?match({atomic,ok},rpc:call(N2,mnesia,transaction,[Write_one,[Trec2]])),
+ %%Start on N1; force load
+ ?match(ok, rpc:call(N1, mnesia, start, [])),
+ %%Force load from file
+ ?match(yes, rpc:call(N1,mnesia,force_load_table,[Table])),
+ %%Check the value
+ ?match({atomic,[Trec2]},rpc:call(N1,mnesia,transaction,[Read_one,[1]]) ),
+ %% === there must be a Trec2 here !!!!
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+force_load_when_we_has_loaded(doc) ->
+ ["Force load a table we already have loaded"];
+force_load_when_we_has_loaded(suite) -> [];
+force_load_when_we_has_loaded(Config) when is_list(Config) ->
+ [N1] = Nodes = ?acquire_nodes(1, Config),
+ Table = test_rec,
+ Trec1 = #test_rec{key=1,val=111},
+ Trec2 = #test_rec{key=1,val=222},
+
+ ?match({atomic,ok}, rpc:call(N1, mnesia,create_table,
+ [Table,
+ [{disc_copies,Nodes},
+ {attributes,record_info(fields,test_rec)}
+ ] ] ) ),
+ ?match( [], mnesia:table_info(Table,ram_copies) ),
+ ?match( Nodes, mnesia:table_info(Table,disc_copies) ),
+ ?match( [], mnesia:table_info(Table,disc_only_copies) ),
+ Write_one = fun(Rec) -> mnesia:write(Rec) end,
+ Read_one = fun(Key) -> mnesia:read({Table, Key}) end,
+ %%Write one value
+ ?match({atomic,ok},rpc:call(N1,mnesia,transaction,[Write_one,[Trec1]])),
+ %%Check it
+ ?match({atomic,[Trec1]},rpc:call(N1,mnesia,transaction,[Read_one,[1]]) ),
+ %%Shut down mnesia
+ ?match([], mnesia_test_lib:stop_mnesia(Nodes)),
+ %%Restart Mnesia;wait for tables to load
+ ?match([], mnesia_test_lib:start_mnesia(Nodes, [Table])),
+ %%Write one value
+ ?match({atomic,ok},rpc:call(N1,mnesia,transaction,[Write_one,[Trec2]])),
+ %%Force load from file
+ ?match(yes, rpc:call(N1,mnesia,force_load_table,[Table])),
+ %%Check the value
+ ?match({atomic,[Trec2]},rpc:call(N1,mnesia,transaction,[Read_one,[1]]) ),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+force_load_on_a_non_local_table(doc) ->
+ ["This is NOT allowed, the test case is a negative test",
+ "Force load on a table that isn't replicated on this node."];
+force_load_on_a_non_local_table(suite) -> [];
+force_load_on_a_non_local_table(Config) when is_list(Config) ->
+ [N1, N2, N3] = Nodes = ?acquire_nodes( 3, Config),
+ TableNodes = lists:sublist(Nodes,2),
+ Table = test_rec,
+ Trec1 = #test_rec{key=1,val=11},
+
+ ?match({atomic,ok}, rpc:call(N1, mnesia,create_table,
+ [Table,
+ [{disc_copies,TableNodes},
+ {attributes,record_info(fields,test_rec)}
+ ] ] ) ),
+ ?match( [], mnesia:table_info(Table,ram_copies) ),
+ ?match( TableNodes, mnesia:table_info(Table,disc_copies) ),
+ ?match( [], mnesia:table_info(Table,disc_only_copies) ),
+ Write_one = fun(Rec) -> mnesia:write(Rec) end,
+ Read_one = fun(Key) -> mnesia:read({Table, Key}) end,
+ %%Write one value
+ ?match({atomic,ok},rpc:call(N1,mnesia,transaction,[Write_one,[Trec1]])),
+ %%Check it from the other nodes
+ ?match({atomic,[Trec1]},rpc:call(N2,mnesia,transaction,[Read_one,[1]]) ),
+ ?match({atomic,[Trec1]},rpc:call(N3,mnesia,transaction,[Read_one,[1]]) ),
+
+ %%Make sure that Table is non-local
+ ?match_inverse(N3, rpc:call(N3,mnesia,table_info,[Table,where_to_read])),
+ %%Try to force load it
+ ?match(yes, rpc:call(N3,mnesia,force_load_table,[Table])),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+force_load_when_the_table_does_not_exist(doc) ->
+ ["This is NOT allowed, the test case is a negative test",
+ "Force load on a table that doesn't exist."];
+force_load_when_the_table_does_not_exist(suite) -> [];
+force_load_when_the_table_does_not_exist(Config) when is_list(Config) ->
+ Nodes = ?acquire_nodes( 2, Config),
+
+ %%Dummy table
+ ?match({atomic,ok},
+ mnesia:create_table(test_rec,
+ [{disc_copies,Nodes},
+ {attributes,record_info(fields,test_rec)}]
+ ) ),
+ ?match( [], mnesia:table_info(test_rec,ram_copies) ),
+ ?match( Nodes, mnesia:table_info(test_rec,disc_copies) ),
+ ?match( [], mnesia:table_info(test_rec,disc_only_copies) ),
+ Tab = dummy,
+ %%Make sure that Tab is an unknown table
+ ?match( false, lists:member(Tab,mnesia:system_info(tables)) ),
+ ?match( {error, {no_exists, Tab}}, mnesia:force_load_table(Tab) ),
+ ?verify_mnesia(Nodes, []).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+load_tables_with_master_tables(doc) ->
+    ["Verifies the semantics of different master node settings.",
+     "The semantics should be:",
+     "1. Mnesia downs: normally decides from where Mnesia should load tables",
+ "2. Master tables (overrides mnesia downs) ",
+ "3. Force load (overrides Master tables) ",
+ "--- 1st from active master nodes",
+ "--- 2nd from active nodes",
+ "--- 3rd get local copy (if ram create new one)"
+ ];
+
+load_tables_with_master_tables(suite) ->
+ [master_nodes,
+ starting_master_nodes,
+ master_on_non_local_tables,
+ remote_force_load_with_local_master_node].
+
+
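+%% Shorthand used by the master node tests below: a fun that writes Tup,
+%% intended to be passed to mnesia:sync_transaction/1.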
+-define(SDwrite(Tup), fun() -> mnesia:write(Tup) end).
+
+master_nodes(suite) -> [];
+master_nodes(Config) when is_list(Config) ->
+ [A, B, C] = Nodes = ?acquire_nodes(3, Config),
+ Tab = test_table_master_nodes,
+ ?match({atomic,ok}, mnesia:create_table(Tab, [{disc_copies, Nodes}])),
+
+    %% Test 1: Master is A; the table should be loaded from A
+
+ ?match(ok, rpc:call(A, mnesia, set_master_nodes, [Tab, [A]])),
+ ?match({atomic, ok}, mnesia:sync_transaction(?SDwrite({Tab, 1, init}))),
+
+ mnesia_test_lib:stop_mnesia([A]),
+ ?match({atomic, ok}, rpc:call(B, mnesia, sync_transaction, [?SDwrite({Tab, 1, updated})])),
+ ?match(ok, rpc:call(A, mnesia, start, [])),
+ ?match(ok, rpc:call(A, mnesia, wait_for_tables, [[Tab], 3000])),
+
+ ?match([{Tab, 1, init}], rpc:call(A, mnesia, dirty_read, [{Tab, 1}])),
+ ?match([{Tab, 1, updated}], rpc:call(B, mnesia, dirty_read, [{Tab, 1}])),
+ ?match([{Tab, 1, updated}], rpc:call(C, mnesia, dirty_read, [{Tab, 1}])),
+
+    %% Test 2: Masters are [A,B] and B is up; the table should be loaded from B
+
+ ?match(ok, rpc:call(A, mnesia, set_master_nodes, [Tab, [A, B]])),
+ ?match({atomic, ok}, mnesia:sync_transaction(?SDwrite({Tab, 1, init}))),
+
+ mnesia_test_lib:stop_mnesia([A]),
+ ?match({atomic, ok}, rpc:call(B, mnesia, sync_transaction, [?SDwrite({Tab, 1, updated})])),
+ ?match(ok, rpc:call(A, mnesia, start, [])),
+ ?match(ok, rpc:call(A, mnesia, wait_for_tables, [[Tab], 3000])),
+
+ ?match([{Tab, 1, updated}], rpc:call(A, mnesia, dirty_read, [{Tab, 1}])),
+ ?match([{Tab, 1, updated}], rpc:call(B, mnesia, dirty_read, [{Tab, 1}])),
+ ?match([{Tab, 1, updated}], rpc:call(C, mnesia, dirty_read, [{Tab, 1}])),
+
+    %% Test 3: Masters are [A,B] and B is down; the table should be loaded from A
+
+ ?match(ok, rpc:call(A, mnesia, set_master_nodes, [Tab, [A, B]])),
+ ?match({atomic, ok}, mnesia:sync_transaction(?SDwrite({Tab, 1, init}))),
+
+ mnesia_test_lib:stop_mnesia([A]),
+ ?match({atomic, ok}, rpc:call(B, mnesia, sync_transaction, [?SDwrite({Tab, 1, updated})])),
+ mnesia_test_lib:stop_mnesia([B]),
+ ?match(ok, rpc:call(A, mnesia, start, [])),
+ ?match(ok, rpc:call(A, mnesia, wait_for_tables, [[Tab], 3000])),
+
+ ?match(ok, rpc:call(B, mnesia, start, [])),
+ ?match(ok, rpc:call(B, mnesia, wait_for_tables, [[Tab], 3000])),
+
+ ?match([{Tab, 1, init}], rpc:call(A, mnesia, dirty_read, [{Tab, 1}])),
+ ?match([{Tab, 1, _Unknown}], rpc:call(B, mnesia, dirty_read, [{Tab, 1}])),
+ ?match([{Tab, 1, updated}], rpc:call(C, mnesia, dirty_read, [{Tab, 1}])),
+
+    %% Test 4: Master is [B] and B is up; the table should be loaded from B
+
+ ?match(ok, rpc:call(A, mnesia, set_master_nodes, [Tab, [B]])),
+ ?match({atomic, ok}, mnesia:sync_transaction(?SDwrite({Tab, 1, init}))),
+
+ mnesia_test_lib:stop_mnesia([A]),
+ ?match({atomic, ok}, rpc:call(B, mnesia, sync_transaction, [?SDwrite({Tab, 1, updated})])),
+ ?match(ok, rpc:call(A, mnesia, start, [])),
+ ?match(ok, rpc:call(A, mnesia, wait_for_tables, [[Tab], 3000])),
+
+ ?match([{Tab, 1, updated}], rpc:call(A, mnesia, dirty_read, [{Tab, 1}])),
+ ?match([{Tab, 1, updated}], rpc:call(B, mnesia, dirty_read, [{Tab, 1}])),
+ ?match([{Tab, 1, updated}], rpc:call(C, mnesia, dirty_read, [{Tab, 1}])),
+
+    %% Test 5: Master is [B] and B is down; the table should not be loaded
+
+ ?match(ok, rpc:call(A, mnesia, set_master_nodes, [Tab, [B]])),
+ ?match({atomic, ok}, mnesia:sync_transaction(?SDwrite({Tab, 1, init}))),
+
+ mnesia_test_lib:stop_mnesia([A]),
+ ?match({atomic, ok}, rpc:call(B, mnesia, sync_transaction, [?SDwrite({Tab, 1, updated})])),
+ mnesia_test_lib:stop_mnesia([B]),
+ ?match({atomic, ok}, rpc:call(C, mnesia, sync_transaction, [?SDwrite({Tab, 1, update_2})])),
+ ?match(ok, rpc:call(A, mnesia, start, [])),
+ ?match({timeout, [Tab]}, rpc:call(A, mnesia, wait_for_tables, [[Tab], 2000])),
+
+    %% Test 6: Force load on a table that couldn't be loaded due to the master
+    %% node settings; it loads from the other active replicas, i.e. from C
+
+ ?match(yes, rpc:call(A, mnesia, force_load_table, [Tab])),
+ ?match(ok, rpc:call(A, mnesia, wait_for_tables, [[Tab], 3000])),
+
+ ?match(ok, rpc:call(B, mnesia, start, [])),
+ ?match(ok, rpc:call(B, mnesia, wait_for_tables, [[Tab], 3000])),
+
+ ?match([{Tab, 1, update_2}], rpc:call(A, mnesia, dirty_read, [{Tab, 1}])),
+ ?match([{Tab, 1, update_2}], rpc:call(B, mnesia, dirty_read, [{Tab, 1}])),
+ ?match([{Tab, 1, update_2}], rpc:call(C, mnesia, dirty_read, [{Tab, 1}])),
+
+    %% Test 7: Master is [B] and B is down; the table should not be loaded.
+    %% A force_load when there are no active replicas available
+    %% should load the local copy of the table.
+
+ ?match(ok, rpc:call(A, mnesia, set_master_nodes, [Tab, [B]])),
+ ?match({atomic, ok}, mnesia:sync_transaction(?SDwrite({Tab, 1, init}))),
+
+ mnesia_test_lib:stop_mnesia([A]),
+ ?match({atomic, ok}, rpc:call(B, mnesia, sync_transaction, [?SDwrite({Tab, 1, updated})])),
+ mnesia_test_lib:stop_mnesia([B, C]),
+ ?match(ok, rpc:call(A, mnesia, start, [])),
+ ?match({timeout, [Tab]}, rpc:call(A, mnesia, wait_for_tables, [[Tab], 2000])),
+
+ ?match(yes, rpc:call(A, mnesia, force_load_table, [Tab])),
+ ?match([{Tab, 1, init}], rpc:call(A, mnesia, dirty_read, [{Tab, 1}])),
+
+ ?verify_mnesia([A], [B,C]).
+
+starting_master_nodes(suite) -> [];
+starting_master_nodes(doc) ->
+    ["Complementary to Tests 5 and 6 above: if the master node (B) starts"
+     " and loads the table, it should also be loaded on the waiting node (A)."];
+starting_master_nodes(Config) when is_list(Config) ->
+ [A, B, C] = Nodes = ?acquire_nodes(3, Config),
+ Tab = starting_master_nodes,
+ ?match({atomic,ok}, mnesia:create_table(Tab, [{disc_copies, Nodes}])),
+ %% Start by checking TEST 5 above.
+
+ ?match(ok, rpc:call(A, mnesia, set_master_nodes, [Tab, [B]])),
+ ?match({atomic, ok}, mnesia:sync_transaction(?SDwrite({Tab, 1, init}))),
+ mnesia_test_lib:stop_mnesia([A]),
+ ?match({atomic, ok}, rpc:call(B, mnesia, sync_transaction, [?SDwrite({Tab, 1, updated})])),
+ mnesia_test_lib:stop_mnesia([B]),
+ ?match({atomic, ok}, rpc:call(C, mnesia, sync_transaction, [?SDwrite({Tab, 1, update_2})])),
+
+ ?match(ok, rpc:call(A, mnesia, start, [])),
+ ?match({timeout, [Tab]}, rpc:call(A, mnesia, wait_for_tables, [[Tab], 2000])),
+ %% Start the B node and the table should be loaded on A!
+ ?match(ok, rpc:call(B, mnesia, start, [])),
+ ?match(ok, rpc:call(B, mnesia, wait_for_tables, [[Tab], 3000])),
+ ?match(ok, rpc:call(A, mnesia, wait_for_tables, [[Tab], 3000])),
+
+ ?verify_mnesia([A,B,C], []).
+
+
+master_on_non_local_tables(suite) -> [];
+master_on_non_local_tables(Config) when is_list(Config) ->
+ [A, B, C] = Nodes = ?acquire_nodes(3, Config),
+ Tab = test_table_non_local,
+ ?match({atomic,ok}, mnesia:create_table(Tab, [{disc_copies, [B, C]}])),
+
+ ?match(ok, rpc:call(A, mnesia, set_master_nodes, [Tab, [B]])),
+ ?match({atomic, ok}, mnesia:sync_transaction(?SDwrite({Tab, 1, init}))),
+
+ %% Test 1: Test that table info are updated when master node comes up
+
+ mnesia_test_lib:stop_mnesia([A, B]),
+ ?match({atomic, ok}, rpc:call(C, mnesia, sync_transaction, [?SDwrite({Tab, 1, updated})])),
+ ?match(ok, rpc:call(A, mnesia, start, [])),
+
+ ?match({timeout, [Tab]}, rpc:call(A, mnesia, wait_for_tables, [[Tab], 2000])),
+ ErrorRead = {badrpc,{'EXIT', {aborted,{no_exists,[test_table_non_local,1]}}}},
+ ErrorWrite = {badrpc,{'EXIT', {aborted,{no_exists,test_table_non_local}}}},
+ ?match(ErrorRead, rpc:call(A, mnesia, dirty_read, [{Tab, 1}])),
+ ?match(ErrorWrite, rpc:call(A, mnesia, dirty_write, [{Tab, 1, updated_twice}])),
+
+ ?match(ok, rpc:call(B, mnesia, start, [])),
+ ?match(ok, rpc:call(A, mnesia, wait_for_tables, [[Tab], 2000])),
+
+ ?match([{Tab, 1, updated}], rpc:call(A, mnesia, dirty_read, [{Tab, 1}])),
+ ?match(B, rpc:call(A, mnesia, table_info, [Tab, where_to_read])),
+ ?match({atomic, ok}, rpc:call(A, mnesia, sync_transaction, [?SDwrite({Tab, 1, init})])),
+
+ %% Test 2: Test that table info are updated after force_load
+
+ mnesia_test_lib:stop_mnesia([A, B]),
+ ?match({atomic, ok}, rpc:call(C, mnesia, sync_transaction, [?SDwrite({Tab, 1, updated})])),
+ ?match(ok, rpc:call(A, mnesia, start, [])),
+
+ ?match({timeout, [Tab]}, rpc:call(A, mnesia, wait_for_tables, [[Tab], 2000])),
+ ?match(yes, rpc:call(A, mnesia, force_load_table, [Tab])),
+ ?match(C, rpc:call(A, mnesia, table_info, [Tab, where_to_read])),
+
+ ?match([{Tab, 1, updated}], rpc:call(A, mnesia, dirty_read, [{Tab, 1}])),
+ ?match({atomic, ok}, rpc:call(A, mnesia, sync_transaction, [?SDwrite({Tab, 1, updated_twice})])),
+
+ ?match(ok, rpc:call(B, mnesia, start, [])),
+ ?match(ok, rpc:call(B, mnesia, wait_for_tables, [[Tab], 10000])),
+
+ ?match([{Tab, 1, updated_twice}], rpc:call(A, mnesia, dirty_read, [{Tab, 1}])),
+ ?match([{Tab, 1, updated_twice}], rpc:call(B, mnesia, dirty_read, [{Tab, 1}])),
+ ?match([{Tab, 1, updated_twice}], rpc:call(C, mnesia, dirty_read, [{Tab, 1}])),
+
+ ?verify_mnesia(Nodes, []).
+
+remote_force_load_with_local_master_node(doc) ->
+ ["Force load a table on a remote node while the ",
+ "local node is down. Start the local node and ",
+     "verify that the table is loaded from disc locally "
+ "if the local node has itself as master node and ",
+ "the remote node has both the local and remote node ",
+ "as master nodes"];
+remote_force_load_with_local_master_node(suite) -> [];
+remote_force_load_with_local_master_node(Config) when is_list(Config) ->
+ [A, B] = Nodes = ?acquire_nodes(2, Config),
+
+ Tab = remote_force_load_with_local_master_node,
+ ?match({atomic,ok}, mnesia:create_table(Tab, [{disc_copies, Nodes}])),
+ ?match(ok, rpc:call(A, mnesia, set_master_nodes, [Tab, [A, B]])),
+ ?match(ok, rpc:call(B, mnesia, set_master_nodes, [Tab, [B]])),
+
+ W = fun(Who) -> mnesia:write({Tab, who, Who}) end,
+ ?match({atomic, ok}, rpc:call(A,mnesia, sync_transaction, [W, [a]])),
+ ?match(stopped, rpc:call(A, mnesia, stop, [])),
+ ?match({atomic, ok}, rpc:call(B, mnesia, sync_transaction, [W, [b]])),
+ ?match(stopped, rpc:call(B, mnesia, stop, [])),
+
+ ?match(ok, rpc:call(A, mnesia, start, [])),
+ ?match(ok, rpc:call(A, mnesia, wait_for_tables, [[Tab], 3000])),
+ ?match([{Tab, who, a}], rpc:call(A, mnesia, dirty_read, [{Tab, who}])),
+
+ ?match(ok, rpc:call(B, mnesia, start, [])),
+ ?match(ok, rpc:call(B, mnesia, wait_for_tables, [[Tab], 3000])),
+ ?match([{Tab, who, b}], rpc:call(B, mnesia, dirty_read, [{Tab, who}])),
+
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+durability_of_dump_tables(doc) ->
+ [ "Verify that all tables contain the correct data when Mnesia",
+ "is restarted and tables are loaded from disc to recover",
+ " their previous contents. " ];
+durability_of_dump_tables(suite) -> [dump_ram_copies,
+ dump_disc_copies,
+ dump_disc_only].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+dump_ram_copies(doc) ->
+    ["Check that ram_copies tables are loaded with the "
+ "contents that had been dumped before Mnesia",
+ "was restarted. " ];
+dump_ram_copies(suite) -> [];
+dump_ram_copies(Config) when is_list(Config) ->
+ Nodes = ?acquire_nodes(3, Config),
+ {success, [P1,P2,P3]} = ?start_activities(Nodes),
+
+ NP1 = node(P1),
+ NP2 = node(P2),
+
+ {A,B,C} = case node() of
+ NP1 ->
+ %?verbose("first case ~n"),
+ {P3,P2,P1};
+ NP2 ->
+ %?verbose("second case ~n"),
+ {P3,P1,P2};
+ _ ->
+ {P1,P2,P3}
+ end,
+
+ Node1 = node(A),
+ Node2 = node(B),
+ Node3 = node(C),
+
+ ?verbose(" A pid:~p node:~p ~n",[A,Node1]),
+ ?verbose(" B pid:~p node:~p ~n",[B,Node2]),
+ ?verbose(" C pid:~p node:~p ~n",[C,Node3]),
+
+
+ %% ram copies table on 2 nodes
+
+ Tab = dump_table,
+ Def = [{attributes, [key, value]},
+ {ram_copies, [Node1,Node2]}],
+
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+
+ ?match(ok, mnesia:dirty_write({Tab, 1, 4711})),
+ ?match(ok, mnesia:dirty_write({Tab, 2, 42})),
+ ?match(ok, mnesia:dirty_write({Tab, 3, 256})),
+
+ %% dump the table
+
+ ?match( {atomic,ok}, mnesia:dump_tables([Tab])),
+
+    %% perform updates (they shall be lost when Mnesia is killed)
+
+ ?match(ok, mnesia:dirty_write({Tab, 1, 815})),
+ ?match(ok, mnesia:dirty_write({Tab, 2, 915})),
+
+ %% add another replica on node3
+ mnesia:add_table_copy(Tab,Node3,ram_copies),
+
+ %% all 3 replicas shall have the new contents
+ cross_check_tables([A,B,C],Tab,
+ {[{Tab,1,815}],[{Tab,2,915}],[{Tab,3,256}]}),
+
+ %% kill mnesia on node 3
+ mnesia_test_lib:kill_mnesia([Node3]),
+
+ %% wait a while, so that mnesia is really down
+ timer:sleep(timer:seconds(2)),
+
+ mnesia_test_lib:kill_mnesia([Node1,Node2]), %% kill them as well
+ timer:sleep(timer:seconds(2)),
+
+ %% start Mnesia only on node 3
+ ?verbose("starting mnesia on Node3~n",[]),
+
+    %% test_lib:mnesia_start doesn't work, because it waits
+ %% for the schema on all nodes ... ???
+ ?match(ok,rpc:call(Node3,mnesia,start,[]) ),
+ ?match(ok,rpc:call(Node3,mnesia,wait_for_tables,
+ [[Tab],timer:seconds(30)] ) ),
+
+    %% node3 shall have the contents of the dump
+ cross_check_tables([C],Tab,{[{Tab,1,4711}],[{Tab,2,42}],[{Tab,3,256}]}),
+
+ %% start Mnesia on the other 2 nodes, too
+ mnesia_test_lib:start_mnesia([Node1,Node2],[Tab]),
+
+ cross_check_tables([A,B,C],Tab,
+ {[{Tab,1,4711}],[{Tab,2,42}],[{Tab,3,256}]}),
+ ?verify_mnesia(Nodes, []).
+
+%% check the contents of the table
+
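+%% Each Pid is a test activity process: it evaluates the fun it is sent and
+%% replies {Pid, Result}, which must match the expected {Val1, Val2, Val3}.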
+cross_check_tables([],_tab,_elements) -> ok;
+cross_check_tables([Pid|Rest],Tab,{Val1,Val2,Val3}) ->
+ Pid ! fun () ->
+ R1 = mnesia:dirty_read({Tab,1}),
+ R2 = mnesia:dirty_read({Tab,2}),
+ R3 = mnesia:dirty_read({Tab,3}),
+ {R1,R2,R3}
+ end,
+ ?match_receive({ Pid, {Val1, Val2, Val3 } }),
+ cross_check_tables(Rest,Tab,{Val1,Val2,Val3} ).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% Should be in evil test suite !!!
+
+dump_disc_copies(doc) ->
+ ["Check that it is not possible to dump disc_copies tables"];
+dump_disc_copies(suite) -> [];
+dump_disc_copies(Config) when is_list(Config) ->
+ do_dump_copies(Config, disc_copies).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% Should be in evil test suite !!!
+dump_disc_only(doc) ->
+ ["Check that it is not possible to dump disc_only_copies tables"];
+dump_disc_only(suite) -> [];
+dump_disc_only(Config) when is_list(Config) ->
+ do_dump_copies(Config,disc_only_copies).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+do_dump_copies(Config,Copies) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+
+ Tab = dump_copies,
+ Def = [{attributes, [key, value]},
+ {Copies, [Node1]}],
+
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+
+ ?match(ok, mnesia:dirty_write({Tab, 1, 4711})),
+ ?match(ok, mnesia:dirty_write({Tab, 2, 42})),
+ ?match(ok, mnesia:dirty_write({Tab, 3, 256})),
+
+ %% dump the table
+ ?match( {aborted, {"Only allowed on ram_copies",Tab,[Node1]}},
+ mnesia:dump_tables([Tab])),
+
+ ?match(ok, mnesia:dirty_write({Tab, 1, 815})),
+ ?match(ok, mnesia:dirty_write({Tab, 2, 915})),
+
+ %% kill mnesia on node1
+ mnesia_test_lib:kill_mnesia([Node1]),
+
+ %% wait a while, so that mnesia is really down
+ timer:sleep(timer:seconds(1)),
+
+ mnesia_test_lib:start_mnesia([Node1],[Tab]),
+
+ ?match([{Tab, 1, 815}], mnesia:dirty_read({Tab,1}) ),
+ ?match([{Tab, 2, 915}], mnesia:dirty_read({Tab,2}) ),
+ ?match([{Tab, 3, 256}], mnesia:dirty_read({Tab,3}) ),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+durability_of_disc_copies(doc) ->
+    ["Perform all possible kinds of updates on tables and check "
+ "whether no data is lost after a restart of Mnesia.",
+ "This test is done for disc_copies"];
+
+durability_of_disc_copies(suite) -> [];
+durability_of_disc_copies(Config) when is_list(Config) ->
+ do_disc_durability(Config,disc_copies).
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+durability_of_disc_only_copies(doc) ->
+    ["Perform all possible kinds of updates on tables and check "
+ "whether no data is lost after a restart of Mnesia.",
+ "This test is done for disc_only_copies"];
+durability_of_disc_only_copies(suite) -> [];
+durability_of_disc_only_copies(Config) when is_list(Config) ->
+ do_disc_durability(Config,disc_only_copies).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+do_disc_durability(Config,CopyType) ->
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(1)}]),
+ {success, [A,B,C]} = ?start_activities(Nodes),
+
+ Tab_set = disc_durability_set,
+ Def_set = [{attributes, [key, value]},
+ {CopyType, Nodes}],
+
+ Tab_bag = disc_durability_bag,
+ Def_bag = [{attributes, [key, value]},
+ {type, bag},
+ {CopyType, Nodes}],
+
+ ?match({atomic, ok}, mnesia:create_table(Tab_set, Def_set)),
+ ?match({atomic, ok}, mnesia:create_table(Tab_bag, Def_bag)),
+
+ %% do updates
+ ?match({atomic, ok},
+ mnesia:transaction(fun()->
+ mnesia:write({Tab_set, 11, 1111}),
+ mnesia:write({Tab_set, 22, 2222}),
+ mnesia:write({Tab_set, 33, 3333}),
+ mnesia:write({Tab_set, 55, 5555})
+ end)),
+ mnesia:dirty_write({Tab_set, 44, 4444}),
+
+ ?match({atomic, ok},
+ mnesia:transaction(fun()->
+ mnesia:write({Tab_bag, 11, a_1111}),
+ mnesia:write({Tab_bag, 11, b_1111}),
+ mnesia:write({Tab_bag, 22, a_2222}),
+ mnesia:write({Tab_bag, 22, b_2222}),
+ mnesia:write({Tab_bag, 33, a_3333}),
+ mnesia:write({Tab_bag, 33, b_3333})
+ end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun()-> mnesia:delete({Tab_set, 22}) end)),
+ ?match(ok, mnesia:dirty_delete({Tab_set, 33})),
+ ?match(5558, mnesia:dirty_update_counter({Tab_set, 55}, 3)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun()->
+ mnesia:delete_object({Tab_bag, 22, b_2222})
+ end)),
+ ?match(ok, mnesia:dirty_delete_object({Tab_bag, 33, b_3333})),
+ ?match(10, mnesia:dirty_update_counter({Tab_set, counter}, 10)),
+ ?match({atomic, ok}, % Also syncs update_counter
+ mnesia:sync_transaction(fun() -> mnesia:write({Tab_set,66,6666}) end)),
+
+ Updated = {[[{Tab_set,counter,10}],
+ [{Tab_set,counter,10}],
+ [{Tab_set,counter,10}]],[]},
+ ?match(Updated, rpc:multicall(Nodes, mnesia, dirty_read, [Tab_set,counter])),
+
+ %% kill mnesia on all nodes, start it again and check the data
+ mnesia_test_lib:kill_mnesia(Nodes),
+ mnesia_test_lib:start_mnesia(Nodes,[Tab_set,Tab_bag]),
+
+ ?log("Flushed ~p ~n", [mnesia_test_lib:flush()]), %% Debugging strange msgs..
+ ?log("Processes ~p ~p ~p~n", [A,B,C]),
+ check_tables([A,B,C],
+ [{Tab_set,11}, {Tab_set,22},{Tab_set,33},
+ {Tab_set,44},{Tab_set,55}, {Tab_set,66},
+ {Tab_bag,11}, {Tab_bag,22},{Tab_bag,33},
+ {Tab_set, counter}],
+ [[{Tab_set, 11, 1111}], [], [], [{Tab_set, 44, 4444}],
+ [{Tab_set, 55, 5558}], [{Tab_set, 66, 6666}],
+ lists:sort([{Tab_bag, 11, a_1111},{Tab_bag, 11, b_1111}]),
+ [{Tab_bag, 22, a_2222}], [{Tab_bag, 33, a_3333}],
+ [{Tab_set, counter, 10}]]),
+
+ timer:sleep(1000), %% Debugging strange msgs..
+ ?log("Flushed ~p ~n", [mnesia_test_lib:flush()]),
+ ?verify_mnesia(Nodes, []).
+
+%% Check the contents of the tables:
+%%
+%% every process in the pid list shall find all
+%% table entries listed in ValList
+
+check_tables([], _ValList, _ResultList) -> ok;
+check_tables([Pid|Rest],ValList,ResultList) ->
+ Pid ! fun () ->
+ check_values(ValList)
+ end,
+ ?match_receive({ Pid, ResultList }),
+ check_tables(Rest,ValList,ResultList).
+
+check_values([]) -> [];
+check_values([{Tab,Key}|Rest]) ->
+ Ret = lists:sort(mnesia:dirty_read({Tab,Key})),
+ [Ret|check_values(Rest)].
+
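+%% Illustrative sketch only (not part of the test suite): check_tables/3
+%% above assumes that each peer process started by ?start_activities
+%% evaluates the fun it receives and replies with {self(), Result}. A
+%% minimal worker loop along those lines could look like this
+%% (hypothetical helper, names are assumptions):
+
+example_worker_loop(Parent) ->
+    receive
+        Fun when is_function(Fun, 0) ->
+            %% Evaluate the received fun and send back the result,
+            %% tagged with our own pid, as ?match_receive expects.
+            Parent ! {self(), Fun()},
+            example_worker_loop(Parent);
+        stop ->
+            ok
+    end.
+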
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%
+% stolen from mnesia_recovery_test.erl:
+
+receive_messages([]) -> [];
+receive_messages(ListOfMsgs) ->
+ receive
+ timeout ->
+ case lists:member(timeout, ListOfMsgs) of
+ false ->
+ ?warning("I (~p) have received unexpected msg~n ~p ~n",
+ [self(),timeout]),
+ receive_messages(ListOfMsgs);
+ true ->
+ ?verbose("I (~p) got msg ~p ~n", [self(),timeout]),
+ [ timeout | receive_messages(ListOfMsgs -- [timeout])]
+ end;
+
+ {Pid, Msg} ->
+ case lists:member(Msg, ListOfMsgs) of
+ false ->
+ ?warning("I (~p) have received unexpected msg~n ~p ~n",
+ [self(),{Pid, Msg}]),
+ receive_messages(ListOfMsgs);
+ true ->
+ ?verbose("I (~p) got msg ~p from ~p ~n", [self(),Msg, Pid]),
+ [{Pid, Msg} | receive_messages(ListOfMsgs -- [Msg])]
+ end;
+
+ Else -> ?warning("Recevied unexpected Msg~n ~p ~n", [Else])
+ after timer:seconds(40) ->
+ ?error("Timeout in receive msgs while waiting for ~p~n",
+ [ListOfMsgs])
+ end.
+
diff --git a/lib/mnesia/test/mnesia_evil_backup.erl b/lib/mnesia/test/mnesia_evil_backup.erl
new file mode 100644
index 0000000000..bbbebeb02c
--- /dev/null
+++ b/lib/mnesia/test/mnesia_evil_backup.erl
@@ -0,0 +1,750 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1998-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+%%%----------------------------------------------------------------------
+%%% File : mnesia_evil_backup.erl
+%%% Author : Dan Gudmundsson <dgud@legolas>
+%%% Purpose : Evil backup tests
+%%% Created : 3 Jun 1998 by Dan Gudmundsson <[email protected]>
+%%%----------------------------------------------------------------------
+
+-module(mnesia_evil_backup).
+-author('[email protected]').
+-compile(export_all).
+-include("mnesia_test_lib.hrl").
+
+%%-export([Function/Arity, ...]).
+
+init_per_testcase(Func, Conf) ->
+ mnesia_test_lib:init_per_testcase(Func, Conf).
+
+fin_per_testcase(Func, Conf) ->
+ mnesia_test_lib:fin_per_testcase(Func, Conf).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+all(doc) ->
+ ["Checking all the functionality regarding ",
+ "to the backup and different ",
+ "kinds of restore and fallback interface"];
+
+all(suite) ->
+ [
+ backup,
+ bad_backup,
+ global_backup_checkpoint,
+ restore_tables,
+ traverse_backup,
+ selective_backup_checkpoint,
+ incremental_backup_checkpoint,
+%% local_backup_checkpoint,
+ install_fallback,
+ uninstall_fallback,
+ local_fallback,
+ sops_with_checkpoint
+ ].
+
+backup(doc) -> ["Checking the interface to the function backup. ",
+ "We don't check that the backups can be used here; ",
+ "that is checked in install_fallback and in restore"];
+backup(suite) -> [];
+backup(Config) when is_list(Config) ->
+ [Node1, Node2] = _Nodes = ?acquire_nodes(2, Config),
+ Tab = backup_tab,
+ Def = [{disc_copies, [Node1]}, {ram_copies, [Node2]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ ?match(ok, mnesia:dirty_write({Tab, 1, test_ok})),
+ File = "backup_test.BUP",
+ ?match(ok, mnesia:backup(File)),
+
+ File2 = "backup_test2.BUP",
+ Tab2 = backup_tab2,
+ Def2 = [{disc_only_copies, [Node2]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
+ ?match(ok, mnesia:backup(File2, mnesia_backup)),
+
+ File3 = "backup_test3.BUP",
+ mnesia_test_lib:kill_mnesia([Node2]),
+ ?match({error, _}, mnesia:backup(File3, mnesia_backup)),
+
+ ?match(ok, file:delete(File)),
+ ?match(ok, file:delete(File2)),
+ ?match({error, _}, file:delete(File3)),
+ ?verify_mnesia([Node1], [Node2]).
+
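+%% For reference (illustrative only, not a test case): mnesia:backup/1
+%% uses the configured default backup module, while mnesia:backup/2
+%% names the callback module explicitly, as exercised above.
+
+backup_with_module_example(File) ->
+    %% mnesia_backup is the default callback module shipped with Mnesia.
+    ok = mnesia:backup(File, mnesia_backup).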
+
+bad_backup(suite) -> [];
+bad_backup(Config) when is_list(Config) ->
+ [Node1] = ?acquire_nodes(1, Config),
+ Tab = backup_tab,
+ Def = [{disc_copies, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ ?match(ok, mnesia:dirty_write({Tab, 1, test_ok})),
+ File = "backup_test.BUP",
+ ?match(ok, mnesia:backup(File)),
+ file:write_file(File, "trash", [append]),
+ ?match(ok, mnesia:dirty_write({Tab, 1, test_bad})),
+ ?match({atomic,[Tab]}, mnesia:restore(File, [{clear_tables, [Tab]}])),
+ ?match([{Tab,1,test_ok}], mnesia:dirty_read(Tab, 1)),
+
+ ?match(ok, file:delete(File)),
+ ?verify_mnesia([Node1], []).
+
+
+
+global_backup_checkpoint(doc) ->
+ ["Checking the interface to the function backup_checkpoint",
+ "We don't check that the backups can be used here",
+ "That is checked in install_fallback and in restore"];
+global_backup_checkpoint(suite) -> [];
+global_backup_checkpoint(Config) when is_list(Config) ->
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+ Tab = backup_cp,
+ Def = [{disc_copies, [Node1]}, {ram_copies, [Node2]}],
+ File = "backup_checkpoint.BUP",
+ File2 = "backup_checkpoint2.BUP",
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ ?match(ok, mnesia:dirty_write({Tab, 1, test_ok})),
+ ?match({error, _}, mnesia:backup_checkpoint(cp_name, File)),
+ Spec = [{name, cp_name}, {max, mnesia:system_info(tables)}],
+ ?match({ok, _Name, _Ns}, mnesia:activate_checkpoint(Spec)),
+ ?match(ok, mnesia:backup_checkpoint(cp_name, File)),
+ ?match({error, _}, mnesia:backup_checkpoint(cp_name_nonexist, File)),
+ ?match(ok, mnesia:backup_checkpoint(cp_name, File2, mnesia_backup)),
+ ?match({error, _}, file:delete(File)),
+ ?match(ok, file:delete(File2)),
+ ?verify_mnesia(Nodes, []).
+
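+%% Minimal sketch of the checkpoint/backup sequence exercised above
+%% (illustrative only, not used by any test case; the checkpoint name
+%% and file name are assumptions):
+
+backup_via_checkpoint_example(File) ->
+    Spec = [{name, example_cp}, {max, mnesia:system_info(tables)}],
+    {ok, CpName, _Nodes} = mnesia:activate_checkpoint(Spec),
+    ok = mnesia:backup_checkpoint(CpName, File),
+    ok = mnesia:deactivate_checkpoint(CpName).
+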
+restore_tables(doc) ->
+ ["Tests the interface of restore"];
+
+restore_tables(suite) ->
+ [
+ restore_errors,
+ restore_clear,
+ restore_keep,
+ restore_recreate,
+ restore_clear_ram
+ ].
+
+restore_errors(suite) -> [];
+restore_errors(Config) when is_list(Config) ->
+ [_Node] = ?acquire_nodes(1, Config),
+ ?match({aborted, enoent}, mnesia:restore(notAfile, [])),
+ ?match({aborted, {badarg, _}}, mnesia:restore(notAfile, not_a_list)),
+ ?match({aborted, {badarg, _}}, mnesia:restore(notAfile, [test_badarg])),
+ ?match({aborted, {badarg, _}}, mnesia:restore(notAfile, [{test_badarg, xxx}])),
+ ?match({aborted, {badarg, _}}, mnesia:restore(notAfile, [{skip_tables, xxx}])),
+ ?match({aborted, {badarg, _}}, mnesia:restore(notAfile, [{recreate_tables, [schema]}])),
+ ?match({aborted, {badarg, _}}, mnesia:restore(notAfile, [{default_op, asdklasd}])),
+ ok.
+
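+%% Illustrative sketch (not a test case): mnesia:restore/2 takes a backup
+%% file and an option list. The cases below exercise the per-table
+%% operations clear_tables, keep_tables and recreate_tables together with
+%% skip_tables and default_op; a typical call looks like this:
+
+restore_example(File, Tab) ->
+    mnesia:restore(File, [{clear_tables, [Tab]},
+                          {default_op, skip_tables}]).
+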
+restore_clear(suite) -> [];
+restore_clear(Config) when is_list(Config) ->
+ restore(Config, clear_tables).
+
+restore_keep(suite) -> [];
+restore_keep(Config) when is_list(Config) ->
+ restore(Config, keep_tables).
+
+restore_recreate(suite) -> [];
+restore_recreate(Config) when is_list(Config) ->
+ restore(Config, recreate_tables).
+
+check_tab(Records, Line) ->
+ Verify = fun({Table, Key, Val}) ->
+ case catch mnesia:dirty_read({Table, Key}) of
+ [{Table, Key, Val}] -> ok;
+ Else ->
+ mnesia_test_lib:error("Not matching on Node ~p ~n"
+ " Expected ~p~n Actual ~p~n",
+ [node(), {Table, Key, Val}, Else],
+ ?MODULE, Line),
+ exit(error)
+ end;
+ (Recs) ->
+ [{Tab, Key, _}, _] = Recs,
+ SRecs = lists:sort(Recs),
+ R_Recs = lists:sort(catch mnesia:dirty_read({Tab, Key})),
+ case R_Recs of
+ SRecs -> ok;
+ Else ->
+ mnesia_test_lib:error("Not matching on Node ~p ~n"
+ " Expected ~p~n Actual ~p~n",
+ [node(), SRecs, Else],
+ ?MODULE, Line),
+ exit(error)
+ end
+ end,
+ lists:foreach(Verify, Records).
+
+restore(Config, Op) ->
+ [Node1, Node2, _Node3] = Nodes = ?acquire_nodes(3, Config),
+
+ Tab1 = ram_snmp,
+ Def1 = [{snmp, [{key, integer}]}, {ram_copies, [Node1]}],
+ Tab2 = disc_index,
+ Def2 = [{index, [val]}, {disc_copies, [Node1, Node2]}],
+ Tab3 = dionly_bag,
+ Def3 = [{type, bag}, {disc_only_copies, Nodes}],
+ ?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
+ ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
+ ?match({atomic, ok}, mnesia:create_table(Tab3, Def3)),
+
+ File1 = "restore1.BUP",
+ File2 = "restore2.BUP",
+
+ Restore = fun(O, A) ->
+ case mnesia:restore(O, A) of
+ {atomic, Tabs} when is_list(Tabs) -> {atomic, lists:sort(Tabs)};
+ Other -> Other
+ end
+ end,
+ Tabs = lists:sort([Tab1, Tab2, Tab3]),
+
+ [mnesia:dirty_write({Tab1, N, N+42}) || N <- lists:seq(1, 10)],
+ [mnesia:dirty_write({Tab2, N, N+43}) || N <- lists:seq(1, 10)],
+ [mnesia:dirty_write({Tab3, N, N+44}) || N <- lists:seq(1, 10)],
+
+ Res1 = [{Tab1, N, N+42} || N <- lists:seq(1, 10)],
+ Res2 = [{Tab2, N, N+43} || N <- lists:seq(1, 10)],
+ Res3 = [{Tab3, N, N+44} || N <- lists:seq(1, 10)],
+
+ {ok, Name, _} = mnesia:activate_checkpoint([{min, Tabs}, {ram_overrides_dump, true}]),
+ file:delete(File1),
+
+ %% Test standard Restore on one table on one node
+ ?match(ok, mnesia:backup_checkpoint(Name, File1)),
+ ?match(ok, mnesia:deactivate_checkpoint(Name)),
+ ?match(ok, mnesia:backup(File2)),
+ [mnesia:dirty_write({Tab1, N, N+1}) || N <- lists:seq(1, 11)],
+ [mnesia:dirty_write({Tab2, N, N+1}) || N <- lists:seq(1, 11)],
+ [mnesia:dirty_write({Tab3, N, N+1}) || N <- lists:seq(1, 11)],
+ _Res11 = [{Tab1, N, N+1} || N <- lists:seq(1, 11)],
+ Res21 = [{Tab2, N, N+1} || N <- lists:seq(1, 11)],
+ Res31 = [[{Tab3, N, N+1}, {Tab3, N, N+44}] || N <- lists:seq(1, 10)],
+
+ ?match({atomic, [Tab1]}, Restore(File1, [{Op, [Tab1]},
+ {skip_tables, Tabs -- [Tab1]}])),
+ case Op of
+ keep_tables ->
+ ?match([{Tab1, 11, 12}], mnesia:dirty_read({Tab1, 11}));
+ clear_tables ->
+ ?match([], mnesia:dirty_read({Tab1, 11}));
+ recreate_tables ->
+ ?match([], mnesia:dirty_read({Tab1, 11}))
+ end,
+ [rpc:call(Node, ?MODULE, check_tab, [Res1, ?LINE]) || Node <- Nodes],
+ [rpc:call(Node, ?MODULE, check_tab, [Res21, ?LINE]) || Node <- Nodes],
+ [rpc:call(Node, ?MODULE, check_tab, [Res31, ?LINE]) || Node <- Nodes],
+
+ %% Restore all tables on their nodes
+ mnesia_schema:clear_table(Tab1),
+ mnesia_schema:clear_table(Tab2),
+ mnesia_schema:clear_table(Tab3),
+ [mnesia:dirty_write({Tab1, N, N+1}) || N <- lists:seq(1, 11)],
+ [mnesia:dirty_write({Tab2, N, N+1}) || N <- lists:seq(1, 11)],
+ [mnesia:dirty_write({Tab3, N, N+1}) || N <- lists:seq(1, 11)],
+
+ ?match({atomic, ok}, mnesia:del_table_copy(Tab2, Node1)),
+
+ ?match({ok, Node1}, mnesia:subscribe({table, Tab1})),
+
+ ?match({atomic, Tabs}, Restore(File1, [{default_op, Op},
+ {module, mnesia_backup}])),
+ case Op of
+ clear_tables ->
+ ?match_receive({mnesia_table_event, {delete, {schema, Tab1}, _}}),
+ ?match_receive({mnesia_table_event, {write, {schema, Tab1, _}, _}}),
+ check_subscr(Tab1),
+ [rpc:call(Node, ?MODULE, check_tab, [Res1, ?LINE]) || Node <- Nodes],
+ [rpc:call(Node, ?MODULE, check_tab, [Res2, ?LINE]) || Node <- Nodes],
+ [rpc:call(Node, ?MODULE, check_tab, [Res3, ?LINE]) || Node <- Nodes],
+ ?match([], mnesia:dirty_read({Tab1, 11})),
+ ?match([], mnesia:dirty_read({Tab2, 11})),
+ ?match([], mnesia:dirty_read({Tab3, 11})),
+ %% Check Index
+ ?match([{Tab2, 10, 53}], mnesia:dirty_index_read(Tab2, 53, val)),
+ ?match([], mnesia:dirty_index_read(Tab2, 11, val)),
+ %% Check Snmp
+ ?match({ok, [1]}, mnesia:snmp_get_next_index(Tab1,[])),
+ ?match({ok, {Tab1, 1, 43}}, mnesia:snmp_get_row(Tab1, [1])),
+ ?match(undefined, mnesia:snmp_get_row(Tab1, [11])),
+ %% Check schema info
+ ?match([Node2], mnesia:table_info(Tab2, where_to_write));
+ keep_tables ->
+ check_subscr(Tab1),
+ [rpc:call(Node, ?MODULE, check_tab, [Res1, ?LINE]) || Node <- Nodes],
+ [rpc:call(Node, ?MODULE, check_tab, [Res2, ?LINE]) || Node <- Nodes],
+ [rpc:call(Node, ?MODULE, check_tab, [Res31, ?LINE]) || Node <- Nodes],
+ ?match([{Tab1, 11, 12}], mnesia:dirty_read({Tab1, 11})),
+ ?match([{Tab2, 11, 12}], mnesia:dirty_read({Tab2, 11})),
+ ?match([{Tab3, 11, 12}], mnesia:dirty_read({Tab3, 11})),
+ ?match([{Tab2, 10, 53}], mnesia:dirty_index_read(Tab2, 53, val)),
+ %% Check Index
+ ?match([], mnesia:dirty_index_read(Tab2, 11, val)),
+ ?match({ok, [1]}, mnesia:snmp_get_next_index(Tab1,[])),
+ %% Check Snmp
+ ?match({ok, {Tab1, 1, 43}}, mnesia:snmp_get_row(Tab1, [1])),
+ ?match({ok, {Tab1, 11, 12}}, mnesia:snmp_get_row(Tab1, [11])),
+ %% Check schema info
+ ?match([Node2], mnesia:table_info(Tab2, where_to_write));
+ recreate_tables ->
+ check_subscr(Tab1, 0),
+ [rpc:call(Node, ?MODULE, check_tab, [Res1, ?LINE]) || Node <- Nodes],
+ [rpc:call(Node, ?MODULE, check_tab, [Res2, ?LINE]) || Node <- Nodes],
+ [rpc:call(Node, ?MODULE, check_tab, [Res3, ?LINE]) || Node <- Nodes],
+ ?match([], mnesia:dirty_read({Tab1, 11})),
+ ?match([], mnesia:dirty_read({Tab2, 11})),
+ ?match([], mnesia:dirty_read({Tab3, 11})),
+ %% Check Index
+ ?match([{Tab2, 10, 53}], mnesia:dirty_index_read(Tab2, 53, val)),
+ ?match([], mnesia:dirty_index_read(Tab2, 11, val)),
+ %% Check Snmp
+ ?match({ok, [1]}, mnesia:snmp_get_next_index(Tab1,[])),
+ ?match({ok, {Tab1, 1, 43}}, mnesia:snmp_get_row(Tab1, [1])),
+ ?match(undefined, mnesia:snmp_get_row(Tab1, [11])),
+ %% Check schema info
+ Ns = lists:sort([Node1, Node2]),
+ ?match(Ns, lists:sort(mnesia:table_info(Tab2, where_to_write)))
+ end,
+ ?match(ok, file:delete(File1)),
+ ?match(ok, file:delete(File2)),
+ ?verify_mnesia(Nodes, []).
+
+
+check_subscr(Tab) ->
+ check_subscr(Tab, 10).
+
+check_subscr(_Tab, 0) ->
+ receive
+ Msg ->
+ ?error("Too many msgs ~p~n", [Msg])
+ after 500 ->
+ ok
+ end;
+check_subscr(Tab, N) ->
+ V = N +42,
+ receive
+ {mnesia_table_event, {write, {Tab, N, V}, _}} ->
+ check_subscr(Tab, N-1)
+ after 500 ->
+ ?error("Missing ~p~n", [{Tab, N, V}])
+ end.
+
+restore_clear_ram(suite) -> [];
+restore_clear_ram(Config) when is_list(Config) ->
+ Nodes = ?acquire_nodes(3, [{diskless, true}|Config]),
+
+ ?match({atomic, ok}, mnesia:create_table(a, [{ram_copies, Nodes}])),
+
+ Write = fun(What) ->
+ mnesia:write({a,1,What}),
+ mnesia:write({a,2,What}),
+ mnesia:write({a,3,What})
+ end,
+ Bup = "restore_clear_ram.BUP",
+
+ ?match({atomic, ok}, mnesia:transaction(Write, [initial])),
+ ?match({ok, _, _}, mnesia:activate_checkpoint([{name,test},
+ {min, [schema, a]},
+ {ram_overrides_dump, true}])),
+ ?match(ok, mnesia:backup_checkpoint(test, Bup)),
+
+ ?match({atomic, ok}, mnesia:transaction(Write, [data])),
+ ?match({atomic, [a]}, mnesia:restore(Bup, [{clear_tables,[a]},{default_op,skip_tables}])),
+
+ restore_clear_ram_loop(100, Nodes, Bup),
+
+ ok.
+
+restore_clear_ram_loop(N, Nodes = [N1,N2,N3], Bup) when N > 0 ->
+ ?match([], mnesia_test_lib:stop_mnesia(Nodes)),
+ ?match({_, []}, rpc:multicall([N1,N2], mnesia, start, [[{extra_db_nodes, Nodes}]])),
+ Key = rpc:async_call(N3, mnesia, start, [[{extra_db_nodes, Nodes}]]),
+ ?match({atomic, ok}, mnesia:create_table(a, [{ram_copies, Nodes}])),
+ ?match({atomic, [a]}, mnesia:restore(Bup, [{clear_tables,[a]},{default_op,skip_tables}])),
+ ?match(ok, rpc:yield(Key)),
+ ?match(ok, rpc:call(N3, mnesia, wait_for_tables, [[a], 3000])),
+ case rpc:multicall(Nodes, mnesia, table_info, [a,size]) of
+ {[3,3,3], []} ->
+ restore_clear_ram_loop(N-1, Nodes, Bup);
+ Error ->
+ ?match(3, Error)
+ end;
+restore_clear_ram_loop(_,_,_) ->
+ ok.
+
+traverse_backup(doc) ->
+ ["Testing the traverse_backup interface, the resulting file is not tested though",
+ "See install_fallback for result using the output file from traverse_backup",
+ "A side effect is that the backup file contents are tested"];
+traverse_backup(suite) -> [];
+traverse_backup(Config) when is_list(Config) ->
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+ Tab = backup_tab,
+ Def = [{disc_copies, [Node1]}, {ram_copies, [Node2]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ ?match(ok, mnesia:dirty_write({Tab, 1, test_nok})),
+ ?match(ok, mnesia:dirty_write({Tab, 2, test_nok})),
+ ?match(ok, mnesia:dirty_write({Tab, 3, test_nok})),
+ ?match(ok, mnesia:dirty_write({Tab, 4, test_nok})),
+ ?match(ok, mnesia:dirty_write({Tab, 5, test_nok})),
+ File = "_treverse_backup.BUP",
+ File2 = "traverse_backup2.BUP",
+ File3 = "traverse_backup3.BUP",
+ ?match(ok, mnesia:backup(File)),
+
+ Fun = fun({backup_tab, N, _}, Acc) -> {[{backup_tab, N, test_ok}], Acc+1};
+ (Other, Acc) -> {[Other], Acc}
+ end,
+
+ ?match({ok, 5}, mnesia:traverse_backup(File, read_only, Fun, 0)),
+ ?match(ok, file:delete(read_only)),
+
+ ?match({ok, 5}, mnesia:traverse_backup(File, mnesia_backup,
+ dummy, read_only, Fun, 0)),
+
+ ?match({ok, 5}, mnesia:traverse_backup(File, File2, Fun, 0)),
+ ?match({ok, 5}, mnesia:traverse_backup(File2, mnesia_backup,
+ File3, mnesia_backup, Fun, 0)),
+
+ BadFun = fun({bad_tab, _N, asd}, Acc) -> {{error, error}, Acc} end,
+ ?match({error, _}, mnesia:traverse_backup(File, read_only, BadFun, 0)),
+ ?match({error, _}, file:delete(read_only)),
+ ?match(ok, file:delete(File)),
+ ?match(ok, file:delete(File2)),
+ ?match(ok, file:delete(File3)),
+ ?verify_mnesia(Nodes, []).
+
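+%% For reference (illustrative only, not a test case): the 4-arity call
+%% above uses the default backup module for both source and target,
+%% while the 6-arity call names the modules explicitly. A plain copy of
+%% a backup that also counts the records could be written as:
+
+traverse_copy_example(Src, Dst) ->
+    CopyFun = fun(Rec, Count) -> {[Rec], Count + 1} end,
+    {ok, N} = mnesia:traverse_backup(Src, mnesia_backup,
+                                     Dst, mnesia_backup, CopyFun, 0),
+    N.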
+
+install_fallback(doc) ->
+ ["This tests the install_fallback intf.",
+ "It also verifies that the output from backup_checkpoint and traverse_backup",
+ "is valid"];
+install_fallback(suite) -> [];
+install_fallback(Config) when is_list(Config) ->
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+ Tab = fallbacks_test,
+ Def = [{disc_copies, [Node1]}, {ram_copies, [Node2]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ ?match(ok, mnesia:dirty_write({Tab, 1, test_nok})),
+ ?match(ok, mnesia:dirty_write({Tab, 2, test_nok})),
+ ?match(ok, mnesia:dirty_write({Tab, 3, test_nok})),
+ ?match(ok, mnesia:dirty_write({Tab, 4, test_nok})),
+ ?match(ok, mnesia:dirty_write({Tab, 5, test_nok})),
+
+ Tab2 = fallbacks_test2,
+ Def2 = [{disc_copies, [node()]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
+ Tab3 = fallbacks_test3,
+ ?match({atomic, ok}, mnesia:create_table(Tab3, Def2)),
+ Fun2 = fun(Key) ->
+ Rec = {Tab2, Key, test_ok},
+ mnesia:dirty_write(Rec),
+ [Rec]
+ end,
+ TabSize3 = 1000,
+ OldRecs2 = [Fun2(K) || K <- lists:seq(1, TabSize3)],
+
+ Spec =[{name, cp_name}, {max, mnesia:system_info(tables)}],
+ ?match({ok, _Name, Nodes}, mnesia:activate_checkpoint(Spec)),
+ ?match(ok, mnesia:dirty_write({Tab, 6, test_nok})),
+ [mnesia:dirty_write({Tab2, K, test_nok}) || K <- lists:seq(1, TabSize3 + 10)],
+ File = "install_fallback.BUP",
+ File2 = "install_fallback2.BUP",
+ File3 = "install_fallback3.BUP",
+ ?match(ok, mnesia:backup_checkpoint(cp_name, File)),
+
+ Fun = fun({T, N, _}, Acc) when T == Tab ->
+ case N rem 2 of
+ 0 ->
+ io:format("write ~p -> ~p~n", [N, T]),
+ {[{T, N, test_ok}], Acc + 1};
+ 1 ->
+ io:format("write ~p -> ~p~n", [N, Tab3]),
+ {[{Tab3, N, test_ok}], Acc + 1}
+ end;
+ ({T, N}, Acc) when T == Tab ->
+ case N rem 2 of
+ 0 ->
+ io:format("delete ~p -> ~p~n", [N, T]),
+ {[{T, N}], Acc + 1};
+ 1 ->
+ io:format("delete ~p -> ~p~n", [N, Tab3]),
+ {[{Tab3, N}], Acc + 1}
+ end;
+ (Other, Acc) ->
+ {[Other], Acc}
+ end,
+ ?match({ok, _}, mnesia:traverse_backup(File, File2, Fun, 0)),
+ ?match(ok, mnesia:install_fallback(File2)),
+
+ mnesia_test_lib:kill_mnesia([Node1, Node2]),
+ timer:sleep(timer:seconds(1)), % Let it die!
+
+ ?match([], mnesia_test_lib:start_mnesia([Node1, Node2], [Tab, Tab2, Tab3])),
+
+ % Verify
+ ?match([], mnesia:dirty_read({Tab, 1})),
+ ?match([{Tab3, 1, test_ok}], mnesia:dirty_read({Tab3, 1})),
+ ?match([{Tab, 2, test_ok}], mnesia:dirty_read({Tab, 2})),
+ ?match([], mnesia:dirty_read({Tab3, 2})),
+ ?match([], mnesia:dirty_read({Tab, 3})),
+ ?match([{Tab3, 3, test_ok}], mnesia:dirty_read({Tab3, 3})),
+ ?match([{Tab, 4, test_ok}], mnesia:dirty_read({Tab, 4})),
+ ?match([], mnesia:dirty_read({Tab3, 4})),
+ ?match([], mnesia:dirty_read({Tab, 5})),
+ ?match([{Tab3, 5, test_ok}], mnesia:dirty_read({Tab3, 5})),
+ ?match([], mnesia:dirty_read({Tab, 6})),
+ ?match([], mnesia:dirty_read({Tab3, 6})),
+ ?match([], [mnesia:dirty_read({Tab2, K}) || K <- lists:seq(1, TabSize3)] -- OldRecs2),
+ ?match(TabSize3, mnesia:table_info(Tab2, size)),
+
+ % Check the interface
+ file:delete(File3),
+ ?match({error, _}, mnesia:install_fallback(File3)),
+ ?match({error, _}, mnesia:install_fallback(File2, mnesia_badmod)),
+ ?match(ok, mnesia:install_fallback(File2, mnesia_backup)),
+ ?match(ok, file:delete(File)),
+ ?match(ok, file:delete(File2)),
+ ?match({error, _}, file:delete(File3)),
+ ?verify_mnesia(Nodes, []).
+
+uninstall_fallback(suite) -> [];
+uninstall_fallback(Config) when is_list(Config) ->
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+ Tab = uinst_fallbacks_test,
+ File = "uinst_fallback.BUP",
+ File2 = "uinst_fallback2.BUP",
+ Def = [{disc_copies, [Node1]}, {ram_copies, [Node2]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ ?match(ok, mnesia:dirty_write({Tab, 1, test_ok})),
+ ?match(ok, mnesia:backup(File)),
+ Fun = fun({T, N, _}, Acc) when T == Tab ->
+ {[{T, N, test_nok}], Acc+1};
+ (Other, Acc) -> {[Other], Acc}
+ end,
+ ?match({ok, _}, mnesia:traverse_backup(File, File2, Fun, 0)),
+ ?match({error, enoent}, mnesia:uninstall_fallback()),
+ ?match(ok, mnesia:install_fallback(File2)),
+ ?match(ok, file:delete(File)),
+ ?match(ok, file:delete(File2)),
+ ?match(ok, mnesia:uninstall_fallback()),
+
+ mnesia_test_lib:kill_mnesia([Node1, Node2]),
+ timer:sleep(timer:seconds(1)), % Let it die!
+ ?match([], mnesia_test_lib:start_mnesia([Node1, Node2], [Tab])),
+ ?match([{Tab, 1, test_ok}], mnesia:dirty_read({Tab, 1})),
+ ?verify_mnesia(Nodes, []).
+
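+%% Minimal sketch of the fallback workflow verified above (illustrative
+%% only; the file name is an assumption). A fallback installed from a
+%% backup file takes effect the next time Mnesia is started, and
+%% uninstall_fallback/0 cancels a pending fallback:
+
+fallback_example(File) ->
+    ok = mnesia:backup(File),
+    ok = mnesia:install_fallback(File),
+    ok = mnesia:uninstall_fallback().
+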
+local_fallback(suite) -> [];
+local_fallback(Config) when is_list(Config) ->
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+ Tab = local_fallback,
+ File = "local_fallback.BUP",
+ Def = [{disc_copies, Nodes}],
+ Key = foo,
+ Pre = {Tab, Key, pre},
+ Post = {Tab, Key, post},
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ ?match(ok, mnesia:dirty_write(Pre)),
+ ?match(ok, mnesia:backup(File)),
+ ?match(ok, mnesia:dirty_write(Post)),
+ Local = [{scope, local}],
+ ?match({error, enoent}, mnesia:uninstall_fallback(Local)),
+ ?match(ok, mnesia:install_fallback(File, Local)),
+ ?match(true, mnesia:system_info(fallback_activated)),
+ ?match(ok, mnesia:uninstall_fallback(Local)),
+ ?match(false, mnesia:system_info(fallback_activated)),
+ ?match(ok, mnesia:install_fallback(File, Local)),
+ ?match(true, mnesia:system_info(fallback_activated)),
+
+ ?match(false, rpc:call(Node2, mnesia, system_info , [fallback_activated])),
+ ?match(ok, rpc:call(Node2, mnesia, install_fallback , [File, Local])),
+ ?match([Post], mnesia:dirty_read({Tab, Key})),
+ ?match([Post], rpc:call(Node2, mnesia, dirty_read, [{Tab, Key}])),
+
+ ?match([], mnesia_test_lib:kill_mnesia(Nodes)),
+ ?match([], mnesia_test_lib:start_mnesia(Nodes, [Tab])),
+ ?match([Pre], mnesia:dirty_read({Tab, Key})),
+ ?match([Pre], rpc:call(Node2, mnesia, dirty_read, [{Tab, Key}])),
+ Dir = rpc:call(Node2, mnesia, system_info , [directory]),
+
+ ?match(ok, mnesia:dirty_write(Post)),
+ ?match([Post], mnesia:dirty_read({Tab, Key})),
+ ?match([], mnesia_test_lib:kill_mnesia([Node2])),
+ ?match(ok, mnesia:install_fallback(File, Local ++ [{mnesia_dir, Dir}])),
+ ?match([], mnesia_test_lib:kill_mnesia([Node1])),
+
+ ?match([], mnesia_test_lib:start_mnesia([Node2], [])),
+ ?match(yes, rpc:call(Node2, mnesia, force_load_table, [Tab])),
+ ?match([], mnesia_test_lib:start_mnesia(Nodes, [Tab])),
+ ?match([Pre], mnesia:dirty_read({Tab, Key})),
+
+ ?match(ok, file:delete(File)),
+ ?verify_mnesia(Nodes, []).
+
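+%% Illustrative sketch (not a test case): a fallback can be limited to
+%% the local node with {scope, local}, and can be installed into another
+%% node's Mnesia directory via {mnesia_dir, Dir}, as exercised above:
+
+local_fallback_example(File, Dir) ->
+    ok = mnesia:install_fallback(File, [{scope, local}, {mnesia_dir, Dir}]).
+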
+selective_backup_checkpoint(doc) ->
+ ["Perform a selective backup of a checkpoint"];
+selective_backup_checkpoint(suite) -> [];
+selective_backup_checkpoint(Config) when is_list(Config) ->
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+ Tab = sel_backup,
+ OmitTab = sel_backup_omit,
+ CpName = sel_cp,
+ Def = [{disc_copies, [Node1, Node2]}],
+ File = "selective_backup_checkpoint.BUP",
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ ?match({atomic, ok}, mnesia:create_table(OmitTab, Def)),
+ ?match(ok, mnesia:dirty_write({Tab, 1, test_ok})),
+ ?match(ok, mnesia:dirty_write({OmitTab, 1, test_ok})),
+ CpSpec = [{name, CpName}, {max, mnesia:system_info(tables)}],
+ ?match({ok, CpName, _Ns}, mnesia:activate_checkpoint(CpSpec)),
+
+ BupSpec = [{tables, [Tab]}],
+ ?match(ok, mnesia:backup_checkpoint(CpName, File, BupSpec)),
+
+ ?match([schema, sel_backup], bup_tables(File, mnesia_backup)),
+ ?match(ok, file:delete(File)),
+
+ BupSpec2 = [{tables, [Tab, OmitTab]}],
+ ?match(ok, mnesia:backup_checkpoint(CpName, File, BupSpec2)),
+
+ ?match([schema, sel_backup, sel_backup_omit],
+ bup_tables(File, mnesia_backup)),
+ ?match(ok, file:delete(File)),
+ ?verify_mnesia(Nodes, []).
+
+bup_tables(File, Mod) ->
+ Fun = fun(Rec, Tabs) ->
+ Tab = element(1, Rec),
+ Tabs2 = [Tab | lists:delete(Tab, Tabs)],
+ {[Rec], Tabs2}
+ end,
+ case mnesia:traverse_backup(File, Mod, dummy, read_only, Fun, []) of
+ {ok, Tabs} ->
+ lists:sort(Tabs);
+ {error, Reason} ->
+ exit(Reason)
+ end.
+
+incremental_backup_checkpoint(doc) ->
+ ["Perform a incremental backup of a checkpoint"];
+incremental_backup_checkpoint(suite) -> [];
+incremental_backup_checkpoint(Config) when is_list(Config) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = incr_backup,
+ Def = [{disc_copies, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ OldRecs = [{Tab, K, -K} || K <- lists:seq(1, 5)],
+ ?match([ok|_], [mnesia:dirty_write(R) || R <- OldRecs]),
+ OldCpName = old_cp,
+ OldCpSpec = [{name, OldCpName}, {min, [Tab]}],
+ ?match({ok, OldCpName, _Ns}, mnesia:activate_checkpoint(OldCpSpec)),
+
+ BupSpec = [{tables, [Tab]}],
+ OldFile = "old_full_backup.BUP",
+ ?match(ok, mnesia:backup_checkpoint(OldCpName, OldFile, BupSpec)),
+ ?match(OldRecs, bup_records(OldFile, mnesia_backup)),
+ ?match(ok, mnesia:dirty_delete({Tab, 1})),
+ ?match(ok, mnesia:dirty_write({Tab, 2, 2})),
+ ?match(ok, mnesia:dirty_write({Tab, 3, -3})),
+
+ NewCpName = new_cp,
+ NewCpSpec = [{name, NewCpName}, {min, [Tab]}],
+ ?match({ok, NewCpName, _Ns}, mnesia:activate_checkpoint(NewCpSpec)),
+ ?match(ok, mnesia:dirty_write({Tab, 4, 4})),
+
+ NewFile = "new_full_backup.BUP",
+ ?match(ok, mnesia:backup_checkpoint(NewCpName, NewFile, BupSpec)),
+ NewRecs = [{Tab, 2, 2}, {Tab, 3, -3},
+ {Tab, 4, 4}, {Tab, 4}, {Tab, 4, -4}, {Tab, 5, -5}],
+ ?match(NewRecs, bup_records(NewFile, mnesia_backup)),
+
+ DiffFile = "diff_backup.BUP",
+ DiffBupSpec = [{tables, [Tab]}, {incremental, OldCpName}],
+ ?match(ok, mnesia:backup_checkpoint(NewCpName, DiffFile, DiffBupSpec)),
+ DiffRecs = [{Tab, 1}, {Tab, 2}, {Tab, 2, 2}, {Tab, 3}, {Tab, 3, -3},
+ {Tab, 4}, {Tab, 4, 4}, {Tab, 4}, {Tab, 4, -4}],
+ ?match(DiffRecs, bup_records(DiffFile, mnesia_backup)),
+
+ ?match(ok, mnesia:deactivate_checkpoint(OldCpName)),
+ ?match(ok, mnesia:deactivate_checkpoint(NewCpName)),
+ ?match(ok, file:delete(OldFile)),
+ ?match(ok, file:delete(NewFile)),
+ ?match(ok, file:delete(DiffFile)),
+
+ ?verify_mnesia(Nodes, []).
+
+bup_records(File, Mod) ->
+ Fun = fun(Rec, Recs) when element(1, Rec) == schema ->
+ {[Rec], Recs};
+ (Rec, Recs) ->
+ {[Rec], [Rec | Recs]}
+ end,
+ case mnesia:traverse_backup(File, Mod, dummy, read_only, Fun, []) of
+ {ok, Recs} ->
+ lists:keysort(1, lists:keysort(2, lists:reverse(Recs)));
+ {error, Reason} ->
+ exit(Reason)
+ end.
+
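+%% Illustrative sketch (not a test case): an incremental backup is taken
+%% by naming an older checkpoint in the backup spec, so the resulting
+%% file only holds the changes recorded between the two checkpoints:
+
+incremental_backup_example(OldCp, NewCp, File, Tab) ->
+    BupSpec = [{tables, [Tab]}, {incremental, OldCp}],
+    ok = mnesia:backup_checkpoint(NewCp, File, BupSpec).
+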
+sops_with_checkpoint(doc) ->
+ ["Test schema operations during a checkpoint"];
+sops_with_checkpoint(suite) -> [];
+sops_with_checkpoint(Config) when is_list(Config) ->
+ Ns = ?acquire_nodes(2, Config),
+
+ ?match({ok, cp1, Ns}, mnesia:activate_checkpoint([{name, cp1},{max,mnesia:system_info(tables)}])),
+ Tab = tab,
+ ?match({atomic, ok}, mnesia:create_table(Tab, [{disc_copies,Ns}])),
+ OldRecs = [{Tab, K, -K} || K <- lists:seq(1, 5)],
+ [mnesia:dirty_write(R) || R <- OldRecs],
+
+ ?match({ok, cp2, Ns}, mnesia:activate_checkpoint([{name, cp2},{max,mnesia:system_info(tables)}])),
+ File1 = "cp1_delete_me.BUP",
+ ?match(ok, mnesia:dirty_write({Tab,6,-6})),
+ ?match(ok, mnesia:backup_checkpoint(cp1, File1)),
+ ?match(ok, mnesia:dirty_write({Tab,7,-7})),
+ File2 = "cp2_delete_me.BUP",
+ ?match(ok, mnesia:backup_checkpoint(cp2, File2)),
+
+ ?match(ok, mnesia:deactivate_checkpoint(cp1)),
+ ?match(ok, mnesia:backup_checkpoint(cp2, File1)),
+ ?match(ok, mnesia:dirty_write({Tab,8,-8})),
+
+ ?match({atomic,ok}, mnesia:delete_table(Tab)),
+ ?match({error,_}, mnesia:backup_checkpoint(cp2, File2)),
+ ?match({'EXIT',_}, mnesia:dirty_write({Tab,9,-9})),
+
+ ?match({atomic,_}, mnesia:restore(File1, [{default_op, recreate_tables}])),
+ Test = fun(N) when N > 5 -> ?error("Too many records in backup ~p~n", [N]);
+ (N) -> case mnesia:dirty_read(Tab,N) of
+ [{Tab,N,B}] when -B =:= N -> ok;
+ Other -> ?error("Not matching ~p ~p~n", [N,Other])
+ end
+ end,
+ [Test(N) || N <- mnesia:dirty_all_keys(Tab)],
+ ?match({aborted,enoent}, mnesia:restore(File2, [{default_op, recreate_tables}])),
+
+ file:delete(File1), file:delete(File2),
+
+ ?verify_mnesia(Ns, []).
diff --git a/lib/mnesia/test/mnesia_evil_coverage_test.erl b/lib/mnesia/test/mnesia_evil_coverage_test.erl
new file mode 100644
index 0000000000..4fbf1b4003
--- /dev/null
+++ b/lib/mnesia/test/mnesia_evil_coverage_test.erl
@@ -0,0 +1,2401 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(mnesia_evil_coverage_test).
+-author('[email protected]').
+-include("mnesia_test_lib.hrl").
+
+-compile([export_all]).
+
+-define(cleanup(N, Config),
+ mnesia_test_lib:prepare_test_case([{reload_appls, [mnesia]}],
+ N, Config, ?FILE, ?LINE)).
+init_per_testcase(Func, Conf) ->
+ mnesia_test_lib:init_per_testcase(Func, Conf).
+
+fin_per_testcase(Func, Conf) ->
+ mnesia_test_lib:fin_per_testcase(Func, Conf).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+all(doc) ->
+ ["Evil usage of the API.",
+ "Invoke all functions in the API and try to cover all legal uses",
+ "cases as well the illegal dito. This is a complement to the",
+ "other more explicit test cases."];
+all(suite) ->
+ [
+ system_info,
+ table_info,
+ error_description,
+ db_node_lifecycle,
+ evil_delete_db_node,
+ start_and_stop,
+ checkpoint,
+ table_lifecycle,
+ add_copy_conflict,
+ add_copy_when_going_down,
+ replica_management,
+ schema_availability,
+ local_content,
+ table_access_modifications,
+ replica_location,
+ table_sync,
+ user_properties,
+ unsupp_user_props,
+ record_name,
+ snmp_access,
+ subscriptions,
+ iteration,
+ debug_support,
+ sorted_ets,
+ {mnesia_dirty_access_test, all},
+ {mnesia_trans_access_test, all},
+ {mnesia_evil_backup, all}
+ ].
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Get meta info about Mnesia
+
+system_info(suite) -> [];
+system_info(Config) when is_list(Config) ->
+ Nodes = ?acquire_nodes(all, Config),
+ Ns = ?sort(Nodes),
+ ?match(yes, mnesia:system_info(is_running)),
+ ?match(Ns, ?sort(mnesia:system_info(db_nodes))),
+ ?match(Ns, ?sort(mnesia:system_info(running_db_nodes))),
+ ?match(A when is_atom(A), mnesia:system_info(debug)),
+ ?match(L when is_list(L), mnesia:system_info(directory)),
+ ?match(L when is_list(L), mnesia:system_info(log_version)),
+ ?match({_, _}, mnesia:system_info(schema_version)),
+ ?match(L when is_list(L), mnesia:system_info(tables)),
+ ?match(L when is_list(L), mnesia:system_info(local_tables)),
+ ?match(L when is_list(L), mnesia:system_info(held_locks)),
+ ?match(L when is_list(L), mnesia:system_info(lock_queue)),
+ ?match(L when is_list(L), mnesia:system_info(transactions)),
+ ?match(I when is_integer(I), mnesia:system_info(transaction_failures)),
+ ?match(I when is_integer(I), mnesia:system_info(transaction_commits)),
+ ?match(I when is_integer(I), mnesia:system_info(transaction_restarts)),
+ ?match(L when is_list(L), mnesia:system_info(checkpoints)),
+ ?match(A when is_atom(A), mnesia:system_info(backup_module)),
+ ?match(true, mnesia:system_info(auto_repair)),
+ ?match({_, _}, mnesia:system_info(dump_log_interval)),
+ ?match(A when is_atom(A), mnesia:system_info(dump_log_update_in_place)),
+ ?match(I when is_integer(I), mnesia:system_info(transaction_log_writes)),
+ ?match(I when is_integer(I), mnesia:system_info(send_compressed)),
+ ?match(L when is_list(L), mnesia:system_info(all)),
+ ?match({'EXIT', {aborted, Reason}} when element(1, Reason) == badarg,
+ mnesia:system_info(ali_baba)),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Get meta info about table
+
+table_info(suite) -> [];
+table_info(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+
+ Tab = table_info,
+ Type = bag,
+ ValPos = 3,
+ Attrs = [k, v],
+ Arity = length(Attrs) +1,
+
+ Schema =
+ case mnesia_test_lib:diskless(Config) of
+ true -> [{type, Type}, {attributes, Attrs}, {index, [ValPos]},
+ {ram_copies, Nodes}];
+ false ->
+ [{type, Type}, {attributes, Attrs}, {index, [ValPos]},
+ {disc_only_copies, [Node1]}, {ram_copies, [Node2]},
+ {disc_copies, [Node3]}]
+ end,
+ ?match({atomic, ok}, mnesia:create_table(Tab, Schema)),
+
+ Size = 10,
+ Keys = lists:seq(1, Size),
+ Records = [{Tab, A, 7} || A <- Keys],
+ lists:foreach(fun(Rec) -> ?match(ok, mnesia:dirty_write(Rec)) end, Records),
+ ?match(Mem when is_integer(Mem), mnesia:table_info(Tab, memory)),
+ ?match(Size, mnesia:table_info(Tab, size)),
+ ?match(Type, mnesia:table_info(Tab, type)),
+
+ case mnesia_test_lib:diskless(Config) of
+ true ->
+ ?match(Nodes, mnesia:table_info(Tab, ram_copies));
+ false ->
+ ?match([Node3], mnesia:table_info(Tab, mnesia_test_lib:storage_type(disc_copies, Config))),
+ ?match([Node2], mnesia:table_info(Tab, ram_copies)),
+ ?match([Node1], mnesia:table_info(Tab, mnesia_test_lib:storage_type(disc_only_copies, Config)))
+ end,
+ Read = [Node1, Node2, Node3],
+ ?match(true, lists:member(mnesia:table_info(Tab, where_to_read), Read)),
+ Write = ?sort([Node1, Node2, Node3]),
+ ?match(Write, ?sort(mnesia:table_info(Tab, where_to_write))),
+ ?match([ValPos], mnesia:table_info(Tab, index)),
+ ?match(Arity, mnesia:table_info(Tab, arity)),
+ ?match(Attrs, mnesia:table_info(Tab, attributes)),
+ ?match({Tab, '_', '_'}, mnesia:table_info(Tab, wild_pattern)),
+ ?match({atomic, Attrs}, mnesia:transaction(fun() ->
+ mnesia:table_info(Tab, attributes) end)),
+
+ ?match(L when is_list(L), mnesia:table_info(Tab, all)),
+
+ %% Table info when table not loaded
+ ?match({atomic, ok},
+ mnesia:create_table(tab_info, Schema)),
+ ?match(stopped, mnesia:stop()),
+ ?match(stopped, rpc:call(Node2, mnesia, stop, [])),
+ ?match(ok, mnesia:start()),
+ ?match(ok, mnesia:wait_for_tables([tab_info], 5000)),
+ ?match(0, mnesia:table_info(tab_info, size)),
+ ?verify_mnesia([Node1, Node3], [Node2]).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Check the error descriptions
+
+error_description(suite) -> [];
+error_description(Config) when is_list(Config) ->
+ ?acquire_nodes(1, Config),
+ Errors = [nested_transaction, badarg, no_transaction, combine_error,
+ bad_index, already_exists, index_exists, no_exists, system_limit,
+ mnesia_down, not_a_db_node, bad_type, node_not_running,
+ truncated_binary_file, active, illegal
+ ],
+ ?match(X when is_atom(X), mnesia:error_description({error, bad_error_msg})),
+ ?match(X when is_tuple(X), mnesia:error_description({'EXIT', pid, bad})),
+ %% This is real error msg
+ ?match(X when is_tuple(X), mnesia:error_description(
+ {error,
+ {"Cannot prepare checkpoint (bad reply)",
+ {{877,957351,758147},a@legolas},
+ {error,{node_not_running,a1@legolas}}}})),
+ check_errors(error, Errors),
+ check_errors(aborted, Errors),
+ check_errors('EXIT', Errors).
+
+check_errors(_Err, []) -> ok;
+check_errors(Err, [Desc|R]) ->
+ ?match(X when is_list(X), mnesia:error_description({Err, Desc})),
+ check_errors(Err, R).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Add and drop db nodes
+
+db_node_lifecycle(suite) -> [];
+db_node_lifecycle(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = AllNodes = ?acquire_nodes(3, Config),
+ Tab = db_node_lifecycle,
+
+ Who = fun(T) ->
+ L1 = mnesia:table_info(T, ram_copies),
+ L2 = mnesia:table_info(T, disc_copies),
+ L3 = mnesia:table_info(T, disc_only_copies),
+ L1 ++ L2 ++ L3
+ end,
+
+ SNs = ?sort(AllNodes),
+
+ Schema = [{name, Tab}, {ram_copies, [Node1, Node2]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+
+ ?match([], mnesia_test_lib:stop_mnesia(AllNodes)),
+ ?match(ok, mnesia:delete_schema(AllNodes)),
+ ?match({error, _}, mnesia:create_schema(foo)),
+ ?match({error, _}, mnesia:create_schema([foo])),
+ ?match({error, _}, mnesia:create_schema([foo@bar])),
+ ?match(ok, mnesia:start()),
+ ?match(false, mnesia:system_info(use_dir)),
+ ?match({atomic, ok}, mnesia:create_table(Tab, [])),
+ ?match({aborted, {has_no_disc, Node1}}, mnesia:dump_tables([Tab])),
+ ?match({aborted, {has_no_disc, Node1}}, mnesia:change_table_copy_type(Tab, node(), disc_copies)),
+ ?match({aborted, {has_no_disc, Node1}}, mnesia:change_table_copy_type(Tab, node(), disc_only_copies)),
+
+ ?match(stopped, mnesia:stop()),
+
+ ?match(ok, mnesia:create_schema(AllNodes)),
+ ?match([], mnesia_test_lib:start_mnesia(AllNodes)),
+
+ ?match([SNs, SNs, SNs],
+ lists:map({lists, sort},
+ element(1, rpc:multicall(AllNodes, mnesia, table_info,
+ [schema, disc_copies])))),
+
+ ?match({aborted, {already_exists, schema, Node2, _}},
+ mnesia:change_table_copy_type(schema, Node2, disc_copies)),
+ ?match({atomic, ok},
+ mnesia:change_table_copy_type(schema, Node2, ram_copies)),
+ ?match({aborted, {already_exists, schema, Node2, _}},
+ mnesia:change_table_copy_type(schema, Node2, ram_copies)),
+
+ ?match({atomic, ok},
+ mnesia:change_table_copy_type(schema, Node2, disc_copies)),
+
+ ?match([SNs, SNs, SNs],
+ lists:map({lists, sort},
+ element(1, rpc:multicall(AllNodes, mnesia, table_info,
+ [schema, disc_copies])))),
+
+ %% Delete the DB
+
+ Tab2 = disk_tab,
+ Tab3 = not_local,
+ Tab4 = local,
+ Tab5 = remote,
+
+ Tabs = [Schema,
+ [{name, Tab2}, {disc_copies, AllNodes}],
+ [{name, Tab3}, {ram_copies, [Node2, Node3]}],
+ [{name, Tab4}, {disc_only_copies, [Node1]}],
+ [{name, Tab5}, {disc_only_copies, [Node2]}]],
+
+ [?match({atomic, ok}, mnesia:create_table(T)) || T <- Tabs ],
+
+ ?match({aborted, {active, _, Node2}},
+ mnesia:del_table_copy(schema, Node2)),
+
+ ?match([], mnesia_test_lib:stop_mnesia([Node1])),
+ ?match({aborted, {node_not_running, Node1}},
+ mnesia:del_table_copy(schema, Node2)),
+
+ ?match([], mnesia_test_lib:start_mnesia([Node1],[Tab2,Tab4])),
+ ?match([], mnesia_test_lib:stop_mnesia([Node2])),
+ ?match({atomic, ok},
+ mnesia:del_table_copy(schema, Node2)),
+
+ %% Check
+ RemNodes = AllNodes -- [Node2],
+
+ ?match(RemNodes, mnesia:system_info(db_nodes)),
+ ?match([Node1], Who(Tab)),
+ ?match(RemNodes, Who(Tab2)),
+ ?match([Node3], Who(Tab3)),
+ ?match([Node1], Who(Tab4)),
+ ?match({'EXIT', {aborted, {no_exists, _, _}}}, Who(Tab5)),
+
+ ?match({atomic, ok},
+ mnesia:change_table_copy_type(Tab2, Node3, ram_copies)),
+
+ ?match({atomic, ok},
+ mnesia:change_table_copy_type(schema, Node3, ram_copies)),
+
+ ?match([], mnesia_test_lib:stop_mnesia([Node3])),
+ ?match({atomic, ok},
+ mnesia:del_table_copy(schema, Node3)),
+ ?match([Node1], mnesia:system_info(db_nodes)),
+ ?match([Node1], Who(Tab)),
+ ?match([Node1], Who(Tab2)),
+ ?match({'EXIT', {aborted, {no_exists, _, _}}}, Who(Tab3)),
+ ?match([Node1], Who(Tab4)),
+ ?match({'EXIT', {aborted, {no_exists, _, _}}}, Who(Tab5)),
+
+ ?verify_mnesia([Node1], []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Drop a db node when several disk resident nodes are down
+
+evil_delete_db_node(suite) -> [];
+evil_delete_db_node(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = AllNodes = ?acquire_nodes(3, Config),
+ Tab = evil_delete_db_node,
+
+ ?match({atomic, ok}, mnesia:create_table(Tab, [{disc_copies, AllNodes}])),
+
+ ?match([], mnesia_test_lib:stop_mnesia([Node2, Node3])),
+
+ ?match({atomic, ok}, mnesia:del_table_copy(schema, Node2)),
+
+ RemNodes = AllNodes -- [Node2],
+
+ ?match(RemNodes, mnesia:system_info(db_nodes)),
+ ?match(RemNodes, mnesia:table_info(Tab, disc_copies)),
+
+ ?verify_mnesia([Node1], []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Start and stop the system
+
+start_and_stop(suite) -> [];
+start_and_stop(Config) when is_list(Config) ->
+ [Node1 | _] = Nodes = ?acquire_nodes(all, Config),
+
+ ?match(stopped, rpc:call(Node1, mnesia, stop, [])),
+ ?match(stopped, rpc:call(Node1, mnesia, stop, [])),
+ ?match(ok, rpc:call(Node1, mnesia, start, [])),
+ ?match(ok, rpc:call(Node1, mnesia, start, [])),
+ ?match(stopped, rpc:call(Node1, mnesia, stop, [])),
+ ?verify_mnesia(Nodes -- [Node1], [Node1]),
+ ?match([], mnesia_test_lib:start_mnesia(Nodes)),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Checkpoints and backup management
+
+checkpoint(suite) -> [];
+checkpoint(Config) when is_list(Config) ->
+ checkpoint(2, Config),
+ checkpoint(3, Config).
+
+checkpoint(NodeConfig, Config) ->
+ [Node1 | _] = TabNodes = ?acquire_nodes(NodeConfig, Config),
+ CreateTab = fun(Type, N, Ns) ->
+ Tab0 = lists:concat(["local_checkpoint_", Type, N]),
+ Tab = list_to_atom(Tab0),
+ catch mnesia:delete_table(Tab),
+ ?match({atomic, ok},
+ mnesia:create_table(Tab, [{Type, Ns}])),
+ Tab
+ end,
+ CreateTabs = fun(Type, Acc) ->
+ [CreateTab(Type, 1, [hd(TabNodes)]),
+ CreateTab(Type, 2, TabNodes),
+ CreateTab(Type, 3, [lists:last(TabNodes)])] ++
+ Acc
+ end,
+ Types = [ram_copies, disc_copies, disc_only_copies],
+ Tabs = lists:foldl(CreateTabs, [], Types),
+ Recs = ?sort([{T, N, N} || T <- Tabs, N <- lists:seq(1, 10)]),
+ lists:foreach(fun(R) -> ?match(ok, mnesia:dirty_write(R)) end, Recs),
+
+ CpName = a_checkpoint_name,
+ MinArgs = [{name, CpName}, {min, Tabs}, {allow_remote, false}],
+ ?match({error, _}, rpc:call(Node1, mnesia, activate_checkpoint, [MinArgs])),
+
+ MaxArgs = [{name, CpName}, {max, Tabs}, {allow_remote, true}],
+ ?match({ok, CpName, L} when is_list(L),
+ rpc:call(Node1, mnesia, activate_checkpoint, [MaxArgs])),
+ ?match(ok, rpc:call(Node1, mnesia, deactivate_checkpoint, [CpName])),
+
+ Args = [{name, CpName}, {min, Tabs}, {allow_remote, true}],
+ ?match({ok, CpName, L} when is_list(L),
+ rpc:call(Node1, mnesia, activate_checkpoint, [Args])),
+ Recs2 = ?sort([{T, K, 0} || {T, K, _} <- Recs]),
+ lists:foreach(fun(R) -> ?match(ok, mnesia:dirty_write(R)) end, Recs2),
+ ?match(ok, rpc:call(Node1, mnesia, deactivate_checkpoint, [CpName])),
+
+ ?match({error, Reason1 } when element(1, Reason1) == no_exists,
+ mnesia:deactivate_checkpoint(CpName)),
+ ?match({error, Reason2 } when element(1, Reason2) == badarg,
+ mnesia:activate_checkpoint(foo)),
+ ?match({error, Reason3 } when element(1, Reason3) == badarg,
+ mnesia:activate_checkpoint([{foo, foo}])),
+ ?match({error, Reason4 } when element(1, Reason4) == badarg,
+ mnesia:activate_checkpoint([{max, foo}])),
+ ?match({error, Reason5 } when element(1, Reason5) == badarg,
+ mnesia:activate_checkpoint([{min, foo}])),
+ ?match({error, _}, mnesia:activate_checkpoint([{min, [foo@bar]}])),
+ ?match({error, Reason6 } when element(1, Reason6) == badarg,
+ mnesia:activate_checkpoint([{allow_remote, foo}])),
+
+ Fun = fun(Tab) -> ?match({atomic, ok}, mnesia:delete_table(Tab)) end,
+ lists:foreach(Fun, Tabs),
+ ?verify_mnesia(TabNodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Create and delete tables
+
+%% Get meta info about table
+
+-define(vrl, mnesia_test_lib:verify_replica_location).
+
+replica_location(suite) -> [];
+replica_location(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Tab = replica_location,
+
+ %% Create three replicas
+ Schema = [{name, Tab}, {disc_only_copies, [Node1]},
+ {ram_copies, [Node2]}, {disc_copies, [Node3]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+ ?match([], ?vrl(Tab, [Node1], [Node2], [Node3], Nodes)),
+
+ %% Delete one replica
+ ?match({atomic, ok}, mnesia:del_table_copy(Tab, Node2)),
+ ?match([], ?vrl(Tab, [Node1], [], [Node3], Nodes)),
+
+ %% Move one replica
+ ?match({atomic, ok}, mnesia:move_table_copy(Tab, Node1, Node2)),
+ ?match([], ?vrl(Tab, [Node2], [], [Node3], Nodes)),
+
+ %% Change replica type
+ ?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node2, ram_copies)),
+ ?match([], ?vrl(Tab, [], [Node2], [Node3], Nodes)),
+
+ ?verify_mnesia(Nodes, []).
+
+table_lifecycle(suite) -> [];
+table_lifecycle(Config) when is_list(Config) ->
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+
+ ?match({atomic, ok}, mnesia:create_table([{type, bag},
+ {ram_copies, [Node1]},
+ {attributes, [rajtan, tajtan]},
+ {name, order_of_args}])),
+ ?match([], mnesia:dirty_read({order_of_args, 4711})),
+ ?match({atomic, ok}, mnesia:create_table([{name, already_exists},
+ {ram_copies, [Node1]}])),
+ ?match({aborted, Reason23} when element(1, Reason23) == already_exists,
+ mnesia:create_table([{name, already_exists},
+ {ram_copies, [Node1]}])),
+ ?match({aborted, Reason21 } when element(1, Reason21) == bad_type,
+ mnesia:create_table([{name, bad_node}, {ram_copies, ["foo"]}])),
+ ?match({aborted, Reason2} when element(1, Reason2) == bad_type,
+ mnesia:create_table([{name, zero_arity}, {attributes, []}])),
+ ?match({aborted, Reason3} when element(1, Reason3) == badarg,
+ mnesia:create_table([])),
+ ?match({aborted, Reason4} when element(1, Reason4) == badarg,
+ mnesia:create_table(atom)),
+ ?match({aborted, Reason5} when element(1, Reason5) == badarg,
+ mnesia:create_table({cstruct, table_name_as_atom})),
+ ?match({aborted, Reason6 } when element(1, Reason6) == bad_type,
+ mnesia:create_table([{name, no_host}, {ram_copies, foo}])),
+ ?match({aborted, Reason7 } when element(1, Reason7) == bad_type,
+ mnesia:create_table([{name, no_host}, {disc_only_copies, foo}])),
+ ?match({aborted, Reason8} when element(1, Reason8) == bad_type,
+ mnesia:create_table([{name, no_host}, {disc_copies, foo}])),
+
+ CreateFun =
+ fun() -> ?match({aborted, nested_transaction},
+ mnesia:create_table([{name, nested_trans}])), ok
+ end,
+ ?match({atomic, ok}, mnesia:transaction(CreateFun)),
+ ?match({atomic, ok}, mnesia:create_table([{name, remote_tab},
+ {ram_copies, [Node2]}])),
+
+ ?match({atomic, ok}, mnesia:create_table([{name, a_brand_new_tab},
+ {ram_copies, [Node1]}])),
+ ?match([], mnesia:dirty_read({a_brand_new_tab, 4711})),
+ ?match({atomic, ok}, mnesia:delete_table(a_brand_new_tab)),
+ ?match({'EXIT', {aborted, Reason31}} when element(1, Reason31) == no_exists,
+ mnesia:dirty_read({a_brand_new_tab, 4711})),
+ ?match({aborted, Reason41} when element(1, Reason41) == no_exists,
+ mnesia:delete_table(a_brand_new_tab)),
+ ?match({aborted, Reason9} when element(1, Reason9) == badarg,
+ mnesia:create_table([])),
+
+ ?match({atomic, ok}, mnesia:create_table([{name, nested_del_trans},
+ {ram_copies, [Node1]}])),
+
+ DeleteFun = fun() -> ?match({aborted, nested_transaction},
+ mnesia:delete_table(nested_del_trans)), ok end,
+ ?match({atomic, ok}, mnesia:transaction(DeleteFun)),
+
+ ?match({aborted, Reason10} when element(1, Reason10) == bad_type,
+ mnesia:create_table([{name, create_with_index}, {index, 2}])),
+ ?match({aborted, Reason32} when element(1, Reason32) == bad_type,
+ mnesia:create_table([{name, create_with_index}, {index, [-1]}])),
+ ?match({aborted, Reason33} when element(1, Reason33) == bad_type,
+ mnesia:create_table([{name, create_with_index}, {index, [0]}])),
+ ?match({aborted, Reason34} when element(1, Reason34) == bad_type,
+ mnesia:create_table([{name, create_with_index}, {index, [1]}])),
+ ?match({aborted, Reason35} when element(1, Reason35) == bad_type,
+ mnesia:create_table([{name, create_with_index}, {index, [2]}])),
+ ?match({atomic, ok},
+ mnesia:create_table([{name, create_with_index}, {index, [3]},
+ {ram_copies, [Node1]}])),
+ ets:new(ets_table, [named_table]),
+
+ ?match({aborted, _}, mnesia:create_table(ets_table, [{ram_copies, Nodes}])),
+
+ ?verify_mnesia(Nodes, []).
+
+add_copy_conflict(suite) -> [];
+add_copy_conflict(doc) ->
+ ["Verify that OTP-5065 doesn't happen again, whitebox testing"];
+add_copy_conflict(Config) when is_list(Config) ->
+ Nodes = [Node1, Node2] =
+ ?acquire_nodes(2, Config ++ [{tc_timeout, timer:minutes(2)}]),
+
+ ?match({atomic, ok}, mnesia:create_table(a, [{ram_copies, Nodes}])),
+ ?match({atomic, ok}, mnesia:create_table(b, [{ram_copies, Nodes}])),
+ ?match({atomic, ok}, mnesia:create_table(test, [{ram_copies, [Node2]}])),
+ mnesia:stop(),
+ ?match(ok,mnesia:start([{no_table_loaders, 1}])),
+
+ verify_ll_queue(10),
+
+ Self = self(),
+ Test = fun() ->
+ Res = mnesia:add_table_copy(test, Node1, ram_copies),
+ Self ! {test, Res}
+ end,
+ %% Create conflict with loader queue.
+ spawn_link(Test),
+ ?match_receive(timeout),
+ %% Conflict ok
+ mnesia_controller:unblock_controller(),
+
+ ?match_receive({test, {atomic,ok}}),
+
+ ?verify_mnesia(Nodes, []),
+ ?cleanup(1, Config).
+
+verify_ll_queue(0) ->
+ ?error("Couldn't find anything in queue~n",[]);
+verify_ll_queue(N) ->
+ ?match(granted,mnesia_controller:block_controller()),
+ case mnesia_controller:get_info(1000) of
+ {info,{state,_,true,[],_Loader,[],[],[],_,_,_,_,_,_}} ->
+ %% Very slow SMP machines haven't loaded it yet.
+ mnesia_controller:unblock_controller(),
+ timer:sleep(10),
+ verify_ll_queue(N-1);
+ {info,{state,_,true,[],Loader,LL,[],[],_,_,_,_,_,_}} ->
+ io:format("~p~n", [{Loader,LL}]),
+ ?match([_], LL); %% Verify that something is in the loader queue
+ Else ->
+ ?error("No match ~p maybe the internal format has changed~n",[Else])
+ end.
+
+add_copy_when_going_down(suite) -> [];
+add_copy_when_going_down(doc) ->
+ ["Tests abort when node we load from goes down"];
+add_copy_when_going_down(Config) ->
+ [Node1, Node2] =
+ ?acquire_nodes(2, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ ?match({atomic, ok}, mnesia:create_table(a, [{ram_copies, [Node1]}])),
+ %% Grab a write lock
+ WriteAndWait = fun() ->
+ mnesia:write({a,1,1}),
+ receive continue -> ok
+ end
+ end,
+ _Lock = spawn(fun() -> mnesia:transaction(WriteAndWait) end),
+ Tester = self(),
+ spawn_link(fun() -> Res = rpc:call(Node2,mnesia, add_table_copy,
+ [a, Node2, ram_copies]),
+ Tester ! {test, Res}
+ end),
+ %% We have a lock here, so we should get a timeout
+ ?match_receive(timeout),
+ mnesia_test_lib:kill_mnesia([Node1]),
+ ?match_receive({test,{aborted,_}}),
+ ?verify_mnesia([Node2], []).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Add, drop and move replicas, change storage types
+%% Change table layout (only arity change supported)
+
+-record(replica_management, {k, v}).
+-record(new_replica_management, {k, v, extra}).
+
+-define(SS(R), lists:sort(element(1,R))).
+
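+%% Illustrative sketch (not a test case): an arity change is done with
+%% mnesia:transform_table/4, giving a fun that maps old records to new
+%% ones plus the new attribute list and record name, as exercised at the
+%% end of replica_management/1 below:
+
+transform_example(Tab) ->
+    F = fun(#replica_management{k = K, v = V}) ->
+                #new_replica_management{k = K, v = V, extra = K * 2}
+        end,
+    mnesia:transform_table(Tab, F,
+                           record_info(fields, new_replica_management),
+                           new_replica_management).
+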
+replica_management(doc) ->
+ "Add, drop and move replicas, change storage types.";
+replica_management(suite) ->
+ [];
+replica_management(Config) when is_list(Config) ->
+ %% add_table_copy/3, del_table_copy/2, move_table_copy/3,
+ %% change_table_copy_type/3, transform_table/3
+
+ Nodes = [Node1, Node2, Node3] = ?acquire_nodes(3, Config),
+
+ Tab = replica_management,
+ Attrs = record_info(fields, replica_management),
+
+ %%
+ %% Add, delete and change replicas
+ %%
+ ?match({atomic, ok},
+ mnesia:create_table([{name, Tab}, {attributes, Attrs},
+ {ram_copies, [Node1, Node3]}])),
+ [?match(ok, mnesia:dirty_write({Tab, K, K + 2})) || K <-lists:seq(1, 10)],
+ ?match([], ?vrl(Tab, [], [Node1, Node3], [], Nodes)),
+ %% R - -
+ ?match({atomic, ok}, mnesia:dump_tables([Tab])),
+ ?match({aborted, Reason50 } when element(1, Reason50) == combine_error,
+ mnesia:add_table_copy(Tab, Node2, disc_copies)),
+ ?match({aborted, Reason51 } when element(1, Reason51) == combine_error,
+ mnesia:change_table_copy_type(Tab, Node1, disc_copies)),
+ ?match({atomic, ok}, mnesia:clear_table(Tab)),
+ SyncedCheck = fun() ->
+ mnesia:lock({record,Tab,0}, write),
+ ?match([0,0,0], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size])))
+ end,
+ mnesia:transaction(SyncedCheck),
+
+ ?match({[0,0,0], []}, rpc:multicall(Nodes, mnesia, table_info, [Tab, size])),
+ ?match({atomic, ok}, mnesia:del_table_copy(Tab, Node1)),
+ ?match({atomic, ok}, mnesia:del_table_copy(Tab, Node3)),
+ ?match([], ?vrl(Tab, [], [], [], Nodes)),
+ %% - - -
+ ?match({aborted,Reason52} when element(1, Reason52) == no_exists,
+ mnesia:add_table_copy(Tab, Node3, ram_copies)),
+
+ ?match({atomic, ok}, mnesia:create_table([{name, Tab},
+ {attributes, Attrs},
+ {disc_copies, [Node1]}])),
+ ?match([], ?vrl(Tab, [], [], [Node1], Nodes)),
+ %% D - -
+ [?match(ok, mnesia:dirty_write({Tab, K, K + 2})) || K <-lists:seq(1, 10)],
+
+ ?match({aborted, Reason53} when element(1, Reason53) == badarg,
+ mnesia:add_table_copy(Tab, Node2, bad_storage_type)),
+ ?match({atomic, ok}, mnesia:add_table_copy(Tab, Node2, disc_only_copies)),
+ ?match([], ?vrl(Tab, [Node2], [], [Node1], Nodes)),
+ ?match([0,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
+ %% D DO -
+ ?match({atomic, ok}, mnesia:add_table_copy(Tab, Node3, ram_copies)),
+ ?match([], ?vrl(Tab, [Node2], [Node3], [Node1], Nodes)),
+ ?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
+ %% D DO R
+ ?match({atomic, ok},
+ mnesia:change_table_copy_type(Tab, Node1, disc_only_copies)),
+ ?match([], ?vrl(Tab, [Node1, Node2], [Node3], [], Nodes)),
+ ?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
+ %% DO DO R
+ ?match({aborted, Reason54} when element(1, Reason54) == already_exists,
+ mnesia:add_table_copy(Tab, Node3, ram_copies)),
+ ?match({atomic, ok}, mnesia:del_table_copy(Tab, Node1)),
+ ?match([], ?vrl(Tab, [Node2], [Node3], [], Nodes)),
+ %% - DO R
+ ?match({aborted, _}, mnesia:del_table_copy(Tab, Node1)),
+ ?match(Tab, ets:new(Tab, [named_table])),
+ ?match({aborted, _}, mnesia:add_table_copy(Tab, Node1, disc_copies)),
+ ?match(true, ets:delete(Tab)),
+ ?match({atomic, ok}, mnesia:add_table_copy(Tab, Node1, disc_copies)),
+ ?match([], ?vrl(Tab, [Node2], [Node3], [Node1], Nodes)),
+ ?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
+ %% D DO R
+ ?match({atomic, ok},mnesia:change_table_copy_type(Tab, Node3, disc_only_copies)),
+ ?match([], ?vrl(Tab, [Node2, Node3], [], [Node1], Nodes)),
+ ?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
+
+ %% D DO DO
+ ?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node3, ram_copies)),
+ ?match([], ?vrl(Tab, [Node2], [Node3], [Node1], Nodes)),
+ ?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
+ %% D DO R
+ ?match({atomic, ok},
+ mnesia:change_table_copy_type(Tab, Node2, disc_copies)),
+ ?match([], ?vrl(Tab, [], [Node3], [Node1,Node2], Nodes)),
+ ?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
+
+ %% D D R
+ ?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node1, disc_only_copies)),
+ ?match([], ?vrl(Tab, [Node1], [Node3], [Node2], Nodes)),
+ ?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
+
+ %% DO D R
+ ?match(Tab, ets:new(Tab, [named_table])),
+ ?match({aborted, _}, mnesia:change_table_copy_type(Tab, Node1, ram_copies)),
+ ?match(true, ets:delete(Tab)),
+ ?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node1, ram_copies)),
+ ?match([], ?vrl(Tab, [], [Node3,Node1], [Node2], Nodes)),
+ ?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
+ %% R D R
+ ?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node1, disc_copies)),
+ ?match([], ?vrl(Tab, [], [Node3], [Node2,Node1], Nodes)),
+ ?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
+
+ %% D D R
+ ?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node2, disc_only_copies)),
+ ?match([], ?vrl(Tab, [Node2], [Node3], [Node1], Nodes)),
+ ?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
+
+ %% D DO R
+ ?match({atomic, ok}, mnesia:change_table_copy_type(Tab, Node3, disc_only_copies)),
+ ?match([], ?vrl(Tab, [Node2, Node3], [], [Node1], Nodes)),
+ ?match([10,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
+ %% D DO DO
+ %% test clear
+ ?match({atomic, ok}, mnesia:clear_table(Tab)),
+ mnesia:transaction(SyncedCheck),
+
+ %% rewrite for rest of testcase
+ [?match(ok, mnesia:dirty_write({Tab, K, K + 2})) || K <- lists:seq(1, 10)],
+
+ %% D DO DO
+ ?match({atomic, ok}, mnesia:del_table_copy(Tab, Node2)),
+ ?match([], ?vrl(Tab, [Node3], [], [Node1], Nodes)),
+ %% D - DO
+ ?match({aborted, Reason55} when element(1, Reason55) == already_exists,
+ mnesia:change_table_copy_type(Tab, Node1, disc_copies)),
+
+ %%
+ %% Move replica
+ %%
+ ?match({atomic, ok}, mnesia:move_table_copy(Tab, Node1, Node2)),
+ ?match([], ?vrl(Tab, [Node3], [], [Node2], Nodes)),
+ ?match([0,10,10], ?SS(rpc:multicall(Nodes, mnesia, table_info, [Tab, size]))),
+ %% - D DO
+ ?match({aborted, _}, mnesia:move_table_copy(Tab, Node1, Node2)),
+ ?match([], mnesia_test_lib:stop_mnesia([Node3])),
+ ?match({atomic,ok}, mnesia:transaction(fun() -> mnesia:write({Tab, 43, sync_me}) end)),
+ ?match([], ?vrl(Tab, [Node3], [], [Node2], Nodes -- [Node3])),
+ %% - D DO
+ ?match({aborted, Reason56} when element(1, Reason56) == not_active,
+ mnesia:move_table_copy(Tab, Node3, Node1)),
+ ?match([], ?vrl(Tab, [Node3], [], [Node2], Nodes -- [Node3])),
+ %% - D DO
+ ?match([], mnesia_test_lib:start_mnesia([Node3])),
+ ?match([], ?vrl(Tab, [Node3], [], [Node2], Nodes)),
+ %% - D DO
+
+ %%
+ %% Transformer
+ %%
+
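+ %% Convert each #replica_management{} record into a #new_replica_management{}
+ %% with a derived extra field; the transform_table/4 call below also
+ %% installs the new record name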
+ NewAttrs = record_info(fields, new_replica_management),
+ Transformer =
+ fun(Rec) when is_record(Rec, replica_management) ->
+ #new_replica_management{k = Rec#replica_management.k,
+ v = Rec#replica_management.v,
+ extra = Rec#replica_management.k * 2}
+ end,
+ ?match({atomic, ok}, mnesia:transform_table(Tab, fun(R) -> R end, Attrs)),
+ ?match({atomic, ok}, mnesia:transform_table(Tab, Transformer, NewAttrs, new_replica_management)),
+
+ ERlist = [#new_replica_management{k = K, v = K+2, extra = K*2} || K <- lists:seq(1, 10)],
+ ARlist = [hd(mnesia:dirty_read(Tab, K)) || K <- lists:seq(1, 10)],
+
+ ?match(ERlist, ARlist),
+
+ ?match({aborted, Reason56} when element(1, Reason56) == bad_type,
+ mnesia:transform_table(Tab, Transformer, 0)),
+ ?match({aborted, Reason57} when element(1, Reason57) == bad_type,
+ mnesia:transform_table(Tab, Transformer, -1)),
+ ?match({aborted, Reason58} when element(1, Reason58) == bad_type,
+ mnesia:transform_table(Tab, Transformer, [])),
+ ?match({aborted, Reason59} when element(1, Reason59) == bad_type,
+ mnesia:transform_table(Tab, no_fun, NewAttrs)),
+ ?match({aborted, Reason59} when element(1, Reason59) == bad_type,
+ mnesia:transform_table(Tab, fun(X) -> X end, NewAttrs, {tuple})),
+
+ %% OTP-3878
+ ?match({atomic, ok}, mnesia:transform_table(Tab, ignore,
+ NewAttrs ++ [dummy])),
+ ?match({atomic, ok}, mnesia:transform_table(Tab, ignore,
+ NewAttrs ++ [dummy], last_rec)),
+
+ ARlist = [hd(mnesia:dirty_read(Tab, K)) || K <- lists:seq(1, 10)],
+ ?match({'EXIT',{aborted,{bad_type,_}}},
+ mnesia:dirty_write(Tab, #new_replica_management{})),
+ ?match(ok, mnesia:dirty_write(Tab, {last_rec, k, v, e, dummy})),
+
+ ?verify_mnesia(Nodes, []).
+
+schema_availability(doc) ->
+ ["Test that schema succeeds (or fails) as intended when some db nodes are down."];
+schema_availability(suite) ->
+ [];
+schema_availability(Config) when is_list(Config) ->
+ [N1, _N2, N3] = Nodes = ?acquire_nodes(3, Config),
+ Tab = schema_availability,
+ Storage = mnesia_test_lib:storage_type(ram_copies, Config),
+ Def1 = [{Storage, [N1, N3]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def1)),
+
+ N = 10,
+ ?match(ok, mnesia:sync_dirty(fun() -> [mnesia:write({Tab, K, K + 2}) || K <- lists:seq(1, N)], ok end)),
+ ?match({[N,0,N], []}, rpc:multicall(Nodes, mnesia, table_info, [Tab, size])),
+ ?match([], mnesia_test_lib:kill_mnesia([N3])),
+ ?match({[N,0,0], []}, rpc:multicall(Nodes, mnesia, table_info, [Tab, size])),
+
+ ?match([], mnesia_test_lib:start_mnesia([N3], [Tab])),
+ ?match({[N,0,N], []}, rpc:multicall(Nodes, mnesia, table_info, [Tab, size])),
+ ?match([], mnesia_test_lib:kill_mnesia([N3])),
+
+ ?match({atomic, ok}, mnesia:clear_table(Tab)),
+ ?match({[0,0,0], []}, rpc:multicall(Nodes, mnesia, table_info, [Tab, size])),
+
+ ?match([], mnesia_test_lib:start_mnesia([N3], [Tab])),
+ ?match({[0,0,0], []}, rpc:multicall(Nodes, mnesia, table_info, [Tab, size])),
+
+ ?verify_mnesia(Nodes, []).
+
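+%% Dirty access to a local_content table from a node without a local
+%% replica aborts with no_exists; through rpc:call it is wrapped as below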
+-define(badrpc(Tab), {badrpc, {'EXIT', {aborted,{no_exists,Tab}}}}).
+
+local_content(doc) ->
+ ["Test local_content functionality, we want to see that correct"
+ " properties gets propageted correctly between nodes"];
+local_content(suite) -> [];
+local_content(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Tab1 = local1,
+ Def1 = [{local_content, true}, {ram_copies, Nodes}],
+ Tab2 = local2,
+ Def2 = [{local_content, true}, {disc_copies, [Node1]}],
+ Tab3 = local3,
+ Def3 = [{local_content, true}, {disc_only_copies, [Node1]}],
+ Tab4 = local4,
+ Def4 = [{local_content, true}, {ram_copies, [Node1]}],
+
+ ?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
+ ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
+ ?match({atomic, ok}, mnesia:create_table(Tab3, Def3)),
+ ?match({atomic, ok}, mnesia:create_table(Tab4, Def4)),
+
+ ?match(ok, rpc:call(Node1, mnesia, dirty_write, [{Tab1, 1, Node1}])),
+ ?match(ok, rpc:call(Node2, mnesia, dirty_write, [{Tab1, 1, Node2}])),
+ ?match(ok, rpc:call(Node3, mnesia, dirty_write, [{Tab1, 1, Node3}])),
+ ?match(ok, rpc:call(Node1, mnesia, dirty_write, [{Tab2, 1, Node1}])),
+ ?match(ok, rpc:call(Node1, mnesia, dirty_write, [{Tab3, 1, Node1}])),
+ ?match(ok, rpc:call(Node1, mnesia, dirty_write, [{Tab4, 1, Node1}])),
+
+ ?match(?badrpc(Tab2), rpc:call(Node2, mnesia, dirty_write, [{Tab2, 1, Node2}])),
+ ?match(?badrpc(Tab3), rpc:call(Node2, mnesia, dirty_write, [{Tab3, 1, Node2}])),
+ ?match(?badrpc(Tab4), rpc:call(Node2, mnesia, dirty_write, [{Tab4, 1, Node2}])),
+
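+ %% Adding a replica of a local_content table to another node creates an
+ %% empty copy there; the existing rows are not copied over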
+ ?match({atomic, ok}, rpc:call(Node1, mnesia, add_table_copy, [Tab2, Node2, ram_copies])),
+ ?match({atomic, ok}, rpc:call(Node2, mnesia, add_table_copy, [Tab3, Node2, disc_copies])),
+ ?match({atomic, ok}, rpc:call(Node3, mnesia, add_table_copy, [Tab4, Node2, disc_only_copies])),
+ ?match([], rpc:call(Node2, mnesia, dirty_read, [{Tab2, 1}])),
+ ?match([], rpc:call(Node2, mnesia, dirty_read, [{Tab3, 1}])),
+ ?match([], rpc:call(Node2, mnesia, dirty_read, [{Tab4, 1}])),
+
+ ?match(ok, rpc:call(Node2, mnesia, dirty_write, [{Tab2, 1, Node2}])),
+ ?match(ok, rpc:call(Node2, mnesia, dirty_write, [{Tab3, 1, Node2}])),
+ ?match(ok, rpc:call(Node2, mnesia, dirty_write, [{Tab4, 1, Node2}])),
+
+ ?match([{Tab1, 1, Node1}], rpc:call(Node1, mnesia, dirty_read, [{Tab1, 1}])),
+ ?match([{Tab2, 1, Node1}], rpc:call(Node1, mnesia, dirty_read, [{Tab2, 1}])),
+ ?match([{Tab3, 1, Node1}], rpc:call(Node1, mnesia, dirty_read, [{Tab3, 1}])),
+ ?match([{Tab4, 1, Node1}], rpc:call(Node1, mnesia, dirty_read, [{Tab4, 1}])),
+
+ ?match([{Tab1, 1, Node2}], rpc:call(Node2, mnesia, dirty_read, [{Tab1, 1}])),
+ ?match([{Tab2, 1, Node2}], rpc:call(Node2, mnesia, dirty_read, [{Tab2, 1}])),
+ ?match([{Tab3, 1, Node2}], rpc:call(Node2, mnesia, dirty_read, [{Tab3, 1}])),
+ ?match([{Tab4, 1, Node2}], rpc:call(Node2, mnesia, dirty_read, [{Tab4, 1}])),
+
+ ?match([{Tab1, 1, Node3}], rpc:call(Node3, mnesia, dirty_read, [{Tab1, 1}])),
+ ?match(?badrpc([_Tab2, 1]), rpc:call(Node3, mnesia, dirty_read, [{Tab2, 1}])),
+ ?match(?badrpc([_Tab3, 1]), rpc:call(Node3, mnesia, dirty_read, [{Tab3, 1}])),
+ ?match(?badrpc([_Tab4, 1]), rpc:call(Node3, mnesia, dirty_read, [{Tab4, 1}])),
+
+ ?match({atomic, ok},
+ mnesia:change_table_copy_type(schema, Node3, ram_copies)),
+ ?match([], mnesia_test_lib:stop_mnesia([Node3])),
+
+ %% Added for OTP-44306
+ ?match(ok, rpc:call(Node3, mnesia, start, [])),
+ ?match({ok, _}, mnesia:change_config(extra_db_nodes, [Node3])),
+
+ mnesia_test_lib:sync_tables([Node3], [Tab1]),
+
+ ?match([], rpc:call(Node3, mnesia, dirty_read, [{Tab1, 1}])),
+
+ ?match({atomic, ok}, rpc:call(Node1, mnesia, clear_table, [Tab1])),
+
+ SyncedCheck = fun(Tab) ->
+ mnesia:lock({record,Tab,0}, write),
+ {OK, []} = rpc:multicall(Nodes, mnesia, table_info, [Tab, size]),
+ OK
+ end,
+ ?match({atomic, [0,1,0]}, mnesia:transaction(SyncedCheck, [Tab1])),
+ ?match({atomic, ok}, rpc:call(Node2, mnesia, clear_table, [Tab2])),
+ ?match({atomic, [1,0,0]}, mnesia:transaction(SyncedCheck, [Tab2])),
+ ?match({atomic, ok}, rpc:call(Node2, mnesia, clear_table, [Tab3])),
+ ?match({atomic, [1,0,0]}, mnesia:transaction(SyncedCheck, [Tab3])),
+
+ ?verify_mnesia(Nodes, []).
+
+table_access_modifications(suite) ->
+ [
+ change_table_access_mode,
+ change_table_load_order,
+ set_master_nodes,
+ offline_set_master_nodes
+ ].
+
+change_table_access_mode(suite) -> [];
+change_table_access_mode(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Tab = test_access_mode_tab,
+
+ Def = case mnesia_test_lib:diskless(Config) of
+ true -> [{name, Tab}, {ram_copies, Nodes}];
+ false -> [{name, Tab}, {ram_copies, [Node1]},
+ {disc_copies, [Node2]},
+ {disc_only_copies, [Node3]}]
+ end,
+ ?match({atomic, ok}, mnesia:create_table(Def)),
+
+ Write = fun(What) -> mnesia:write({Tab, 1, What}) end,
+ Read = fun() -> mnesia:read({Tab, 1}) end,
+
+ ?match({atomic, ok}, mnesia:transaction(Write, [test_ok])),
+ %% test read_only
+ ?match({atomic, ok}, mnesia:change_table_access_mode(Tab, read_only)),
+ ?match({aborted, _}, mnesia:transaction(Write, [nok])),
+ ?match({'EXIT', {aborted, _}}, mnesia:dirty_write({Tab, 1, [nok]})),
+ ?match({aborted, _}, rpc:call(Node2, mnesia, transaction, [Write, [nok]])),
+ ?match({aborted, _}, rpc:call(Node3, mnesia, transaction, [Write, [nok]])),
+ ?match({atomic, [{Tab, 1, test_ok}]}, mnesia:transaction(Read)),
+ %% test read_write
+ ?match({atomic, ok}, mnesia:change_table_access_mode(Tab, read_write)),
+ ?match({atomic, ok}, mnesia:transaction(Write, [test_ok1])),
+ ?match({atomic, [{Tab, 1, test_ok1}]}, mnesia:transaction(Read)),
+ ?match({atomic, ok}, rpc:call(Node2, mnesia, transaction, [Write, [test_ok2]])),
+ ?match({atomic, [{Tab, 1, test_ok2}]}, mnesia:transaction(Read)),
+ ?match({atomic, ok}, rpc:call(Node3, mnesia, transaction, [Write, [test_ok3]])),
+ ?match({atomic, [{Tab, 1, test_ok3}]}, mnesia:transaction(Read)),
+
+ ?match({atomic, ok}, mnesia:delete_table(Tab)),
+
+ Def4 = [{name, Tab}, {access_mode, read_only_bad}],
+ ?match({aborted, {bad_type, _, _}}, mnesia:create_table(Def4)),
+
+ Def2 = [{name, Tab}, {access_mode, read_only}],
+ ?match({atomic, ok}, mnesia:create_table(Def2)),
+ ?match({aborted, _}, mnesia:transaction(Write, [nok])),
+
+ ?match({atomic, ok}, mnesia:change_table_access_mode(Tab, read_write)),
+ ?match({atomic, ok}, mnesia:delete_table(Tab)),
+
+ Def3 = [{name, Tab}, {mnesia_test_lib:storage_type(disc_copies, Config),
+ [Node1, Node2]},
+ {access_mode, read_write}],
+ ?match({atomic, ok}, mnesia:create_table(Def3)),
+ ?match({atomic, ok}, mnesia:transaction(Write, [ok])),
+ ?match({atomic, ok}, mnesia:change_table_access_mode(Tab, read_only)),
+ ?match({aborted, _}, mnesia:del_table_copy(Tab, Node2)),
+ ?match({aborted, _}, mnesia:del_table_copy(Tab, Node1)),
+ ?match({aborted, _}, mnesia:delete_table(Tab)),
+ ?match({atomic, ok}, mnesia:change_table_access_mode(Tab, read_write)),
+
+ ?match({aborted, {bad_type, _, _}},
+ mnesia:change_table_access_mode(Tab, strange_atom)),
+ ?match({atomic, ok}, mnesia:delete_table(Tab)),
+
+ ?match({aborted, {no_exists, _}},
+ mnesia:change_table_access_mode(err_tab, read_only)),
+ ?match({aborted, {no_exists, _}},
+ mnesia:change_table_access_mode([Tab], read_only)),
+ ?verify_mnesia(Nodes, []).
+
+change_table_load_order(suite) -> [];
+change_table_load_order(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Tab1 = load_order_tab1,
+ Tab2 = load_order_tab2,
+ Tab3 = load_order_tab3,
+
+ Def = case mnesia_test_lib:diskless(Config) of
+ true -> [{ram_copies, Nodes}];
+ false ->
+ [{ram_copies, [Node1]},
+ {disc_copies, [Node2]},
+ {disc_only_copies, [Node3]}]
+ end,
+ ?match({atomic, ok}, mnesia:create_table(Tab1, Def)),
+ ?match({atomic, ok}, mnesia:create_table(Tab2, Def)),
+ ?match({atomic, ok}, mnesia:create_table(Tab3, Def)),
+
+ ?match({aborted, _}, mnesia:change_table_load_order(Tab1, should_be_integer)),
+ ?match({aborted, _}, mnesia:change_table_load_order(err_tab, 5)),
+ ?match({aborted, _}, mnesia:change_table_load_order([err_tab], 5)),
+ ?match({atomic, ok}, mnesia:change_table_load_order(Tab1, 5)),
+ ?match({atomic, ok}, mnesia:change_table_load_order(Tab2, 4)),
+ ?match({atomic, ok}, mnesia:change_table_load_order(Tab3, 73)),
+
+ ?match({aborted, _}, mnesia:change_table_load_order(schema, -32)),
+
+ ?verify_mnesia(Nodes, []).
+
+set_master_nodes(suite) -> [];
+set_master_nodes(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Tab1 = master_node_tab1,
+ Tab2 = master_node_tab2,
+ Tab3 = master_node_tab3,
+ Def1 = [{ram_copies, [Node1, Node2]}],
+ Def2 = [{disc_copies, [Node2, Node3]}],
+ Def3 = [{disc_only_copies, [Node3, Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
+ ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
+ ?match({atomic, ok}, mnesia:create_table(Tab3, Def3)),
+
+ ?match({error, _}, mnesia:set_master_nodes(schema, ['[email protected]'])),
+ ?match({error, _}, mnesia:set_master_nodes(badtab, [Node2, Node3])),
+ ?match({error, _}, mnesia:set_master_nodes(Tab1, [Node3])),
+ ?match([], mnesia:table_info(Tab1, master_nodes)),
+
+ ?match(ok, mnesia:set_master_nodes(schema, [Node3, Node1])),
+ ?match([Node3, Node1], mnesia:table_info(schema, master_nodes)),
+ ?match(ok, mnesia:set_master_nodes(Tab1, [Node2])),
+ ?match([Node2], mnesia:table_info(Tab1, master_nodes)),
+ ?match(ok, mnesia:set_master_nodes(Tab1, [Node2, Node1])),
+ ?match([Node2, Node1], mnesia:table_info(Tab1, master_nodes)),
+ ?match(ok, mnesia:set_master_nodes(Tab2, [Node2])), % Should set where_to_read to Node2!
+ ?match([Node2], mnesia:table_info(Tab2, master_nodes)),
+ ?match(ok, mnesia:set_master_nodes(Tab3, [Node3])),
+ ?match([Node3], mnesia:table_info(Tab3, master_nodes)),
+ ?match(ok, mnesia:set_master_nodes(Tab3, [])),
+ ?match([], mnesia:table_info(Tab3, master_nodes)),
+
+ ?match(ok, mnesia:set_master_nodes([Node1])),
+ ?match([Node1], mnesia:table_info(schema, master_nodes)),
+ ?match([Node1], mnesia:table_info(Tab1, master_nodes)),
+ ?match([], mnesia:table_info(Tab2, master_nodes)),
+ ?match([Node1], mnesia:table_info(Tab3, master_nodes)),
+
+ ?match(ok, mnesia:set_master_nodes([Node1, Node2])),
+ ?match([Node1, Node2], mnesia:table_info(schema, master_nodes)),
+ ?match([Node1, Node2], mnesia:table_info(Tab1, master_nodes)),
+ ?match([Node2], mnesia:table_info(Tab2, master_nodes)),
+ ?match([Node1], mnesia:table_info(Tab3, master_nodes)),
+
+ ?verify_mnesia(Nodes, []).
+
+offline_set_master_nodes(suite) -> [];
+offline_set_master_nodes(Config) when is_list(Config) ->
+ [Node] = Nodes = ?acquire_nodes(1, Config),
+ Tab1 = offline_master_node_tab1,
+ Tab2 = offline_master_node_tab2,
+ Tab3 = offline_master_node_tab3,
+ Tabs = ?sort([Tab1, Tab2, Tab3]),
+ Def1 = [{ram_copies, [Node]}],
+ Def2 = [{disc_copies, [Node]}],
+ Def3 = [{disc_only_copies, [Node]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
+ ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
+ ?match({atomic, ok}, mnesia:create_table(Tab3, Def3)),
+ ?match([], mnesia:system_info(master_node_tables)),
+ ?match([], mnesia_test_lib:stop_mnesia([Node])),
+
+ ?match(ok, mnesia:set_master_nodes(Tab1, [Node])),
+ ?match(ok, mnesia:set_master_nodes(Tab2, [Node])),
+ ?match(ok, mnesia:set_master_nodes(Tab3, [Node])),
+ ?match([], mnesia_test_lib:start_mnesia([Node])),
+ ?match(Tabs, ?sort(mnesia:system_info(master_node_tables))),
+
+ ?match([], mnesia_test_lib:stop_mnesia([Node])),
+ ?match(ok, mnesia:set_master_nodes(Tab1, [])),
+ ?match(ok, mnesia:set_master_nodes(Tab2, [])),
+ ?match(ok, mnesia:set_master_nodes(Tab3, [])),
+ ?match([], mnesia_test_lib:start_mnesia([Node])),
+ ?match([], mnesia:system_info(master_node_tables)),
+
+ ?match([], mnesia_test_lib:stop_mnesia([Node])),
+ ?match(ok, mnesia:set_master_nodes([Node])),
+ ?match([], mnesia_test_lib:start_mnesia([Node])),
+ AllTabs = ?sort([schema | Tabs]),
+ ?match(AllTabs, ?sort(mnesia:system_info(master_node_tables))),
+
+ ?match([], mnesia_test_lib:stop_mnesia([Node])),
+ ?match(ok, mnesia:set_master_nodes([])),
+ ?match([], mnesia_test_lib:start_mnesia([Node])),
+ ?match([], mnesia:system_info(master_node_tables)),
+
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Synchronize table with log or disc
+%%
+table_sync(suite) ->
+ [
+ dump_tables,
+ dump_log,
+ wait_for_tables,
+ force_load_table
+ ].
+
+%% Dump ram tables on disc
+dump_tables(suite) -> [];
+dump_tables(Config) when is_list(Config) ->
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+ Tab = dump_tables,
+ Schema = [{name, Tab}, {attributes, [k, v]}, {ram_copies, [Node2]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+
+ %% Dump 10 records
+ Size = 10,
+ Keys = lists:seq(1, Size),
+ Records = [{Tab, A, 7} || A <- Keys],
+ lists:foreach(fun(Rec) -> ?match(ok, mnesia:dirty_write(Rec)) end, Records),
+
+ AllKeys = fun() -> ?sort(mnesia:all_keys(Tab)) end,
+
+ ?match({atomic, Keys}, mnesia:transaction(AllKeys)),
+ ?match({atomic, ok}, mnesia:dump_tables([Tab])),
+
+ %% Delete one record
+ ?match(ok, mnesia:dirty_delete({Tab, 5})),
+ Keys2 = lists:delete(5, Keys),
+
+ ?match({atomic, Keys2}, mnesia:transaction(AllKeys)),
+
+ %% Check that all 10 are restored after a stop
+ ?match([], mnesia_test_lib:stop_mnesia([Node1, Node2])),
+ ?match([], mnesia_test_lib:start_mnesia([Node1, Node2])),
+ ?match(ok, mnesia:wait_for_tables([Tab], infinity)),
+
+ ?match({atomic, Keys}, mnesia:transaction(AllKeys)),
+
+ ?match({aborted,Reason} when element(1, Reason) == no_exists,
+ mnesia:dump_tables([foo])),
+ ?verify_mnesia(Nodes, []).
+
+dump_log(suite) -> [];
+dump_log(Config) when is_list(Config) ->
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+ Tab = dump_log,
+ Schema = [{name, Tab}, {attributes, [k, v]}, {ram_copies, [Node1, Node2]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+ Tab1 = dump_log1,
+ Schema1 = [{name, Tab1}, {attributes, [k, v]}, {disc_copies, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema1)),
+ Tab2 = dump_log2,
+ Schema2 = [{name, Tab2}, {attributes, [k, v]}, {disc_only_copies, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema2)),
+
+ ?match(ok, mnesia:dirty_write({Tab, 1, ok})),
+ ?match(ok, mnesia:dirty_write({Tab1, 1, ok})),
+ ?match(ok, mnesia:dirty_write({Tab2, 1, ok})),
+
+ ?match(dumped, mnesia:dump_log()),
+ ?match(dumped, rpc:call(Node2, mnesia, dump_log, [])),
+
+ ?match({atomic, ok}, mnesia:change_table_copy_type(schema, Node2, ram_copies)),
+ ?match(dumped, rpc:call(Node2, mnesia, dump_log, [])),
+
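+ %% Two processes each call mnesia:dump_log/0 a hundred times concurrently;
+ %% both must report finished within the timeout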
+ Self = self(),
+ spawn(fun() -> dump_log(100, Self) end),
+ spawn(fun() -> dump_log(100, Self) end),
+
+ ?match(ok, receive finished -> ok after 3000 -> timeout end),
+ ?match(ok, receive finished -> ok after 3000 -> timeout end),
+
+ ?verify_mnesia(Nodes, []).
+
+dump_log(N, Tester) when N > 0 ->
+ mnesia:dump_log(),
+ dump_log(N-1, Tester);
+dump_log(_, Tester) ->
+ Tester ! finished.
+
+
+wait_for_tables(doc) ->
+ ["Intf. test of wait_for_tables, see also force_load_table"];
+wait_for_tables(suite) -> [];
+wait_for_tables(Config) when is_list(Config) ->
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+ Tab = wf_tab,
+ Schema = [{name, Tab}, {ram_copies, [Node1, Node2]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+ ?match(ok, mnesia:wait_for_tables([wf_tab], infinity)),
+ ?match(ok, mnesia:wait_for_tables([], timer:seconds(5))),
+ ?match({timeout, [bad_tab]}, mnesia:wait_for_tables([bad_tab], timer:seconds(5))),
+ ?match(ok, mnesia:wait_for_tables([wf_tab], 0)),
+ ?match({error,_}, mnesia:wait_for_tables([wf_tab], -1)),
+ ?verify_mnesia(Nodes, []).
+
+force_load_table(suite) -> [];
+force_load_table(Config) when is_list(Config) ->
+ [Node1, Node2] = ?acquire_nodes(2, Config),
+ Tab = wf_tab,
+
+ Schema = [{name, Tab}, {disc_copies, [Node1, Node2]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+ ?match(ok, mnesia:dirty_write({Tab, 1, test_ok})),
+ mnesia_test_lib:kill_mnesia([Node1]),
+ ?match(ok, rpc:call(Node2, mnesia, dirty_write, [{Tab, 1, test_nok}])),
+ mnesia_test_lib:kill_mnesia([Node2]),
+ %% timer:sleep(timer:seconds(5)),
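+ %% Node2 was the last node to go down, so Node1 cannot load the table on
+ %% its own and wait_for_tables times out; force_load_table makes Node1
+ %% load its local (possibly stale) copy, which still holds test_ok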
+ ?match(ok, mnesia:start()),
+ ?match({timeout, [Tab]}, mnesia:wait_for_tables([Tab], 5)),
+ ?match({'EXIT', _}, mnesia:dirty_read({Tab, 1})),
+ ?match(yes, mnesia:force_load_table(Tab)),
+ ?match([{Tab, 1, test_ok}], mnesia:dirty_read({Tab, 1})),
+
+ ?match({error, _}, mnesia:force_load_table(error_tab)),
+ ?verify_mnesia([Node1], [Node2]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+user_properties(doc) ->
+ ["Test of reading, writing and deletion of user properties",
+ "Plus initialization of user properties when a table is created",
+ "Do also test mnesia:table_info(Tab, user_properties)"];
+user_properties(suite) -> [];
+user_properties(Config) when is_list(Config) ->
+ [Node] = Nodes = ?acquire_nodes(1, Config),
+ Tab1 = user_properties_1,
+ Tab2 = user_properties_2,
+ Tab3 = user_properties_3,
+ Def1 = [{ram_copies, [Node]}, {user_properties, []}],
+ Def2 = [{mnesia_test_lib:storage_type(disc_copies, Config), [Node]}],
+ Def3 = [{mnesia_test_lib:storage_type(disc_only_copies, Config), [Node]},
+ {user_properties, []}],
+
+ PropKey = my_prop,
+ Prop = {PropKey, some, elements},
+ Prop2 = {PropKey, some, other, elements},
+ YourProp= {your_prop},
+ ?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
+ ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
+ ?match({atomic, ok}, mnesia:create_table(Tab3, Def3)),
+
+ ?match([], mnesia:table_info(Tab1, user_properties)),
+ ?match([], mnesia:table_info(Tab2, user_properties)),
+ ?match([], mnesia:table_info(Tab3, user_properties)),
+
+ ?match({'EXIT', {no_exists, {Tab1, user_property, PropKey}}},
+ mnesia:read_table_property(Tab1, PropKey)),
+ ?match({'EXIT', {no_exists, {Tab2, user_property, PropKey}}},
+ mnesia:read_table_property(Tab2, PropKey)),
+ ?match({'EXIT', {no_exists, {Tab3, user_property, PropKey}}},
+ mnesia:read_table_property(Tab3, PropKey)),
+
+ ?match({atomic, ok}, mnesia:write_table_property(Tab1, Prop)),
+ ?match({atomic, ok}, mnesia:write_table_property(Tab2, Prop)),
+ ?match({atomic, ok}, mnesia:write_table_property(Tab3, Prop)),
+ ?match({atomic, ok}, mnesia:write_table_property(Tab1, YourProp)),
+ ?match({atomic, ok}, mnesia:write_table_property(Tab2, YourProp)),
+ ?match({atomic, ok}, mnesia:write_table_property(Tab3, YourProp)),
+
+ ?match(Prop, mnesia:read_table_property(Tab1, PropKey)),
+ ?match(Prop, mnesia:read_table_property(Tab2, PropKey)),
+ ?match(Prop, mnesia:read_table_property(Tab3, PropKey)),
+
+ ?match({atomic, ok}, mnesia:write_table_property(Tab1, Prop2)),
+ ?match({atomic, ok}, mnesia:write_table_property(Tab2, Prop2)),
+ ?match({atomic, ok}, mnesia:write_table_property(Tab3, Prop2)),
+ ?match(Prop2, mnesia:read_table_property(Tab1, PropKey)),
+ ?match(Prop2, mnesia:read_table_property(Tab2, PropKey)),
+ ?match(Prop2, mnesia:read_table_property(Tab3, PropKey)),
+
+ ?match({atomic, ok}, mnesia:delete_table_property(Tab1, PropKey)),
+ ?match({atomic, ok}, mnesia:delete_table_property(Tab2, PropKey)),
+ ?match({atomic, ok}, mnesia:delete_table_property(Tab3, PropKey)),
+
+ ?match([YourProp], mnesia:table_info(Tab1, user_properties)),
+ ?match([YourProp], mnesia:table_info(Tab2, user_properties)),
+ ?match([YourProp], mnesia:table_info(Tab3, user_properties)),
+
+ Tab4 = user_properties_4,
+ ?match({atomic, ok},
+ mnesia:create_table(Tab4, [{user_properties, [Prop]}])),
+
+ ?match([Prop], mnesia:table_info(Tab4, user_properties)),
+
+ %% Some error cases
+
+ ?match({aborted, {bad_type, Tab1, {}}},
+ mnesia:write_table_property(Tab1, {})),
+ ?match({aborted, {bad_type, Tab1, ali}},
+ mnesia:write_table_property(Tab1, ali)),
+
+ Tab5 = user_properties_5,
+ ?match({aborted, {bad_type, Tab5, {user_properties, {}}}},
+ mnesia:create_table(Tab5, [{user_properties, {}}])),
+ ?match({aborted, {bad_type, Tab5, {user_properties, ali}}},
+ mnesia:create_table(Tab5, [{user_properties, ali}])),
+ ?match({aborted, {bad_type, Tab5, {user_properties, [{}]}}},
+ mnesia:create_table(Tab5, [{user_properties, [{}]}])),
+ ?match({aborted, {bad_type, Tab5, {user_properties, [ali]}}},
+ mnesia:create_table(Tab5, [{user_properties, [ali]}])),
+
+ ?verify_mnesia(Nodes, []).
+
+
+unsupp_user_props(doc) ->
+ ["Simple test of adding user props in a schema_transaction"];
+unsupp_user_props(suite) -> [];
+unsupp_user_props(Config) when is_list(Config) ->
+ [Node1] = ?acquire_nodes(1, Config),
+ Tab1 = silly1,
+ Tab2 = silly2,
+ Storage = mnesia_test_lib:storage_type(ram_copies, Config),
+
+ ?match({atomic, ok}, rpc:call(Node1, mnesia,
+ create_table, [Tab1, [{Storage, [Node1]}]])),
+ ?match({atomic, ok}, rpc:call(Node1, mnesia,
+ create_table, [Tab2, [{Storage, [Node1]}]])),
+
+ F1 = fun() ->
+ mnesia_schema:do_write_table_property(
+ silly1, {prop, propval1}),
+ mnesia_schema:do_write_table_property(
+ silly2, {prop, propval2}), % same key as above
+ mnesia_schema:do_write_table_property(
+ schema, {prop, propval3}) % same key as above
+ end,
+ ?match({atomic, ok}, rpc:call(Node1, mnesia_schema,
+ schema_transaction, [F1])),
+
+ ?match([{prop,propval1}], rpc:call(Node1, mnesia,
+ table_info, [silly1, user_properties])),
+ ?match([{prop,propval2}], rpc:call(Node1, mnesia,
+ table_info, [silly2, user_properties])),
+ ?match([{prop,propval3}], rpc:call(Node1, mnesia,
+ table_info, [schema, user_properties])),
+
+ F2 = fun() ->
+ mnesia_schema:do_write_table_property(
+ silly1, {prop, propval1a}),
+ mnesia_schema:do_write_table_property(
+ silly1, {prop, propval1b}) % same key as above
+ end,
+ ?match({atomic, ok}, rpc:call(Node1, mnesia_schema,
+ schema_transaction, [F2])),
+
+ ?match([{prop,propval1b}], rpc:call(Node1, mnesia,
+ table_info,
+ [silly1, user_properties])),
+ ?verify_mnesia([Node1], []).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+snmp_access(doc) ->
+ ["Make Mnesia table accessible via SNMP"];
+
+snmp_access(suite) ->
+ [
+ snmp_open_table,
+ snmp_close_table,
+ snmp_get_next_index,
+ snmp_get_row,
+ snmp_get_mnesia_key,
+ snmp_update_counter,
+ snmp_order
+ ].
+
+snmp_open_table(suite) -> [];
+snmp_open_table(Config) when is_list(Config) ->
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+ Tab1 = local_snmp_table,
+
+ Storage = mnesia_test_lib:storage_type(disc_copies, Config),
+ Def1 =
+ case mnesia_test_lib:diskless(Config) of
+ true -> [{ram_copies, Nodes}];
+ false ->
+ [{disc_copies, [Node1]}, {ram_copies, [Node2]}]
+ end,
+
+ Tab2 = ext_snmp_table,
+ Def2 = [{Storage, [Node2]}],
+ ErrTab = non_existing_tab,
+ ?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
+ ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
+ ?match({atomic, ok}, mnesia:snmp_open_table(Tab1, [{key, integer}])),
+ ?match({atomic, ok}, mnesia:snmp_open_table(Tab2, [{key, integer}])),
+ ?match({aborted, _}, mnesia:snmp_open_table(ErrTab, [{key, integer}])),
+ ?verify_mnesia(Nodes, []).
+
+snmp_close_table(suite) -> [];
+snmp_close_table(Config) when is_list(Config) ->
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+ Tab1 = local_snmp_table,
+ Storage = mnesia_test_lib:storage_type(disc_copies, Config),
+ Def1 =
+ case mnesia_test_lib:diskless(Config) of
+ true -> [{ram_copies, Nodes}];
+ false ->
+ [{disc_copies, [Node1]}, {ram_copies, [Node2]}]
+ end,
+
+ Tab2 = ext_snmp_table,
+ Def2 = [{snmp, [{key, integer}]}, {Storage, [Node2]}],
+ ErrTab = non_existing_tab,
+ ?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
+ ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
+ ?match({atomic, ok}, mnesia:create_table(no_snmp_tab, [])),
+ add_some_records(Tab1, Tab2, 3),
+ ?match({atomic, ok}, mnesia:snmp_open_table(Tab1, [{key, integer}])),
+ add_some_records(Tab1, Tab2, 5),
+ ?match({atomic, ok}, mnesia:snmp_close_table(Tab1)),
+
+ Transform = fun(Tab, Key) ->
+ [{T,K,V}] = mnesia:read(Tab, Key, write),
+ mnesia:delete(T,K, write),
+ mnesia:write({T, {K,K}, V, 43+V})
+ end,
+
+ ?match({atomic, ok}, mnesia:transform_table(Tab1, ignore, [key,val,new])),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() ->
+ mnesia:write_lock_table(Tab1),
+ Keys = mnesia:select(Tab1, [{{'_','$1','_'}, [],
+ ['$1']}]),
+ [Transform(Tab1, Key) || Key <- Keys],
+ ok
+ end)),
+ ?match([{Tab1, {1, 1}, 1, 44}], mnesia:dirty_read(Tab1, {1, 1})),
+ ?match({atomic, ok}, mnesia:snmp_open_table(Tab1, [{key,{integer,integer}}])),
+
+ ?match({atomic, ok}, mnesia:snmp_close_table(Tab2)),
+ ?match({atomic, ok}, mnesia:transform_table(Tab2, ignore, [key,val,new])),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() ->
+ mnesia:write_lock_table(Tab2),
+ Keys = mnesia:select(Tab2, [{{'_','$1','_'}, [],
+ ['$1']}]),
+ [Transform(Tab2, Key) || Key <- Keys],
+ ok
+ end)),
+
+ ?match({atomic, ok}, mnesia:snmp_open_table(Tab2, [{key,{integer,integer}}])),
+
+ %% Should be aborted ????
+ ?match({atomic, ok}, mnesia:snmp_close_table(no_snmp_tab)),
+ ?match({aborted, _}, mnesia:snmp_close_table(ErrTab)),
+ ?verify_mnesia(Nodes, []).
+
+snmp_get_next_index(suite) -> [];
+snmp_get_next_index(Config) when is_list(Config) ->
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+ Tab1 = local_snmp_table,
+ Storage = mnesia_test_lib:storage_type(disc_copies, Config),
+ Def1 =
+ case mnesia_test_lib:diskless(Config) of
+ true -> [{ram_copies, Nodes}];
+ false ->
+ [{disc_copies, [Node1]}, {ram_copies, [Node2]}]
+ end,
+
+ Tab2 = ext_snmp_table,
+ Def2 = [{Storage, [Node2]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
+ ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
+ ?match({atomic, ok}, mnesia:snmp_open_table(Tab1, [{key, integer}])),
+ ?match({atomic, ok}, mnesia:snmp_open_table(Tab2, [{key, integer}])),
+ add_some_records(Tab1, Tab2, 1),
+ Test =
+ fun() ->
+ %% Test local tables
+ {success, Res11} = ?match({ok, _}, mnesia:snmp_get_next_index(Tab1,[])),
+ {ok, Index11} = Res11,
+ {success, _Res12} =
+ ?match(endOfTable, mnesia:snmp_get_next_index(Tab1, Index11)),
+ ?match({'EXIT',_}, mnesia:snmp_get_next_index(Tab1, endOfTable)),
+
+ %% Test external table
+ {success, Res21} =
+ ?match({ok, _}, mnesia:snmp_get_next_index(Tab2, [])),
+ {ok, Index21} = Res21,
+ {success, _Res22} =
+ ?match(endOfTable, mnesia:snmp_get_next_index(Tab2, Index21)),
+
+ {ok, Row} = mnesia:snmp_get_row(Tab1, Index11),
+ ?match(ok, mnesia:dirty_delete(Tab1, hd(Index11))),
+
+ ?match(endOfTable, mnesia:snmp_get_next_index(Tab1,[])),
+
+ ok = mnesia:dirty_write(Row), %% Reset to coming tests
+
+ %% Test of non existing table
+ %%?match(endOfTable, mnesia:snmp_get_next_index(ErrTab, [])),
+ ok
+ end,
+ ?match(ok, Test()),
+ ?match({atomic,ok}, mnesia:transaction(Test)),
+ ?match(ok, mnesia:sync_dirty(Test)),
+ ?match(ok, mnesia:activity(transaction,Test,mnesia)),
+
+ %%io:format("**********Before ~p~n", [mnesia_lib:val({Tab1,snmp})]),
+ %%io:format(" ~p ~n", [ets:tab2list(mnesia_lib:val({local_snmp_table,{index,snmp}}))]),
+ ?match([], mnesia_test_lib:stop_mnesia(Nodes)),
+ ?match([], mnesia_test_lib:start_mnesia(Nodes, [Tab1, Tab2])),
+ %%io:format("**********After ~p~n", [mnesia_lib:val({Tab1,snmp})]),
+ %%io:format(" ~p ~n", [ets:tab2list(mnesia_lib:val({local_snmp_table,{index,snmp}}))]),
+
+ ?match(ok, Test()),
+ ?match({atomic,ok}, mnesia:transaction(Test)),
+ ?match(ok, mnesia:sync_dirty(Test)),
+ ?match(ok, mnesia:activity(transaction,Test,mnesia)),
+
+ ?verify_mnesia(Nodes, []).
+
+add_some_records(Tab1, Tab2, N) ->
+ Recs1 = [{Tab1, I, I} || I <- lists:reverse(lists:seq(1, N))],
+ Recs2 = [{Tab2, I, I} || I <- lists:reverse(lists:seq(20, 20+N-1))],
+ lists:foreach(fun(R) -> mnesia:dirty_write(R) end, Recs1),
+ Fun = fun(R) -> mnesia:write(R) end,
+ Trans = fun() -> lists:foreach(Fun, Recs2) end,
+ {atomic, ok} = mnesia:transaction(Trans),
+ %% Sync things, so everything gets everywhere!
+ {atomic, ok} = mnesia:sync_transaction(fun() -> mnesia:write(lists:last(Recs1)) end),
+ {atomic, ok} = mnesia:sync_transaction(fun() -> mnesia:write(lists:last(Recs2)) end),
+ ?sort(Recs1 ++ Recs2).
+
+snmp_get_row(suite) -> [];
+snmp_get_row(Config) when is_list(Config) ->
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+ Tab1 = local_snmp_table,
+ Storage = mnesia_test_lib:storage_type(disc_copies, Config),
+ Def1 =
+ case mnesia_test_lib:diskless(Config) of
+ true -> [{ram_copies, Nodes}];
+ false ->
+ [{disc_copies, [Node1]}, {ram_copies, [Node2]}]
+ end,
+ Tab2 = ext_snmp_table,
+ Def2 = [{Storage, [Node2]}],
+ Tab3 = snmp_table,
+ Def3 = [{Storage, [Node1]},
+ {attributes, [key, data1, data2]}],
+
+ Setup = fun() ->
+ ?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
+ ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
+ ?match({atomic, ok}, mnesia:create_table(Tab3, Def3)),
+ ?match({atomic, ok}, mnesia:snmp_open_table(Tab1, [{key, integer}])),
+ ?match({atomic, ok}, mnesia:snmp_open_table(Tab2, [{key, integer}])),
+ ?match({atomic, ok}, mnesia:snmp_open_table(
+ Tab3, [{key, {fix_string,integer}}])),
+ add_some_records(Tab1, Tab2, 1)
+ end,
+ Clear = fun() ->
+ ?match({atomic, ok}, mnesia:delete_table(Tab1)),
+ ?match({atomic, ok}, mnesia:delete_table(Tab2)),
+ ?match({atomic, ok}, mnesia:delete_table(Tab3))
+ end,
+ Test =
+ fun() ->
+ %% Test local tables
+ {success, Res11} =
+ ?match({ok, [1]}, mnesia:snmp_get_next_index(Tab1,[])),
+ {ok, Index11} = Res11,
+ ?match({ok, {Tab1,1,1}}, mnesia:snmp_get_row(Tab1, Index11)),
+ ?match(endOfTable, mnesia:snmp_get_next_index(Tab1, Index11)),
+ ?match({'EXIT',_}, mnesia:snmp_get_row(Tab1, endOfTable)),
+ ?match(undefined, mnesia:snmp_get_row(Tab1, [73])),
+
+ Add = fun() -> mnesia:write({Tab3, {"f_string", 3}, data1, data2}) end,
+ ?match({atomic, ok}, mnesia:transaction(Add)),
+ {success, {ok, Index31}} = ?match({ok, RowIndex31} when is_list(RowIndex31),
+ mnesia:snmp_get_next_index(Tab3,[])),
+ ?match({ok, Row31} when is_tuple(Row31),
+ mnesia:snmp_get_row(Tab3, Index31)),
+ ?match(endOfTable, mnesia:snmp_get_next_index(Tab3, Index31)),
+ Del = fun() -> mnesia:delete({Tab3,{"f_string",3}}) end,
+ ?match({atomic, ok}, mnesia:transaction(Del)),
+ ?match(undefined, mnesia:snmp_get_row(Tab3, "f_string" ++ [3])),
+ ?match(undefined, mnesia:snmp_get_row(Tab3, "f_string" ++ [73])),
+
+ %% Test external table
+ {success, Res21} = ?match({ok,[20]}, mnesia:snmp_get_next_index(Tab2, [])),
+ {ok, Index21} = Res21,
+ ?match({ok, Row2} when is_tuple(Row2), mnesia:snmp_get_row(Tab2, Index21)),
+ ?match(endOfTable, mnesia:snmp_get_next_index(Tab2, Index21)),
+ %% Test of non existing table
+ %% ?match(endOfTable, mnesia:snmp_get_next_index(ErrTab, [])),
+ ok
+ end,
+ Setup(),
+ ?match(ok, Test()),
+ Clear(), Setup(),
+ ?match({atomic,ok}, mnesia:transaction(Test)),
+ Clear(), Setup(),
+ ?match(ok, mnesia:sync_dirty(Test)),
+ Clear(), Setup(),
+ ?match(ok, mnesia:activity(transaction,Test,mnesia)),
+
+ Clear(), Setup(),
+ ?match([], mnesia_test_lib:stop_mnesia(Nodes)),
+ ?match([], mnesia_test_lib:start_mnesia(Nodes, [Tab1, Tab2])),
+ ?match(ok, Test()),
+ Clear(), Setup(),
+ ?match([], mnesia_test_lib:stop_mnesia(Nodes)),
+ ?match([], mnesia_test_lib:start_mnesia(Nodes, [Tab1, Tab2])),
+ ?match({atomic,ok}, mnesia:transaction(Test)),
+
+ ?verify_mnesia(Nodes, []).
+
+snmp_get_mnesia_key(suite) -> [];
+snmp_get_mnesia_key(Config) when is_list(Config) ->
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+ Tab1 = local_snmp_table,
+ Storage = mnesia_test_lib:storage_type(disc_copies, Config),
+ Def1 =
+ case mnesia_test_lib:diskless(Config) of
+ true -> [{ram_copies, Nodes}];
+ false ->
+ [{disc_copies, [Node1]}, {ram_copies, [Node2]}]
+ end,
+
+ Tab2 = ext_snmp_table,
+ Def2 = [{Storage, [Node2]}],
+
+ Tab3 = fix_string,
+ Setup = fun() ->
+ ?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
+ ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
+ ?match({atomic, ok}, mnesia:create_table(Tab3, Def1)),
+ ?match({atomic, ok}, mnesia:snmp_open_table(Tab1, [{key, integer}])),
+ ?match({atomic, ok}, mnesia:snmp_open_table(Tab2, [{key, integer}])),
+ ?match({atomic, ok}, mnesia:snmp_open_table(Tab3, [{key, {fix_string,integer}}])),
+
+ add_some_records(Tab1, Tab2, 1)
+ end,
+ Clear = fun() ->
+ ?match({atomic, ok}, mnesia:delete_table(Tab1)),
+ ?match({atomic, ok}, mnesia:delete_table(Tab2)),
+ ?match({atomic, ok}, mnesia:delete_table(Tab3))
+ end,
+ Test =
+ fun() ->
+ %% Test local tables
+ {success, Res11} =
+ ?match({ok, [1]}, mnesia:snmp_get_next_index(Tab1,[])),
+ {ok, Index11} = Res11,
+ ?match({ok, 1}, mnesia:snmp_get_mnesia_key(Tab1, Index11)),
+ %% Test external tables
+ {success, Res21} =
+ ?match({ok, [20]}, mnesia:snmp_get_next_index(Tab2, [])),
+ {ok, Index21} = Res21,
+ ?match({ok, 20}, mnesia:snmp_get_mnesia_key(Tab2, Index21)),
+ ?match(undefined, mnesia:snmp_get_mnesia_key(Tab2, [97])),
+ ?match({'EXIT', _}, mnesia:snmp_get_mnesia_key(Tab2, 97)),
+
+ ?match({atomic,ok}, mnesia:transaction(fun() -> mnesia:delete({Tab1,1}) end)),
+ ?match(undefined, mnesia:snmp_get_mnesia_key(Tab1, Index11)),
+
+ ?match({atomic,ok},mnesia:transaction(fun() -> mnesia:write({Tab1,73,7}) end)),
+ ?match({ok, 73}, mnesia:snmp_get_mnesia_key(Tab1, [73])),
+ ?match({atomic,ok}, mnesia:transaction(fun() -> mnesia:delete({Tab1,73}) end)),
+ ?match(undefined, mnesia:snmp_get_mnesia_key(Tab1, [73])),
+
+ ?match({atomic,ok},mnesia:transaction(fun() -> mnesia:write({Tab3,{"S",5},7}) end)),
+ ?match({ok,{"S",5}}, mnesia:snmp_get_mnesia_key(Tab3, [$S,5])),
+ ?match({atomic,ok},mnesia:transaction(fun() -> mnesia:delete({Tab3,{"S",5}}) end)),
+ ?match(undefined, mnesia:snmp_get_mnesia_key(Tab3, [$S,5])),
+
+ ok
+ end,
+ Setup(),
+ ?match(ok, Test()),
+ Clear(), Setup(),
+ ?match({atomic,ok}, mnesia:transaction(Test)),
+ Clear(), Setup(),
+ ?match(ok, mnesia:sync_dirty(Test)),
+ Clear(), Setup(),
+ ?match(ok, mnesia:activity(transaction,Test,mnesia)),
+ ?verify_mnesia(Nodes, []).
+
+snmp_update_counter(doc) ->
+ ["Verify that counters may be updated for tables with SNMP property"];
+snmp_update_counter(suite) -> [];
+snmp_update_counter(Config) when is_list(Config) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = snmp_update_counter,
+ Def = [{attributes, [key, value]},
+ {snmp, [{key, integer}]},
+ {ram_copies, [Node1]}
+ ],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ Oid = {Tab, 1},
+ ?match([], mnesia:dirty_read(Oid)),
+ ?match(ok, mnesia:dirty_write({Tab, 1, 1})),
+ ?match([{Tab, _Key, 1}], mnesia:dirty_read(Oid)),
+ ?match(3, mnesia:dirty_update_counter(Oid, 2)),
+ ?match([{Tab, _Key, 3}], mnesia:dirty_read(Oid)),
+ ?verify_mnesia(Nodes, []).
+
+snmp_order(doc) ->
+ ["Verify that sort order is correct in transactions and dirty variants"];
+snmp_order(suite) -> [];
+snmp_order(Config) when is_list(Config) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = snmp_order,
+ Def = [{attributes, [key, value]},
+ {snmp, [{key, {integer, integer, integer}}]},
+ {ram_copies, [Node1]}
+ ],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ Oid = {Tab, 1},
+ ?match([], mnesia:dirty_read(Oid)),
+ ?match({'EXIT', {aborted, _}}, mnesia:dirty_write({Tab, 1, 1})),
+ [mnesia:dirty_write({Tab, {A,B,2}, default}) ||
+ A <- lists:seq(1, 9, 2),
+ B <- lists:seq(2, 8, 2)],
+
+ Test1 = fun() ->
+ Keys0 = get_keys(Tab, []),
+ ?match(Keys0, lists:sort(Keys0)),
+ ?match([[1,2,2]|_], Keys0),
+ Keys1 = get_keys(Tab, [5]),
+ ?match(Keys1, lists:sort(Keys1)),
+ ?match([[5,2,2]|_], Keys1),
+ Keys2 = get_keys(Tab, [7, 4]),
+ ?match(Keys2, lists:sort(Keys2)),
+ ?match([[7,4,2]|_], Keys2),
+ ok
+ end,
+ ?match(ok, Test1()),
+ ?match({atomic, ok},mnesia:transaction(Test1)),
+ ?match(ok,mnesia:sync_dirty(Test1)),
+
+ Test2 = fun() ->
+ mnesia:write(Tab, {Tab,{0,0,2},updated}, write),
+ mnesia:write(Tab, {Tab,{5,3,2},updated}, write),
+ mnesia:write(Tab, {Tab,{10,10,2},updated}, write),
+ Keys0 = get_keys(Tab, []),
+ ?match([[0,0,2],[1,2,2]|_], Keys0),
+ ?match(Keys0, lists:sort(Keys0)),
+
+ Keys1 = get_keys(Tab, [5]),
+ ?match([[5,2,2],[5,3,2]|_], Keys1),
+ ?match(Keys1, lists:sort(Keys1)),
+
+ Keys2 = get_keys(Tab, [7,4]),
+ ?match([[7,4,2]|_], Keys2),
+ ?match(Keys2, lists:sort(Keys2)),
+ ?match([10,10,2], lists:last(Keys0)),
+ ?match([10,10,2], lists:last(Keys1)),
+ ?match([10,10,2], lists:last(Keys2)),
+
+ ?match([[10,10,2]], get_keys(Tab, [10])),
+ ok
+ end,
+
+ ?match({atomic, ok},mnesia:transaction(Test2)),
+
+ ?verify_mnesia(Nodes, []).
+
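+%% Follow snmp_get_next_index/2 from Key until endOfTable, collecting the
+%% row indices in SNMP order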
+get_keys(Tab, Key) ->
+ case mnesia:snmp_get_next_index(Tab, Key) of
+ endOfTable -> [];
+ {ok, Next} ->
+ [Next|get_keys(Tab, Next)]
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+-record(tab, {i, e1, e2}). % Simple test table
+
+subscriptions(doc) ->
+ ["Test the event subscription mechanism"];
+subscriptions(suite) ->
+ [subscribe_standard,
+ subscribe_extended].
+
+subscribe_extended(doc) ->
+ ["Test the extended set of events, test with and without checkpoints. "];
+subscribe_extended(suite) ->
+ [];
+subscribe_extended(Config) when is_list(Config) ->
+ [N1, N2]=Nodes=?acquire_nodes(2, Config),
+ Tab1 = etab,
+ Storage = mnesia_test_lib:storage_type(ram_copies, Config),
+ Def1 = [{Storage, [N1, N2]}, {attributes, record_info(fields, tab)}],
+ ?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
+
+ Tab2 = bag,
+ Def2 = [{Storage, [N1, N2]},
+ {type, bag},
+ {record_name, Tab1},
+ {attributes, record_info(fields, tab)}],
+ ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
+
+ ?match({ok, N1}, mnesia:subscribe({table, Tab1, detailed})),
+ ?match({ok, N1}, mnesia:subscribe({table, Tab2, detailed})),
+
+ ?match({error, {already_exists, _}}, mnesia:subscribe({table, Tab1, simple})),
+ ?match({error, {badarg, {table, Tab1, bad}}}, mnesia:subscribe({table, Tab1, bad})),
+
+ ?match({ok, N1}, mnesia:subscribe(activity)),
+ test_ext_sub(Tab1, Tab2),
+
+ ?match({ok, N1}, mnesia:unsubscribe(activity)),
+ ?match({ok, N1}, mnesia:subscribe({table, Tab1, detailed})),
+ ?match({atomic, ok}, mnesia:clear_table(Tab1)),
+ ?match({mnesia_table_event, {delete, schema, {schema, Tab1}, [{schema, Tab1, _}],_}}, recv_event()),
+ ?match({mnesia_table_event, {write, schema, {schema, Tab1, _}, [], _}}, recv_event()),
+
+ ?match({atomic, ok}, mnesia_schema:clear_table(Tab2)),
+ ?match({mnesia_table_event, {delete, schema, {schema, Tab2}, [{schema, Tab2, _}],_}},
+ recv_event()),
+ ?match({mnesia_table_event, {write, schema, {schema, Tab2, _}, [], _}}, recv_event()),
+
+ ?match({ok, N1}, mnesia:unsubscribe({table, Tab2, detailed})),
+ {ok, _, _} = mnesia:activate_checkpoint([{name, testing},
+ {ram_overrides_dump, true},
+ {max, [Tab1, Tab2]}]),
+ ?match({ok, N1}, mnesia:subscribe({table, Tab2, detailed})),
+ ?match({ok, N1}, mnesia:subscribe(activity)),
+ test_ext_sub(Tab1, Tab2),
+
+ ?verify_mnesia(Nodes, []).
+
+test_ext_sub(Tab1, Tab2) ->
+ %% The basics
+ Rec1 = {Tab1, 1, 0, 0},
+ Rec2 = {Tab1, 1, 1, 0},
+ Rec3 = {Tab1, 2, 1, 0},
+ Rec4 = {Tab1, 2, 2, 0},
+
+ Write = fun(Tab, Rec) ->
+ mnesia:transaction(fun() -> mnesia:write(Tab, Rec, write)
+ end)
+ end,
+ Delete = fun(Tab, Rec) ->
+ mnesia:transaction(fun() -> mnesia:delete(Tab, Rec, write)
+ end)
+ end,
+ DelObj = fun(Tab, Rec) ->
+ mnesia:transaction(fun() -> mnesia:delete_object(Tab, Rec, write)
+ end)
+ end,
+
+ S = self(),
+ D = {dirty, self()},
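+ %% With a detailed subscription, write events carry the old records being
+ %% replaced and delete events carry the records actually removed; the last
+ %% element tags the activity: {dirty, Pid} or a transaction id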
+ %% SET
+ ?match(ok, mnesia:dirty_write(Rec1)),
+ ?match({mnesia_table_event, {write, Tab1, Rec1, [], D}}, recv_event()),
+ ?match(ok, mnesia:dirty_write(Rec3)),
+ ?match({mnesia_table_event, {write, Tab1, Rec3, [], D}}, recv_event()),
+ ?match(ok, mnesia:dirty_write(Rec1)),
+ ?match({mnesia_table_event, {write, Tab1, Rec1, [Rec1], D}}, recv_event()),
+ ?match({atomic, ok}, Write(Tab1, Rec2)),
+ ?match({mnesia_table_event, {write, Tab1, Rec2, [Rec1], {tid,_,S}}}, recv_event()),
+ ?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
+ ?match(ok, mnesia:dirty_delete({Tab1, 2})),
+ ?match({mnesia_table_event, {delete, Tab1, {Tab1, 2}, [Rec3], D}}, recv_event()),
+ ?match({atomic, ok}, DelObj(Tab1, Rec2)),
+ ?match({mnesia_table_event, {delete, Tab1, Rec2, [Rec2], {tid,_,S}}}, recv_event()),
+ ?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
+
+ ?match({atomic, ok}, Delete(Tab1, 1)),
+ ?match({mnesia_table_event, {delete, Tab1, {Tab1, 1}, [], {tid,_,S}}}, recv_event()),
+ ?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
+
+ ?match({ok, _N1}, mnesia:unsubscribe({table, Tab1, detailed})),
+
+ %% BAG
+
+ ?match({atomic, ok}, Write(Tab2, Rec1)),
+ ?match({mnesia_table_event, {write, Tab2, Rec1, [], {tid,_,S}}}, recv_event()),
+ ?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
+ ?match({atomic, ok}, Write(Tab2, Rec4)),
+ ?match({mnesia_table_event, {write, Tab2, Rec4, [], {tid,_,S}}}, recv_event()),
+ ?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
+ ?match({atomic, ok}, Write(Tab2, Rec3)),
+ ?match({mnesia_table_event, {write, Tab2, Rec3, [Rec4], {tid,_,S}}}, recv_event()),
+ ?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
+ ?match({atomic, ok}, Write(Tab2, Rec2)),
+ ?match({mnesia_table_event, {write, Tab2, Rec2, [Rec1], {tid,_,S}}}, recv_event()),
+ ?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
+ ?match({atomic, ok}, Write(Tab2, Rec1)),
+ ?match({mnesia_table_event, {write, Tab2, Rec1, [Rec1, Rec2], {tid,_,S}}}, recv_event()),
+ ?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
+ ?match({atomic, ok}, DelObj(Tab2, Rec2)),
+ ?match({mnesia_table_event, {delete, Tab2, Rec2, [Rec2], {tid,_,S}}}, recv_event()),
+ ?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
+ ?match({atomic, ok}, Delete(Tab2, 1)),
+ ?match({mnesia_table_event, {delete, Tab2, {Tab2, 1}, [Rec1], {tid,_,S}}}, recv_event()),
+ ?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
+ ?match({atomic, ok}, Delete(Tab2, 2)),
+ ?match({mnesia_table_event, {delete, Tab2, {Tab2, 2}, [Rec4, Rec3], {tid,_,S}}}, recv_event()),
+ ?match({mnesia_activity_event, {complete, {tid,_,S}}}, recv_event()),
+ ok.
+
+
+subscribe_standard(doc) ->
+ ["Tests system events and the orignal table events"];
+subscribe_standard(suite) -> [];
+subscribe_standard(Config) when is_list(Config)->
+ [N1, N2]=?acquire_nodes(2, Config),
+ Tab = tab,
+
+ Storage = mnesia_test_lib:storage_type(disc_copies, Config),
+ Def = [{Storage, [N1, N2]}, {attributes, record_info(fields, tab)}],
+
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+
+ %% Check system events
+ ?match({ok, N1}, mnesia:subscribe(system)),
+ ?match({ok, N1}, mnesia:subscribe(activity)),
+
+ ?match([], mnesia_test_lib:kill_mnesia([N2])),
+ ?match({mnesia_system_event, {mnesia_down, N2}}, recv_event()),
+ ?match(timeout, recv_event()),
+
+ ?match([], mnesia_test_lib:start_mnesia([N2], [Tab])),
+ ?match({mnesia_activity_event, _}, recv_event()),
+ ?match({mnesia_system_event,{mnesia_up, N2}}, recv_event()),
+
+ ?match(true, lists:member(self(), mnesia:system_info(subscribers))),
+ ?match([], mnesia_test_lib:kill_mnesia([N1])),
+ timer:sleep(500),
+ mnesia_test_lib:flush(),
+ ?match([], mnesia_test_lib:start_mnesia([N1], [Tab])),
+ ?match(timeout, recv_event()),
+
+ ?match({ok, N1}, mnesia:subscribe(system)),
+ ?match({error, {already_exists, system}}, mnesia:subscribe(system)),
+ ?match(stopped, mnesia:stop()),
+ ?match({mnesia_system_event, {mnesia_down, N1}}, recv_event()),
+ ?match({error, {node_not_running, N1}}, mnesia:subscribe(system)),
+ ?match([], mnesia_test_lib:start_mnesia([N1, N2], [Tab])),
+
+ %% Check table events
+ ?match({ok, N1}, mnesia:subscribe(activity)),
+ Old_Level = mnesia:set_debug_level(trace),
+ ?match({ok, N1}, mnesia:subscribe({table,Tab})),
+
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(#tab{i=155}) end)),
+ Self = self(),
+ ?match({mnesia_table_event, {write, _, _}}, recv_event()),
+ ?match({mnesia_activity_event, {complete, {tid, _, Self}}}, recv_event()),
+
+ ?match({ok, N1}, mnesia:unsubscribe({table,Tab})),
+ ?match({ok, N1}, mnesia:unsubscribe(activity)),
+
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(#tab{i=255}) end)),
+
+ ?match(timeout, recv_event()),
+ mnesia:set_debug_level(Old_Level),
+
+ %% Check deletion of replica
+
+ ?match({ok, N1}, mnesia:subscribe({table,Tab})),
+ ?match({ok, N1}, mnesia:subscribe(activity)),
+ ?match(ok, mnesia:dirty_write(#tab{i=355})),
+ ?match({mnesia_table_event, {write, _, _}}, recv_event()),
+ ?match({atomic, ok}, mnesia:del_table_copy(Tab, N1)),
+ ?match({mnesia_activity_event, _}, recv_event()),
+ ?match(ok, mnesia:dirty_write(#tab{i=455})),
+ ?match(timeout, recv_event()),
+
+ ?match({atomic, ok}, mnesia:move_table_copy(Tab, N2, N1)),
+ ?match({mnesia_activity_event, _}, recv_event()),
+ ?match({ok, N1}, mnesia:subscribe({table,Tab})),
+ ?match(ok, mnesia:dirty_write(#tab{i=555})),
+ ?match({mnesia_table_event, {write, _, _}}, recv_event()),
+ ?match({atomic, ok}, mnesia:move_table_copy(Tab, N1, N2)),
+ ?match({mnesia_activity_event, _}, recv_event()),
+ ?match(ok, mnesia:dirty_write(#tab{i=655})),
+ ?match(timeout, recv_event()),
+
+ ?match({atomic, ok}, mnesia:add_table_copy(Tab, N1, ram_copies)),
+ ?match({mnesia_activity_event, _}, recv_event()),
+ ?match({ok, N1}, mnesia:subscribe({table,Tab})),
+ ?match({error, {already_exists, {table,Tab, simple}}},
+ mnesia:subscribe({table,Tab})),
+ ?match(ok, mnesia:dirty_write(#tab{i=755})),
+ ?match({mnesia_table_event, {write, _, _}}, recv_event()),
+
+ ?match({atomic, ok}, mnesia:delete_table(Tab)),
+ ?match({mnesia_activity_event, _}, recv_event()),
+ ?match(timeout, recv_event()),
+
+ mnesia_test_lib:kill_mnesia([N1]),
+
+ ?verify_mnesia([N2], [N1]).
+
+recv_event() ->
+ receive
+ Event -> Event
+ after 1000 ->
+ timeout
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+iteration(doc) ->
+ ["Verify that the iteration functions works as expected"];
+iteration(suite) ->
+ [foldl].
+
+
+foldl(suite) ->
+ [];
+foldl(doc) ->
+ [""];
+foldl(Config) when is_list(Config) ->
+ Nodes = [_N1, N2] = ?acquire_nodes(2, Config),
+ Tab1 = fold_local,
+ Tab2 = fold_remote,
+ Tab3 = fold_ordered,
+ ?match({atomic, ok}, mnesia:create_table(Tab1, [{ram_copies, Nodes}])),
+ ?match({atomic, ok}, mnesia:create_table(Tab2, [{ram_copies, [N2]}, {type, bag}])),
+ ?match({atomic, ok}, mnesia:create_table(Tab3, [{ram_copies, Nodes},
+ {type, ordered_set}])),
+
+ Tab1Els = [{Tab1, N, N} || N <- lists:seq(1, 10)],
+ Tab2Els = ?sort([{Tab2, 1, 2} | [{Tab2, N, N} || N <- lists:seq(1, 10)]]),
+ Tab3Els = [{Tab3, N, N} || N <- lists:seq(1, 10)],
+
+ [mnesia:sync_transaction(fun() -> mnesia:write(E) end) || E <- Tab1Els],
+ [mnesia:sync_transaction(fun() -> mnesia:write(E) end) || E <- Tab2Els],
+ [mnesia:sync_transaction(fun() -> mnesia:write(E) end) || E <- Tab3Els],
+
+ Fold = fun(Tab) ->
+ lists:reverse(mnesia:foldl(fun(E, A) -> [E | A] end, [], Tab))
+ end,
+ Fold2 = fun(Tab, Lock) ->
+ lists:reverse(mnesia:foldl(fun(E, A) -> [E | A] end, [], Tab, Lock))
+ end,
+ Exit = fun(Tab) ->
+ lists:reverse(mnesia:foldl(fun(_E, _A) -> exit(testing) end, [], Tab))
+ end,
+ %% Errors
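+ %% foldl must abort on a non-existing table, a non-fun, an exit inside
+ %% the fun, and an unknown lock kind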
+ ?match({aborted, _}, mnesia:transaction(Fold, [error])),
+ ?match({aborted, _}, mnesia:transaction(fun(Tab) -> mnesia:foldl(badfun,[],Tab) end,
+ [Tab1])),
+ ?match({aborted, testing}, mnesia:transaction(Exit, [Tab1])),
+ ?match({aborted, _}, mnesia:transaction(Fold2, [Tab1, read_lock])),
+
+ %% Success
+ ?match({atomic, Tab1Els}, sort_res(mnesia:transaction(Fold, [Tab1]))),
+ ?match({atomic, Tab2Els}, sort_res(mnesia:transaction(Fold, [Tab2]))),
+ ?match({atomic, Tab3Els}, mnesia:transaction(Fold, [Tab3])),
+
+ ?match({atomic, Tab1Els}, sort_res(mnesia:transaction(Fold2, [Tab1, read]))),
+ ?match({atomic, Tab1Els}, sort_res(mnesia:transaction(Fold2, [Tab1, write]))),
+
+ ?match(Tab1Els, sort_res(mnesia:sync_dirty(Fold, [Tab1]))),
+ ?match(Tab2Els, sort_res(mnesia:async_dirty(Fold, [Tab2]))),
+
+ ?verify_mnesia(Nodes, []).
+
+sort_res({atomic, List}) ->
+ {atomic, ?sort(List)};
+sort_res(Else) when is_list(Else) ->
+ ?sort(Else);
+sort_res(Else) ->
+ Else.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+debug_support(doc) ->
+ ["Check that the debug support has not decayed."];
+debug_support(suite) ->
+ [
+ info,
+ schema_0,
+ schema_1,
+ view_0,
+ view_1,
+ view_2,
+ lkill,
+ kill
+ ].
+
+info(suite) -> [];
+info(Config) when is_list(Config) ->
+ Nodes = ?acquire_nodes(1, Config),
+ ?match(ok, mnesia:info()),
+ ?verify_mnesia(Nodes, []).
+
+schema_0(suite) -> [];
+schema_0(Config) when is_list(Config) ->
+ Nodes = ?acquire_nodes(1, Config),
+ ?match(ok, mnesia:schema()),
+ ?verify_mnesia(Nodes, []).
+
+schema_1(suite) -> [];
+schema_1(Config) when is_list(Config) ->
+ Nodes = ?acquire_nodes(1, Config),
+ Tab = schema_1,
+ ?match({atomic, ok}, mnesia:create_table(Tab, [])),
+ ?match(ok, mnesia:schema(Tab)),
+ ?verify_mnesia(Nodes, []).
+
+view_0(suite) -> [];
+view_0(Config) when is_list(Config) ->
+ Nodes = ?acquire_nodes(1, Config),
+ ?match(ok, mnesia_lib:view()),
+ ?verify_mnesia(Nodes, []).
+
+view_1(suite) -> [];
+view_1(Config) when is_list(Config) ->
+ Nodes = ?acquire_nodes(1, Config),
+ BinCore = mnesia_lib:mkcore({crashinfo, "Just testing..."}),
+ CoreFile = lists:concat(["MnesiaCore.", node(), ".view_1.", ?MODULE]),
+ ?match(ok, file:write_file(CoreFile, BinCore)),
+ ?match(ok, mnesia_lib:view(CoreFile)),
+ ?match(ok, file:delete(CoreFile)),
+
+ ?match(stopped, mnesia:stop()),
+ Dir = mnesia:system_info(directory),
+ ?match(eof, mnesia_lib:view(filename:join(Dir, "LATEST.LOG"))),
+ ?match(ok, mnesia_lib:view(filename:join(Dir, "schema.DAT"))),
+ ?verify_mnesia([], Nodes).
+
+view_2(suite) -> [];
+view_2(Config) when is_list(Config) ->
+ Nodes = ?acquire_nodes(1, Config),
+ BinCore = mnesia_lib:mkcore({crashinfo, "More testing..."}),
+ File = lists:concat([?MODULE, "view_2.", node()]),
+ ?match(ok, file:write_file(File, BinCore)),
+ ?match(ok, mnesia_lib:view(File, core)),
+ ?match(ok, file:delete(File)),
+
+ ?match(stopped, mnesia:stop()),
+ Dir = mnesia:system_info(directory),
+ ?match(ok, file:rename(filename:join(Dir, "LATEST.LOG"), File)),
+ ?match(eof, mnesia_lib:view(File, log)),
+ ?match(ok, file:delete(File)),
+
+ ?match(ok, file:rename(filename:join(Dir, "schema.DAT"), File)),
+ ?match(ok, mnesia_lib:view(File, dat)),
+ ?match(ok, file:delete(File)),
+ ?verify_mnesia([], Nodes).
+
+lkill(suite) -> [];
+lkill(Config) when is_list(Config) ->
+ [Node1, Node2] = ?acquire_nodes(2, Config),
+
+ ?match(yes, rpc:call(Node1, mnesia, system_info, [is_running])),
+ ?match(yes, rpc:call(Node2, mnesia, system_info, [is_running])),
+ ?match(ok, rpc:call(Node2, mnesia, lkill, [])),
+ ?match(yes, rpc:call(Node1, mnesia, system_info, [is_running])),
+ ?match(no, rpc:call(Node2, mnesia, system_info, [is_running])),
+ ?verify_mnesia([Node1], [Node2]).
+
+kill(suite) -> [];
+kill(Config) when is_list(Config) ->
+ [Node1, Node2] = ?acquire_nodes(2, Config),
+
+ ?match(yes, rpc:call(Node1, mnesia, system_info, [is_running])),
+ ?match(yes, rpc:call(Node2, mnesia, system_info, [is_running])),
+ ?match({_, []}, rpc:call(Node2, mnesia, kill, [])),
+ ?match(no, rpc:call(Node1, mnesia, system_info, [is_running])),
+ ?match(no, rpc:call(Node2, mnesia, system_info, [is_running])),
+ ?verify_mnesia([], [Node1, Node2]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+record_name(doc) ->
+ ["Verify that record names may be differ from the name of ",
+ "the hosting table. Check at least access, restore, "
+ "registry, subscriptions and traveres_backup"];
+record_name(suite) ->
+ [
+ record_name_dirty_access
+ ].
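+
+%% A minimal sketch (not a test case), using the hypothetical names
+%% person_tab/person: the record name is declared when the table is
+%% created, and every record written to or read from the table is tagged
+%% with that name instead of the table name.
+record_name_sketch() ->
+    {atomic, ok} = mnesia:create_table(person_tab, [{record_name, person}]),
+    ok = mnesia:dirty_write(person_tab, {person, 1, "kalle"}),
+    [{person, 1, "kalle"}] = mnesia:dirty_read(person_tab, 1).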
+
+record_name_dirty_access(suite) ->
+ [
+ record_name_dirty_access_ram,
+ record_name_dirty_access_disc,
+ record_name_dirty_access_disc_only
+ ].
+
+record_name_dirty_access_ram(suite) ->
+ [];
+record_name_dirty_access_ram(Config) when is_list(Config) ->
+ record_name_dirty_access(ram_copies, Config).
+
+record_name_dirty_access_disc(suite) ->
+ [];
+record_name_dirty_access_disc(Config) when is_list(Config) ->
+ record_name_dirty_access(disc_copies, Config).
+
+record_name_dirty_access_disc_only(suite) ->
+ [];
+record_name_dirty_access_disc_only(Config) when is_list(Config) ->
+ record_name_dirty_access(disc_only_copies, Config).
+
+record_name_dirty_access(Storage, Config) ->
+ [Node1, _Node2] = Nodes = ?acquire_nodes(2, Config),
+
+ List = lists:concat([record_name_dirty_access_, Storage]),
+ Tab = list_to_atom(List),
+ RecName = some_record,
+ Attr = val,
+ TabDef = [{type, bag},
+ {record_name, RecName},
+ {index, [Attr]},
+ {Storage, Nodes}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, TabDef)),
+
+ ?match(RecName, mnesia:table_info(Tab, record_name)),
+
+ ?match(ok, mnesia:dirty_write(Tab, {RecName, 2, 20})),
+ ?match(ok, mnesia:dirty_write(Tab, {RecName, 2, 21})),
+ ?match(ok, mnesia:dirty_write(Tab, {RecName, 2, 22})),
+
+ %% Backup test
+ BupFile = List ++ ".BUP",
+ CpName = cpname,
+ CpArgs = [{name, CpName}, {min, [Tab]}, {ram_overrides_dump, true}],
+ ?match({ok, CpName, _}, mnesia:activate_checkpoint(CpArgs)),
+ ?match(ok, mnesia:backup_checkpoint(CpName, BupFile)),
+ ?match(ok, mnesia:deactivate_checkpoint(CpName)),
+
+ ?match(ok, mnesia:dirty_write(Tab, {RecName, 1, 10})),
+ ?match({ok, Node1}, mnesia:subscribe({table, Tab})),
+ ?match(ok, mnesia:dirty_write(Tab, {RecName, 3, 10})),
+
+ Twos = ?sort([{RecName, 2, 20}, {RecName, 2, 21}, {RecName, 2, 22}]),
+ ?match(Twos, ?sort(mnesia:dirty_read(Tab, 2))),
+
+ ?match(ok, mnesia:dirty_delete_object(Tab, {RecName, 2, 21})),
+
+ Tens = ?sort([{RecName, 1, 10}, {RecName, 3, 10}]),
+ TenPat = {RecName, '_', 10},
+ ?match(Tens, ?sort(mnesia:dirty_match_object(Tab, TenPat))),
+ ?match(Tens, ?sort(mnesia:dirty_select(Tab, [{TenPat, [], ['$_']}]))),
+
+ %% Subscription test
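+ %% Note that the received events are tagged with the table name,
+ %% not with the record_name used in the records that were written.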
+ E = mnesia_table_event,
+ ?match_receive({E, {write, {Tab, 3, 10}, _}}),
+ ?match_receive({E, {delete_object, {Tab, 2, 21}, _}}),
+ ?match({ok, Node1}, mnesia:unsubscribe({table, Tab})),
+
+ ?match([], mnesia_test_lib:stop_mnesia([Node1])),
+ ?match([], mnesia_test_lib:start_mnesia(Nodes, [Tab])),
+
+ ?match(Tens, ?sort(mnesia:dirty_index_match_object(Tab, TenPat, Attr) )),
+ ?match(Tens, ?sort(mnesia:dirty_index_read(Tab, 10, Attr))),
+
+ ?match([1, 2, 3], ?sort(mnesia:dirty_all_keys(Tab))),
+
+ ?match({ok, Node1}, mnesia:subscribe({table, Tab})),
+ ?match(ok, mnesia:dirty_delete(Tab, 2)),
+ ?match([], mnesia:dirty_read(Tab, 2)),
+
+ ?match_receive({E, {delete, {Tab, 2}, _}}),
+ ?match([], mnesia_test_lib:flush()),
+ ?match({ok, Node1}, mnesia:unsubscribe({table, Tab})),
+
+ %% Restore test
+ ?match({atomic, [Tab]}, mnesia:restore(BupFile, [{recreate_tables, [Tab]}])),
+ ?match(RecName, mnesia:table_info(Tab, record_name)),
+
+ ?match(Twos, ?sort(mnesia:dirty_match_object(Tab, mnesia:table_info(Tab, wild_pattern)))),
+ ?match(Twos, ?sort(mnesia:dirty_select(Tab,
+ [{mnesia:table_info(Tab, wild_pattern),
+ [],['$_']}]))),
+
+ %% Traverse backup test
+
+ Fun = fun(Rec, {Good, Bad}) ->
+ ?verbose("BUP: ~p~n", [Rec]),
+ case Rec of
+ {T, K, V} when T == Tab ->
+ Good2 = Good ++ [{RecName, K, V}],
+ {[Rec], {?sort(Good2), Bad}};
+ {T, K} when T == Tab ->
+ Good2 = [G || G <- Good, element(2, G) /= K],
+ {[Rec], {?sort(Good2), Bad}};
+ _ when element(1, Rec) == schema ->
+ {[Rec], {Good, Bad}};
+ _ ->
+ Bad2 = Bad ++ [Rec],
+ {[Rec], {Good, ?sort(Bad2)}}
+ end
+ end,
+
+ ?match({ok, {Twos, []}}, mnesia:traverse_backup(BupFile, mnesia_backup,
+ dummy, read_only,
+ Fun, {[], []})),
+ ?match(ok, file:delete(BupFile)),
+
+ %% Update counter test
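+ %% dirty_update_counter treats {RecName, Key, Int} records as counters;
+ %% the result is never allowed to drop below zero, hence the 0 results below.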
+
+ CounterTab = list_to_atom(lists:concat([Tab, "_counter"])),
+ CounterTabDef = [{record_name, some_counter}],
+ C = my_counter,
+ ?match({atomic, ok}, mnesia:create_table(CounterTab, CounterTabDef)),
+ ?match(some_counter, mnesia:table_info(CounterTab, record_name)),
+ ?match(0, mnesia:dirty_update_counter(CounterTab, gurka, -10)),
+ ?match(10, mnesia:dirty_update_counter(CounterTab, C, 10)),
+ ?match(11, mnesia:dirty_update_counter(CounterTab, C, 1)),
+ ?match(4711, mnesia:dirty_update_counter(CounterTab, C, 4700)),
+ ?match([{some_counter, C, 4711}], mnesia:dirty_read(CounterTab, C)),
+ ?match(0, mnesia:dirty_update_counter(CounterTab, C, -4747)),
+
+ %% Registry tests
+
+ RegTab = list_to_atom(lists:concat([Tab, "_registry"])),
+ RegTabDef = [{record_name, some_reg}],
+ ?match(ok, mnesia_registry:create_table(RegTab, RegTabDef)),
+ ?match(some_reg, mnesia:table_info(RegTab, record_name)),
+ {success, RegRecs} =
+ ?match([_ | _], mnesia_registry_test:dump_registry(node(), RegTab)),
+
+ R = ?sort(RegRecs),
+ ?match(R, ?sort(mnesia_registry_test:restore_registry(node(), RegTab))),
+
+ ?verify_mnesia(Nodes, []).
+
+sorted_ets(suite) ->
+ [];
+sorted_ets(Config) when is_list(Config) ->
+ [N1, N2, N3] = All = ?acquire_nodes(3, Config),
+
+ Tab = sorted_tab,
+ Def = case mnesia_test_lib:diskless(Config) of
+ true -> [{name, Tab}, {type, ordered_set}, {ram_copies, All}];
+ false -> [{name, Tab}, {type, ordered_set},
+ {ram_copies, [N1]},
+ {disc_copies,[N2, N3]}]
+ end,
+
+ ?match({atomic, ok}, mnesia:create_table(Def)),
+ ?match({aborted, _}, mnesia:create_table(fel, [{disc_only_copies, N1}])),
+
+ ?match([ok | _],
+ [mnesia:dirty_write({Tab, {dirty, N}, N}) || N <- lists:seq(1, 10)]),
+ ?match({atomic, _},
+ mnesia:sync_transaction(fun() ->
+ [mnesia:write({Tab, {trans, N}, N}) ||
+ N <- lists:seq(1, 10)]
+ end)),
+
+ List = mnesia:dirty_match_object({Tab, '_', '_'}),
+ ?match(List, ?sort(List)),
+ ?match(List, rpc:call(N2, mnesia, dirty_match_object, [{Tab, '_', '_'}])),
+ ?match(List, rpc:call(N3, mnesia, dirty_match_object, [{Tab, '_', '_'}])),
+
+ mnesia_test_lib:stop_mnesia(All),
+ mnesia_test_lib:start_mnesia(All, [sorted_tab]),
+
+ List = mnesia:dirty_match_object({Tab, '_', '_'}),
+ ?match(List, ?sort(List)),
+ ?match(List, rpc:call(N2, mnesia, dirty_match_object, [{Tab, '_', '_'}])),
+ ?match(List, rpc:call(N3, mnesia, dirty_match_object, [{Tab, '_', '_'}])),
+
+ ?match(List, rpc:call(N3, mnesia, dirty_select, [Tab, [{{Tab, '_', '_'},[],['$_']}]])),
+
+ TransMatch = fun() ->
+ mnesia:write({Tab, {trans, 0}, 0}),
+ mnesia:write({Tab, {trans, 11}, 11}),
+ mnesia:match_object({Tab, '_', '_'})
+ end,
+ TransSelect = fun() ->
+ mnesia:write({Tab, {trans, 0}, 0}),
+ mnesia:write({Tab, {trans, 11}, 11}),
+ mnesia:select(Tab, [{{Tab, '_', '_'},[],['$_']}])
+ end,
+
+ TList = mnesia:transaction(TransMatch),
+ STList = ?sort(TList),
+ ?match(STList, TList),
+ ?match(STList, rpc:call(N2, mnesia, transaction, [TransMatch])),
+ ?match(STList, rpc:call(N3, mnesia, transaction, [TransMatch])),
+
+ TSel = mnesia:transaction(TransSelect),
+ ?match(STList, TSel),
+ ?match(STList, rpc:call(N2, mnesia, transaction, [TransSelect])),
+ ?match(STList, rpc:call(N3, mnesia, transaction, [TransSelect])),
+
+ ?match({atomic, ok}, mnesia:create_table(rec, [{type, ordered_set}])),
+ [ok = mnesia:dirty_write(R) || R <- [{rec,1,1}, {rec,2,1}]],
+ ?match({atomic, ok}, mnesia:add_table_index(rec, 3)),
+ TestIt = fun() ->
+ ok = mnesia:write({rec,1,1}),
+ mnesia:index_read(rec, 1, 3)
+ end,
+ ?match({atomic, [{rec,1,1}, {rec,2,1}]}, mnesia:transaction(TestIt)).
+
+
diff --git a/lib/mnesia/test/mnesia_examples_test.erl b/lib/mnesia/test/mnesia_examples_test.erl
new file mode 100644
index 0000000000..d1b1409c9d
--- /dev/null
+++ b/lib/mnesia/test/mnesia_examples_test.erl
@@ -0,0 +1,160 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(mnesia_examples_test).
+-author('[email protected]').
+-compile([export_all]).
+-include("mnesia_test_lib.hrl").
+
+init_per_testcase(Func, Conf) ->
+ mnesia_test_lib:init_per_testcase(Func, Conf).
+
+fin_per_testcase(Func, Conf) ->
+ mnesia_test_lib:fin_per_testcase(Func, Conf).
+
+-define(init(N, Config),
+ mnesia_test_lib:prepare_test_case([{init_test_case, [mnesia]},
+ delete_schema],
+ N, Config, ?FILE, ?LINE)).
+
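+%% Make sure that the example module is loaded on all connected nodes;
+%% opt_load/1 falls back to loading the compiled module from mnesia's
+%% examples directory when it is not already loaded.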
+opt_net_load(ExampleMod) ->
+ opt_net_load([node() | nodes()], ExampleMod, ok).
+
+opt_net_load([Node | Nodes], ExampleMod, Res) ->
+ case rpc:call(Node, ?MODULE, opt_load, [ExampleMod]) of
+ {module, ExampleMod} ->
+ opt_net_load(Nodes, ExampleMod, Res);
+ {error, Reason} ->
+ Error = {opt_net_load, ExampleMod, Node, Reason},
+ opt_net_load(Nodes, ExampleMod, {error, Error});
+ {badrpc, Reason} ->
+ Error = {opt_net_load, ExampleMod, Node, Reason},
+ opt_net_load(Nodes, ExampleMod, {error, Error})
+ end;
+opt_net_load([], _ExampleMod, Res) ->
+ Res.
+
+opt_load(Mod) ->
+ case code:is_loaded(Mod) of
+ {file, _} ->
+ {module, Mod};
+ false ->
+ Abs = filename:join([code:lib_dir(mnesia), examples, Mod]),
+ code:load_abs(Abs)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+all(doc) ->
+ ["Run all examples mentioned in the documentation",
+ "Are really all examples covered?"];
+all(suite) ->
+ [
+ bup,
+ company,
+ meter,
+ tpcb
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+bup(doc) -> ["Run the backup examples in bup.erl"];
+bup(suite) -> [];
+bup(Config) when is_list(Config) ->
+ Nodes = ?init(3, Config),
+ opt_net_load(bup),
+ ?match(ok, bup:test(Nodes)).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+company(doc) ->
+ ["Run the company examples in company.erl and company_o.erl"].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+tpcb(doc) ->
+ ["Run the sample configurations of the stress tests in mnesia_tpcb.erl"];
+tpcb(suite) ->
+ [
+ replica_test,
+ sticky_replica_test,
+ dist_test,
+ conflict_test,
+ frag_test,
+ frag2_test,
+ remote_test,
+ remote_frag2_test
+ ].
+
+replica_test(suite) -> [];
+replica_test(Config) when is_list(Config) ->
+ ?init(3, Config),
+ opt_net_load(mnesia_tpcb),
+ ?match({ok, _}, mnesia_tpcb:start(mnesia_tpcb:config(replica_test, ram_copies))).
+
+sticky_replica_test(suite) -> [];
+sticky_replica_test(Config) when is_list(Config) ->
+ ?init(3, Config),
+ opt_net_load(mnesia_tpcb),
+ ?match({ok, _}, mnesia_tpcb:start(mnesia_tpcb:config(sticky_replica_test, ram_copies))).
+
+dist_test(suite) -> [];
+dist_test(Config) when is_list(Config) ->
+ ?init(3, [{tc_timeout, timer:minutes(10)} | Config]),
+ opt_net_load(mnesia_tpcb),
+ ?match({ok, _}, mnesia_tpcb:start(mnesia_tpcb:config(dist_test, ram_copies))).
+
+conflict_test(suite) -> [];
+conflict_test(Config) when is_list(Config) ->
+ ?init(3, Config),
+ opt_net_load(mnesia_tpcb),
+ ?match({ok, _}, mnesia_tpcb:start(mnesia_tpcb:config(conflict_test, ram_copies))).
+
+frag_test(suite) -> [];
+frag_test(Config) when is_list(Config) ->
+ ?init(3, Config),
+ opt_net_load(mnesia_tpcb),
+ ?match({ok, _}, mnesia_tpcb:start(mnesia_tpcb:config(frag_test, ram_copies))).
+
+frag2_test(suite) -> [];
+frag2_test(Config) when is_list(Config) ->
+ ?init(3, Config),
+ opt_net_load(mnesia_tpcb),
+ ?match({ok, _}, mnesia_tpcb:start(mnesia_tpcb:config(frag2_test, ram_copies))).
+
+remote_test(suite) -> [];
+remote_test(Config) when is_list(Config) ->
+ ?init(3, Config),
+ opt_net_load(mnesia_tpcb),
+ ?match({ok, _}, mnesia_tpcb:start(mnesia_tpcb:config(remote_test, ram_copies))).
+
+remote_frag2_test(suite) -> [];
+remote_frag2_test(Config) when is_list(Config) ->
+ ?init(3, Config),
+ opt_net_load(mnesia_tpcb),
+ ?match({ok, _}, mnesia_tpcb:start(mnesia_tpcb:config(remote_frag2_test, ram_copies))).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+meter(doc) ->
+ ["Run the meter example in mnesia_meter.erl"];
+meter(suite) ->
+ [];
+meter(Config) when is_list(Config) ->
+ [N | _] = ?init(3, Config),
+ opt_net_load(mnesia_meter),
+ ?match(ok, mnesia_meter:go(ram_copies, [N])).
+
+
diff --git a/lib/mnesia/test/mnesia_frag_test.erl b/lib/mnesia/test/mnesia_frag_test.erl
new file mode 100644
index 0000000000..4add340254
--- /dev/null
+++ b/lib/mnesia/test/mnesia_frag_test.erl
@@ -0,0 +1,875 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1999-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(mnesia_frag_test).
+-author('[email protected]').
+-include("mnesia_test_lib.hrl").
+
+-compile([export_all]).
+
+init_per_testcase(Func, Conf) ->
+ mnesia_test_lib:init_per_testcase(Func, Conf).
+
+fin_per_testcase(Func, Conf) ->
+ mnesia_test_lib:fin_per_testcase(Func, Conf).
+
+%% Like ?match/2, but reports a mismatch through mnesia_test_lib:error/4.
+-define(match_dist(ExpectedRes, Expr),
+ case ?match(ExpectedRes, Expr) of
+     {success, Res} -> Res;
+     Other ->
+         mnesia_test_lib:error("match_dist failed: ~p~n", [Other], ?FILE, ?LINE)
+ end).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+all(doc) ->
+ ["Verify the functionality of fragmented tables"];
+all(suite) ->
+ [
+ light,
+ medium
+ ].
+
+light(suite) ->
+ [
+ nice,
+ evil
+ ].
+
+medium(suite) ->
+ [
+ consistency
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+nice(suite) ->
+ [
+ nice_single,
+ nice_multi,
+ nice_access,
+ iter_access
+ ].
+
+nice_single(suite) -> [];
+nice_single(Config) when is_list(Config) ->
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+
+ %% Create a table with 2 fragments and 12 records
+ Tab = nice_frag,
+ Props = [{n_fragments, 2}, {node_pool, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, [{frag_properties, Props}])),
+ Records = [{Tab, N, -N} || N <- lists:seq(1, 12)],
+ [frag_write(Tab, R) || R <- Records],
+ ?match([{Node1, 2}], frag_dist(Tab)),
+ ?match([8, 4], frag_rec_dist(Tab)),
+
+ %% Adding a new node to pool should not affect distribution
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, {add_node, Node2})),
+ Dist = frag_dist(Tab),
+ ?match([{Node2, 0}, {Node1, 2}], Dist),
+ ?match([8, 4], frag_rec_dist(Tab)),
+
+ %% Add new fragment hopefully on the new node
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, {add_frag, Dist})),
+ Dist2 = frag_dist(Tab),
+ ?match([{Node2, 1}, {Node1, 2}], Dist2),
+ ?match([3, 4, 5], frag_rec_dist(Tab)),
+
+ %% Add new fragment hopefully on the new node
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, {add_frag, Dist2})),
+ Dist3 = frag_dist(Tab),
+ ?match([{Node1, 2}, {Node2, 2}], Dist3),
+ ?match([3, 2, 5, 2], frag_rec_dist(Tab)),
+
+ %% Add new fragment hopefully on the new node
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, {add_frag, Dist3})),
+ Dist4 = frag_dist(Tab),
+ ?match([{Node2, 2}, {Node1, 3}], Dist4),
+ ?match([_, _, _, _, _], frag_rec_dist(Tab)),
+
+ %% Dropping a node in pool should not affect distribution
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, {del_node, Node1})),
+ ?match([{Node2, 2}, {Node1, 3}], frag_dist(Tab)),
+ ?match([_, _, _, _, _], frag_rec_dist(Tab)),
+
+ %% Dropping a fragment
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, del_frag)),
+ Dist5 = frag_dist(Tab),
+ ?match([{Node2, 2}, {Node1, 2}], Dist5),
+ ?match([3, 2, 5, 2], frag_rec_dist(Tab)),
+
+ %% Add new fragment hopefully on the new node
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, {add_frag, Dist5})),
+ Dist6 = frag_dist(Tab),
+ ?match([{Node2, 3}, {Node1, 2}], Dist6),
+ ?match([_, _, _, _, _], frag_rec_dist(Tab)),
+
+ %% Dropping all fragments but one
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, del_frag)),
+ ?match([3, 2, 5, 2], frag_rec_dist(Tab)),
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, del_frag)),
+ ?match([3, 4, 5], frag_rec_dist(Tab)),
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, del_frag)),
+ ?match([8, 4], frag_rec_dist(Tab)),
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, del_frag)),
+ ?match([{Node2, 0}, {Node1, 1}], frag_dist(Tab)),
+ ?match([12], frag_rec_dist(Tab)),
+
+ %% Defragmenting the table clears frag_properties
+ ?match(Len when Len > 0,
+ length(mnesia:table_info(Tab, frag_properties))),
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, deactivate)),
+ ?match(0, length(mnesia:table_info(Tab, frag_properties))),
+
+ %% Making the table fragmented again
+ Props2 = [{n_fragments, 1}],
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, {activate, Props2})),
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, {add_frag, frag_dist(Tab)})),
+ Dist7 = frag_dist(Tab),
+ ?match([{Node1, 1}, {Node2, 1}], Dist7),
+ ?match([8, 4], frag_rec_dist(Tab)),
+
+ %% Deleting the fragmented table
+ ?match({atomic, ok}, mnesia:delete_table(Tab)),
+ ?match(false, lists:member(Tab, mnesia:system_info(tables))),
+
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+nice_multi(doc) ->
+ ["Extending the nice case with one more node, ",
+ "one more replica and a foreign key"];
+nice_multi(suite) -> [];
+nice_multi(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+
+ %% Create a table with 2 fragments and 8 records
+ Tab = frag_master,
+ Name = frag_rec,
+ Type = case mnesia_test_lib:diskless(Config) of
+ true -> n_ram_copies;
+ false -> n_disc_copies
+ end,
+ Props = [{n_fragments, 2},
+ {Type, 2},
+ {node_pool, [Node2, Node1]}],
+ Def = [{frag_properties, Props},
+ {attributes, [id, data]},
+ {record_name, Name}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ [frag_write(Tab, {Name, Id, -Id}) || Id <- lists:seq(1, 8)],
+ ?match([6, 2], frag_rec_dist(Tab)),
+ ?match([{Node2, 2}, {Node1, 2}], frag_dist(Tab)),
+
+ %% And connect another table to it, via a foreign key
+ TabF = frag_slave,
+ PropsF = [{foreign_key, {Tab, foreign_id}}],
+ DefF = [{frag_properties, PropsF},
+ {attributes, [id, foreign_id]}],
+
+ ?match({atomic, ok}, mnesia:create_table(TabF, DefF)),
+ [frag_write(TabF, {TabF, {Id}, Id}) || Id <- lists:seq(1, 16)],
+ ?match([10, 6], frag_rec_dist(TabF)),
+ ?match([{Node2, 2}, {Node1, 2}], frag_dist(TabF)),
+
+ %% Adding a new node to pool should not affect distribution
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, {add_node, Node3})),
+ Dist = frag_dist(Tab),
+ ?match([{Node3, 0}, {Node2, 2}, {Node1, 2}], Dist),
+ ?match([6, 2], frag_rec_dist(Tab)),
+ DistF = frag_dist(TabF),
+ ?match([{Node3, 0}, {Node2, 2}, {Node1, 2}], DistF),
+ ?match([10, 6], frag_rec_dist(TabF)),
+
+ %% Add new fragment hopefully on the new node
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, {add_frag, Dist})),
+ Dist2 = frag_dist(Tab),
+ ?match([{Node3, 1},{Node1, 2},{Node2,3}], Dist2),
+ ?match([_, _, _], frag_rec_dist(Tab)),
+ DistF2 = frag_dist(TabF),
+ ?match([{Node3, 1},{Node1, 2},{Node2,3}], DistF2),
+ ?match([_, _, _], frag_rec_dist(TabF)),
+
+ %% Add new fragment hopefully on the new node
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, {add_frag, Dist2})),
+ Dist3 = frag_dist(Tab),
+ ?match([{Node3, 2},{Node2,3},{Node1, 3}], Dist3),
+ ?match([3, 0, 3, 2], frag_rec_dist(Tab)),
+ DistF3 = frag_dist(TabF),
+ ?match([{Node3, 2},{Node2,3},{Node1, 3}], DistF3),
+ ?match([3, 3, 7, 3], frag_rec_dist(TabF)),
+
+ %% Add new fragment hopefully on the new node
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, {add_frag, Dist3})),
+ Dist4 = frag_dist(Tab),
+ ?match([{Node1, 3}, {Node3, 3},{Node2, 4}], Dist4),
+ ?match([_, _, _, _, _], frag_rec_dist(Tab)),
+ DistF4 = frag_dist(TabF),
+ ?match([{Node1, 3}, {Node3, 3},{Node2, 4}], DistF4),
+ ?match([_, _, _, _, _], frag_rec_dist(TabF)),
+
+ %% Dropping a node in pool should not affect distribution
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, {del_node, Node1})),
+ ?match([{Node3, 3},{Node2, 4}, {Node1, 3}], frag_dist(Tab)),
+ ?match([_, _, _, _, _], frag_rec_dist(Tab)),
+ ?match([{Node3, 3},{Node2, 4}, {Node1, 3}], frag_dist(TabF)),
+ ?match([_, _, _, _, _], frag_rec_dist(TabF)),
+
+ %% Dropping a fragment
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, del_frag)),
+ Dist5 = frag_dist(Tab),
+ ?match([{Node3, 2},{Node2,3},{Node1, 3}], Dist5),
+ ?match([3, 0, 3, 2], frag_rec_dist(Tab)),
+ DistF5 = frag_dist(TabF),
+ ?match([{Node3, 2},{Node2,3},{Node1, 3}], DistF5),
+ ?match([3, 3, 7, 3], frag_rec_dist(TabF)),
+
+ %% Add new fragment hopefully on the new node
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, {add_frag, Dist5})),
+ Dist6 = frag_dist(Tab),
+ ?match([{Node3, 3},{Node2, 4},{Node1, 3}], Dist6),
+ ?match([_, _, _, _, _], frag_rec_dist(Tab)),
+ DistF6 = frag_dist(TabF),
+ ?match([{Node3, 3},{Node2, 4},{Node1, 3}], DistF6),
+ ?match([_, _, _, _, _], frag_rec_dist(TabF)),
+
+ %% Dropping all fragments but one
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, del_frag)),
+ ?match([3, 0, 3, 2], frag_rec_dist(Tab)),
+ ?match([3, 3, 7, 3], frag_rec_dist(TabF)),
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, del_frag)),
+ ?match([_, _, _], frag_rec_dist(Tab)),
+ ?match([_, _, _], frag_rec_dist(TabF)),
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, del_frag)),
+ ?match([6, 2], frag_rec_dist(Tab)),
+ ?match([10, 6], frag_rec_dist(TabF)),
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, del_frag)),
+ ?match([{Node3, 0}, {Node2, 1}, {Node1, 1}], frag_dist(Tab)),
+ ?match([8], frag_rec_dist(Tab)),
+ ?match([{Node3, 0}, {Node2, 1}, {Node1, 1}], frag_dist(TabF)),
+ ?match([16], frag_rec_dist(TabF)),
+
+ %% Defragmenting the tables clears frag_properties
+ ?match(Len when Len > 0,
+ length(mnesia:table_info(TabF, frag_properties))),
+ ?match({atomic, ok}, mnesia:change_table_frag(TabF, deactivate)),
+ ?match(0, length(mnesia:table_info(TabF, frag_properties))),
+ ?match(Len when Len > 0,
+ length(mnesia:table_info(Tab, frag_properties))),
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, deactivate)),
+ ?match(0, length(mnesia:table_info(Tab, frag_properties))),
+
+ %% Making the tables fragmented again
+ Props2 = [{n_fragments, 1}, {node_pool, [Node1, Node2]}],
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, {activate, Props2})),
+ ?match({atomic, ok}, mnesia:delete_table(TabF)),
+ ?match({atomic, ok}, mnesia:create_table(TabF, DefF)),
+ [frag_write(TabF, {TabF, {Id}, Id}) || Id <- lists:seq(1, 16)],
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, {add_frag, frag_dist(Tab)})),
+ ?match([{Node1, 2}, {Node2, 2}], frag_dist(Tab)),
+ ?match([6, 2], frag_rec_dist(Tab)),
+ ?match([{Node1, 2}, {Node2, 2}], frag_dist(TabF)),
+ ?match([10, 6], frag_rec_dist(TabF)),
+
+ %% Deleting the fragmented tables
+ ?match({atomic, ok}, mnesia:delete_table(TabF)),
+ ?match(false, lists:member(TabF, mnesia:system_info(tables))),
+ ?match({atomic, ok}, mnesia:delete_table(Tab)),
+ ?match(false, lists:member(Tab, mnesia:system_info(tables))),
+
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+nice_access(doc) ->
+ ["Cover entire callback interface"];
+nice_access(suite) -> [];
+nice_access(Config) when is_list(Config) ->
+ Nodes = ?acquire_nodes(3, Config),
+
+ Tab = frag_access,
+ Pool = lists:sort(Nodes),
+ Props = [{n_fragments, 20},
+ {n_ram_copies, 2},
+ {node_pool, Pool}],
+ Def = [{frag_properties, Props},
+ {type, ordered_set},
+ {index, [val]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ [frag_write(Tab, {Tab, Id, Id}) || Id <- lists:seq(1, 400)],
+
+ %% And connect another table to it, via a foreign key
+ TabF = frag_access_slave,
+ PropsF = [{foreign_key, {Tab, val}}],
+ DefF = [{frag_properties, PropsF},
+ {index, [val]}],
+ ?match({atomic, ok}, mnesia:create_table(TabF, DefF)),
+ [frag_write(TabF, {TabF, Id, Id}) || Id <- lists:seq(1, 400)],
+
+ ?match(done, mnesia:activity(transaction, fun do_access/3, [Tab, Tab, Pool], mnesia_frag)),
+ ?match(done, mnesia:activity(transaction, fun do_access/3, [TabF, Tab, Pool], mnesia_frag)),
+
+ ?verify_mnesia(Nodes, []).
+
+do_access(Tab, Master, Pool) ->
+ ?match(20, mnesia:table_info(Tab, n_fragments)),
+ ?match(Pool, mnesia:table_info(Tab, node_pool)),
+ ?match(2, mnesia:table_info(Tab, n_ram_copies)),
+ ?match(0, mnesia:table_info(Tab, n_disc_copies)),
+ ?match(0, mnesia:table_info(Tab, n_disc_only_copies)),
+ ?match(20, length(mnesia:table_info(Tab, frag_names))),
+ ?match(20, length(mnesia:table_info(Tab, frag_size))),
+ ?match(20, length(mnesia:table_info(Tab, frag_memory))),
+ PoolSize = length(Pool),
+ ?match(PoolSize, length(mnesia:table_info(Tab, frag_dist))),
+ ?match(400, mnesia:table_info(Tab, size)),
+ ?match(I when is_integer(I), mnesia:table_info(Tab, memory)),
+ ?match(Tab, mnesia:table_info(Tab, base_table)),
+
+ Foreign =
+ if
+ Master == Tab ->
+ ?match(undefined, mnesia:table_info(Tab, foreign_key)),
+ ?match([_], mnesia:table_info(Tab, foreigners)),
+ ?match({'EXIT', {aborted, {combine_error, Tab, frag_properties, {foreign_key, undefined}}}},
+ mnesia:read({Tab, 5}, 5, read)),
+ fun({T, _K}) -> T end;
+ true ->
+ ?match({Master, 3}, mnesia:table_info(Tab, foreign_key)),
+ ?match([], mnesia:table_info(Tab, foreigners)),
+ fun({T, K}) -> {T, K} end
+ end,
+
+ Attr = val,
+ ?match(400, mnesia:table_info(Tab, size)),
+ Count = fun(_, N) -> N + 1 end,
+ ?match(400, mnesia:foldl(Count, 0, Tab)),
+ ?match(400, mnesia:foldr(Count, 0, Tab)),
+ ?match(ok, mnesia:write({Tab, [-1], 1})),
+ ?match(401, length(mnesia:match_object(Tab, {Tab, '_', '_'}, read))),
+ ?match(401, length(mnesia:select(Tab, [{{Tab, '_', '$1'}, [], ['$1']}], read))),
+
+ First = mnesia:select(Tab, [{{Tab, '_', '$1'}, [], ['$1']}], 10, read),
+ TestCont = fun('$end_of_table', Total, _This) ->
+ Total;
+ ({Res,Cont1}, Total, This) ->
+ Cont = mnesia:select(Cont1),
+ This(Cont, length(Res) + Total, This)
+ end,
+ ?match(401, TestCont(First, 0, TestCont)),
+
+ %% OTP
+ [_, Frag2|_] = frag_names(Tab),
+ Frag2key = mnesia:dirty_first(Frag2),
+ ?match({[Frag2key],_},mnesia:select(Tab,[{{Tab,Frag2key,'$1'},[],['$1']}],100,read)),
+
+ ?match([{Tab, [-1], 1}], mnesia:read(Foreign({Tab, 1}), [-1], read)),
+ ?match(401, mnesia:foldl(Count, 0, Tab)),
+ ?match(401, mnesia:foldr(Count, 0, Tab)),
+ ?match(ok, mnesia:delete(Foreign({Tab, 2}), 2, write)),
+ ?match([], mnesia:read(Foreign({Tab, 2}), 2, read)),
+ ?match([{Tab, 3, 3}], mnesia:read(Foreign({Tab, 3}), 3, read)),
+ ?match(400, mnesia:foldl(Count, 0, Tab)),
+ ?match(400, mnesia:foldr(Count, 0, Tab)),
+ ?match(ok, mnesia:delete_object({Tab, 3, 3})),
+ ?match([], mnesia:read(Foreign({Tab, 3}), 3, read)),
+ One = lists:sort([{Tab, 1, 1}, {Tab, [-1], 1}]),
+ Pat = {Tab, '$1', 1},
+ ?match(One, lists:sort(mnesia:match_object(Tab, Pat, read))),
+ ?match([1,[-1]], lists:sort(mnesia:select(Tab, [{Pat, [], ['$1']}], read))),
+ ?match([[[-1]]], lists:sort(mnesia:select(Tab, [{Pat, [{is_list, '$1'}], [['$1']]}], read))),
+ ?match([[1, 100]], lists:sort(mnesia:select(Tab, [{Pat, [{is_integer, '$1'}], [['$1',100]]}], read))),
+ ?match([1,[-1]], lists:sort(mnesia:select(Tab, [{Pat, [{is_list, '$1'}], ['$1']},{Pat, [{is_integer, '$1'}], ['$1']}], read))),
+ ?match(One, lists:sort(mnesia:index_match_object(Tab, Pat, Attr, read) )),
+ ?match(One, lists:sort(mnesia:index_read(Tab, 1, Attr))),
+ Keys = mnesia:all_keys(Tab),
+ ?match([-1], lists:max(Keys)), %% OTP-3779
+ ?match(399, length(Keys)),
+ ?match(399, mnesia:foldl(Count, 0, Tab)),
+ ?match(399, mnesia:foldr(Count, 0, Tab)),
+
+ ?match(Pool, lists:sort(mnesia:lock({table, Tab}, write))),
+
+ done.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+iter_access(doc) ->
+ ["Cover table iteration via callback interface"];
+iter_access(suite) -> [];
+iter_access(Config) when is_list(Config) ->
+ Nodes = ?acquire_nodes(3, Config),
+
+ Tab = frag_access,
+ Pool = lists:sort(Nodes),
+ Props = [{n_fragments, 20},
+ {n_ram_copies, 2},
+ {node_pool, Pool}],
+ Def = [{frag_properties, Props},
+ {type, ordered_set},
+ {index, [val]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ [frag_write(Tab, {Tab, Id, Id}) || Id <- lists:seq(1, 400)],
+
+ FragNames = frag_names(Tab),
+ RawRead =
+ fun(Frag) ->
+ Node = mnesia:table_info(Frag, where_to_read),
+ {Frag, rpc:call(Node, ets, tab2list, [Frag])}
+ end,
+
+ ?match(done, mnesia:activity(transaction, fun nice_iter_access/3, [Tab, FragNames, RawRead], mnesia_frag)),
+
+ FragNames = frag_names(Tab),
+ [First, Second | _] = FragNames,
+ [Last, LastButOne | _] = lists:reverse(FragNames),
+
+ ?match({atomic, ok}, mnesia:clear_table(First)),
+ ?match({atomic, ok}, mnesia:clear_table(Second)),
+ ?match({atomic, ok}, mnesia:clear_table(lists:nth(8, FragNames))),
+ ?match({atomic, ok}, mnesia:clear_table(lists:nth(9, FragNames))),
+ ?match({atomic, ok}, mnesia:clear_table(lists:nth(10, FragNames))),
+ ?match({atomic, ok}, mnesia:clear_table(lists:nth(11, FragNames))),
+ ?match({atomic, ok}, mnesia:clear_table(LastButOne)),
+ ?match({atomic, ok}, mnesia:clear_table(Last)),
+
+ ?match(done, mnesia:activity(transaction, fun evil_iter_access/3, [Tab, FragNames, RawRead], mnesia_frag)),
+ Size = fun(Table) -> mnesia:table_info(Table, size) end,
+ ?match(true, 0 < mnesia:activity(transaction, Size, [Tab], mnesia_frag)),
+ ?match({atomic, ok}, mnesia:activity(ets, fun() -> mnesia:clear_table(Tab) end, mnesia_frag)),
+ ?match(0, mnesia:activity(transaction, Size, [Tab], mnesia_frag)),
+
+ ?verify_mnesia(Nodes, []).
+
+nice_iter_access(Tab, FragNames, RawRead) ->
+ RawData = ?ignore(lists:map(RawRead, FragNames)),
+ Keys = [K || {_, Recs} <- RawData, {_, K, _} <- Recs],
+ ExpectedFirst = hd(Keys),
+ ?match(ExpectedFirst, mnesia:first(Tab)),
+ ExpectedLast = lists:last(Keys),
+ ?match(ExpectedLast, mnesia:last(Tab)),
+
+ ExpectedAllPrev = ['$end_of_table' | lists:reverse(tl(lists:reverse(Keys)))],
+ ?match(ExpectedAllPrev, lists:map(fun(K) -> mnesia:prev(Tab, K) end, Keys)),
+
+ ExpectedAllNext = tl(Keys) ++ ['$end_of_table'],
+ ?match(ExpectedAllNext, lists:map(fun(K) -> mnesia:next(Tab, K) end, Keys)),
+
+ done.
+
+evil_iter_access(Tab, FragNames, RawRead) ->
+ RawData = ?ignore(lists:map(RawRead, FragNames)),
+ Keys = [K || {_, Recs} <- RawData, {_, K, _} <- Recs],
+ ExpectedFirst = hd(Keys),
+ ?match(ExpectedFirst, mnesia:first(Tab)),
+ ExpectedLast = lists:last(Keys),
+ ?match(ExpectedLast, mnesia:last(Tab)),
+
+ ExpectedAllPrev = ['$end_of_table' | lists:reverse(tl(lists:reverse(Keys)))],
+ ?match(ExpectedAllPrev, lists:map(fun(K) -> mnesia:prev(Tab, K) end, Keys)),
+
+ ExpectedAllNext = tl(Keys) ++ ['$end_of_table'],
+ ?match(ExpectedAllNext, lists:map(fun(K) -> mnesia:next(Tab, K) end, Keys)),
+
+ done.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+consistency(doc) ->
+ ["Add and delete fragments during TPC-B"];
+consistency(suite) -> [];
+consistency(Config) when is_list(Config) ->
+ ?skip("Not yet implemented (NYI).~n", []),
+ Nodes = ?acquire_nodes(2, Config),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+evil(doc) ->
+ ["Evil coverage of fragmentation API."];
+evil(suite) ->
+ [
+ evil_create,
+ evil_delete,
+ evil_change,
+ evil_combine,
+ evil_loop,
+ evil_delete_db_node
+ ].
+
+evil_create(suite) -> [];
+evil_create(Config) when is_list(Config) ->
+ [Node1, _Node2] = Nodes = ?acquire_nodes(2, Config),
+
+ Create = fun(T, D, P) -> mnesia:create_table(T, [{frag_properties, P}| D]) end,
+
+ Tab = evil_create,
+ %% Props in general
+ ?match({aborted, {badarg, Tab, {frag_properties, no_list}}},
+ Create(Tab, [], no_list)),
+ ?match({aborted, {badarg,Tab , [no_tuple]}},
+ Create(Tab, [], [no_tuple])),
+ ?match({aborted,{badarg, Tab, bad_key}},
+ Create(Tab, [], [{bad_key, 7}])),
+
+ %% n_fragments
+ ?match({aborted,{badarg, Tab, [{n_fragments}]}},
+ Create(Tab, [], [{n_fragments}])),
+ ?match({aborted,{badarg, Tab, [{n_fragments, 1, 1}]}},
+ Create(Tab, [], [{n_fragments, 1, 1}])),
+ ?match({aborted, {bad_type,Tab, {n_fragments, a}}},
+ Create(Tab, [], [{n_fragments, a}])),
+ ?match({aborted, {bad_type, Tab, {n_fragments, 0}}},
+ Create(Tab, [], [{n_fragments, 0}])),
+
+ %% *_copies
+ ?match({aborted, {bad_type, Tab, {n_ram_copies, -1}}},
+ Create(Tab, [], [{n_ram_copies, -1}, {n_fragments, 1}])),
+ ?match({aborted, {bad_type, Tab, {n_disc_copies, -1}}},
+ Create(Tab, [], [{n_disc_copies, -1}, {n_fragments, 1}])),
+ ?match({aborted, {bad_type, Tab, {n_disc_only_copies, -1}}},
+ Create(Tab, [], [{n_disc_only_copies, -1}, {n_fragments, 1}])),
+
+ %% node_pool
+ ?match({aborted, {bad_type, Tab, {node_pool, 0}}},
+ Create(Tab, [], [{node_pool, 0}])),
+ ?match({aborted, {combine_error, Tab, "Too few nodes in node_pool"}},
+ Create(Tab, [], [{n_ram_copies, 2}, {node_pool, [Node1]}])),
+
+ %% foreign_key
+ ?match({aborted, {bad_type, Tab, {foreign_key, bad_key}}},
+ Create(Tab, [], [{foreign_key, bad_key}])),
+ ?match({aborted,{bad_type, Tab, {foreign_key, {bad_key}}}},
+ Create(Tab, [], [{foreign_key, {bad_key}}])),
+ ?match({aborted, {no_exists, {bad_tab, frag_properties}}},
+ Create(Tab, [], [{foreign_key, {bad_tab, val}}])),
+ ?match({aborted, {combine_error, Tab, {Tab, val}}},
+ Create(Tab, [], [{foreign_key, {Tab, val}}])),
+ ?match({atomic, ok},
+ Create(Tab, [], [{n_fragments, 1}])),
+
+ ?match({aborted, {already_exists, Tab}},
+ Create(Tab, [], [{n_fragments, 1}])),
+
+ Tab2 = evil_create2,
+ ?match({aborted, {bad_type, no_attr}},
+ Create(Tab2, [], [{foreign_key, {Tab, no_attr}}])),
+ ?match({aborted, {combine_error, Tab2, _, _, _}},
+ Create(Tab2, [], [{foreign_key, {Tab, val}},
+ {node_pool, [Node1]}])),
+ ?match({aborted, {combine_error, Tab2, _, _, _}},
+ Create(Tab2, [], [{foreign_key, {Tab, val}},
+ {n_fragments, 2}])),
+ ?match({atomic, ok},
+ Create(Tab2, [{attributes, [a, b, c]}], [{foreign_key, {Tab, c}}])),
+ Tab3 = evil_create3,
+ ?match({aborted, {combine_error, Tab3, _, _, _}},
+ Create(Tab3, [{attributes, [a, b]}], [{foreign_key, {Tab2, b}}])),
+ ?match({atomic, ok},
+ Create(Tab3, [{attributes, [a, b]}], [{foreign_key, {Tab, b}}])),
+
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+evil_delete(suite) -> [];
+evil_delete(Config) when is_list(Config) ->
+ ?skip("Not yet implemented (NYI).~n", []),
+ Nodes = ?acquire_nodes(2, Config),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+evil_change(suite) -> [];
+evil_change(Config) when is_list(Config) ->
+ [N1,N2,_N3] = Nodes = ?acquire_nodes(3, Config),
+ Create = fun(T, D, P) -> mnesia:create_table(T, [{frag_properties, P}| D]) end,
+ Props1 = [{n_fragments, 2}, {node_pool, [N1]}],
+ Tab1 = evil_change_ram,
+ ?match({atomic, ok}, Create(Tab1, [], Props1)),
+
+ ?match({atomic,ok}, mnesia:change_table_frag(Tab1, {add_frag, Nodes})),
+ Dist10 = frag_dist(Tab1),
+ ?match([{N1,3}], Dist10),
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab1, {add_node, N2})),
+ Dist11 = frag_dist(Tab1),
+ ?match([{N2,0},{N1,3}], Dist11),
+ mnesia_test_lib:kill_mnesia([N2]),
+ ?match({aborted,_}, mnesia:change_table_frag(Tab1, {add_frag, [N2,N1]})),
+ ?verbose("~p~n",[frag_dist(Tab1)]),
+ mnesia_test_lib:start_mnesia([N2]),
+
+ Tab2 = evil_change_disc,
+ ?match({atomic,ok}, Create(Tab2,[],[{n_disc_copies,1},{n_fragments,1},{node_pool,[N1,N2]}])),
+ ?verbose("~p~n", [frag_dist(Tab2)]),
+ ?match({atomic,ok}, mnesia:change_table_frag(Tab2, {add_frag, [N1,N2]})),
+ _Dist20 = frag_dist(Tab2),
+ mnesia_test_lib:kill_mnesia([N2]),
+ ?match({atomic,ok}, mnesia:change_table_frag(Tab2, {add_frag, [N1,N2]})),
+ ?match({aborted,_}, mnesia:change_table_frag(Tab2, {add_frag, [N2,N1]})),
+
+ mnesia_test_lib:start_mnesia([N2]),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+evil_combine(doc) -> ["Bug in mnesia_4.1.5 and earlier"];
+evil_combine(suite) -> [];
+evil_combine(Config) when is_list(Config) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ ?match({atomic, ok},mnesia:create_table(tab1, [{disc_copies, [Node1]},
+ {frag_properties, [{n_fragments, 2},
+ {node_pool, [Node1]},
+ {n_disc_copies, 1}]}])),
+ ?match({atomic, ok},mnesia:create_table(tab2, [{disc_copies, [Node1]}])),
+ mnesia:wait_for_tables([tab1, tab2], infinity),
+
+ Add2 = fun() ->
+ mnesia:transaction(fun() ->
+ mnesia:write({tab2,1,1})
+ end)
+ end,
+ Fun = fun() ->
+ Add2(),
+ mnesia:write({tab1,9,10})
+ end,
+ ?match(ok, mnesia:activity({transaction, 1}, Fun, [], mnesia_frag)),
+
+ Read = fun(T, K) ->
+ mnesia:read(T, K, read)
+ end,
+
+ ?match([{tab1,9,10}],mnesia:activity(async_dirty, Read, [tab1, 9], mnesia_frag)),
+ ?match([{tab2,1,1}],mnesia:activity(async_dirty, Read, [tab2, 1], mnesia_frag)),
+
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+evil_loop(doc) -> ["Test select/[14]"];
+evil_loop(suite) -> [];
+evil_loop(Config) when is_list(Config) ->
+ [Node1,_Node2] = ?acquire_nodes(2, Config),
+ Tab1 = ss_oset,
+ Tab2 = ss_set,
+ Tab3 = ss_bag,
+ Tabs = [Tab1, Tab2, Tab3],
+ RecName = ss,
+ ?match({atomic, ok}, mnesia:create_table([{name, Tab1},
+ {ram_copies, [Node1]},
+ {record_name, RecName},
+ {type, ordered_set}])),
+ ?match({atomic, ok}, mnesia:create_table([{name, Tab2},
+ {record_name, RecName},
+ {ram_copies, [Node1]},
+ {type, set}])),
+ ?match({atomic, ok}, mnesia:create_table([{name, Tab3},
+ {record_name, RecName},
+ {ram_copies, [Node1]},
+ {type, bag}])),
+ Keys = [-3, -2] ++ lists:seq(1, 5, 2) ++ lists:seq(6, 10),
+ Recs = [{RecName, K, K} || K <- Keys],
+ [mnesia:dirty_write(Tab1, R) || R <- Recs],
+ [mnesia:dirty_write(Tab2, R) || R <- Recs],
+ [mnesia:dirty_write(Tab3, R) || R <- Recs],
+
+ Activate =
+ fun(Tab) ->
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, {activate, []})),
+ Dist = frag_dist(Tab),
+ ?match({atomic, ok}, mnesia:change_table_frag(Tab, {add_frag, Dist}))
+ end,
+
+ Activate(Tab1),
+ Activate(Tab2),
+ Activate(Tab3),
+
+ Match = fun(Tab) -> mnesia:match_object(Tab, {'_', '_', '_'}, write) end,
+ Select = fun(Tab) -> mnesia:select(Tab, [{'_', [], ['$_']}]) end,
+ Trans = fun(Fun, Args) -> mnesia:activity(transaction, Fun, Args, mnesia_frag) end,
+ LoopHelp = fun('$end_of_table',_) ->
+ [];
+ ({Res,Cont},Fun) ->
+ Sel = mnesia:select(Cont),
+ Res ++ Fun(Sel, Fun)
+ end,
+ SelLoop = fun(Table) ->
+ Sel = mnesia:select(Table, [{'_', [], ['$_']}], 1, read),
+ LoopHelp(Sel, LoopHelp)
+ end,
+
+ R1 = {RecName, 2, 2},
+ R2 = {RecName, 4, 4},
+ R3 = {RecName, 2, 3},
+ R4 = {RecName, 3, 1},
+ R5 = {RecName, 104, 104},
+ W1 = fun(Tab,Search) ->
+ mnesia:write(Tab,R1,write),
+ mnesia:write(Tab,R2,write),
+ Search(Tab)
+ end,
+ S1 = lists:sort([R1, R2| Recs]),
+ ?match(S1, sort_res(Trans(W1, [Tab1, Select]))),
+ ?match(S1, sort_res(Trans(W1, [Tab1, Match]))),
+ ?match(S1, sort_res(Trans(W1, [Tab1, SelLoop]))),
+ ?match(S1, sort_res(Trans(W1, [Tab2, Select]))),
+ ?match(S1, sort_res(Trans(W1, [Tab2, SelLoop]))),
+ ?match(S1, sort_res(Trans(W1, [Tab2, Match]))),
+ ?match(S1, sort_res(Trans(W1, [Tab3, Select]))),
+ ?match(S1, sort_res(Trans(W1, [Tab3, SelLoop]))),
+ ?match(S1, sort_res(Trans(W1, [Tab3, Match]))),
+ [mnesia:dirty_delete_object(Frag, R) || R <- [R1, R2],
+ Tab <- Tabs,
+ Frag <- frag_names(Tab)],
+
+ W2 = fun(Tab, Search) ->
+ mnesia:write(Tab, R3, write),
+ mnesia:write(Tab, R1, write),
+ Search(Tab)
+ end,
+ S2 = lists:sort([R1 | Recs]),
+ S2Bag = lists:sort([R1, R3 | Recs]),
+ io:format("S2 = ~p\n", [S2]),
+ ?match(S2, sort_res(Trans(W2, [Tab1, Select]))),
+ ?match(S2, sort_res(Trans(W2, [Tab1, SelLoop]))),
+ ?match(S2, sort_res(Trans(W2, [Tab1, Match]))),
+ ?match(S2, sort_res(Trans(W2, [Tab2, Select]))),
+ ?match(S2, sort_res(Trans(W2, [Tab2, SelLoop]))),
+ ?match(S2, sort_res(Trans(W2, [Tab2, Match]))),
+ io:format("S2Bag = ~p\n", [S2Bag]),
+ ?match(S2Bag, sort_res(Trans(W2, [Tab3, Select]))),
+ ?match(S2Bag, sort_res(Trans(W2, [Tab3, SelLoop]))),
+ ?match(S2Bag, sort_res(Trans(W2, [Tab3, Match]))),
+
+ W3 = fun(Tab,Search) ->
+ mnesia:write(Tab, R4, write),
+ mnesia:delete(Tab, element(2, R1), write),
+ Search(Tab)
+ end,
+ S3Bag = lists:sort([R4 | lists:delete(R1, Recs)]),
+ S3 = lists:delete({RecName, 3, 3}, S3Bag),
+ ?match(S3, sort_res(Trans(W3, [Tab1, Select]))),
+ ?match(S3, sort_res(Trans(W3, [Tab1, SelLoop]))),
+ ?match(S3, sort_res(Trans(W3, [Tab1, Match]))),
+ ?match(S3, sort_res(Trans(W3, [Tab2, SelLoop]))),
+ ?match(S3, sort_res(Trans(W3, [Tab2, Select]))),
+ ?match(S3, sort_res(Trans(W3, [Tab2, Match]))),
+ ?match(S3Bag, sort_res(Trans(W3, [Tab3, Select]))),
+ ?match(S3Bag, sort_res(Trans(W3, [Tab3, SelLoop]))),
+ ?match(S3Bag, sort_res(Trans(W3, [Tab3, Match]))),
+
+ W4 = fun(Tab,Search) ->
+ mnesia:delete(Tab, -1, write),
+ mnesia:delete(Tab, 4 , write),
+ mnesia:delete(Tab, 17, write),
+ mnesia:delete_object(Tab, {RecName, -1, x}, write),
+ mnesia:delete_object(Tab, {RecName, 4, x}, write),
+ mnesia:delete_object(Tab, {RecName, 42, x}, write),
+ mnesia:delete_object(Tab, R2, write),
+ mnesia:write(Tab, R5, write),
+ Search(Tab)
+ end,
+ S4Bag = lists:sort([R5 | S3Bag]),
+ S4 = lists:sort([R5 | S3]),
+ ?match(S4, sort_res(Trans(W4, [Tab1, Select]))),
+ ?match(S4, sort_res(Trans(W4, [Tab1, SelLoop]))),
+ ?match(S4, sort_res(Trans(W4, [Tab1, Match]))),
+ ?match(S4, sort_res(Trans(W4, [Tab2, Select]))),
+ ?match(S4, sort_res(Trans(W4, [Tab2, SelLoop]))),
+ ?match(S4, sort_res(Trans(W4, [Tab2, Match]))),
+ ?match(S4Bag, sort_res(Trans(W4, [Tab3, Select]))),
+ ?match(S4Bag, sort_res(Trans(W4, [Tab3, SelLoop]))),
+ ?match(S4Bag, sort_res(Trans(W4, [Tab3, Match]))),
+ [mnesia:dirty_delete_object(Tab, R) || R <- [{RecName, 3, 3}, R5], Tab <- Tabs],
+
+ %% hmmm anything more??
+
+ ?verify_mnesia([Node1], []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+evil_delete_db_node(doc) ->
+ ["Delete db_node with a repicated table with foreign key"];
+evil_delete_db_node(suite) -> [];
+evil_delete_db_node(Config) when is_list(Config) ->
+ Nodes = lists:sort(?acquire_nodes(2, Config)),
+ Local = node(),
+ Remote = hd(Nodes -- [Local]),
+
+ Type = case mnesia_test_lib:diskless(Config) of
+ true -> n_ram_copies;
+ false -> n_disc_copies
+ end,
+ Tab = frag_master,
+ ?match({atomic, ok}, mnesia:create_table(Tab, [{frag_properties, [{Type, 2}, {node_pool, Nodes}]}])),
+ ExtraTab = frag_foreigner,
+ ?match({atomic, ok}, mnesia:create_table(ExtraTab, [{frag_properties, [{foreign_key, {Tab, key}}, {node_pool, Nodes}]}])),
+
+ GetPool = fun(T) ->
+ case lists:keysearch(node_pool, 1, mnesia:table_info (T, frag_properties)) of
+ {value, {node_pool, N}} -> lists:sort(N);
+ false -> []
+ end
+ end,
+ ?match(Nodes, GetPool(Tab)),
+ ?match(Nodes, GetPool(ExtraTab)),
+
+
+ ?match(stopped, rpc:call(Remote, mnesia, stop, [])),
+ ?match({atomic, ok}, mnesia:del_table_copy(schema, Remote)),
+
+ ?match([Local], GetPool(Tab)),
+ ?match([Local], GetPool(ExtraTab)),
+
+ ?verify_mnesia([Local], []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Misc convenient helpers
+
+frag_write(Tab, Rec) ->
+ Fun = fun() -> mnesia:write(Tab, Rec, write) end,
+ mnesia:activity(sync_dirty, Fun, mnesia_frag).
+
+frag_dist(Tab) ->
+ Fun = fun() -> mnesia:table_info(Tab, frag_dist) end,
+ mnesia:activity(sync_dirty, Fun, mnesia_frag).
+
+frag_names(Tab) ->
+ Fun = fun() -> mnesia:table_info(Tab, frag_names) end,
+ mnesia:activity(sync_dirty, Fun, mnesia_frag).
+
+frag_rec_dist(Tab) ->
+ Fun = fun() -> mnesia:table_info(Tab, frag_size) end,
+ [Size || {_, Size} <- mnesia:activity(sync_dirty, Fun, mnesia_frag)].
+
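+%% A minimal sketch (not a test case) of the pattern used by the helpers
+%% above: a table is made fragmented via frag_properties at creation time
+%% and is thereafter accessed through the mnesia_frag activity module,
+%% which maps each key onto the right fragment. The table name below is
+%% illustrative.
+frag_sketch() ->
+    {atomic, ok} = mnesia:create_table(sketch_tab,
+                                       [{frag_properties, [{n_fragments, 2}]}]),
+    frag_write(sketch_tab, {sketch_tab, 1, a}),
+    frag_rec_dist(sketch_tab).
+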
+table_size(Tab) ->
+ Node = mnesia:table_info(Tab, where_to_read),
+ rpc:call(Node, mnesia, table_info, [Tab, size]).
+
+sort_res(List) when is_list(List) ->
+ lists:sort(List);
+sort_res(Else) ->
+ Else.
+
+rev_res(List) when is_list(List) ->
+ lists:reverse(List);
+rev_res(Else) ->
+ Else.
diff --git a/lib/mnesia/test/mnesia_inconsistent_database_test.erl b/lib/mnesia/test/mnesia_inconsistent_database_test.erl
new file mode 100644
index 0000000000..b19cd8e01b
--- /dev/null
+++ b/lib/mnesia/test/mnesia_inconsistent_database_test.erl
@@ -0,0 +1,74 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1998-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(mnesia_inconsistent_database_test).
+-author('[email protected]').
+
+-behaviour(gen_event).
+
+%%-compile([export_all]).
+-include("mnesia_test_lib.hrl").
+
+%% gen_event callback interface
+-export([init/1, handle_event/2, handle_call/2, handle_info/2,
+ terminate/2, code_change/3]).
+
+
+init(_Args) ->
+ ?verbose("~p installed as event_module~n", [?MODULE]),
+ {ok, []}.
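+
+%% This handler is presumably installed by pointing mnesia's event_module
+%% configuration parameter at this module before mnesia is started, e.g.
+%%
+%%   application:set_env(mnesia, event_module, mnesia_inconsistent_database_test)
+%%
+%% so that inconsistent_database system events reach handle_any_event/2.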
+
+handle_event(Msg, State) ->
+ handle_any_event(Msg, State).
+
+handle_info(Msg, State) ->
+ handle_any_event(Msg, State).
+
+
+handle_call(Msg, State) ->
+ handle_any_event(Msg, State).
+
+
+%% The main...
+
+handle_any_event({mnesia_system_event, Event}, State)
+ when element(1, Event) == inconsistent_database ->
+ ?error("Got event: ~p~n", [Event]),
+ {ok, State};
+handle_any_event(Msg, State) ->
+ ?verbose("Got event: ~p~n", [Msg]),
+ {ok, State}.
+
+%%-----------------------------------------------------------------
+%% terminate(Reason, State) ->
+%% AnyVal
+%%-----------------------------------------------------------------
+
+terminate(_Reason, _State) ->
+ ok.
+
+%%----------------------------------------------------------------------
+%% Func: code_change/3
+%% Purpose: Upgrade process when its code is to be changed
+%% Returns: {ok, NewState}
+%%----------------------------------------------------------------------
+code_change(_OldVsn, _State, _Extra) ->
+ exit(not_supported).
+
diff --git a/lib/mnesia/test/mnesia_install_test.erl b/lib/mnesia/test/mnesia_install_test.erl
new file mode 100644
index 0000000000..42a2a19f37
--- /dev/null
+++ b/lib/mnesia/test/mnesia_install_test.erl
@@ -0,0 +1,342 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(mnesia_install_test).
+-author('[email protected]').
+
+-compile([export_all]).
+-include("mnesia_test_lib.hrl").
+
+init_per_testcase(Func, Conf) ->
+ mnesia_test_lib:init_per_testcase(Func, Conf).
+
+fin_per_testcase(Func, Conf) ->
+ mnesia_test_lib:fin_per_testcase(Func, Conf).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+all(doc) ->
+ ["Run some small but demanding test cases in order to verify",
+ "that the basic functionality in Mnesia still works.",
+ "",
+ "Try some very simple things to begin with and increase the",
+ "difficulty stepwise. This test suite should be run before",
+ "all the others if you expect to find bugs.",
+ "",
+ "The function mnesia_install_test:silly() does not use the whole",
+ "infra structure of the test suite. Invoke it on a single node to",
+ "begin with. If that works, proceed with pong = net_adm:ping(SomeOtherNode)",
+ "and rerun silly() in order to perform some distributed tests."];
+all(suite) ->
+ [
+ silly_durability,
+ silly_move,
+ silly_upgrade
+ %,stress
+ ].
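+
+%% For example, assuming two connected nodes a@host and b@host (the node
+%% names are illustrative):
+%%
+%%   (a@host)1> mnesia_install_test:silly().
+%%   (a@host)2> pong = net_adm:ping('b@host').
+%%   (a@host)3> mnesia_install_test:silly().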
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Stepwise testing of more and more advanced features
+silly() ->
+ Nodes = [node()] ++ nodes(),
+ mnesia_test_lib:kill_mnesia(Nodes),
+ Config = [{nodes, Nodes}],
+ mnesia_test_lib:eval_test_case(?MODULE, silly2, Config).
+
+silly2(Config) when is_list(Config) ->
+ [Node1 | _] = Nodes = ?acquire_nodes(3, Config),
+ mnesia_test_lib:kill_mnesia(Nodes),
+ ?ignore([mnesia:delete_schema([N]) || N <- Nodes]),
+ ?match(ok, mnesia:create_schema([Node1])),
+ ?match(ok, rpc:call(Node1, mnesia, start, [])),
+ ?match(ok, rpc:call(Node1, mnesia, wait_for_tables,
+ [[schema], infinity])),
+ Res = silly_durability(Config),
+ StressFun = fun(F) -> apply(?MODULE, F, [Config]) end,
+ R =
+ case length(Nodes) of
+ L when L > 1 ->
+ Node2 = lists:nth(2, Nodes),
+ AddDb = [schema, Node2, ram_copies],
+ ?match({atomic, ok},
+ rpc:call(Node1, mnesia, add_table_copy, AddDb)),
+ Args = [[{extra_db_nodes, [Node1]}]],
+ ?match(ok, rpc:call(Node2, mnesia, start, Args)),
+ ChangeDb = [schema, Node2, disc_copies],
+ ?match({atomic, ok},
+ rpc:call(Node1, mnesia, change_table_copy_type,
+ ChangeDb)),
+ ?match([], mnesia_test_lib:sync_tables([Node1, Node2],
+ [schema])),
+ MoveRes = silly_move(Config),
+ UpgradeRes = silly_upgrade(Config),
+ StressRes = [StressFun(F) || F <- stress(suite)],
+ ?verify_mnesia([Node2], []),
+ [Res, MoveRes, UpgradeRes] ++ StressRes;
+ _ ->
+ StressRes = [StressFun(F) || F <- stress(suite)],
+ ?warning("Too few nodes. Perform net_adm:ping(OtherNode) "
+ "and rerun!!!~n", []),
+ [Res | StressRes]
+ end,
+ ?verify_mnesia([Node1], []),
+ R.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+silly_durability(doc) ->
+ ["Simple test of durability"];
+silly_durability(suite) -> [];
+silly_durability(Config) when is_list(Config) ->
+ [Node1] = ?acquire_nodes(1, Config),
+ Tab = silly,
+ Storage = mnesia_test_lib:storage_type(disc_copies, Config),
+
+ ?match({atomic, ok}, rpc:call(Node1, mnesia,
+ create_table, [Tab, [{Storage, [Node1]}]])),
+
+ Read = fun() -> mnesia:read({Tab, a}) end,
+ Write = fun() -> mnesia:write({Tab, a, b}) end,
+
+ ?match({atomic, []},
+ rpc:call(Node1, mnesia, transaction, [Read])),
+ ?match({atomic, ok},
+ rpc:call(Node1, mnesia, transaction, [Write])),
+ ?match({atomic, [{Tab, a, b}]},
+ rpc:call(Node1, mnesia, transaction, [Read])),
+
+ ?match(stopped, rpc:call(Node1, mnesia, stop, [])),
+ ?match(ok, rpc:call(Node1, mnesia, start, [])),
+ case mnesia_test_lib:diskless(Config) of
+ true ->
+ skip;
+ false ->
+ ?match(ok, rpc:call(Node1, mnesia, wait_for_tables, [[Tab], infinity])),
+ ?match({atomic, [{Tab, a, b}]},
+ rpc:call(Node1, mnesia, transaction, [Read]))
+ end,
+ ?verify_mnesia([Node1], []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+silly_move(doc) ->
+ ["Simple test of movement of a replica from one node to another"];
+silly_move(suite) -> [];
+silly_move(Config) when is_list(Config) ->
+ [Node1, Node2] = ?acquire_nodes(2, Config),
+ Tab = silly_move,
+ ?match({atomic, ok},
+ rpc:call(Node1, mnesia,
+ create_table, [Tab, [{ram_copies, [Node2]}]])),
+ ?match([], mnesia_test_lib:sync_tables([Node1, Node2], [Tab])),
+
+ Read = fun() -> mnesia:read({Tab, a}) end,
+ Write = fun() -> mnesia:write({Tab, a, b}) end,
+
+ ?match({atomic, []},
+ rpc:call(Node1, mnesia, transaction, [Read])),
+ ?match({atomic, ok},
+ rpc:call(Node1, mnesia, transaction, [Write])),
+ ?match({atomic, [{Tab, a, b}]},
+ rpc:call(Node1, mnesia, transaction, [Read])),
+
+ case mnesia_test_lib:diskless(Config) of
+ true -> skip;
+ false ->
+ ?match({atomic, ok},
+ rpc:call(Node1, mnesia,
+ change_table_copy_type, [Tab, Node2, disc_only_copies])),
+ ?match([], mnesia_test_lib:sync_tables([Node1, Node2], [Tab]))
+ end,
+ ?match({atomic, [{Tab, a, b}]}, rpc:call(Node1, mnesia, transaction, [Read])),
+
+ ?match({atomic, ok},
+ rpc:call(Node1, mnesia,
+ move_table_copy, [Tab, Node2, Node1])),
+ ?match([], mnesia_test_lib:sync_tables([Node1, Node2], [Tab])),
+ ?match({atomic, [{Tab, a, b}]},
+ rpc:call(Node1, mnesia, transaction, [Read])),
+ ?verify_mnesia([Node1], []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+silly_upgrade(doc) ->
+ ["Simple test of a schema upgrade and restore from backup"];
+silly_upgrade(suite) -> [];
+silly_upgrade(Config) when is_list(Config) ->
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+ Name = silly_upgrade,
+ Tab1 = silly_upgrade1,
+ Tab2 = silly_upgrade2,
+ Bup = "silly_upgrade.BUP",
+ Bup2 = "silly_upgrade_part.BUP",
+ ?match({atomic, ok}, mnesia:create_table(Tab1, [{ram_copies, Nodes}])),
+ ?match({atomic, ok}, mnesia:create_table(Tab2, [{disc_only_copies, Nodes}])),
+
+ CpState = add_some_records(Tab1, Tab2, []),
+ ?match(match, verify_state(Tab1, Tab2, CpState)),
+ file:delete(Bup),
+ ?match(ok, mnesia:backup(Bup)),
+ Args = [{name, Name}, {ram_overrides_dump, true},
+ {min, [Tab1, schema]}, {max, [Tab2]}],
+ ?match({ok, Name, _}, mnesia:activate_checkpoint(Args)),
+
+ IgnoreState = add_more_records(Tab1, Tab2, CpState),
+ ?match(match, verify_state(Tab1, Tab2, IgnoreState)),
+ ?match({mismatch, _, _}, verify_state(Tab1, Tab2, CpState)),
+ ?match({atomic, ok}, mnesia:del_table_copy(Tab2, Node1)),
+ file:delete(Bup2),
+ ?match(ok, mnesia:backup_checkpoint(Name, Bup2)),
+
+ UpgradeState = transform_some_records(Tab1, Tab2, IgnoreState),
+ ?match({mismatch, _, _}, verify_state(Tab1, Tab2, CpState)),
+ ?match({mismatch, _, _}, verify_state(Tab1, Tab2, IgnoreState)),
+ ?match(match, verify_state(Tab1, Tab2, UpgradeState)),
+
+ ?match(ok, mnesia:deactivate_checkpoint(Name)),
+ ?match(match, verify_state(Tab1, Tab2, UpgradeState)),
+
+ ?match(ok, mnesia:install_fallback(Bup2)),
+ file:delete(Bup2),
+ %% Will generate intentional crash, fatal error
+ ?match([], mnesia_test_lib:stop_mnesia([Node2])),
+ wait_till_dead([Node1, Node2]),
+ ?match([], mnesia_test_lib:start_mnesia([Node1, Node2], [Tab1, Tab2])),
+ ?match(match, verify_state(Tab1, Tab2, CpState)),
+
+ ?match(ok, mnesia:install_fallback(Bup)),
+ file:delete(Bup),
+ %% Will generate intentional crash, fatal error
+ ?match([], mnesia_test_lib:stop_mnesia([Node1, Node2])),
+ wait_till_dead([Node1, Node2]),
+ ?match([], mnesia_test_lib:start_mnesia([Node1, Node2], [Tab1, Tab2])),
+ CpState2 = [X || X <- CpState, element(1, X) /= Tab1],
+ ?match(match, verify_state(Tab1, Tab2, CpState2)),
+ ?verify_mnesia(Nodes, []).
+
+wait_till_dead([]) -> ok;
+wait_till_dead([N|Ns]) ->
+ Apps = rpc:call(N, application, which_applications, []),
+ case lists:keymember(mnesia, 1, Apps) of
+ true ->
+ timer:sleep(10),
+ wait_till_dead([N|Ns]);
+ false ->
+ wait_till_dead(Ns)
+ end.
+
+add_some_records(Tab1, Tab2, Old) ->
+ Recs1 = [{Tab1, I, I} || I <- lists:seq(1, 30)],
+ Recs2 = [{Tab2, I, I} || I <- lists:seq(20, 40)],
+ lists:foreach(fun(R) -> mnesia:dirty_write(R) end, Recs1),
+ Fun = fun(R) -> mnesia:write(R) end,
+ Trans = fun() -> lists:foreach(Fun, Recs2) end,
+ ?match({atomic, _}, mnesia:transaction(Trans)),
+ lists:sort(Old ++ Recs1 ++ Recs2).
+
+add_more_records(Tab1, Tab2, Old) ->
+ Change1 = [{T, K, V+100} || {T, K, V} <- Old, K==23],
+ Change2 = [{T, K, V+100} || {T, K, V} <- Old, K==24],
+ Del = [{T, K} || {T, K, _V} <- Old, K>=25],
+ New = [{Tab1, 50, 50}, {Tab2, 50, 50}],
+ lists:foreach(fun(R) -> mnesia:dirty_write(R) end, Change1),
+ lists:foreach(fun(R) -> mnesia:dirty_delete(R) end, Del),
+ Fun = fun(R) -> mnesia:write(R) end,
+ Trans = fun() -> lists:foreach(Fun, Change2 ++ New) end,
+ ?match({atomic, ok}, mnesia:transaction(Trans)),
+ Recs = [{T, K, V} || {T, K, V} <- Old, K<23] ++ Change1 ++ Change2 ++ New,
+ lists:sort(Recs).
+
+
+verify_state(Tab1, Tab2, Exp) ->
+ Fun = fun() ->
+ Act1 = [mnesia:read({Tab1, K}) || K <- mnesia:all_keys(Tab1)],
+ Act2 = [mnesia:read({Tab2, K}) || K <- mnesia:all_keys(Tab2)],
+ Act = lists:append(Act1) ++ lists:append(Act2),
+ {ok, Act -- Exp, Exp -- Act}
+ end,
+ case mnesia:transaction(Fun) of
+ {atomic, {ok, [], []}} -> match;
+ {atomic, {ok, More, Less}} -> {mismatch, More, Less};
+ {aborted, Reason} -> {error, Reason}
+ end.
+
+transform_some_records(Tab1, _Tab2, Old) ->
+ Fun = fun(Rec) ->
+ list_to_tuple(tuple_to_list(Rec) ++ [4711])
+ end,
+ ?match({atomic, ok},
+ mnesia:transform_table(Tab1, Fun, [key, val, extra])),
+ Filter = fun(Rec) when element(1, Rec) == Tab1 -> {true, Fun(Rec)};
+ (_) -> true
+ end,
+ lists:sort(lists:zf(Filter, Old)).
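+%% For example, with Tab1 = silly_upgrade1 a record {silly_upgrade1, 7, 7} in
+%% Old is expected to come back as {silly_upgrade1, 7, 7, 4711}, while records
+%% belonging to Tab2 pass through the filter unchanged.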
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+stress(doc) ->
+ ["Stress the system a little"];
+stress(suite) ->
+ [
+ conflict,
+ dist
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+dist(doc) ->
+ ["Avoid lock conflicts in order to maximize thruput",
+ "Ten drivers per node, tables replicated to all nodes, lots of branches"];
+dist(suite) -> [];
+dist(Config) when is_list(Config) ->
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, 10 * 60000}]),
+ Storage = mnesia_test_lib:storage_type(disc_copies, Config),
+ ?match({ok, _}, mnesia_tpcb:start(dist_args(Nodes, Storage))).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+conflict(doc) ->
+ ["Provoke a lot of lock conflicts.",
+ "Ten drivers per node, tables replicated to all nodes, single branch"];
+conflict(suite) -> [];
+conflict(Config) when is_list(Config) ->
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, 10 * 60000}]),
+ Storage = mnesia_test_lib:storage_type(disc_copies, Config),
+ ?match({ok, _}, mnesia_tpcb:start(conflict_args(Nodes, Storage))).
+
+conflict_args(Nodes, ReplicaType) ->
+ [{db_nodes, Nodes},
+ {driver_nodes, Nodes},
+ {replica_nodes, Nodes},
+ {n_drivers_per_node, 10},
+ {n_branches, 1},
+ {n_accounts_per_branch, 10},
+ {replica_type, ReplicaType},
+ {stop_after, timer:minutes(5)},
+ {report_interval, timer:seconds(10)},
+ {use_running_mnesia, true},
+ {reuse_history_id, true}].
+
+dist_args(Nodes, ReplicaType) ->
+ [{db_nodes, Nodes},
+ {driver_nodes, Nodes},
+ {replica_nodes, Nodes},
+ {n_drivers_per_node, 10},
+ {n_branches, length(Nodes) * 100},
+ {n_accounts_per_branch, 10},
+ {replica_type, ReplicaType},
+ {stop_after, timer:minutes(5)},
+ {report_interval, timer:seconds(10)},
+ {use_running_mnesia, true},
+ {reuse_history_id, true}].
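+
+%% conflict_args/2 and dist_args/2 differ only in n_branches: a single branch
+%% forces all drivers to compete for the same records (lots of lock conflicts),
+%% while length(Nodes) * 100 branches spreads the load so that conflicts stay
+%% rare and throughput is maximized.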
+
diff --git a/lib/mnesia/test/mnesia_isolation_test.erl b/lib/mnesia/test/mnesia_isolation_test.erl
new file mode 100644
index 0000000000..4fc6e8fe58
--- /dev/null
+++ b/lib/mnesia/test/mnesia_isolation_test.erl
@@ -0,0 +1,2419 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(mnesia_isolation_test).
+-author('[email protected]').
+
+-compile([export_all]).
+-include("mnesia_test_lib.hrl").
+
+init_per_testcase(Func, Conf) ->
+ mnesia_test_lib:init_per_testcase(Func, Conf).
+
+fin_per_testcase(Func, Conf) ->
+ mnesia_test_lib:fin_per_testcase(Func, Conf).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+all(doc) ->
+ ["Verify the isolation property.",
+ "Operations of concurrent transactions must yield results which",
+ "are indistinguishable from the results which would be obtained by",
+ "forcing each transaction to be serially executed to completion in",
+ "some order. This means that repeated reads of the same records",
+ "within any committed transaction must have returned identical",
+ "data when run concurrently with any mix of arbitary transactions.",
+ "Updates in one transaction must not be visible in any other",
+ "transaction before the transaction has been committed."];
+all(suite) ->
+ [
+ locking,
+ visibility
+ ].
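+
+%% As a minimal illustration of the property above (not itself a test case):
+%% if a transaction runs
+%%
+%%     fun() -> A = mnesia:read({Tab, Key}),
+%%              B = mnesia:read({Tab, Key}),
+%%              {A, B}
+%%     end
+%%
+%% then A and B must be equal regardless of which other transactions commit
+%% while it runs, and any mnesia:write/1 it performs must remain invisible to
+%% other transactions until it has committed.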
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+locking(doc) ->
+ ["Verify locking semantics for various configurations",
+ " NoLock = lock_funs(no_lock, any_granularity)",
+ " SharedLock = lock_funs(shared_lock, any_granularity)",
+ " ExclusiveLock = lock_funs(exclusive_lock, any_granularity)",
+ " AnyLock = lock_funs(any_lock, any_granularity)"];
+locking(suite) ->
+ [no_conflict,
+ simple_queue_conflict,
+ advanced_queue_conflict,
+ simple_deadlock_conflict,
+ advanced_deadlock_conflict,
+ lock_burst,
+ sticky_locks,
+ unbound_locking,
+ admin_conflict,
+%% removed_resources,
+ nasty
+ ].
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+no_conflict(suite) -> [];
+no_conflict(Config) when is_list(Config) ->
+ [Node1] = ?acquire_nodes(1, Config),
+ Tab = no_conflict,
+ create_conflict_table(Tab, [Node1]),
+ Fun = fun(OtherOid, Lock1, Lock2) ->
+ %% Start two transactions
+ {success, [B, A]} = ?start_activities([Node1, Node1]),
+ ?start_transactions([B, A]),
+
+ A ! fun() -> Lock1(one_oid(Tab)), ok end,
+ ?match_receive({A, ok}),
+ B ! fun() -> Lock2(OtherOid), ok end,
+ ?match_receive({B, ok}),
+ A ! fun() -> mnesia:abort(ok) end,
+ ?match_receive({A, {aborted, ok}}),
+ B ! fun() -> mnesia:abort(ok) end,
+ ?match_receive({B, {aborted, ok}})
+ end,
+ NoLocks = lock_funs(no_lock, any_granularity),
+ SharedLocks = lock_funs(shared_lock, any_granularity),
+ AnyLocks = lock_funs(any_lock, any_granularity),
+ OneOneFun = fun(Lock1, Lock2) -> Fun(one_oid(Tab), Lock1, Lock2) end,
+ fun_loop(OneOneFun, NoLocks, AnyLocks),
+ fun_loop(OneOneFun, AnyLocks, NoLocks),
+ fun_loop(OneOneFun, SharedLocks, SharedLocks),
+
+ %% Lock different objects
+ OneOtherFun = fun(Lock1, Lock2) -> Fun(other_oid(Tab), Lock1, Lock2) end,
+ OneSharedLocks = lock_funs(shared_lock, one),
+ OneExclusiveLocks = lock_funs(exclusive_lock, one),
+ fun_loop(OneOtherFun, OneSharedLocks, OneExclusiveLocks),
+ fun_loop(OneOtherFun, OneExclusiveLocks, OneSharedLocks),
+ fun_loop(OneOtherFun, OneExclusiveLocks, OneExclusiveLocks),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+simple_queue_conflict(suite) -> [];
+simple_queue_conflict(Config) when is_list(Config) ->
+ [Node1] = ?acquire_nodes(1, Config),
+ Tab = simple_queue_conflict,
+ create_conflict_table(Tab, [Node1]),
+ Fun = fun(OneLock, OtherLock) ->
+ %% Start two transactions
+ {success, [B, A]} = ?start_activities([Node1, Node1]),
+ ?start_transactions([B, A]),
+
+ A ! fun() -> OneLock(one_oid(Tab)), ok end,
+ ?match_receive({A, ok}),
+ B ! fun() -> OtherLock(one_oid(Tab)), ok end,
+ wait_for_lock(B, [Node1], 20), % Max 10 sec
+ A ! end_trans,
+ ?match_multi_receive([{A, {atomic, end_trans}}, {B, ok}]),
+ B ! fun() -> mnesia:abort(ok) end,
+ ?match_receive({B, {aborted, ok}})
+ end,
+ OneSharedLocks = lock_funs(shared_lock, one),
+ AllSharedLocks = lock_funs(shared_lock, all),
+ OneExclusiveLocks = lock_funs(exclusive_lock, one),
+ AllExclusiveLocks = lock_funs(exclusive_lock, all),
+ fun_loop(Fun, OneExclusiveLocks, OneExclusiveLocks),
+ fun_loop(Fun, AllExclusiveLocks, AllExclusiveLocks),
+ fun_loop(Fun, OneExclusiveLocks, AllExclusiveLocks),
+ fun_loop(Fun, AllExclusiveLocks, OneExclusiveLocks),
+ fun_loop(Fun, OneSharedLocks, AllExclusiveLocks),
+ fun_loop(Fun, AllSharedLocks, OneExclusiveLocks),
+ ok.
+
+wait_for_lock(Pid, _Nodes, 0) ->
+ Queue = mnesia:system_info(lock_queue),
+ ?error("Timeout while waiting for lock on Pid ~p in queue ~p~n", [Pid, Queue]);
+wait_for_lock(Pid, Nodes, N) ->
+ rpc:multicall(Nodes, sys, get_status, [mnesia_locker]),
+ List = [rpc:call(Node, mnesia, system_info, [lock_queue]) || Node <- Nodes],
+ Q = lists:append(List),
+ check_q(Pid, Q, Nodes, N).
+
+check_q(Pid, [{_Oid, _Op, Pid, _Tid, _WFT} | _Tail], _Nodes, _Count) -> ok;
+check_q(Pid, [_ | Tail], Nodes, Count) -> check_q(Pid, Tail, Nodes, Count);
+check_q(Pid, [], Nodes, Count) ->
+ timer:sleep(500),
+ wait_for_lock(Pid, Nodes, Count - 1).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+advanced_queue_conflict(suite) -> [];
+advanced_queue_conflict(Config) when is_list(Config) ->
+ [Node1] = ?acquire_nodes(1, Config),
+ Tab = advanced_queue_conflict,
+ create_conflict_table(Tab, [Node1]),
+ OneRec = {Tab, 3, 3},
+ OneOid = {Tab, 3},
+ OtherRec = {Tab, 4, 4},
+ OtherOid = {Tab, 4},
+
+ %% Start four transactions
+ {success, [D, C, B, A]} = ?start_activities(lists:duplicate(4, Node1)),
+ ?start_transactions([D, C, B, A]),
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+
+ %% Acquire some locks
+ A ! fun() -> mnesia:write(OneRec) end,
+ ?match_receive({A, ok}),
+ A ! fun() -> mnesia:read(OneOid) end,
+ ?match_receive({A, [OneRec]}),
+
+ B ! fun() -> mnesia:write(OtherRec) end,
+ ?match_receive({B, ok}),
+ B ! fun() -> mnesia:read(OneOid) end,
+ ?match_receive(timeout),
+
+ C ! fun() -> mnesia:read(OtherOid) end,
+ ?match_receive(timeout),
+ D ! fun() -> mnesia:wread(OtherOid) end,
+ ?match_receive(timeout),
+
+ %% and release them in a certain order
+ A ! end_trans,
+ ?match_multi_receive([{A, {atomic, end_trans}}, {B, [OneRec]}]),
+ B ! end_trans,
+ ?match_multi_receive([{B, {atomic, end_trans}}, {C, [OtherRec]}]),
+ C ! end_trans,
+ ?match_multi_receive([{C, {atomic, end_trans}}, {D, [OtherRec]}]),
+ D ! end_trans,
+ ?match_receive({D, {atomic, end_trans}}),
+
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+simple_deadlock_conflict(suite) -> [];
+simple_deadlock_conflict(Config) when is_list(Config) ->
+ [Node1] = ?acquire_nodes(1, Config),
+ Tab = simple_deadlock_conflict,
+ create_conflict_table(Tab, [Node1]),
+ Rec = {Tab, 4, 4},
+ Oid = {Tab, 4},
+
+ %% Start two transactions
+ {success, [B, A]} = ?start_activities(lists:duplicate(2, Node1)),
+ mnesia_test_lib:start_transactions([B, A], 0), % A is newest
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+
+ B ! fun() -> mnesia:write(Rec) end,
+ ?match_receive({B, ok}),
+ A ! fun() -> mnesia:read(Oid) end,
+ ?match_receive({A, {aborted, nomore}}),
+ B ! end_trans,
+ ?match_receive({B, {atomic, end_trans}}),
+
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+advanced_deadlock_conflict(suite) -> [];
+advanced_deadlock_conflict(Config) when is_list(Config) ->
+ [Node1, Node2] = ?acquire_nodes(2, Config),
+ Tab = advanced_deadlock_conflict,
+ create_conflict_table(Tab, [Node2]),
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ Rec = {Tab, 4, 4},
+ Oid = {Tab, 4},
+
+ %% Start two transactions
+ {success, [B, A]} = ?start_activities([Node1, Node2]),
+ mnesia_test_lib:start_sync_transactions([B, A], 0), % A is newest
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+
+ B ! fun() -> mnesia:write(Rec) end,
+ ?match_receive({B, ok}),
+ A ! fun() -> mnesia:read(Oid) end,
+ ?match_receive({A, {aborted, nomore}}),
+ B ! end_trans,
+ ?match_receive({B, {atomic, end_trans}}),
+
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+ ok.
+
+one_oid(Tab) -> {Tab, 1}.
+other_oid(Tab) -> {Tab, 2}.
+
+create_conflict_table(Tab, Nodes) ->
+ ?match({atomic, ok}, mnesia:create_table([{name, Tab},
+ {ram_copies, Nodes},
+ {attributes, [key, val]},
+ {index, [val]}
+ ])),
+ ?match([], mnesia_test_lib:sync_tables(Nodes, [Tab])),
+ init_conflict_table(Tab).
+
+init_conflict_table(Tab) ->
+ Recs = mnesia:dirty_match_object({Tab, '_', '_'}),
+ lists:foreach(fun(R) -> mnesia:dirty_delete_object(R) end, Recs),
+ Keys = [one_oid(Tab), other_oid(Tab)],
+ [mnesia:dirty_write({T, K, K}) || {T, K} <- Keys].
+
+%% Apply Fun for each X and Y
+fun_loop(Fun, Xs, Ys) ->
+ lists:foreach(fun(X) -> lists:foreach(fun(Y) -> do_fun(Fun, X, Y) end, Ys) end, Xs).
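+%% For example, fun_loop(F, [L1, L2], [R1, R2]) runs F(L1, R1), F(L1, R2),
+%% F(L2, R1) and F(L2, R2), each call in a fresh linked process (see do_fun/4)
+%% that asserts empty lock tables before and after the call.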
+
+do_fun(Fun, X, Y) ->
+ Pid = spawn_link(?MODULE, do_fun, [self(), Fun, X, Y]),
+ receive
+ {done_fun, Pid} -> done_fun
+ end.
+
+do_fun(Monitor, Fun, X, Y) ->
+ ?log("{do_fun ~p~n", [[Fun, X, Y]]),
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+ Fun(X, Y),
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+ unlink(Monitor),
+ Monitor ! {done_fun, self()},
+ exit(done_fun).
+
+%% Returns a list of fun's
+lock_funs(no_lock, one) ->
+ [
+ fun(Oid) -> mnesia:dirty_read(Oid) end,
+ fun({Tab, Key}) -> mnesia:dirty_write({Tab, Key, Key}) end,
+ fun({Tab, Key}) -> mnesia:dirty_write({Tab, Key, Key}),
+ mnesia:dirty_update_counter({Tab, Key}, 0) end,
+ fun(Oid) -> mnesia:dirty_delete(Oid) end,
+ fun({Tab, Key}) -> mnesia:dirty_delete_object({Tab, Key, Key}) end,
+ fun({Tab, Key}) -> mnesia:dirty_match_object({Tab, Key, Key}) end,
+ fun({Tab, Key}) -> mnesia:dirty_index_match_object({Tab, Key, Key}, val) end,
+ fun({Tab, Key}) -> mnesia:dirty_index_read(Tab, Key, val) end,
+ fun({Tab, Key}) -> mnesia:dirty_index_match_object({Tab, '_', Key}, val) end
+ ];
+lock_funs(no_lock, all) ->
+ [
+ fun({Tab, _}) -> mnesia:dirty_match_object({Tab, '_', '_'}) end,
+ fun({Tab, _}) -> slot_iter(Tab) end,
+ fun({Tab, _}) -> key_iter(Tab) end
+ ];
+lock_funs(shared_lock, one) ->
+ [
+ fun(Oid) -> mnesia:read(Oid) end,
+ fun({Tab, Key}) ->
+ init_conflict_table(Tab),
+ mnesia:dirty_delete(other_oid(Tab)),
+ mnesia:match_object({Tab, Key, Key}) end
+ ];
+lock_funs(shared_lock, all) ->
+ [
+ fun({Tab, _}) -> mnesia:read_lock_table(Tab) end,
+ fun({Tab, Key}) -> mnesia:match_object({Tab, '_', Key}) end,
+ fun({Tab, _}) -> mnesia:match_object({Tab, '_', '_'}) end,
+ fun({Tab, _}) -> mnesia:all_keys(Tab) end,
+ fun({Tab, Key}) -> mnesia:index_match_object({Tab, '_', Key}, val) end,
+ fun({Tab, Key}) -> mnesia:index_read(Tab, Key, val) end
+ ];
+lock_funs(exclusive_lock, one) ->
+ [
+ fun(Oid) -> mnesia:wread(Oid) end,
+ fun({Tab, Key}) -> mnesia:write({Tab, Key, Key}) end,
+ fun(Oid) -> mnesia:delete(Oid) end,
+ fun({Tab, Key}) -> mnesia:delete_object({Tab, Key, Key}) end,
+ fun({Tab, Key}) -> mnesia:s_write({Tab, Key, Key}) end,
+ fun(Oid) -> mnesia:s_delete(Oid) end,
+ fun({Tab, Key}) -> mnesia:s_delete_object({Tab, Key, Key}) end
+ ];
+lock_funs(exclusive_lock, all) ->
+ [
+ fun({Tab, _}) -> mnesia:write_lock_table(Tab) end
+ ];
+lock_funs(Compatibility, any_granularity) ->
+ lists:append([lock_funs(Compatibility, Granularity) ||
+ Granularity <- [one, all]]);
+lock_funs(any_lock, Granularity) ->
+ lists:append([lock_funs(Compatibility, Granularity) ||
+ Compatibility <- [no_lock, shared_lock, exclusive_lock]]).
+
+slot_iter(Tab) ->
+ slot_iter(Tab, mnesia:dirty_slot(Tab, 0), 1).
+slot_iter(_Tab, '$end_of_table', _) ->
+ [];
+slot_iter(Tab, Recs, Slot) ->
+ Recs ++ slot_iter(Tab, mnesia:dirty_slot(Tab, Slot), Slot+1).
+
+key_iter(Tab) ->
+ key_iter(Tab, mnesia:dirty_first(Tab)).
+key_iter(_Tab, '$end_of_table') ->
+ [];
+key_iter(Tab, Key) ->
+ [Key | key_iter(Tab, mnesia:dirty_next(Tab, Key))].
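+
+%% Both slot_iter/1 and key_iter/1 scan the whole table with dirty operations
+%% (mnesia:dirty_slot/2 and mnesia:dirty_first/1 plus mnesia:dirty_next/2),
+%% i.e. without taking any locks, which is why they appear under
+%% lock_funs(no_lock, all).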
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+lock_burst(suite) -> [];
+lock_burst(Config) when is_list(Config) ->
+ [Node1] = ?acquire_nodes(1, Config),
+ Tab = burst,
+ ?match({atomic, ok}, mnesia:create_table(Tab,
+ [{attributes, [a, b]},
+ {ram_copies, [Node1]}])),
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+ ?match(ok, burst_em(Tab, 1000)),
+ ?match([{burst,1,1000}], mnesia:dirty_read(Tab,1)),
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+ ok.
+
+burst_em(Tab, N) ->
+ spawn_link(?MODULE, burst_counter, [self(), Tab, N]),
+ receive
+ burst_counter_done -> ok
+ end.
+
+burst_counter(Monitor, Tab, N) when N > 0 ->
+ ?match(ok, burst_gen(Tab, N, self())),
+ Monitor ! burst_receiver(N).
+
+burst_receiver(0) ->
+ burst_counter_done;
+burst_receiver(N) ->
+ receive
+ burst_incr_done ->
+ burst_receiver(N-1)
+ end.
+
+burst_gen(_, 0, _) ->
+ ok;
+burst_gen(Tab, N, Father) when is_integer(N), N > 0 ->
+ spawn_link(?MODULE, burst_incr, [Tab, Father]),
+ burst_gen(Tab, N-1, Father).
+
+burst_incr(Tab, Father) ->
+ Fun = fun() ->
+ Val =
+ case mnesia:read({Tab, 1}) of
+ [{Tab, 1, V}] -> V;
+ [] -> 0
+ end,
+ mnesia:write({Tab, 1, Val+1})
+ end,
+ ?match({atomic, ok}, mnesia:transaction(Fun)),
+ Father ! burst_incr_done.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
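+%% A sticky lock stays with the node that acquired it (via mnesia:s_write/1 and
+%% friends) after the transaction has committed, so later write locks from that
+%% node can be granted locally; presumably it is only given up when another
+%% node needs the lock. get_sticky/0 below peeks into mnesia_locker's
+%% mnesia_sticky_locks table, so [{Tab, Node}] means that Node currently holds
+%% the sticky lock on Tab and [] means that no sticky lock is set.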
+sticky_locks(doc) ->
+ ["Simple Tests of sticky locks"];
+
+sticky_locks(suite) ->
+ [
+ basic_sticky_functionality
+ %% Needs to be expanded a little bit further
+ ].
+
+basic_sticky_functionality(suite) -> [];
+basic_sticky_functionality(Config) when is_list(Config) ->
+ [N1, N2] = Nodes = ?acquire_nodes(2, Config),
+ Tab = basic_table,
+ Storage = mnesia_test_lib:storage_type(disc_copies, Config),
+ ?match({atomic, ok}, mnesia:create_table(Tab, [{Storage, Nodes}])),
+ ?match({atomic, ok}, mnesia:create_table(sync, [{ram_copies, Nodes}])),
+ Trans1 = fun() ->
+ ?match(ok, mnesia:s_write({Tab, 1, 2})),
+ ?match([{Tab, 1, 2}], mnesia:read({Tab, 1})),
+ ?match(timeout, receive M -> M after 500 -> timeout end),
+ ?match(ok, mnesia:s_write({Tab, 2, 2})),
+ ?match(ok, mnesia:write({Tab, 42, 4711}))
+ end,
+ Trans2 = fun() ->
+ ?match([{Tab, 1, 2}], mnesia:read({Tab, 1})),
+ ?match(timeout, receive M -> M after 500 -> timeout end),
+ ?match(ok, mnesia:write({Tab, 1, 4711})),
+ ?match(ok, mnesia:s_write({Tab, 2, 4})),
+ ?match(ok, mnesia:delete({Tab, 42}))
+ end,
+ rpc:call(N1, mnesia, transaction, [Trans1]),
+ ?match([{Tab,N1}], rpc:call(N1, ?MODULE, get_sticky, [])),
+ ?match([{Tab,N1}], rpc:call(N2, ?MODULE, get_sticky, [])),
+
+ rpc:call(N2, mnesia, transaction, [Trans2]),
+ ?match([], rpc:call(N1, ?MODULE, get_sticky, [])),
+ ?match([], rpc:call(N2, ?MODULE, get_sticky, [])),
+
+ Slock = fun() -> mnesia:read({sync,sync}),get_sticky() end,
+ ?match({atomic, [{Tab,1, 4711}]}, mnesia:transaction(fun() -> mnesia:read({Tab, 1}) end)),
+ ?match({atomic, [{Tab,2, 4}]}, mnesia:transaction(fun() -> mnesia:read({Tab, 2}) end)),
+ ?match({atomic, [{Tab,N1}]}, rpc:call(N1, mnesia, transaction,
+ [fun() -> mnesia:s_write({Tab, 1, 3}),Slock() end])),
+ ?match([{Tab,N1}], rpc:call(N2, ?MODULE, get_sticky, [])),
+
+ ?match({atomic,[]}, rpc:call(N2, mnesia, transaction,
+ [fun() -> mnesia:s_write({Tab, 1, 4}),Slock() end])),
+
+ ?match([], rpc:call(N1, ?MODULE, get_sticky, [])),
+ ?match([], rpc:call(N2, ?MODULE, get_sticky, [])),
+
+ ?match({atomic,[{Tab,N2}]}, rpc:call(N2, mnesia, transaction,
+ [fun() -> mnesia:s_write({Tab, 1, 4}),Slock() end])),
+
+ ?match({atomic,[]}, rpc:call(N1, mnesia, transaction,
+ [fun() -> mnesia:s_write({Tab, 1, 5}),Slock() end])),
+ ?match({atomic,[{Tab,N1}]}, rpc:call(N1, mnesia, transaction,
+ [fun() -> mnesia:s_write({Tab, 1, 5}),Slock() end])),
+ ?match({atomic,[]}, rpc:call(N2, mnesia, transaction,
+ [fun() -> mnesia:s_write({Tab, 1, 6}),Slock() end])),
+ ?match({atomic,[{Tab,N2}]}, rpc:call(N2, mnesia, transaction,
+ [fun() -> mnesia:s_write({Tab, 1, 7}),Slock() end])),
+
+ ?match([{Tab,N2}], get_sticky()),
+ ?match({atomic, [{Tab,1, 7}]}, mnesia:transaction(fun() -> mnesia:read({Tab, 1}) end)),
+ ?match([{Tab,N2}], get_sticky()),
+ ?match({atomic, [{Tab,2, 4}]}, mnesia:transaction(fun() -> mnesia:read({Tab, 2}) end)),
+ ?match([{Tab,N2}], get_sticky()),
+ ?match({atomic,[{Tab,N2}]}, rpc:call(N2, mnesia, transaction,
+ [fun() -> mnesia:s_write({Tab, 1, 6}),Slock() end])),
+ ?match([{Tab,N2}], get_sticky()),
+ ?match({atomic, [{Tab,1, 6}]}, mnesia:transaction(fun() -> mnesia:read({Tab, 1}) end)),
+ ?match([{Tab,N2}], get_sticky()),
+ ?match({atomic, [{Tab,2, 4}]}, mnesia:transaction(fun() -> mnesia:read({Tab, 2}) end)),
+ ?match([{Tab,N2}], get_sticky()),
+ ?verify_mnesia(Nodes, []).
+
+get_sticky() ->
+ mnesia_locker ! {get_table, self(), mnesia_sticky_locks},
+ receive {mnesia_sticky_locks, Locks} -> Locks end.
+
+get_held() ->
+ mnesia_locker ! {get_table, self(), mnesia_held_locks},
+ receive {mnesia_held_locks, Locks} -> Locks end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+unbound_locking(suite) ->
+ [unbound1, unbound2];
+
+unbound_locking(doc) ->
+ ["Check that mnesia handles unbound key variables, GPRS bug."
+ "Ticket id: OTP-3342"].
+
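+%% The tests below match with the pattern {ul, {key, {'_', '$0'}}, '_'}, i.e.
+%% the key is only partially bound, so mnesia cannot lock a single record and
+%% presumably falls back to a coarser (table) lock. The tests verify that such
+%% a match_object call either queues up behind, or is restarted on a cyclic
+%% conflict with, a concurrent writer holding a lock on {ul, {key, {17,42}}}.
+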
+unbound1(suite) -> [];
+unbound1(Config) when is_list(Config) ->
+ [Node1] = ?acquire_nodes(1, Config),
+
+ ?match({atomic, ok}, mnesia:create_table(ul, [])),
+
+ Tester = self(),
+ Write = fun() ->
+ mnesia:write({ul, {key, {17,42}}, val}),
+ ?log("~p Got write lock waiting...~n", [self()]),
+ Tester ! continue,
+ receive
+ continue ->
+ ok
+ end,
+ ?log("..continuing~n", []),
+ ok
+ end,
+
+ {success, [A]} = ?start_activities([Node1]),
+ ?start_transactions([A]),
+ A ! Write,
+
+ receive continue -> ok end,
+
+ Match = fun() ->
+ case catch mnesia:match_object({ul, {key, {'_', '$0'}}, '_'}) of
+ {'EXIT', What} -> %% Cyclic first time
+ ?log("Cyclic Restarting~n", []),
+ A ! continue,
+ A ! end_trans,
+ exit(What);
+ Res ->
+ ?log("Got match log ~p...~n", [Res]),
+ Res
+ end
+ end,
+ ?match({atomic, [{ul,{key,{17,42}},val}]}, mnesia:transaction(Match)),
+
+ ?match_receive({A, ok}),
+ ?match_receive({A, {atomic, end_trans}}),
+ ok.
+
+unbound2(suite) -> [];
+unbound2(Config) when is_list(Config) ->
+ [Node1] = ?acquire_nodes(1, Config),
+
+ ?match({atomic, ok}, mnesia:create_table(ul, [])),
+
+ {success, [B, A]} = ?start_activities([Node1, Node1]),
+
+ Me = self(),
+
+ Write = fun() ->
+ mnesia:write({ul, {key, {17,42}}, val}),
+ ?log("~p Got write lock waiting... Tid ~p ~n",
+ [self(), get(mnesia_activity_state)]),
+ Me ! ok_lock,
+ receive
+ continue ->
+ ok
+ end,
+ ?log("..continuing~n", []),
+ ok
+ end,
+
+ Match = fun() ->
+ receive
+ continueB ->
+ ?log("~p, moving on TID ~p~n",
+ [self(), get(mnesia_activity_state)]),
+ Me ! {self(), continuing}
+ end,
+ case catch mnesia:match_object({ul, {key, {'_', '$0'}},
+ '_'}) of
+ {'EXIT', What} -> %% Cyclic first time
+ ?log("Cyclic Restarting ~p ~n", [What]),
+ {should_not_happen,What};
+ Res ->
+ ?log("Got match log ~p...~n", [Res]),
+ Res
+ end
+ end,
+
+ B ! fun() -> mnesia:transaction(Match) end,
+ timer:sleep(100), %% Let B be started first..
+ A ! fun() -> mnesia:transaction(Write) end,
+
+ receive ok_lock -> ok end,
+
+ B ! continueB,
+ ?match_receive({B, continuing}),
+
+ %% B should now be in lock queue.
+ A ! continue,
+ ?match_receive({A, {atomic, ok}}),
+ ?match_receive({B, {atomic, [{ul,{key,{17,42}},val}]}}),
+ ok.
+
+receiver() ->
+ receive
+ {_Pid, begin_trans} ->
+ receiver();
+ Else ->
+ Else
+ after
+ 10000 ->
+ timeout
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+admin_conflict(doc) ->
+ ["Provoke lock conflicts with schema transactions and checkpoints."];
+admin_conflict(suite) ->
+ [
+ create_table,
+ delete_table,
+ move_table_copy,
+ add_table_index,
+ del_table_index,
+ transform_table,
+ snmp_open_table,
+ snmp_close_table,
+ change_table_copy_type,
+ change_table_access,
+ add_table_copy,
+ del_table_copy,
+ dump_tables,
+ extra_admin_tests
+ ].
+
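+%% Most of the cases below follow the same pattern: transaction A takes a lock
+%% on a row of the test table, an administrative operation is spawned via op/4
+%% and is expected to queue behind that lock (?match_receive(timeout)), and
+%% once A ends with end_trans the operation is expected to finish with
+%% {atomic, ok}. The create_table case is the exception: creating an unrelated
+%% table is not blocked by A's lock.
+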
+create_table(suite) -> [];
+create_table(Config) when is_list(Config) ->
+ [ThisNode, Node2] = ?acquire_nodes(2, Config),
+ Tab = c_t_tab,
+ Def = [{ram_copies, [ThisNode]}, {attributes, [key, attr1, attr2]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ insert(Tab, 50),
+ {success, [A]} = ?start_activities([ThisNode]),
+ mnesia_test_lib:start_sync_transactions([A], 0),
+
+ A ! fun() -> mnesia:write({Tab, 1, 1, updated}) end,
+ ?match_receive({A, ok}), %% A is executed
+
+ DiskMaybe = mnesia_test_lib:storage_type(disc_copies, Config),
+
+ Pid = spawn_link(?MODULE, op, [self(), mnesia, create_table,
+ [test_tab1, [{DiskMaybe, [ThisNode]}]]]),
+ ?match_multi_receive([{Pid, {atomic, ok}},
+ {'EXIT', Pid, normal}]), %% No Locks! op should be exec.
+
+ Pid2 = spawn_link(?MODULE, op, [self(), mnesia, create_table,
+ [test_tab2, [{ram_copies, [Node2]}]]]),
+
+ ?match_multi_receive([{Pid2, {atomic, ok}},
+ {'EXIT', Pid2, normal}]), %% No Locks! op should be exec.
+
+ A ! end_trans,
+ ?match_receive({A,{atomic,end_trans}}),
+
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+ ok.
+
+delete_table(suite) -> [];
+delete_table(Config) when is_list(Config) ->
+ [ThisNode, Node2] = ?acquire_nodes(2, Config),
+ Tab = d_t_tab,
+ Def = [{ram_copies, [ThisNode, Node2]}, {attributes, [key, attr1, attr2]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ insert(Tab, 50),
+ {success, [A]} = ?start_activities([ThisNode]),
+ mnesia_test_lib:start_sync_transactions([A], 0),
+
+ A ! fun() -> mnesia:read({Tab, 1}) end,
+ ?match_receive({A, [{Tab, 1, 1, 0}]}), %% A is executed
+
+ Pid = spawn_link(?MODULE, op, [self(), mnesia, delete_table,
+ [Tab]]),
+
+ ?match_receive(timeout), %% op waits for locks occupied by A
+
+ A ! end_trans, %% Kill A, locks should be released
+ ?match_receive({A,{atomic,end_trans}}),
+
+ receive
+ Msg -> ?match({Pid, {atomic, ok}}, Msg)
+ after
+ timer:seconds(20) -> ?error("Operation timed out", [])
+ end,
+
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+ ok.
+
+move_table_copy(suite) -> [];
+move_table_copy(Config) when is_list(Config) ->
+ [ThisNode, Node2] = ?acquire_nodes(2, Config),
+ Tab = m_t_c_tab,
+ Def = [{ram_copies, [ThisNode]}, {attributes, [key, attr1, attr2]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ insert(Tab, 50),
+ {success, [A]} = ?start_activities([ThisNode]),
+ mnesia_test_lib:start_sync_transactions([A], 0),
+
+ A ! fun() -> mnesia:write({Tab, 1, 2, 3}) end,
+ ?match_receive({A, ok}), %% A is executed
+
+ Pid = spawn_link(?MODULE, op, [self(), mnesia, move_table_copy,
+ [Tab, ThisNode, Node2]]),
+
+ ?match_receive(timeout), %% op waits for locks occupied by A
+
+ A ! end_trans, %% Kill A, locks should be released
+ ?match_receive({A,{atomic,end_trans}}),
+
+ receive
+ Msg -> ?match({Pid, {atomic, ok}}, Msg)
+ after
+ timer:seconds(20) -> ?error("Operation timed out", [])
+ end,
+
+ timer:sleep(500), %% Don't know how to sync this !!!
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ sys:get_status(whereis(mnesia_tm)), % Explicit sync, release locks is async
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+ ok.
+
+add_table_index(suite) -> [];
+add_table_index(Config) when is_list(Config) ->
+ [ThisNode, _Node2] = ?acquire_nodes(2, Config ++ [{tc_timeout, 60000}]),
+ Tab = a_t_i_tab,
+ Def = [{ram_copies, [ThisNode]}, {attributes, [key, attr1, attr2]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ insert(Tab, 50),
+ {success, [A]} = ?start_activities([ThisNode]),
+ mnesia_test_lib:start_sync_transactions([A], 0),
+
+ A ! fun() -> mnesia:write({Tab, 1, 1, updated}) end,
+ ?match_receive({A, ok}), %% A is executed
+
+ Pid = spawn_link(?MODULE, op, [self(), mnesia,
+ add_table_index, [Tab, attr1]]),
+
+ ?match_receive(timeout), %% op waits for locks occupied by A
+
+ A ! end_trans, %% Kill A, locks should be released
+ ?match_receive({A,{atomic,end_trans}}),
+
+ receive
+ Msg -> ?match({Pid, {atomic, ok}}, Msg)
+ after
+ timer:seconds(20) -> ?error("Operation timed out", [])
+ end,
+
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+ ok.
+
+del_table_index(suite) -> [];
+del_table_index(Config) when is_list(Config) ->
+ [ThisNode, _Node2] = ?acquire_nodes(2, Config),
+ Tab = d_t_i_tab,
+ Def = [{ram_copies, [ThisNode]}, {attributes, [key, attr1, attr2]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ insert(Tab, 50),
+ ?match({atomic, ok}, mnesia:add_table_index(Tab, attr1)),
+
+ {success, [A]} = ?start_activities([ThisNode]),
+ mnesia_test_lib:start_sync_transactions([A], 0),
+
+ A ! fun() -> mnesia:write({Tab, 51, 51, attr2}) end,
+ ?match_receive({A, ok}), %% A is executed
+
+ Pid = spawn_link(?MODULE, op, [self(), mnesia, del_table_index,
+ [Tab, attr1]]),
+
+ ?match_receive(timeout), %% op waits for locks occupied by A
+
+ A ! end_trans, %% Kill A, locks should be released
+ ?match_receive({A,{atomic,end_trans}}),
+ %% Locks released! op should be exec.
+ receive
+ Msg -> ?match({Pid, {atomic, ok}}, Msg)
+ after
+ timer:seconds(20) -> ?error("Operation timed out", [])
+ end,
+
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+ ok.
+
+transform_table(suite) -> [];
+transform_table(Config) when is_list(Config) ->
+ [ThisNode, Node2] = ?acquire_nodes(2, Config),
+ Tab = t_t_tab,
+ Def = [{ram_copies, [ThisNode, Node2]}, {attributes, [key, attr1, attr2]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ insert(Tab, 50),
+ {success, [A]} = ?start_activities([ThisNode]),
+ mnesia_test_lib:start_sync_transactions([A], 0),
+
+ A ! fun() -> mnesia:read({Tab, 1}) end,
+ ?match_receive({A, [{Tab, 1, 1, 0}]}), %% A is executed
+
+ Transform = fun({Table, Key, Attr1, Attr2}) -> % Need to do a transform
+ {Table, Key, {Attr1, Attr2}} end,
+ Pid = spawn_link(?MODULE, op, [self(), mnesia, transform_table,
+ [Tab, Transform, [key, attr1]]]),
+ ?match_receive(timeout), %% op waits for locks occupied by A
+
+ A ! end_trans, %% Kill A, locks should be released
+ ?match_receive({A,{atomic,end_trans}}),
+
+ receive
+ Msg -> ?match({Pid, {atomic, ok}}, Msg)
+ after
+ timer:seconds(20) -> ?error("Operation timed out", [])
+ end,
+
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+ ok.
+
+snmp_open_table(suite) -> [];
+snmp_open_table(Config) when is_list(Config) ->
+ [ThisNode, _Node2] = ?acquire_nodes(2, Config),
+ Tab = s_o_t_tab,
+ Def = [{ram_copies, [ThisNode]}, {attributes, [key, attr1, attr2]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ insert(Tab, 50),
+ {success, [A]} = ?start_activities([ThisNode]),
+ mnesia_test_lib:start_sync_transactions([A], 0),
+
+ A ! fun() -> mnesia:write({Tab, 1, 1, 100}) end,
+ ?match_receive({A, ok}), %% A is executed
+
+ Pid = spawn_link(?MODULE, op, [self(), mnesia, snmp_open_table,
+ [Tab, [{key, integer}]]]),
+
+ ?match_receive(timeout), %% op waits for locks occupied by A
+
+ A ! end_trans, %% Kill A, locks should be released
+ ?match_receive({A,{atomic,end_trans}}),
+
+ %% Locks released! op should be exec. Can take a while (that's the timeout)
+ receive
+ Msg -> ?match({Pid, {atomic, ok}}, Msg)
+ after
+ timer:seconds(20) -> ?error("Operation timed out", [])
+ end,
+
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+ ok.
+
+snmp_close_table(suite) -> [];
+snmp_close_table(Config) when is_list(Config) ->
+ [ThisNode, _Node2] = ?acquire_nodes(2, Config),
+ Tab = s_c_t_tab,
+ Def = [{ram_copies, [ThisNode]}, {attributes, [key, attr1, attr2]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ ?match({atomic, ok}, mnesia:snmp_open_table(Tab, [{key, integer}])),
+ insert(Tab, 50),
+ {success, [A]} = ?start_activities([ThisNode]),
+ mnesia_test_lib:start_sync_transactions([A], 0),
+
+ A ! fun() -> mnesia:write({Tab, 1, 1, 100}) end,
+ ?match_receive({A, ok}), %% A is executed
+
+ Pid = spawn_link(?MODULE, op, [self(), mnesia, snmp_close_table, [Tab]]),
+ ?match_receive(timeout), %% op waits for locks occupied by A
+
+ A ! end_trans, %% Kill A, locks should be released
+ ?match_receive({A,{atomic,end_trans}}),
+
+ %% Locks released! op should be exec. Can take a while (that's the timeout)
+ receive
+ Msg -> ?match({Pid, {atomic, ok}}, Msg)
+ after
+ timer:seconds(20) -> ?error("Operation timed out", [])
+ end,
+
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+ ok.
+
+change_table_copy_type(suite) -> [];
+change_table_copy_type(Config) when is_list(Config) ->
+ [ThisNode, _Node2] = ?acquire_nodes(2, Config),
+ Tab = c_t_c_t_tab,
+ Def = [{ram_copies, [ThisNode]}, {attributes, [key, attr1, attr2]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ insert(Tab, 50),
+ {success, [A]} = ?start_activities([ThisNode]),
+ mnesia_test_lib:start_sync_transactions([A], 0),
+ A ! fun() -> mnesia:write({Tab, 1, 1, updated}) end,
+ ?match_receive({A, ok}), %% A is executed
+
+ Pid = spawn_link(?MODULE, op, [self(), mnesia, change_table_copy_type,
+ [Tab, ThisNode, disc_copies]]),
+
+ ?match_receive(timeout), %% op waits for locks occupied by A
+
+ A ! end_trans, %% Kill A, locks should be released
+ ?match_receive({A,{atomic,end_trans}}),
+
+ receive
+ Msg -> ?match({Pid, {atomic, ok}}, Msg)
+ after
+ timer:seconds(20) -> ?error("Operation timed out", [])
+ end,
+
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+ ok.
+
+change_table_access(suite) -> [];
+change_table_access(Config) when is_list(Config) ->
+ [ThisNode, _Node2] = ?acquire_nodes(2, Config),
+ Tab = c_t_a_tab,
+ Def = [{ram_copies, [ThisNode]}, {attributes, [key, attr1, attr2]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ insert(Tab, 50),
+ {success, [A]} = ?start_activities([ThisNode]),
+ mnesia_test_lib:start_sync_transactions([A], 0),
+
+ A ! fun() -> mnesia:write({Tab, 1, 1, updated}) end,
+ ?match_receive({A, ok}), %% A is executed
+
+ Pid = spawn_link(?MODULE, op, [self(), mnesia, change_table_access_mode,
+ [Tab, read_only]]),
+
+
+ ?match_receive(timeout), %% op waits for locks occupied by A
+
+ A ! end_trans, %% Kill A, locks should be released
+ ?match_receive({A,{atomic,end_trans}}),
+
+ receive
+ Msg -> ?match({Pid, {atomic, ok}}, Msg)
+ after
+ timer:seconds(20) -> ?error("Operation timed out", [])
+ end,
+
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+ ok.
+
+add_table_copy(suite) -> [];
+add_table_copy(Config) when is_list(Config) ->
+ [ThisNode, Node2] = ?acquire_nodes(2, Config),
+ Tab = a_t_c_tab,
+ Def = [{ram_copies, [ThisNode]}, {attributes, [key, attr1, attr2]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ insert(Tab, 50),
+ {success, [A]} = ?start_activities([ThisNode]),
+ mnesia_test_lib:start_sync_transactions([A], 0),
+
+ A ! fun() -> mnesia:write({Tab, 1, 1, updated}) end,
+ ?match_receive({A, ok}), %% A is executed
+
+ Pid = spawn_link(?MODULE, op, [self(), mnesia, add_table_copy,
+ [Tab, Node2, ram_copies]]),
+
+ ?match_receive(timeout), %% op waits for locks occupied by A
+
+ A ! end_trans, %% Kill A, locks should be released
+ ?match_receive({A,{atomic,end_trans}}),
+
+ receive
+ Msg -> ?match({Pid, {atomic, ok}}, Msg)
+ after
+ timer:seconds(20) -> ?error("Operation timed out", [])
+ end,
+
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+ ok.
+
+del_table_copy(suite) -> [];
+del_table_copy(Config) when is_list(Config) ->
+ [ThisNode, Node2] = ?acquire_nodes(2, Config),
+ Tab = d_t_c_tab,
+ Def = [{ram_copies, [ThisNode, Node2]}, {attributes, [key, attr1, attr2]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ insert(Tab, 50),
+ {success, [A]} = ?start_activities([ThisNode]),
+ mnesia_test_lib:start_sync_transactions([A], 0),
+ A ! fun() -> mnesia:write({Tab, 1, 2, 5}) end,
+ ?match_receive({A, ok}), %% A is executed
+
+ Pid = spawn_link(?MODULE, op, [self(), mnesia, del_table_copy,
+ [Tab, ThisNode]]),
+
+ ?match_receive(timeout), %% op waits for locks occupied by A
+ A ! end_trans, %% Kill A, locks should be released
+ ?match_receive({A, {atomic,end_trans}}),
+
+ ?match_receive({Pid, {atomic, ok}}),
+ ?match_receive({'EXIT', Pid, normal}),
+
+ timer:sleep(500), %% Don't know how to sync this !!!
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ sys:get_status(whereis(mnesia_tm)), % Explicit sync, release locks is async
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+ ok.
+
+dump_tables(suite) -> [];
+dump_tables(Config) when is_list(Config) ->
+ [ThisNode, Node2] = ?acquire_nodes(2, Config),
+ Tab = dump_t_tab,
+ Def = [{ram_copies, [ThisNode, Node2]}, {attributes, [key, attr1, attr2]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ insert(Tab, 50),
+ {success, [A]} = ?start_activities([ThisNode]),
+ mnesia_test_lib:start_sync_transactions([A], 0),
+ A ! fun() -> mnesia:write({Tab, 1, 1, updated}) end,
+ ?match_receive({A, ok}), %% A is executed
+
+ Pid = spawn_link(?MODULE, op, [self(), mnesia, dump_tables,
+ [[Tab]]]),
+
+ ?match_receive(timeout), %% op waits for locks occupied by A
+
+ A ! end_trans, %% Kill A, locks should be released
+ ?match_receive({A,{atomic,end_trans}}),
+
+ receive
+ Msg -> ?match({Pid, {atomic, ok}}, Msg)
+ after
+ timer:seconds(20) -> ?error("Operation timed out", [])
+ end,
+
+ sys:get_status(whereis(mnesia_locker)), % Explicit sync, release locks is async
+ ?match([], mnesia:system_info(held_locks)),
+ ?match([], mnesia:system_info(lock_queue)),
+ ok.
+
+op(Father, Mod, Fun, Args) ->
+ Res = apply(Mod, Fun, Args),
+ Father ! {self(), Res}.
+
+insert(_Tab, 0) -> ok;
+insert(Tab, N) when N > 0 ->
+ ok = mnesia:sync_dirty(fun() -> mnesia:write({Tab, N, N, 0}) end),
+ insert(Tab, N-1).
+
+extra_admin_tests(suite) ->
+ [del_table_copy_1,
+ del_table_copy_2,
+ del_table_copy_3,
+ add_table_copy_1,
+ add_table_copy_2,
+ add_table_copy_3,
+ add_table_copy_4,
+ move_table_copy_1,
+ move_table_copy_2,
+ move_table_copy_3,
+ move_table_copy_4].
+
+update_own(Tab, Key, Acc) ->
+ Update =
+ fun() ->
+ Res = mnesia:read({Tab, Key}),
+ case Res of
+ [{Tab, Key, Extra, Acc}] ->
+ mnesia:write({Tab,Key,Extra, Acc+1});
+ Val ->
+ {read, Val, {acc, Acc}}
+ end
+ end,
+ receive
+ {Pid, quit} -> Pid ! {self(), Acc}
+ after
+ 0 ->
+ case mnesia:transaction(Update) of
+ {atomic, ok} ->
+ update_own(Tab, Key, Acc+1);
+ Else ->
+ ?error("Trans failed on ~p with ~p~n"
+ "Info w2read ~p w2write ~p w2commit ~p storage ~p ~n",
+ [node(),
+ Else,
+ mnesia:table_info(Tab, where_to_read),
+ mnesia:table_info(Tab, where_to_write),
+ mnesia:table_info(Tab, where_to_commit),
+ mnesia:table_info(Tab, storage_type)])
+ end
+ end.
+
+update_shared(Tab, Me, Acc) ->
+ Update =
+ fun() ->
+ W2R = mnesia:table_info(Tab, where_to_read),
+ Res = mnesia:read({Tab, 0}),
+ case Res of
+ [{Tab, Key, Extra, Val}] when element(Me, Extra) == Acc ->
+ Extra1 = setelement(Me, Extra, Acc+1),
+ Term = {Tab, Key, Extra1, Val+1},
+ ok = mnesia:write(Term),
+% ?log("At ~p: ~p w2r ~p w2w ~p ~n",
+% [node(), Term,
+% mnesia:table_info(Tab, where_to_read),
+ W2W = mnesia:table_info(Tab, where_to_write),
+ W2C = mnesia:table_info(Tab, where_to_commit),
+%% mnesia:table_info(Tab, storage_type)
+% ]),
+ {_Mod, Tid, Ts} = get(mnesia_activity_state),
+ io:format("~p ~p~n", [Tid, ets:tab2list(element(2,Ts))]),
+ {ok,Term,{W2R,W2W,W2C}};
+ Val ->
+ Info = [{acc, Acc}, {me, Me},
+ {tid, element(2, mnesia:get_activity_id())},
+ {locks, mnesia:system_info(held_locks)}],
+ {read, Val, Info}
+ end
+ end,
+ receive
+ {Pid, quit} -> Pid ! {self(), Acc}
+ after
+ 0 ->
+ case mnesia:transaction(Update) of
+ {atomic, {ok,Term,W2}} ->
+ io:format("~p:~p:(~p,~p) ~w@~w~n", [erlang:now(),node(),Me,Acc,Term,W2]),
+ update_shared(Tab, Me, Acc+1);
+ Else ->
+ ?error("Trans failed on ~p with ~p~n"
+ "Info w2read ~p w2write ~p w2commit ~p storage ~p ~n",
+ [node(),
+ Else,
+ mnesia:table_info(Tab, where_to_read),
+ mnesia:table_info(Tab, where_to_write),
+ mnesia:table_info(Tab, where_to_commit),
+ mnesia:table_info(Tab, storage_type)
+ ])
+ end
+ end.
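+
+%% update_own/3 and update_shared/3 are the background workers used by the
+%% extra admin tests: spawned by init_admin/4, they keep incrementing their
+%% counters in transactions until they receive {Pid, quit} and then reply with
+%% the final counter; verify_results/1 checks that the table contents on all
+%% three nodes agree with those counters.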
+
+init_admin(Def, N1, N2, N3) ->
+ Tab = schema_ops,
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ insert(Tab, 1002),
+
+ Pid1 = spawn_link(N1, ?MODULE, update_own, [Tab, 1, 0]),
+ Pid2 = spawn_link(N2, ?MODULE, update_own, [Tab, 2, 0]),
+ Pid3 = spawn_link(N3, ?MODULE, update_own, [Tab, 3, 0]),
+
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write({Tab, 0, {0,0,0}, 0}) end)),
+
+ Pid4 = spawn_link(N1, ?MODULE, update_shared, [Tab, 1, 0]),
+ Pid5 = spawn_link(N2, ?MODULE, update_shared, [Tab, 2, 0]),
+ Pid6 = spawn_link(N3, ?MODULE, update_shared, [Tab, 3, 0]),
+
+ {Pid1, Pid2, Pid3, Pid4, Pid5, Pid6}.
+
+verify_results({P1, P2, P3, P4, P5, P6}) ->
+ Tab = schema_ops, N1 = node(P1), N2 = node(P2), N3 = node(P3),
+
+ try
+ P1 ! {self(), quit},
+ R1 = receive {P1, Res1} -> Res1 after 9000 -> throw({timeout,P1}) end,
+ P2 ! {self(), quit},
+ R2 = receive {P2, Res2} -> Res2 after 9000 -> throw({timeout,P2}) end,
+ P3 ! {self(), quit},
+ R3 = receive {P3, Res3} -> Res3 after 9000 -> throw({timeout,P3}) end,
+
+ P4 ! {self(), quit},
+ R4 = receive {P4, Res4} -> Res4 after 9000 -> throw({timeout,P4}) end,
+ P5 ! {self(), quit},
+ R5 = receive {P5, Res5} -> Res5 after 9000 -> throw({timeout,P5}) end,
+ P6 ! {self(), quit},
+ R6 = receive {P6, Res6} -> Res6 after 9000 -> throw({timeout,P6}) end,
+
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write_lock_table(Tab) end)),
+ ?log("Should be ~p~n", [R1]),
+ ?match([{_, _, _, R1}], rpc:call(N1, mnesia, dirty_read, [{Tab, 1}])),
+ ?match([{_, _, _, R1}], rpc:call(N2, mnesia, dirty_read, [{Tab, 1}])),
+ ?match([{_, _, _, R1}], rpc:call(N3, mnesia, dirty_read, [{Tab, 1}])),
+ ?log("Should be ~p~n", [R2]),
+ ?match([{_, _, _, R2}], rpc:call(N1, mnesia, dirty_read, [{Tab, 2}])),
+ ?match([{_, _, _, R2}], rpc:call(N2, mnesia, dirty_read, [{Tab, 2}])),
+ ?match([{_, _, _, R2}], rpc:call(N3, mnesia, dirty_read, [{Tab, 2}])),
+ ?log("Should be ~p~n", [R3]),
+ ?match([{_, _, _, R3}], rpc:call(N1, mnesia, dirty_read, [{Tab, 3}])),
+ ?match([{_, _, _, R3}], rpc:call(N2, mnesia, dirty_read, [{Tab, 3}])),
+ ?match([{_, _, _, R3}], rpc:call(N3, mnesia, dirty_read, [{Tab, 3}])),
+
+ Res = R4+R5+R6,
+ ?log("Should be {~p+~p+~p}= ~p~n", [R4, R5, R6, Res]),
+ ?match([{_, _, {R4,R5,R6}, Res}], rpc:call(N1, mnesia, dirty_read, [{Tab, 0}])),
+ ?match([{_, _, {R4,R5,R6}, Res}], rpc:call(N2, mnesia, dirty_read, [{Tab, 0}])),
+ ?match([{_, _, {R4,R5,R6}, Res}], rpc:call(N3, mnesia, dirty_read, [{Tab, 0}]))
+ catch throw:{timeout, Pid} ->
+ mnesia_lib:dist_coredump(),
+ ?error("Timeout ~p ~n", [Pid])
+ end.
+
+
+get_info(Tab) ->
+ Info = mnesia:table_info(Tab, all),
+ mnesia_lib:verbose("~p~n", [Info]).
+
+del_table_copy_1(suite) -> [];
+del_table_copy_1(Config) when is_list(Config) ->
+ [_Node1, Node2, _Node3] = Nodes = ?acquire_nodes(3, Config),
+ del_table(Node2, Node2, Nodes). %Called on same Node as deleted
+del_table_copy_2(suite) -> [];
+del_table_copy_2(Config) when is_list(Config) ->
+ [Node1, Node2, _Node3] = Nodes = ?acquire_nodes(3, Config),
+ del_table(Node1, Node2, Nodes). %Called from other Node
+del_table_copy_3(suite) -> [];
+del_table_copy_3(Config) when is_list(Config) ->
+ [_Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ del_table(Node3, Node2, Nodes). %Called from Node w.o. table
+
+%%% The actual test
+del_table(CallFrom, DelNode, [Node1, Node2, Node3]) ->
+ Def = [{ram_copies, [Node1]}, {disc_copies, [Node2]},
+ {attributes, [key, attr1, attr2]}],
+ Tab = schema_ops,
+ Pids = init_admin(Def, Node1, Node2, Node3),
+
+ ?log("Call from ~p delete table from ~p ~n", [CallFrom, DelNode]),
+ rpc:multicall([Node1, Node2, Node3], ?MODULE, get_info, [Tab]),
+
+ ?match({atomic, ok},
+ rpc:call(CallFrom, mnesia, del_table_copy, [Tab, DelNode])),
+
+ verify_results(Pids),
+ rpc:multicall([Node1, Node2, Node3], ?MODULE, get_info, [Tab]),
+ ?verify_mnesia([Node1, Node2, Node3], []).
+
+add_table_copy_1(suite) -> [];
+add_table_copy_1(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Def = [{disc_only_copies, [Node1, Node2]},
+ {attributes, [key, attr1, attr2]}],
+ add_table(Node1, Node3, Nodes, Def).
+add_table_copy_2(suite) -> [];
+add_table_copy_2(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Def = [{disc_only_copies, [Node1, Node2]},
+ {attributes, [key, attr1, attr2]}],
+ add_table(Node2, Node3, Nodes, Def).
+add_table_copy_3(suite) -> [];
+add_table_copy_3(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Def = [{disc_only_copies, [Node1, Node2]},
+ {attributes, [key, attr1, attr2]}],
+ add_table(Node3, Node3, Nodes, Def).
+add_table_copy_4(suite) -> [];
+add_table_copy_4(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Def = [{disc_only_copies, [Node1]},
+ {attributes, [key, attr1, attr2]}],
+ add_table(Node2, Node3, Nodes, Def).
+%%% The actual test
+add_table(CallFrom, AddNode, [Node1, Node2, Node3], Def) ->
+ Pids = init_admin(Def, Node1, Node2, Node3),
+ Tab = schema_ops,
+ ?log("Call from ~p add table to ~p ~n", [CallFrom, AddNode]),
+ rpc:multicall([Node1, Node2, Node3], ?MODULE, get_info, [Tab]),
+ ?match({atomic, ok}, rpc:call(CallFrom, mnesia, add_table_copy,
+ [Tab, AddNode, ram_copies])),
+ verify_results(Pids),
+ rpc:multicall([Node1, Node2, Node3], ?MODULE, get_info, [Tab]),
+ ?verify_mnesia([Node1, Node2, Node3], []).
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+move_table_copy_1(suite) -> [];
+move_table_copy_1(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Def = [{disc_copies, [Node1, Node2]},
+ {attributes, [key, attr1, attr2]}],
+ move_table(Node1, Node1, Node3, Nodes, Def).
+move_table_copy_2(suite) -> [];
+move_table_copy_2(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Def = [{disc_copies, [Node1, Node2]},
+ {attributes, [key, attr1, attr2]}],
+ move_table(Node2, Node1, Node3, Nodes, Def).
+move_table_copy_3(suite) -> [];
+move_table_copy_3(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Def = [{disc_copies, [Node1, Node2]},
+ {attributes, [key, attr1, attr2]}],
+ move_table(Node3, Node1, Node3, Nodes, Def).
+move_table_copy_4(suite) -> [];
+move_table_copy_4(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Def = [{disc_copies, [Node1]},
+ {attributes, [key, attr1, attr2]}],
+ move_table(Node2, Node1, Node3, Nodes, Def).
+%%% The actual test
+move_table(CallFrom, FromNode, ToNode, [Node1, Node2, Node3], Def) ->
+ Pids = init_admin(Def, Node1, Node2, Node3),
+ Tab = schema_ops,
+ ?log("Call from ~p move table from ~p to ~p ~n", [CallFrom, FromNode, ToNode]),
+ rpc:multicall([Node1, Node2, Node3], ?MODULE, get_info, [Tab]),
+ ?match({atomic, ok}, rpc:call(CallFrom, mnesia, move_table_copy,
+ [Tab, FromNode, ToNode])),
+ verify_results(Pids),
+ rpc:multicall([Node1, Node2, Node3], ?MODULE, get_info, [Tab]),
+ ?verify_mnesia([Node1, Node2, Node3], []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+visibility(doc) ->
+ ["Verify the visibility semantics for various configurations"];
+visibility(suite) ->
+ [
+ dirty_updates_visible_direct,
+ dirty_reads_regardless_of_trans,
+ trans_update_invisibible_outside_trans,
+ trans_update_visible_inside_trans,
+ write_shadows,
+ delete_shadows,
+%% delete_shadows2,
+ write_delete_shadows_bag,
+ write_delete_shadows_bag2,
+ iteration,
+ shadow_search,
+ snmp_shadows
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+dirty_updates_visible_direct(doc) ->
+ ["One process can immediately see dirty updates of another"];
+dirty_updates_visible_direct(suite) -> [];
+dirty_updates_visible_direct(Config) when is_list(Config) ->
+ dirty_visibility(outside_trans, Config).
+
+dirty_reads_regardless_of_trans(doc) ->
+ ["Dirty reads are not affected by transaction context"];
+dirty_reads_regardless_of_trans(suite) -> [];
+dirty_reads_regardless_of_trans(Config) when is_list(Config) ->
+ dirty_visibility(inside_trans, Config).
+
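+%% dirty_visibility/2 runs the same sequence of dirty operations either with
+%% the updating process A inside an open transaction (inside_trans) or outside
+%% any transaction (outside_trans); in both modes every dirty update made by A
+%% must be immediately visible to the test process through dirty reads.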
+dirty_visibility(Mode, Config) ->
+ [Node1] = ?acquire_nodes(1, Config),
+ Tab = list_to_atom(lists:concat([dirty_visibility, '_', Mode])),
+
+ ?match({atomic, ok}, mnesia:create_table([{name, Tab}, {ram_copies, [Node1]}])),
+ ValPos = 3,
+
+ ?match({atomic, ok}, mnesia:add_table_index(Tab, ValPos)),
+
+ %% Start two processes
+ {success, [A]} = ?start_activities([Node1]),
+
+ case Mode of
+ inside_trans ->
+ ?start_transactions([A]),
+ A ! fun() ->
+ mnesia:write({Tab, a, 11}),
+ mnesia:write({Tab, b, 22}),
+ mnesia:write({Tab, c, 1}),
+ mnesia:write({Tab, d, 2}),
+ mnesia:write({Tab, e, 3}),
+ lists:sort(mnesia:all_keys(Tab))
+ end,
+ ?match_receive({A, [a, b, c, d, e]});
+ outside_trans ->
+ ignore
+ end,
+
+ RecA = {Tab, a, 1},
+ PatA = {Tab, '$1', 1},
+ RecB = {Tab, b, 3},
+ PatB = {Tab, '$1', 3},
+ RecB2 = {Tab, b, 2},
+ PatB2 = {Tab, '$1', 2},
+ ?match([], mnesia:dirty_read({Tab, a})),
+ ?match([], mnesia:dirty_read({Tab, b})),
+ ?match([], mnesia:dirty_match_object(PatA)),
+ ?match([], mnesia:dirty_match_object(PatB)),
+ ?match([], mnesia:dirty_match_object(PatB2)),
+ ?match([], mnesia:dirty_index_read(Tab, 1, ValPos)),
+ ?match([], mnesia:dirty_index_read(Tab, 3, ValPos)),
+ ?match([], mnesia:dirty_index_match_object(PatA, ValPos)),
+ ?match([], mnesia:dirty_index_match_object(PatB, ValPos)),
+ ?match([], mnesia:dirty_index_match_object(PatB2, ValPos)),
+ ?match('$end_of_table', mnesia:dirty_first(Tab)),
+
+ %% dirty_write
+ A ! fun() -> mnesia:dirty_write(RecA) end,
+ ?match_receive({A, ok}),
+ ?match([RecA], mnesia:dirty_read({Tab, a})),
+ ?match([RecA], mnesia:dirty_match_object(PatA)),
+ ?match(a, mnesia:dirty_first(Tab)),
+ ?match([RecA], mnesia:dirty_index_read(Tab, 1, ValPos)),
+ ?match([RecA], mnesia:dirty_index_match_object(PatA, ValPos)),
+ ?match('$end_of_table', mnesia:dirty_next(Tab, a)),
+
+ %% dirty_create
+ A ! fun() -> mnesia:dirty_write(RecB) end,
+ ?match_receive({A, ok}),
+ ?match([RecB], mnesia:dirty_read({Tab, b})),
+ ?match([RecB], mnesia:dirty_match_object(PatB)),
+ ?match([RecB], mnesia:dirty_index_read(Tab, 3, ValPos)),
+ ?match([RecB], mnesia:dirty_index_match_object(PatB, ValPos)),
+ ?match('$end_of_table',
+ mnesia:dirty_next(Tab, mnesia:dirty_next(Tab, mnesia:dirty_first(Tab)))),
+
+ %% dirty_update_counter
+ A ! fun() -> mnesia:dirty_update_counter({Tab, b}, -1) end,
+ ?match_receive({A, _}),
+ ?match([RecB2], mnesia:dirty_read({Tab, b})),
+ ?match([], mnesia:dirty_match_object(PatB)),
+ ?match([RecB2], mnesia:dirty_match_object(PatB2)),
+ ?match([RecB2], mnesia:dirty_index_read(Tab, 2, ValPos)),
+ ?match([], mnesia:dirty_index_match_object(PatB, ValPos)),
+ ?match([RecB2], mnesia:dirty_index_match_object(PatB2, ValPos)),
+ ?match('$end_of_table',
+ mnesia:dirty_next(Tab, mnesia:dirty_next(Tab, mnesia:dirty_first(Tab)))),
+
+ %% dirty_delete
+ A ! fun() -> mnesia:dirty_delete({Tab, b}) end,
+ ?match_receive({A, ok}),
+ ?match([], mnesia:dirty_read({Tab, b})),
+ ?match([], mnesia:dirty_match_object(PatB2)),
+ ?match([], mnesia:dirty_index_read(Tab, 3, ValPos)),
+ ?match([], mnesia:dirty_index_match_object(PatB2, ValPos)),
+ ?match(a, mnesia:dirty_first(Tab)),
+ ?match('$end_of_table', mnesia:dirty_next(Tab, a)),
+
+ %% dirty_delete_object
+ ?match([RecA], mnesia:dirty_match_object(PatA)),
+ A ! fun() -> mnesia:dirty_delete_object(RecA) end,
+ ?match_receive({A, ok}),
+ ?match([], mnesia:dirty_read({Tab, a})),
+ ?match([], mnesia:dirty_match_object(PatA)),
+ ?match([], mnesia:dirty_index_read(Tab, 1, ValPos)),
+ ?match([], mnesia:dirty_index_match_object(PatA, ValPos)),
+ ?match('$end_of_table', mnesia:dirty_first(Tab)),
+
+ case Mode of
+ inside_trans ->
+ A ! end_trans,
+ ?match_receive({A, {atomic, end_trans}});
+ outside_trans ->
+ ignore
+ end,
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+trans_update_invisibible_outside_trans(doc) ->
+ ["Updates in a transaction are invisible outside the transaction"];
+trans_update_invisibible_outside_trans(suite) -> [];
+trans_update_invisibible_outside_trans(Config) when is_list(Config) ->
+ [Node1] = ?acquire_nodes(1, Config),
+ Tab = trans_update_invisibible_outside_trans,
+
+ ?match({atomic, ok}, mnesia:create_table([{name, Tab},
+ {ram_copies, [Node1]}])),
+ ValPos = 3,
+ RecA = {Tab, a, 1},
+ PatA = {Tab, '$1', 1},
+ RecB = {Tab, b, 3},
+ PatB = {Tab, '$1', 3},
+ ?match({atomic, ok}, mnesia:add_table_index(Tab, ValPos)),
+
+ Verify =
+ fun() ->
+ ?match([], mnesia:dirty_read({Tab, a})),
+ ?match([], mnesia:dirty_read({Tab, b})),
+ ?match([], mnesia:dirty_match_object(PatA)),
+ ?match([], mnesia:dirty_match_object(PatB)),
+ ?match([], mnesia:dirty_index_read(Tab, 1, ValPos)),
+ ?match([], mnesia:dirty_index_read(Tab, 3, ValPos)),
+ ?match([], mnesia:dirty_index_match_object(PatA, ValPos)),
+ ?match([], mnesia:dirty_index_match_object(PatB, ValPos)),
+ ?match('$end_of_table', mnesia:dirty_first(Tab))
+ end,
+
+ Fun = fun() ->
+ ?match(ok, mnesia:write(RecA)),
+ Verify(),
+
+ ?match(ok, mnesia:write(RecB)),
+ Verify(),
+
+ ?match(ok, mnesia:delete({Tab, b})),
+ Verify(),
+
+ ?match([RecA], mnesia:match_object(PatA)),
+ Verify(),
+
+ ?match(ok, mnesia:delete_object(RecA)),
+ Verify(),
+ ok
+ end,
+ ?match({atomic, ok}, mnesia:transaction(Fun)),
+ Verify(),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+trans_update_visible_inside_trans(doc) ->
+ ["Updates in a transaction are visible in the same transaction"];
+trans_update_visible_inside_trans(suite) -> [];
+trans_update_visible_inside_trans(Config) when is_list(Config) ->
+ [Node1] = ?acquire_nodes(1, Config),
+ Tab = trans_update_visible_inside_trans,
+
+ ?match({atomic, ok}, mnesia:create_table([{name, Tab},
+ {ram_copies, [Node1]}])),
+ ValPos = 3,
+ RecA = {Tab, a, 1},
+ PatA = {Tab, '$1', 1},
+ RecB = {Tab, b, 3},
+ PatB = {Tab, '$1', 3},
+ ?match({atomic, ok}, mnesia:add_table_index(Tab, ValPos)),
+
+ Fun = fun() ->
+ %% write
+ ?match(ok, mnesia:write(RecA)),
+ ?match([RecA], mnesia:read({Tab, a})),
+ ?match([RecA], mnesia:wread({Tab, a})),
+ ?match([RecA], mnesia:match_object(PatA)),
+ ?match([a], mnesia:all_keys(Tab)),
+ ?match([RecA], mnesia:index_match_object(PatA, ValPos)),
+ ?match([RecA], mnesia:index_read(Tab, 1, ValPos)),
+
+ %% create
+ ?match(ok, mnesia:write(RecB)),
+ ?match([RecB], mnesia:read({Tab, b})),
+ ?match([RecB], mnesia:wread({Tab, b})),
+ ?match([RecB], mnesia:match_object(PatB)),
+ ?match([RecB], mnesia:index_match_object(PatB, ValPos)),
+ ?match([RecB], mnesia:index_read(Tab, 3, ValPos)),
+
+ %% delete
+ ?match(ok, mnesia:delete({Tab, b})),
+ ?match([], mnesia:read({Tab, b})),
+ ?match([], mnesia:wread({Tab, b})),
+ ?match([], mnesia:match_object(PatB)),
+ ?match([a], mnesia:all_keys(Tab)),
+ ?match([], mnesia:index_match_object(PatB, ValPos)),
+ ?match([], mnesia:index_read(Tab, 2, ValPos)),
+ ?match([], mnesia:index_read(Tab, 3, ValPos)),
+
+ %% delete_object
+ ?match(ok, mnesia:delete_object(RecA)),
+ ?match([], mnesia:read({Tab, a})),
+ ?match([], mnesia:wread({Tab, a})),
+ ?match([], mnesia:match_object(PatA)),
+ ?match([], mnesia:all_keys(Tab)),
+ ?match([], mnesia:index_match_object(PatA, ValPos)),
+ ?match([], mnesia:index_read(Tab, 2, ValPos)),
+ ?match([], mnesia:index_read(Tab, 3, ValPos)),
+ ok
+ end,
+ ?match({atomic, ok}, mnesia:transaction(Fun)),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+write_shadows(doc) ->
+ ["Tests whether the shadow shows the correct object when",
+ "writing to the table"];
+write_shadows(suite) -> [];
+write_shadows(Config) when is_list(Config) ->
+ [Node1] = ?acquire_nodes(1, Config),
+ Tab = write_shadows,
+
+ ?match({atomic, ok}, mnesia:create_table([{name, Tab},
+ {ram_copies, [Node1]},
+ {type, set}])),
+ ValPos = 3,
+ RecA1 = {Tab, a, 1},
+ PatA1 = {Tab, '$1', 1},
+ RecA2 = {Tab, a, 2},
+ PatA2 = {Tab, '$1', 2},
+
+
+ ?match({atomic, ok}, mnesia:add_table_index(Tab, ValPos)),
+
+ Fun1 = fun() ->
+ ?match(ok, mnesia:write(RecA1)),
+ ok
+ end,
+
+ ?match({atomic, ok}, mnesia:transaction(Fun1)),
+
+ Fun2 = fun() ->
+                   %% write shadow old write - is the confirmed value visible
+                   %% in the shadow ?
+ ?match([RecA1], mnesia:read({Tab, a})),
+ ?match([RecA1], mnesia:wread({Tab, a})),
+ ?match([RecA1], mnesia:match_object(PatA1)),
+ ?match([a], mnesia:all_keys(Tab)),
+ ?match([RecA1], mnesia:index_match_object(PatA1, ValPos)),
+ ?match([RecA1], mnesia:index_read(Tab, 1, ValPos)),
+
+                   %% write shadow new write - is the new value visible instead
+                   %% of the old value ?
+ ?match(ok, mnesia:write(RecA2)),
+
+ ?match([RecA2], mnesia:read({Tab, a})),
+ ?match([RecA2], mnesia:wread({Tab, a})),
+                  ?match([RecA2], mnesia:match_object(PatA2)),
+
+ ?match([a], mnesia:all_keys(Tab)),
+ ?match([RecA2], mnesia:index_match_object(PatA2, ValPos)),
+ ?match([RecA2], mnesia:index_read(Tab, 2, ValPos)),
+ ok
+
+ end,
+ ?match({atomic, ok}, mnesia:transaction(Fun2)),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+delete_shadows(doc) ->
+ ["Test whether the shadow shows the correct object when deleting objects"];
+delete_shadows(suite) -> [];
+delete_shadows(Config) when is_list(Config) ->
+ [Node1] = ?acquire_nodes(1, Config),
+ Tab = delete_shadows,
+
+ ?match({atomic, ok}, mnesia:create_table([{name, Tab},
+ {ram_copies, [Node1]},
+ {type, set}])),
+ ValPos = 3,
+ OidA = {Tab, a},
+ RecA1 = {Tab, a, 1},
+ PatA1 = {Tab, '$1', 1},
+ RecA2 = {Tab, a, 2},
+ PatA2 = {Tab, '$1', 2},
+
+
+ ?match({atomic, ok}, mnesia:add_table_index(Tab, ValPos)),
+
+ Fun1 = fun() ->
+ ?match(ok, mnesia:write(RecA1)),
+ ok
+ end,
+
+ ?match({atomic, ok}, mnesia:transaction(Fun1)),
+
+ Fun2 = fun() ->
+
+
+ %% delete shadow old write - is the confirmed value invisible
+ %% when deleted in the transaction ?
+ ?match(ok, mnesia:delete(OidA)),
+
+ ?match([], mnesia:read({Tab, a})),
+ ?match([], mnesia:wread({Tab, a})),
+ ?match([], mnesia:match_object(PatA1)),
+ ?match([], mnesia:all_keys(Tab)),
+ ?match([], mnesia:index_match_object(PatA1, ValPos)),
+ ?match([], mnesia:index_read(Tab, 1, ValPos)),
+
+                   %% delete shadow old but not new write - is the new value visible
+                   %% when the old one was deleted ?
+ ?match(ok, mnesia:write(RecA2)),
+
+ ?match([RecA2], mnesia:read({Tab, a})),
+ ?match([RecA2], mnesia:wread({Tab, a})),
+ ?match([RecA2], mnesia:match_object(PatA2)),
+ ?match([a], mnesia:all_keys(Tab)),
+ ?match([RecA2], mnesia:index_match_object(PatA2, ValPos)),
+ ?match([RecA2], mnesia:index_read(Tab, 2, ValPos)),
+
+                   %% delete shadow old and new write - is the new value invisible
+                   %% when deleted ?
+ ?match(ok, mnesia:delete(OidA)),
+
+ ?match([], mnesia:read({Tab, a})),
+ ?match([], mnesia:wread({Tab, a})),
+ ?match([], mnesia:match_object(PatA2)),
+ ?match([], mnesia:all_keys(Tab)),
+ ?match([], mnesia:index_match_object(PatA2, ValPos)),
+ ?match([], mnesia:index_read(Tab, 2, ValPos)),
+ ok
+
+ end,
+ ?match({atomic, ok}, mnesia:transaction(Fun2)),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+write_delete_shadows_bag(doc) ->
+    ["Test the visibility of written and deleted objects in a bag type table"];
+write_delete_shadows_bag(suite) -> [];
+write_delete_shadows_bag(Config) when is_list(Config) ->
+ [Node1] = ?acquire_nodes(1, Config),
+ Tab = write_delete_shadows_bag,
+
+ ?match({atomic, ok}, mnesia:create_table([{name, Tab},
+ {ram_copies, [Node1]},
+ {type, bag}])),
+ ValPos = 3,
+ OidA = {Tab, a},
+
+ RecA1 = {Tab, a, 1},
+ PatA1 = {Tab, '$1', 1},
+
+ RecA2 = {Tab, a, 2},
+ PatA2 = {Tab, '$1', 2},
+
+ RecA3 = {Tab, a, 3},
+ PatA3 = {Tab, '$1', 3},
+
+ PatA = {Tab, a, '_'},
+
+
+ ?match({atomic, ok}, mnesia:add_table_index(Tab, ValPos)),
+
+ Fun1 = fun() ->
+ ?match(ok, mnesia:write(RecA1)),
+ ?match(ok, mnesia:write(RecA2)),
+ ok
+ end,
+
+ ?match({atomic, ok}, mnesia:transaction(Fun1)),
+
+ Fun2 = fun() ->
+ %% delete shadow old write - is the confirmed value invisible
+ %% when deleted in the transaction ?
+ ?match(ok, mnesia:delete_object(RecA1)),
+
+ ?match([RecA2], mnesia:read({Tab, a})),
+ ?match([RecA2], mnesia:wread({Tab, a})),
+ ?match([RecA2], mnesia:match_object(PatA2)),
+ ?match([a], mnesia:all_keys(Tab)),
+ ?match([RecA2], mnesia:index_match_object(PatA2, ValPos)),
+ ?match([RecA2], mnesia:index_read(Tab, 2, ValPos)),
+
+ ?match(ok, mnesia:delete(OidA)),
+
+ ?match([], mnesia:read({Tab, a})),
+ ?match([], mnesia:wread({Tab, a})),
+ ?match([], mnesia:match_object(PatA1)),
+ ?match([], mnesia:all_keys(Tab)),
+ ?match([], mnesia:index_match_object(PatA1, ValPos)),
+ ?match([], mnesia:index_read(Tab, 1, ValPos)),
+
+                   %% delete shadow old but not new write - are both new values visible
+                   %% when the old one was deleted ?
+ ?match(ok, mnesia:write(RecA2)),
+ ?match(ok, mnesia:write(RecA3)),
+
+
+ ?match([RecA2, RecA3], lists:sort(mnesia:read({Tab, a}))),
+ ?match([RecA2, RecA3], lists:sort(mnesia:wread({Tab, a}))),
+ ?match([RecA2], mnesia:match_object(PatA2)),
+ ?match([a], mnesia:all_keys(Tab)),
+ ?match([RecA2, RecA3], lists:sort(mnesia:match_object(PatA))),
+ ?match([RecA2], mnesia:index_match_object(PatA2, ValPos)),
+ ?match([RecA3], mnesia:index_match_object(PatA3, ValPos)),
+ ?match([RecA2], mnesia:index_read(Tab, 2, ValPos)),
+
+                   %% delete shadow old and new write - is the new value invisible
+                   %% when deleted ?
+ ?match(ok, mnesia:delete(OidA)),
+
+ ?match([], mnesia:read({Tab, a})),
+ ?match([], mnesia:wread({Tab, a})),
+ ?match([], mnesia:match_object(PatA2)),
+ ?match([], mnesia:all_keys(Tab)),
+ ?match([], mnesia:index_match_object(PatA2, ValPos)),
+ ?match([], mnesia:index_read(Tab, 2, ValPos)),
+ ok
+ end,
+ ?match({atomic, ok}, mnesia:transaction(Fun2)),
+ ok.
+
+write_delete_shadows_bag2(doc) ->
+    ["Test the visibility of written and deleted objects in a bag type table "
+     "and verify the results"];
+write_delete_shadows_bag2(suite) -> [];
+write_delete_shadows_bag2(Config) when is_list(Config) ->
+
+ [Node1] = ?acquire_nodes(1, Config),
+ Tab = w_d_s_b,
+
+ ?match({atomic, ok}, mnesia:create_table([{name, Tab},
+ {ram_copies, [Node1]},
+ {type, bag}])),
+ Del = fun() ->
+ R1 = mnesia:read({Tab, 1}),
+ mnesia:delete({Tab, 1}),
+ R2 = mnesia:read({Tab, 1}),
+ mnesia:write({Tab, 1, 1}),
+ mnesia:write({Tab, 1, 2}),
+ R3 = mnesia:read({Tab, 1}),
+ {R1, R2, R3}
+ end,
+ DelObj = fun() ->
+ R1 = mnesia:read({Tab, 2}),
+ mnesia:delete_object({Tab, 2, 2}),
+ R2 = mnesia:read({Tab, 2}),
+ mnesia:write({Tab, 2, 1}),
+ mnesia:write({Tab, 2, 2}),
+ R3 = mnesia:read({Tab, 2}),
+ {R1, R2, R3}
+ end,
+ Both1 = [{Tab, 1, 1}, {Tab, 1, 2}],
+ Both2 = [{Tab, 2, 1}, {Tab, 2, 2}],
+ ?match({atomic, {[], [], Both1}}, mnesia:transaction(Del)),
+ ?match({atomic, {Both1, [], Both1}}, mnesia:transaction(Del)),
+ ?match({atomic, Both1}, mnesia:transaction(fun() -> mnesia:read({Tab, 1}) end)),
+ ?match({atomic, {[], [], Both2}}, mnesia:transaction(DelObj)),
+ ?match({atomic, {Both2, [{Tab, 2, 1}], Both2}}, mnesia:transaction(DelObj)),
+ ?match({atomic, Both2}, mnesia:transaction(fun() -> mnesia:read({Tab, 2}) end)),
+ ?verify_mnesia([Node1], []).
+
+shadow_search(doc) ->
+    ["Verifies that ordered_set tables are ordered, and that the order is kept "
+     "even when the table is shadowed by transaction updates"];
+shadow_search(suite) -> [];
+shadow_search(Config) when is_list(Config) ->
+ [Node1] = ?acquire_nodes(1, Config),
+ Tab1 = ss_oset,
+ Tab2 = ss_set,
+ Tab3 = ss_bag,
+ Tabs = [Tab1,Tab2,Tab3],
+ RecName = ss,
+ ?match({atomic, ok}, mnesia:create_table([{name, Tab1},
+ {ram_copies, [Node1]},
+ {record_name, RecName},
+ {type, ordered_set}])),
+ ?match({atomic, ok}, mnesia:create_table([{name, Tab2},
+ {record_name, RecName},
+ {ram_copies, [Node1]},
+ {type, set}])),
+ ?match({atomic, ok}, mnesia:create_table([{name, Tab3},
+ {record_name, RecName},
+ {ram_copies, [Node1]},
+ {type, bag}])),
+ Recs = [{RecName, K, K} || K <- [1,3,5]],
+ [mnesia:dirty_write(Tab1, R) || R <- Recs],
+ [mnesia:dirty_write(Tab2, R) || R <- Recs],
+ [mnesia:dirty_write(Tab3, R) || R <- Recs],
+
+ Match = fun(Tab) -> mnesia:match_object(Tab, {'_','_','_'}, write) end,
+ Select = fun(Tab) -> mnesia:select(Tab, [{'_', [], ['$_']}]) end,
+% Trans = fun(Fun,Args) -> mnesia:transaction(Fun,Args) end,
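+    %% LoopHelp is passed to itself as its last argument so that the
+    %% anonymous fun can recurse over the continuation returned by
+    %% mnesia:select until '$end_of_table' is reached.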
+ LoopHelp = fun('$end_of_table',_) -> [];
+ ({Res,Cont},Fun) ->
+ Sel = mnesia:select(Cont),
+ Res ++ Fun(Sel, Fun)
+ end,
+ SelLoop = fun(Table) ->
+ Sel = mnesia:select(Table, [{'_', [], ['$_']}], 1, read),
+ LoopHelp(Sel,LoopHelp)
+ end,
+
+ R1 = {RecName, 2, 2}, R2 = {RecName, 4, 4},
+ R3 = {RecName, 2, 3}, R4 = {RecName, 3, 1},
+ R5 = {RecName, 104, 104},
+ W1 = fun(Tab,Search) -> mnesia:write(Tab,R1,write),
+ mnesia:write(Tab,R2,write),
+ Search(Tab)
+ end,
+ S1 = lists:sort([R1,R2|Recs]),
+ ?match({atomic,S1}, mnesia:transaction(W1, [Tab1,Select])),
+ ?match({atomic,S1}, mnesia:transaction(W1, [Tab1,Match])),
+ ?match({atomic,S1}, mnesia:transaction(W1, [Tab1,SelLoop])),
+ ?match({atomic,S1}, sort_res(mnesia:transaction(W1, [Tab2,Select]))),
+ ?match({atomic,S1}, sort_res(mnesia:transaction(W1, [Tab2,SelLoop]))),
+ ?match({atomic,S1}, sort_res(mnesia:transaction(W1, [Tab2,Match]))),
+ ?match({atomic,S1}, sort_res(mnesia:transaction(W1, [Tab3,Select]))),
+ ?match({atomic,S1}, sort_res(mnesia:transaction(W1, [Tab3,SelLoop]))),
+ ?match({atomic,S1}, sort_res(mnesia:transaction(W1, [Tab3,Match]))),
+ [mnesia:dirty_delete_object(Tab,R) || R <- [R1,R2], Tab <- Tabs],
+
+ W2 = fun(Tab,Search) ->
+ mnesia:write(Tab,R3,write),
+ mnesia:write(Tab,R1,write),
+ Search(Tab)
+ end,
+ S2 = lists:sort([R1|Recs]),
+ S2Bag = lists:sort([R1,R3|Recs]),
+ ?match({atomic,S2}, mnesia:transaction(W2, [Tab1,Select])),
+ ?match({atomic,S2}, mnesia:transaction(W2, [Tab1,SelLoop])),
+ ?match({atomic,S2}, mnesia:transaction(W2, [Tab1,Match])),
+ ?match({atomic,S2}, sort_res(mnesia:transaction(W2, [Tab2,Select]))),
+ ?match({atomic,S2}, sort_res(mnesia:transaction(W2, [Tab2,SelLoop]))),
+ ?match({atomic,S2}, sort_res(mnesia:transaction(W2, [Tab2,Match]))),
+ ?match({atomic,S2Bag}, sort_res(mnesia:transaction(W2, [Tab3,Select]))),
+ ?match({atomic,S2Bag}, sort_res(mnesia:transaction(W2, [Tab3,SelLoop]))),
+ ?match({atomic,S2Bag}, sort_res(mnesia:transaction(W2, [Tab3,Match]))),
+%% [mnesia:dirty_delete_object(Tab,R) || R <- [R1,R3], Tab <- Tabs],
+
+ W3 = fun(Tab,Search) ->
+ mnesia:write(Tab,R4,write),
+ mnesia:delete(Tab,element(2,R1),write),
+ Search(Tab)
+ end,
+ S3Bag = lists:sort([R4|lists:delete(R1,Recs)]),
+ S3 = lists:delete({RecName,3,3},S3Bag),
+ ?match({atomic,S3}, mnesia:transaction(W3, [Tab1,Select])),
+ ?match({atomic,S3}, mnesia:transaction(W3, [Tab1,SelLoop])),
+ ?match({atomic,S3}, mnesia:transaction(W3, [Tab1,Match])),
+ ?match({atomic,S3}, sort_res(mnesia:transaction(W3, [Tab2,SelLoop]))),
+ ?match({atomic,S3}, sort_res(mnesia:transaction(W3, [Tab2,Select]))),
+ ?match({atomic,S3}, sort_res(mnesia:transaction(W3, [Tab2,Match]))),
+ ?match({atomic,S3Bag}, sort_res(mnesia:transaction(W3, [Tab3,Select]))),
+ ?match({atomic,S3Bag}, sort_res(mnesia:transaction(W3, [Tab3,SelLoop]))),
+ ?match({atomic,S3Bag}, sort_res(mnesia:transaction(W3, [Tab3,Match]))),
+
+ W4 = fun(Tab,Search) ->
+ mnesia:delete(Tab,-1,write),
+ mnesia:delete(Tab,4 ,write),
+ mnesia:delete(Tab,17,write),
+ mnesia:delete_object(Tab,{RecName, -1, x},write),
+ mnesia:delete_object(Tab,{RecName, 4, x},write),
+ mnesia:delete_object(Tab,{RecName, 42, x},write),
+ mnesia:delete_object(Tab,R2,write),
+ mnesia:write(Tab, R5, write),
+ Search(Tab)
+ end,
+ S4Bag = lists:sort([R5|S3Bag]),
+ S4 = lists:sort([R5|S3]),
+ ?match({atomic,S4}, mnesia:transaction(W4, [Tab1,Select])),
+ ?match({atomic,S4}, mnesia:transaction(W4, [Tab1,SelLoop])),
+ ?match({atomic,S4}, mnesia:transaction(W4, [Tab1,Match])),
+ ?match({atomic,S4}, sort_res(mnesia:transaction(W4, [Tab2,Select]))),
+ ?match({atomic,S4}, sort_res(mnesia:transaction(W4, [Tab2,SelLoop]))),
+ ?match({atomic,S4}, sort_res(mnesia:transaction(W4, [Tab2,Match]))),
+ ?match({atomic,S4Bag}, sort_res(mnesia:transaction(W4, [Tab3,Select]))),
+ ?match({atomic,S4Bag}, sort_res(mnesia:transaction(W4, [Tab3,SelLoop]))),
+ ?match({atomic,S4Bag}, sort_res(mnesia:transaction(W4, [Tab3,Match]))),
+ [mnesia:dirty_delete_object(Tab,R) || R <- [{RecName,3,3},R5], Tab <- Tabs],
+
+ %% hmmm anything more??
+
+ ?verify_mnesia([Node1], []).
+
+removed_resources(suite) ->
+ [rr_kill_copy];
+removed_resources(doc) ->
+    ["Verify that locking behaves correctly when resources are removed"].
+
+rr_kill_copy(suite) -> [];
+rr_kill_copy(Config) when is_list(Config) ->
+ Ns = ?acquire_nodes(3,Config ++ [{tc_timeout, 60000}]),
+ DeleteMe = fun(_Tab,Where2read) ->
+ ?match([], mnesia_test_lib:kill_mnesia([Where2read]))
+ end,
+ Del = removed_resources(Ns, DeleteMe),
+ ?verify_mnesia(Ns -- [Del], []).
+
+removed_resources([_N1,N2,N3], DeleteRes) ->
+ Tab = del_res,
+ ?match({atomic, ok}, mnesia:create_table(Tab,[{ram_copies, [N2,N3]}])),
+
+ Init = fun() -> [mnesia:write({Tab,Key,Key}) || Key <- lists:seq(0,99)] end,
+ ?match([], [Bad || Bad <- mnesia:sync_dirty(Init), Bad /= ok]),
+
+ Where2Read = mnesia:table_info(Tab, where_to_read),
+ [Keep] = [N2,N3] -- [Where2Read],
+ Tester = self(),
+
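+    %% Conflict uses the process dictionary to tell a first run from a
+    %% restart: on the first run it syncs with the tester and waits, while a
+    %% restarted run skips the sync and goes straight to the write.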
+ Conflict = fun() ->
+ %% Read a value..
+ [{Tab,1,Val}] = mnesia:read({Tab,1}),
+ case get(restart) of
+ undefined ->
+ Tester ! {pid_1, self()},
+                            %% Wait for sync; by then the read value has been
+                            %% updated and this function should be restarted.
+ receive {Tester,sync} -> ok end,
+ put(restart, restarted);
+ restarted ->
+ ok
+ end,
+ mnesia:write({Tab,1,Val+10})
+ end,
+ Lucky = fun() ->
+ [{Tab,1,Val}] = mnesia:read({Tab,1}),
+ mnesia:write({Tab,1,Val+100})
+ end,
+
+ CPid = spawn_link(fun() -> Tester ! {self(), mnesia:transaction(Conflict)} end),
+
+ %% sync first transaction
+ receive {pid_1, CPid} -> synced end,
+
+ DeleteRes(Tab, Where2Read),
+
+ ?match(Keep, mnesia:table_info(Tab, where_to_read)),
+
+    %% Run the other (Lucky) transaction; this should work since
+    %% it won't grab a lock on the conflicting transaction's Where2Read node.
+
+ LPid = spawn_link(Keep, fun() -> Tester ! {self(),mnesia:transaction(Lucky)} end),
+ ?match_receive({LPid,{atomic,ok}}),
+
+ %% Continue Transaction no 1
+ CPid ! {self(), sync},
+
+ ?match(ok, receive {CPid,{atomic,ok}} -> ok after 2000 -> process_info(self()) end),
+
+ ?match({atomic,[{del_res,1,111}]}, mnesia:transaction(fun() -> mnesia:read({Tab,1}) end)),
+ Where2Read.
+
+nasty(suite) -> [];
+
+nasty(doc) ->
+    ["Tries to fulfil a rather nasty locking scenario where we have had "
+     "bugs; the test case tries a combination of locks in the locker queue"];
+
+%% This test case no longer works as originally intended: it was meant to show
+%% errors when table locks were allowed to be placed in the queue even though
+%% locks with lower Tids already existed in the queue. This is no longer
+%% allowed and the test case has been updated.
+
+nasty(Config) ->
+ ?acquire_nodes(1, Config),
+ Tab = nasty,
+ ?match({atomic, ok}, mnesia:create_table(Tab, [])),
+ Coord = self(),
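+    %% Each worker fun below announces its pending operation to the
+    %% coordinator and then blocks on a 'continue' message, so the test can
+    %% control the order in which lock requests reach the locker queue.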
+ Write = fun(Key) ->
+ mnesia:write({Tab, Key, write}),
+ Coord ! {write, Key, self(), mnesia:get_activity_id()},
+ receive
+ continue ->
+ ok
+ end,
+ Coord ! {done, {write, Key}, self()}
+ end,
+
+ Update = fun(Key) ->
+ Coord ! {update, Key, self(), mnesia:get_activity_id()},
+ receive
+ continue ->
+ ok
+ end,
+ mnesia:read({Tab, Key}),
+ mnesia:write({Tab, Key, update}),
+ receive
+ continue ->
+ ok
+ end,
+
+ Coord ! {done, {update, Key}, self()}
+ end,
+
+ TabLock = fun() ->
+ Coord ! {tablock, Tab, self(), mnesia:get_activity_id()},
+ receive
+ continue ->
+ ok
+ end,
+ mnesia:lock({table, Tab}, write),
+ Coord ! {done, {tablock, Tab}, self()}
+ end,
+
+ Up = spawn_link(mnesia, transaction, [Update, [0]]),
+ ?match_receive({update, 0, Up, _Tid}),
+ TL = spawn_link(mnesia, transaction, [TabLock]),
+ ?match_receive({tablock, Tab, _Tl, _Tid}),
+ W0 = spawn_link(mnesia, transaction, [Write, [0]]),
+ ?match_receive({write, 0, W0, _Tid}),
+ W1 = spawn_link(mnesia, transaction, [Write, [1]]),
+ ?match_receive({write, 1, W1, _Tid}),
+
+ %% Nothing should be in msg queue!
+ ?match(timeout, receive A -> A after 1000 -> timeout end),
+ Up ! continue, %% Should be queued
+ ?match(timeout, receive A -> A after 1000 -> timeout end),
+ TL ! continue, %% Should be restarted
+% ?match({tablock, _, _, _}, receive A -> A after 1000 -> timeout end),
+ ?match(timeout, receive A -> A after 1000 -> timeout end),
+
+ LQ1 = mnesia_locker:get_lock_queue(),
+ ?match({2, _}, {length(LQ1), LQ1}),
+ W0 ! continue, % Up should be in queue
+ ?match_receive({done, {write, 0}, W0}),
+ ?match_receive({'EXIT', W0, normal}),
+
+    TL ! continue, % Should stay in queue behind W1
+ ?match(timeout, receive A -> A after 1000 -> timeout end),
+ Up ! continue, % Should stay in queue (TL got higher tid)
+ ?match(timeout, receive A -> A after 1000 -> timeout end),
+
+ LQ2 = mnesia_locker:get_lock_queue(),
+ ?match({2, _}, {length(LQ2), LQ2}),
+
+ W1 ! continue,
+ ?match_receive({done, {write, 1}, W1}),
+ get_exit(W1),
+ get_exit(TL),
+ ?match_receive({done, {tablock,Tab}, TL}),
+ get_exit(Up),
+ ?match_receive({done, {update, 0}, Up}),
+
+ ok.
+
+get_exit(Pid) ->
+ receive
+ {'EXIT', Pid, normal} ->
+ ok
+ after 10000 ->
+ ?error("Timeout EXIT ~p~n", [Pid])
+ end.
+
+iteration(doc) ->
+    ["Verify that the updates before/during iteration are visible "
+ "and that the order is preserved for ordered_set tables"];
+iteration(suite) ->
+ [foldl,first_next].
+
+foldl(doc) ->
+ [""];
+foldl(suite) ->
+ [];
+foldl(Config) when is_list(Config) ->
+ Nodes = [_,N2] = ?acquire_nodes(2, Config),
+ Tab1 = foldl_local,
+ Tab2 = foldl_remote,
+ Tab3 = foldl_ordered,
+ Tab11 = foldr_local,
+ Tab21 = foldr_remote,
+ Tab31 = foldr_ordered,
+ ?match({atomic, ok}, mnesia:create_table(Tab1, [{ram_copies, Nodes}])),
+ ?match({atomic, ok}, mnesia:create_table(Tab2, [{ram_copies, [N2]}, {type, bag}])),
+ ?match({atomic, ok}, mnesia:create_table(Tab3, [{ram_copies, Nodes},
+ {type, ordered_set}])),
+ ?match({atomic, ok}, mnesia:create_table(Tab11, [{ram_copies, Nodes}])),
+ ?match({atomic, ok}, mnesia:create_table(Tab21, [{ram_copies, [N2]}, {type, bag}])),
+ ?match({atomic, ok}, mnesia:create_table(Tab31, [{ram_copies, Nodes},
+ {type, ordered_set}])),
+
+
+ Tab1Els = [{Tab1, N, N} || N <- lists:seq(1, 10)],
+ Tab2Els = [{Tab2, 1, 2} | [{Tab2, N, N} || N <- lists:seq(1, 10)]],
+ Tab3Els = [{Tab3, N, N} || N <- lists:seq(1, 10)],
+ Tab11Els = [{Tab11, N, N} || N <- lists:seq(1, 10)],
+ Tab21Els = [{Tab21, 1, 2} | [{Tab21, N, N} || N <- lists:seq(1, 10)]],
+ Tab31Els = [{Tab31, N, N} || N <- lists:seq(1, 10)],
+
+ [mnesia:sync_dirty(fun() -> mnesia:write(E) end) || E <- Tab1Els],
+ [mnesia:sync_dirty(fun() -> mnesia:write(E) end) || E <- Tab2Els],
+ [mnesia:sync_dirty(fun() -> mnesia:write(E) end) || E <- Tab3Els],
+ [mnesia:sync_dirty(fun() -> mnesia:write(E) end) || E <- Tab11Els],
+ [mnesia:sync_dirty(fun() -> mnesia:write(E) end) || E <- Tab21Els],
+ [mnesia:sync_dirty(fun() -> mnesia:write(E) end) || E <- Tab31Els],
+
+ Get = fun(E, A) -> [E | A] end,
+
+ %% Before
+ AddB = fun(Tab, Func) ->
+ mnesia:write({Tab, 0, 0}),
+ mnesia:write({Tab, 1, 0}),
+ mnesia:write({Tab, 11, 0}),
+ mnesia:Func(Get, [], Tab)
+ end,
+ AddT1 = [{Tab1, 0, 0}, {Tab1, 1, 0}] ++ tl(Tab1Els) ++ [{Tab1, 11, 0}],
+ AddT2 = lists:sort([{Tab2, 0, 0}, {Tab2, 1, 0}] ++ Tab2Els ++ [{Tab2, 11, 0}]),
+ AddT3 = [{Tab3, 0, 0}, {Tab3, 1, 0}] ++ tl(Tab3Els) ++ [{Tab3, 11, 0}],
+ AddT11 = [{Tab11, 0, 0}, {Tab11, 1, 0}] ++ tl(Tab11Els) ++ [{Tab11, 11, 0}],
+ AddT21 = lists:sort([{Tab21, 0, 0}, {Tab21, 1, 0}] ++ Tab21Els ++ [{Tab21, 11, 0}]),
+ AddT31 = [{Tab31, 0, 0}, {Tab31, 1, 0}] ++ tl(Tab31Els) ++ [{Tab31, 11, 0}],
+
+ ?match({atomic, AddT1}, sort_res(mnesia:transaction(AddB, [Tab1, foldl]))),
+ ?match({atomic, AddT2}, sort_res(mnesia:transaction(AddB, [Tab2, foldl]))),
+ ?match({atomic, AddT3}, rev_res(mnesia:transaction(AddB, [Tab3, foldl]))),
+ ?match({atomic, AddT11}, sort_res(mnesia:transaction(AddB, [Tab11, foldr]))),
+ ?match({atomic, AddT21}, sort_res(mnesia:transaction(AddB, [Tab21, foldr]))),
+ ?match({atomic, AddT31}, mnesia:transaction(AddB, [Tab31, foldr])),
+
+ ?match({atomic, ok}, mnesia:create_table(copy, [{ram_copies, [N2]},
+ {record_name, Tab1}])),
+ CopyRec = fun(NewRec, Acc) ->
+ %% OTP-5495
+ W = fun() -> mnesia:write(copy, NewRec, write), [NewRec| Acc] end,
+ {atomic,Res} = sort_res(mnesia:transaction(W)),
+ Res
+ end,
+ Copy = fun() ->
+ AddT1 = mnesia:foldl(CopyRec, [], Tab1),
+ AddT1 = sort_res(mnesia:foldl(Get, [], copy))
+ end,
+ ?match({atomic, AddT1}, sort_res(mnesia:transaction(Copy))),
+
+ Del = fun(E, A) -> mnesia:delete_object(E), [E|A] end,
+ DelD = fun(Tab) ->
+ mnesia:write({Tab, 12, 12}),
+ mnesia:delete({Tab, 0}),
+ mnesia:foldr(Del, [], Tab),
+ mnesia:foldl(Get, [], Tab)
+ end,
+ ?match({atomic, []}, sort_res(mnesia:transaction(DelD, [Tab1]))),
+ ?match({atomic, []}, sort_res(mnesia:transaction(DelD, [Tab2]))),
+ ?match({atomic, []}, rev_res(mnesia:transaction(DelD, [Tab3]))),
+
+ ListWrite = fun(Tab) -> %% OTP-3893
+ mnesia:write({Tab, [12], 12}),
+ mnesia:foldr(Get, [], Tab)
+ end,
+ ?match({atomic, [{Tab1, [12], 12}]}, sort_res(mnesia:transaction(ListWrite, [Tab1]))),
+ ?match({atomic, [{Tab2, [12], 12}]}, sort_res(mnesia:transaction(ListWrite, [Tab2]))),
+ ?match({atomic, [{Tab3, [12], 12}]}, rev_res(mnesia:transaction(ListWrite, [Tab3]))),
+
+ ?verify_mnesia(Nodes, []).
+
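+%% Result helpers: set and bag tables return fold/select results in no
+%% particular order, so those results are sorted before comparison, while
+%% ordered_set folds are only reversed (the fold accumulator prepends, so the
+%% list comes out backwards), which also checks the traversal order itself.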
+sort_res({atomic, List}) when is_list(List) ->
+ {atomic, lists:sort(List)};
+sort_res(Else) when is_list(Else) ->
+ lists:sort(Else);
+sort_res(Else) ->
+ Else.
+
+rev_res({atomic, List}) ->
+ {atomic, lists:reverse(List)};
+rev_res(Else) ->
+ Else.
+
+
+first_next(doc) -> [""];
+first_next(suite) -> [];
+first_next(Config) when is_list(Config) ->
+ Nodes = [_,N2] = ?acquire_nodes(2, Config),
+ Tab1 = local,
+ Tab2 = remote,
+ Tab3 = ordered,
+ Tab4 = bag,
+ Tabs = [Tab1,Tab2,Tab3,Tab4],
+
+ ?match({atomic, ok}, mnesia:create_table(Tab1, [{ram_copies, Nodes}])),
+ ?match({atomic, ok}, mnesia:create_table(Tab2, [{ram_copies, [N2]}])),
+ ?match({atomic, ok}, mnesia:create_table(Tab3, [{ram_copies, Nodes},
+ {type, ordered_set}])),
+ ?match({atomic, ok}, mnesia:create_table(Tab4, [{ram_copies, Nodes},
+ {type, bag}])),
+
+ %% Some Helpers
+ Trans = fun(Fun) -> mnesia:transaction(Fun) end,
+ Continue = fun(first) -> next;
+ (last) -> prev
+ end,
+ LoopHelp = fun('$end_of_table',_,_,_Fun) -> [];
+ (Key,Tab,Op,Fun) ->
+ Next = mnesia:Op(Tab,Key),
+ [Next |Fun(Next,Tab,Op,Fun)]
+ end,
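+    %% Loop walks a whole table with first/next (or last/prev) and
+    %% normalises the result: an ordered_set traversed backwards is reversed
+    %% back, keeping the trailing '$end_of_table', while other table types
+    %% are simply sorted.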
+ Loop = fun(Tab,Start) ->
+ First = mnesia:Start(Tab),
+ Res = [First|LoopHelp(First,Tab,Continue(Start),LoopHelp)],
+ case mnesia:table_info(Tab, type) of
+ ordered_set when Start == first -> Res;
+ ordered_set ->
+ {L1,L2} = lists:split(length(Res)-1,Res),
+ lists:reverse(L1) ++ L2;
+ _ -> lists:sort(Res)
+ end
+ end,
+
+ %% Verify empty tables
+ [?match({atomic, ['$end_of_table']},
+ Trans(fun() -> Loop(Tab,first) end))
+ || Tab <- Tabs],
+ [?match({atomic, ['$end_of_table']},
+ Trans(fun() -> Loop(Tab,last) end))
+ || Tab <- Tabs],
+ %% Verify that trans write is visible inside trans
+ [?match({atomic, [0,10,'$end_of_table']},
+ Trans(fun() ->
+ mnesia:write({Tab,0,0}),
+ mnesia:write({Tab,10,10}),
+ Loop(Tab,first) end))
+ || Tab <- Tabs],
+ [?match({atomic, ['$end_of_table']},
+ Trans(fun() ->
+ mnesia:delete({Tab,0}),
+ mnesia:delete({Tab,10}),
+ Loop(Tab,first) end))
+ || Tab <- Tabs],
+
+ [?match({atomic, [0,10,'$end_of_table']},
+ Trans(fun() ->
+ mnesia:write({Tab,0,0}),
+ mnesia:write({Tab,10,10}),
+ Loop(Tab,last) end))
+ || Tab <- Tabs],
+ [?match({atomic, ['$end_of_table']},
+ Trans(fun() ->
+ mnesia:delete({Tab,0}),
+ mnesia:delete({Tab,10}),
+ Loop(Tab,last) end))
+ || Tab <- Tabs],
+
+ Tab1Els = [{Tab1, N, N} || N <- lists:seq(1, 5)],
+ Tab2Els = [{Tab2, N, N} || N <- lists:seq(1, 5)],
+ Tab3Els = [{Tab3, N, N} || N <- lists:seq(1, 5)],
+ Tab4Els = [{Tab4, 1, 2} | [{Tab4, N, N} || N <- lists:seq(1, 5)]],
+
+ [mnesia:sync_dirty(fun() -> mnesia:write(E) end) || E <- Tab1Els],
+ [mnesia:sync_dirty(fun() -> mnesia:write(E) end) || E <- Tab2Els],
+ [mnesia:sync_dirty(fun() -> mnesia:write(E) end) || E <- Tab3Els],
+ [mnesia:sync_dirty(fun() -> mnesia:write(E) end) || E <- Tab4Els],
+ Keys = lists:sort(mnesia:dirty_all_keys(Tab1)),
+ R1 = Keys++ ['$end_of_table'],
+ [?match({atomic, R1}, Trans(fun() -> Loop(Tab,first) end))
+ || Tab <- Tabs],
+
+ [?match({atomic, R1}, Trans(fun() -> Loop(Tab,last) end))
+ || Tab <- Tabs],
+ R2 = R1 -- [3],
+
+ [?match({atomic, R2}, Trans(fun() -> mnesia:delete({Tab,3}),Loop(Tab,first) end))
+ || Tab <- Tabs],
+ [?match({atomic, R1}, Trans(fun() -> mnesia:write({Tab,3,3}),Loop(Tab,first) end))
+ || Tab <- Tabs],
+ [?match({atomic, R2}, Trans(fun() -> mnesia:delete({Tab,3}),Loop(Tab,last) end))
+ || Tab <- Tabs],
+ [?match({atomic, R1}, Trans(fun() -> mnesia:write({Tab,3,3}),Loop(Tab,last) end))
+ || Tab <- Tabs],
+ [?match({atomic, R1}, Trans(fun() -> mnesia:write({Tab,4,19}),Loop(Tab,first) end))
+ || Tab <- Tabs],
+ [?match({atomic, R1}, Trans(fun() -> mnesia:write({Tab,4,4}),Loop(Tab,last) end))
+ || Tab <- Tabs],
+
+ ?verify_mnesia(Nodes, []).
+
+
+snmp_shadows(doc) -> [""];
+snmp_shadows(suite) -> [];
+snmp_shadows(Config) when is_list(Config) ->
+ Nodes = ?acquire_nodes(1, Config),
+ Tab = snmp_shadows,
+ io:format("With fixstring~n", []),
+ ?match({atomic, ok}, mnesia:create_table(Tab,[{snmp,[{key,{fix_string,integer}}]}])),
+ snmp_shadows_test(Tab),
+ ?match({atomic, ok}, mnesia:delete_table(Tab)),
+ io:format("Without fixstring~n", []),
+ ?match({atomic, ok}, mnesia:create_table(Tab,[{snmp,[{key,{string,integer}}]}])),
+ snmp_shadows_test(Tab),
+ ?verify_mnesia(Nodes, []).
+
+snmp_shadows_test(Tab) ->
+ [mnesia:dirty_write({Tab, {"string", N}, {N, init}}) || N <- lists:seq(2,8,2)],
+
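+    %% CheckOrder asserts that the rows come back in strictly increasing
+    %% SNMP index order and extracts the stored state for later comparison.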
+ CheckOrder = fun(A={_,_,{_,_,State}}, Prev) ->
+ ?match({true, A, Prev}, {Prev < A, A, Prev}),
+ {State,A}
+ end,
+ R1 = mnesia:sync_dirty(fun() -> loop_snmp(Tab, []) end),
+ lists:mapfoldl(CheckOrder, {[],foo,foo}, R1),
+ R2 = mnesia:transaction(fun() -> loop_snmp(Tab, []) end),
+ ?match({atomic, R1}, R2),
+
+ Shadow = fun() ->
+ ok = mnesia:write({Tab, {"string",1}, {1,update}}),
+ ok = mnesia:write({Tab, {"string",4}, {4,update}}),
+ ok = mnesia:write({Tab, {"string",6}, {6,update}}),
+ ok = mnesia:delete({Tab, {"string",6}}),
+ ok = mnesia:write({Tab, {"string",9}, {9,update}}),
+ ok = mnesia:write({Tab, {"string",3}, {3,update}}),
+ ok = mnesia:write({Tab, {"string",5}, {5,update}}),
+ [Row5] = mnesia:read({Tab, {"string",5}}),
+ ok = mnesia:delete_object(Row5),
+ loop_snmp(Tab, [])
+ end,
+ R3 = mnesia:sync_dirty(Shadow),
+ {L3,_} = lists:mapfoldl(CheckOrder, {[],foo,foo}, R3),
+ ?match([{1,update},{2,init},{3,update},{4,update},{8,init},{9,update}], L3),
+ ?match({atomic, ok}, mnesia:clear_table(Tab)),
+
+ [mnesia:dirty_write({Tab, {"string", N}, {N, init}}) || N <- lists:seq(2,8,2)],
+ {atomic, R3} = mnesia:transaction(Shadow),
+ {L4,_} = lists:mapfoldl(CheckOrder, {[],foo,foo}, R3),
+ ?match([{1,update},{2,init},{3,update},{4,update},{8,init},{9,update}], L4),
+ ok.
+
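+%% Walk the table in SNMP index order; for every index, cross-check the row
+%% from snmp_get_row/2 against the Mnesia key and a plain read.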
+loop_snmp(Tab,Prev) ->
+ case mnesia:snmp_get_next_index(Tab,Prev) of
+ {ok, SKey} ->
+ {{ok,Row},_} = {mnesia:snmp_get_row(Tab, SKey),{?LINE,Prev,SKey}},
+ {{ok,MKey},_} = {mnesia:snmp_get_mnesia_key(Tab,SKey),{?LINE,Prev,SKey}},
+ ?match({[Row],Row,SKey,MKey}, {mnesia:read({Tab,MKey}),Row,SKey,MKey}),
+ [{SKey, MKey, Row} | loop_snmp(Tab, SKey)];
+ endOfTable ->
+ []
+ end.
diff --git a/lib/mnesia/test/mnesia_measure_test.erl b/lib/mnesia/test/mnesia_measure_test.erl
new file mode 100644
index 0000000000..fbf804dbec
--- /dev/null
+++ b/lib/mnesia/test/mnesia_measure_test.erl
@@ -0,0 +1,203 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(mnesia_measure_test).
+-author('[email protected]').
+-compile([export_all]).
+
+-include("mnesia_test_lib.hrl").
+
+init_per_testcase(Func, Conf) ->
+ mnesia_test_lib:init_per_testcase(Func, Conf).
+
+fin_per_testcase(Func, Conf) ->
+ mnesia_test_lib:fin_per_testcase(Func, Conf).
+
+-define(init(N, Config),
+ mnesia_test_lib:prepare_test_case([{init_test_case, [mnesia]},
+ delete_schema],
+ N, Config, ?FILE, ?LINE)).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+all(doc) ->
+ ["Measure various aspects of Mnesia",
+ "Verify that Mnesia has predictable response times,",
+     "that the transaction system has fair algorithms,",
+     "resource consumption, scalability, system limits etc.",
+ "Perform some benchmarks."];
+all(suite) ->
+ [
+ prediction,
+ consumption,
+ scalability,
+ benchmarks
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+prediction(doc) ->
+ ["The system must have predictable response times.",
+     "The maintenance of the system should not impact the",
+     "availability. Make sure that the response times do not vary too",
+     "much from the undisturbed normal usage.",
+     "Verify that deadlocks never occur."];
+prediction(suite) ->
+ [
+ reader_disturbed_by_node_down,
+ writer_disturbed_by_node_down,
+ reader_disturbed_by_node_up,
+ writer_disturbed_by_node_up,
+ reader_disturbed_by_schema_ops,
+ writer_disturbed_by_schema_ops,
+ reader_disturbed_by_checkpoint,
+ writer_disturbed_by_checkpoint,
+ reader_disturbed_by_dump_log,
+ writer_disturbed_by_dump_log,
+ reader_disturbed_by_backup,
+ writer_disturbed_by_backup,
+ reader_disturbed_by_restore,
+ writer_disturbed_by_restore,
+ fairness
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+fairness(doc) ->
+    ["Verify that the transaction system behaves fairly, even under intense",
+     "stress. Combine different access patterns (transaction profiles)",
+     "in order to verify that concurrent applications get a fair share",
+     "of the database resource. Verify that starvation can never occur."];
+fairness(suite) ->
+ [
+ reader_competing_with_reader,
+ reader_competing_with_writer,
+ writer_competing_with_reader,
+ writer_competing_with_writer
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+consumption(doc) ->
+ ["Measure the resource consumption and publish the outcome. Make",
+ "sure that resources are released after failures."];
+consumption(suite) ->
+ [
+ measure_resource_consumption,
+ determine_resource_leakage
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+scalability(doc) ->
+    ["Find out where the system limits are. We must at least meet the",
+     "documented system limits.",
+     "Redo the performance meters for various configurations and loads,",
+     "especially near the system limits."];
+scalability(suite) ->
+ [
+ determine_system_limits,
+ performance_at_min_config,
+ performance_at_max_config,
+ performance_at_full_load,
+ resource_consumption_at_min_config,
+ resource_consumption_at_max_config,
+ resource_consumption_at_full_load
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+benchmarks(doc) ->
+ ["Measure typical database operations and publish them. Try to",
+     "verify that new releases of Mnesia always outperform old",
+     "releases, or at least that the meters do not get worse."];
+benchmarks(suite) ->
+ [
+ meter,
+ cost,
+ dbn_meters,
+ measure_all_api_functions,
+ tpcb,
+ mnemosyne_vs_mnesia_kernel
+ ].
+
+dbn_meters(suite) -> [];
+dbn_meters(Config) when is_list(Config) ->
+ _Nodes = ?init(3, Config),
+ ?match(ok, mnesia_dbn_meters:start()),
+ ok.
+
+tpcb(suite) ->
+ [
+ ram_tpcb,
+ disc_tpcb,
+ disc_only_tpcb
+ ].
+
+tpcb(ReplicaType, Config) ->
+ HarakiriDelay = {tc_timeout, timer:minutes(20)},
+ Nodes = ?acquire_nodes(2, Config ++ [HarakiriDelay]),
+ Args = [{n_branches, 2},
+ {n_drivers_per_node, 1},
+ {replica_nodes, Nodes},
+ {driver_nodes, [hd(Nodes)]},
+ {use_running_mnesia, true},
+ {use_sticky_locks, true},
+ {replica_type, ReplicaType}],
+ ?match({ok, _}, mnesia_tpcb:start(Args)),
+ ?verify_mnesia(Nodes, []).
+
+ram_tpcb(suite) -> [];
+ram_tpcb(Config) when is_list(Config) ->
+ tpcb(ram_copies, Config).
+
+disc_tpcb(suite) -> [];
+disc_tpcb(Config) when is_list(Config) ->
+ tpcb(disc_copies, Config).
+
+disc_only_tpcb(suite) -> [];
+disc_only_tpcb(Config) when is_list(Config) ->
+ tpcb(disc_only_copies, Config).
+
+meter(suite) ->
+ [
+ ram_meter,
+ disc_meter,
+ disc_only_meter
+ ].
+
+ram_meter(suite) -> [];
+ram_meter(Config) when is_list(Config) ->
+ HarakiriDelay = [{tc_timeout, timer:minutes(20)}],
+ Nodes = ?init(3, Config ++ HarakiriDelay),
+ ?match(ok, mnesia_meter:go(ram_copies, Nodes)).
+
+disc_meter(suite) -> [];
+disc_meter(Config) when is_list(Config) ->
+ HarakiriDelay = [{tc_timeout, timer:minutes(20)}],
+ Nodes = ?init(3, Config ++ HarakiriDelay),
+ ?match(ok, mnesia_meter:go(disc_copies, Nodes)).
+
+disc_only_meter(suite) -> [];
+disc_only_meter(Config) when is_list(Config) ->
+ HarakiriDelay = [{tc_timeout, timer:minutes(20)}],
+ Nodes = ?init(3, Config ++ HarakiriDelay),
+ ?match(ok, mnesia_meter:go(disc_only_copies, Nodes)).
+
+cost(suite) -> [];
+cost(Config) when is_list(Config) ->
+ Nodes = ?init(3, Config),
+ ?match(ok, mnesia_cost:go(Nodes)),
+ file:delete("MNESIA_COST").
diff --git a/lib/mnesia/test/mnesia_meter.erl b/lib/mnesia/test/mnesia_meter.erl
new file mode 100644
index 0000000000..68094c4431
--- /dev/null
+++ b/lib/mnesia/test/mnesia_meter.erl
@@ -0,0 +1,465 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+%% Getting started:
+%%
+%% 1 Start one or more distributed Erlang nodes
+%% 2  Connect the nodes, e.g. with net_adm:ping/1
+%% 3a Run mnesia_meter:go()
+%% 3b Run mnesia_meter:go(ReplicaType)
+%% 3c Run mnesia_meter:go(ReplicaType, Nodes)
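+%%
+%% A typical interactive run (the node names are just an example) could be:
+%%
+%%   (a@host)1> net_adm:ping('b@host').
+%%   pong
+%%   (a@host)2> mnesia_meter:go(ram_copies, [node() | nodes()]).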
+
+-module(mnesia_meter).
+-author('[email protected]').
+-export([
+ go/0,
+ go/1,
+ go/2,
+ repeat_meter/2
+ ]).
+
+-record(person, {name, %% atomic, unique key
+ data, %% compound structure
+ married_to, %% name of partner or undefined
+ children}). %% list of children
+
+-record(meter, {desc, init, meter, micros}).
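+%% desc   - printable description of the meter
+%% init   - fun(Name, Partner) that populates the table before the timing run
+%% meter  - fun(Name) that is measured; it must return {atomic, ok}
+%% micros - resulting average number of microseconds per call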
+
+-record(result, {desc, list}).
+
+-define(TIMES, 1000).
+
+go() ->
+ go(ram_copies).
+
+go(ReplicaType) ->
+ go(ReplicaType, [node() | nodes()]).
+
+go(ReplicaType, Nodes) ->
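+    %% Time a no-op fun first; run_meter/3 subtracts this apply overhead
+    %% from every measured value.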
+ {ok, FunOverhead} = tc(fun(_) -> {atomic, ok} end, ?TIMES),
+ Size = size(term_to_binary(#person{})),
+    io:format("A fun apply costs ~p microseconds. Record size is ~p bytes.~n",
+ [FunOverhead, Size]),
+ Res = go(ReplicaType, Nodes, [], FunOverhead, []),
+ NewRes = rearrange(Res, []),
+ DescHeader = lists:flatten(io_lib:format("~w on ~w", [ReplicaType, Nodes])),
+ ItemHeader = lists:seq(1, length(Nodes)),
+ Header = #result{desc = DescHeader, list = ItemHeader},
+ SepList = ['--------' || _ <- Nodes],
+ Separator = #result{desc = "", list = SepList},
+ display([Separator, Header, Separator | NewRes] ++ [Separator]).
+
+go(_ReplicaType, [], _Config, _FunOverhead, Acc) ->
+ Acc;
+go(ReplicaType, [H | T], OldNodes, FunOverhead, Acc) ->
+ Nodes = [H | OldNodes],
+ Config = [{ReplicaType, Nodes}],
+ Res = run(Nodes, Config, FunOverhead),
+ go(ReplicaType, T, Nodes, FunOverhead, [{ReplicaType, Nodes, Res} | Acc]).
+
+rearrange([{_ReplicaType, _Nodes, Meters} | Tail], Acc) ->
+ Acc2 = [add_meter(M, Acc) || M <- Meters],
+ rearrange(Tail, Acc2);
+rearrange([], Acc) ->
+ Acc.
+
+add_meter(M, Acc) ->
+ case lists:keysearch(M#meter.desc, #result.desc, Acc) of
+ {value, R} ->
+ R#result{list = [M#meter.micros | R#result.list]};
+ false ->
+ #result{desc = M#meter.desc, list = [M#meter.micros]}
+ end.
+
+display(Res) ->
+ MaxDesc = lists:max([length(R#result.desc) || R <- Res]),
+ Format = lists:concat(["! ~-", MaxDesc, "s"]),
+ display(Res, Format, MaxDesc).
+
+display([R | Res], Format, MaxDesc) ->
+ case R#result.desc of
+ "" ->
+ io:format(Format, [lists:duplicate(MaxDesc, "-")]);
+ Desc ->
+ io:format(Format, [Desc])
+ end,
+ display_items(R#result.list, R#result.desc),
+ io:format(" !~n", []),
+ display(Res, Format, MaxDesc);
+display([], _Format, _MaxDesc) ->
+ ok.
+
+display_items([_Item | Items], "") ->
+ io:format(" ! ~s", [lists:duplicate(10, $-)]),
+ display_items(Items, "");
+display_items([Micros | Items], Desc) ->
+ io:format(" ! ~10w", [Micros]),
+ display_items(Items, Desc);
+display_items([], _Desc) ->
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+meters() ->
+ [#meter{desc = "transaction update two records with read and write",
+ init = fun write_records/2,
+ meter = fun update_records/1},
+ #meter{desc = "transaction update two records with wread and write",
+ init = fun write_records/2,
+ meter = fun w_update_records/1},
+ #meter{desc = "transaction update two records with read and s_write",
+ init = fun s_write_records/2,
+ meter = fun s_update_records/1},
+ #meter{desc = "sync_dirty update two records with read and write",
+ init = fun sync_dirty_write_records/2,
+ meter = fun sync_dirty_update_records/1},
+ #meter{desc = "async_dirty update two records with read and write",
+ init = fun async_dirty_write_records/2,
+ meter = fun async_dirty_update_records/1},
+ #meter{desc = "plain fun update two records with dirty_read and dirty_write",
+ init = fun dirty_write_records/2,
+ meter = fun dirty_update_records/1},
+ #meter{desc = "ets update two records with read and write (local only)",
+ init = fun ets_opt_write_records/2,
+ meter = fun ets_update_records/1},
+ #meter{desc = "plain fun update two records with ets:lookup and ets:insert (local only)",
+ init = fun bif_opt_write_records/2,
+ meter = fun bif_update_records/1},
+ #meter{desc = "plain fun update two records with dets:lookup and dets:insert (local only)",
+ init = fun dets_opt_write_records/2,
+ meter = fun dets_update_records/1},
+
+ #meter{desc = "transaction write two records with write",
+ init = fun write_records/2,
+ meter = fun(X) -> write_records(X, 0-X) end},
+ #meter{desc = "transaction write two records with s_write",
+ init = fun s_write_records/2,
+ meter = fun(X) -> s_write_records(X, 0-X) end},
+ #meter{desc = "sync_dirty write two records with write",
+ init = fun sync_dirty_write_records/2,
+ meter = fun(X) -> sync_dirty_write_records(X, 0-X) end},
+ #meter{desc = "async_dirty write two records with write",
+ init = fun async_dirty_write_records/2,
+ meter = fun(X) -> async_dirty_write_records(X, 0-X) end},
+ #meter{desc = "plain fun write two records with dirty_write",
+ init = fun dirty_write_records/2,
+ meter = fun(X) -> dirty_write_records(X, 0-X) end},
+ #meter{desc = "ets write two records with write (local only)",
+ init = fun ets_opt_write_records/2,
+ meter = fun(X) -> ets_write_records(X, 0-X) end},
+ #meter{desc = "plain fun write two records with ets:insert (local only)",
+ init = fun bif_opt_write_records/2,
+ meter = fun(X) -> bif_write_records(X, 0-X) end},
+ #meter{desc = "plain fun write two records with dets:insert (local only)",
+ init = fun dets_opt_write_records/2,
+ meter = fun(X) -> dets_write_records(X, 0-X) end},
+
+ #meter{desc = "transaction read two records with read",
+ init = fun write_records/2,
+ meter = fun(X) -> read_records(X, 0-X) end},
+ #meter{desc = "sync_dirty read two records with read",
+ init = fun sync_dirty_write_records/2,
+ meter = fun(X) -> sync_dirty_read_records(X, 0-X) end},
+ #meter{desc = "async_dirty read two records with read",
+ init = fun async_dirty_write_records/2,
+ meter = fun(X) -> async_dirty_read_records(X, 0-X) end},
+ #meter{desc = "plain fun read two records with dirty_read",
+ init = fun dirty_write_records/2,
+ meter = fun(X) -> dirty_read_records(X, 0-X) end},
+ #meter{desc = "ets read two records with read",
+ init = fun ets_opt_write_records/2,
+ meter = fun(X) -> ets_read_records(X, 0-X) end},
+ #meter{desc = "plain fun read two records with ets:lookup",
+ init = fun bif_opt_write_records/2,
+ meter = fun(X) -> bif_read_records(X, 0-X) end},
+ #meter{desc = "plain fun read two records with dets:lookup",
+ init = fun dets_opt_write_records/2,
+ meter = fun(X) -> dets_read_records(X, 0-X) end}
+ ].
+
+update_fun(Name) ->
+ fun() ->
+ case mnesia:read({person, Name}) of
+ [] ->
+ mnesia:abort(no_such_person);
+ [Pers] ->
+ [Partner] = mnesia:read({person, Pers#person.married_to}),
+ mnesia:write(Pers#person{married_to = undefined}),
+ mnesia:write(Partner#person{married_to = undefined})
+ end
+ end.
+
+update_records(Name) ->
+ mnesia:transaction(update_fun(Name)).
+
+sync_dirty_update_records(Name) ->
+ {atomic, mnesia:sync_dirty(update_fun(Name))}.
+
+async_dirty_update_records(Name) ->
+ {atomic, mnesia:async_dirty(update_fun(Name))}.
+
+ets_update_records(Name) ->
+ {atomic, mnesia:ets(update_fun(Name))}.
+
+w_update_records(Name) ->
+ F = fun() ->
+ case mnesia:wread({person, Name}) of
+ [] ->
+ mnesia:abort(no_such_person);
+ [Pers] ->
+ [Partner] = mnesia:wread({person, Pers#person.married_to}),
+ mnesia:write(Pers#person{married_to = undefined}),
+ mnesia:write(Partner#person{married_to = undefined})
+ end
+ end,
+ mnesia:transaction(F).
+
+s_update_records(Name) ->
+ F = fun() ->
+ case mnesia:read({person, Name}) of
+ [] ->
+ mnesia:abort(no_such_person);
+ [Pers] ->
+ [Partner] = mnesia:read({person, Pers#person.married_to}),
+ mnesia:s_write(Pers#person{married_to = undefined}),
+ mnesia:s_write(Partner#person{married_to = undefined})
+ end
+ end,
+ mnesia:transaction(F).
+
+dirty_update_records(Name) ->
+ case mnesia:dirty_read({person, Name}) of
+ [] ->
+ mnesia:abort(no_such_person);
+ [Pers] ->
+ [Partner] = mnesia:dirty_read({person, Pers#person.married_to}),
+ mnesia:dirty_write(Pers#person{married_to = undefined}),
+ mnesia:dirty_write(Partner#person{married_to = undefined})
+ end,
+ {atomic, ok}.
+
+bif_update_records(Name) ->
+ case ets:lookup(person, Name) of
+ [] ->
+ mnesia:abort(no_such_person);
+ [Pers] ->
+ [Partner] = ets:lookup(person, Pers#person.married_to),
+ ets:insert(person, Pers#person{married_to = undefined}),
+ ets:insert(person, Partner#person{married_to = undefined})
+ end,
+ {atomic, ok}.
+
+dets_update_records(Name) ->
+ case dets:lookup(person, Name) of
+ [] ->
+ mnesia:abort(no_such_person);
+ [Pers] ->
+ [Partner] = dets:lookup(person, Pers#person.married_to),
+ dets:insert(person, Pers#person{married_to = undefined}),
+ dets:insert(person, Partner#person{married_to = undefined})
+ end,
+ {atomic, ok}.
+
+write_records_fun(Pers, Partner) ->
+ fun() ->
+ P = #person{children = [ulla, bella]},
+ mnesia:write(P#person{name = Pers, married_to = Partner}),
+ mnesia:write(P#person{name = Partner, married_to = Pers})
+ end.
+
+write_records(Pers, Partner) ->
+ mnesia:transaction(write_records_fun(Pers, Partner)).
+
+sync_dirty_write_records(Pers, Partner) ->
+ {atomic, mnesia:sync_dirty(write_records_fun(Pers, Partner))}.
+
+async_dirty_write_records(Pers, Partner) ->
+ {atomic, mnesia:async_dirty(write_records_fun(Pers, Partner))}.
+
+ets_write_records(Pers, Partner) ->
+ {atomic, mnesia:ets(write_records_fun(Pers, Partner))}.
+
+s_write_records(Pers, Partner) ->
+ F = fun() ->
+ P = #person{children = [ulla, bella]},
+ mnesia:s_write(P#person{name = Pers, married_to = Partner}),
+ mnesia:s_write(P#person{name = Partner, married_to = Pers})
+ end,
+ mnesia:transaction(F).
+
+dirty_write_records(Pers, Partner) ->
+ P = #person{children = [ulla, bella]},
+ mnesia:dirty_write(P#person{name = Pers, married_to = Partner}),
+ mnesia:dirty_write(P#person{name = Partner, married_to = Pers}),
+ {atomic, ok}.
+
+ets_opt_write_records(Pers, Partner) ->
+ case mnesia:table_info(person, where_to_commit) of
+ [{N, ram_copies}] when N == node() ->
+ ets_write_records(Pers, Partner);
+ _ ->
+ throw(skipped)
+ end.
+
+bif_opt_write_records(Pers, Partner) ->
+ case mnesia:table_info(person, where_to_commit) of
+ [{N, ram_copies}] when N == node() ->
+ bif_write_records(Pers, Partner);
+ _ ->
+ throw(skipped)
+ end.
+
+bif_write_records(Pers, Partner) ->
+ P = #person{children = [ulla, bella]},
+ ets:insert(person, P#person{name = Pers, married_to = Partner}),
+ ets:insert(person, P#person{name = Partner, married_to = Pers}),
+ {atomic, ok}.
+
+dets_opt_write_records(Pers, Partner) ->
+ case mnesia:table_info(person, where_to_commit) of
+ [{N, disc_only_copies}] when N == node() ->
+ dets_write_records(Pers, Partner);
+ _ ->
+ throw(skipped)
+ end.
+
+dets_write_records(Pers, Partner) ->
+ P = #person{children = [ulla, bella]},
+ dets:insert(person, P#person{name = Pers, married_to = Partner}),
+ dets:insert(person, P#person{name = Partner, married_to = Pers}),
+ {atomic, ok}.
+
+read_records_fun(Pers, Partner) ->
+ fun() ->
+ case {mnesia:read({person, Pers}),
+ mnesia:read({person, Partner})} of
+ {[_], [_]} ->
+ ok;
+ _ ->
+ mnesia:abort(no_such_person)
+ end
+ end.
+
+read_records(Pers, Partner) ->
+ mnesia:transaction(read_records_fun(Pers, Partner)).
+
+sync_dirty_read_records(Pers, Partner) ->
+ {atomic, mnesia:sync_dirty(read_records_fun(Pers, Partner))}.
+
+async_dirty_read_records(Pers, Partner) ->
+ {atomic, mnesia:async_dirty(read_records_fun(Pers, Partner))}.
+
+ets_read_records(Pers, Partner) ->
+ {atomic, mnesia:ets(read_records_fun(Pers, Partner))}.
+
+dirty_read_records(Pers, Partner) ->
+ case {mnesia:dirty_read({person, Pers}),
+ mnesia:dirty_read({person, Partner})} of
+ {[_], [_]} ->
+ {atomic, ok};
+ _ ->
+ mnesia:abort(no_such_person)
+ end.
+
+bif_read_records(Pers, Partner) ->
+ case {ets:lookup(person, Pers),
+ ets:lookup(person, Partner)} of
+ {[_], [_]} ->
+ {atomic, ok};
+ _ ->
+ mnesia:abort(no_such_person)
+ end.
+
+dets_read_records(Pers, Partner) ->
+ case {dets:lookup(person, Pers),
+ dets:lookup(person, Partner)} of
+ {[_], [_]} ->
+ {atomic, ok};
+ _ ->
+ mnesia:abort(no_such_person)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+run(Nodes, Config, FunOverhead) ->
+ Meters = meters(),
+ io:format("Run ~w meters with table config: ~w~n", [length(Meters), Config]),
+ rpc:multicall(Nodes, mnesia, lkill, []),
+ start(Nodes, Config),
+ Res = [run_meter(Data, Nodes, FunOverhead) || Data <- Meters],
+ stop(Nodes),
+ Res.
+
+run_meter(M, Nodes, FunOverhead) when is_record(M, meter) ->
+ io:format(".", []),
+ case catch init_records(M#meter.init, ?TIMES) of
+ {atomic, ok} ->
+ rpc:multicall(Nodes, mnesia, dump_log, []),
+ case tc(M#meter.meter, ?TIMES) of
+ {ok, Micros} ->
+ M#meter{micros = lists:max([0, Micros - FunOverhead])};
+ {error, Reason} ->
+ M#meter{micros = Reason}
+ end;
+ Res ->
+ M#meter{micros = Res}
+ end.
+
+start(Nodes, Config) ->
+ mnesia:delete_schema(Nodes),
+ ok = mnesia:create_schema(Nodes),
+ Args = [[{dump_log_write_threshold, ?TIMES div 2},
+ {dump_log_time_threshold, timer:hours(10)}]],
+ lists:foreach(fun(Node) -> rpc:call(Node, mnesia, start, Args) end, Nodes),
+ Attrs = record_info(fields, person),
+ TabDef = [{attributes, Attrs} | Config],
+ {atomic, _} = mnesia:create_table(person, TabDef).
+
+stop(Nodes) ->
+ rpc:multicall(Nodes, mnesia, stop, []).
+
+%% Generate some dummy persons
+init_records(_Fun, 0) ->
+ {atomic, ok};
+init_records(Fun, Times) ->
+ {atomic, ok} = Fun(Times, 0 - Times),
+ init_records(Fun, Times - 1).
+
+tc(Fun, Times) ->
+ case catch timer:tc(?MODULE, repeat_meter, [Fun, Times]) of
+ {Micros, ok} ->
+ {ok, Micros div Times};
+ {_Micros, {error, Reason}} ->
+ {error, Reason};
+ {'EXIT', Reason} ->
+ {error, Reason}
+ end.
+
+%% The meter must return {atomic, ok}
+repeat_meter(Meter, Times) ->
+ repeat_meter(Meter, {atomic, ok}, Times).
+
+repeat_meter(_, {atomic, ok}, 0) ->
+ ok;
+repeat_meter(Meter, {atomic, _Result}, Times) when Times > 0 ->
+ repeat_meter(Meter, Meter(Times), Times - 1);
+repeat_meter(_Meter, Reason, _Times) ->
+ {error, Reason}.
+
diff --git a/lib/mnesia/test/mnesia_nice_coverage_test.erl b/lib/mnesia/test/mnesia_nice_coverage_test.erl
new file mode 100644
index 0000000000..aa9339f6b9
--- /dev/null
+++ b/lib/mnesia/test/mnesia_nice_coverage_test.erl
@@ -0,0 +1,227 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(mnesia_nice_coverage_test).
+-author('[email protected]').
+-compile([export_all]).
+-include("mnesia_test_lib.hrl").
+
+-record(nice_tab, {key, val}).
+
+init_per_testcase(Func, Conf) ->
+ mnesia_test_lib:init_per_testcase(Func, Conf).
+
+fin_per_testcase(Func, Conf) ->
+ mnesia_test_lib:fin_per_testcase(Func, Conf).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+all(doc) ->
+ ["Test nice usage of the entire API",
+ "Invoke all functions in the API, at least once.",
+     "Try to verify that all functions exist and that they perform",
+     "reasonable things when used in the simplest way."];
+all(suite) -> [nice].
+
+nice(doc) -> [""];
+nice(suite) -> [];
+nice(Config) when is_list(Config) ->
+    %% The whole test suite is one huge test case for the time being
+
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+ Attrs = record_info(fields, nice_tab),
+
+ initialize(Attrs, Node1),
+ dirty_access(Node1),
+ success_and_fail(),
+ index_mgt(),
+
+ adm(Attrs, Node1, Node2),
+ snmp(Node1, Node2),
+ backup(Node1),
+ ?verify_mnesia(Nodes, []).
+
+initialize(Attrs, Node1) ->
+ ?match(Version when is_list(Version), mnesia:system_info(version)),
+
+ Schema = [{name, nice_tab},
+ {attributes, Attrs}, {ram_copies, [Node1]}],
+
+ ?match({_, _}, mnesia:system_info(schema_version)),
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+
+ ?match(ok, mnesia:info()),
+ ?match(set, mnesia:table_info(nice_tab, type)),
+ ?match(ok, mnesia:schema()),
+ ?match(ok, mnesia:schema(nice_tab)),
+ ok.
+
+dirty_access(Node1) ->
+ TwoThree = #nice_tab{key=23, val=23},
+ TwoFive = #nice_tab{key=25, val=25},
+ ?match([], mnesia:dirty_slot(nice_tab, 0)),
+ ?match(ok, mnesia:dirty_write(TwoThree)),
+ ?match([TwoThree], mnesia:dirty_read({nice_tab, 23})),
+ ?match(ok, mnesia:dirty_write(TwoFive)),
+ ?match(ok, mnesia:dirty_delete_object(TwoFive)),
+
+ ?match(23, mnesia:dirty_first(nice_tab)),
+ ?match('$end_of_table', mnesia:dirty_next(nice_tab, 23)),
+ ?match([TwoThree], mnesia:dirty_match_object(TwoThree)),
+ ?match(ok, mnesia:dirty_delete({nice_tab, 23})),
+
+ CounterSchema = [{ram_copies, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(nice_counter_tab, CounterSchema)),
+ TwoFour = {nice_counter_tab, 24, 24},
+ ?match(ok, mnesia:dirty_write(TwoFour)),
+ ?match(34, mnesia:dirty_update_counter({nice_counter_tab, 24}, 10)),
+ TF = {nice_counter_tab, 24, 34},
+ ?match([TF], mnesia:dirty_read({nice_counter_tab, 24})),
+ ?match(ok, mnesia:dirty_delete({nice_counter_tab, 24})),
+ ?match(ok, mnesia:dirty_delete_object(TF)),
+ ok.
+
+success_and_fail() ->
+ ?match({atomic, a_good_trans}, mnesia:transaction(fun() ->good_trans()end)),
+
+ BadFun =
+ fun() ->
+ Two = #nice_tab{key=2, val=12},
+ ?match([Two], mnesia:match_object(#nice_tab{key='$1', val=12})),
+ ?match([#nice_tab{key=3, val=13}], mnesia:wread({nice_tab, 3})),
+ ?match(ok, mnesia:delete({nice_tab, 1})),
+ ?match(ok, mnesia:delete_object(Two)),
+ mnesia:abort(bad_trans),
+ ?match(bad, trans)
+ end,
+ ?match({aborted, bad_trans}, mnesia:transaction(BadFun)),
+ ?match(L when is_list(L), mnesia:error_description(no_exists)),
+ ?match({atomic, ok}, mnesia:transaction(fun(A) -> lock(), A end, [ok])),
+ ?match({atomic, ok}, mnesia:transaction(fun(A) -> lock(), A end, [ok], 3)),
+ ok.
+
+good_trans() ->
+ ?match([], mnesia:read(nice_tab, 3)),
+ ?match([], mnesia:read({nice_tab, 3})),
+ ?match(ok, mnesia:write(#nice_tab{key=14, val=4})),
+ ?match([14], mnesia:all_keys(nice_tab)),
+
+ Records = [ #nice_tab{key=K, val=K+10} || K <- lists:seq(1, 10) ],
+ Ok = [ ok || _ <- Records],
+ ?match(Ok, lists:map(fun(R) -> mnesia:write(R) end, Records)),
+ a_good_trans.
+
+
+lock() ->
+ ?match(ok, mnesia:s_write(#nice_tab{key=22, val=22})),
+ ?match(ok, mnesia:read_lock_table(nice_tab)),
+ ?match(ok, mnesia:write_lock_table(nice_tab)),
+ ok.
+
+index_mgt() ->
+ UniversalRec = #nice_tab{key=4711, val=4711},
+ ?match(ok, mnesia:dirty_write(UniversalRec)),
+ ValPos = #nice_tab.val,
+ ?match({atomic, ok}, mnesia:add_table_index(nice_tab, ValPos)),
+
+ IndexFun =
+ fun() ->
+ ?match([UniversalRec],
+ mnesia:index_read(nice_tab, 4711, ValPos)),
+ Pat = #nice_tab{key='$1', val=4711},
+ ?match([UniversalRec],
+ mnesia:index_match_object(Pat, ValPos)),
+ index_trans
+ end,
+ ?match({atomic, index_trans}, mnesia:transaction(IndexFun, infinity)),
+ ?match([UniversalRec],
+ mnesia:dirty_index_read(nice_tab, 4711, ValPos)),
+ ?match([UniversalRec],
+ mnesia:dirty_index_match_object(#nice_tab{key='$1', val=4711}, ValPos)),
+
+ ?match({atomic, ok}, mnesia:del_table_index(nice_tab, ValPos)),
+ ok.
+
+adm(Attrs, Node1, Node2) ->
+ This = node(),
+ ?match({ok, This}, mnesia:subscribe(system)),
+ ?match({atomic, ok},
+ mnesia:add_table_copy(nice_tab, Node2, disc_only_copies)),
+ ?match({atomic, ok},
+ mnesia:change_table_copy_type(nice_tab, Node2, ram_copies)),
+ ?match({atomic, ok}, mnesia:del_table_copy(nice_tab, Node1)),
+ ?match(stopped, rpc:call(Node1, mnesia, stop, [])),
+ ?match([], mnesia_test_lib:start_mnesia([Node1, Node2], [nice_tab])),
+ ?match(ok, mnesia:wait_for_tables([schema], infinity)),
+
+ Transformer = fun(Rec) ->
+ list_to_tuple(tuple_to_list(Rec) ++ [initial_value])
+ end,
+ ?match({atomic, ok},
+ mnesia:transform_table(nice_tab, Transformer, Attrs ++ [extra])),
+
+ ?match({atomic, ok}, mnesia:delete_table(nice_tab)),
+ DumpSchema = [{name, nice_tab}, {attributes, Attrs}, {ram_copies, [Node2]}],
+ ?match({atomic, ok}, mnesia:create_table(DumpSchema)),
+ ?match({atomic, ok}, mnesia:dump_tables([nice_tab])),
+ ?match({atomic, ok}, mnesia:move_table_copy(nice_tab, Node2, Node1)),
+
+ ?match(yes, mnesia:force_load_table(nice_counter_tab)),
+ ?match(dumped, mnesia:dump_log()),
+ ok.
+
+backup(Node1) ->
+ Tab = backup_nice,
+ Def = [{disc_copies, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ ?match({ok,_,_}, mnesia:activate_checkpoint([{name, cp}, {max, [Tab]}])),
+ File = "nice_backup.BUP",
+ File2 = "nice_backup2.BUP",
+ File3 = "nice_backup3.BUP",
+ ?match(ok, mnesia:backup_checkpoint(cp, File)),
+ ?match(ok, mnesia:backup_checkpoint(cp, File, mnesia_backup)),
+ ?match(ok, mnesia:deactivate_checkpoint(cp)),
+ ?match(ok, mnesia:backup(File)),
+ ?match(ok, mnesia:backup(File, mnesia_backup)),
+
+ Fun = fun(X, Acc) -> {[X], Acc} end,
+ ?match({ok, 0}, mnesia:traverse_backup(File, File2, Fun, 0)),
+ ?match({ok, 0}, mnesia:traverse_backup(File, mnesia_backup, dummy, read_only, Fun, 0)),
+ ?match(ok, mnesia:install_fallback(File)),
+ ?match(ok, mnesia:uninstall_fallback()),
+ ?match(ok, mnesia:install_fallback(File, mnesia_backup)),
+ ?match(ok, mnesia:dump_to_textfile(File3)),
+ ?match({atomic, ok}, mnesia:load_textfile(File3)),
+ ?match(ok, file:delete(File)),
+ ?match(ok, file:delete(File2)),
+ ?match(ok, file:delete(File3)),
+ ok.
+
+snmp(Node1, Node2) ->
+ Tab = nice_snmp,
+ Def = [{disc_copies, [Node1]}, {ram_copies, [Node2]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ ?match({aborted, {badarg, Tab, _}}, mnesia:snmp_open_table(Tab, [])),
+ ?match({atomic, ok}, mnesia:snmp_open_table(Tab, [{key, integer}])),
+ ?match(endOfTable, mnesia:snmp_get_next_index(Tab, [0])),
+ ?match(undefined, mnesia:snmp_get_row(Tab, [0])),
+ ?match(undefined, mnesia:snmp_get_mnesia_key(Tab, [0])),
+ ?match({atomic, ok}, mnesia:snmp_close_table(Tab)),
+ ok.
+
diff --git a/lib/mnesia/test/mnesia_qlc_test.erl b/lib/mnesia/test/mnesia_qlc_test.erl
new file mode 100644
index 0000000000..1e4f776c7d
--- /dev/null
+++ b/lib/mnesia/test/mnesia_qlc_test.erl
@@ -0,0 +1,475 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2004-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(mnesia_qlc_test).
+
+-compile(export_all).
+
+-export([all/1]).
+
+-include("mnesia_test_lib.hrl").
+-include_lib("stdlib/include/qlc.hrl").
+
+init_per_testcase(Func, Conf) ->
+ setup(Conf),
+ mnesia_test_lib:init_per_testcase(Func, Conf).
+
+fin_per_testcase(Func, Conf) ->
+ mnesia_test_lib:fin_per_testcase(Func, Conf).
+
+all(doc) ->
+ ["Test that the qlc mnesia interface works as expected."];
+all(suite) ->
+ case code:which(qlc) of
+ non_existing -> [];
+ _ ->
+ all_qlc()
+ end.
+
+all_qlc() ->
+ [dirty, trans, frag, info, mnesia_down].
+
+init_testcases(Type,Config) ->
+ Nodes = [N1,N2] = ?acquire_nodes(2, Config),
+ ?match({atomic, ok}, mnesia:create_table(a, [{Type,[N1]}, {index,[3]}])),
+ ?match({atomic, ok}, mnesia:create_table(b, [{Type,[N2]}])),
+ Write = fun(Id) ->
+ ok = mnesia:write({a, {a,Id}, 100 - Id}),
+ ok = mnesia:write({b, {b,100-Id}, Id})
+ end,
+ All = fun() -> [Write(Id) || Id <- lists:seq(1,10)], ok end,
+ ?match({atomic, ok}, mnesia:sync_transaction(All)),
+ Nodes.
+
+%% Test cases
+dirty(suite) ->
+ [dirty_nice_ram_copies,
+ dirty_nice_disc_copies,
+ dirty_nice_disc_only_copies].
+
+dirty_nice_ram_copies(Setup) -> dirty_nice(Setup,ram_copies).
+dirty_nice_disc_copies(Setup) -> dirty_nice(Setup,disc_copies).
+dirty_nice_disc_only_copies(Setup) -> dirty_nice(Setup,disc_only_copies).
+
+dirty_nice(suite, _) -> [];
+dirty_nice(doc, _) -> [];
+dirty_nice(Config, Type) when is_list(Config) ->
+ Ns = init_testcases(Type,Config),
+ QA = handle(<<"[Q || Q = {_,{_,Key},Val} <- mnesia:table(a),"
+ " Val == 90 + Key]">>),
+ QB = handle(<<"[Q || Q = {_,{_,Key},Val} <- mnesia:table(b),"
+ " Key == 90 + Val]">>),
+ QC = qlc:sort(mnesia:table(a, [{n_objects,1}, {lock,write}, {traverse, select}])),
+ QD = qlc:sort(mnesia:table(a, [{n_objects,1}, {traverse,{select,[{'$1',[],['$1']}]}}])),
+
+ FA = fun() -> qlc:e(QA) end,
+ FB = fun() -> qlc:e(QB) end,
+ FC = fun() -> qlc:e(QC) end,
+ FD = fun() -> qlc:e(QD) end,
+
+ %% Currently unsupported
+ ?match({'EXIT',{aborted,no_transaction}}, FA()),
+ ?match({'EXIT',{aborted,no_transaction}}, FB()),
+ %%
+ CRes = lists:sort(mnesia:dirty_match_object(a, {'_','_','_'})),
+ ?match([{a,{a,5},95}], mnesia:async_dirty(FA)),
+ ?match([{b,{b,95},5}], mnesia:async_dirty(FB)),
+ ?match(CRes, mnesia:async_dirty(FC)),
+ ?match(CRes, mnesia:async_dirty(FD)),
+ ?match([{a,{a,5},95}], mnesia:sync_dirty(FA)),
+ ?match([{b,{b,95},5}], mnesia:sync_dirty(FB)),
+ ?match(CRes, mnesia:sync_dirty(FC)),
+ ?match([{a,{a,5},95}], mnesia:activity(async_dirty, FA)),
+ ?match([{b,{b,95},5}], mnesia:activity(async_dirty, FB)),
+ ?match([{a,{a,5},95}], mnesia:activity(sync_dirty, FA)),
+ ?match([{b,{b,95},5}], mnesia:activity(sync_dirty, FB)),
+ ?match(CRes, mnesia:activity(async_dirty,FC)),
+ case Type of
+ disc_only_copies -> skip;
+ _ ->
+ ?match([{a,{a,5},95}], mnesia:ets(FA)),
+ ?match([{a,{a,5},95}], mnesia:activity(ets, FA))
+ end,
+ ?verify_mnesia(Ns, []).
+
+trans(suite) ->
+ [trans_nice_ram_copies,
+ trans_nice_disc_copies,
+ trans_nice_disc_only_copies,
+ atomic
+ ].
+
+trans_nice_ram_copies(Setup) -> trans_nice(Setup,ram_copies).
+trans_nice_disc_copies(Setup) -> trans_nice(Setup,disc_copies).
+trans_nice_disc_only_copies(Setup) -> trans_nice(Setup,disc_only_copies).
+
+trans_nice(suite, _) -> [];
+trans_nice(doc, _) -> [];
+trans_nice(Config, Type) when is_list(Config) ->
+ Ns = init_testcases(Type,Config),
+ QA = handle(<<"[Q || Q = {_,{_,Key},Val} <- mnesia:table(a),"
+ " Val == 90 + Key]">>),
+ QB = handle(<<"[Q || Q = {_,{_,Key},Val} <- mnesia:table(b),"
+ " Key == 90 + Val]">>),
+ QC = handle(recs(),
+ <<"[Q || Q = #a{v=91} <- mnesia:table(a)]"
+ >>),
+
+ QD = qlc:sort(mnesia:table(a, [{n_objects,1}, {lock,write}, {traverse, select}])),
+ QE = qlc:sort(mnesia:table(a, [{n_objects,1}, {traverse,{select,[{'$1',[],['$1']}]}}])),
+
+ DRes = lists:sort(mnesia:dirty_match_object(a, {'_','_','_'})),
+
+ FA = fun() -> qlc:e(QA) end,
+ FB = fun() -> qlc:e(QB) end,
+ FC = fun() -> qlc:e(QC) end,
+ FD = fun() -> qlc:e(QD) end,
+ FE = fun() -> qlc:e(QE) end,
+
+ ?match({atomic,[{a,{a,5},95}]}, mnesia:transaction(FA)),
+ ?match({atomic,[{b,{b,95},5}]}, mnesia:transaction(FB)),
+ ?match({atomic,[{a,{a,9},91}]}, mnesia:transaction(FC)),
+ ?match({atomic,[{a,{a,5},95}]}, mnesia:sync_transaction(FA)),
+ ?match({atomic,[{b,{b,95},5}]}, mnesia:sync_transaction(FB)),
+ ?match({atomic,[{a,{a,9},91}]}, mnesia:sync_transaction(FC)),
+ ?match([{a,{a,5},95}], mnesia:activity(transaction,FA)),
+ ?match([{b,{b,95},5}], mnesia:activity(transaction,FB)),
+ ?match([{a,{a,9},91}], mnesia:activity(transaction,FC)),
+ ?match([{a,{a,5},95}], mnesia:activity(sync_transaction,FA)),
+ ?match([{b,{b,95},5}], mnesia:activity(sync_transaction,FB)),
+ ?match([{a,{a,9},91}], mnesia:activity(sync_transaction,FC)),
+
+ ?match({atomic, DRes}, mnesia:transaction(FD)),
+ ?match({atomic, DRes}, mnesia:transaction(FE)),
+
+ Rest = fun(Cursor,Loop) ->
+ case qlc:next_answers(Cursor, 1) of
+ [] -> [];
+ [A]-> [A|Loop(Cursor,Loop)]
+ end
+ end,
+ Loop = fun() ->
+ Cursor = qlc:cursor(QD),
+ Rest(Cursor,Rest)
+ end,
+ ?match({atomic, DRes}, mnesia:transaction(Loop)),
+
+ ?verify_mnesia(Ns, []).
+
+%% -record(a, {k,v}).
+%% -record(b, {k,v}).
+%% -record(k, {t,v}).
+
+recs() ->
+ <<"-record(a, {k,v}). "
+ "-record(b, {k,v}). "
+ "-record(k, {t,v}). "
+ >>.
+
+atomic(suite) -> [atomic_eval];
+atomic(doc) -> [].
+
+atomic_eval(suite) -> [];
+atomic_eval(doc) -> [];
+atomic_eval(Config) ->
+ Ns = init_testcases(ram_copies, Config),
+ Q1 = handle(recs(),
+ <<"[Q || Q = #a{k={_,9}} <- mnesia:table(a)]"
+ >>),
+ Eval = fun(Q) ->
+ {qlc:e(Q),
+ mnesia:system_info(held_locks)}
+ end,
+ Self = self(),
+ ?match({[{a,{a,9},91}], [{{a,'______WHOLETABLE_____'},read,{tid,_,Self}}]},
+ ok(Eval,[Q1])),
+
+ Q2 = handle(recs(),
+ <<"[Q || Q = #a{k={a,9}} <- mnesia:table(a)]"
+ >>),
+
+ ?match({[{a,{a,9},91}],[{{a,{a,9}},read,{tid,_,Self}}]},
+ ok(Eval,[Q2])),
+
+ Flush = fun(Loop) -> %% Clean queue
+ receive _ -> Loop(Loop)
+ after 0 -> ok end
+ end,
+
+ Flush(Flush),
+
+ GrabLock = fun(Father) ->
+ mnesia:read(a, {a,9}, write),
+ Father ! locked,
+ receive cont -> ok end end,
+
+ Pid1 = spawn(fun() -> ?match(ok, ok(GrabLock, [Self])) end),
+ ?match(locked,receive locked -> locked after 5000 -> timeout end), %% Wait
+
+ put(count,0),
+ Restart = fun(Locker,Fun) ->
+ Count = get(count),
+ case {Count,(catch Fun())} of
+ {0, {'EXIT', R}} ->
+ Locker ! cont,
+ put(count, Count+1),
+ erlang:yield(),
+ exit(R);
+ Else ->
+ Else
+ end
+ end,
+
+ ?match({1,{[{a,{a,9},91}], [{{a,'______WHOLETABLE_____'},read,{tid,_,Self}}]}},
+ ok(Restart,[Pid1,fun() -> Eval(Q1) end])),
+
+ Pid2 = spawn(fun() -> ?match(ok, ok(GrabLock, [Self])) end),
+ ?match(locked,receive locked -> locked after 5000 -> timeout end), %% Wait
+ put(count,0),
+ ?match({1,{[{a,{a,9},91}],[{{a,{a,9}},read,{tid,_,Self}}]}},
+ ok(Restart,[Pid2, fun() -> Eval(Q2) end])),
+
+    %% Basic test
+ Cursor = fun() ->
+ QC = qlc:cursor(Q1),
+ qlc:next_answers(QC)
+ end,
+
+ ?match([{a,{a,9},91}], ok(Cursor, [])),
+ %% Lock
+
+ Pid3 = spawn(fun() -> ?match(ok, ok(GrabLock, [Self])) end),
+ ?match(locked,receive locked -> locked after 5000 -> timeout end), %% Wait
+ put(count,0),
+
+ ?match({1,[{a,{a,9},91}]}, ok(Restart,[Pid3, Cursor])),
+ QC1 = ok(fun() -> qlc:cursor(Q1) end, []),
+ ?match({'EXIT', _}, qlc:next_answers(QC1)),
+    ?match({aborted,_}, ok(fun() -> qlc:next_answers(QC1) end, [])),
+ ?verify_mnesia(Ns, []).
+
+
+frag(suite) -> [];
+frag(doc) -> [];
+frag(Config) ->
+ Ns = init_testcases(ram_copies,Config),
+ QA = handle(<<"[Q || Q = {_,{_,Key},Val} <- mnesia:table(a),"
+ " Val == 90 + Key]">>),
+ QB = handle(<<"[Q || Q = {_,{_,Key},Val} <- mnesia:table(b),"
+ " Key == 90 + Val]">>),
+
+ Activate =
+ fun(Tab) ->
+ ?match({atomic,ok},mnesia:change_table_frag(Tab, {activate, []})),
+ Dist = mnesia_frag_test:frag_dist(Tab),
+ ?match({atomic,ok},mnesia:change_table_frag(Tab,{add_frag,Dist}))
+ end,
+ Activate(a),
+ Activate(b),
+
+ Fun = fun(Tab) -> mnesia:table_info(Tab, frag_names) end,
+ FTs = mnesia:activity(sync_dirty, Fun, [a], mnesia_frag) ++
+ mnesia:activity(sync_dirty, Fun, [b], mnesia_frag),
+ Size = fun(Tab) -> mnesia:dirty_rpc(Tab, mnesia, table_info, [Tab,size]) end,
+
+    %% Verify that the data is spread over all fragments, i.e. no fragment is empty.
+ ?match([], [{Tab,Size(Tab)} || Tab <- FTs,
+ Size(Tab) =< 0]),
+
+ FA = fun() -> qlc:e(QA) end,
+ FB = fun() -> qlc:e(QB) end,
+ ?match([{a,{a,5},95}], mnesia:activity(transaction,FA,[],mnesia_frag)),
+ ?match([{b,{b,95},5}], mnesia:activity(transaction,FB,[],mnesia_frag)),
+
+ ?verify_mnesia(Ns, []).
+
+info(suite) -> [];
+info(doc) -> [];
+info(Config) ->
+ Ns = init_testcases(ram_copies, Config),
+ Q1 = handle(recs(),
+ <<"[Q || Q = #a{k={_,9}} <- mnesia:table(a)]"
+ >>),
+
+ Q2 = handle(recs(),
+ <<"[Q || Q = #a{k={a,9}} <- mnesia:table(a)]"
+ >>),
+
+ Q3 = handle(recs(),
+ <<"[Q || Q = #a{v=91} <- mnesia:table(a)]"
+ >>),
+
+ %% FIXME compile and check results!
+
+ ?match(ok,io:format("~s~n",[qlc:info(Q1)])),
+ ?match(ok,io:format("~s~n",[qlc:info(Q2)])),
+ ?match(ok,io:format("~s~n",[qlc:info(Q3)])),
+
+ ?verify_mnesia(Ns, []).
+
+ok(Fun,A) ->
+ case mnesia:transaction(Fun,A) of
+ {atomic, R} -> R;
+ E -> E
+ end.
+
+
+mnesia_down(suite) -> [];
+mnesia_down(doc) ->
+    ["Test bug OTP-7968, which crashed mnesia when a "
+ "mnesia_down came after qlc had been invoked"];
+mnesia_down(Config) when is_list(Config) ->
+ [N1,N2] = init_testcases(ram_copies,Config),
+ QB = handle(<<"[Q || Q = {_,{_,Key},Val} <- mnesia:table(b),"
+ " Val == Key - 90]">>),
+
+ Tester = self(),
+
+ Eval = fun() ->
+ Cursor = qlc:cursor(QB), %% Forces another process
+ Res = qlc:next_answers(Cursor),
+ Tester ! {qlc, self(), Res},
+ {Mod, Tid, Ts} = get(mnesia_activity_state),
+ receive
+ continue ->
+ io:format("Continuing ~p ~p ~n",[self(), {Mod, Tid, Ts}]),
+ io:format("ETS ~p~n",[ets:tab2list(element(2,Ts))]),
+ io:format("~p~n",[process_info(self(),messages)]),
+ Res
+ end
+ end,
+ spawn(fun() -> TransRes = mnesia:transaction(Eval), Tester ! {test,TransRes} end),
+
+ TMInfo = fun() ->
+ TmInfo = mnesia_tm:get_info(5000),
+ mnesia_tm:display_info(user, TmInfo)
+ end,
+ receive
+ {qlc, QPid, QRes} ->
+ ?match([{b,{b,95},5}], QRes),
+ TMInfo(),
+ mnesia_test_lib:kill_mnesia([N2]),
+ %%timer:sleep(1000),
+ QPid ! continue
+ after 2000 ->
+ exit(timeout1)
+ end,
+
+ receive
+ {test, QRes2} ->
+ ?match({atomic, [{b,{b,95},5}]}, QRes2)
+ after 2000 ->
+ exit(timeout2)
+ end,
+
+ ?verify_mnesia([N1], [N2]).
+
+
+nested_qlc(suite) -> [];
+nested_qlc(doc) ->
+    ["Test bug in OTP-7968 (the second problem) where nested "
+     "transactions don't work as expected"];
+nested_qlc(Config) when is_list(Config) ->
+ Ns = init_testcases(ram_copies,Config),
+ Res = as_with_bs(),
+ ?match([_|_], Res),
+ top_as_with_some_bs(10),
+
+ ?verify_mnesia(Ns, []).
+
+
+%% Code from Daniel
+bs_by_a_id(A_id) ->
+ find(qlc:q([ B || B={_,_,F_id} <- mnesia:table(b), F_id == A_id])).
+
+as_with_bs() ->
+ find(qlc:q([ {A,bs_by_a_id(Id)} ||
+ A = {_, {a,Id}, _} <- mnesia:table(a)])).
+
+top_as_with_some_bs(Limit) ->
+ top(
+ qlc:q([ {A,bs_by_a_id(Id)} ||
+ A = {_, {a,Id}, _} <- mnesia:table(a)]),
+ Limit,
+ fun(A1,A2) -> A1 < A2 end
+ ).
+
+% --- utils
+
+find(Q) ->
+ F = fun() -> qlc:e(Q) end,
+ {atomic, Res} = mnesia:transaction(F),
+ Res.
+
+% --- Returns the top Limit results from query Q, ordered by the Order
+%     sort function (see the usage sketch after top/3 below)
+top(Q, Limit, Order) ->
+ Do = fun() ->
+ OQ = qlc:sort(Q, [{order,Order}]),
+ QC = qlc:cursor(OQ),
+ Res = qlc:next_answers(QC, Limit),
+ qlc:delete_cursor(QC),
+ Res
+ end,
+ {atomic, Res} = mnesia:transaction(Do),
+ Res.
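+
+% Usage sketch (illustrative, not part of the suite; the Limit of 5 and the
+% ordering fun are made-up values): pick the five "smallest" a-records
+% written by init_testcases/2 above.
+%
+%   Q = qlc:q([A || A = {_, {a, _}, _} <- mnesia:table(a)]),
+%   Top5 = top(Q, 5, fun(A1, A2) -> A1 =< A2 end).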
+
+%% To keep the mnesia suite backward compatible, we compile the
+%% queries at runtime when qlc is available.
+%% Compiles and returns a handle to a qlc query
+%% (see the usage sketch after handle/2 below).
+handle(Expr) ->
+ handle(<<>>,Expr).
+handle(Records,Expr) ->
+ case catch handle2(Records,Expr) of
+ {ok, Handle} ->
+ Handle;
+ Else ->
+ ?match(ok, Else)
+ end.
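+
+%% Usage sketch (illustrative; mirrors the calls in trans_nice/2 above):
+%% compile a query handle once, then evaluate it inside a transaction.
+%%
+%%   QH = handle(recs(), <<"[Q || Q = #a{v=91} <- mnesia:table(a)]">>),
+%%   {atomic, Recs} = mnesia:transaction(fun() -> qlc:e(QH) end).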
+
+handle2(Records,Expr) ->
+ {FN,Mod} = temp_name(),
+ ModStr = list_to_binary("-module(" ++ atom_to_list(Mod) ++ ").\n"),
+ Prog = <<
+ ModStr/binary,
+ "-include_lib(\"stdlib/include/qlc.hrl\").\n",
+ "-export([tmp/0]).\n",
+ Records/binary,"\n",
+ "tmp() ->\n",
+%% " _ = (catch throw(fvalue_not_reset)),"
+ " qlc:q( ",
+ Expr/binary,").\n">>,
+
+ ?match(ok,file:write_file(FN,Prog)),
+ {ok,Forms} = epp:parse_file(FN,"",""),
+ {ok,Mod,Bin} = compile:forms(Forms),
+ code:load_binary(Mod,FN,Bin),
+ {ok, Mod:tmp()}.
+
+setup(Config) ->
+ put(mts_config,Config),
+ put(mts_tf_counter,0).
+
+temp_name() ->
+ Conf = get(mts_config),
+ C = get(mts_tf_counter),
+ put(mts_tf_counter,C+1),
+ {filename:join([proplists:get_value(priv_dir,Conf, "."),
+ "tempfile"++integer_to_list(C)++".tmp"]),
+ list_to_atom("tmp" ++ integer_to_list(C))}.
diff --git a/lib/mnesia/test/mnesia_recovery_test.erl b/lib/mnesia/test/mnesia_recovery_test.erl
new file mode 100644
index 0000000000..f6ecf2ce2e
--- /dev/null
+++ b/lib/mnesia/test/mnesia_recovery_test.erl
@@ -0,0 +1,1701 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(mnesia_recovery_test).
+-author('[email protected]').
+-compile([export_all]).
+
+-include("mnesia_test_lib.hrl").
+-include_lib("kernel/include/file.hrl").
+
+init_per_testcase(Func, Conf) ->
+ mnesia_test_lib:init_per_testcase(Func, Conf).
+
+fin_per_testcase(Func, Conf) ->
+ mnesia_test_lib:fin_per_testcase(Func, Conf).
+
+-define(receive_messages(Msgs), receive_messages(Msgs, ?FILE, ?LINE)).
+
+% First, some debug logging
+-define(dgb, true).
+-ifdef(dgb).
+-define(dl(X, Y), ?verbose("**TRACING: " ++ X ++ "**~n", Y)).
+-else.
+-define(dl(X, Y), ok).
+-endif.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+all(doc) ->
+ ["Verify recoverability",
+ "Verify that the effects of committed transactions are preserved",
+ "after recovery from system failures. It must be possible to",
+ "restore the tables to a consistent state on a node, from (any kind",
+ "of) replica on other nodes as well as from local disk on the failed",
+ "node. The system must also recover from instantaneous",
+ "interruption causing disk files to not be completely synchronized."];
+
+all(suite) ->
+ [
+ mnesia_down,
+ explicit_stop,
+ coord_dies,
+ schema_trans,
+ async_dirty,
+ sync_dirty,
+ sym_trans,
+ asym_trans,
+ after_full_disc_partition,
+ after_corrupt_files,
+ disc_less,
+ garb_decision,
+ system_upgrade
+ ].
+
+schema_trans(suite) ->
+ [{mnesia_schema_recovery_test, all}].
+
+tpcb_config(ReplicaType, _NodeConfig, Nodes) ->
+ [{n_branches, 5},
+ {n_drivers_per_node, 5},
+ {replica_nodes, Nodes},
+ {driver_nodes, Nodes},
+ {use_running_mnesia, true},
+ {report_interval, infinity},
+ {n_accounts_per_branch, 20},
+ {replica_type, ReplicaType}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+mnesia_down(doc) ->
+    ["Various tests of recovery when mnesia goes down on one or several nodes."];
+mnesia_down(suite) ->
+ [
+ mnesia_down_during_startup,
+ master_node_tests,
+ read_during_down,
+ with_checkpoint,
+ delete_during_start
+ ].
+
+master_node_tests(doc) ->
+    ["Verify that mnesia loads the correct data after it has been down, with respect to the master node settings."];
+master_node_tests(suite) ->
+ [
+ no_master_2,
+ no_master_3,
+ one_master_2,
+ one_master_3,
+ two_master_2,
+ two_master_3,
+ all_master_2,
+ all_master_3
+ ].
+
+no_master_2(suite) -> [];
+no_master_2(Config) when is_list(Config) -> mnesia_down_2(no, Config).
+
+no_master_3(suite) -> [];
+no_master_3(Config) when is_list(Config) -> mnesia_down_3(no, Config).
+
+one_master_2(suite) -> [];
+one_master_2(Config) when is_list(Config) -> mnesia_down_2(one, Config).
+
+one_master_3(suite) -> [];
+one_master_3(Config) when is_list(Config) -> mnesia_down_3(one, Config).
+
+two_master_2(suite) -> [];
+two_master_2(Config) when is_list(Config) -> mnesia_down_2(two, Config).
+
+two_master_3(suite) -> [];
+two_master_3(Config) when is_list(Config) -> mnesia_down_3(two, Config).
+
+all_master_2(suite) -> [];
+all_master_2(Config) when is_list(Config) -> mnesia_down_2(all, Config).
+
+all_master_3(suite) -> [];
+all_master_3(Config) when is_list(Config) -> mnesia_down_3(all, Config).
+
+mnesia_down_2(Masters, Config) ->
+ Nodes = [N1, N2] = ?acquire_nodes(2, Config),
+ ?match({atomic, ok}, mnesia:create_table(tab1, [{ram_copies, Nodes}])),
+ ?match({atomic, ok}, mnesia:create_table(tab2, [{disc_copies, Nodes}])),
+ ?match({atomic, ok}, mnesia:create_table(tab3, [{disc_only_copies, Nodes}])),
+ ?match({atomic, ok}, mnesia:create_table(tab4, [{ram_copies, [N1]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab5, [{ram_copies, [N2]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab6, [{disc_copies, [N1]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab7, [{disc_copies, [N2]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab8, [{disc_only_copies, [N1]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab9, [{disc_only_copies, [N2]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab10, [{ram_copies, [N1]}, {disc_copies, [N2]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab11, [{ram_copies, [N2]}, {disc_copies, [N1]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab12, [{ram_copies, [N1]}, {disc_only_copies, [N2]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab13, [{ram_copies, [N2]}, {disc_only_copies, [N1]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab14, [{disc_only_copies, [N1]}, {disc_copies, [N2]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab15, [{disc_only_copies, [N2]}, {disc_copies, [N1]}])),
+
+ Tabs = [tab1, tab2, tab3, tab4, tab5, tab6, tab7, tab8,
+ tab9, tab10, tab11, tab12, tab13, tab14, tab15],
+ [?match(ok, rpc:call(Node, mnesia, wait_for_tables, [Tabs, 10000])) || Node <- Nodes],
+ [insert_data(Tab, 20) || Tab <- Tabs],
+
+ VTabs =
+ case Masters of
+ no ->
+ Tabs -- [tab4, tab5]; % ram copies
+ one ->
+ ?match(ok, rpc:call(N1, mnesia, set_master_nodes, [[N1]])),
+ Tabs -- [tab1, tab4, tab5, tab10, tab12]; % ram_copies
+ two ->
+ ?match(ok, rpc:call(N1, mnesia, set_master_nodes, [Nodes])),
+ Tabs -- [tab4, tab5];
+ all ->
+ [?match(ok, rpc:call(Node, mnesia, set_master_nodes, [[Node]])) || Node <- Nodes],
+ Tabs -- [tab1, tab4, tab5, tab10, tab11, tab12, tab13]
+ end,
+
+ mnesia_test_lib:kill_mnesia([N1]),
+ ?match([], mnesia_test_lib:start_mnesia(Nodes, Tabs)),
+
+ ?match([], mnesia_test_lib:kill_mnesia([N2])),
+ ?match([], mnesia_test_lib:start_mnesia(Nodes, Tabs)),
+
+ [?match(ok, rpc:call(N1, ?MODULE, verify_data, [Tab, 20])) || Tab <- VTabs],
+ [?match(ok, rpc:call(N2, ?MODULE, verify_data, [Tab, 20])) || Tab <- VTabs],
+ ?verify_mnesia(Nodes, []).
+
+mnesia_down_3(Masters, Config) ->
+ Nodes = [N1, N2, N3] = ?acquire_nodes(3, Config),
+ ?match({atomic, ok}, mnesia:create_table(tab1, [{ram_copies, Nodes}])),
+ ?match({atomic, ok}, mnesia:create_table(tab2, [{disc_copies, Nodes}])),
+ ?match({atomic, ok}, mnesia:create_table(tab3, [{disc_only_copies, Nodes}])),
+ ?match({atomic, ok}, mnesia:create_table(tab4, [{ram_copies, [N1]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab5, [{ram_copies, [N2]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab16, [{ram_copies, [N3]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab6, [{disc_copies, [N1]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab7, [{disc_copies, [N2]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab17, [{disc_copies, [N3]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab8, [{disc_only_copies, [N1]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab9, [{disc_only_copies, [N2]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab18, [{disc_only_copies, [N3]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab10, [{ram_copies, [N1]}, {disc_copies, [N2, N3]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab11, [{ram_copies, [N2]}, {disc_copies, [N3, N1]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab19, [{ram_copies, [N3]}, {disc_copies, [N1, N2]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab12, [{ram_copies, [N1]}, {disc_only_copies, [N2, N3]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab13, [{ram_copies, [N2]}, {disc_only_copies, [N3, N1]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab20, [{ram_copies, [N3]}, {disc_only_copies, [N1, N2]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab14, [{disc_only_copies, [N1]}, {disc_copies, [N2, N3]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab15, [{disc_only_copies, [N2]}, {disc_copies, [N3, N1]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab21, [{disc_only_copies, [N3]}, {disc_copies, [N1, N2]}])),
+
+ Tabs = [tab1, tab2, tab3, tab4, tab5, tab6, tab7, tab8,
+ tab9, tab10, tab11, tab12, tab13, tab14, tab15,
+ tab16, tab17, tab18, tab19, tab20, tab21],
+ [?match(ok, rpc:call(Node, mnesia, wait_for_tables, [Tabs, 10000])) || Node <- Nodes],
+ [insert_data(Tab, 20) || Tab <- Tabs],
+
+ VTabs =
+ case Masters of
+ no ->
+ Tabs -- [tab4, tab5, tab16]; % ram copies
+ one ->
+ ?match(ok, rpc:call(N1, mnesia, set_master_nodes, [[N1]])),
+ Tabs -- [tab1, tab4, tab5, tab16, tab10, tab12]; % ram copies
+ two ->
+ ?match(ok, rpc:call(N1, mnesia, set_master_nodes, [Nodes])),
+ Tabs -- [tab4, tab5, tab16]; % ram copies
+ all ->
+ [?match(ok, rpc:call(Node, mnesia, set_master_nodes, [[Node]])) || Node <- Nodes],
+ Tabs -- [tab1, tab4, tab5, tab16, tab10,
+ tab11, tab19, tab12, tab13, tab20] % ram copies
+ end,
+
+ mnesia_test_lib:kill_mnesia([N1]),
+ ?match([], mnesia_test_lib:start_mnesia(Nodes, Tabs)),
+
+ ?match([], mnesia_test_lib:kill_mnesia([N2])),
+ ?match([], mnesia_test_lib:start_mnesia(Nodes, Tabs)),
+
+ ?match([], mnesia_test_lib:kill_mnesia([N3])),
+ ?match([], mnesia_test_lib:start_mnesia(Nodes, Tabs)),
+
+ ?match([], mnesia_test_lib:kill_mnesia([N2, N1])),
+ ?match([], mnesia_test_lib:start_mnesia(Nodes, Tabs)),
+
+ ?match([], mnesia_test_lib:kill_mnesia([N2, N3])),
+ ?match([], mnesia_test_lib:start_mnesia(Nodes, Tabs)),
+
+ ?match([], mnesia_test_lib:kill_mnesia([N1, N3])),
+ ?match([], mnesia_test_lib:start_mnesia(Nodes, Tabs)),
+
+ [?match(ok, rpc:call(N1, ?MODULE, verify_data, [Tab, 20])) || Tab <- VTabs],
+ [?match(ok, rpc:call(N2, ?MODULE, verify_data, [Tab, 20])) || Tab <- VTabs],
+ [?match(ok, rpc:call(N3, ?MODULE, verify_data, [Tab, 20])) || Tab <- VTabs],
+
+ ?verify_mnesia(Nodes, []).
+
+
+read_during_down(doc) ->
+    ["Verify that read operations can continue when mnesia goes down"];
+read_during_down(suite) ->
+ [
+ dirty_read_during_down,
+ trans_read_during_down
+ ].
+
+dirty_read_during_down(suite) ->
+ [];
+dirty_read_during_down(Config) when is_list(Config) ->
+ read_during_down(dirty, Config).
+
+trans_read_during_down(suite) ->
+ [];
+trans_read_during_down(Config) when is_list(Config) ->
+ read_during_down(trans, Config).
+
+
+read_during_down(Op, Config) when is_list(Config) ->
+ Ns = [N1|TNs] = ?acquire_nodes(3, Config),
+ Tabs = [ram, disc, disco],
+
+ ?match({atomic, ok}, mnesia:create_table(ram, [{ram_copies, TNs}])),
+ ?match({atomic, ok}, mnesia:create_table(disc, [{disc_copies, TNs}])),
+ ?match({atomic, ok}, mnesia:create_table(disco, [{disc_only_copies, TNs}])),
+
+ %% Create some work for mnesia_controller when a node goes down
+ [{atomic, ok} = mnesia:create_table(list_to_atom("temp" ++ integer_to_list(N)),
+ [{ram_copies, Ns}]) || N <- lists:seq(1, 50)],
+
+ Write = fun(Tab) -> mnesia:write({Tab, key, val}) end,
+ ?match([ok,ok,ok],
+ [mnesia:sync_dirty(Write, [Tab]) || Tab <- Tabs]),
+
+ Readers = [spawn_link(N1, ?MODULE, reader, [Tab, Op]) || Tab <- Tabs],
+ [_|_] = W2R= [mnesia:table_info(Tab, where_to_read) || Tab <- Tabs],
+ ?log("W2R ~p~n", [W2R]),
+ loop_and_kill_mnesia(10, hd(W2R), Tabs),
+ [Pid ! self() || Pid <- Readers],
+ ?match([ok, ok, ok], [receive ok -> ok after 1000 -> {Pid, mnesia_lib:dist_coredump()} end || Pid <- Readers]),
+ ?verify_mnesia(Ns, []).
+
+reader(Tab, OP) ->
+ Res = case OP of
+ dirty ->
+ catch mnesia:dirty_read({Tab, key});
+ trans ->
+ Read = fun() -> mnesia:read({Tab, key}) end,
+ {_, Temp} = mnesia:transaction(Read),
+ Temp
+ end,
+ case Res of
+ [{Tab, key, val}] -> ok;
+ Else ->
+ ?error("Expected ~p Got ~p ~n", [[{Tab, key, val}], Else]),
+ erlang:error(test_failed)
+ end,
+ receive Pid ->
+ Pid ! ok
+ after 50 ->
+ reader(Tab, OP)
+ end.
+
+loop_and_kill_mnesia(0, _Node, _Tabs) -> ok;
+loop_and_kill_mnesia(N, Node, Tabs) ->
+ mnesia_test_lib:kill_mnesia([Node]),
+ timer:sleep(100),
+ ?match([], mnesia_test_lib:start_mnesia([Node], Tabs)),
+ [KN | _] = W2R= [mnesia:table_info(Tab, where_to_read) || Tab <- Tabs],
+ ?match([KN, KN,KN], W2R),
+ timer:sleep(100),
+ loop_and_kill_mnesia(N-1, KN, Tabs).
+
+mnesia_down_during_startup(doc) ->
+ ["Verify that mnesia can come back up again in a consistent state",
+     "after it has gone down during startup, with different storage types",
+     "and with the shutdown happening in different situations"];
+mnesia_down_during_startup(suite) ->
+ [
+ mnesia_down_during_startup_disk_ram,
+ mnesia_down_during_startup_init_ram,
+ mnesia_down_during_startup_init_disc,
+ mnesia_down_during_startup_init_disc_only,
+ mnesia_down_during_startup_tm_ram,
+ mnesia_down_during_startup_tm_disc,
+ mnesia_down_during_startup_tm_disc_only
+ ].
+
+mnesia_down_during_startup_disk_ram(suite) -> [];
+mnesia_down_during_startup_disk_ram(Config) when is_list(Config)->
+ [Node1, Node2] = ?acquire_nodes(2, Config ++
+ [{tc_timeout, timer:minutes(2)}]),
+ Tab = down_during_startup,
+ Def = [{ram_copies, [Node2]}, {disc_copies, [Node1]}],
+
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ ?match(ok, mnesia:dirty_write({Tab, 876234, test_ok})),
+ timer:sleep(500),
+ mnesia_test_lib:kill_mnesia([Node1, Node2]),
+ timer:sleep(500),
+ mnesia_test_lib:start_mnesia([Node1, Node2], [Tab]),
+ mnesia_test_lib:kill_mnesia([Node1]),
+ timer:sleep(500),
+ ?match([], mnesia_test_lib:start_mnesia([Node1], [Tab])),
+ ?match([{Tab, 876234, test_ok}], mnesia:dirty_read({Tab,876234})),
+ ?verify_mnesia([Node1, Node2], []).
+
+mnesia_down_during_startup_init_ram(suite) -> [];
+mnesia_down_during_startup_init_ram(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ DP = {mnesia_loader, do_get_network_copy},
+ Type = ram_copies,
+ mnesia_down_during_startup2(Config, Type, DP, self()).
+
+mnesia_down_during_startup_init_disc(suite) -> [];
+mnesia_down_during_startup_init_disc(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ DP = {mnesia_loader, do_get_network_copy},
+ Type = disc_copies,
+ mnesia_down_during_startup2(Config, Type, DP, self()).
+
+mnesia_down_during_startup_init_disc_only(suite) -> [];
+mnesia_down_during_startup_init_disc_only(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ DP = {mnesia_loader, do_get_network_copy},
+ Type = disc_only_copies,
+ mnesia_down_during_startup2(Config, Type, DP, self()).
+
+mnesia_down_during_startup_tm_ram(suite) -> [];
+mnesia_down_during_startup_tm_ram(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ DP = {mnesia_tm, init},
+ Type = ram_copies,
+ mnesia_down_during_startup2(Config, Type, DP, self()).
+
+mnesia_down_during_startup_tm_disc(suite) -> [];
+mnesia_down_during_startup_tm_disc(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ DP = {mnesia_tm, init},
+ Type = disc_copies,
+ mnesia_down_during_startup2(Config, Type, DP, self()).
+
+mnesia_down_during_startup_tm_disc_only(suite) -> [];
+mnesia_down_during_startup_tm_disc_only(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ DP = {mnesia_tm, init},
+ Type = disc_only_copies,
+ mnesia_down_during_startup2(Config, Type, DP, self()).
+
+mnesia_down_during_startup2(Config, ReplicaType, Debug_Point, _Father) ->
+ ?log("TC~n mnesia_down_during_startup with type ~w and stops at ~w~n",
+ [ReplicaType, Debug_Point]),
+ Tpcb_tabs = [history,teller,account,branch],
+ Nodes = ?acquire_nodes(2, Config),
+ Node1 = hd(Nodes),
+ {success, [A]} = ?start_activities([Node1]),
+ TpcbConfig = tpcb_config(ReplicaType, 2, Nodes),
+ mnesia_tpcb:init(TpcbConfig),
+ A ! fun () -> mnesia_tpcb:run(TpcbConfig) end,
+ ?match_receive(timeout),
+ timer:sleep(timer:seconds(10)), % Let tpcb run for a while
+ mnesia_tpcb:stop(),
+ ?match(ok, mnesia_tpcb:verify_tabs()),
+ mnesia_test_lib:kill_mnesia([Node1]),
+ timer:sleep(timer:seconds(2)),
+ Self = self(),
+ TestFun = fun(_MnesiaEnv, _EvalEnv) ->
+ ?deactivate_debug_fun(Debug_Point),
+ Self ! fun_done,
+ spawn(mnesia_test_lib, kill_mnesia, [[Node1]])
+ end,
+ ?activate_debug_fun(Debug_Point, TestFun, []), % Kill when debug has been reached
+ mnesia:start(),
+ Res = receive fun_done -> ok after timer:minutes(3) -> timeout end, % Wait till it's killed
+ ?match(ok, Res),
+ ?match(ok, timer:sleep(timer:seconds(2))), % Wait a while, at least till it dies;
+ ?match([], mnesia_test_lib:start_mnesia([Node1], Tpcb_tabs)),
+ ?match(ok, mnesia_tpcb:verify_tabs()), % Verify it
+ ?verify_mnesia(Nodes, []).
+
+
+with_checkpoint(doc) ->
+ ["Restart mnesia with checkpoint"];
+with_checkpoint(suite) ->
+ [with_checkpoint_same, with_checkpoint_other].
+
+with_checkpoint_same(suite) -> [];
+with_checkpoint_same(Config) when is_list(Config) ->
+ with_checkpoint(Config, same).
+
+with_checkpoint_other(suite) -> [];
+with_checkpoint_other(Config) when is_list(Config) ->
+ with_checkpoint(Config, other).
+
+with_checkpoint(Config, Type) when is_list(Config) ->
+ Nodes = [Node1, Node2] = ?acquire_nodes(2, Config),
+ Kill = case Type of
+ same -> %% Node1 is the one used for creating the checkpoint
+ Node1; %% and which we bring down
+ other ->
+ Node2 %% Here we bring node2 down..
+ end,
+
+ ?match({atomic, ok}, mnesia:create_table(ram, [{ram_copies, Nodes}])),
+ ?match({atomic, ok}, mnesia:create_table(disc, [{disc_copies, Nodes}])),
+ ?match({atomic, ok}, mnesia:create_table(disco, [{disc_only_copies, Nodes}])),
+ Tabs = [ram, disc, disco],
+
+ ?match({ok, sune, _}, mnesia:activate_checkpoint([{name, sune},
+ {max, mnesia:system_info(tables)},
+ {ram_overrides_dump, true}])),
+
+ ?match([], check_retainers(sune, Nodes)),
+
+ ?match(ok, mnesia:deactivate_checkpoint(sune)),
+ ?match([], check_chkp(Nodes)),
+
+    timer:sleep(500), %% Just to help debugging: the io:formats now come in the
+ %% correct order... :-)
+
+ ?match({ok, sune, _}, mnesia:activate_checkpoint([{name, sune},
+ {max, mnesia:system_info(tables)},
+ {ram_overrides_dump, true}])),
+
+ [[mnesia:dirty_write({Tab,Key,Key}) || Key <- lists:seq(1,10)] || Tab <- Tabs],
+
+ mnesia_test_lib:kill_mnesia([Kill]),
+ timer:sleep(100),
+ mnesia_test_lib:start_mnesia([Kill], Tabs),
+ io:format("Mnesia on ~p started~n", [Kill]),
+ ?match([], check_retainers(sune, Nodes)),
+ ?match(ok, mnesia:deactivate_checkpoint(sune)),
+ ?match([], check_chkp(Nodes)),
+
+ case Kill of
+ Node1 ->
+ ignore;
+ Node2 ->
+ mnesia_test_lib:kill_mnesia([Kill]),
+ timer:sleep(500), %% Just to help debugging
+ ?match({ok, sune, _}, mnesia:activate_checkpoint([{name, sune},
+ {max, mnesia:system_info(tables)},
+ {ram_overrides_dump, true}])),
+
+ [[mnesia:dirty_write({Tab,Key,Key+2}) || Key <- lists:seq(1,10)] ||
+ Tab <- Tabs],
+
+ mnesia_test_lib:start_mnesia([Kill], Tabs),
+ io:format("Mnesia on ~p started ~n", [Kill]),
+ ?match([], check_retainers(sune, Nodes)),
+ ?match(ok, mnesia:deactivate_checkpoint(sune)),
+ ?match([], check_chkp(Nodes)),
+ ok
+ end,
+ ?verify_mnesia(Nodes, []).
+
+check_chkp(Nodes) ->
+ {Good, Bad} = rpc:multicall(Nodes, ?MODULE, check, []),
+ lists:flatten(Good ++ Bad).
+
+check() ->
+ [PCP] = ets:match_object(mnesia_gvar, {pending_checkpoint_pids, '_'}),
+ [PC] = ets:match_object(mnesia_gvar, {pending_checkpoints, '_'}),
+ [CPN] = ets:match_object(mnesia_gvar, {checkpoints, '_'}),
+ F = lists:filter(fun({_, []}) -> false; (_W) -> true end,
+ [PCP,PC,CPN]),
+ CPP = ets:match_object(mnesia_gvar, {{checkpoint, '_'}, '_'}),
+ Rt = ets:match_object(mnesia_gvar, {{'_', {retainer, '_'}}, '_'}),
+ F ++ CPP ++ Rt.
+
+
+check_retainers(CHP, Nodes) ->
+ {[R1,R2], []} = rpc:multicall(Nodes, ?MODULE, get_all_retainers, [CHP]),
+ (R1 -- R2) ++ (R2 -- R1).
+
+get_all_retainers(CHP) ->
+ Tabs = mnesia:system_info(local_tables),
+ Iter = fun(Tab) ->
+ {ok, Res} =
+ mnesia_checkpoint:iterate(CHP, Tab, fun(R, A) -> [R|A] end, [],
+ retainer, checkpoint),
+%% io:format("Retainer content ~w ~n", [Res]),
+ Res
+ end,
+ Elements = [Iter(Tab) || Tab <- Tabs],
+ lists:sort(lists:flatten(Elements)).
+
+delete_during_start(doc) ->
+    ["Test that tables can be deleted during start, hopefully with tables"
+ " in the loader queue or soon to be"];
+delete_during_start(suite) -> [];
+delete_during_start(Config) when is_list(Config) ->
+ [N1, N2, N3] = Nodes = ?acquire_nodes(3, Config),
+ Tabs = [list_to_atom("tab" ++ integer_to_list(I)) || I <- lists:seq(1, 30)],
+ ?match({atomic, ok}, mnesia:change_table_copy_type(schema, N2, ram_copies)),
+ ?match({atomic, ok}, mnesia:change_table_copy_type(schema, N3, ram_copies)),
+
+ [?match({atomic, ok},mnesia:create_table(Tab, [{ram_copies,Nodes}])) || Tab <- Tabs],
+ lists:foldl(fun(Tab, I) ->
+ ?match({atomic, ok},
+ mnesia:change_table_load_order(Tab,I)),
+ I+1
+ end, 1, Tabs),
+ mnesia_test_lib:kill_mnesia([N2,N3]),
+%% timer:sleep(500),
+ ?match({[ok,ok],[]}, rpc:multicall([N2,N3], mnesia,start,
+ [[{extra_db_nodes,[N1]}]])),
+ [Tab1,Tab2,Tab3|_] = Tabs,
+ ?match({atomic, ok}, mnesia:delete_table(Tab1)),
+ ?match({atomic, ok}, mnesia:delete_table(Tab2)),
+
+ ?log("W4T ~p~n", [rpc:multicall([N2,N3], mnesia, wait_for_tables, [[Tab1,Tab2,Tab3],1])]),
+
+ Remain = Tabs--[Tab1,Tab2],
+ ?match(ok, rpc:call(N2, mnesia, wait_for_tables, [Remain,10000])),
+ ?match(ok, rpc:call(N3, mnesia, wait_for_tables, [Remain,10000])),
+
+ ?match(ok, rpc:call(N2, ?MODULE, verify_where2read, [Remain])),
+ ?match(ok, rpc:call(N3, ?MODULE, verify_where2read, [Remain])),
+
+ ?verify_mnesia(Nodes, []).
+
+verify_where2read([Tab|Tabs]) ->
+ true = (node() == mnesia:table_info(Tab,where_to_read)),
+ verify_where2read(Tabs);
+verify_where2read([]) -> ok.
+
+
+%%-------------------------------------------------------------------------------------------
+explicit_stop(doc) ->
+ ["Stop Mnesia in different situations"];
+explicit_stop(suite) ->
+ [explicit_stop_during_snmp].
+%% This is a bad implementation, but at least it gives an indication if something is wrong
+explicit_stop_during_snmp(suite) -> [];
+explicit_stop_during_snmp(Config) when is_list(Config) ->
+ Nodes = ?acquire_nodes(2, Config),
+ [Node1, Node2] = Nodes,
+ Tab = snmp_tab,
+ Def = [{attributes, [key, value]},
+ {snmp, [{key, integer}]},
+ {mnesia_test_lib:storage_type(disc_copies, Config),
+ [Node1, Node2]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write({Tab, 1, 1}) end)),
+
+ Do_trans_Pid1 = spawn_link(Node2, ?MODULE, do_trans_loop, [Tab, self()]),
+ Do_trans_Pid2 = spawn_link(?MODULE, do_trans_loop, [Tab, self()]),
+ Start_stop_Pid = spawn_link(?MODULE, start_stop, [Node1, 10, self()]),
+ receive
+ test_done ->
+ ok
+ after timer:minutes(5) ->
+ ?error("test case time out~n", [])
+ end,
+ ?verify_mnesia(Nodes, []),
+ exit(Do_trans_Pid1, kill),
+ exit(Do_trans_Pid2, kill),
+ exit(Start_stop_Pid, kill),
+ ok.
+
+do_trans_loop(Tab, Father) ->
+ %% Do not trap exit
+ do_trans_loop2(Tab, Father).
+do_trans_loop2(Tab, Father) ->
+ Trans =
+ fun() ->
+ [{Tab, 1, Val}] = mnesia:read({Tab, 1}),
+ mnesia:write({Tab, 1, Val + 1})
+ end,
+ case mnesia:transaction(Trans) of
+ {atomic, ok} ->
+ timer:sleep(200),
+ do_trans_loop2(Tab, Father);
+ {aborted, {node_not_running, N}} when N == node() ->
+ timer:sleep(200),
+ do_trans_loop2(Tab, Father);
+ {aborted, {no_exists, Tab}} ->
+ timer:sleep(200),
+ do_trans_loop2(Tab, Father);
+ Else ->
+ ?error("Transaction failed: ~p ~n", [Else]),
+ Father ! test_done,
+ exit(shutdown)
+ end.
+
+start_stop(_Node1, 0, Father) ->
+ Father ! test_done,
+ exit(shutdown);
+start_stop(Node1, N, Father) when N > 0->
+ timer:sleep(timer:seconds(5)),
+ ?match(stopped, rpc:call(Node1, mnesia, stop, [])),
+ timer:sleep(timer:seconds(2)),
+ ?match([], mnesia_test_lib:start_mnesia([Node1])),
+ start_stop(Node1, N-1, Father).
+
+coord_dies(suite) -> [];
+coord_dies(doc) -> ["Verify that ongoing transactions are aborted when the coordinator process dies"];
+coord_dies(Config) when is_list(Config) ->
+ Nodes = [N1, N2] = ?acquire_nodes(2, Config),
+ ?match({atomic, ok}, mnesia:create_table(tab1, [{ram_copies, Nodes}])),
+ ?match({atomic, ok}, mnesia:create_table(tab2, [{ram_copies, [N1]}])),
+ ?match({atomic, ok}, mnesia:create_table(tab3, [{ram_copies, [N2]}])),
+ Tester = self(),
+
+ U1 = fun(Tab) ->
+ [{Tab,key,Val}] = mnesia:read(Tab,key,write),
+ mnesia:write({Tab,key, Val+1}),
+ Tester ! {self(),continue},
+ receive
+ continue -> exit(crash)
+ end
+ end,
+ U2 = fun(Tab) ->
+ [{Tab,key,Val}] = mnesia:read(Tab,key,write),
+ mnesia:write({Tab,key, Val+1}),
+ mnesia:transaction(U1, [Tab])
+ end,
+ [mnesia:dirty_write(Tab,{Tab,key,0}) || Tab <- [tab1,tab2,tab3]],
+ Pid1 = spawn(fun() -> mnesia:transaction(U2, [tab1]) end),
+ Pid2 = spawn(fun() -> mnesia:transaction(U2, [tab2]) end),
+ Pid3 = spawn(fun() -> mnesia:transaction(U2, [tab3]) end),
+ [receive {Pid,continue} -> ok end || Pid <- [Pid1,Pid2,Pid3]],
+ Pid1 ! continue, Pid2 ! continue, Pid3 ! continue,
+ ?match({atomic,[{_,key,1}]}, mnesia:transaction(fun() -> mnesia:read({tab1,key}) end)),
+ ?match({atomic,[{_,key,1}]}, mnesia:transaction(fun() -> mnesia:read({tab2,key}) end)),
+ ?match({atomic,[{_,key,1}]}, mnesia:transaction(fun() -> mnesia:read({tab3,key}) end)),
+
+ Pid4 = spawn(fun() -> mnesia:transaction(U2, [tab1]) end),
+ Pid5 = spawn(fun() -> mnesia:transaction(U2, [tab2]) end),
+ Pid6 = spawn(fun() -> mnesia:transaction(U2, [tab3]) end),
+ erlang:monitor(process, Pid4),erlang:monitor(process, Pid5),erlang:monitor(process, Pid6),
+
+ [receive {Pid,continue} -> ok end || Pid <- [Pid4,Pid5,Pid6]],
+ exit(Pid4,crash),
+ ?match_receive({'DOWN',_,_,Pid4, _}),
+ ?match({atomic,[{_,key,1}]}, mnesia:transaction(fun() -> mnesia:read({tab1,key}) end)),
+ exit(Pid5,crash),
+ ?match_receive({'DOWN',_,_,Pid5, _}),
+ ?match({atomic,[{_,key,1}]}, mnesia:transaction(fun() -> mnesia:read({tab2,key}) end)),
+ exit(Pid6,crash),
+ ?match_receive({'DOWN',_,_,Pid6, _}),
+ ?match({atomic,[{_,key,1}]}, mnesia:transaction(fun() -> mnesia:read({tab3,key}) end)),
+
+ ?verify_mnesia(Nodes, []).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+sym_trans(doc) ->
+ ["Recovery of symmetrical transactions in a couple of different",
+     "situations; when a coordinator or participant process or node dies"];
+
+sym_trans(suite) ->
+ [sym_trans_before_commit_kill_coord_node, %% coordinator node dies
+ sym_trans_before_commit_kill_coord_pid, %% coordinator process dies
+ sym_trans_before_commit_kill_part_after_ask, %% participating node dies
+ sym_trans_before_commit_kill_part_before_ask,
+ sym_trans_after_commit_kill_coord_node,
+ sym_trans_after_commit_kill_coord_pid,
+ sym_trans_after_commit_kill_part_after_ask,
+ sym_trans_after_commit_kill_part_do_commit_pre,
+ sym_trans_after_commit_kill_part_do_commit_post].
+
+%kill_after_debug_point(Kill, {DebugNode, DebugPoint}, TransFun, TabDefs, Nodes)
+
+sym_trans_before_commit_kill_coord_node(suite) -> [];
+sym_trans_before_commit_kill_coord_node(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab = sym_trans_before_commit_kill_coord,
+ Def = [{attributes, [key, value]}, {ram_copies, [Part2]},{disc_copies, [Coord, Part1]}],
+ kill_after_debug_point(Coord, {Coord, {mnesia_tm, multi_commit_sym}},
+ do_sym_trans, [{Tab, Def}], Nodes).
+
+sym_trans_before_commit_kill_coord_pid(suite) -> [];
+sym_trans_before_commit_kill_coord_pid(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab = sym_trans_before_commit_kill_coord,
+ Def = [{attributes, [key, value]},{ram_copies, [Part2]},{disc_copies, [Coord, Part1]}],
+ kill_after_debug_point(coord_pid, {Coord, {mnesia_tm, multi_commit_sym}},
+ do_sym_trans, [{Tab, Def}], Nodes).
+
+sym_trans_before_commit_kill_part_after_ask(suite) -> [];
+sym_trans_before_commit_kill_part_after_ask(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab = sym_trans_before_commit_kill_part_after_ask,
+ Def = [{attributes, [key, value]},{ram_copies, [Part2]},{disc_copies, [Coord, Part1]}],
+ kill_after_debug_point(Part1, {Coord, {mnesia_tm, multi_commit_sym}},
+ do_sym_trans, [{Tab, Def}], Nodes).
+
+sym_trans_before_commit_kill_part_before_ask(suite) -> [];
+sym_trans_before_commit_kill_part_before_ask(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab = sym_trans_before_commit_kill_part_before_ask,
+ Def = [{attributes, [key, value]},{ram_copies, [Part2]},{disc_copies, [Coord, Part1]}],
+ kill_after_debug_point(Part1, {Part1, {mnesia_tm, doit_ask_commit}},
+ do_sym_trans, [{Tab, Def}], Nodes).
+
+sym_trans_after_commit_kill_coord_node(suite) -> [];
+sym_trans_after_commit_kill_coord_node(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab = sym_trans_after_commit_kill_coord,
+ Def = [{attributes, [key, value]},{ram_copies, [Part2]},{disc_copies, [Coord, Part1]}],
+ kill_after_debug_point(Coord, {Coord, {mnesia_tm, multi_commit_sym, post}},
+ do_sym_trans, [{Tab, Def}], Nodes).
+
+sym_trans_after_commit_kill_coord_pid(suite) -> [];
+sym_trans_after_commit_kill_coord_pid(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab = sym_trans_after_commit_kill_coord,
+ Def = [{attributes, [key, value]},{ram_copies, [Part2]},{disc_copies, [Coord, Part1]}],
+ kill_after_debug_point(coord_pid, {Coord, {mnesia_tm, multi_commit_sym, post}},
+ do_sym_trans, [{Tab,Def}], Nodes).
+
+sym_trans_after_commit_kill_part_after_ask(suite) -> [];
+sym_trans_after_commit_kill_part_after_ask(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab = sym_trans_after_commit_kill_part_after_ask,
+ Def = [{attributes, [key, value]},{ram_copies, [Part2]},{disc_copies, [Coord, Part1]}],
+ kill_after_debug_point(Part1, {Coord, {mnesia_tm, multi_commit_sym, post}},
+ do_sym_trans, [{Tab, Def}], Nodes).
+
+sym_trans_after_commit_kill_part_do_commit_pre(suite) -> [];
+sym_trans_after_commit_kill_part_do_commit_pre(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab = sym_trans_after_commit_kill_part_do_commit_pre,
+ Def = [{attributes, [key, value]},{ram_copies, [Part2]},{disc_copies, [Coord, Part1]}],
+ TransFun = do_sym_trans,
+ kill_after_debug_point(Part1, {Part1, {mnesia_tm, do_commit, pre}},
+ TransFun, [{Tab, Def}], Nodes).
+
+sym_trans_after_commit_kill_part_do_commit_post(suite) -> [];
+sym_trans_after_commit_kill_part_do_commit_post(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab = sym_trans_after_commit_kill_part_do_commit_post,
+ Def = [{attributes, [key, value]},{ram_copies, [Part2]},{disc_copies, [Coord, Part1]}],
+ TransFun = do_sym_trans,
+ kill_after_debug_point(Part1, {Part1, {mnesia_tm, do_commit, post}},
+ TransFun, [{Tab, Def}], Nodes).
+
+do_sym_trans([Tab], _Father) ->
+ ?dl("Starting SYM_TRANS with active debug fun ", []),
+ Trans = fun() ->
+ [{_,_,Val}] = mnesia:read({Tab, 1}),
+ mnesia:write({Tab, 1, Val+1})
+ end,
+ Res = mnesia:transaction(Trans),
+ case Res of
+ {atomic, ok} -> ok;
+ {aborted, _Reason} -> ok;
+        Else -> ?error("Wrong output from mnesia:transaction(FUN):~n ~p~n",
+ [Else])
+ end,
+ ?dl("SYM_TRANSACTION done: ~p (deactiv dbgfun) ", [Res]),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+sync_dirty(doc) ->
+    ["Verify recovery of synchronous dirty operations in a couple of different",
+ "situations"];
+sync_dirty(suite) ->
+ [sync_dirty_pre_kill_part,
+ sync_dirty_pre_kill_coord_node,
+ sync_dirty_pre_kill_coord_pid,
+ sync_dirty_post_kill_part,
+ sync_dirty_post_kill_coord_node,
+ sync_dirty_post_kill_coord_pid
+ ].
+
+sync_dirty_pre_kill_part(suite) -> [];
+sync_dirty_pre_kill_part(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab = sync_dirty_pre,
+ Def = [{attributes, [key, value]},{ram_copies, [Part2]},{disc_copies, [Coord, Part1]}],
+ TransFun = do_sync_dirty,
+ kill_after_debug_point(Part1, {Part1, {mnesia_tm, sync_dirty, pre}},
+ TransFun, [{Tab, Def}], Nodes).
+
+sync_dirty_pre_kill_coord_node(suite) -> [];
+sync_dirty_pre_kill_coord_node(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab = sync_dirty_pre,
+ Def = [{attributes, [key, value]},{ram_copies, [Part2]},{disc_copies, [Coord, Part1]}],
+ TransFun = do_sync_dirty,
+ kill_after_debug_point(Coord, {Part1, {mnesia_tm, sync_dirty, pre}},
+ TransFun, [{Tab, Def}], Nodes).
+
+sync_dirty_pre_kill_coord_pid(suite) -> [];
+sync_dirty_pre_kill_coord_pid(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab = sync_dirty_pre,
+ Def = [{attributes, [key, value]},{ram_copies, [Part2]},{disc_copies, [Coord, Part1]}],
+ TransFun = do_sync_dirty,
+ kill_after_debug_point(coord_pid, {Part1, {mnesia_tm, sync_dirty, pre}},
+ TransFun, [{Tab, Def}], Nodes).
+
+sync_dirty_post_kill_part(suite) -> [];
+sync_dirty_post_kill_part(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab = sync_dirty_post,
+ Def = [{attributes, [key, value]},{ram_copies, [Part2]},{disc_copies, [Coord, Part1]}],
+ TransFun = do_sync_dirty,
+ kill_after_debug_point(Part1, {Part1, {mnesia_tm, sync_dirty, post}},
+ TransFun, [{Tab, Def}], Nodes).
+
+sync_dirty_post_kill_coord_node(suite) -> [];
+sync_dirty_post_kill_coord_node(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab = sync_dirty_post,
+ Def = [{attributes, [key, value]},{ram_copies, [Part2]},{disc_copies, [Coord, Part1]}],
+ TransFun = do_sync_dirty,
+ kill_after_debug_point(Coord, {Part1, {mnesia_tm, sync_dirty, post}},
+ TransFun, [{Tab, Def}], Nodes).
+
+sync_dirty_post_kill_coord_pid(suite) -> [];
+sync_dirty_post_kill_coord_pid(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab = sync_dirty_post,
+ Def = [{attributes, [key, value]},{ram_copies, [Part2]},{disc_copies, [Coord, Part1]}],
+ TransFun = do_sync_dirty,
+ kill_after_debug_point(coord_pid, {Part1, {mnesia_tm, sync_dirty, post}},
+ TransFun, [{Tab, Def}], Nodes).
+
+do_sync_dirty([Tab], _Father) ->
+ ?dl("Starting SYNC_DIRTY", []),
+ SYNC = fun() ->
+ [{_,_,Val}] = mnesia:read({Tab, 1}),
+ mnesia:write({Tab, 1, Val+1})
+ end,
+ {_, Res} = ?match(ok, mnesia:sync_dirty(SYNC)),
+ ?dl("SYNC_DIRTY done: ~p ", [Res]),
+ ok.
+
+async_dirty(doc) ->
+    ["Verify recovery of asynchronous dirty operations in a couple of different",
+ "situations"];
+async_dirty(suite) ->
+ [async_dirty_pre_kill_part,
+ async_dirty_pre_kill_coord_node,
+ async_dirty_pre_kill_coord_pid,
+ async_dirty_post_kill_part,
+ async_dirty_post_kill_coord_node,
+ async_dirty_post_kill_coord_pid].
+
+async_dirty_pre_kill_part(suite) -> [];
+async_dirty_pre_kill_part(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab = async_dirty_pre,
+ Def = [{attributes, [key, value]},{ram_copies, [Part2]},{disc_copies, [Coord, Part1]}],
+ TransFun = do_async_dirty,
+ kill_after_debug_point(Part1, {Part1, {mnesia_tm, async_dirty, pre}},
+ TransFun, [{Tab, Def}], Nodes).
+
+async_dirty_pre_kill_coord_node(suite) -> [];
+async_dirty_pre_kill_coord_node(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab = async_dirty_pre,
+ Def = [{attributes, [key, value]},{ram_copies, [Part2]},{disc_copies, [Coord, Part1]}],
+ TransFun = do_async_dirty,
+ kill_after_debug_point(Coord, {Part1, {mnesia_tm, async_dirty, pre}},
+ TransFun, [{Tab, Def}], Nodes).
+
+async_dirty_pre_kill_coord_pid(suite) -> [];
+async_dirty_pre_kill_coord_pid(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab = async_dirty_pre,
+ Def = [{attributes, [key, value]},{ram_copies, [Part2]},{disc_copies, [Coord, Part1]}],
+ TransFun = do_async_dirty,
+ kill_after_debug_point(coord_pid, {Part1, {mnesia_tm, async_dirty, pre}},
+ TransFun, [{Tab, Def}], Nodes).
+
+async_dirty_post_kill_part(suite) -> [];
+async_dirty_post_kill_part(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab = async_dirty_post,
+ Def = [{attributes, [key, value]},{ram_copies, [Part2]},{disc_copies, [Coord, Part1]}],
+ TransFun = do_async_dirty,
+ kill_after_debug_point(Part1, {Part1, {mnesia_tm, async_dirty, post}},
+ TransFun, [{Tab, Def}], Nodes).
+
+async_dirty_post_kill_coord_node(suite) -> [];
+async_dirty_post_kill_coord_node(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab = async_dirty_post,
+ Def = [{attributes, [key, value]},{ram_copies, [Part2]},{disc_copies, [Coord, Part1]}],
+ TransFun = do_async_dirty,
+ kill_after_debug_point(Coord, {Part1, {mnesia_tm, async_dirty, post}},
+ TransFun, [{Tab, Def}], Nodes).
+
+async_dirty_post_kill_coord_pid(suite) -> [];
+async_dirty_post_kill_coord_pid(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab = async_dirty_post,
+ Def = [{attributes, [key, value]},{ram_copies, [Part2]},{disc_copies, [Coord, Part1]}],
+ TransFun = do_async_dirty,
+ kill_after_debug_point(coord_pid, {Part1, {mnesia_tm, async_dirty, post}},
+ TransFun, [{Tab, Def}], Nodes).
+
+do_async_dirty([Tab], _Father) ->
+ ?dl("Starting ASYNC", []),
+ ASYNC = fun() ->
+ [{_,_,Val}] = mnesia:read({Tab, 1}),
+ mnesia:write({Tab, 1, Val+1})
+ end,
+ {_, Res} = ?match(ok, mnesia:async_dirty(ASYNC)),
+ ?dl("ASYNC done: ~p ", [Res]),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+asym_trans(doc) ->
+ ["Recovery of asymmetrical transactions in a couple of different",
+     "situations. Currently the error cases are not covered, i.e.",
+     "the situations where we kill mnesia or a process during a",
+     "recovery are not tested"];
+asym_trans(suite) ->
+ [
+ asym_trans_kill_part_ask,
+ asym_trans_kill_part_commit_vote,
+ asym_trans_kill_part_pre_commit,
+ asym_trans_kill_part_log_commit,
+ asym_trans_kill_part_do_commit,
+ asym_trans_kill_coord_got_votes,
+ asym_trans_kill_coord_pid_got_votes,
+ asym_trans_kill_coord_log_commit_rec,
+ asym_trans_kill_coord_pid_log_commit_rec,
+ asym_trans_kill_coord_log_commit_dec,
+ asym_trans_kill_coord_pid_log_commit_dec,
+ asym_trans_kill_coord_rec_acc_pre_commit_log_commit,
+ asym_trans_kill_coord_pid_rec_acc_pre_commit_log_commit,
+ asym_trans_kill_coord_rec_acc_pre_commit_done_commit,
+ asym_trans_kill_coord_pid_rec_acc_pre_commit_done_commit
+ ].
+
+asym_trans_kill_part_ask(suite) -> [];
+asym_trans_kill_part_ask(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab1 = {asym1, [{ram_copies, [Part2]}, {disc_copies, [Coord]}]},
+ Tab2 = {asym2, [{ram_copies, [Coord]}, {disc_copies, [Part1]}]},
+ TransFun = do_asym_trans,
+ kill_after_debug_point(Part1, {Part1, {mnesia_tm, doit_ask_commit}},
+ TransFun, [Tab1, Tab2], Nodes).
+
+asym_trans_kill_part_commit_vote(suite) -> [];
+asym_trans_kill_part_commit_vote(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab1 = {asym1, [{ram_copies, [Part2]}, {disc_copies, [Coord]}]},
+ Tab2 = {asym2, [{ram_copies, [Coord]}, {disc_copies, [Part1]}]},
+ TransFun = do_asym_trans,
+ kill_after_debug_point(Part1, {Part1, {mnesia_tm, commit_participant, vote_yes}},
+ TransFun, [Tab1, Tab2], Nodes).
+
+asym_trans_kill_part_pre_commit(suite) -> [];
+asym_trans_kill_part_pre_commit(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab1 = {asym1, [{ram_copies, [Part2]}, {disc_copies, [Coord]}]},
+ Tab2 = {asym2, [{ram_copies, [Coord]}, {disc_copies, [Part1]}]},
+ TransFun = do_asym_trans,
+ kill_after_debug_point(Part1, {Part1, {mnesia_tm, commit_participant, pre_commit}},
+ TransFun, [Tab1, Tab2], Nodes).
+
+asym_trans_kill_part_log_commit(suite) -> [];
+asym_trans_kill_part_log_commit(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab1 = {asym1, [{ram_copies, [Part2]}, {disc_copies, [Coord]}]},
+ Tab2 = {asym2, [{ram_copies, [Coord]}, {disc_copies, [Part1]}]},
+ TransFun = do_asym_trans,
+ kill_after_debug_point(Part1, {Part1, {mnesia_tm, commit_participant, log_commit}},
+ TransFun, [Tab1, Tab2], Nodes).
+
+asym_trans_kill_part_do_commit(suite) -> [];
+asym_trans_kill_part_do_commit(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab1 = {asym1, [{ram_copies, [Part2]}, {disc_copies, [Coord]}]},
+ Tab2 = {asym2, [{ram_copies, [Coord]}, {disc_copies, [Part1]}]},
+ TransFun = do_asym_trans,
+ kill_after_debug_point(Part1, {Part1, {mnesia_tm, commit_participant, do_commit}},
+ TransFun, [Tab1, Tab2], Nodes).
+
+asym_trans_kill_coord_got_votes(suite) -> [];
+asym_trans_kill_coord_got_votes(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab1 = {asym1, [{ram_copies, [Part2]}, {disc_copies, [Coord]}]},
+ Tab2 = {asym2, [{ram_copies, [Coord]}, {disc_copies, [Part1]}]},
+ TransFun = do_asym_trans,
+ kill_after_debug_point(Coord, {Coord, {mnesia_tm, multi_commit_asym_got_votes}},
+ TransFun, [Tab1, Tab2], Nodes).
+
+asym_trans_kill_coord_pid_got_votes(suite) -> [];
+asym_trans_kill_coord_pid_got_votes(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab1 = {asym1, [{ram_copies, [Part2]}, {disc_copies, [Coord]}]},
+ Tab2 = {asym2, [{ram_copies, [Coord]}, {disc_copies, [Part1]}]},
+ TransFun = do_asym_trans,
+ kill_after_debug_point(coord_pid, {Coord, {mnesia_tm, multi_commit_asym_got_votes}},
+ TransFun, [Tab1, Tab2], Nodes).
+
+asym_trans_kill_coord_log_commit_rec(suite) -> [];
+asym_trans_kill_coord_log_commit_rec(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab1 = {asym1, [{ram_copies, [Part2]}, {disc_copies, [Coord]}]},
+ Tab2 = {asym2, [{ram_copies, [Coord]}, {disc_copies, [Part1]}]},
+ TransFun = do_asym_trans,
+ kill_after_debug_point(Coord, {Coord, {mnesia_tm, multi_commit_asym_log_commit_rec}},
+ TransFun, [Tab1, Tab2], Nodes).
+
+asym_trans_kill_coord_pid_log_commit_rec(suite) -> [];
+asym_trans_kill_coord_pid_log_commit_rec(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab1 = {asym1, [{ram_copies, [Part2]}, {disc_copies, [Coord]}]},
+ Tab2 = {asym2, [{ram_copies, [Coord]}, {disc_copies, [Part1]}]},
+ TransFun = do_asym_trans,
+ kill_after_debug_point(coord_pid, {Coord, {mnesia_tm, multi_commit_asym_log_commit_rec}},
+ TransFun, [Tab1, Tab2], Nodes).
+
+asym_trans_kill_coord_log_commit_dec(suite) -> [];
+asym_trans_kill_coord_log_commit_dec(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab1 = {asym1, [{ram_copies, [Part2]}, {disc_copies, [Coord]}]},
+ Tab2 = {asym2, [{ram_copies, [Coord]}, {disc_copies, [Part1]}]},
+ TransFun = do_asym_trans,
+ kill_after_debug_point(Coord, {Coord, {mnesia_tm, multi_commit_asym_log_commit_dec}},
+ TransFun, [Tab1, Tab2], Nodes).
+
+asym_trans_kill_coord_pid_log_commit_dec(suite) -> [];
+asym_trans_kill_coord_pid_log_commit_dec(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab1 = {asym1, [{ram_copies, [Part2]}, {disc_copies, [Coord]}]},
+ Tab2 = {asym2, [{ram_copies, [Coord]}, {disc_copies, [Part1]}]},
+ TransFun = do_asym_trans,
+ kill_after_debug_point(coord_pid, {Coord, {mnesia_tm, multi_commit_asym_log_commit_dec}},
+ TransFun, [Tab1, Tab2], Nodes).
+
+asym_trans_kill_coord_rec_acc_pre_commit_log_commit(suite) -> [];
+asym_trans_kill_coord_rec_acc_pre_commit_log_commit(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab1 = {asym1, [{ram_copies, [Part2]}, {disc_copies, [Coord]}]},
+ Tab2 = {asym2, [{ram_copies, [Coord]}, {disc_copies, [Part1]}]},
+ TransFun = do_asym_trans,
+ kill_after_debug_point(Coord, {Coord, {mnesia_tm, rec_acc_pre_commit_log_commit}},
+ TransFun, [Tab1, Tab2], Nodes).
+
+asym_trans_kill_coord_pid_rec_acc_pre_commit_log_commit(suite) -> [];
+asym_trans_kill_coord_pid_rec_acc_pre_commit_log_commit(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab1 = {asym1, [{ram_copies, [Part2]}, {disc_copies, [Coord]}]},
+ Tab2 = {asym2, [{ram_copies, [Coord]}, {disc_copies, [Part1]}]},
+ TransFun = do_asym_trans,
+ kill_after_debug_point(coord_pid, {Coord, {mnesia_tm, rec_acc_pre_commit_log_commit}},
+ TransFun, [Tab1, Tab2], Nodes).
+
+asym_trans_kill_coord_rec_acc_pre_commit_done_commit(suite) -> [];
+asym_trans_kill_coord_rec_acc_pre_commit_done_commit(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab1 = {asym1, [{ram_copies, [Part2]}, {disc_copies, [Coord]}]},
+ Tab2 = {asym2, [{ram_copies, [Coord]}, {disc_copies, [Part1]}]},
+ TransFun = do_asym_trans,
+ kill_after_debug_point(Coord, {Coord, {mnesia_tm, rec_acc_pre_commit_done_commit}},
+ TransFun, [Tab1, Tab2], Nodes).
+
+asym_trans_kill_coord_pid_rec_acc_pre_commit_done_commit(suite) -> [];
+asym_trans_kill_coord_pid_rec_acc_pre_commit_done_commit(Config) when is_list(Config) ->
+ ?is_debug_compiled,
+ Nodes = ?acquire_nodes(3, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ [Coord, Part1, Part2] = Nodes,
+ Tab1 = {asym1, [{ram_copies, [Part2]}, {disc_copies, [Coord]}]},
+ Tab2 = {asym2, [{ram_copies, [Coord]}, {disc_copies, [Part1]}]},
+ TransFun = do_asym_trans,
+ kill_after_debug_point(coord_pid, {Coord, {mnesia_tm, rec_acc_pre_commit_done_commit}},
+ TransFun, [Tab1, Tab2], Nodes).
+
+do_asym_trans([Tab1, Tab2 | _R], Garbhandler) ->
+ ?dl("Starting asym trans ", []),
+ ASym_Trans = fun() ->
+ TidTs = {_Mod, Tid, _Store} =
+ mnesia:get_activity_id(),
+ ?verbose("===> asym_trans: ~w~n", [TidTs]),
+ Garbhandler ! {trans_id, Tid},
+ [{_, _, Val1}] = mnesia:read({Tab1, 1}),
+ [{_, _, Val2}] = mnesia:read({Tab2, 1}),
+ mnesia:write({Tab1, 1, Val1+1}),
+ mnesia:write({Tab2, 1, Val2+1})
+ end,
+ Res = mnesia:transaction(ASym_Trans),
+ case Res of
+ {atomic, ok} -> ok;
+ {aborted, _Reason} -> ok;
+ _Else -> ?error("Wrong output from mnesia:transaction(FUN):~n ~p~n", [Res])
+ end,
+ ?dl("Asym trans finished with: ~p ", [Res]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
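+%% Common helper for the kill_* test cases above. It creates the given
+%% tables, installs a debug fun at Debug_Point on DebugNode, runs TransFun
+%% on the coordinator node and waits until the debug fun reports that the
+%% transaction has reached the debug point. It then kills either the
+%% coordinator pid or Mnesia on the given node, restarts whatever was
+%% killed and verifies that the tables are consistent on all nodes.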
+kill_after_debug_point(Kill, {DebugNode, Debug_Point}, TransFun, TabsAndDefs, Nodes) ->
+ [Coord | _rest] = Nodes,
+
+ Create = fun({Tab, Def}) -> ?match({atomic, ok}, mnesia:create_table(Tab, Def)) end,
+ lists:foreach(Create, TabsAndDefs),
+ Tabs = [T || {T, _} <- TabsAndDefs],
+ Write = fun(Tab) -> ?match(ok, mnesia:dirty_write({Tab, 1, 100})) end,
+ lists:foreach(Write, Tabs),
+
+ Self = self(),
+ SyncFun = fun(_Env1, _Env2) -> % Just Sync with test prog
+ Self ! {self(), fun_in_position},
+ ?dl("SyncFun, sending fun_in_position ", []),
+ receive continue ->
+ ?dl("SyncFun received continue ",[]),
+ ok
+ after timer:seconds(60) ->
+ ?error("Timeout in sync_fun on ~p~n", [node()])
+ end
+ end,
+
+ Garb_handler = spawn_link(?MODULE, garb_handler, [[]]),
+
+ ?remote_activate_debug_fun(DebugNode, Debug_Point, SyncFun, []),
+ ?dl("fun_in_position activated at ~p with ~p", [DebugNode, Debug_Point]),
+ %% Spawn and do the transaction
+ Pid = spawn(Coord, ?MODULE, TransFun, [Tabs, Garb_handler]),
+ %% Wait till all the Nodes are in correct position
+ [{StoppedPid,_}] = ?receive_messages([fun_in_position]),
+ ?dl("Received fun_in_position; Removing the debug funs ~p", [DebugNode]),
+ ?remote_deactivate_debug_fun(DebugNode, Debug_Point),
+
+ case Kill of
+ coord_pid ->
+ ?dl("Intentionally killing pid ~p ", [Pid]),
+ exit(Pid, normal);
+ Node ->
+ mnesia_test_lib:kill_mnesia([Node])
+ end,
+
+ StoppedPid ! continue, %% Send continue, it may still be alive
+
+ %% Start and check that the databases are consistent
+ ?dl("Done, Restarting and verifying result ",[]),
+ case Kill of
+ coord_pid -> ok;
+ _ -> % Mnesia on some node was killed; restart it
+ timer:sleep(timer:seconds(3)), %% Just let it have the time to die
+ ?match(ok, rpc:call(Kill, mnesia, start, [[]])),
+ ?match(ok, rpc:call(Kill, mnesia, wait_for_tables, [Tabs, 60000]))
+ end,
+ Trans_res = verify_tabs(Tabs, Nodes),
+ case TransFun of
+ do_asym_trans ->
+ %% Verify that decisions are garbed; only valid for asym_trans
+ Garb_handler ! {get_tids, self()},
+ Tid_list = receive
+ {tids, List} ->
+ ?dl("Fun rec ~w", [List]),
+ List
+ end,
+ garb_of_decisions(Kill, Nodes, Tid_list, Trans_res);
+ _ ->
+ ignore
+ end,
+ ?verify_mnesia(Nodes, []).
+
+garb_of_decisions(Kill, Nodes, Tid_list, Trans_res) ->
+ [Coord, Part1, Part2] = Nodes,
+ %% Check that decision log is empty on all nodes after the trans is finished
+ verify_garb_decision_log(Nodes, Tid_list),
+ case Trans_res of
+ aborted ->
+ %% Check that aborted trans have not been restarted!!
+ ?match(1, length(Tid_list)),
+ %% Check the transient decision logs
+ %% A transaction should only be aborted in an early stage of
+ %% the trans, before any node has logged anything
+ verify_garb_transient_logs(Nodes, Tid_list, aborted),
+ %% And only when the coordinator has died,
+ %% otherwise it would have restarted the transaction
+ ?match(Kill, Coord);
+ updated ->
+ case length(Tid_list) of
+ 1 ->
+ %% If there was only one transaction, it should be logged as
+ %% committed on every node!
+ [Tid1] = Tid_list,
+ verify_garb_transient_logs(Nodes, [Tid1], committed);
+ 2 ->
+ %% If there are two transaction ids, then the first
+ %% TID should have been aborted and the transaction
+ %% restarted with a new TID
+ [Tid1, Tid2] = Tid_list,
+ verify_garb_transient_logs(Nodes, [Tid1], aborted),
+ %% If Mnesia is killed on a node, i.e. Coord or Part1, then that
+ %% node won't know about the restarted trans! The rest of the nodes
+ %% should know that the trans was committed
+ case Kill of
+ coord_pid ->
+ verify_garb_transient_logs(Nodes, [Tid2], committed);
+ Coord ->
+ verify_garb_transient_logs([Part1, Part2], [Tid2], committed),
+ verify_garb_transient_logs([Coord], [Tid2], not_found);
+ Part1 ->
+ verify_garb_transient_logs([Coord, Part2], [Tid2], committed),
+ verify_garb_transient_logs([Part1], [Tid2], not_found)
+ end
+ end
+ end.
+
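+%% Check that none of the given transaction ids are left in the
+%% mnesia_decision table on any of the nodes.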
+verify_garb_decision_log([], _Tids) -> ok;
+verify_garb_decision_log([Node|R], Tids) ->
+ Check = fun(Tid) -> %% Node, Tid used in debugging!
+ ?match({{not_found, _}, Node, Tid},
+ {outcome(Tid, [mnesia_decision]), Node, Tid})
+ end,
+ rpc:call(Node, lists, foreach, [Check, Tids]),
+ verify_garb_decision_log(R, Tids).
+
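+%% Check that each transaction id has the expected outcome in the transient
+%% decision tables on every node (an aborted decision may already be gone).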
+verify_garb_transient_logs([], _Tids, _) -> ok;
+verify_garb_transient_logs([Node|R], Tids, Exp_Res) ->
+ Check = fun(Tid) ->
+ LatestTab = mnesia_lib:val(latest_transient_decision),
+ PrevTabs = mnesia_lib:val(previous_transient_decisions),
+ case outcome(Tid, [LatestTab | PrevTabs]) of
+ {found, {_, [{_,_Tid, Exp_Res}]}} -> ok;
+ {not_found, _} when Exp_Res == not_found -> ok;
+ {not_found, _} when Exp_Res == aborted -> ok;
+ Else -> ?error("Expected ~p in trans ~p on ~p got ~p~n",
+ [Exp_Res, Tid, Node, Else])
+ end
+ end,
+ rpc:call(Node, lists, foreach, [Check, Tids]),
+ verify_garb_transient_logs(R, Tids, Exp_Res).
+
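+%% Look up Tid in the given ets tables. Returns {found, {Tab, Objects}} for
+%% the first table that contains it, or {not_found, AllTabs} otherwise.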
+outcome(Tid, Tabs) ->
+ outcome(Tid, Tabs, Tabs).
+
+outcome(Tid, [Tab | Tabs], AllTabs) ->
+ case catch ets:lookup(Tab, Tid) of
+ {'EXIT', _} ->
+ outcome(Tid, Tabs, AllTabs);
+ [] ->
+ outcome(Tid, Tabs, AllTabs);
+ Val ->
+ {found, {Tab, Val}}
+ end;
+outcome(_Tid, [], AllTabs) ->
+ {not_found, AllTabs}.
+
+
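+%% Read key 1 of every table on all three nodes and check that they agree.
+%% The initial value is 100, so 100 means that the interrupted transaction
+%% was aborted and 101 that it was completed (possibly after a restart).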
+verify_tabs([Tab|R], Nodes) ->
+ [_Coord, Part1, Part2 | _rest] = Nodes,
+ Read = fun() -> mnesia:read({Tab, 1}) end,
+ {success, A} = ?match({atomic, _}, mnesia:transaction(Read)),
+ ?match(A, rpc:call(Part1, mnesia, transaction, [Read])),
+ ?match(A, rpc:call(Part2, mnesia, transaction, [Read])),
+ {atomic, [{Tab, 1, Res}]} = A,
+ verify_tabs(R, Nodes, Res).
+
+verify_tabs([], _Nodes, Res) ->
+ case Res of
+ 100 -> aborted;
+ 101 -> updated
+ end;
+
+verify_tabs([Tab | Rest], Nodes, Res) ->
+ [Coord, Part1, Part2 | _rest] = Nodes,
+ Read = fun() -> mnesia:read({Tab, 1}) end,
+ Exp = {atomic, [{Tab, 1, Res}]},
+ ?match(Exp, rpc:call(Coord, mnesia, transaction, [Read])),
+ ?match(Exp, rpc:call(Part1, mnesia, transaction, [Read])),
+ ?match(Exp, rpc:call(Part2, mnesia, transaction, [Read])),
+ verify_tabs(Rest, Nodes, Res).
+
+%% Gather TIDs, send them to the requesting process and exit!
+garb_handler(List) ->
+ receive
+ {trans_id, ID} -> garb_handler([ID|List]);
+ {get_tids, Pid} -> Pid ! {tids, lists:reverse(List)}
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%
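+%% Wait for the listed messages (in any order). Unexpected messages are
+%% logged and ignored; give up after a two minute timeout.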
+receive_messages([], _File, _Line) -> [];
+receive_messages(ListOfMsgs, File, Line) ->
+ receive
+ {Pid, Msg} ->
+ case lists:member(Msg, ListOfMsgs) of
+ false ->
+ mnesia_test_lib:log("<>WARNING<>~n"
+ "Received unexpected msg~n ~p ~n"
+ "While waiting for ~p~n",
+ [{Pid, Msg}, ListOfMsgs], File, Line),
+ receive_messages(ListOfMsgs, File, Line);
+ true ->
+ ?dl("Got msg ~p from ~p ", [Msg, node(Pid)]),
+ [{Pid, Msg} | receive_messages(ListOfMsgs -- [Msg], File, Line)]
+ end;
+ Else -> mnesia_test_lib:log("<>WARNING<>~n"
+ "Received unexpected or badly formatted msg~n ~p ~n"
+ "While waiting for ~p~n",
+ [Else, ListOfMsgs], File, Line),
+ receive_messages(ListOfMsgs, File, Line)
+ after timer:minutes(2) ->
+ ?error("Timeout in receive msgs while waiting for ~p~n",
+ [ListOfMsgs])
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+after_full_disc_partition(doc) ->
+ ["Verify that the database does not get corrupt",
+ "when Mnesia encounters a full disc partition"].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% interrupted_fallback_start
+%% is implemented in consistency interrupted_install_fallback!
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+after_corrupt_files(doc) ->
+ ["Verify that mnesia (and dets) can handle corrupt files"];
+after_corrupt_files(suite) -> % cope with unsynced disks
+ [after_corrupt_files_decision_log_head,
+ after_corrupt_files_decision_log_tail,
+ after_corrupt_files_latest_log_head,
+ after_corrupt_files_latest_log_tail,
+ after_corrupt_files_table_dat_head,
+ after_corrupt_files_table_dat_tail,
+ after_corrupt_files_schema_dat_head,
+ after_corrupt_files_schema_dat_tail
+ ].
+
+after_corrupt_files_decision_log_head(suite) -> [];
+after_corrupt_files_decision_log_head(Config) when is_list(Config) ->
+ after_corrupt_files(Config, "DECISION.LOG", head, repair).
+
+after_corrupt_files_decision_log_tail(suite) -> [];
+after_corrupt_files_decision_log_tail(Config) when is_list(Config) ->
+ after_corrupt_files(Config, "DECISION.LOG", tail, repair).
+
+after_corrupt_files_latest_log_head(suite) -> [];
+after_corrupt_files_latest_log_head(Config) when is_list(Config) ->
+ after_corrupt_files(Config, "LATEST.LOG", head, repair).
+
+after_corrupt_files_latest_log_tail(suite) -> [];
+after_corrupt_files_latest_log_tail(Config) when is_list(Config) ->
+ after_corrupt_files(Config, "LATEST.LOG", tail, repair).
+
+after_corrupt_files_table_dat_head(suite) -> [];
+after_corrupt_files_table_dat_head(Config) when is_list(Config) ->
+ after_corrupt_files(Config, "rec_files.DAT", head, crash).
+
+after_corrupt_files_table_dat_tail(suite) -> [];
+after_corrupt_files_table_dat_tail(Config) when is_list(Config) ->
+ after_corrupt_files(Config, "rec_files.DAT", tail, repair).
+
+after_corrupt_files_schema_dat_head(suite) -> [];
+after_corrupt_files_schema_dat_head(Config) when is_list(Config) ->
+ after_corrupt_files(Config, "schema.DAT", head, crash).
+
+after_corrupt_files_schema_dat_tail(suite) -> [];
+after_corrupt_files_schema_dat_tail(Config) when is_list(Config) ->
+ after_corrupt_files(Config, "schema.DAT", tail, crash).
+
+
+
+%%% BUGBUG: We should also write test cases for auto_repair = false, i.e.
+%%% not the standard case!
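+%% Create a disc_only table, stop Mnesia and corrupt the given file, either
+%% by overwriting a few bytes at the head or by truncating it in the middle.
+%% Then restart Mnesia and check that the file is repaired or that the start
+%% of Mnesia fails, depending on Behaviour and the auto_repair setting.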
+after_corrupt_files(Config, File, Where, Behaviour) ->
+ [Node] = ?acquire_nodes(1, Config ++ [{tc_timeout, timer:minutes(2)}]),
+ Tab = rec_files,
+ Def = [{disc_only_copies, [Node]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab, Def)),
+ insert_data(Tab, 100),
+ Dir = mnesia:system_info(directory),
+ mnesia_test_lib:kill_mnesia([Node]),
+ timer:sleep(timer:seconds(10)), % Let dets finish whatever it does
+
+ DirFile = Dir ++ "/" ++ File,
+
+ {ok, Fd} = file:open(DirFile, read_write),
+ {ok, FileInfo} = file:read_file_info(DirFile),
+ case Where of
+ head ->
+ ?match({ok, _NewP}, file:position(Fd, {bof, 1})),
+ ?match(ok, file:write(Fd, [255, 255, 255, 255, 255, 255, 255, 255, 254])),
+ ok;
+ tail ->
+ Size = FileInfo#file_info.size,
+ Half = Size div 2,
+
+ ?dl(" Size = ~p Half = ~p ", [Size, Half]),
+ ?match({ok, _NewP}, file:position(Fd, {bof, Half})),
+ ?match(ok, file:truncate(Fd)),
+ ok
+ end,
+ ?match(ok, file:close(Fd)),
+
+ ?warning("++++++SOME OF THE after_corrupt* TEST CASES WILL INTENTIONALLY CRASH MNESIA+++++++~n", []),
+ Pid = spawn_link(?MODULE, mymnesia_start, [self()]),
+ receive
+ {Pid, ok} ->
+ ?match(ok, mnesia:wait_for_tables([schema, Tab], 10000)),
+ ?match(ok, verify_data(Tab, 100)),
+ case mnesia_monitor:get_env(auto_repair) of
+ false ->
+ ?error("Mnesia should have crashed in ~p ~p ~n",
+ [File, Where]);
+ true ->
+ ok
+ end,
+ ?verify_mnesia([Node], []);
+ {Pid, {error, ED}} ->
+ case {mnesia_monitor:get_env(auto_repair), Behaviour} of
+ {true, repair} ->
+ ?error("Mnesia crashed with ~p: in ~p ~p ~n",
+ [ED, File, Where]);
+ _ -> %% Everything else is allowed to crash!
+ ok
+ end,
+ ?verify_mnesia([], [Node]);
+ Msg ->
+ ?error("~p ~p: Got ~p during start of Mnesia~n",
+ [File, Where, Msg])
+ end.
+
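+%% Start Mnesia in a separate process and report the result back to the
+%% tester, since the start is expected to fail for some corruptions.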
+mymnesia_start(Tester) ->
+ Res = mnesia:start(),
+ unlink(Tester),
+ Tester ! {self(), Res}.
+
+verify_data(_, 0) -> ok;
+verify_data(Tab, N) ->
+ Actual = mnesia:dirty_read({Tab, N}),
+ Expected = [{Tab, N, N}],
+ if
+ Expected == Actual ->
+ verify_data(Tab, N - 1);
+ true ->
+ mnesia:schema(Tab),
+ {not_equal, node(), Expected, Actual}
+ end.
+
+insert_data(_Tab, 0) -> ok;
+insert_data(Tab, N) ->
+ ok = mnesia:sync_dirty(fun() -> mnesia:write({Tab, N, N}) end),
+ insert_data(Tab, N-1).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+disc_less(doc) ->
+    ["Here is a simple test case of the recovery of a disc-less node. "
+     "However, a lot more test cases involving disc-less nodes should "
+     "be written."];
+disc_less(suite) -> [];
+disc_less(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ case mnesia_test_lib:diskless(Config) of
+ true -> skip;
+ false ->
+ ?match({atomic, ok}, mnesia:change_table_copy_type(schema, Node3, ram_copies))
+ end,
+ Tab1 = disc_less1,
+ Tab2 = disc_less2,
+ Tab3 = disc_less3,
+ Def1 = [{ram_copies, [Node3]}, {disc_copies, [Node1, Node2]}],
+ Def2 = [{ram_copies, [Node3]}, {disc_copies, [Node1]}],
+ Def3 = [{ram_copies, [Node3]}, {disc_copies, [Node2]}],
+ ?match({atomic, ok}, mnesia:create_table(Tab1, Def1)),
+ ?match({atomic, ok}, mnesia:create_table(Tab2, Def2)),
+ ?match({atomic, ok}, mnesia:create_table(Tab3, Def3)),
+ insert_data(Tab1, 100),
+ insert_data(Tab2, 100),
+ insert_data(Tab3, 100),
+
+ mnesia_test_lib:kill_mnesia([Node1, Node2]),
+ timer:sleep(500),
+ mnesia_test_lib:kill_mnesia([Node3]),
+ ?match(ok, rpc:call(Node1, mnesia, start, [])),
+ ?match(ok, rpc:call(Node2, mnesia, start, [])),
+
+ timer:sleep(500),
+ ?match(ok, rpc:call(Node3, mnesia, start, [[{extra_db_nodes, [Node1, Node2]}]])),
+ ?match(ok, rpc:call(Node3, mnesia, wait_for_tables, [[Tab1, Tab2, Tab3], 20000])),
+
+ ?match(ok, rpc:call(Node3, ?MODULE, verify_data, [Tab1, 100])),
+ ?match(ok, rpc:call(Node3, ?MODULE, verify_data, [Tab2, 100])),
+ ?match(ok, rpc:call(Node3, ?MODULE, verify_data, [Tab3, 100])),
+
+
+ ?match(ok, rpc:call(Node2, ?MODULE, verify_data, [Tab1, 100])),
+ ?match(ok, rpc:call(Node2, ?MODULE, verify_data, [Tab2, 100])),
+ ?match(ok, rpc:call(Node2, ?MODULE, verify_data, [Tab3, 100])),
+
+ ?match(ok, rpc:call(Node1, ?MODULE, verify_data, [Tab1, 100])),
+ ?match(ok, rpc:call(Node1, ?MODULE, verify_data, [Tab2, 100])),
+ ?match(ok, rpc:call(Node1, ?MODULE, verify_data, [Tab3, 100])),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+system_upgrade(doc) ->
+ ["Test on-line and off-line upgrade of the Mnesia application"].
+
+garb_decision(doc) ->
+ ["Test that decisions are garbed correctly."];
+garb_decision(suite) -> [];
+garb_decision(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ check_garb(Nodes),
+ ?match({atomic, ok},mnesia:create_table(a, [{disc_copies, Nodes}])),
+ check_garb(Nodes),
+ ?match({atomic, ok},mnesia:create_table(b, [{ram_copies, Nodes}])),
+ check_garb(Nodes),
+ ?match({atomic, ok},mnesia:create_table(c, [{ram_copies, [Node1, Node3]},
+ {disc_copies, [Node2]}])),
+ check_garb(Nodes),
+ ?match({atomic, ok},mnesia:create_table(d, [{disc_copies, [Node1, Node3]},
+ {ram_copies, [Node2]}])),
+ check_garb(Nodes),
+
+ W = fun(Tab) -> mnesia:write({Tab,1,1}) end,
+ A = fun(Tab) -> mnesia:write({Tab,1,1}), exit(1) end,
+
+ ?match({atomic, ok}, mnesia:transaction(W,[a])),
+ check_garb(Nodes),
+ ?match({atomic, ok}, mnesia:transaction(W,[b])),
+ check_garb(Nodes),
+ ?match({atomic, ok}, mnesia:transaction(W,[c])),
+ check_garb(Nodes),
+ ?match({atomic, ok}, mnesia:transaction(W,[d])),
+ check_garb(Nodes),
+ ?match({aborted,1}, mnesia:transaction(A,[a])),
+ check_garb(Nodes),
+ ?match({aborted,1}, mnesia:transaction(A,[b])),
+ check_garb(Nodes),
+ ?match({aborted,1}, mnesia:transaction(A,[c])),
+ check_garb(Nodes),
+ ?match({aborted,1}, mnesia:transaction(A,[d])),
+ check_garb(Nodes),
+
+ rpc:call(Node2, mnesia, lkill, []),
+ ?match({atomic, ok}, mnesia:transaction(W,[a])),
+ ?match({atomic, ok}, mnesia:transaction(W,[b])),
+ ?match({atomic, ok}, mnesia:transaction(W,[c])),
+ ?match({atomic, ok}, mnesia:transaction(W,[d])),
+ check_garb(Nodes),
+ ?match([], mnesia_test_lib:start_mnesia([Node2])),
+ check_garb(Nodes),
+ timer:sleep(2000),
+ check_garb(Nodes),
+ %%%%%% Check transient_decision logs %%%%%
+
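+ %% Run enough transactions to make the latest transient decision table
+ %% rotate; the old table should then show up among the
+ %% previous_transient_decisions.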
+ ?match(dumped, mnesia:dump_log()), sys:get_status(mnesia_recover), % sync
+ [{atomic, ok} = mnesia:transaction(W,[a]) || _ <- lists:seq(1,30)],
+ ?match(dumped, mnesia:dump_log()), sys:get_status(mnesia_recover), % sync
+ TD0 = mnesia_lib:val(latest_transient_decision),
+ ?match(0, ets:info(TD0, size)),
+ {atomic, ok} = mnesia:transaction(W,[a]),
+ ?match(dumped, mnesia:dump_log()), sys:get_status(mnesia_recover), % sync
+ ?match(TD0, mnesia_lib:val(latest_transient_decision)),
+ [{atomic, ok} = mnesia:transaction(W,[a]) || _ <- lists:seq(1,30)],
+ ?match(dumped, mnesia:dump_log()), sys:get_status(mnesia_recover), % sync
+ ?match(false, TD0 =:= mnesia_lib:val(latest_transient_decision)),
+ ?match(true, lists:member(TD0, mnesia_lib:val(previous_transient_decisions))),
+ ?verify_mnesia(Nodes, []).
+
+check_garb(Nodes) ->
+ rpc:multicall(Nodes, sys, get_status, [mnesia_recover]),
+ ?match({_, []},rpc:multicall(Nodes, erlang, apply, [fun check_garb/0, []])).
+
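+%% After garbing, the local mnesia_decision table should only contain the
+%% serial counter and mnesia_down markers; anything else is reported.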
+check_garb() ->
+ try
+ Ds = ets:tab2list(mnesia_decision),
+ Check = fun({trans_tid,serial, _}) -> false;
+ ({mnesia_down,_,_,_}) -> false;
+ (_Else) -> true
+ end,
+ Node = node(),
+ ?match({Node, []}, {node(), lists:filter(Check, Ds)})
+ catch _:_ -> ok
+ end,
+ ok.
diff --git a/lib/mnesia/test/mnesia_registry_test.erl b/lib/mnesia/test/mnesia_registry_test.erl
new file mode 100644
index 0000000000..2305ef93b7
--- /dev/null
+++ b/lib/mnesia/test/mnesia_registry_test.erl
@@ -0,0 +1,137 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1998-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(mnesia_registry_test).
+-author('[email protected]').
+-compile([export_all]).
+-include("mnesia_test_lib.hrl").
+
+init_per_testcase(Func, Conf) ->
+ mnesia_test_lib:init_per_testcase(Func, Conf).
+
+fin_per_testcase(Func, Conf) ->
+ mnesia_test_lib:fin_per_testcase(Func, Conf).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+all(doc) ->
+ ["Test the mnesia_registry module"];
+all(suite) ->
+ [
+ good_dump,
+ bad_dump
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+good_dump(doc) ->
+ ["Dump a faked C-node registry"];
+good_dump(suite) -> [];
+good_dump(Config) when is_list(Config) ->
+ [Node] = Nodes = ?acquire_nodes(1, Config),
+ T1 = gordon,
+ ?match(ok, mnesia_registry:create_table(T1)),
+ One = {T1, 1, 0, integer, 0, 10},
+ Two = {T1, "two", 3, integer, 0, 20},
+ Three = {T1, 3, 0, string, 6, "thirty"},
+ ?match(ok, mnesia:dirty_write(One)),
+ ?match(ok, mnesia:dirty_write(Two)),
+ ?match(ok, mnesia:dirty_write(Three)),
+ ?match([One], mnesia:dirty_read({T1, 1})),
+ ?match([_ | _], dump_registry(Node, T1)),
+
+ NewOne = {T1, 1, 0, integer, 0, 1},
+ NewFour = {T1, "4", 1, string, 4, "four"},
+
+ ?match([NewOne], mnesia:dirty_read({T1, 1})),
+ ?match([Two], mnesia:dirty_read({T1, "two"})),
+ ?match([], mnesia:dirty_read({T1, 3})),
+ ?match([NewFour], mnesia:dirty_read({T1, "4"})),
+
+ T2 = blixt,
+ ?match({'EXIT', {aborted, {no_exists, _}}},
+ mnesia:dirty_read({T2, 1})),
+ ?match([_ |_], dump_registry(Node, T2)),
+
+ NewOne2 = setelement(1, NewOne, T2),
+ NewFour2 = setelement(1, NewFour, T2),
+
+ ?match([NewOne2], mnesia:dirty_read({T2, 1})),
+ ?match([], mnesia:dirty_read({T2, "two"})),
+ ?match([], mnesia:dirty_read({T2, 3})),
+ ?match([NewFour2], mnesia:dirty_read({T2, "4"})),
+ ?match([_One2, NewFour2], lists:sort(restore_registry(Node, T2))),
+
+ ?verify_mnesia(Nodes, []).
+
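+%% Fake a C-node that dumps its registry: write two entries, delete one,
+%% commit, and return the records we expect to find in the table afterwards.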
+dump_registry(Node, Tab) ->
+ case rpc:call(Node, mnesia_registry, start_dump, [Tab, self()]) of
+ Pid when is_pid(Pid) ->
+ Pid ! {write, 1, 0, integer, 0, 1},
+ Pid ! {delete, 3},
+ Pid ! {write, "4", 1, string, 4, "four"},
+ Pid ! {commit, self()},
+ receive
+ {ok, Pid} ->
+ [{Tab, "4", 1, string, 4, "four"},
+ {Tab, 1, 0, integer, 0, 1}];
+ {'EXIT', Pid, Reason} ->
+ exit(Reason)
+ end;
+ {badrpc, Reason} ->
+ exit(Reason)
+ end.
+
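+%% Fake a C-node that restores its registry: ask the restore process for the
+%% stored records and collect them as they are sent back.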
+restore_registry(Node, Tab) ->
+ case rpc:call(Node, mnesia_registry, start_restore, [Tab, self()]) of
+ {size, Pid, N, _LargestKeySize, _LargestValSize} ->
+ Pid ! {send_records, self()},
+ receive_records(Tab, N);
+ {badrpc, Reason} ->
+ exit(Reason)
+ end.
+
+receive_records(Tab, N) when N > 0 ->
+ receive
+ {restore, KeySize, ValSize, ValType, Key, Val} ->
+ [{Tab, Key, KeySize, ValType, ValSize, Val} | receive_records(Tab, N -1)];
+ {'EXIT', _Pid, Reason} ->
+ exit(Reason)
+ end;
+receive_records(_Tab, 0) ->
+ [].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+bad_dump(doc) ->
+ ["Intentionally fail with the dump of a faked C-node registry"];
+bad_dump(suite) -> [];
+bad_dump(Config) when is_list(Config) ->
+ [Node] = Nodes = ?acquire_nodes(1, Config),
+
+ OldTab = ming,
+ ?match({'EXIT', {aborted, _}}, mnesia_registry:start_restore(no_tab, self())),
+ ?match({atomic, ok}, mnesia:create_table(OldTab, [{attributes, [a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q]}])),
+ ?match({'EXIT',{aborted,{bad_type,_}}}, dump_registry(Node, OldTab)),
+ ?match(stopped, mnesia:stop()),
+
+ ?match({'EXIT', {aborted, _}}, mnesia_registry:create_table(down_table)),
+ ?match({'EXIT', {aborted, _}}, mnesia_registry:start_restore(no_tab, self())),
+ ?match({'EXIT', {aborted, _}}, dump_registry(Node, down_dump)),
+
+ ?verify_mnesia([], Nodes).
+
diff --git a/lib/mnesia/test/mnesia_schema_recovery_test.erl b/lib/mnesia/test/mnesia_schema_recovery_test.erl
new file mode 100644
index 0000000000..387238ae6b
--- /dev/null
+++ b/lib/mnesia/test/mnesia_schema_recovery_test.erl
@@ -0,0 +1,787 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1998-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(mnesia_schema_recovery_test).
+-author('[email protected]').
+-compile([export_all]).
+-include("mnesia_test_lib.hrl").
+
+init_per_testcase(Func, Conf) ->
+ mnesia_test_lib:init_per_testcase(Func, Conf).
+
+fin_per_testcase(Func, Conf) ->
+ mnesia_test_lib:fin_per_testcase(Func, Conf).
+
+-define(receive_messages(Msgs), receive_messages(Msgs, ?FILE, ?LINE)).
+
+% First some debug logging
+-define(dgb, true).
+-ifdef(dgb).
+-define(dl(X, Y), ?verbose("**TRACING: " ++ X ++ "**~n", Y)).
+-else.
+-define(dl(X, Y), ok).
+-endif.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+all(doc) ->
+    ["Verify the recoverability of schema transactions.",
+     " Verify that a schema transaction",
+     " can be completed when it has been logged correctly and Mnesia",
+     " crashes before the log has been dumped. The transaction",
+     " should then be handled during the log dump at startup."
+ ];
+all(suite) ->
+ [interrupted_before_log_dump,
+ interrupted_after_log_dump].
+
+interrupted_before_log_dump(suite) ->
+ [interrupted_before_create_ram,
+ interrupted_before_create_disc,
+ interrupted_before_create_disc_only,
+ interrupted_before_create_nostore,
+ interrupted_before_delete_ram,
+ interrupted_before_delete_disc,
+ interrupted_before_delete_disc_only,
+ interrupted_before_add_ram,
+ interrupted_before_add_disc,
+ interrupted_before_add_disc_only,
+ interrupted_before_add_kill_copier,
+ interrupted_before_move_ram,
+ interrupted_before_move_disc,
+ interrupted_before_move_disc_only,
+ interrupted_before_move_kill_copier,
+ interrupted_before_delcopy_ram,
+ interrupted_before_delcopy_disc,
+ interrupted_before_delcopy_disc_only,
+ interrupted_before_delcopy_kill_copier,
+ interrupted_before_addindex_ram,
+ interrupted_before_addindex_disc,
+ interrupted_before_addindex_disc_only,
+ interrupted_before_delindex_ram,
+ interrupted_before_delindex_disc,
+ interrupted_before_delindex_disc_only,
+ interrupted_before_change_type_ram2disc,
+ interrupted_before_change_type_ram2disc_only,
+ interrupted_before_change_type_disc2ram,
+ interrupted_before_change_type_disc2disc_only,
+ interrupted_before_change_type_disc_only2ram,
+ interrupted_before_change_type_disc_only2disc,
+ interrupted_before_change_type_other_node,
+ interrupted_before_change_schema_type %% Change schema table copy type!!
+ ].
+
+interrupted_after_log_dump(suite) ->
+ [interrupted_after_create_ram,
+ interrupted_after_create_disc,
+ interrupted_after_create_disc_only,
+ interrupted_after_create_nostore,
+ interrupted_after_delete_ram,
+ interrupted_after_delete_disc,
+ interrupted_after_delete_disc_only,
+ interrupted_after_add_ram,
+ interrupted_after_add_disc,
+ interrupted_after_add_disc_only,
+ interrupted_after_add_kill_copier,
+ interrupted_after_move_ram,
+ interrupted_after_move_disc,
+ interrupted_after_move_disc_only,
+ interrupted_after_move_kill_copier,
+ interrupted_after_delcopy_ram,
+ interrupted_after_delcopy_disc,
+ interrupted_after_delcopy_disc_only,
+ interrupted_after_delcopy_kill_copier,
+ interrupted_after_addindex_ram,
+ interrupted_after_addindex_disc,
+ interrupted_after_addindex_disc_only,
+ interrupted_after_delindex_ram,
+ interrupted_after_delindex_disc,
+ interrupted_after_delindex_disc_only,
+ interrupted_after_change_type_ram2disc,
+ interrupted_after_change_type_ram2disc_only,
+ interrupted_after_change_type_disc2ram,
+ interrupted_after_change_type_disc2disc_only,
+ interrupted_after_change_type_disc_only2ram,
+ interrupted_after_change_type_disc_only2disc,
+ interrupted_after_change_type_other_node,
+ interrupted_after_change_schema_type %% Change schema table copy type!!
+
+% interrupted_before_change_access_mode,
+% interrupted_before_transform,
+% interrupted_before_restore,
+ ].
+
+interrupted_before_create_ram(suite) -> [];
+interrupted_before_create_ram(Config) when is_list(Config) ->
+ KillAt = {mnesia_dumper, dump_schema_op},
+ interrupted_create(Config, ram_copies, all, KillAt).
+
+interrupted_before_create_disc(suite) -> [];
+interrupted_before_create_disc(Config) when is_list(Config) ->
+ KillAt = {mnesia_dumper, dump_schema_op},
+ interrupted_create(Config, disc_copies, all, KillAt).
+
+interrupted_before_create_disc_only(suite) -> [];
+interrupted_before_create_disc_only(Config) when is_list(Config) ->
+ KillAt = {mnesia_dumper, dump_schema_op},
+ interrupted_create(Config, disc_only_copies, all, KillAt).
+
+interrupted_before_create_nostore(suite) -> [];
+interrupted_before_create_nostore(Config) when is_list(Config) ->
+ KillAt = {mnesia_dumper, dump_schema_op},
+ interrupted_create(Config, ram_copies, one, KillAt).
+
+interrupted_after_create_ram(suite) -> [];
+interrupted_after_create_ram(Config) when is_list(Config) ->
+ KillAt = {mnesia_dumper, post_dump},
+ interrupted_create(Config, ram_copies, all, KillAt).
+
+interrupted_after_create_disc(suite) -> [];
+interrupted_after_create_disc(Config) when is_list(Config) ->
+ KillAt = {mnesia_dumper, post_dump},
+ interrupted_create(Config, disc_copies, all, KillAt).
+
+interrupted_after_create_disc_only(suite) -> [];
+interrupted_after_create_disc_only(Config) when is_list(Config) ->
+ KillAt = {mnesia_dumper, post_dump},
+ interrupted_create(Config, disc_only_copies, all, KillAt).
+
+interrupted_after_create_nostore(suite) -> [];
+interrupted_after_create_nostore(Config) when is_list(Config) ->
+ KillAt = {mnesia_dumper, post_dump},
+ interrupted_create(Config, ram_copies, one, KillAt).
+
+%%% After the dump we do not need a debug point
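+%%% The post_dump variants simply perform the operation, stop Mnesia and
+%%% verify that everything that reached the log is recovered during the log
+%%% dump at restart. The other variants install a debug point on the node
+%%% to be killed and take it down when the schema operation reaches that
+%%% point.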
+interrupted_create(Config, Type, _Where, {mnesia_dumper, post_dump}) ->
+ [Node1] = Nodes = ?acquire_nodes(1, [{tc_timeout, timer:seconds(30)} | Config]),
+ ?match({atomic, ok},mnesia:create_table(itrpt, [{Type, Nodes}])),
+ ?match({atomic, ok},mnesia:create_table(test, [{disc_copies,[Node1]}])),
+ ?match(ok, mnesia:dirty_write({itrpt, before, 1})),
+ ?match(ok, mnesia:dirty_write({test, found_in_log, 1})),
+ ?match(stopped, mnesia:stop()),
+ ?match([], mnesia_test_lib:start_mnesia([Node1], [itrpt,test])),
+ %% Verify
+ ?match([{test, found_in_log, 1}], mnesia:dirty_read({test, found_in_log})),
+ case Type of
+ ram_copies ->
+ ?match([], mnesia:dirty_read({itrpt, before}));
+ _ ->
+ ?match([{itrpt, before, 1}], mnesia:dirty_read({itrpt, before}))
+ end,
+ ?verify_mnesia(Nodes, []);
+interrupted_create(Config, Type, Where, KillAt) ->
+ ?is_debug_compiled,
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, [{tc_timeout, timer:seconds(30)} | Config]),
+ {success, [A]} = ?start_activities([Node2]),
+ setup_dbgpoint(KillAt, Node2),
+
+ if %% CREATE TABLE
+ Where == all -> % tables on both nodes
+ A ! fun() -> mnesia:create_table(itrpt, [{Type, Nodes}]) end;
+ true -> % no table on the killed node
+ A ! fun() -> mnesia:create_table(itrpt, [{Type, [Node1]}]) end
+ end,
+
+ kill_at_debug(),
+ ?match([], mnesia_test_lib:start_mnesia([Node2], [itrpt])),
+ %% Verify
+ ?match(ok, mnesia:dirty_write({itrpt, before, 1})),
+ verify_tab(Node1, Node2),
+ ?verify_mnesia(Nodes, []).
+
+interrupted_before_delete_ram(suite) -> [];
+interrupted_before_delete_ram(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_delete(Config, ram_copies, Debug_Point).
+interrupted_before_delete_disc(suite) -> [];
+interrupted_before_delete_disc(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_delete(Config, disc_copies, Debug_Point).
+interrupted_before_delete_disc_only(suite) -> [];
+interrupted_before_delete_disc_only(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_delete(Config, disc_only_copies, Debug_Point).
+
+interrupted_after_delete_ram(suite) -> [];
+interrupted_after_delete_ram(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_delete(Config, ram_copies, Debug_Point).
+interrupted_after_delete_disc(suite) -> [];
+interrupted_after_delete_disc(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_delete(Config, disc_copies, Debug_Point).
+interrupted_after_delete_disc_only(suite) -> [];
+interrupted_after_delete_disc_only(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_delete(Config, disc_only_copies, Debug_Point).
+
+interrupted_delete(Config, Type, KillAt) ->
+ ?is_debug_compiled,
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, [{tc_timeout, timer:seconds(30)} | Config]),
+ Tab = itrpt,
+ ?match({atomic, ok}, mnesia:create_table(Tab, [{Type, [Node2]}])),
+ ?match(ok, mnesia:dirty_write({Tab, before, 1})),
+ {_Alive, Kill} = {Node1, Node2},
+ {success, [A]} = ?start_activities([Kill]),
+
+ setup_dbgpoint(KillAt, Kill),
+ A ! fun() -> mnesia:delete_table(Tab) end,
+
+ kill_at_debug(),
+ ?match([], mnesia_test_lib:start_mnesia([Node2], [])),
+ Bad = {badrpc, {'EXIT', {aborted,{no_exists, Tab, all}}}},
+ ?match(Bad, rpc:call(Node1, mnesia, table_info, [Tab, all])),
+ ?match(Bad, rpc:call(Node2, mnesia, table_info, [Tab, all])),
+ ?verify_mnesia(Nodes, []).
+
+interrupted_before_add_ram(suite) -> [];
+interrupted_before_add_ram(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_add(Config, ram_copies, kill_reciever, Debug_Point).
+interrupted_before_add_disc(suite) -> [];
+interrupted_before_add_disc(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_add(Config, disc_copies, kill_reciever, Debug_Point).
+interrupted_before_add_disc_only(suite) -> [];
+interrupted_before_add_disc_only(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_add(Config, disc_only_copies, kill_reciever, Debug_Point).
+interrupted_before_add_kill_copier(suite) -> [];
+interrupted_before_add_kill_copier(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_add(Config, ram_copies, kill_copier, Debug_Point).
+
+interrupted_after_add_ram(suite) -> [];
+interrupted_after_add_ram(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_add(Config, ram_copies, kill_reciever, Debug_Point).
+interrupted_after_add_disc(suite) -> [];
+interrupted_after_add_disc(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_add(Config, disc_copies, kill_reciever, Debug_Point).
+interrupted_after_add_disc_only(suite) -> [];
+interrupted_after_add_disc_only(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_add(Config, disc_only_copies, kill_reciever, Debug_Point).
+interrupted_after_add_kill_copier(suite) -> [];
+interrupted_after_add_kill_copier(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_add(Config, ram_copies, kill_copier, Debug_Point).
+
+%%% After the dump we do not need a debug point
+interrupted_add(Config, Type, _Where, {mnesia_dumper, post_dump}) ->
+ [Node1, Node2] = Nodes =
+ ?acquire_nodes(2, [{tc_timeout, timer:seconds(30)} | Config]),
+ Tab = itrpt,
+ ?match({atomic, ok}, mnesia:create_table(Tab, [{Type, [Node2]}, {local_content,true}])),
+ ?match({atomic, ok},mnesia:create_table(test, [{disc_copies,[Node1]}])),
+ ?match({atomic, ok}, mnesia:add_table_copy(Tab, Node1, Type)),
+ ?match(ok, mnesia:dirty_write({itrpt, before, 1})),
+ ?match(ok, mnesia:dirty_write({test, found_in_log, 1})),
+ ?match(stopped, mnesia:stop()),
+ ?match([], mnesia_test_lib:start_mnesia([Node1], [itrpt,test])),
+ %% Verify
+ ?match([{test, found_in_log, 1}], mnesia:dirty_read({test, found_in_log})),
+ case Type of
+ ram_copies ->
+ ?match([], mnesia:dirty_read({itrpt, before}));
+ _ ->
+ ?match([{itrpt, before, 1}], mnesia:dirty_read({itrpt, before}))
+ end,
+ ?verify_mnesia(Nodes, []);
+interrupted_add(Config, Type, Who, KillAt) ->
+ ?is_debug_compiled,
+ [Node1, Node2] = Nodes =
+ ?acquire_nodes(2, [{tc_timeout, timer:seconds(30)} | Config]),
+ {_Alive, Kill} =
+ if Who == kill_reciever ->
+ {Node1, Node2};
+ true ->
+ {Node2, Node1}
+ end,
+ {success, [A]} = ?start_activities([Kill]),
+ Tab = itrpt,
+ ?match({atomic, ok}, mnesia:create_table(Tab, [{Type, [Node1]}])),
+ ?match(ok, mnesia:dirty_write({Tab, before, 1})),
+
+ setup_dbgpoint(KillAt, Kill),
+
+ A ! fun() -> mnesia:add_table_copy(Tab, Node2, Type) end,
+ kill_at_debug(),
+ ?match([], mnesia_test_lib:start_mnesia([Kill], [itrpt])),
+ verify_tab(Node1, Node2),
+ ?verify_mnesia(Nodes, []).
+
+interrupted_before_move_ram(suite) -> [];
+interrupted_before_move_ram(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_move(Config, ram_copies, kill_reciever, Debug_Point).
+interrupted_before_move_disc(suite) -> [];
+interrupted_before_move_disc(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_move(Config, disc_copies, kill_reciever, Debug_Point).
+interrupted_before_move_disc_only(suite) -> [];
+interrupted_before_move_disc_only(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_move(Config, disc_only_copies, kill_reciever, Debug_Point).
+interrupted_before_move_kill_copier(suite) -> [];
+interrupted_before_move_kill_copier(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_move(Config, ram_copies, kill_copier, Debug_Point).
+
+interrupted_after_move_ram(suite) -> [];
+interrupted_after_move_ram(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_move(Config, ram_copies, kill_reciever, Debug_Point).
+interrupted_after_move_disc(suite) -> [];
+interrupted_after_move_disc(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_move(Config, disc_copies, kill_reciever, Debug_Point).
+interrupted_after_move_disc_only(suite) -> [];
+interrupted_after_move_disc_only(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_move(Config, disc_only_copies, kill_reciever, Debug_Point).
+interrupted_after_move_kill_copier(suite) -> [];
+interrupted_after_move_kill_copier(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_move(Config, ram_copies, kill_copier, Debug_Point).
+
+%%% After the dump we do not need a debug point
+interrupted_move(Config, Type, _Where, {mnesia_dumper, post_dump}) ->
+ [Node1, Node2] = Nodes =
+ ?acquire_nodes(2, [{tc_timeout, timer:seconds(30)} | Config]),
+ Tab = itrpt,
+ ?match({atomic, ok},mnesia:create_table(test, [{disc_copies,[Node1]}])),
+ ?match({atomic, ok}, mnesia:create_table(Tab, [{Type, [Node1]}])),
+ ?match(ok, mnesia:dirty_write({itrpt, before, 1})),
+ ?match({atomic, ok}, mnesia:move_table_copy(Tab, Node1, Node2)),
+ ?match(ok, mnesia:dirty_write({itrpt, aFter, 1})),
+ ?match(ok, mnesia:dirty_write({test, found_in_log, 1})),
+ ?match(stopped, mnesia:stop()),
+ ?match([], mnesia_test_lib:start_mnesia([Node1], [itrpt,test])),
+ %% Verify
+ ?match([{test, found_in_log, 1}], mnesia:dirty_read({test, found_in_log})),
+ ?match([{itrpt, before, 1}], mnesia:dirty_read({itrpt, before})),
+ ?match([{itrpt, aFter, 1}], mnesia:dirty_read({itrpt, aFter})),
+ ?verify_mnesia(Nodes, []);
+interrupted_move(Config, Type, Who, KillAt) ->
+ ?is_debug_compiled,
+ [Node1, Node2] = Nodes =
+ ?acquire_nodes(2, [{tc_timeout, timer:seconds(30)} | Config]),
+ Tab = itrpt,
+ ?match({atomic, ok}, mnesia:create_table(Tab, [{Type, [Node1]}])),
+ ?match(ok, mnesia:dirty_write({Tab, before, 1})),
+
+ {_Alive, Kill} =
+ if Who == kill_reciever ->
+ if Type == ram_copies ->
+ {atomic, ok} = mnesia:dump_tables([Tab]);
+ true ->
+ ignore
+ end,
+ {Node1, Node2};
+ true ->
+ {Node2, Node1}
+ end,
+
+ {success, [A]} = ?start_activities([Kill]),
+
+ setup_dbgpoint(KillAt, Kill),
+ A ! fun() -> mnesia:move_table_copy(Tab, Node1, Node2) end,
+ kill_at_debug(),
+ ?match([], mnesia_test_lib:start_mnesia([Kill], [itrpt])),
+ verify_tab(Node1, Node2),
+ ?verify_mnesia(Nodes, []).
+
+interrupted_before_delcopy_ram(suite) -> [];
+interrupted_before_delcopy_ram(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_delcopy(Config, ram_copies, kill_reciever, Debug_Point).
+interrupted_before_delcopy_disc(suite) -> [];
+interrupted_before_delcopy_disc(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_delcopy(Config, disc_copies, kill_reciever, Debug_Point).
+interrupted_before_delcopy_disc_only(suite) -> [];
+interrupted_before_delcopy_disc_only(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_delcopy(Config, disc_only_copies, kill_reciever, Debug_Point).
+interrupted_before_delcopy_kill_copier(suite) -> [];
+interrupted_before_delcopy_kill_copier(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_delcopy(Config, ram_copies, kill_copier, Debug_Point).
+
+interrupted_after_delcopy_ram(suite) -> [];
+interrupted_after_delcopy_ram(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_delcopy(Config, ram_copies, kill_reciever, Debug_Point).
+interrupted_after_delcopy_disc(suite) -> [];
+interrupted_after_delcopy_disc(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_delcopy(Config, disc_copies, kill_reciever, Debug_Point).
+interrupted_after_delcopy_disc_only(suite) -> [];
+interrupted_after_delcopy_disc_only(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_delcopy(Config, disc_only_copies, kill_reciever, Debug_Point).
+interrupted_after_delcopy_kill_copier(suite) -> [];
+interrupted_after_delcopy_kill_copier(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_delcopy(Config, ram_copies, kill_copier, Debug_Point).
+
+
+%%% After the dump we do not need a debug point
+interrupted_delcopy(Config, Type, _Where, {mnesia_dumper, post_dump}) ->
+ [Node1, Node2] = Nodes =
+ ?acquire_nodes(2, [{tc_timeout, timer:seconds(30)} | Config]),
+ Tab = itrpt,
+ ?match({atomic, ok},mnesia:create_table(test, [{disc_copies,[Node1]}])),
+ ?match({atomic, ok}, mnesia:create_table(Tab, [{Type, [Node1,Node2]}])),
+ ?match({atomic, ok}, mnesia:del_table_copy(Tab, Node1)),
+ ?match(ok, mnesia:dirty_write({test, found_in_log, 1})),
+ ?match(stopped, mnesia:stop()),
+ ?match([], mnesia_test_lib:start_mnesia([Node1], [test])),
+ %% Verify
+ ?match([{test, found_in_log, 1}], mnesia:dirty_read({test, found_in_log})),
+ ?match([Node2], mnesia:table_info(itrpt,Type)),
+ ?verify_mnesia(Nodes, []);
+interrupted_delcopy(Config, Type, Who, KillAt) ->
+ ?is_debug_compiled,
+ [Node1, Node2] = Nodes =
+ ?acquire_nodes(2, [{tc_timeout, timer:seconds(30)} | Config]),
+ Tab = itrpt,
+ ?match({atomic, ok}, mnesia:create_table(Tab, [{Type, [Node1, Node2]}])),
+ ?match(ok, mnesia:dirty_write({Tab, before, 1})),
+
+ {_Alive, Kill} =
+ if Who == kill_reciever ->
+ {Node1, Node2};
+ true ->
+ if
+ Type == ram_copies ->
+ {atomic, ok} = mnesia:dump_tables([Tab]);
+ true ->
+ ignore
+ end,
+ {Node2, Node1}
+ end,
+
+ {success, [A]} = ?start_activities([Kill]),
+ setup_dbgpoint(KillAt, Kill),
+ A ! fun() -> mnesia:del_table_copy(Tab, Node2) end,
+ kill_at_debug(),
+ ?match([], mnesia_test_lib:start_mnesia([Kill], [itrpt])),
+ verify_tab(Node1, Node2),
+ ?verify_mnesia(Nodes, []).
+
+interrupted_before_addindex_ram(suite) -> [];
+interrupted_before_addindex_ram(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_addindex(Config, ram_copies, Debug_Point).
+interrupted_before_addindex_disc(suite) -> [];
+interrupted_before_addindex_disc(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_addindex(Config, disc_copies, Debug_Point).
+interrupted_before_addindex_disc_only(suite) -> [];
+interrupted_before_addindex_disc_only(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_addindex(Config, disc_only_copies, Debug_Point).
+
+interrupted_after_addindex_ram(suite) -> [];
+interrupted_after_addindex_ram(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_addindex(Config, ram_copies, Debug_Point).
+interrupted_after_addindex_disc(suite) -> [];
+interrupted_after_addindex_disc(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_addindex(Config, disc_copies, Debug_Point).
+interrupted_after_addindex_disc_only(suite) -> [];
+interrupted_after_addindex_disc_only(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_addindex(Config, disc_only_copies, Debug_Point).
+
+
+%%% After the dump we do not need a debug point
+interrupted_addindex(Config, Type, {mnesia_dumper, post_dump}) ->
+ [Node1] = Nodes = ?acquire_nodes(1, [{tc_timeout, timer:seconds(30)} | Config]),
+ Tab = itrpt,
+ ?match({atomic,ok},mnesia:create_table(Tab, [{Type, Nodes}])),
+ ?match({atomic,ok},mnesia:create_table(test, [{disc_copies,[Node1]}])),
+ ?match({atomic,ok}, mnesia:add_table_index(Tab, val)),
+ ?match(ok, mnesia:dirty_write({itrpt, before, 1})),
+ ?match(ok, mnesia:dirty_write({test, found_in_log, 1})),
+ ?match(stopped, mnesia:stop()),
+ ?match([], mnesia_test_lib:start_mnesia([Node1], [itrpt,test])),
+ %% Verify
+ ?match([{test, found_in_log, 1}], mnesia:dirty_read({test, found_in_log})),
+ case Type of
+ ram_copies ->
+ ?match([], mnesia:dirty_index_read(itrpt, 1, val));
+ _ ->
+ ?match([{itrpt, before, 1}], mnesia:dirty_index_read(itrpt, 1, val))
+ end,
+ ?verify_mnesia(Nodes, []);
+interrupted_addindex(Config, Type, KillAt) ->
+ ?is_debug_compiled,
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, [{tc_timeout, timer:seconds(30)} | Config]),
+ Tab = itrpt,
+ ?match({atomic, ok}, mnesia:create_table(Tab, [{Type, [Node1]}])),
+ ?match(ok, mnesia:dirty_write({Tab, before, 1})),
+ {_Alive, Kill} = {Node1, Node2},
+ {success, [A]} = ?start_activities([Kill]),
+
+ setup_dbgpoint(KillAt, Kill),
+ A ! fun() -> mnesia:add_table_index(Tab, val) end,
+ kill_at_debug(),
+ ?match([], mnesia_test_lib:start_mnesia([Node2], [])),
+
+ verify_tab(Node1, Node2),
+ ?match([{Tab, b, a}, {Tab, a, a}],
+ rpc:call(Node1, mnesia, dirty_index_read, [itrpt, a, val])),
+ ?match([{Tab, b, a}, {Tab, a, a}],
+ rpc:call(Node2, mnesia, dirty_index_read, [itrpt, a, val])),
+ ?verify_mnesia(Nodes, []).
+
+interrupted_before_delindex_ram(suite) -> [];
+interrupted_before_delindex_ram(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_delindex(Config, ram_copies, Debug_Point).
+interrupted_before_delindex_disc(suite) -> [];
+interrupted_before_delindex_disc(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_delindex(Config, disc_copies, Debug_Point).
+interrupted_before_delindex_disc_only(suite) -> [];
+interrupted_before_delindex_disc_only(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_delindex(Config, disc_only_copies, Debug_Point).
+
+interrupted_after_delindex_ram(suite) -> [];
+interrupted_after_delindex_ram(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_delindex(Config, ram_copies, Debug_Point).
+interrupted_after_delindex_disc(suite) -> [];
+interrupted_after_delindex_disc(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_delindex(Config, disc_copies, Debug_Point).
+interrupted_after_delindex_disc_only(suite) -> [];
+interrupted_after_delindex_disc_only(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_delindex(Config, disc_only_copies, Debug_Point).
+
+%%% After the dump we do not need a debug point
+interrupted_delindex(Config, Type, {mnesia_dumper, post_dump}) ->
+ [Node1] = Nodes = ?acquire_nodes(1, [{tc_timeout, timer:seconds(30)} | Config]),
+ Tab = itrpt,
+ ?match({atomic,ok},mnesia:create_table(Tab, [{Type, Nodes},{index,[val]}])),
+ ?match({atomic,ok},mnesia:create_table(test, [{disc_copies,[Node1]}])),
+ ?match({atomic,ok}, mnesia:del_table_index(Tab, val)),
+ ?match(ok, mnesia:dirty_write({itrpt, before, 1})),
+ ?match(ok, mnesia:dirty_write({test, found_in_log, 1})),
+ ?match(stopped, mnesia:stop()),
+ ?match([], mnesia_test_lib:start_mnesia([Node1], [itrpt,test])),
+ %% Verify
+ ?match([{test, found_in_log, 1}], mnesia:dirty_read({test, found_in_log})),
+ ?match({'EXIT',{aborted,{badarg,_}}}, mnesia:dirty_index_read(itrpt, 1, val)),
+ ?verify_mnesia(Nodes, []);
+
+interrupted_delindex(Config, Type, KillAt) ->
+ ?is_debug_compiled,
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, [{tc_timeout, timer:seconds(30)} | Config]),
+ Tab = itrpt,
+ ?match({atomic, ok}, mnesia:create_table(Tab, [{index, [val]},
+ {Type, [Node1]}])),
+ ?match(ok, mnesia:dirty_write({Tab, before, 1})),
+ {_Alive, Kill} = {Node1, Node2},
+ {success, [A]} = ?start_activities([Kill]),
+ setup_dbgpoint(KillAt, Kill),
+ A ! fun() -> mnesia:del_table_index(Tab, val) end,
+ kill_at_debug(),
+ ?match([], mnesia_test_lib:start_mnesia([Node2], [])),
+ verify_tab(Node1, Node2),
+ ?match({badrpc, _}, rpc:call(Node1, mnesia, dirty_index_read, [itrpt, a, val])),
+ ?match({badrpc, _}, rpc:call(Node2, mnesia, dirty_index_read, [itrpt, a, val])),
+ ?match([], rpc:call(Node1, mnesia, table_info, [Tab, index])),
+ ?match([], rpc:call(Node2, mnesia, table_info, [Tab, index])),
+ ?verify_mnesia(Nodes, []).
+
+interrupted_before_change_type_ram2disc(suite) -> [];
+interrupted_before_change_type_ram2disc(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_change_type(Config, ram_copies, disc_copies, changer, Debug_Point).
+interrupted_before_change_type_ram2disc_only(suite) -> [];
+interrupted_before_change_type_ram2disc_only(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_change_type(Config, ram_copies, disc_only_copies, changer, Debug_Point).
+interrupted_before_change_type_disc2ram(suite) -> [];
+interrupted_before_change_type_disc2ram(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_change_type(Config, disc_copies, ram_copies, changer, Debug_Point).
+interrupted_before_change_type_disc2disc_only(suite) -> [];
+interrupted_before_change_type_disc2disc_only(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_change_type(Config, disc_copies, disc_only_copies, changer, Debug_Point).
+interrupted_before_change_type_disc_only2ram(suite) -> [];
+interrupted_before_change_type_disc_only2ram(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_change_type(Config, disc_only_copies, ram_copies, changer, Debug_Point).
+interrupted_before_change_type_disc_only2disc(suite) -> [];
+interrupted_before_change_type_disc_only2disc(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_change_type(Config, disc_only_copies, disc_copies, changer, Debug_Point).
+interrupted_before_change_type_other_node(suite) -> [];
+interrupted_before_change_type_other_node(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, dump_schema_op},
+ interrupted_change_type(Config, ram_copies, disc_copies, the_other_one, Debug_Point).
+
+interrupted_after_change_type_ram2disc(suite) -> [];
+interrupted_after_change_type_ram2disc(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_change_type(Config, ram_copies, disc_copies, changer, Debug_Point).
+interrupted_after_change_type_ram2disc_only(suite) -> [];
+interrupted_after_change_type_ram2disc_only(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_change_type(Config, ram_copies, disc_only_copies, changer, Debug_Point).
+interrupted_after_change_type_disc2ram(suite) -> [];
+interrupted_after_change_type_disc2ram(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_change_type(Config, disc_copies, ram_copies, changer, Debug_Point).
+interrupted_after_change_type_disc2disc_only(suite) -> [];
+interrupted_after_change_type_disc2disc_only(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_change_type(Config, disc_copies, disc_only_copies, changer, Debug_Point).
+interrupted_after_change_type_disc_only2ram(suite) -> [];
+interrupted_after_change_type_disc_only2ram(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_change_type(Config, disc_only_copies, ram_copies, changer, Debug_Point).
+interrupted_after_change_type_disc_only2disc(suite) -> [];
+interrupted_after_change_type_disc_only2disc(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_change_type(Config, disc_only_copies, disc_copies, changer, Debug_Point).
+interrupted_after_change_type_other_node(suite) -> [];
+interrupted_after_change_type_other_node(Config) when is_list(Config) ->
+ Debug_Point = {mnesia_dumper, post_dump},
+ interrupted_change_type(Config, ram_copies, disc_copies, the_other_one, Debug_Point).
+
+interrupted_change_type(Config, FromType, ToType, Who, KillAt) ->
+ ?is_debug_compiled,
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, [{tc_timeout, timer:seconds(30)} | Config]),
+ Tab = itrpt,
+ ?match({atomic, ok}, mnesia:create_table(Tab, [{FromType, [Node2, Node1]}])),
+ ?match(ok, mnesia:dirty_write({Tab, before, 1})),
+
+ {_Alive, Kill} =
+ if Who == changer -> {Node1, Node2};
+ true -> {Node2, Node1}
+ end,
+
+ {success, [A]} = ?start_activities([Kill]),
+ setup_dbgpoint(KillAt, Kill),
+ A ! fun() -> mnesia:change_table_copy_type(Tab, Node2, ToType) end,
+ kill_at_debug(),
+ ?match([], mnesia_test_lib:start_mnesia(Nodes, [itrpt])),
+ verify_tab(Node1, Node2),
+ ?match(FromType, rpc:call(Node1, mnesia, table_info, [Tab, storage_type])),
+ ?match(ToType, rpc:call(Node2, mnesia, table_info, [Tab, storage_type])),
+ ?verify_mnesia(Nodes, []).
+
+interrupted_before_change_schema_type(suite) -> [];
+interrupted_before_change_schema_type(Config) when is_list(Config) ->
+ KillAt = {mnesia_dumper, dump_schema_op},
+ interrupted_change_schema_type(Config, KillAt).
+
+interrupted_after_change_schema_type(suite) -> [];
+interrupted_after_change_schema_type(Config) when is_list(Config) ->
+ KillAt = {mnesia_dumper, post_dump},
+ interrupted_change_schema_type(Config, KillAt).
+
+-define(cleanup(N, Config),
+ mnesia_test_lib:prepare_test_case([{reload_appls, [mnesia]}],
+ N, Config, ?FILE, ?LINE)).
+
+interrupted_change_schema_type(Config, KillAt) ->
+ ?is_debug_compiled,
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, [{tc_timeout, timer:seconds(30)} | Config]),
+
+ Tab = itrpt,
+ ?match({atomic, ok}, mnesia:create_table(Tab, [{ram_copies, [Node2, Node1]}])),
+ ?match(ok, mnesia:dirty_write({Tab, before, 1})),
+
+ {success, [A]} = ?start_activities([Node2]),
+ setup_dbgpoint(KillAt, Node2),
+
+ A ! fun() -> mnesia:change_table_copy_type(schema, Node2, ram_copies) end,
+ kill_at_debug(),
+ ?match(ok, rpc:call(Node2, mnesia, start, [[{extra_db_nodes, [Node1, Node2]}]])),
+ ?match(ok, rpc:call(Node2, mnesia, wait_for_tables, [[itrpt, schema], 2000])),
+ ?match(disc_copies, rpc:call(Node1, mnesia, table_info, [schema, storage_type])),
+ ?match(ram_copies, rpc:call(Node2, mnesia, table_info, [schema, storage_type])),
+
+ %% Go back to disc_copies !!
+ {success, [B]} = ?start_activities([Node2]),
+ setup_dbgpoint(KillAt, Node2),
+ B ! fun() -> mnesia:change_table_copy_type(schema, Node2, disc_copies) end,
+ kill_at_debug(),
+
+ ?match(ok, rpc:call(Node2, mnesia, start, [[{extra_db_nodes, [Node1, Node2]}]])),
+ ?match(ok, rpc:call(Node2, mnesia, wait_for_tables, [[itrpt, schema], 2000])),
+ ?match(disc_copies, rpc:call(Node1, mnesia, table_info, [schema, storage_type])),
+ ?match(disc_copies, rpc:call(Node2, mnesia, table_info, [schema, storage_type])),
+
+ ?verify_mnesia(Nodes, []),
+ ?cleanup(2, Config).
+
+%%% Helpers
+verify_tab(Node1, Node2) ->
+ ?match({atomic, ok},
+ rpc:call(Node1, mnesia, transaction, [fun() -> mnesia:dirty_write({itrpt, a, a}) end])),
+ ?match({atomic, ok},
+ rpc:call(Node2, mnesia, transaction, [fun() -> mnesia:dirty_write({itrpt, b, a}) end])),
+ ?match([{itrpt,a,a}], rpc:call(Node1, mnesia, dirty_read, [{itrpt, a}])),
+ ?match([{itrpt,a,a}], rpc:call(Node2, mnesia, dirty_read, [{itrpt, a}])),
+ ?match([{itrpt,b,a}], rpc:call(Node1, mnesia, dirty_read, [{itrpt, b}])),
+ ?match([{itrpt,b,a}], rpc:call(Node2, mnesia, dirty_read, [{itrpt, b}])),
+ ?match([{itrpt,before,1}], rpc:call(Node1, mnesia, dirty_read, [{itrpt, before}])),
+ ?match([{itrpt,before,1}], rpc:call(Node2, mnesia, dirty_read, [{itrpt, before}])).
+
+setup_dbgpoint(DbgPoint, Where) ->
+ Self = self(),
+ TestFun = fun(_, [InitBy]) ->
+ case InitBy of
+ schema_prepare ->
+ ignore;
+ schema_begin ->
+ ignore;
+ _Other ->
+ ?deactivate_debug_fun(DbgPoint),
+ unlink(Self),
+ Self ! {fun_done, node()},
+ timer:sleep(infinity)
+ end
+ end,
+ %% Kill when the debug point has been reached
+ ?remote_activate_debug_fun(Where, DbgPoint, TestFun, []).
+
+kill_at_debug() ->
+ %% Wait till it's killed
+ receive
+ {fun_done, Node} ->
+ ?match([], mnesia_test_lib:kill_mnesia([Node]))
+ after
+ timer:minutes(1) -> ?error("Timeout in kill_at_debug", [])
+ end.
+
diff --git a/lib/mnesia/test/mnesia_test_lib.erl b/lib/mnesia/test/mnesia_test_lib.erl
new file mode 100644
index 0000000000..1e98f017f7
--- /dev/null
+++ b/lib/mnesia/test/mnesia_test_lib.erl
@@ -0,0 +1,1058 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+%%% Author: Hakan Mattsson [email protected]
+%%% Purpose: Test case support library
+%%%
+%%% This test suite may be run as a part of the Grand Test Suite
+%%% of Erlang. The Mnesia test suite is structured in a hierarchy.
+%%% Each test case is implemented as an exported function with arity 1.
+%%% Test case identifiers must have the following syntax: {Module, Function}.
+%%%
+%%% The driver of the test suite runs in two passes as follows:
+%%% first the test case function is invoked with the atom 'suite' as
+%%% single argument. The returned value is treated as a list of sub
+%%% test cases. If the list of sub test cases is [] the test case
+%%% function is invoked again, this time with a list of nodes as
+%%% argument. If the list of sub test cases is not empty, the test
+%%% case driver applies the algorithm recursively on each element
+%%% in the list.
+%%%
+%%% All test cases are written in such a manner
+%%% that they start by invoking ?acquire_nodes(X, Config)
+%%% in order to prepare the test case execution. When that is
+%%% done, the test machinery ensures that at least X
+%%% nodes are connected to each other. If too few nodes were
+%%% specified in the Config, the test case is skipped. If there
+%%% were enough node names in the Config, X of them are selected,
+%%% and if some of them happen to be down they are restarted
+%%% via the slave module. When all nodes are up and running, a
+%%% disk resident schema is created on all nodes and Mnesia is
+%%% started on all nodes. This means that all test cases may
+%%% assume that Mnesia is up and running on all acquired nodes.
+%%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%%
+%%% doc(TestCases)
+%%%
+%%% Generates a test spec from parts of the test case structure
+%%%
+%%% struct(TestCases)
+%%%
+%%% Prints out the test case structure
+%%%
+%%% test(TestCases)
+%%%
+%%% Run parts of the test suite. Uses test/2.
+%%% Reads Config from mnesia_test.config and starts them if necessary.
+%%% Kills Mnesia and wipes out the Mnesia directories as a starter.
+%%%
+%%% test(TestCases, Config)
+%%%
+%%% Run parts of the test suite on the given Nodes,
+%%% assuming that the nodes are up and running.
+%%% Kills Mnesia and wipes out the Mnesia directories as a starter.
+%%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
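+%%%
+%%% Illustrative sketch (hypothetical test case, not part of this
+%%% library) of the conventions described above: the 'suite' clause
+%%% returns the list of sub test cases (here none) and the Config
+%%% clause performs the actual test on the acquired nodes:
+%%%
+%%%   my_simple_case(suite) -> [];
+%%%   my_simple_case(Config) when is_list(Config) ->
+%%%       [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+%%%       ?match({atomic, ok},
+%%%              mnesia:create_table(demo, [{ram_copies, Nodes}])),
+%%%       ?match({atomic, ok},
+%%%              rpc:call(Node2, mnesia, transaction,
+%%%                       [fun() -> mnesia:write({demo, k, v}) end])),
+%%%       ?match([{demo, k, v}], rpc:call(Node1, mnesia, dirty_read, [{demo, k}])),
+%%%       ?verify_mnesia(Nodes, []).
+%%%
+%%% Such a case could then be run on its own with
+%%% mnesia_test_lib:test({Module, my_simple_case}).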
+
+-module(mnesia_test_lib).
+-author('[email protected]').
+-export([
+ log/2,
+ log/4,
+ verbose/4,
+ default_config/0,
+ diskless/1,
+ eval_test_case/3,
+ test_driver/2,
+ test_case_evaluator/3,
+ activity_evaluator/1,
+ flush/0,
+ pick_msg/0,
+ start_activities/1,
+ start_transactions/1,
+ start_transactions/2,
+ start_sync_transactions/1,
+ start_sync_transactions/2,
+ sync_trans_tid_serial/1,
+ prepare_test_case/5,
+ select_nodes/4,
+ init_nodes/3,
+ error/4,
+ slave_start_link/0,
+ slave_start_link/1,
+ slave_sup/0,
+
+ start_mnesia/1,
+ start_mnesia/2,
+ start_appls/2,
+ start_appls/3,
+ start_wait/2,
+ storage_type/2,
+ stop_mnesia/1,
+ stop_appls/2,
+ sort/1,
+ kill_mnesia/1,
+ kill_appls/2,
+ verify_mnesia/4,
+ shutdown/0,
+ verify_replica_location/5,
+ lookup_config/2,
+ sync_tables/2,
+ remote_start/3,
+ remote_stop/1,
+ remote_kill/1,
+
+ reload_appls/2,
+
+ remote_activate_debug_fun/6,
+ do_remote_activate_debug_fun/6,
+
+ test/1,
+ test/2,
+ doc/1,
+ struct/1,
+ init_per_testcase/2,
+ fin_per_testcase/2,
+ kill_tc/2
+ ]).
+
+-include("mnesia_test_lib.hrl").
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% included for test server compatibility
+%% assume that all test cases only takes Config as sole argument
+init_per_testcase(_Func, Config) ->
+ global:register_name(mnesia_global_logger, group_leader()),
+ Config.
+
+fin_per_testcase(_Func, Config) ->
+ global:unregister_name(mnesia_global_logger),
+ %% Nodes = select_nodes(all, Config, ?FILE, ?LINE),
+ %% rpc:multicall(Nodes, mnesia, lkill, []),
+ Config.
+
+%% Use ?log(Format, Args) as wrapper
+log(Format, Args, LongFile, Line) ->
+ File = filename:basename(LongFile),
+ Format2 = lists:concat([File, "(", Line, ")", ": ", Format]),
+ log(Format2, Args).
+
+log(Format, Args) ->
+ case global:whereis_name(mnesia_global_logger) of
+ undefined ->
+ io:format(user, Format, Args);
+ Pid ->
+ io:format(Pid, Format, Args)
+ end.
+
+verbose(Format, Args, File, Line) ->
+ Arg = mnesia_test_verbose,
+ case get(Arg) of
+ false ->
+ ok;
+ true ->
+ log(Format, Args, File, Line);
+ undefined ->
+ case init:get_argument(Arg) of
+ {ok, List} when is_list(List) ->
+ case lists:last(List) of
+ ["true"] ->
+ put(Arg, true),
+ log(Format, Args, File, Line);
+ _ ->
+ put(Arg, false),
+ ok
+ end;
+ _ ->
+ put(Arg, false),
+ ok
+ end
+ end.
+
+-record('REASON', {file, line, desc}).
+
+error(Format, Args, File, Line) ->
+ global:send(mnesia_global_logger, {failed, File, Line}),
+ Fail = #'REASON'{file = filename:basename(File),
+ line = Line,
+ desc = Args},
+ case global:whereis_name(mnesia_test_case_sup) of
+ undefined ->
+ ignore;
+ Pid ->
+ Pid ! Fail
+%% global:send(mnesia_test_case_sup, Fail),
+ end,
+ log("<>ERROR<>~n" ++ Format, Args, File, Line).
+
+storage_type(Default, Config) ->
+ case diskless(Config) of
+ true ->
+ ram_copies;
+ false ->
+ Default
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+default_config() ->
+ [{nodes, default_nodes()}].
+
+default_nodes() ->
+ mk_nodes(3, []).
+
+mk_nodes(0, Nodes) ->
+ Nodes;
+mk_nodes(N, []) ->
+ mk_nodes(N - 1, [node()]);
+mk_nodes(N, Nodes) when N > 0 ->
+ Head = hd(Nodes),
+ [Name, Host] = node_to_name_and_host(Head),
+ Nodes ++ [mk_node(I, Name, Host) || I <- lists:seq(1, N)].
+
+mk_node(N, Name, Host) ->
+ list_to_atom(lists:concat([Name ++ integer_to_list(N) ++ "@" ++ Host])).
+
+slave_start_link() ->
+ slave_start_link(node()).
+
+slave_start_link(Node) ->
+ [Local, Host] = node_to_name_and_host(Node),
+ {Mega, Sec, Micro} = erlang:now(),
+ List = [Local, "_", Mega, "_", Sec, "_", Micro],
+ Name = list_to_atom(lists:concat(List)),
+ slave_start_link(list_to_atom(Host), Name).
+
+slave_start_link(Host, Name) ->
+ slave_start_link(Host, Name, 10).
+
+slave_start_link(Host, Name, Retries) ->
+ Debug = atom_to_list(mnesia:system_info(debug)),
+ Args = "-mnesia debug " ++ Debug ++
+ " -pa " ++
+ filename:dirname(code:which(?MODULE)) ++
+ " -pa " ++
+ filename:dirname(code:which(mnesia)),
+ case starter(Host, Name, Args) of
+ {ok, NewNode} ->
+ ?match(pong, net_adm:ping(NewNode)),
+ {ok, Cwd} = file:get_cwd(),
+ Path = code:get_path(),
+ ok = rpc:call(NewNode, file, set_cwd, [Cwd]),
+ true = rpc:call(NewNode, code, set_path, [Path]),
+ spawn_link(NewNode, ?MODULE, slave_sup, []),
+ rpc:multicall([node() | nodes()], global, sync, []),
+ {ok, NewNode};
+ {error, Reason} when Retries == 0->
+ {error, Reason};
+ {error, Reason} ->
+ io:format("Could not start slavenode ~p ~p retrying~n",
+ [{Host, Name, Args}, Reason]),
+ timer:sleep(500),
+ slave_start_link(Host, Name, Retries - 1)
+ end.
+
+starter(Host, Name, Args) ->
+ case os:type() of
+ vxworks ->
+ X = test_server:start_node(Name, slave, [{args,Args}]),
+ timer:sleep(5000),
+ X;
+ _ ->
+ slave:start(Host, Name, Args)
+ end.
+
+slave_sup() ->
+ process_flag(trap_exit, true),
+ receive
+ {'EXIT', _, _} ->
+ case os:type() of
+ vxworks ->
+ erlang:halt();
+ _ ->
+ ignore
+ end
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Index the test case structure
+
+doc(TestCases) when is_list(TestCases) ->
+ test(TestCases, suite),
+ SuiteFname = "index.html",
+ io:format("Generating HTML test specification to file: ~s~n",
+ [SuiteFname]),
+ {ok, Fd} = file:open(SuiteFname, [write]),
+ io:format(Fd, "<TITLE>Test specification for ~p</TITLE>.~n", [TestCases]),
+ io:format(Fd, "<H1>Test specification for ~p</H1>~n", [TestCases]),
+ io:format(Fd, "Test cases which not are implemented yet are written in <B>bold face</B>.~n~n", []),
+
+ io:format(Fd, "<BR><BR>~n", []),
+ io:format(Fd, "~n<DL>~n", []),
+ do_doc(Fd, TestCases, []),
+ io:format(Fd, "</DL>~n", []),
+ file:close(Fd);
+doc(TestCases) ->
+ doc([TestCases]).
+
+do_doc(Fd, [H | T], List) ->
+ case H of
+ {Module, TestCase} when is_atom(Module), is_atom(TestCase) ->
+ do_doc(Fd, Module, TestCase, List);
+ TestCase when is_atom(TestCase), List == [] ->
+ do_doc(Fd, mnesia_SUITE, TestCase, List);
+ TestCase when is_atom(TestCase) ->
+ do_doc(Fd, hd(List), TestCase, List)
+ end,
+ do_doc(Fd, T, List);
+do_doc(_, [], _) ->
+ ok.
+
+do_doc(Fd, Module, TestCase, List) ->
+ case get_suite(Module, TestCase) of
+ [] ->
+ %% Implemented leaf test case
+ Head = ?flat_format("<A HREF=~p.html#~p_1>{~p, ~p}</A>}",
+ [Module, TestCase, Module, TestCase]),
+ print_doc(Fd, Module, TestCase, Head);
+ Suite when is_list(Suite) ->
+ %% Test suite
+ Head = ?flat_format("{~p, ~p}", [Module, TestCase]),
+ print_doc(Fd, Module, TestCase, Head),
+ io:format(Fd, "~n<DL>~n", []),
+ do_doc(Fd, Suite, [Module | List]),
+ io:format(Fd, "</DL>~n", []);
+ 'NYI' ->
+ %% Not yet implemented
+ Head = ?flat_format("<B>{~p, ~p}</B>", [Module, TestCase]),
+ print_doc(Fd, Module, TestCase, Head)
+ end.
+
+print_doc(Fd, Mod, Fun, Head) ->
+ case catch (apply(Mod, Fun, [doc])) of
+ {'EXIT', _} ->
+ io:format(Fd, "<DT>~s</DT>~n", [Head]);
+ Doc when is_list(Doc) ->
+ io:format(Fd, "<DT><U>~s</U><BR><DD>~n", [Head]),
+ print_rows(Fd, Doc),
+ io:format(Fd, "</DD><BR><BR>~n", [])
+ end.
+
+print_rows(_Fd, []) ->
+ ok;
+print_rows(Fd, [H | T]) when is_list(H) ->
+ io:format(Fd, "~s~n", [H]),
+ print_rows(Fd, T);
+print_rows(Fd, [H | T]) when is_integer(H) ->
+ io:format(Fd, "~s~n", [[H | T]]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Show the test case structure
+
+struct(TestCases) ->
+ T = test(TestCases, suite),
+ struct(T, "").
+
+struct({Module, TestCase}, Indentation)
+ when is_atom(Module), is_atom(TestCase) ->
+ log("~s{~p, ~p} ...~n", [Indentation, Module, TestCase]);
+struct({Module, TestCase, Other}, Indentation)
+ when is_atom(Module), is_atom(TestCase) ->
+ log("~s{~p, ~p} ~p~n", [Indentation, Module, TestCase, Other]);
+struct([], _) ->
+ ok;
+struct([TestCase | TestCases], Indentation) ->
+ struct(TestCase, Indentation),
+ struct(TestCases, Indentation);
+struct({TestCase, []}, Indentation) ->
+ struct(TestCase, Indentation);
+struct({TestCase, SubTestCases}, Indentation) when is_list(SubTestCases) ->
+ struct(TestCase, Indentation),
+ struct(SubTestCases, Indentation ++ " ").
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Execute the test cases
+
+test(TestCases) ->
+ test(TestCases, []).
+
+test(TestCases, suite) when is_list(TestCases) ->
+ test_driver(TestCases, suite);
+test(TestCases, Config) when is_list(TestCases) ->
+ D1 = lists:duplicate(10, $=),
+ D2 = lists:duplicate(10, $ ),
+ log("~n~s TEST CASES: ~p~n ~sCONFIG: ~p~n~n", [D1, TestCases, D2, Config]),
+ test_driver(TestCases, Config);
+test(TestCase, Config) ->
+ test([TestCase], Config).
+
+test_driver([], _Config) ->
+ [];
+test_driver([T|TestCases], Config) ->
+ L1 = test_driver(T, Config),
+ L2 = test_driver(TestCases, Config),
+ [L1|L2];
+test_driver({Module, TestCases}, Config) when is_list(TestCases)->
+ test_driver(default_module(Module, TestCases), Config);
+test_driver({_, {Module, TestCase}}, Config) ->
+ test_driver({Module, TestCase}, Config);
+test_driver({Module, TestCase}, Config) ->
+ Sec = timer:seconds(1) * 1000,
+ case get_suite(Module, TestCase) of
+ [] when Config == suite ->
+ {Module, TestCase, 'IMPL'};
+ [] ->
+ log("Eval test case: ~w~n", [{Module, TestCase}]),
+ {T, Res} =
+ timer:tc(?MODULE, eval_test_case, [Module, TestCase, Config]),
+ log("Tested ~w in ~w sec~n", [TestCase, T div Sec]),
+ {T div Sec, Res};
+ Suite when is_list(Suite), Config == suite ->
+ Res = test_driver(default_module(Module, Suite), Config),
+ {{Module, TestCase}, Res};
+ Suite when is_list(Suite) ->
+ log("Expand test case ~w~n", [{Module, TestCase}]),
+ Def = default_module(Module, Suite),
+ {T, Res} = timer:tc(?MODULE, test_driver, [Def, Config]),
+ {T div Sec, {{Module, TestCase}, Res}};
+ 'NYI' when Config == suite ->
+ {Module, TestCase, 'NYI'};
+ 'NYI' ->
+ log("<WARNING> Test case ~w NYI~n", [{Module, TestCase}]),
+ {0, {skip, {Module, TestCase}, "NYI"}}
+ end;
+test_driver(TestCase, Config) ->
+ DefaultModule = mnesia_SUITE,
+ log("<>WARNING<> Missing module in test case identifier. "
+ "{~w, ~w} assumed~n", [DefaultModule, TestCase]),
+ test_driver({DefaultModule, TestCase}, Config).
+
+default_module(DefaultModule, TestCases) when is_list(TestCases) ->
+ Fun = fun(T) ->
+ case T of
+ {_, _} -> true;
+ T -> {true, {DefaultModule, T}}
+ end
+ end,
+ lists:zf(Fun, TestCases).
+
+%% Returns a list (possibly empty) or the atom 'NYI'
+get_suite(Mod, Fun) ->
+ case catch (apply(Mod, Fun, [suite])) of
+ {'EXIT', _} -> 'NYI';
+ List when is_list(List) -> List
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+eval_test_case(Mod, Fun, Config) ->
+ flush(),
+ global:register_name(mnesia_test_case_sup, self()),
+ Flag = process_flag(trap_exit, true),
+ Pid = spawn_link(?MODULE, test_case_evaluator, [Mod, Fun, [Config]]),
+ R = wait_for_evaluator(Pid, Mod, Fun, Config),
+ global:unregister_name(mnesia_test_case_sup),
+ process_flag(trap_exit, Flag),
+ R.
+
+flush() ->
+ receive Msg -> [Msg | flush()]
+ after 0 -> []
+ end.
+
+wait_for_evaluator(Pid, Mod, Fun, Config) ->
+ receive
+ {'EXIT', Pid, {test_case_ok, _PidRes}} ->
+ Errors = flush(),
+ Res =
+ case Errors of
+ [] -> ok;
+ Errors -> failed
+ end,
+ {Res, {Mod, Fun}, Errors};
+ {'EXIT', Pid, {skipped, Reason}} ->
+ log("<WARNING> Test case ~w skipped, because ~p~n",
+ [{Mod, Fun}, Reason]),
+ Mod:fin_per_testcase(Fun, Config),
+ {skip, {Mod, Fun}, Reason};
+ {'EXIT', Pid, Reason} ->
+ log("<>ERROR<> Eval process ~w exited, because ~p~n",
+ [{Mod, Fun}, Reason]),
+ Mod:fin_per_testcase(Fun, Config),
+ {crash, {Mod, Fun}, Reason}
+ end.
+
+test_case_evaluator(Mod, Fun, [Config]) ->
+ NewConfig = Mod:init_per_testcase(Fun, Config),
+ R = apply(Mod, Fun, [NewConfig]),
+ Mod:fin_per_testcase(Fun, NewConfig),
+ exit({test_case_ok, R}).
+
+activity_evaluator(Coordinator) ->
+ activity_evaluator_loop(Coordinator),
+ exit(normal).
+
+activity_evaluator_loop(Coordinator) ->
+ receive
+ begin_trans ->
+ transaction(Coordinator, 0);
+ {begin_trans, MaxRetries} ->
+ transaction(Coordinator, MaxRetries);
+ end_trans ->
+ end_trans;
+ Fun when is_function(Fun) ->
+ Coordinator ! {self(), Fun()},
+ activity_evaluator_loop(Coordinator);
+% {'EXIT', Coordinator, Reason} ->
+% Reason;
+ ExitExpr ->
+% ?error("activity_evaluator_loop ~p ~p: exit(~p)~n}", [Coordinator, self(), ExitExpr]),
+ exit(ExitExpr)
+ end.
+
+transaction(Coordinator, MaxRetries) ->
+ Fun = fun() ->
+ Coordinator ! {self(), begin_trans},
+ activity_evaluator_loop(Coordinator)
+ end,
+ Coordinator ! {self(), mnesia:transaction(Fun, MaxRetries)},
+ activity_evaluator_loop(Coordinator).
+
+pick_msg() ->
+ receive
+ Message -> Message
+ after 4000 -> timeout
+ end.
+
+start_activities(Nodes) ->
+ Fun = fun(N) -> spawn_link(N, ?MODULE, activity_evaluator, [self()]) end,
+ Pids = mapl(Fun, Nodes),
+ {success, Pids}.
+
+mapl(Fun, [H|T]) ->
+ Res = Fun(H),
+ [Res|mapl(Fun, T)];
+mapl(_Fun, []) ->
+ [].
+
+diskless(Config) ->
+ case lists:keysearch(diskless, 1, Config) of
+ {value, {diskless, true}} ->
+ true;
+ _Else ->
+ false
+ end.
+
+
+start_transactions(Pids) ->
+ Fun = fun(Pid) ->
+ Pid ! begin_trans,
+ ?match_receive({Pid, begin_trans})
+ end,
+ mapl(Fun, Pids).
+
+start_sync_transactions(Pids) ->
+ Nodes = [node(Pid) || Pid <- Pids],
+ Fun = fun(Pid) ->
+ sync_trans_tid_serial(Nodes),
+ Pid ! begin_trans,
+ ?match_receive({Pid, begin_trans})
+ end,
+ mapl(Fun, Pids).
+
+
+start_transactions(Pids, MaxRetries) ->
+ Fun = fun(Pid) ->
+ Pid ! {begin_trans, MaxRetries},
+ ?match_receive({Pid, begin_trans})
+ end,
+ mapl(Fun, Pids).
+
+start_sync_transactions(Pids, MaxRetries) ->
+ Nodes = [node(Pid) || Pid <- Pids],
+ Fun = fun(Pid) ->
+ sync_trans_tid_serial(Nodes),
+ Pid ! {begin_trans, MaxRetries},
+ ?match_receive({Pid, begin_trans})
+ end,
+ mapl(Fun, Pids).
+
+sync_trans_tid_serial(Nodes) ->
+ Fun = fun() -> mnesia:write_lock_table(schema) end,
+ rpc:multicall(Nodes, mnesia, transaction, [Fun]).
+
+select_nodes(N, Config, File, Line) ->
+ prepare_test_case([], N, Config, File, Line).
+
+prepare_test_case(Actions, N, Config, File, Line) ->
+ NodeList1 = lookup_config(nodes, Config),
+ NodeList2 = lookup_config(nodenames, Config), %% For testserver
+ NodeList3 = append_unique(NodeList1, NodeList2),
+ This = node(),
+ All = [This | lists:delete(This, NodeList3)],
+ Selected = pick_nodes(N, All, File, Line),
+ case diskless(Config) of
+ true ->
+ ok;
+ false ->
+ rpc:multicall(Selected, application, set_env,[mnesia, schema_location, opt_disc])
+ end,
+ do_prepare(Actions, Selected, All, Config, File, Line).
+
+do_prepare([], Selected, _All, _Config, _File, _Line) ->
+ Selected;
+do_prepare([{init_test_case, Appls} | Actions], Selected, All, Config, File, Line) ->
+ set_kill_timer(Config),
+ Started = init_nodes(Selected, File, Line),
+ All2 = append_unique(Started, All),
+ Alive = mnesia_lib:intersect(nodes() ++ [node()], All2),
+ kill_appls(Appls, Alive),
+ process_flag(trap_exit, true),
+ do_prepare(Actions, Started, All2, Config, File, Line);
+do_prepare([delete_schema | Actions], Selected, All, Config, File, Line) ->
+ Alive = mnesia_lib:intersect(nodes() ++ [node()], All),
+ case diskless(Config) of
+ true ->
+ skip;
+ false ->
+ Del = fun(Node) ->
+ case mnesia:delete_schema([Node]) of
+ ok -> ok;
+ {error, {"All nodes not running",_}} ->
+ ok;
+ Else ->
+ ?log("Delete schema error ~p ~n", [Else])
+ end
+ end,
+ lists:foreach(Del, Alive)
+ end,
+ do_prepare(Actions, Selected, All, Config, File, Line);
+do_prepare([create_schema | Actions], Selected, All, Config, File, Line) ->
+ case diskless(Config) of
+ true ->
+ skip;
+ _Else ->
+ case mnesia:create_schema(Selected) of
+ ok ->
+ ignore;
+ BadNodes ->
+ ?fatal("Cannot create Mnesia schema on ~p~n", [BadNodes])
+ end
+ end,
+ do_prepare(Actions, Selected, All, Config, File, Line);
+do_prepare([{start_appls, Appls} | Actions], Selected, All, Config, File, Line) ->
+ case start_appls(Appls, Selected, Config) of
+ [] -> ok;
+ Bad -> ?fatal("Cannot start appls ~p: ~p~n", [Appls, Bad])
+ end,
+ do_prepare(Actions, Selected, All, Config, File, Line);
+do_prepare([{reload_appls, Appls} | Actions], Selected, All, Config, File, Line) ->
+ reload_appls(Appls, Selected),
+ do_prepare(Actions, Selected, All, Config, File, Line).
+
+set_kill_timer(Config) ->
+ case init:get_argument(mnesia_test_timeout) of
+ {ok, _ } -> ok;
+ _ ->
+ Time0 =
+ case lookup_config(tc_timeout, Config) of
+ [] -> timer:minutes(5);
+ ConfigTime when is_integer(ConfigTime) -> ConfigTime
+ end,
+ Mul = try
+ test_server:timetrap_scale_factor()
+ catch _:_ -> 1 end,
+ (catch test_server:timetrap(Mul*Time0 + 1000)),
+ spawn_link(?MODULE, kill_tc, [self(),Time0*Mul])
+ end.
+
+kill_tc(Pid, Time) ->
+ receive
+ after Time ->
+ case process_info(Pid) of
+ undefined -> ok;
+ _ ->
+ ?error("Watchdog in test case timed out "
+ "in ~p min~n", [Time div (1000*60)]),
+ Files = mnesia_lib:dist_coredump(),
+ ?log("Cores dumped to:~n ~p~n", [Files]),
+ %% Generate Erlang crash dumps.
+ %% GenDump = fun(Node) ->
+ %% File = "CRASH_" ++ atom_to_list(Node) ++ ".dump",
+ %% rpc:call(Node, os, putenv, ["ERL_CRASH_DUMP", File]),
+ %% rpc:cast(Node, erlang, halt, ["RemoteTimeTrap"])
+ %% end,
+ %% [GenDump(Node) || Node <- nodes()],
+
+ %% erlang:halt("DebugTimeTrap"),
+ exit(Pid, kill)
+ end
+ end.
+
+
+append_unique([], List) -> List;
+append_unique([H|R], List) ->
+ case lists:member(H, List) of
+ true -> append_unique(R, List);
+ false -> [H | append_unique(R, List)]
+ end.
+
+pick_nodes(all, Nodes, File, Line) ->
+ pick_nodes(length(Nodes), Nodes, File, Line);
+pick_nodes(N, [H | T], File, Line) when N > 0 ->
+ [H | pick_nodes(N - 1, T, File, Line)];
+pick_nodes(0, _Nodes, _File, _Line) ->
+ [];
+pick_nodes(N, [], File, Line) ->
+ ?skip("Test case (~p(~p)) ignored: ~p nodes missing~n",
+ [File, Line, N]).
+
+init_nodes([Node | Nodes], File, Line) ->
+ case net_adm:ping(Node) of
+ pong ->
+ [Node | init_nodes(Nodes, File, Line)];
+ pang ->
+ [Name, Host] = node_to_name_and_host(Node),
+ case slave_start_link(Host, Name) of
+ {ok, Node1} ->
+ Path = code:get_path(),
+ true = rpc:call(Node1, code, set_path, [Path]),
+ [Node1 | init_nodes(Nodes, File, Line)];
+ Other ->
+ ?skip("Test case (~p(~p)) ignored: cannot start node ~p: ~p~n",
+ [File, Line, Node, Other])
+ end
+ end;
+init_nodes([], _File, _Line) ->
+ [].
+
+%% Returns [Name, Host]
+node_to_name_and_host(Node) ->
+ string:tokens(atom_to_list(Node), [$@]).
+
+lookup_config(Key,Config) ->
+ case lists:keysearch(Key,1,Config) of
+ {value,{Key,Val}} ->
+ Val;
+ _ ->
+ []
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+start_appls(Appls, Nodes) ->
+ start_appls(Appls, Nodes, [], [schema]).
+
+start_appls(Appls, Nodes, Config) ->
+ start_appls(Appls, Nodes, Config, [schema]).
+
+start_appls([Appl | Appls], Nodes, Config, Tabs) ->
+ {Started, BadStarters} =
+ rpc:multicall(Nodes, ?MODULE, remote_start, [Appl, Config, Nodes]),
+ BadS = [{Node, Appl, Res} || {Node, Res} <- Started, Res /= ok],
+ BadN = [{BadNode, Appl, bad_start} || BadNode <- BadStarters],
+ Bad = BadS ++ BadN,
+ case Appl of
+ mnesia when Bad == [] ->
+ sync_tables(Nodes, Tabs);
+ _ ->
+ ignore
+ end,
+ Bad ++ start_appls(Appls, Nodes, Config, Tabs);
+start_appls([], _Nodes, _Config, _Tabs) ->
+ [].
+
+remote_start(mnesia, Config, Nodes) ->
+ case diskless(Config) of
+ true ->
+ application_controller:set_env(mnesia,
+ extra_db_nodes,
+ Nodes -- [node()]),
+ application_controller:set_env(mnesia,
+ schema_location,
+ ram);
+ false ->
+ application_controller:set_env(mnesia,
+ schema_location,
+ opt_disc),
+ ignore
+ end,
+ {node(), mnesia:start()};
+remote_start(Appl, _Config, _Nodes) ->
+ Res =
+ case application:start(Appl) of
+ {error, {already_started, Appl}} ->
+ ok;
+ Other ->
+ Other
+ end,
+ {node(), Res}.
+
+%% Start Mnesia on all given nodes and wait for specified
+%% tables to be accessible on each node. The atom all means
+%% that we should wait for all tables to be loaded
+%%
+%% Returns a list of error tuples {BadNode, mnesia, Reason}
+start_mnesia(Nodes) ->
+ start_appls([mnesia], Nodes).
+start_mnesia(Nodes, Tabs) when is_list(Nodes) ->
+ start_appls([mnesia], Nodes, [], Tabs).
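+
+%% Typical call site (taken from the suites above), where an empty
+%% result list means that Mnesia and the given tables came up on all
+%% listed nodes:
+%%   ?match([], mnesia_test_lib:start_mnesia([Node1], [itrpt, test]))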
+
+%% Wait for the tables to be accessible from all nodes in the list,
+%% and for all nodes to be aware that the other nodes can access them as well.
+sync_tables(Nodes, Tabs) ->
+ Res = send_wait(Nodes, Tabs, []),
+ if
+ Res == [] ->
+ mnesia:transaction(fun() -> mnesia:write_lock_table(schema) end),
+ Res;
+ true ->
+ Res
+ end.
+
+send_wait([Node | Nodes], Tabs, Pids) ->
+ Pid = spawn_link(Node, ?MODULE, start_wait, [self(), Tabs]),
+ send_wait(Nodes, Tabs, [Pid | Pids]);
+send_wait([], _Tabs, Pids) ->
+ rec_wait(Pids, []).
+
+rec_wait([Pid | Pids], BadRes) ->
+ receive
+ {'EXIT', Pid, R} ->
+ rec_wait(Pids, [{node(Pid), bad_wait, R} | BadRes]);
+ {Pid, ok} ->
+ rec_wait(Pids, BadRes);
+ {Pid, {error, R}} ->
+ rec_wait(Pids, [{node(Pid), bad_wait, R} | BadRes])
+ end;
+rec_wait([], BadRes) ->
+ BadRes.
+
+start_wait(Coord, Tabs) ->
+ process_flag(trap_exit, true),
+ Mon = whereis(mnesia_monitor),
+ case catch link(Mon) of
+ {'EXIT', _} ->
+ unlink(Coord),
+ Coord ! {self(), {error, {node_not_running, node()}}};
+ _ ->
+ Res = start_wait_loop(Tabs),
+ unlink(Mon),
+ unlink(Coord),
+ Coord ! {self(), Res}
+ end.
+
+start_wait_loop(Tabs) ->
+ receive
+ {'EXIT', Pid, Reason} ->
+ {error, {start_wait, Pid, Reason}}
+ after 0 ->
+ case mnesia:wait_for_tables(Tabs, timer:seconds(30)) of
+ ok ->
+ verify_nodes(Tabs);
+ {timeout, BadTabs} ->
+ log("<>WARNING<> Wait for tables ~p: ~p~n", [node(), Tabs]),
+ start_wait_loop(BadTabs);
+ {error, Reason} ->
+ {error, {start_wait, Reason}}
+ end
+ end.
+
+verify_nodes(Tabs) ->
+ verify_nodes(Tabs, 0).
+
+verify_nodes([], _) ->
+ ok;
+
+verify_nodes([Tab| Tabs], N) ->
+ ?match(X when is_atom(X), mnesia_lib:val({Tab, where_to_read})),
+ Nodes = mnesia:table_info(Tab, where_to_write),
+ Copies =
+ mnesia:table_info(Tab, disc_copies) ++
+ mnesia:table_info(Tab, disc_only_copies) ++
+ mnesia:table_info(Tab, ram_copies),
+ Local = mnesia:table_info(Tab, local_content),
+ case Copies -- Nodes of
+ [] ->
+ verify_nodes(Tabs, 0);
+ _Else when Local == true, Nodes /= [] ->
+ verify_nodes(Tabs, 0);
+ Else ->
+ N2 =
+ if
+ N > 20 ->
+ log("<>WARNING<> ~w Waiting for table: ~p on ~p ~n",
+ [node(), Tab, Else]),
+ 0;
+ true -> N+1
+ end,
+ timer:sleep(500),
+ verify_nodes([Tab| Tabs], N2)
+ end.
+
+
+%% Nicely stop Mnesia on all given nodes
+%%
+%% Returns a list of error tuples {BadNode, Reason}
+stop_mnesia(Nodes) when is_list(Nodes) ->
+ stop_appls([mnesia], Nodes).
+
+stop_appls([Appl | Appls], Nodes) when is_list(Nodes) ->
+ {Stopped, BadNodes} = rpc:multicall(Nodes, ?MODULE, remote_stop, [Appl]),
+ BadS =[{Node, Appl, Res} || {Node, Res} <- Stopped, Res /= stopped],
+ BadN =[{BadNode, Appl, bad_node} || BadNode <- BadNodes],
+ BadS ++ BadN ++ stop_appls(Appls, Nodes);
+stop_appls([], _Nodes) ->
+ [].
+
+remote_stop(mnesia) ->
+ {node(), mnesia:stop()};
+remote_stop(Appl) ->
+ {node(), application:stop(Appl)}.
+
+remote_kill([Appl | Appls]) ->
+ catch Appl:lkill(),
+ application:stop(Appl),
+ remote_kill(Appls);
+remote_kill([]) ->
+ ok.
+
+%% Abruptly kill Mnesia on all given nodes
+%% Returns []
+kill_appls(Appls, Nodes) when is_list(Nodes) ->
+ verbose("<>WARNING<> Intentionally killing ~p: ~w...~n",
+ [Appls, Nodes], ?FILE, ?LINE),
+ rpc:multicall(Nodes, ?MODULE, remote_kill, [Appls]),
+ [].
+
+kill_mnesia(Nodes) when is_list(Nodes) ->
+ kill_appls([mnesia], Nodes).
+
+reload_appls([Appl | Appls], Selected) ->
+ kill_appls([Appl], Selected),
+ timer:sleep(1000),
+ Ok = {[ok || _N <- Selected], []},
+ {Ok2temp, Empty} = rpc:multicall(Selected, application, unload, [Appl]),
+ Conv = fun({error,{not_loaded,mnesia}}) -> ok; (Else) -> Else end,
+ Ok2 = {lists:map(Conv, Ok2temp), Empty},
+
+ Ok3 = rpc:multicall(Selected, application, load, [Appl]),
+ if
+ Ok /= Ok2 ->
+ ?fatal("Cannot unload appl ~p: ~p~n", [Appl, Ok2]);
+ Ok /= Ok3 ->
+ ?fatal("Cannot load appl ~p: ~p~n", [Appl, Ok3]);
+ true ->
+ ok
+ end,
+ reload_appls(Appls, Selected);
+reload_appls([], _Selected) ->
+ ok.
+
+shutdown() ->
+ log("<>WARNING<> Intentionally shutting down all nodes... ~p~n",
+ [nodes() ++ [node()]]),
+ rpc:multicall(nodes(), erlang, halt, []),
+ erlang:halt().
+
+verify_mnesia(Ups, Downs, File, Line) when is_list(Ups), is_list(Downs) ->
+ BadUps =
+ [N || N <- Ups, rpc:call(N, mnesia, system_info, [is_running]) /= yes],
+ BadDowns =
+ [N || N <- Downs, rpc:call(N, mnesia, system_info, [is_running]) == yes],
+ if
+ BadUps == [] ->
+ ignore;
+ true ->
+ error("Mnesia is not running as expected: ~p~n",
+ [BadUps], File, Line)
+ end,
+ if
+ BadDowns == [] ->
+ ignore;
+ true ->
+ error("Mnesia is not stopped as expected: ~p~n",
+ [BadDowns], File, Line)
+ end,
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+verify_replica_location(Tab, [], [], [], _) ->
+ ?match({'EXIT', _}, mnesia:table_info(Tab, ram_copies)),
+ ?match({'EXIT', _}, mnesia:table_info(Tab, disc_copies)),
+ ?match({'EXIT', _}, mnesia:table_info(Tab, disc_only_copies)),
+ ?match({'EXIT', _}, mnesia:table_info(Tab, where_to_write)),
+ ?match({'EXIT', _}, mnesia:table_info(Tab, where_to_read)),
+ [];
+
+verify_replica_location(Tab, DiscOnly0, Ram0, Disc0, AliveNodes0) ->
+%% sync_tables(AliveNodes0, [Tab]),
+ AliveNodes = lists:sort(AliveNodes0),
+ DiscOnly = lists:sort(DiscOnly0),
+ Ram = lists:sort(Ram0),
+ Disc = lists:sort(Disc0),
+ Write = ignore_dead(DiscOnly ++ Ram ++ Disc, AliveNodes),
+ Read = ignore_dead(DiscOnly ++ Ram ++ Disc, AliveNodes),
+ This = node(),
+
+ timer:sleep(100),
+
+ S1 = ?match(AliveNodes, lists:sort(mnesia:system_info(running_db_nodes))),
+ S2 = ?match(DiscOnly, lists:sort(mnesia:table_info(Tab, disc_only_copies))),
+ S3 = ?match(Ram, lists:sort(mnesia:table_info(Tab, ram_copies))),
+ S4 = ?match(Disc, lists:sort(mnesia:table_info(Tab, disc_copies))),
+ S5 = ?match(Write, lists:sort(mnesia:table_info(Tab, where_to_write))),
+ S6 = case lists:member(This, Read) of
+ true ->
+ ?match(This, mnesia:table_info(Tab, where_to_read));
+ false ->
+ ?match(true, lists:member(mnesia:table_info(Tab, where_to_read), Read))
+ end,
+ lists:filter(fun({success,_}) -> false; (_) -> true end, [S1,S2,S3,S4,S5,S6]).
+
+ignore_dead(Nodes, AliveNodes) ->
+ Filter = fun(Node) -> lists:member(Node, AliveNodes) end,
+ lists:sort(lists:zf(Filter, Nodes)).
+
+
+remote_activate_debug_fun(N, I, F, C, File, Line) ->
+ Pid = spawn_link(N, ?MODULE, do_remote_activate_debug_fun, [self(), I, F, C, File, Line]),
+ receive
+ {activated, Pid} -> ok;
+ {'EXIT', Pid, Reason} -> {error, Reason}
+ end.
+
+do_remote_activate_debug_fun(From, I, F, C, File, Line) ->
+ mnesia_lib:activate_debug_fun(I, F, C, File, Line),
+ From ! {activated, self()},
+ timer:sleep(infinity). % Dies whenever the test process dies !!
+
+
+sort(L) when is_list(L) ->
+ lists:sort(L);
+sort({atomic, L}) when is_list(L) ->
+ {atomic, lists:sort(L)};
+sort({ok, L}) when is_list(L) ->
+ {ok, lists:sort(L)};
+sort(W) ->
+ W.
diff --git a/lib/mnesia/test/mnesia_test_lib.hrl b/lib/mnesia/test/mnesia_test_lib.hrl
new file mode 100644
index 0000000000..85f12200d4
--- /dev/null
+++ b/lib/mnesia/test/mnesia_test_lib.hrl
@@ -0,0 +1,132 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-define(log(Format,Args),mnesia_test_lib:log(Format,Args,?FILE,?LINE)).
+-define(warning(Format,Args),?log("<>WARNING<>~n " ++ Format,Args)).
+-define(error(Format,Args),
+ mnesia_test_lib:error(Format,Args,?FILE,?LINE)).
+-define(verbose(Format,Args),mnesia_test_lib:verbose(Format,Args,?FILE,?LINE)).
+
+-define(fatal(Format,Args),
+ ?error(Format, Args),
+ exit({test_case_fatal, Format, Args, ?FILE, ?LINE})).
+
+-define(skip(Format,Args),
+ ?warning(Format, Args),
+ exit({skipped, ?flat_format(Format, Args)})).
+
+-define(flat_format(Format,Args),
+ lists:flatten(io_lib:format(Format, Args))).
+
+-define(sort(What), mnesia_test_lib:sort(What)).
+
+-define(ignore(Expr),
+ fun() ->
+ AcTuAlReS = (catch (Expr)),
+ ?verbose("ok, ~n Result as expected:~p~n",[AcTuAlReS]),
+ AcTuAlReS
+ end()).
+
+-define(match(ExpectedRes,Expr),
+ fun() ->
+ AcTuAlReS = (catch (Expr)),
+ case AcTuAlReS of
+ ExpectedRes ->
+ ?verbose("ok, ~n Result as expected:~p~n",[AcTuAlReS]),
+ {success,AcTuAlReS};
+ _ ->
+ ?error("Not Matching Actual result was:~n ~p~n",
+ [AcTuAlReS]),
+ {fail,AcTuAlReS}
+ end
+ end()).
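+
+%% For illustration (hypothetical call sites): ?match/2 catches the
+%% expression, so both normal results and exits can be matched, e.g.
+%%
+%%   ?match({atomic, ok}, mnesia:create_table(foo, [])),
+%%   ?match([], mnesia:dirty_read({foo, some_key})),
+%%
+%% Each expansion returns {success, ActualResult} or {fail, ActualResult}
+%% and reports mismatches via mnesia_test_lib:error/4.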
+
+-define(match_inverse(NotExpectedRes,Expr),
+ fun() ->
+ AcTuAlReS = (catch (Expr)),
+ case AcTuAlReS of
+ NotExpectedRes ->
+ ?error("Not matching Actual result was:~n ~p~n",
+ [AcTuAlReS]),
+ {fail,AcTuAlReS};
+ _ ->
+ ?verbose("ok, ~n Result as expected: ~p~n",[AcTuAlReS]),
+ {success,AcTuAlReS}
+ end
+ end()).
+
+-define(match_receive(ExpectedMsg),
+ ?match(ExpectedMsg,mnesia_test_lib:pick_msg())).
+
+%% ExpectedMsgs must be completely bound
+-define(match_multi_receive(ExpectedMsgs),
+ fun() ->
+ TmPeXpCtEdMsGs = lists:sort(ExpectedMsgs),
+ ?match(TmPeXpCtEdMsGs,
+ lists:sort(lists:map(fun(_) ->
+ mnesia_test_lib:pick_msg()
+ end,
+ TmPeXpCtEdMsGs)))
+ end()).
+
+-define(start_activities(Nodes),
+ mnesia_test_lib:start_activities(Nodes)).
+
+-define(start_transactions(Pids),
+ mnesia_test_lib:start_transactions(Pids)).
+
+-define(acquire_nodes(N, Config),
+ mnesia_test_lib:prepare_test_case([{init_test_case, [mnesia]},
+ delete_schema,
+ create_schema,
+ {start_appls, [mnesia]}],
+ N, Config, ?FILE, ?LINE)).
+
+-define(activate_debug_fun(I, F, C),
+ mnesia_lib:activate_debug_fun(I, F, C, ?FILE, ?LINE)).
+
+-define(remote_activate_debug_fun(N, I, F, C),
+ ?match(ok, mnesia_test_lib:remote_activate_debug_fun(N, I, F, C,
+ ?FILE, ?LINE))).
+
+-define(deactivate_debug_fun(I),
+ mnesia_lib:deactivate_debug_fun(I, ?FILE, ?LINE)).
+
+-define(remote_deactivate_debug_fun(N, I),
+ rpc:call(N, mnesia_lib, deactivate_debug_fun, [I, ?FILE, ?LINE])).
+
+-define(is_debug_compiled,
+ case mnesia_lib:is_debug_compiled() of
+ false ->
+ ?skip("Mnesia is not debug compiled, test case ignored.~n", []);
+ _OhTeR ->
+ ok
+ end).
+
+-define(needs_disc(Config),
+ case mnesia_test_lib:diskless(Config) of
+ false ->
+ ok;
+ true ->
+ ?skip("Must have disc, test case ignored.~n", [])
+ end).
+
+-define(verify_mnesia(Ups, Downs),
+ mnesia_test_lib:verify_mnesia(Ups, Downs, ?FILE, ?LINE)).
diff --git a/lib/mnesia/test/mnesia_tpcb.erl b/lib/mnesia/test/mnesia_tpcb.erl
new file mode 100644
index 0000000000..903c53a21c
--- /dev/null
+++ b/lib/mnesia/test/mnesia_tpcb.erl
@@ -0,0 +1,1268 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% MODULE
+%%
+%% mnesia_tpcb - TPC-B benchmarking of Mnesia
+%%
+%% DESCRIPTION
+%%
+%% The metric used in the TPC-B benchmark is throughput as measured
+%% in transactions per second (TPS). The benchmark uses a single,
+%% simple update-intensive transaction to load the database system.
+%% The single transaction type provides a simple, repeatable
+%% unit of work, and is designed to exercise the basic components of
+%% a database system.
+%%
+%% The definition of TPC-B states lots of detailed rules and
+%% conditions that must be fulfilled, e.g. how the ACID (atomicity,
+%% consistency, isolation and durability) properties are verified,
+%% how the random numbers must be distributed, minimum sizes of
+%% the different types of records, minimum duration of the benchmark,
+%% formulas to calculate prices (dollars per tps), disclosure issues
+%% etc. Please see http://www.tpc.org/ for the nitty-gritty details.
+%%
+%% The TPC-B benchmark is stated in terms of a hypothetical bank. The
+%% bank has one or more branches. Each branch has multiple tellers. The
+%% bank has many customers, each with an account. The database represents
+%% the cash position of each entity (branch, teller and account) and a
+%% history of recent transactions run by the bank. The transaction
+%% represents the work done when a customer makes a deposit or a
+%% withdrawal against his account. The transaction is performed by a
+%% teller at some branch.
+%%
+%% Each process that performs TPC-B transactions is called a driver.
+%% Drivers generate a teller_id, an account_id and a delta amount of
+%% money randomly. An account, a teller and a branch are read, their
+%% balances are adjusted and a history record is created. The driver
+%% measures the time for 3 reads, 3 writes and 1 create.
+%%
+%% GETTING STARTED
+%%
+%% Generate tables and run with default configuration:
+%%
+%% mnesia_tpcb:start().
+%%
+%% A little bit more advanced:
+%%
+%% spawn(mnesia_tpcb, start, [[{n_drivers_per_node, 8}, {stop_after, infinity}]]),
+%% mnesia_tpcb:stop().
+%%
+%% Really advanced:
+%%
+%% mnesia_tpcb:init([{n_branches, 8}, {replica_type, disc_only_copies}]),
+%% mnesia_tpcb:run([{n_drivers_per_node, 8}]),
+%% mnesia_tpcb:run([{n_drivers_per_node, 64}]).
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
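+%%
+%% The preconfigured wrappers below (replica_test/1, dist_test/1,
+%% conflict_test/1, etc.) build an argument list with config/2 and
+%% spawn start/1, e.g. (assuming the peer nodes are already connected):
+%%
+%%   mnesia_tpcb:replica_test(ram_copies).
+%%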
+
+-module(mnesia_tpcb).
+-author('[email protected]').
+
+-export([
+ config/2,
+ count_balance/0,
+ driver_init/2,
+ init/1,
+ reporter_init/2,
+ run/1,
+ start/0,
+ start/1,
+ start/2,
+ stop/0,
+ real_trans/5,
+ verify_tabs/0,
+ reply_gen_branch/3,
+ frag_add_delta/7,
+
+ conflict_test/1,
+ dist_test/1,
+ replica_test/1,
+ sticky_replica_test/1,
+ remote_test/1,
+ remote_frag2_test/1
+ ]).
+
+-define(SECOND, 1000000).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Account record, total size must be at least 100 bytes
+
+-define(ACCOUNT_FILLER,
+ {123456789012345678901234567890123456789012345678901234567890,
+ 123456789012345678901234567890123456789012345678901234567890,
+ 123456789012345678901234567890123456789012345678901234}).
+
+-record(account,
+ {
+ id = 0, % Unique account id
+ branch_id = 0, % Branch where the account is held
+ balance = 0, % Account balance
+ filler = ?ACCOUNT_FILLER % Gap filler to ensure size >= 100 bytes
+ }).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Branch record, total size must be at least 100 bytes
+
+-define(BRANCH_FILLER,
+ {123456789012345678901234567890123456789012345678901234567890,
+ 123456789012345678901234567890123456789012345678901234567890,
+ 123456789012345678901234567890123456789012345678901234567890}).
+
+-record(branch,
+ {
+ id = 0, % Unique branch id
+ balance = 0, % Total balance of whole branch
+ filler = ?BRANCH_FILLER % Gap filler to ensure size >= 100 bytes
+ }).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Teller record, total size must be at least 100 bytes
+
+-define(TELLER_FILLER,
+ {123456789012345678901234567890123456789012345678901234567890,
+ 123456789012345678901234567890123456789012345678901234567890,
+ 1234567890123456789012345678901234567890123456789012345678}).
+
+-record(teller,
+ {
+ id = 0, % Unique teller id
+ branch_id = 0, % Branch where the teller is located
+ balance = 0, % Teller balance
+ filler = ?TELLER_FILLER % Gap filler to ensure size >= 100 bytes
+ }).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% History record, total size must be at least 50 bytes
+
+-define(HISTORY_FILLER, 1234567890).
+
+-record(history,
+ {
+ history_id = {0, 0}, % {DriverId, DriverLocalHistoryid}
+ time_stamp = now(), % Time point during active transaction
+ branch_id = 0, % Branch associated with teller
+ teller_id = 0, % Teller involved in transaction
+ account_id = 0, % Account updated by transaction
+ amount = 0, % Amount (delta) specified by transaction
+ filler = ?HISTORY_FILLER % Gap filler to ensure size >= 50 bytes
+ }).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+-record(tab_config,
+ {
+ db_nodes = [node()],
+ n_replicas = 1, % Ignored for non-fragmented tables
+ replica_nodes = [node()],
+ replica_type = ram_copies,
+ use_running_mnesia = false,
+ n_fragments = 0,
+ n_branches = 1,
+ n_tellers_per_branch = 10, % Must be 10
+ n_accounts_per_branch = 100000, % Must be 100000
+ branch_filler = ?BRANCH_FILLER,
+ account_filler = ?ACCOUNT_FILLER,
+ teller_filler = ?TELLER_FILLER
+ }).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+-record(run_config,
+ {
+ driver_nodes = [node()],
+ n_drivers_per_node = 1,
+ use_running_mnesia = false,
+ stop_after = timer:minutes(15), % Minimum 15 min
+ report_interval = timer:minutes(1),
+ use_sticky_locks = false,
+ spawn_near_branch = false,
+ activity_type = transaction,
+ reuse_history_id = false
+ }).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+-record(time,
+ {
+ n_trans = 0,
+ min_n = 0,
+ max_n = 0,
+ acc_time = 0,
+ max_time = 0
+ }).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+-record(driver_state,
+ {
+ driver_id,
+ driver_node,
+ seed,
+ n_local_branches,
+ local_branches,
+ tab_config,
+ run_config,
+ history_id,
+ time = #time{},
+ acc_time = #time{},
+ reuse_history_id
+ }).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+-record(reporter_state,
+ {
+ driver_pids,
+ starter_pid,
+ n_iters = 0,
+ prev_tps = 0,
+ curr = #time{},
+ acc = #time{},
+ init_micros,
+ prev_micros,
+ run_config
+ }).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% One driver on each node, table not replicated
+
+config(frag_test, ReplicaType) ->
+ Remote = nodes(),
+ Local = node(),
+ Nodes = [Local | Remote],
+ [
+ {n_branches, length(Nodes)},
+ {n_fragments, length(Nodes)},
+ {replica_nodes, Nodes},
+ {db_nodes, Nodes},
+ {driver_nodes, Nodes},
+ {n_accounts_per_branch, 100},
+ {replica_type, ReplicaType},
+ {stop_after, timer:minutes(1)},
+ {report_interval, timer:seconds(10)},
+ {reuse_history_id, true}
+ ];
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% One driver on each node, table replicated to two nodes.
+
+config(frag2_test, ReplicaType) ->
+ Remote = nodes(),
+ Local = node(),
+ Nodes = [Local | Remote],
+ [
+ {n_branches, length(Nodes)},
+ {n_fragments, length(Nodes)},
+ {n_replicas, 2},
+ {replica_nodes, Nodes},
+ {db_nodes, Nodes},
+ {driver_nodes, Nodes},
+ {n_accounts_per_branch, 100},
+ {replica_type, ReplicaType},
+ {stop_after, timer:minutes(1)},
+ {report_interval, timer:seconds(10)},
+ {reuse_history_id, true}
+ ];
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% One driver on this node, table replicated to all nodes.
+
+config(replica_test, ReplicaType) ->
+ Remote = nodes(),
+ Local = node(),
+ Nodes = [Local | Remote],
+ [
+ {db_nodes, Nodes},
+ {driver_nodes, [Local]},
+ {replica_nodes, Nodes},
+ {n_accounts_per_branch, 100},
+ {replica_type, ReplicaType},
+ {stop_after, timer:minutes(1)},
+ {report_interval, timer:seconds(10)},
+ {reuse_history_id, true}
+ ];
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% One driver on this node, table replicated to all nodes.
+
+config(sticky_replica_test, ReplicaType) ->
+ Remote = nodes(),
+ Local = node(),
+ Nodes = [Local | Remote],
+ [
+ {db_nodes, Nodes},
+ {driver_nodes, [node()]},
+ {replica_nodes, Nodes},
+ {n_accounts_per_branch, 100},
+ {replica_type, ReplicaType},
+ {use_sticky_locks, true},
+ {stop_after, timer:minutes(1)},
+ {report_interval, timer:seconds(10)},
+ {reuse_history_id, true}
+ ];
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Ten drivers per node, tables replicated to all nodes, lots of branches
+
+config(dist_test, ReplicaType) ->
+ Remote = nodes(),
+ Local = node(),
+ Nodes = [Local | Remote],
+ [
+ {db_nodes, Nodes},
+ {driver_nodes, Nodes},
+ {replica_nodes, Nodes},
+ {n_drivers_per_node, 10},
+ {n_branches, 10 * length(Nodes) * 100},
+ {n_accounts_per_branch, 10},
+ {replica_type, ReplicaType},
+ {stop_after, timer:minutes(1)},
+ {report_interval, timer:seconds(10)},
+ {reuse_history_id, true}
+ ];
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Ten drivers per node, tables replicated to all nodes, single branch
+
+config(conflict_test, ReplicaType) ->
+ Remote = nodes(),
+ Local = node(),
+ Nodes = [Local | Remote],
+ [
+ {db_nodes, Nodes},
+ {driver_nodes, Nodes},
+ {replica_nodes, Nodes},
+ {n_drivers_per_node, 10},
+ {n_branches, 1},
+ {n_accounts_per_branch, 10},
+ {replica_type, ReplicaType},
+ {stop_after, timer:minutes(1)},
+ {report_interval, timer:seconds(10)},
+ {reuse_history_id, true}
+ ];
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% One driver on this node, table replicated to all other nodes.
+
+config(remote_test, ReplicaType) ->
+ Remote = nodes(),
+ Local = node(),
+ Nodes = [Local | Remote],
+ [
+ {db_nodes, Nodes},
+ {driver_nodes, [Local]},
+ {replica_nodes, Remote},
+ {n_accounts_per_branch, 100},
+ {replica_type, ReplicaType},
+ {stop_after, timer:minutes(1)},
+ {report_interval, timer:seconds(10)},
+ {reuse_history_id, true}
+ ];
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% One driver on this node, table replicated to two other nodes.
+
+config(remote_frag2_test, ReplicaType) ->
+ Remote = nodes(),
+ Local = node(),
+ Nodes = [Local | Remote],
+ [
+ {n_branches, length(Remote)},
+ {n_fragments, length(Remote)},
+ {n_replicas, 2},
+ {replica_nodes, Remote},
+ {db_nodes, Nodes},
+ {driver_nodes, [Local]},
+ {n_accounts_per_branch, 100},
+ {replica_type, ReplicaType},
+ {stop_after, timer:minutes(1)},
+ {report_interval, timer:seconds(10)},
+ {reuse_history_id, true}
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+start(What, ReplicaType) ->
+ spawn_link(?MODULE, start, [config(What, ReplicaType)]).
+
+replica_test(ReplicaType) ->
+ start(replica_test, ReplicaType).
+
+sticky_replica_test(ReplicaType) ->
+ start(sticky_replica_test, ReplicaType).
+
+dist_test(ReplicaType) ->
+ start(dist_test, ReplicaType).
+
+conflict_test(ReplicaType) ->
+ start(conflict_test, ReplicaType).
+
+remote_test(ReplicaType) ->
+ start(remote_test, ReplicaType).
+
+remote_frag2_test(ReplicaType) ->
+ start(remote_frag2_test, ReplicaType).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Args is a list of {Key, Val} tuples where Key is a field name
+%% in either the record tab_config or run_config. Unknown keys are ignored.
+
+start() ->
+ start([]).
+start(Args) ->
+ init(Args),
+ run(Args).
+
+list2rec(List, Fields, DefaultTuple) ->
+ [Name|Defaults] = tuple_to_list(DefaultTuple),
+ List2 = list2rec(List, Fields, Defaults, []),
+ list_to_tuple([Name] ++ List2).
+
+list2rec(_List, [], [], Acc) ->
+ Acc;
+list2rec(List, [F|Fields], [D|Defaults], Acc) ->
+ {Val, List2} =
+ case lists:keysearch(F, 1, List) of
+ false ->
+ {D, List};
+ {value, {F, NewVal}} ->
+ {NewVal, lists:keydelete(F, 1, List)}
+ end,
+ list2rec(List2, Fields, Defaults, Acc ++ [Val]).
+
+stop() ->
+ case whereis(mnesia_tpcb) of
+ undefined ->
+ {error, not_running};
+ Pid ->
+ sync_stop(Pid)
+ end.
+
+sync_stop(Pid) ->
+ Pid ! {self(), stop},
+ receive
+ {Pid, {stopped, Res}} -> Res
+ after timer:minutes(1) ->
+ exit(Pid, kill),
+ {error, brutal_kill}
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Initialization
+
+%% Args is a list of {Key, Val} tuples where Key is a field name
+%% in the record tab_config, unknown keys are ignored.
+
+init(Args) ->
+ TabConfig0 = list2rec(Args, record_info(fields, tab_config), #tab_config{}),
+ TabConfig =
+ if
+ TabConfig0#tab_config.n_fragments =:= 0 ->
+ TabConfig0#tab_config{n_replicas = length(TabConfig0#tab_config.replica_nodes)};
+ true ->
+ TabConfig0
+ end,
+ Tags = record_info(fields, tab_config),
+ Fun = fun(F, Pos) -> {{F, element(Pos, TabConfig)}, Pos + 1} end,
+ {List, _} = lists:mapfoldl(Fun, 2, Tags),
+ io:format("TPC-B: Table config: ~p ~n", [List]),
+
+ DbNodes = TabConfig#tab_config.db_nodes,
+ stop(),
+ if
+ TabConfig#tab_config.use_running_mnesia =:= true ->
+ ignore;
+ true ->
+ rpc:multicall(DbNodes, mnesia, lkill, []),
+ case mnesia:delete_schema(DbNodes) of
+ ok ->
+ case mnesia:create_schema(DbNodes) of
+ ok ->
+ {Replies, BadNodes} =
+ rpc:multicall(DbNodes, mnesia, start, []),
+ case [Res || Res <- Replies, Res =/= ok] of
+ [] when BadNodes =:= [] ->
+ ok;
+ BadRes ->
+ io:format("TPC-B: <ERROR> "
+ "Failed to start ~p: ~p~n",
+ [BadNodes, BadRes]),
+ exit({start_failed, BadRes, BadNodes})
+ end;
+ {error, Reason} ->
+ io:format("TPC-B: <ERROR> "
+ "Failed to create schema on disc: ~p~n",
+ [Reason]),
+ exit({create_schema_failed, Reason})
+ end;
+ {error, Reason} ->
+ io:format("TPC-B: <ERROR> "
+ "Failed to delete schema on disc: ~p~n",
+ [Reason]),
+ exit({delete_schema_failed, Reason})
+ end
+ end,
+ gen_tabs(TabConfig).
+
+gen_tabs(TC) ->
+ create_tab(TC, branch, record_info(fields, branch),
+ undefined),
+ create_tab(TC, account, record_info(fields, account),
+ {branch, #account.branch_id}),
+ create_tab(TC, teller, record_info(fields, teller),
+ {branch, #teller.branch_id}),
+ create_tab(TC, history, record_info(fields, history),
+ {branch, #history.branch_id}),
+
+ NB = TC#tab_config.n_branches,
+ NT = TC#tab_config.n_tellers_per_branch,
+ NA = TC#tab_config.n_accounts_per_branch,
+ io:format("TPC-B: Generating ~p branches a ~p bytes~n",
+ [NB, size(term_to_binary(default_branch(TC)))]),
+ io:format("TPC-B: Generating ~p * ~p tellers a ~p bytes~n",
+ [NB, NT, size(term_to_binary(default_teller(TC)))]),
+ io:format("TPC-B: Generating ~p * ~p accounts a ~p bytes~n",
+ [NB, NA, size(term_to_binary(default_account(TC)))]),
+ io:format("TPC-B: Generating 0 history records a ~p bytes~n",
+ [size(term_to_binary(default_history(TC)))]),
+ gen_branches(TC),
+
+ case verify_tabs() of
+ ok ->
+ ignore;
+ {error, Reason} ->
+ io:format("TPC-B: <ERROR> Inconsistent tables: ~w~n",
+ [Reason]),
+ exit({inconsistent_tables, Reason})
+ end.
+
+create_tab(TC, Name, Attrs, _ForeignKey) when TC#tab_config.n_fragments =:= 0 ->
+ Nodes = TC#tab_config.replica_nodes,
+ Type = TC#tab_config.replica_type,
+ Def = [{Type, Nodes}, {attributes, Attrs}],
+ create_tab(Name, Def);
+create_tab(TC, Name, Attrs, ForeignKey) ->
+ NReplicas = TC#tab_config.n_replicas,
+ NodePool = TC#tab_config.replica_nodes,
+ Type = TC#tab_config.replica_type,
+ NF = TC#tab_config.n_fragments,
+ Props = [{n_fragments, NF},
+ {node_pool, NodePool},
+ {n_copies(Type), NReplicas},
+ {foreign_key, ForeignKey}],
+ Def = [{frag_properties, Props},
+ {attributes, Attrs}],
+ create_tab(Name, Def).
+
+create_tab(Name, Def) ->
+ mnesia:delete_table(Name),
+ case mnesia:create_table(Name, Def) of
+ {atomic, ok} ->
+ ok;
+ {aborted, Reason} ->
+ io:format("TPC-B: <ERROR> failed to create table ~w ~w: ~p~n",
+ [Name, Def, Reason]),
+ exit({create_table_failed, Reason})
+ end.
+
+n_copies(Type) ->
+ case Type of
+ ram_copies -> n_ram_copies;
+ disc_copies -> n_disc_copies;
+ disc_only_copies -> n_disc_only_copies
+ end.
+
+gen_branches(TC) ->
+ First = 0,
+ Last = First + TC#tab_config.n_branches - 1,
+ GenPids = gen_branches(TC, First, Last, []),
+ wait_for_gen(GenPids).
+
+wait_for_gen([]) ->
+ ok;
+wait_for_gen(Pids) ->
+ receive
+ {branch_generated, Pid} -> wait_for_gen(lists:delete(Pid, Pids));
+ Exit ->
+ exit({tpcb_failed, Exit})
+ end.
+
+gen_branches(TC, BranchId, Last, UsedNs) when BranchId =< Last ->
+ UsedNs2 = get_branch_nodes(BranchId, UsedNs),
+ Node = hd(UsedNs2),
+ Pid = spawn_link(Node, ?MODULE, reply_gen_branch,
+ [self(), TC, BranchId]),
+ [Pid | gen_branches(TC, BranchId + 1, Last, UsedNs2)];
+gen_branches(_, _, _, _) ->
+ [].
+
+reply_gen_branch(ReplyTo, TC, BranchId) ->
+ gen_branch(TC, BranchId),
+ ReplyTo ! {branch_generated, self()},
+ unlink(ReplyTo).
+
+%% Returns a new list of nodes with the best node as head
+get_branch_nodes(BranchId, UsedNs) ->
+ WriteNs = table_info({branch, BranchId}, where_to_write),
+ WeightedNs = [{n_duplicates(N, UsedNs, 0), N} || N <- WriteNs],
+ [{_, LeastUsed} | _ ] = lists:sort(WeightedNs),
+ [LeastUsed | UsedNs].
+
+n_duplicates(_N, [], Count) ->
+ Count;
+n_duplicates(N, [N | Tail], Count) ->
+ n_duplicates(N, Tail, Count + 1);
+n_duplicates(N, [_ | Tail], Count) ->
+ n_duplicates(N, Tail, Count).
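+
+%% Worked example (illustrative): with WriteNs = [a, b, c] and
+%% UsedNs = [a, a, b], the weighted list is [{2,a}, {1,b}, {0,c}];
+%% after lists:sort/1 its head is {0,c}, so node c is chosen as the
+%% least used generator node and prepended to UsedNs.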
+
+gen_branch(TC, BranchId) ->
+ A = default_account(TC),
+ NA = TC#tab_config.n_accounts_per_branch,
+ FirstA = BranchId * NA,
+ ArgsA = [FirstA, FirstA + NA - 1, BranchId, A],
+ ok = mnesia:activity(async_dirty, fun gen_accounts/4, ArgsA, mnesia_frag),
+
+ T = default_teller(TC),
+ NT = TC#tab_config.n_tellers_per_branch,
+ FirstT = BranchId * NT,
+ ArgsT = [FirstT, FirstT + NT - 1, BranchId, T],
+ ok = mnesia:activity(async_dirty, fun gen_tellers/4, ArgsT, mnesia_frag),
+
+ B = default_branch(TC),
+ FunB = fun() -> mnesia:write(branch, B#branch{id = BranchId}, write) end,
+ ok = mnesia:activity(sync_dirty, FunB, [], mnesia_frag).
+
+gen_tellers(Id, Last, BranchId, T) when Id =< Last ->
+ mnesia:write(teller, T#teller{id = Id, branch_id=BranchId}, write),
+ gen_tellers(Id + 1, Last, BranchId, T);
+gen_tellers(_, _, _, _) ->
+ ok.
+
+gen_accounts(Id, Last, BranchId, A) when Id =< Last ->
+ mnesia:write(account, A#account{id = Id, branch_id=BranchId}, write),
+ gen_accounts(Id + 1, Last, BranchId, A);
+gen_accounts(_, _, _, _) ->
+ ok.
+
+default_branch(TC) -> #branch{filler = TC#tab_config.branch_filler}.
+default_teller(TC) -> #teller{filler = TC#tab_config.teller_filler}.
+default_account(TC) -> #account{filler = TC#tab_config.account_filler}.
+default_history(_TC) -> #history{}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Run the benchmark
+
+%% Args is a list of {Key, Val} tuples where Key is a field name
+%% in the record run_config; unknown keys are ignored.
+run(Args) ->
+ RunConfig = list2rec(Args, record_info(fields, run_config), #run_config{}),
+ Tags = record_info(fields, run_config),
+ Fun = fun(F, Pos) -> {{F, element(Pos, RunConfig)}, Pos + 1} end,
+ {List, _} = lists:mapfoldl(Fun, 2, Tags),
+ io:format("TPC-B: Run config: ~p ~n", [List]),
+
+ Pid = spawn_link(?MODULE, reporter_init, [self(), RunConfig]),
+ receive
+ {Pid, {stopped, Res}} ->
+ Res; % Stopped by other process
+ Else ->
+ {tpcb_got, Else}
+ after RunConfig#run_config.stop_after ->
+ sync_stop(Pid)
+ end.
+
+reporter_init(Starter, RC) ->
+ register(mnesia_tpcb, self()),
+ process_flag(trap_exit, true),
+ DbNodes = mnesia:system_info(db_nodes),
+ if
+ RC#run_config.use_running_mnesia =:= true ->
+ ignore;
+ true ->
+ {Replies, BadNodes} =
+ rpc:multicall(DbNodes, mnesia, start, []),
+ case [Res || Res <- Replies, Res =/= ok] of
+ [] when BadNodes =:= [] ->
+ ok;
+ BadRes ->
+ io:format("TPC-B: <ERROR> "
+ "Failed to start ~w: ~p~n",
+ [BadNodes, BadRes]),
+ exit({start_failed, BadRes, BadNodes})
+ end,
+ verify_tabs()
+ end,
+
+ N = table_info(branch, size),
+ NT = table_info(teller, size) div N,
+ NA = table_info(account, size) div N,
+
+ {Type, NF, RepNodes} = table_storage(branch),
+ TC = #tab_config{n_fragments = NF,
+ n_branches = N,
+ n_tellers_per_branch = NT,
+ n_accounts_per_branch = NA,
+ db_nodes = DbNodes,
+ replica_nodes = RepNodes,
+ replica_type = Type
+ },
+ Drivers = start_drivers(RC, TC),
+ Now = now_to_micros(erlang:now()),
+ State = #reporter_state{driver_pids = Drivers,
+ run_config = RC,
+ starter_pid = Starter,
+ init_micros = Now,
+ prev_micros = Now
+ },
+ case catch reporter_loop(State) of
+ {'EXIT', Reason} ->
+ io:format("TPC-B: Abnormal termination: ~p~n", [Reason]),
+ if
+ RC#run_config.use_running_mnesia =:= true ->
+ ignore;
+ true ->
+ rpc:multicall(DbNodes, mnesia, lkill, [])
+ end,
+ unlink(Starter),
+ Starter ! {self(), {stopped, {error, Reason}}}, % To be sure
+ exit(shutdown);
+ {ok, Stopper, State2} ->
+ Time = State2#reporter_state.acc,
+ Res =
+ case verify_tabs() of
+ ok ->
+ {ok, Time};
+ {error, Reason} ->
+ io:format("TPC-B: <ERROR> Inconsistent tables, ~p~n",
+ [{error, Reason}]),
+ {error, Reason}
+ end,
+ if
+ RC#run_config.use_running_mnesia =:= true ->
+ ignore;
+ true ->
+ rpc:multicall(DbNodes, mnesia, stop, [])
+ end,
+ unlink(Starter),
+ Starter ! {self(), {stopped, Res}},
+ if
+ Stopper =/= Starter ->
+ Stopper ! {self(), {stopped, Res}};
+ true ->
+ ignore
+ end,
+ exit(shutdown)
+ end.
+
+table_info(Tab, Item) ->
+ Fun = fun() -> mnesia:table_info(Tab, Item) end,
+ mnesia:activity(sync_dirty, Fun, mnesia_frag).
+
+%% Returns {Storage, NFragments, NReplicas}
+table_storage(Tab) ->
+ case mnesia:table_info(branch, frag_properties) of
+ [] ->
+ NFO = 0,
+ NR = length(mnesia:table_info(Tab, ram_copies)),
+ ND = length(mnesia:table_info(Tab, disc_copies)),
+ NDO = length(mnesia:table_info(Tab, disc_only_copies)),
+ if
+ NR =/= 0 -> {ram_copies, NFO, NR};
+ ND =/= 0 -> {disc_copies, NFO, ND};
+                NDO =/= 0 -> {disc_only_copies, NFO, NDO}
+ end;
+ Props ->
+ {value, NFO} = lists:keysearch(n_fragments, 1, Props),
+ NR = table_info(Tab, n_ram_copies),
+ ND = table_info(Tab, n_disc_copies),
+ NDO = table_info(Tab, n_disc_only_copies),
+ if
+ NR =/= 0 -> {ram_copies, NFO, NR};
+ ND =/= 0 -> {disc_copies, NFO, ND};
+                NDO =/= 0 -> {disc_only_copies, NFO, NDO}
+ end
+ end.
+
+reporter_loop(State) ->
+ RC = State#reporter_state.run_config,
+ receive
+ {From, stop} ->
+ {ok, From, call_drivers(State, stop)};
+ {'EXIT', Pid, Reason} when Pid =:= State#reporter_state.starter_pid ->
+ %% call_drivers(State, stop),
+ exit({starter_died, Pid, Reason})
+ after RC#run_config.report_interval ->
+ Iters = State#reporter_state.n_iters,
+ State2 = State#reporter_state{n_iters = Iters + 1},
+ case call_drivers(State2, report) of
+ State3 when State3#reporter_state.driver_pids =/= [] ->
+ State4 = State3#reporter_state{curr = #time{}},
+ reporter_loop(State4);
+ _ ->
+ exit(drivers_died)
+ end
+ end.
+
+call_drivers(State, Msg) ->
+ Drivers = State#reporter_state.driver_pids,
+ lists:foreach(fun(Pid) -> Pid ! {self(), Msg} end, Drivers),
+ State2 = show_report(calc_reports(Drivers, State)),
+ case Msg =:= stop of
+ true ->
+ Acc = State2#reporter_state.acc,
+ Init = State2#reporter_state.init_micros,
+ show_report(State2#reporter_state{n_iters = 0,
+ curr = Acc,
+ prev_micros = Init});
+ false ->
+ ignore
+ end,
+ State2.
+
+calc_reports([], State) ->
+ State;
+calc_reports([Pid|Drivers], State) ->
+ receive
+ {'EXIT', P, Reason} when P =:= State#reporter_state.starter_pid ->
+ exit({starter_died, P, Reason});
+ {'EXIT', Pid, Reason} ->
+ exit({driver_died, Pid, Reason});
+ {Pid, Time} when is_record(Time, time) ->
+ %% io:format("~w: ~w~n", [Pid, Time]),
+ A = add_time(State#reporter_state.acc, Time),
+ C = add_time(State#reporter_state.curr, Time),
+ State2 = State#reporter_state{acc = A, curr = C},
+ calc_reports(Drivers, State2)
+ end.
+
+add_time(Acc, New) ->
+ Acc#time{n_trans = New#time.n_trans + Acc#time.n_trans,
+ min_n = lists:min([New#time.n_trans, Acc#time.min_n] -- [0]),
+ max_n = lists:max([New#time.n_trans, Acc#time.max_n]),
+ acc_time = New#time.acc_time + Acc#time.acc_time,
+ max_time = lists:max([New#time.max_time, Acc#time.max_time])}.
+
+-define(AVOID_DIV_ZERO(_What_), try (_What_) catch _:_ -> 0 end).
+
+show_report(State) ->
+ Now = now_to_micros(erlang:now()),
+ Iters = State#reporter_state.n_iters,
+ Time = State#reporter_state.curr,
+ Max = Time#time.max_time,
+ N = Time#time.n_trans,
+ Avg = ?AVOID_DIV_ZERO(Time#time.acc_time div N),
+ AliveN = length(State#reporter_state.driver_pids),
+ Tps = ?AVOID_DIV_ZERO((?SECOND * AliveN) div Avg),
+ PrevTps= State#reporter_state.prev_tps,
+ {DiffSign, DiffTps} = signed_diff(Iters, Tps, PrevTps),
+ Unfairness = ?AVOID_DIV_ZERO(Time#time.max_n / Time#time.min_n),
+ BruttoAvg = ?AVOID_DIV_ZERO((Now - State#reporter_state.prev_micros) div N),
+%% io:format("n_iters=~p, n_trans=~p, n_drivers=~p, avg=~p, now=~p, prev=~p~n",
+%% [Iters, N, AliveN, BruttoAvg, Now, State#reporter_state.prev_micros]),
+ BruttoTps = ?AVOID_DIV_ZERO(?SECOND div BruttoAvg),
+ case Iters > 0 of
+ true ->
+ io:format("TPC-B: ~p iter ~s~p diff ~p (~p) tps ~p avg micros ~p max micros ~p unfairness~n",
+ [Iters, DiffSign, DiffTps, Tps, BruttoTps, Avg, Max, Unfairness]);
+ false ->
+ io:format("TPC-B: ~p (~p) transactions per second, "
+ "duration of longest transaction was ~p milliseconds~n",
+ [Tps, BruttoTps, Max div 1000])
+ end,
+ State#reporter_state{prev_tps = Tps, prev_micros = Now}.
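+
+%% Illustrative arithmetic (assuming ?SECOND is defined as one million
+%% microseconds, as now_to_micros/1 below implies): with 4 live drivers
+%% and an average transaction time of 2000 micros,
+%% Tps = (1000000 * 4) div 2000 = 2000 transactions per second.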
+
+signed_diff(Iters, Curr, Prev) ->
+ case Iters > 1 of
+ true -> sign(Curr - Prev);
+ false -> sign(0)
+ end.
+
+sign(N) when N > 0 -> {"+", N};
+sign(N) -> {"", N}.
+
+now_to_micros({Mega, Secs, Micros}) ->
+ DT = calendar:now_to_datetime({Mega, Secs, 0}),
+ S = calendar:datetime_to_gregorian_seconds(DT),
+ (S * ?SECOND) + Micros.
+
+start_drivers(RC, TC) ->
+ LastHistoryId = table_info(history, size),
+ Reuse = RC#run_config.reuse_history_id,
+ DS = #driver_state{tab_config = TC,
+ run_config = RC,
+ n_local_branches = 0,
+ local_branches = [],
+ history_id = LastHistoryId,
+ reuse_history_id = Reuse},
+ Nodes = RC#run_config.driver_nodes,
+ NB = TC#tab_config.n_branches,
+ First = 0,
+ AllBranches = lists:seq(First, First + NB - 1),
+ ND = RC#run_config.n_drivers_per_node,
+ Spawn = fun(Spec) ->
+ Node = Spec#driver_state.driver_node,
+ spawn_link(Node, ?MODULE, driver_init, [Spec, AllBranches])
+ end,
+ Specs = [DS#driver_state{driver_id = Id, driver_node = N}
+ || N <- Nodes,
+ Id <- lists:seq(1, ND)],
+ Specs2 = lists:sort(lists:flatten(Specs)),
+ {Specs3, OrphanBranches} = alloc_local_branches(AllBranches, Specs2, []),
+ case length(OrphanBranches) of
+ N when N =< 10 ->
+ io:format("TPC-B: Orphan branches: ~p~n", [OrphanBranches]);
+ N ->
+ io:format("TPC-B: Orphan branches: ~p~n", [N])
+ end,
+ [Spawn(Spec) || Spec <- Specs3].
+
+alloc_local_branches([BranchId | Tail], Specs, OrphanBranches) ->
+ Nodes = table_info({branch, BranchId}, where_to_write),
+ LocalSpecs = [DS || DS <- Specs,
+ lists:member(DS#driver_state.driver_node, Nodes)],
+ case lists:keysort(#driver_state.n_local_branches, LocalSpecs) of
+ [] ->
+ alloc_local_branches(Tail, Specs, [BranchId | OrphanBranches]);
+ [DS | _] ->
+ LocalNB = DS#driver_state.n_local_branches + 1,
+ LocalBranches = [BranchId | DS#driver_state.local_branches],
+ DS2 = DS#driver_state{n_local_branches = LocalNB,
+ local_branches = LocalBranches},
+ Specs2 = Specs -- [DS],
+ Specs3 = [DS2 | Specs2],
+ alloc_local_branches(Tail, Specs3, OrphanBranches)
+ end;
+alloc_local_branches([], Specs, OrphanBranches) ->
+ {Specs, OrphanBranches}.
+
+driver_init(DS, AllBranches) ->
+ Seed = erlang:now(),
+ DS2 =
+ if
+ DS#driver_state.n_local_branches =:= 0 ->
+ DS#driver_state{seed = Seed,
+ n_local_branches = length(AllBranches),
+ local_branches = AllBranches};
+ true ->
+ DS#driver_state{seed = Seed}
+ end,
+ io:format("TPC-B: Driver ~p started as ~p on node ~p with ~p local branches~n",
+ [DS2#driver_state.driver_id, self(), node(), DS2#driver_state.n_local_branches]),
+ driver_loop(DS2).
+
+driver_loop(DS) ->
+ receive
+ {From, report} ->
+ From ! {self(), DS#driver_state.time},
+ Acc = add_time(DS#driver_state.time, DS#driver_state.acc_time),
+ DS2 = DS#driver_state{time=#time{}, acc_time = Acc}, % Reset timer
+ DS3 = calc_trans(DS2),
+ driver_loop(DS3);
+ {From, stop} ->
+ Acc = add_time(DS#driver_state.time, DS#driver_state.acc_time),
+ io:format("TPC-B: Driver ~p (~p) on node ~p stopped: ~w~n",
+ [DS#driver_state.driver_id, self(), node(self()), Acc]),
+ From ! {self(), DS#driver_state.time},
+ unlink(From),
+ exit(stopped)
+ after 0 ->
+ DS2 = calc_trans(DS),
+ driver_loop(DS2)
+ end.
+
+calc_trans(DS) ->
+ {Micros, DS2} = time_trans(DS),
+ Time = DS2#driver_state.time,
+ Time2 = Time#time{n_trans = Time#time.n_trans + 1,
+ acc_time = Time#time.acc_time + Micros,
+ max_time = lists:max([Micros, Time#time.max_time])
+ },
+ case DS#driver_state.reuse_history_id of
+ false ->
+ HistoryId = DS#driver_state.history_id + 1,
+ DS2#driver_state{time=Time2, history_id = HistoryId};
+ true ->
+ DS2#driver_state{time=Time2}
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% Generate teller_id, account_id and delta
+%% Time the TPC-B transaction
+time_trans(DS) ->
+ OldSeed = get(random_seed), % Avoid interference with Mnesia
+ put(random_seed, DS#driver_state.seed),
+ Random = random:uniform(),
+ NewSeed = get(random_seed),
+ case OldSeed of
+ undefined -> erase(random_seed);
+ _ -> put(random_seed, OldSeed)
+ end,
+
+ TC = DS#driver_state.tab_config,
+ RC = DS#driver_state.run_config,
+ {Branchid, Args} = random_to_args(Random, DS),
+ {Fun, Mod} = trans_type(TC, RC),
+ {Time, Res} = timer:tc(?MODULE, real_trans, [RC, Branchid, Fun, Args, Mod]),
+
+ case Res of
+ AccountBal when is_integer(AccountBal) ->
+ {Time, DS#driver_state{seed = NewSeed}};
+ Other ->
+ exit({crash, Other, Args, Random, DS})
+ end.
+
+random_to_args(Random, DS) ->
+ DriverId = DS#driver_state.driver_id,
+ TC = DS#driver_state.tab_config,
+ HistoryId = DS#driver_state.history_id,
+ Delta = trunc(Random * 1999998) - 999999, % -999999 <= Delta <= +999999
+
+ Branches = DS#driver_state.local_branches,
+ NB = DS#driver_state.n_local_branches,
+ NT = TC#tab_config.n_tellers_per_branch,
+ NA = TC#tab_config.n_accounts_per_branch,
+ Tmp = trunc(Random * NB * NT),
+ BranchPos = (Tmp div NT) + 1,
+ BranchId =
+ case TC#tab_config.n_fragments of
+ 0 -> BranchPos - 1;
+ _ -> lists:nth(BranchPos, Branches)
+ end,
+ RelativeTellerId = Tmp div NT,
+ TellerId = (BranchId * NT) + RelativeTellerId,
+ {AccountBranchId, AccountId} =
+ if
+ Random >= 0.85, NB > 1 ->
+ %% Pick from a remote account
+ TmpAccountId= trunc(Random * (NB - 1) * NA),
+ TmpAccountBranchId = TmpAccountId div NA,
+ if
+ TmpAccountBranchId =:= BranchId ->
+ {TmpAccountBranchId + 1, TmpAccountId + NA};
+ true ->
+ {TmpAccountBranchId, TmpAccountId}
+ end;
+ true ->
+ %% Pick from a local account
+ RelativeAccountId = trunc(Random * NA),
+ TmpAccountId = (BranchId * NA) + RelativeAccountId,
+ {BranchId, TmpAccountId}
+ end,
+
+ {BranchId, [DriverId, BranchId, TellerId, AccountBranchId, AccountId, HistoryId, Delta]}.
+
+real_trans(RC, BranchId, Fun, Args, Mod) ->
+ Type = RC#run_config.activity_type,
+ case RC#run_config.spawn_near_branch of
+ false ->
+ mnesia:activity(Type, Fun, Args, Mod);
+ true ->
+ Node = table_info({branch, BranchId}, where_to_read),
+ case rpc:call(Node, mnesia, activity, [Type, Fun, Args, Mod]) of
+ {badrpc, Reason} -> exit(Reason);
+ Other -> Other
+ end
+ end.
+
+trans_type(TC, RC) ->
+ if
+ TC#tab_config.n_fragments =:= 0,
+ RC#run_config.use_sticky_locks =:= false ->
+ {fun add_delta/7, mnesia};
+ TC#tab_config.n_fragments =:= 0,
+ RC#run_config.use_sticky_locks =:= true ->
+ {fun sticky_add_delta/7, mnesia};
+ TC#tab_config.n_fragments > 0,
+ RC#run_config.use_sticky_locks =:= false ->
+ {fun frag_add_delta/7, mnesia_frag}
+ end.
+
+%%
+%% Runs the TPC-B defined transaction and returns NewAccountBalance
+%%
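+%% Since the same Delta is applied to one branch, one teller and one
+%% account row in each transaction, the per-table balance totals stay
+%% equal over a run; this is the invariant that count_balance/0 and
+%% check_balance/2 verify. Illustrative arithmetic: after deltas +5
+%% and -3, sum(branch) = sum(teller) = sum(account) = 2.
+%%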
+
+add_delta(DriverId, BranchId, TellerId, _AccountBranchId, AccountId, HistoryId, Delta) ->
+ %% Grab write lock already when the record is read
+
+ %% Add delta to branch balance
+ [B] = mnesia:read(branch, BranchId, write),
+ NewB = B#branch{balance = B#branch.balance + Delta},
+ ok = mnesia:write(branch, NewB, write),
+
+ %% Add delta to teller balance
+ [T] = mnesia:read(teller, TellerId, write),
+ NewT = T#teller{balance = T#teller.balance + Delta},
+ ok = mnesia:write(teller, NewT, write),
+
+ %% Add delta to account balance
+ [A] = mnesia:read(account, AccountId, write),
+ NewA = A#account{balance = A#account.balance + Delta},
+ ok = mnesia:write(account, NewA, write),
+
+ %% Append to history log
+ History = #history{history_id = {DriverId, HistoryId},
+ account_id = AccountId,
+ teller_id = TellerId,
+ branch_id = BranchId,
+ amount = Delta
+ },
+ ok = mnesia:write(history, History, write),
+
+ %% Return account balance
+ NewA#account.balance.
+
+sticky_add_delta(DriverId, BranchId, TellerId, _AccountBranchId, AccountId, HistoryId, Delta) ->
+    %% Grab ordinary read lock when the record is read
+    %% Grab sticky write lock when the record is written
+    %% This transaction would benefit from an early sticky_write lock at read time
+
+ %% Add delta to branch balance
+ [B] = mnesia:read(branch, BranchId, read),
+ NewB = B#branch{balance = B#branch.balance + Delta},
+ ok = mnesia:write(branch, NewB, sticky_write),
+
+ %% Add delta to teller balance
+ [T] = mnesia:read(teller, TellerId, read),
+ NewT = T#teller{balance = T#teller.balance + Delta},
+ ok = mnesia:write(teller, NewT, sticky_write),
+
+ %% Add delta to account balance
+ [A] = mnesia:read(account, AccountId, read),
+ NewA = A#account{balance = A#account.balance + Delta},
+ ok = mnesia:write(account, NewA, sticky_write),
+
+ %% Append to history log
+ History = #history{history_id = {DriverId, HistoryId},
+ account_id = AccountId,
+ teller_id = TellerId,
+ branch_id = BranchId,
+ amount = Delta
+ },
+ ok = mnesia:write(history, History, sticky_write),
+
+ %% Return account balance
+ NewA#account.balance.
+
+frag_add_delta(DriverId, BranchId, TellerId, AccountBranchId, AccountId, HistoryId, Delta) ->
+ %% Access fragmented table
+ %% Grab write lock already when the record is read
+
+ %% Add delta to branch balance
+ [B] = mnesia:read(branch, BranchId, write),
+ NewB = B#branch{balance = B#branch.balance + Delta},
+ ok = mnesia:write(NewB),
+
+ %% Add delta to teller balance
+ [T] = mnesia:read({teller, BranchId}, TellerId, write),
+ NewT = T#teller{balance = T#teller.balance + Delta},
+ ok = mnesia:write(NewT),
+
+ %% Add delta to account balance
+ %%io:format("frag_add_delta(~p): ~p\n", [node(), {account, BranchId, AccountId}]),
+ [A] = mnesia:read({account, AccountBranchId}, AccountId, write),
+ NewA = A#account{balance = A#account.balance + Delta},
+ ok = mnesia:write(NewA),
+
+ %% Append to history log
+ History = #history{history_id = {DriverId, HistoryId},
+ account_id = AccountId,
+ teller_id = TellerId,
+ branch_id = BranchId,
+ amount = Delta
+ },
+ ok = mnesia:write(History),
+
+ %% Return account balance
+ NewA#account.balance.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Verify table consistency
+
+verify_tabs() ->
+ Nodes = mnesia:system_info(running_db_nodes),
+ case lists:member(node(), Nodes) of
+ true ->
+ Tabs = [branch, teller, account, history],
+ io:format("TPC-B: Verifying tables: ~w~n", [Tabs]),
+ rpc:multicall(Nodes, mnesia, wait_for_tables, [Tabs, infinity]),
+
+ Fun = fun() ->
+ mnesia:write_lock_table(branch),
+ mnesia:write_lock_table(teller),
+ mnesia:write_lock_table(account),
+ mnesia:write_lock_table(history),
+ {Res, BadNodes} =
+ rpc:multicall(Nodes, ?MODULE, count_balance, []),
+ check_balance(Res, BadNodes)
+ end,
+ case mnesia:transaction(Fun) of
+ {atomic, Res} -> Res;
+ {aborted, Reason} -> {error, Reason}
+ end;
+ false ->
+ {error, "Must be initiated from a running db_node"}
+ end.
+
+%% Returns a list of #summary{} records, one per table on this node
+%% Assumes that no updates are performed
+
+-record(summary, {table, node, balance, size}).
+
+count_balance() ->
+ [count_balance(branch, #branch.balance),
+ count_balance(teller, #teller.balance),
+ count_balance(account, #account.balance)].
+
+count_balance(Tab, BalPos) ->
+ Frags = table_info(Tab, frag_names),
+ count_balance(Tab, Frags, 0, 0, BalPos).
+
+count_balance(Tab, [Frag | Frags], Bal, Size, BalPos) ->
+ First = mnesia:dirty_first(Frag),
+ {Bal2, Size2} = count_frag_balance(Frag, First, Bal, Size, BalPos),
+ count_balance(Tab, Frags, Bal2, Size2, BalPos);
+count_balance(Tab, [], Bal, Size, _BalPos) ->
+ #summary{table = Tab, node = node(), balance = Bal, size = Size}.
+
+count_frag_balance(_Frag, '$end_of_table', Bal, Size, _BalPos) ->
+ {Bal, Size};
+count_frag_balance(Frag, Key, Bal, Size, BalPos) ->
+ [Record] = mnesia:dirty_read({Frag, Key}),
+ Bal2 = Bal + element(BalPos, Record),
+ Next = mnesia:dirty_next(Frag, Key),
+ count_frag_balance(Frag, Next, Bal2, Size + 1, BalPos).
+
+check_balance([], []) ->
+ mnesia:abort({"No balance"});
+check_balance(Summaries, []) ->
+ [One | Rest] = lists:flatten(Summaries),
+ Balance = One#summary.balance,
+ %% Size = One#summary.size,
+ case [S || S <- Rest, S#summary.balance =/= Balance] of
+ [] ->
+ ok;
+ BadSummaries ->
+ mnesia:abort({"Bad balance", One, BadSummaries})
+ end;
+check_balance(_, BadNodes) ->
+ mnesia:abort({"Bad nodes", BadNodes}).
diff --git a/lib/mnesia/test/mnesia_trans_access_test.erl b/lib/mnesia/test/mnesia_trans_access_test.erl
new file mode 100644
index 0000000000..c67382e694
--- /dev/null
+++ b/lib/mnesia/test/mnesia_trans_access_test.erl
@@ -0,0 +1,1254 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(mnesia_trans_access_test).
+-author('[email protected]').
+-compile([export_all]).
+-include("mnesia_test_lib.hrl").
+
+init_per_testcase(Func, Conf) ->
+ mnesia_test_lib:init_per_testcase(Func, Conf).
+
+fin_per_testcase(Func, Conf) ->
+ mnesia_test_lib:fin_per_testcase(Func, Conf).
+
+-define(receive_messages(Msgs), mnesia_recovery_test:receive_messages(Msgs, ?FILE, ?LINE)).
+
+% First some debug logging
+-define(dgb, true).
+-ifdef(dgb).
+-define(dl(X, Y), ?verbose("**TRACING: " ++ X ++ "**~n", Y)).
+-else.
+-define(dl(X, Y), ok).
+-endif.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+all(doc) ->
+ ["Evil access of records in the scope of transactions",
+ "Invoke all functions in the API and try to cover all legal uses",
+ "cases as well the illegal dito. This is a complement to the",
+ "other more explicit test cases."];
+all(suite) ->
+ [
+ write, read, wread, delete, delete_object,
+ match_object, select, select14, all_keys,
+ transaction, nested_activities,
+ index_tabs, index_lifecycle
+ ].
+
+%% Write records
+
+write(suite) -> [];
+write(Config) when is_list(Config) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = write,
+ Schema = [{name, Tab}, {attributes, [k, v]}, {ram_copies, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+
+ ?match({aborted, {bad_type, _}},
+ mnesia:transaction(fun() -> mnesia:write([]) end)),
+ ?match({aborted, {bad_type, _}},
+ mnesia:transaction(fun() -> mnesia:write({Tab, 2}) end)),
+ ?match({aborted, _},
+ mnesia:transaction(fun() -> mnesia:write({foo, 2}) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write({Tab, 1, 2}) end)),
+
+ ?match({'EXIT', {aborted, no_transaction}}, mnesia:write({Tab, 1, 2})),
+ ?verify_mnesia(Nodes, []).
+
+%% Read records
+
+read(suite) -> [];
+read(Config) when is_list(Config) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = read,
+ Schema = [{name, Tab}, {type, bag}, {attributes, [k, v]}, {ram_copies, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+
+ OneRec = {Tab, 1, 2},
+ TwoRec = {Tab, 1, 3},
+ ?match({aborted, {bad_type, _}},
+ mnesia:transaction(fun() -> mnesia:read([]) end)),
+ ?match({aborted, {bad_type, _}},
+ mnesia:transaction(fun() -> mnesia:read({Tab}) end)),
+ ?match({aborted, {bad_type, _}}
+ , mnesia:transaction(fun() -> mnesia:read(OneRec) end)),
+ ?match({atomic, []},
+ mnesia:transaction(fun() -> mnesia:read({Tab, 1}) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(OneRec) end)),
+ ?match({atomic, [OneRec]},
+ mnesia:transaction(fun() -> mnesia:read({Tab, 1}) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(TwoRec) end)),
+ ?match({atomic, [OneRec, TwoRec]},
+ mnesia:transaction(fun() -> mnesia:read({Tab, 1}) end)),
+
+ ?match({'EXIT', {aborted, no_transaction}}, mnesia:read({Tab, 1})),
+ ?verify_mnesia(Nodes, []).
+
+%% Read records and set write lock
+
+wread(suite) -> [];
+wread(Config) when is_list(Config) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = wread,
+ Schema = [{name, Tab}, {type, set}, {attributes, [k, v]}, {ram_copies, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+
+ OneRec = {Tab, 1, 2},
+ TwoRec = {Tab, 1, 3},
+ ?match({aborted, {bad_type, _}},
+ mnesia:transaction(fun() -> mnesia:wread([]) end)),
+ ?match({aborted, {bad_type, _}},
+ mnesia:transaction(fun() -> mnesia:wread({Tab}) end)),
+ ?match({aborted, {bad_type, _}}
+ , mnesia:transaction(fun() -> mnesia:wread(OneRec) end)),
+
+ ?match({atomic, []},
+ mnesia:transaction(fun() -> mnesia:wread({Tab, 1}) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(OneRec) end)),
+
+ ?match({atomic, [OneRec]},
+ mnesia:transaction(fun() -> mnesia:wread({Tab, 1}) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(TwoRec) end)),
+ ?match({atomic, [TwoRec]},
+ mnesia:transaction(fun() -> mnesia:wread({Tab, 1}) end)),
+
+ ?match({'EXIT', {aborted, no_transaction}}, mnesia:wread({Tab, 1})),
+ ?verify_mnesia(Nodes, []).
+
+%% Delete record
+
+delete(suite) -> [];
+delete(Config) when is_list(Config) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = delete,
+ Schema = [{name, Tab}, {type, bag}, {attributes, [k, v]}, {ram_copies, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+
+ ?match({aborted, {bad_type, _}},
+ mnesia:transaction(fun() -> mnesia:delete([]) end)),
+ ?match({aborted, {bad_type, _}},
+ mnesia:transaction(fun() -> mnesia:delete({Tab}) end)),
+ ?match({aborted, {bad_type, _}}
+ , mnesia:transaction(fun() -> mnesia:delete({Tab, 1, 2}) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:delete({Tab, 1}) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write({Tab, 1, 2}) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:delete({Tab, 1}) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write({Tab, 1, 2}) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write({Tab, 1, 2}) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:delete({Tab, 1}) end)),
+
+ ?match({'EXIT', {aborted, no_transaction}}, mnesia:delete({Tab, 1})),
+ ?verify_mnesia(Nodes, []).
+
+%% Delete matching record
+
+delete_object(suite) -> [];
+delete_object(Config) when is_list(Config) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = delete_object,
+ Schema = [{name, Tab}, {type, bag}, {attributes, [k, v]}, {ram_copies, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+
+ OneRec = {Tab, 1, 2},
+ ?match({aborted, {bad_type, _}},
+ mnesia:transaction(fun() -> mnesia:delete_object([]) end)),
+ ?match({aborted, {bad_type, _}},
+ mnesia:transaction(fun() -> mnesia:delete_object({Tab}) end)),
+ ?match({aborted, {bad_type, _}},
+ mnesia:transaction(fun() -> mnesia:delete_object({Tab, 1}) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:delete_object(OneRec) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(OneRec) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:delete_object(OneRec) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(OneRec) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(OneRec) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:delete_object(OneRec) end)),
+
+ ?match({'EXIT', {aborted, no_transaction}}, mnesia:delete_object(OneRec)),
+
+ ?match({aborted, {bad_type, Tab, _}},
+ mnesia:transaction(fun() -> mnesia:delete_object({Tab, {['_']}, 21}) end)),
+ ?match({aborted, {bad_type, Tab, _}},
+ mnesia:transaction(fun() -> mnesia:delete_object({Tab, {['$5']}, 21}) end)),
+
+ ?verify_mnesia(Nodes, []).
+
+%% Read matching records
+
+match_object(suite) -> [];
+match_object(Config) when is_list(Config) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = match,
+ Schema = [{name, Tab}, {attributes, [k, v]}, {ram_copies, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+
+ OneRec = {Tab, 1, 2},
+ OnePat = {Tab, '$1', 2},
+ ?match({atomic, []},
+ mnesia:transaction(fun() -> mnesia:match_object(OnePat) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(OneRec) end)),
+ ?match({atomic, [OneRec]},
+ mnesia:transaction(fun() -> mnesia:match_object(OnePat) end)),
+
+ ?match({aborted, _},
+ mnesia:transaction(fun() -> mnesia:match_object({foo, '$1', 2}) end)),
+ ?match({aborted, _},
+ mnesia:transaction(fun() -> mnesia:match_object({[], '$1', 2}) end)),
+
+ ?match({'EXIT', {aborted, no_transaction}}, mnesia:match_object(OnePat)),
+ ?verify_mnesia(Nodes, []).
+
+%% select
+select(suite) -> [];
+select(Config) when is_list(Config) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = match,
+ Schema = [{name, Tab}, {attributes, [k, v]}, {ram_copies, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+
+ OneRec = {Tab, 1, 2},
+ TwoRec = {Tab, 2, 3},
+ OnePat = [{{Tab, '$1', 2}, [], ['$_']}],
+ ?match({atomic, []},
+ mnesia:transaction(fun() -> mnesia:select(Tab, OnePat) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(OneRec) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(TwoRec) end)),
+ ?match({atomic, [OneRec]},
+ mnesia:transaction(fun() -> mnesia:select(Tab, OnePat) end)),
+
+ ?match({aborted, _},
+ mnesia:transaction(fun() -> mnesia:select(Tab, {match, '$1', 2}) end)),
+ ?match({aborted, _},
+ mnesia:transaction(fun() -> mnesia:select(Tab, [{'_', [], '$1'}]) end)),
+
+ ?match({'EXIT', {aborted, no_transaction}}, mnesia:select(Tab, OnePat)),
+ ?verify_mnesia(Nodes, []).
+
+
+%% more select
+select14(suite) -> [];
+select14(Config) when is_list(Config) ->
+ [Node1,Node2] = Nodes = ?acquire_nodes(2, Config),
+ Tab1 = select14_ets,
+ Tab2 = select14_dets,
+ Tab3 = select14_remote,
+ Tab4 = select14_remote_dets,
+ Schemas = [[{name, Tab1}, {attributes, [k, v]}, {ram_copies, [Node1]}],
+ [{name, Tab2}, {attributes, [k, v]}, {disc_only_copies, [Node1]}],
+ [{name, Tab3}, {attributes, [k, v]}, {ram_copies, [Node2]}],
+ [{name, Tab4}, {attributes, [k, v]}, {disc_only_copies, [Node2]}]],
+ [?match({atomic, ok}, mnesia:create_table(Schema)) || Schema <- Schemas],
+
+ %% Some Helpers
+ Trans = fun(Fun) -> mnesia:transaction(Fun) end,
+ LoopHelp = fun('$end_of_table',_) -> [];
+ ({Recs,Cont},Fun) ->
+ Sel = mnesia:select(Cont),
+ Recs ++ Fun(Sel, Fun)
+ end,
+ Loop = fun(Table,Pattern) ->
+ Sel = mnesia:select(Table, Pattern, 1, read),
+ Res = LoopHelp(Sel,LoopHelp),
+ case mnesia:table_info(Table, type) of
+ ordered_set -> Res;
+ _ -> lists:sort(Res)
+ end
+ end,
+ Test =
+ fun(Tab) ->
+ OneRec = {Tab, 1, 2},
+ TwoRec = {Tab, 2, 3},
+ OnePat = [{{Tab, '$1', 2}, [], ['$_']}],
+ All = [OneRec,TwoRec],
+ AllPat = [{'_', [], ['$_']}],
+
+ ?match({atomic, []}, Trans(fun() -> Loop(Tab, OnePat) end)),
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write(OneRec) end)),
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write(TwoRec) end)),
+ ?match({atomic, [OneRec]}, Trans(fun() -> Loop(Tab, OnePat) end)),
+ ?match({atomic, All}, Trans(fun() -> Loop(Tab, AllPat) end)),
+
+ {atomic,{_, Cont}} = Trans(fun() -> mnesia:select(Tab, OnePat, 1, read) end),
+ ?match({aborted, wrong_transaction}, Trans(fun() -> mnesia:select(Cont) end)),
+
+ ?match({aborted, _}, Trans(fun() -> mnesia:select(Tab, {match, '$1', 2},1,read) end)),
+ ?match({aborted, _}, Trans(fun() -> mnesia:select(Tab, [{'_', [], '$1'}],1,read) end)),
+ ?match({aborted, _}, Trans(fun() -> mnesia:select(sune) end)),
+ ?match({'EXIT', {aborted, no_transaction}}, mnesia:select(Tab, OnePat,1,read)),
+ ?match({aborted, {badarg,sune}},
+ Trans(fun() -> mnesia:select(sune) end))
+ end,
+ Test(Tab1),
+ Test(Tab2),
+ Test(Tab3),
+ Test(Tab4),
+ ?verify_mnesia(Nodes, []).
+
+
+%% Pick all keys from table
+
+all_keys(suite) ->[];
+all_keys(Config) when is_list(Config) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = all_keys,
+ Schema = [{name, Tab}, {type, bag}, {attributes, [k, v]}, {ram_copies, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+
+ Write = fun() -> mnesia:write({Tab, 14, 4}) end,
+ AllKeys = fun() -> mnesia:all_keys(Tab) end,
+
+ ?match({atomic, []}, mnesia:transaction(AllKeys)),
+
+ ?match({atomic, ok}, mnesia:transaction(Write)),
+ ?match({atomic, [14]}, mnesia:transaction(AllKeys)),
+
+ ?match({atomic, ok}, mnesia:transaction(Write)),
+ ?match({atomic, [14]}, mnesia:transaction(AllKeys)),
+
+ ?match({aborted, _},
+ mnesia:transaction(fun() -> mnesia:all_keys(foo) end)),
+ ?match({aborted, _},
+ mnesia:transaction(fun() -> mnesia:all_keys([]) end)),
+
+ ?match({'EXIT', {aborted, no_transaction}}, mnesia:all_keys(Tab)),
+ ?verify_mnesia(Nodes, []).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Use and misuse transactions
+
+transaction(suite) -> [];
+transaction(Config) when is_list(Config) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ ?match({atomic, ali_baba}, mnesia:transaction(fun() -> ali_baba end)),
+ ?match({aborted, _}, mnesia:transaction(no_fun)),
+ ?match({aborted, _}, mnesia:transaction(?MODULE, no_fun, [foo])),
+
+ {success, [A, B, C, D, E, F, G, H]} =
+ ?start_activities(lists:duplicate(8, Node1)),
+ ?start_transactions([A, B, C, D, E, F, G, H]),
+
+ A ! fun() -> mnesia:abort(abort_bad_trans) end,
+ ?match_receive({A, {aborted, abort_bad_trans}}),
+
+ B ! fun() -> erlang:error(exit_here) end,
+ ?match_receive({B, {aborted, _}}),
+
+ C ! fun() -> throw(throw_bad_trans) end,
+ ?match_receive({C, {aborted, {throw, throw_bad_trans}}}),
+
+ D ! fun() -> exit(exit_bad_trans) end,
+ ?match_receive({D, {aborted, exit_bad_trans}}),
+
+ E ! fun() -> exit(normal) end,
+ ?match_receive({E, {aborted, normal}}),
+
+ F ! fun() -> exit(abnormal) end,
+ ?match_receive({F, {aborted, abnormal}}),
+
+ G ! fun() -> exit(G, abnormal) end,
+ ?match_receive({'EXIT', G, abnormal}),
+
+ H ! fun() -> exit(H, kill) end,
+ ?match_receive({'EXIT', H, killed}),
+
+ ?match({atomic, ali_baba},
+ mnesia:transaction(fun() -> ali_baba end, infinity)),
+ ?match({atomic, ali_baba}, mnesia:transaction(fun() -> ali_baba end, 1)),
+ ?match({atomic, ali_baba}, mnesia:transaction(fun() -> ali_baba end, 0)),
+ ?match({aborted, Reason8} when element(1, Reason8) == badarg, mnesia:transaction(fun() -> ali_baba end, -1)),
+ ?match({aborted, Reason1} when element(1, Reason1) == badarg, mnesia:transaction(fun() -> ali_baba end, foo)),
+ Fun = fun() ->
+ ?match(true, mnesia:is_transaction()),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> ?match(true, mnesia:is_transaction()),ok end)), ok end,
+ ?match({atomic, ok}, mnesia:transaction(Fun)),
+ ?verify_mnesia(Nodes, []).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+nested_activities(suite) ->
+ [
+ basic_nested,
+ nested_transactions,
+ mix_of_nested_activities
+ ].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% Ensure that nested transactions behave correctly
+%% We create a particular table that is used by this test only
+-record(ntab, {a, b}).
+basic_nested(doc) -> ["Test the basic functionality of nested transactions"];
+basic_nested(suite) -> [];
+basic_nested(Config) when is_list(Config) ->
+ Nodes = ?acquire_nodes(3, Config),
+ Args = [{ram_copies, Nodes},
+ {attributes, record_info(fields, ntab)}],
+ ?match({atomic, ok}, mnesia:create_table(ntab, Args)),
+ do_nested(top),
+ case mnesia_test_lib:diskless(Config) of
+ false ->
+ lists:foreach(fun(N) ->
+ ?match({atomic, ok},
+ mnesia:change_table_copy_type(ntab, N, disc_only_copies))
+ end, Nodes),
+ do_nested(top);
+ true ->
+ skip
+ end,
+ ?verify_mnesia(Nodes, []).
+
+do_nested(How) ->
+ F1 = fun() ->
+ mnesia:write(#ntab{a= 1}),
+ mnesia:write(#ntab{a= 2})
+ end,
+ F2 = fun() ->
+ mnesia:read({ntab, 1})
+ end,
+ ?match({atomic, ok}, mnesia:transaction(F1)),
+ ?match({atomic, _}, mnesia:transaction(F2)),
+
+ ?match({atomic, {aborted, _}},
+ mnesia:transaction(fun() -> n_f1(),
+ mnesia:transaction(fun() -> n_f2() end)
+ end)),
+
+ ?match({atomic, {aborted, _}},
+ mnesia:transaction(fun() -> n_f1(),
+ mnesia:transaction(fun() -> n_f3() end)
+ end)),
+ ?match({atomic, {atomic, [#ntab{a = 5}]}},
+ mnesia:transaction(fun() -> mnesia:write(#ntab{a = 5}),
+ mnesia:transaction(fun() -> n_f4() end)
+ end)),
+ Cyclic = fun() -> mnesia:abort({cyclic,a,a,a,a,a}) end, %% Ugly
+ NodeNotR = fun() -> mnesia:abort({node_not_running, testNode}) end,
+
+ TestAbort = fun(Fun) ->
+ case get(restart_counter) of
+ undefined ->
+ put(restart_counter, 1),
+ Fun();
+ _ ->
+ erase(restart_counter),
+ ok
+ end
+ end,
+
+ ?match({atomic,{atomic,ok}},
+ mnesia:transaction(fun()->mnesia:transaction(TestAbort,
+ [Cyclic])end)),
+
+ ?match({atomic,{atomic,ok}},
+ mnesia:transaction(fun()->mnesia:transaction(TestAbort,
+ [NodeNotR])end)),
+
+ %% Now try the restart thingie
+ case How of
+ top ->
+ Pids = [spawn(?MODULE, do_nested, [{spawned, self()}]),
+ spawn(?MODULE, do_nested, [{spawned, self()}]),
+ spawn(?MODULE, do_nested, [{spawned, self()}]),
+ spawn(?MODULE, do_nested, [{spawned, self()}])],
+ ?match({info, _, _}, mnesia_tm:get_info(2000)),
+ lists:foreach(fun(P) -> receive
+ {P, ok} -> ok
+ end
+ end, Pids),
+ ?match([], [Tab || Tab <- ets:all(), mnesia_trans_store == ets:info(Tab, name)]);
+
+ {spawned, Pid} ->
+ ?match({info, _, _}, mnesia_tm:get_info(2000)),
+ Pid ! {self(), ok},
+ exit(normal)
+ end.
+
+
+n_f1() ->
+ mnesia:read({ntab, 1}),
+ mnesia:write(#ntab{a = 3}).
+
+n_f2() ->
+ mnesia:write(#ntab{a = 4}),
+ erlang:error(exit_here).
+
+n_f3() ->
+ mnesia:write(#ntab{a = 4}),
+ throw(funky).
+
+n_f4() ->
+ mnesia:read({ntab, 5}).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+nested_transactions(doc) ->
+ ["Verify that nested_transactions are handled as expected"];
+nested_transactions(suite) ->
+ [nested_trans_both_ok,
+ nested_trans_child_dies,
+ nested_trans_parent_dies,
+ nested_trans_both_dies].
+
+nested_trans_both_ok(suite) -> [];
+nested_trans_both_ok(Config) when is_list(Config) ->
+ nested_transactions(Config, ok, ok).
+
+nested_trans_child_dies(suite) -> [];
+nested_trans_child_dies(Config) when is_list(Config) ->
+ nested_transactions(Config, abort, ok).
+
+nested_trans_parent_dies(suite) -> [];
+nested_trans_parent_dies(Config) when is_list(Config) ->
+ nested_transactions(Config, ok, abort).
+
+nested_trans_both_dies(suite) -> [];
+nested_trans_both_dies(Config) when is_list(Config) ->
+ nested_transactions(Config, abort, abort).
+
+nested_transactions(Config, Child, Father) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Tab = nested_trans,
+
+ Def =
+ case mnesia_test_lib:diskless(Config) of
+ true ->
+ [{name, Tab}, {ram_copies, Nodes}];
+ false ->
+ [{name, Tab}, {ram_copies, [Node1]},
+ {disc_copies, [Node2]}, {disc_only_copies, [Node3]}]
+ end,
+
+ ?match({atomic, ok}, mnesia:create_table(Def)),
+ ?match(ok, mnesia:dirty_write({Tab, father, not_updated})),
+ ?match(ok, mnesia:dirty_write({Tab, child, not_updated})),
+
+ ChildOk = fun() -> mnesia:write({Tab, child, updated}) end,
+ ChildAbort = fun() ->
+ mnesia:write({Tab, child, updated}),
+ erlang:error(exit_here)
+ end,
+
+    Child_Fun = % Depending on the test case
+ case Child of
+ ok -> ChildOk;
+ abort -> ChildAbort
+ end,
+
+ FatherOk = fun() -> mnesia:transaction(Child_Fun),
+ mnesia:write({Tab, father, updated})
+ end,
+
+ FatherAbort = fun() -> mnesia:transaction(Child_Fun),
+ mnesia:write({Tab, father, updated}),
+ erlang:error(exit_here)
+ end,
+
+    {FatherRes, ChildRes} = % Depending on the test case
+ case Father of
+ ok -> ?match({atomic, ok}, mnesia:transaction(FatherOk)),
+ case Child of
+ ok -> {[{Tab, father, updated}], [{Tab, child, updated}]};
+ _ -> {[{Tab, father, updated}], [{Tab, child, not_updated}]}
+ end;
+ abort -> ?match({aborted, _}, mnesia:transaction(FatherAbort)),
+ {[{Tab, father, not_updated}], [{Tab, child, not_updated}]}
+ end,
+
+    %% Synchronize things!!
+ ?match({atomic, ok}, mnesia:sync_transaction(fun() -> mnesia:write({Tab, sync, sync}) end)),
+
+ ?match(ChildRes, rpc:call(Node1, mnesia, dirty_read, [{Tab, child}])),
+ ?match(ChildRes, rpc:call(Node2, mnesia, dirty_read, [{Tab, child}])),
+ ?match(ChildRes, rpc:call(Node3, mnesia, dirty_read, [{Tab, child}])),
+
+ ?match(FatherRes, rpc:call(Node1, mnesia, dirty_read, [{Tab, father}])),
+ ?match(FatherRes, rpc:call(Node2, mnesia, dirty_read, [{Tab, father}])),
+ ?match(FatherRes, rpc:call(Node3, mnesia, dirty_read, [{Tab, father}])),
+ ?verify_mnesia(Nodes, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+mix_of_nested_activities(doc) ->
+ ["Verify that dirty operations in a transaction are handled like ",
+ "normal transactions"];
+mix_of_nested_activities(suite) -> [];
+mix_of_nested_activities(Config) when is_list(Config) ->
+ [Node1, Node2, Node3] = Nodes = ?acquire_nodes(3, Config),
+ Tab = tab,
+
+ Def =
+ case mnesia_test_lib:diskless(Config) of
+ true -> [{ram_copies, Nodes}];
+ false ->
+ [{ram_copies, [Node1]},
+ {disc_copies, [Node2]},
+ {disc_only_copies, [Node3]}]
+ end,
+
+ ?match({atomic, ok}, mnesia:create_table(Tab, [{type,bag}|Def])),
+ Activities = [transaction, sync_transaction,
+ ets, async_dirty, sync_dirty],
+    %% Make a test for all 3125 (5^5) combinations
+ Tests = [[A,B,C,D,E] ||
+ A <- Activities,
+ B <- Activities,
+ C <- Activities,
+ D <- Activities,
+ E <- Activities],
+ Foreach =
+ fun(Test,No) ->
+ Result = lists:reverse(Test),
+ ?match({No,Result},{No,catch apply_op({Tab,No},Test)}),
+ No+1
+ end,
+ lists:foldl(Foreach, 0, Tests),
+ ?verify_mnesia(Nodes, []).
+
+apply_op(Oid,[Type]) ->
+ check_res(Type,mnesia:Type(fun() -> [Type|read_op(Oid)] end));
+apply_op(Oid = {Tab,Key},[Type|Next]) ->
+ check_res(Type,mnesia:Type(fun() ->
+ Prev = read_op(Oid),
+ mnesia:write({Tab,Key,[Type|Prev]}),
+ apply_op(Oid,Next)
+ end)).
+
+check_res(transaction, {atomic,Res}) ->
+ Res;
+check_res(sync_transaction, {atomic,Res}) ->
+ Res;
+check_res(async_dirty, Res) when is_list(Res) ->
+ Res;
+check_res(sync_dirty, Res) when is_list(Res) ->
+ Res;
+check_res(ets, Res) when is_list(Res) ->
+ Res;
+check_res(Type,Res) ->
+ ?match(bug,{Type,Res}).
+
+read_op(Oid) ->
+ case lists:reverse(mnesia:read(Oid)) of
+ [] -> [];
+ [{_,_,Ops}|_] ->
+ Ops
+ end.
+
+index_tabs(suite) ->
+ [
+ index_match_object,
+ index_read,
+ index_update,
+ index_write
+ ].
+
+%% Read matching records by using an index
+
+index_match_object(suite) -> [];
+index_match_object(Config) when is_list(Config) ->
+ [Node1, Node2] = Nodes = ?acquire_nodes(2, Config),
+ Tab = index_match_object,
+ Schema = [{name, Tab}, {attributes, [k, v, e]}, {ram_copies, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+ ValPos = 3,
+ BadValPos = ValPos + 2,
+ ?match({atomic, ok}, mnesia:add_table_index(Tab, ValPos)),
+
+ ?match({atomic, []},
+ mnesia:transaction(fun() -> mnesia:index_match_object({Tab, '$1', 2}, ValPos) end)),
+ OneRec = {Tab, {1, 1}, 2, {1, 1}},
+ OnePat = {Tab, '$1', 2, '_'},
+ BadPat = {Tab, '$1', '$2', '_'}, %% See ref guide
+
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(OneRec) end)),
+
+ Imatch = fun(Patt, Pos) ->
+ mnesia:transaction(fun() -> lists:sort(mnesia:index_match_object(Patt, Pos)) end)
+ end,
+ ?match({atomic, [OneRec]}, Imatch(OnePat, ValPos)),
+ ?match({aborted, _}, Imatch(OnePat, BadValPos)),
+ ?match({aborted, _}, Imatch({foo, '$1', 2, '_'}, ValPos)),
+ ?match({aborted, _}, Imatch({[], '$1', 2, '_'}, ValPos)),
+ ?match({aborted, _}, Imatch(BadPat, ValPos)),
+ ?match({'EXIT', {aborted, no_transaction}}, mnesia:index_match_object(OnePat, ValPos)),
+
+ Another = {Tab, {3,1}, 2, {4,4}},
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(Another) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write({Tab, {4, 4}, 3, {4, 4}}) end)),
+
+ ?match({atomic, [OneRec]}, Imatch({Tab, {1,1}, 2, {1,1}}, ValPos)),
+ ?match({atomic, [OneRec]}, Imatch({Tab, {1,1}, 2, '$1'}, ValPos)),
+ ?match({atomic, [OneRec]}, Imatch({Tab, '$1', 2, {1,1}}, ValPos)),
+ ?match({atomic, [OneRec]}, Imatch({Tab, '$1', 2, '$1'}, ValPos)),
+ ?match({atomic, [OneRec]}, Imatch({Tab, {1, '$1'}, 2, '_'}, ValPos)),
+ ?match({atomic, [OneRec]}, Imatch({Tab, {'$2', '$1'}, 2, {'_', '$1'}}, ValPos)),
+ ?match({atomic, [OneRec, Another]}, Imatch({Tab, '_', 2, '_'}, ValPos)),
+
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write({Tab, 4, 5, {7, 4}}) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write({Tab, 7, 5, {7, 5}}) end)),
+
+ ?match({atomic, [{Tab, 4, 5, {7, 4}}]}, Imatch({Tab, '$1', 5, {'_', '$1'}}, ValPos)),
+
+ ?match({atomic, [OneRec]}, rpc:call(Node2, mnesia, transaction,
+ [fun() ->
+ lists:sort(mnesia:index_match_object({Tab, {1,1}, 2,
+ {1,1}}, ValPos))
+ end])),
+ ?verify_mnesia(Nodes, []).
+
+%% Read records by using an index
+
+index_read(suite) -> [];
+index_read(Config) when is_list(Config) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = index_read,
+ Schema = [{name, Tab}, {attributes, [k, v]}, {ram_copies, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+ ValPos = 3,
+ BadValPos = ValPos + 1,
+ ?match({atomic, ok}, mnesia:add_table_index(Tab, ValPos)),
+
+ OneRec = {Tab, 1, 2},
+ ?match({atomic, []},
+ mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(OneRec) end)),
+ ?match({atomic, [OneRec]},
+ mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end)),
+ ?match({aborted, _},
+ mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, BadValPos) end)),
+ ?match({aborted, _},
+ mnesia:transaction(fun() -> mnesia:index_read(foo, 2, ValPos) end)),
+ ?match({aborted, _},
+ mnesia:transaction(fun() -> mnesia:index_read([], 2, ValPos) end)),
+
+ ?match({'EXIT', {aborted, no_transaction}}, mnesia:index_read(Tab, 2, ValPos)),
+ ?verify_mnesia(Nodes, []).
+
+index_update(suite) -> [index_update_set, index_update_bag];
+index_update(doc) -> ["See Ticket OTP-2083, verifies that a table with a index is "
+ "update in the correct way i.e. the index finds the correct "
+ "records after a update"].
+index_update_set(suite) -> [];
+index_update_set(Config)when is_list(Config) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = index_test,
+ Schema = [{name, Tab}, {attributes, [k, v1, v2, v3]}, {ram_copies, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+ ValPos = v1,
+ ValPos2 = v3,
+ ?match({atomic, ok}, mnesia:add_table_index(Tab, ValPos)),
+
+ Pat1 = {Tab, '$1', 2, '$2', '$3'},
+ Pat2 = {Tab, '$1', '$2', '$3', '$4'},
+
+ Rec1 = {Tab, 1, 2, 3, 4},
+ Rec2 = {Tab, 2, 2, 13, 14},
+ Rec3 = {Tab, 1, 12, 13, 14},
+ Rec4 = {Tab, 4, 2, 13, 14},
+
+ ?match({atomic, []},
+ mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(Rec1) end)),
+ ?match({atomic, [Rec1]},
+ mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end)),
+
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(Rec2) end)),
+ {atomic, R1} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end),
+ ?match([Rec1, Rec2], lists:sort(R1)),
+
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(Rec3) end)),
+ {atomic, R2} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end),
+ ?match([Rec2], lists:sort(R2)),
+ ?match({atomic, [Rec2]},
+ mnesia:transaction(fun() -> mnesia:index_match_object(Pat1, ValPos) end)),
+
+ {atomic, R3} = mnesia:transaction(fun() -> mnesia:match_object(Pat2) end),
+ ?match([Rec3, Rec2], lists:sort(R3)),
+
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(Rec4) end)),
+ {atomic, R4} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end),
+ ?match([Rec2, Rec4], lists:sort(R4)),
+
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:delete({Tab, 4}) end)),
+ ?match({atomic, [Rec2]},
+ mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end)),
+
+ ?match({atomic, ok}, mnesia:del_table_index(Tab, ValPos)),
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write(Rec4) end)),
+ ?match({atomic, ok}, mnesia:add_table_index(Tab, ValPos)),
+ ?match({atomic, ok}, mnesia:add_table_index(Tab, ValPos2)),
+
+ {atomic, R5} = mnesia:transaction(fun() -> mnesia:match_object(Pat2) end),
+ ?match([Rec3, Rec2, Rec4], lists:sort(R5)),
+
+ {atomic, R6} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end),
+ ?match([Rec2, Rec4], lists:sort(R6)),
+
+ ?match({atomic, []},
+ mnesia:transaction(fun() -> mnesia:index_read(Tab, 4, ValPos2) end)),
+ {atomic, R7} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 14, ValPos2) end),
+ ?match([Rec3, Rec2, Rec4], lists:sort(R7)),
+
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write(Rec1) end)),
+ {atomic, R8} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end),
+ ?match([Rec1, Rec2, Rec4], lists:sort(R8)),
+ ?match({atomic, [Rec1]},
+ mnesia:transaction(fun() -> mnesia:index_read(Tab, 4, ValPos2) end)),
+ {atomic, R9} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 14, ValPos2) end),
+ ?match([Rec2, Rec4], lists:sort(R9)),
+
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:delete_object(Rec2) end)),
+ {atomic, R10} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end),
+ ?match([Rec1, Rec4], lists:sort(R10)),
+ ?match({atomic, [Rec1]},
+ mnesia:transaction(fun() -> mnesia:index_read(Tab, 4, ValPos2) end)),
+ ?match({atomic, [Rec4]},
+ mnesia:transaction(fun() -> mnesia:index_read(Tab, 14, ValPos2) end)),
+
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:delete({Tab, 4}) end)),
+ {atomic, R11} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end),
+ ?match([Rec1], lists:sort(R11)),
+ ?match({atomic, [Rec1]},mnesia:transaction(fun() -> mnesia:index_read(Tab, 4, ValPos2) end)),
+ ?match({atomic, []},mnesia:transaction(fun() -> mnesia:index_read(Tab, 14, ValPos2) end)),
+
+ ?verify_mnesia(Nodes, []).
+
+index_update_bag(suite) -> [];
+index_update_bag(Config)when is_list(Config) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = index_test,
+ Schema = [{name, Tab},
+ {type, bag},
+ {attributes, [k, v1, v2, v3]},
+ {ram_copies, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+ ValPos = v1,
+ ValPos2 = v3,
+
+ ?match({atomic, ok}, mnesia:add_table_index(Tab, ValPos)),
+
+ Pat1 = {Tab, '$1', 2, '$2', '$3'},
+ Pat2 = {Tab, '$1', '$2', '$3', '$4'},
+
+ Rec1 = {Tab, 1, 2, 3, 4},
+ Rec2 = {Tab, 2, 2, 13, 14},
+ Rec3 = {Tab, 1, 12, 13, 14},
+ Rec4 = {Tab, 4, 2, 13, 4},
+ Rec5 = {Tab, 1, 2, 234, 14},
+
+ %% Simple Index
+ ?match({atomic, []},
+ mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end)),
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(Rec1) end)),
+ ?match({atomic, [Rec1]},
+ mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end)),
+
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(Rec2) end)),
+ {atomic, R1} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end),
+ ?match([Rec1, Rec2], lists:sort(R1)),
+
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(Rec3) end)),
+ {atomic, R2} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end),
+ ?match([Rec1, Rec2], lists:sort(R2)),
+
+ {atomic, R3} = mnesia:transaction(fun() -> mnesia:index_match_object(Pat1, ValPos) end),
+ ?match([Rec1, Rec2], lists:sort(R3)),
+
+ {atomic, R4} = mnesia:transaction(fun() -> mnesia:match_object(Pat2) end),
+ ?match([Rec1, Rec3, Rec2], lists:sort(R4)),
+
+ ?match({atomic, ok},
+ mnesia:transaction(fun() -> mnesia:write(Rec4) end)),
+ {atomic, R5} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end),
+ ?match([Rec1, Rec2, Rec4], lists:sort(R5)),
+
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:delete({Tab, 4}) end)),
+ {atomic, R6} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end),
+ ?match([Rec1, Rec2], lists:sort(R6)),
+
+ %% OTP-6587 Needs some whitebox testing to see that the index table is cleaned correctly
+
+ [IPos] = mnesia_lib:val({Tab,index}),
+ ITab = mnesia_lib:val({index_test,{index, IPos}}),
+ io:format("~n Index ~p @ ~p => ~p ~n~n",[IPos,ITab, ets:tab2list(ITab)]),
+ ?match([{2,1},{2,2},{12,1}], ets:tab2list(ITab)),
+
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write(Rec5) end)),
+ {atomic, R60} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end),
+ ?match([Rec1,Rec5,Rec2], lists:sort(R60)),
+
+ ?match([{2,1},{2,2},{12,1}], ets:tab2list(ITab)),
+
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:delete_object(Rec3) end)),
+ {atomic, R61} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end),
+ ?match([Rec1,Rec5,Rec2], lists:sort(R61)),
+ {atomic, R62} = mnesia:transaction(fun() -> mnesia:index_read(Tab,12, ValPos) end),
+ ?match([], lists:sort(R62)),
+ ?match([{2,1},{2,2}], ets:tab2list(ITab)),
+
+ %% Reset for the rest of the test case
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write(Rec3) end)),
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:delete_object(Rec5) end)),
+ {atomic, R6} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end),
+ ?match([Rec1, Rec2], lists:sort(R6)),
+ %% OTP-6587
+
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:delete_object(Rec1) end)),
+ ?match({atomic, [Rec2]},
+ mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end)),
+ {atomic, R7} = mnesia:transaction(fun() -> mnesia:match_object(Pat2) end),
+ ?match([Rec3, Rec2], lists:sort(R7)),
+
+ %% Two indices
+ ?match({atomic, ok}, mnesia:del_table_index(Tab, ValPos)),
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write(Rec1) end)),
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write(Rec4) end)),
+ ?match({atomic, ok}, mnesia:add_table_index(Tab, ValPos)),
+ ?match({atomic, ok}, mnesia:add_table_index(Tab, ValPos2)),
+
+ {atomic, R8} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end),
+ ?match([Rec1, Rec2, Rec4], lists:sort(R8)),
+
+ {atomic, R9} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 4, ValPos2) end),
+ ?match([Rec1, Rec4], lists:sort(R9)),
+ {atomic, R10} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 14, ValPos2) end),
+ ?match([Rec3, Rec2], lists:sort(R10)),
+
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write(Rec5) end)),
+ {atomic, R11} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end),
+ ?match([Rec1, Rec5, Rec2, Rec4], lists:sort(R11)),
+ {atomic, R12} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 4, ValPos2) end),
+ ?match([Rec1, Rec4], lists:sort(R12)),
+ {atomic, R13} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 14, ValPos2) end),
+ ?match([Rec5, Rec3, Rec2], lists:sort(R13)),
+
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:delete_object(Rec1) end)),
+ {atomic, R14} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end),
+ ?match([Rec5, Rec2, Rec4], lists:sort(R14)),
+ ?match({atomic, [Rec4]},
+ mnesia:transaction(fun() -> mnesia:index_read(Tab, 4, ValPos2) end)),
+ {atomic, R15} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 14, ValPos2) end),
+ ?match([Rec5, Rec3, Rec2], lists:sort(R15)),
+
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:delete_object(Rec5) end)),
+ {atomic, R16} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end),
+ ?match([Rec2, Rec4], lists:sort(R16)),
+ ?match({atomic, [Rec4]}, mnesia:transaction(fun()->mnesia:index_read(Tab, 4, ValPos2) end)),
+ {atomic, R17} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 14, ValPos2) end),
+ ?match([Rec3, Rec2], lists:sort(R17)),
+
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write(Rec1) end)),
+ ?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:delete({Tab, 1}) end)),
+ {atomic, R18} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end),
+ ?match([Rec2, Rec4], lists:sort(R18)),
+ ?match({atomic, [Rec4]}, mnesia:transaction(fun()->mnesia:index_read(Tab, 4, ValPos2) end)),
+ {atomic, R19} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 14, ValPos2) end),
+ ?match([Rec2], lists:sort(R19)),
+
+ ?verify_mnesia(Nodes, []).
+
+
+index_write(suite) -> [];
+index_write(doc) -> ["See ticket OTP-8072"];
+index_write(Config) when is_list(Config) ->
+ Nodes = ?acquire_nodes(1, Config),
+ mnesia:create_table(a, [{index, [val]}]),
+ mnesia:create_table(counter, []),
+
+ CreateIfNonExist =
+ fun(Index) ->
+ case mnesia:index_read(a, Index, 3) of
+ [] ->
+ Id = mnesia:dirty_update_counter(counter, id, 1),
+ New = {a, Id, Index},
+ mnesia:write(New),
+ New;
+ [Found] ->
+ Found
+ end
+ end,
+
+ Trans = fun(A) ->
+ mnesia:transaction(CreateIfNonExist, [A])
+ %% This works better most of the time
+ %% And it is allowed to fail since it's dirty
+ %% mnesia:async_dirty(CreateIfNonExist, [A])
+ end,
+
+ Self = self(),
+ Update = fun() ->
+ Res = lists:map(Trans, lists:seq(1,10)),
+ Self ! {self(), Res}
+ end,
+
+ Pids = [spawn(Update) || _ <- lists:seq(1,5)],
+
+ Gather = fun(Pid, Acc) -> receive {Pid, Res} -> [Res|Acc] end end,
+ Results = lists:foldl(Gather, [], Pids),
+ Expected = hd(Results),
+ Check = fun(Res) -> ?match(Expected, Res) end,
+ lists:foreach(Check, Results),
+ ?verify_mnesia(Nodes, []).
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Add and drop indices
+
+index_lifecycle(suite) ->
+ [
+ add_table_index_ram,
+ add_table_index_disc,
+ add_table_index_disc_only,
+ create_live_table_index_ram,
+ create_live_table_index_disc,
+ create_live_table_index_disc_only,
+ del_table_index_ram,
+ del_table_index_disc,
+ del_table_index_disc_only,
+ idx_schema_changes
+ ].
+
+add_table_index_ram(suite) -> [];
+add_table_index_ram(Config) when is_list(Config) ->
+ add_table_index(Config, ram_copies).
+
+add_table_index_disc(suite) -> [];
+add_table_index_disc(Config) when is_list(Config) ->
+ add_table_index(Config, disc_copies).
+
+add_table_index_disc_only(suite) -> [];
+add_table_index_disc_only(Config) when is_list(Config) ->
+ add_table_index(Config, disc_only_copies).
+
+%% Add table index
+
+add_table_index(Config, Storage) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = add_table_index,
+ Schema = [{name, Tab}, {attributes, [k, v]}, {Storage, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+ ValPos = 3,
+ BadValPos = ValPos + 1,
+ ?match({aborted, Reason41 } when element(1, Reason41) == bad_type,
+ mnesia:add_table_index(Tab, BadValPos)),
+ ?match({aborted,Reason42 } when element(1, Reason42) == bad_type,
+ mnesia:add_table_index(Tab, 2)),
+ ?match({aborted, Reason43 } when element(1, Reason43) == bad_type,
+ mnesia:add_table_index(Tab, 1)),
+ ?match({aborted, Reason44 } when element(1, Reason44) == bad_type,
+ mnesia:add_table_index(Tab, 0)),
+ ?match({aborted, Reason45 } when element(1, Reason45) == bad_type,
+ mnesia:add_table_index(Tab, -1)),
+ ?match({atomic, ok}, mnesia:add_table_index(Tab, ValPos)),
+ ?match({aborted, Reason46 } when element(1, Reason46) == already_exists,
+ mnesia:add_table_index(Tab, ValPos)),
+
+ NestedFun = fun() ->
+ ?match({aborted, nested_transaction},
+ mnesia:add_table_index(Tab, ValPos)),
+ ok
+ end,
+ ?match({atomic, ok}, mnesia:transaction(NestedFun)),
+ ?verify_mnesia(Nodes, []).
+
+create_live_table_index_ram(suite) -> [];
+create_live_table_index_ram(Config) when is_list(Config) ->
+ create_live_table_index(Config, ram_copies).
+
+create_live_table_index_disc(suite) -> [];
+create_live_table_index_disc(Config) when is_list(Config) ->
+ create_live_table_index(Config, disc_copies).
+
+create_live_table_index_disc_only(suite) -> [];
+create_live_table_index_disc_only(Config) when is_list(Config) ->
+ create_live_table_index(Config, disc_only_copies).
+
+create_live_table_index(Config, Storage) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = create_live_table_index,
+ Schema = [{name, Tab}, {attributes, [k, v]}, {Storage, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+ ValPos = 3,
+ mnesia:dirty_write({Tab, 1, 2}),
+
+ Fun = fun() ->
+ ?match(ok, mnesia:write({Tab, 2, 2})),
+ ok
+ end,
+ ?match({atomic, ok}, mnesia:transaction(Fun)),
+ ?match({atomic, ok}, mnesia:add_table_index(Tab, ValPos)),
+ ?match({atomic, [{Tab, 1, 2},{Tab, 2, 2}]},
+ mnesia:transaction(fun() -> lists:sort(mnesia:index_read(Tab, 2, ValPos))
+ end)),
+ ?verify_mnesia(Nodes, []).
+
+%% Drop table index
+
+del_table_index_ram(suite) ->[];
+del_table_index_ram(Config) when is_list(Config) ->
+ del_table_index(Config, ram_copies).
+
+del_table_index_disc(suite) ->[];
+del_table_index_disc(Config) when is_list(Config) ->
+ del_table_index(Config, disc_copies).
+
+del_table_index_disc_only(suite) ->[];
+del_table_index_disc_only(Config) when is_list(Config) ->
+ del_table_index(Config, disc_only_copies).
+
+del_table_index(Config, Storage) ->
+ [Node1] = Nodes = ?acquire_nodes(1, Config),
+ Tab = del_table_index,
+ Schema = [{name, Tab}, {attributes, [k, v]}, {Storage, [Node1]}],
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+ ValPos = 3,
+ BadValPos = ValPos + 1,
+ ?match({atomic, ok}, mnesia:add_table_index(Tab, ValPos)),
+ ?match({aborted,Reason} when element(1, Reason) == no_exists,
+ mnesia:del_table_index(Tab, BadValPos)),
+ ?match({atomic, ok}, mnesia:del_table_index(Tab, ValPos)),
+
+ ?match({aborted,Reason1} when element(1, Reason1) == no_exists,
+ mnesia:del_table_index(Tab, ValPos)),
+ NestedFun =
+ fun() ->
+ ?match({aborted, nested_transaction},
+ mnesia:del_table_index(Tab, ValPos)),
+ ok
+ end,
+ ?match({atomic, ok}, mnesia:transaction(NestedFun)),
+ ?verify_mnesia(Nodes, []).
+
+idx_schema_changes(suite) -> [idx_schema_changes_ram,
+ idx_schema_changes_disc,
+ idx_schema_changes_disc_only];
+idx_schema_changes(doc) ->
+ ["Tests that index tables are handled correctly when schema changes.",
+ "For example when a replica is deleted or inserted",
+ "TICKET OTP-2XXX (ELVIRA)"].
+
+idx_schema_changes_ram(suite) -> [];
+idx_schema_changes_ram(Config) when is_list(Config) ->
+ idx_schema_changes(Config, ram_copies).
+idx_schema_changes_disc(suite) -> [];
+idx_schema_changes_disc(Config) when is_list(Config) ->
+ idx_schema_changes(Config, disc_copies).
+idx_schema_changes_disc_only(suite) -> [];
+idx_schema_changes_disc_only(Config) when is_list(Config) ->
+ idx_schema_changes(Config, disc_only_copies).
+
+idx_schema_changes(Config, Storage) ->
+ [N1, N2] = Nodes = ?acquire_nodes(2, Config),
+ Tab = index_schema_changes,
+ Idx = 3,
+ Schema = [{name, Tab}, {index, [Idx]}, {attributes, [k, v]}, {Storage, Nodes}],
+ ?match({atomic, ok}, mnesia:create_table(Schema)),
+
+ {Storage1, Storage2} =
+ case Storage of
+ disc_only_copies ->
+ {ram_copies, disc_copies};
+ disc_copies ->
+ {disc_only_copies, ram_copies};
+ ram_copies ->
+ {disc_copies, disc_only_copies}
+ end,
+
+ Write = fun(N) ->
+ mnesia:write({Tab, N, N+50})
+ end,
+
+ [mnesia:sync_transaction(Write, [N]) || N <- lists:seq(1, 10)],
+ ?match([{Tab, 1, 51}], rpc:call(N1, mnesia, dirty_index_read, [Tab, 51, Idx])),
+ ?match([{Tab, 1, 51}], rpc:call(N2, mnesia, dirty_index_read, [Tab, 51, Idx])),
+
+ ?match({atomic, ok}, mnesia:change_table_copy_type(Tab, N1, Storage1)),
+
+ ?match({atomic, ok}, rpc:call(N1, mnesia, sync_transaction, [Write, [17]])),
+ ?match({atomic, ok}, rpc:call(N2, mnesia, sync_transaction, [Write, [18]])),
+
+ ?match([{Tab, 17, 67}], rpc:call(N2, mnesia, dirty_index_read, [Tab, 67, Idx])),
+ ?match([{Tab, 18, 68}], rpc:call(N1, mnesia, dirty_index_read, [Tab, 68, Idx])),
+
+ ?match({atomic, ok}, mnesia:del_table_copy(Tab, N1)),
+ ?match({atomic, ok}, rpc:call(N1, mnesia, sync_transaction, [Write, [11]])),
+ ?match({atomic, ok}, rpc:call(N2, mnesia, sync_transaction, [Write, [12]])),
+
+ ?match([{Tab, 11, 61}], rpc:call(N2, mnesia, dirty_index_read, [Tab, 61, Idx])),
+ ?match([{Tab, 12, 62}], rpc:call(N1, mnesia, dirty_index_read, [Tab, 62, Idx])),
+
+ ?match({atomic, ok}, mnesia:move_table_copy(Tab, N2, N1)),
+ ?match({atomic, ok}, rpc:call(N1, mnesia, sync_transaction, [Write, [19]])),
+ ?match({atomic, ok}, rpc:call(N2, mnesia, sync_transaction, [Write, [20]])),
+
+ ?match([{Tab, 19, 69}], rpc:call(N2, mnesia, dirty_index_read, [Tab, 69, Idx])),
+ ?match([{Tab, 20, 70}], rpc:call(N1, mnesia, dirty_index_read, [Tab, 70, Idx])),
+
+ ?match({atomic, ok}, mnesia:add_table_copy(Tab, N2, Storage)),
+ ?match({atomic, ok}, rpc:call(N1, mnesia, sync_transaction, [Write, [13]])),
+ ?match({atomic, ok}, rpc:call(N2, mnesia, sync_transaction, [Write, [14]])),
+
+ ?match([{Tab, 13, 63}], rpc:call(N2, mnesia, dirty_index_read, [Tab, 63, Idx])),
+ ?match([{Tab, 14, 64}], rpc:call(N1, mnesia, dirty_index_read, [Tab, 64, Idx])),
+
+ ?match({atomic, ok}, mnesia:change_table_copy_type(Tab, N2, Storage2)),
+
+ ?match({atomic, ok}, rpc:call(N1, mnesia, sync_transaction, [Write, [15]])),
+ ?match({atomic, ok}, rpc:call(N2, mnesia, sync_transaction, [Write, [16]])),
+
+ ?match([{Tab, 15, 65}], rpc:call(N2, mnesia, dirty_index_read, [Tab, 65, Idx])),
+ ?match([{Tab, 16, 66}], rpc:call(N1, mnesia, dirty_index_read, [Tab, 66, Idx])),
+
+ ?verify_mnesia(Nodes, []).
diff --git a/lib/mnesia/test/mt b/lib/mnesia/test/mt
new file mode 100755
index 0000000000..25243f1149
--- /dev/null
+++ b/lib/mnesia/test/mt
@@ -0,0 +1,60 @@
+#! /bin/sh -f
+# ``The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved via the world wide web at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+# Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+# AB. All Rights Reserved.''
+#
+# $Id$
+#
+#
+# Author: Hakan Mattsson <[email protected]>
+# Purpose: Simplified execution of the test suite
+#
+# Usage: mt <args to erlang startup script>
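+#
+# Example (illustrative): running "./mt" with no extra arguments starts
+# three xterm windows with the Erlang nodes a, a1 and a2. Any arguments
+# given are forwarded verbatim to all three erl command lines, for
+# instance an extra code path (placeholder path):
+#
+#   ./mt -pa /path/to/extra/ebin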
+
+#top=".."
+top="$ERL_TOP/lib/mnesia"
+h=`hostname`
+p="-pa $top/examples -pa $top/ebin -pa $top/test -mnesia_test_verbose true"
+log=test_log$$
+latest=test_log_latest
+args=${1+"$@"}
+erlcmd="erl -sname a $p $args -mnesia_test_timeout"
+erlcmd1="erl -sname a1 $p $args"
+erlcmd2="erl -sname a2 $p $args"
+
+xterm -geometry 70x20+0+550 -T a1 -e $erlcmd1 &
+xterm -geometry 70x20+450+550 -T a2 -e $erlcmd2 &
+
+rm "$latest" 2>/dev/null
+ln -s "$log" "$latest"
+touch "$log"
+
+echo "$erlcmd1"
+echo ""
+echo "$erlcmd2"
+echo ""
+echo "$erlcmd"
+echo ""
+echo "Give the following command in order to see the outcome from node a@$h"":"
+echo ""
+echo " less test_log$$"
+
+ostype=`uname -s`
+if [ "$ostype" = "SunOS" ] ; then
+ /usr/openwin/bin/xterm -geometry 145x40+0+0 -T a -l -lf "$log" -e $erlcmd &
+else
+ xterm -geometry 145x40+0+0 -T a -e script -f -c "$erlcmd" "$log" &
+fi
+tail -f "$log" | egrep 'Eval|<>ERROR|NYI'
+
diff --git a/lib/mnesia/test/mt.erl b/lib/mnesia/test/mt.erl
new file mode 100644
index 0000000000..f69c4a11fd
--- /dev/null
+++ b/lib/mnesia/test/mt.erl
@@ -0,0 +1,262 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+%%% Author: Hakan Mattsson [email protected]
+%%% Purpose: Nice shortcuts intended for testing of Mnesia
+%%%
+%%% See the mnesia_SUITE module about the structure of
+%%% the test suite.
+%%%
+%%% See the mnesia_test_lib module about the test case execution.
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+-module(mt).
+-author('[email protected]').
+-export([
+ t/0, t/1, t/2, t/3, % Run test cases
+ loop/1, loop/2, loop/3, % loop test cases
+ doc/0, doc/1, % Generate test case doc
+ struct/0, struct/1, % View test suite struct
+ shutdown/0, ping/0, start_nodes/0, % Node admin
+ read_config/0, write_config/1 % Config admin
+ ]).
+
+-include("mnesia_test_lib.hrl").
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Aliases for the (sub) test suites
+alias(all) -> mnesia_SUITE;
+alias(atomicity) -> mnesia_atomicity_test;
+alias(backup) -> mnesia_evil_backup;
+alias(config) -> mnesia_config_test;
+alias(consistency) -> mnesia_consistency_test;
+alias(dirty) -> mnesia_dirty_access_test;
+alias(durability) -> mnesia_durability_test;
+alias(evil) -> mnesia_evil_coverage_test;
+alias(qlc) -> mnesia_qlc_test;
+alias(examples) -> mnesia_examples_test;
+alias(frag) -> mnesia_frag_test;
+alias(heavy) -> {mnesia_SUITE, heavy};
+alias(install) -> mnesia_install_test;
+alias(isolation) -> mnesia_isolation_test;
+alias(light) -> {mnesia_SUITE, light};
+alias(measure) -> mnesia_measure_test;
+alias(medium) -> {mnesia_SUITE, medium};
+alias(nice) -> mnesia_nice_coverage_test;
+alias(recover) -> mnesia_recover_test;
+alias(recovery) -> mnesia_recovery_test;
+alias(registry) -> mnesia_registry_test;
+alias(suite) -> mnesia_SUITE;
+alias(trans) -> mnesia_trans_access_test;
+alias(Other) -> Other.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Resolves the name of test suites and test cases
+%% according to the alias definitions. Single atoms
+%% are assumed to be the name of a test suite.
+resolve(Suite0) when is_atom(Suite0) ->
+ case alias(Suite0) of
+ Suite when is_atom(Suite) ->
+ {Suite, all};
+ {Suite, Case} ->
+ {Suite, Case}
+ end;
+resolve({Suite0, Case}) when is_atom(Suite0), is_atom(Case) ->
+ case alias(Suite0) of
+ Suite when is_atom(Suite) ->
+ {Suite, Case};
+ {Suite, Case2} ->
+ {Suite, Case2}
+ end;
+resolve(List) when is_list(List) ->
+ [resolve(Case) || Case <- List].
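+
+%% A few illustrative examples of the resolution above:
+%%   resolve(dirty)          -> {mnesia_dirty_access_test, all}
+%%   resolve(light)          -> {mnesia_SUITE, light}
+%%   resolve([dirty, trans]) -> [{mnesia_dirty_access_test, all},
+%%                               {mnesia_trans_access_test, all}]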
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Run one or more test cases
+
+%% Run the default test case with default config
+t() ->
+ t(read_test_case()).
+
+%% Resolve the test case name and run the test case.
+%% The test case is noted as the default test case
+%% and the outcome of the tests is written to a file.
+t(silly) ->
+ mnesia_install_test:silly();
+t(diskless) ->
+ %% Run the default test case with default config,
+ %% but diskless
+ t(read_test_case(), diskless);
+t(Case) ->
+ %% Use the default config
+ t(Case, read_config()).
+
+t(Case, Config) when Config == diskless ->
+ %% Run the test case with default config, but diskless
+ Config2 = [{diskless, true} | read_config()],
+ t(Case, Config2);
+t(Mod, Fun) when is_atom(Mod), is_atom(Fun) ->
+ %% Run the test case with default config
+ t({Mod, Fun}, read_config());
+t(RawCase, Config) when is_list(Config) ->
+ %% Resolve the test case name and run the test case
+ Case = resolve(RawCase),
+ write_test_case(Case),
+ Res = mnesia_test_lib:test(Case, Config),
+ append_test_case_info(Case, Res).
+
+t(Mod, Fun, Config) when Config == diskless ->
+ t({Mod, Fun}, diskless).
+
+config_fname() ->
+ "mnesia_test_case_config".
+
+%% Read default config file
+read_config() ->
+ Fname = config_fname(),
+ mnesia_test_lib:log("Consulting file ~s...~n", [Fname]),
+ case file:consult(Fname) of
+ {ok, Config} ->
+ mnesia_test_lib:log("Read config ~w~n", [Config]),
+ Config;
+ _Error ->
+ Config = mnesia_test_lib:default_config(),
+ mnesia_test_lib:log("<>WARNING<> Using default config: ~w~n", [Config]),
+ Config
+ end.
+
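+%% The config file holds plain Erlang terms, one per line, in the format
+%% produced by write_config/1 below.  As an illustrative example, a file
+%% containing only the line
+%%
+%%   {diskless, true}.
+%%
+%% makes the test cases run diskless by default.
+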
+%% Write new default config file
+write_config(Config) when is_list(Config) ->
+ Fname = config_fname(),
+ {ok, Fd} = file:open(Fname, write),
+ write_list(Fd, Config),
+ file:close(Fd).
+
+write_list(Fd, [H | T]) ->
+ ok = io:format(Fd, "~p.~n",[H]),
+ write_list(Fd, T);
+write_list(_, []) ->
+ ok.
+
+test_case_fname() ->
+ "mnesia_test_case_info".
+
+%% Read name of test case
+read_test_case() ->
+ Fname = test_case_fname(),
+ case file:open(Fname, [read]) of
+ {ok, Fd} ->
+ Res = io:read(Fd, []),
+ file:close(Fd),
+ case Res of
+ {ok, TestCase} ->
+ mnesia_test_lib:log("Using test case ~w from file ~s~n",
+ [TestCase, Fname]),
+ TestCase;
+ {error, _} ->
+ default_test_case(Fname)
+ end;
+ {error, _} ->
+ default_test_case(Fname)
+ end.
+
+default_test_case(Fname) ->
+ TestCase = all,
+ mnesia_test_lib:log("<>WARNING<> Cannot read file ~s, "
+ "using default test case: ~w~n",
+ [Fname, TestCase]),
+ TestCase.
+
+write_test_case(TestCase) ->
+ Fname = test_case_fname(),
+ {ok, Fd} = file:open(Fname, write),
+ ok = io:format(Fd, "~p.~n",[TestCase]),
+ file:close(Fd).
+
+append_test_case_info(TestCase, TestCaseInfo) ->
+ Fname = test_case_fname(),
+ {ok, Fd} = file:open(Fname, [read, write]),
+ ok = io:format(Fd, "~p.~n",[TestCase]),
+ ok = io:format(Fd, "~p.~n",[TestCaseInfo]),
+ file:close(Fd),
+ TestCaseInfo.
+
+%% Generate HTML pages from the test case structure
+doc() ->
+ doc(suite).
+
+doc(Case) ->
+ mnesia_test_lib:doc(resolve(Case)).
+
+%% Display the test case structure
+struct() ->
+ struct(suite).
+
+struct(Case) ->
+ mnesia_test_lib:struct([resolve(Case)]).
+
+%% Shutdown all nodes with erlang:halt/0
+shutdown() ->
+ mnesia_test_lib:shutdown().
+
+%% Ping all nodes in config spec
+ping() ->
+ Config = read_config(),
+ Nodes = mnesia_test_lib:select_nodes(all, Config, ?FILE, ?LINE),
+ [{N, net_adm:ping(N)} || N <- Nodes].
+
+%% Start all nodes in the config spec as slave nodes
+start_nodes() ->
+ Config = read_config(),
+ Nodes = mnesia_test_lib:select_nodes(all, Config, ?FILE, ?LINE),
+ mnesia_test_lib:init_nodes(Nodes, ?FILE, ?LINE),
+ ping().
+
+%% Loop one test case/suite until it fails
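+%%
+%% Illustrative examples:
+%%   mt:loop(dirty).      %% loop the dirty suite until a test case fails
+%%   mt:loop(dirty, 10).  %% run it at most 10 times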
+
+loop(Case) ->
+ loop_1(Case,-1,read_config()).
+
+loop(M,F) when is_atom(F) ->
+ loop_1({M,F},-1,read_config());
+loop(Case,N) when is_integer(N) ->
+ loop_1(Case, N,read_config()).
+
+loop(M,F,N) when is_integer(N) ->
+ loop_1({M,F},N,read_config()).
+
+loop_1(Case,N,Config) when N /= 0 ->
+ io:format("Loop test ~p ~n", [abs(N)]),
+ case ok_result(Res = t(Case,Config)) of
+ true ->
+ loop_1(Case,N-1,Config);
+ error ->
+ Res
+ end;
+loop_1(_,_,_) ->
+ ok.
+
+ok_result([{_T,{ok,_,_}}|R]) ->
+ ok_result(R);
+ok_result([{_T,{TC,List}}|R]) when is_tuple(TC), is_list(List) ->
+ ok_result(List) andalso ok_result(R);
+ok_result([]) -> true;
+ok_result(_) -> error.
diff --git a/lib/mnesia/vsn.mk b/lib/mnesia/vsn.mk
index 31cc8f8513..2780b737b6 100644
--- a/lib/mnesia/vsn.mk
+++ b/lib/mnesia/vsn.mk
@@ -1,7 +1,8 @@
-MNESIA_VSN = 4.4.13
+MNESIA_VSN = 4.4.14
-TICKETS = OTP-8402 OTP-8406
+TICKETS = OTP-8519
+#TICKETS_4.4.13 = OTP-8402 OTP-8406
#TICKETS_4.4.12 = OTP-8250
#TICKETS_4.4.11 = OTP-8074
#TICKETS_4.4.10 = OTP-7928 OTP-7968 OTP-8002
diff --git a/lib/observer/vsn.mk b/lib/observer/vsn.mk
index ff06fb992d..499cce6b97 100644
--- a/lib/observer/vsn.mk
+++ b/lib/observer/vsn.mk
@@ -1 +1 @@
-OBSERVER_VSN = 0.9.8.2
+OBSERVER_VSN = 0.9.8.3
diff --git a/lib/odbc/test/Makefile b/lib/odbc/test/Makefile
new file mode 100644
index 0000000000..935ecbf5a7
--- /dev/null
+++ b/lib/odbc/test/Makefile
@@ -0,0 +1,114 @@
+#
+# %CopyrightBegin%
+#
+# Copyright Ericsson AB 1999-2009. All Rights Reserved.
+#
+# The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved online at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# %CopyrightEnd%
+#
+include $(ERL_TOP)/make/target.mk
+include $(ERL_TOP)/make/$(TARGET)/otp.mk
+
+
+INCLUDES= -I. -I$(ERL_TOP)/lib/test_server/include/ -I$(ERL_TOP)/lib/odbc/src
+
+# ----------------------------------------------------
+# Target Specs
+# ----------------------------------------------------
+
+MODULES= \
+ odbc_start_SUITE \
+ odbc_connect_SUITE \
+ odbc_query_SUITE \
+ odbc_data_type_SUITE \
+ odbc_test_lib \
+ oracle \
+ sqlserver \
+ postgres
+
+EBIN = .
+
+ERL_FILES= $(MODULES:%=%.erl)
+
+HRL_FILES= odbc_test.hrl\
+
+TARGET_FILES= \
+ $(MODULES:%=$(EBIN)/%.$(EMULATOR))
+
+SPEC_FILES = odbc.spec odbc.dynspec \
+ odbc.spec.win
+
+EMAKEFILE = Emakefile
+MAKE_EMAKE = $(wildcard $(ERL_TOP)/make/make_emakefile)
+
+ifeq ($(MAKE_EMAKE),)
+BUILDTARGET = $(TARGET_FILES)
+RELTEST_FILES = $(SPEC_FILES) $(SOURCE)
+else
+BUILDTARGET = emakebuild
+RELTEST_FILES = $(EMAKEFILE) $(SPEC_FILES) $(SOURCE)
+endif
+
+# ----------------------------------------------------
+# Release directory specification
+# ----------------------------------------------------
+RELSYSDIR = $(RELEASE_PATH)/odbc_test
+
+# ----------------------------------------------------
+# FLAGS
+# ----------------------------------------------------
+
+ERL_COMPILE_FLAGS += $(INCLUDES) \
+
+# ----------------------------------------------------
+# Targets
+# ----------------------------------------------------
+
+tests debug opt: $(BUILDTARGET)
+
+targets: $(TARGET_FILES)
+
+.PHONY: emakebuild
+
+emakebuild: $(EMAKEFILE)
+
+$(EMAKEFILE):
+ $(MAKE_EMAKE) $(ERL_COMPILE_FLAGS) -o$(EBIN) '*_SUITE_make' | grep -v Warning > $(EMAKEFILE)
+ $(MAKE_EMAKE) $(ERL_COMPILE_FLAGS) -o$(EBIN) $(MODULES) | grep -v Warning >> $(EMAKEFILE)
+
+clean:
+ rm -f $(TARGET_FILES)
+ rm -f core
+
+docs:
+
+# ----------------------------------------------------
+# Release Target
+# ----------------------------------------------------
+include $(ERL_TOP)/make/otp_release_targets.mk
+
+release_spec: opt
+
+release_tests_spec: opt
+ $(INSTALL_DIR) $(RELSYSDIR)
+ $(INSTALL_DATA) $(SPEC_FILES) $(ERL_FILES) $(HRL_FILES) $(RELSYSDIR)
+
+
+release_docs_spec:
+
+
+
+
+
+
+
diff --git a/lib/odbc/test/README b/lib/odbc/test/README
new file mode 100644
index 0000000000..1f3c659e28
--- /dev/null
+++ b/lib/odbc/test/README
@@ -0,0 +1,86 @@
+-------------------------------------------------------------------------
+ TEST SUITE REQUIREMENTS
+-------------------------------------------------------------------------
+As third-party products are involved when using ODBC, you will have to
+set up your own test environment to be able to run the ODBC test
+suites.
+
+You need to install a database such as Postgres, SQL Server, Oracle,
+etc., and ODBC drivers for that database.
+
+Then you need to set up a test database; however, you do not
+need to create any tables, as that will be done by the test suites.
+The test suites will also remove all tables that they create when
+the tests are complete.
+
+-------------------------------------------------------------------------
+ERLANG FILES YOU MAY NEED TO CHANGE
+-------------------------------------------------------------------------
+
+Each remote database management system has a callback module that handles
+possible differences in data type handling etc.; the callback module
+also defines the ODBC connection string. Currently available callback
+modules are postgres.erl, sqlserver.erl and oracle.erl. Depending on
+how you set things up, you might want to edit the connection string in
+the callback module or even add your own callback module.
+
+The callback module used in each test case is selected by the ?RDBMS
+macro defined in odbc_test.hrl, so you might need to change this to
+suit your purposes.
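+
+A minimal callback module, along the lines of postgres.erl, could start
+out something like this (an illustrative sketch only; the module name is
+made up and the real callback modules define several more callbacks used
+by the test suites):
+
+--- Start example of a callback module ----
+
+-module(my_rdbms).
+
+-export([connection_string/0]).
+
+%% Connection string handed to odbc:connect/2 by the test suites
+%% (selected via the ?RDBMS macro in odbc_test.hrl).
+connection_string() ->
+    "DSN=Postgres;UID=odbctest".
+
+---End example of a callback module ------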
+
+-------------------------------------------------------------------------
+EXAMPLE
+-------------------------------------------------------------------------
+
+As an example, say we have the database odbctestdb, with
+the user odbctest that has the password Sesame. The database
+runs on the host myhost.
+
+UNIX/LINUX
+-----------
+
+Set up a database and install the unixODBC drivers.
+Then the unix/linux user that should run the test suites needs an .odbc.ini
+file to map connection data. For example, the ODBC connection string
+"DSN=Postgres;UID=odbctest" will need an .odbc.ini entry that looks
+something like this:
+
+--- Start example of .odbc.ini ----
+
+[Postgres]
+Driver=/usr/lib/psqlodbc.so
+Description=Postgres driver
+ServerName=myhost
+Database=odbctestdb
+Port=5432
+LogonID=odbctest
+Password=Sesame
+
+---End example of .odbc.ini ------------
+
+
+WINDOWS MOST FLAVORS
+--------------------
+
+There will be a "ODBC data source administrator" tool under
+Control Panel -> Administrative Tools, use this to set up
+your database. Choose to connect with SQL Server authentication.
+As odbc connection string use: "DSN=odbctestdb;UID=odbctest;PWD=Sesame"
+
+
+> %CopyrightBegin%
+>
+> Copyright Ericsson AB 2010. All Rights Reserved.
+>
+> The contents of this file are subject to the Erlang Public License,
+> Version 1.1, (the "License"); you may not use this file except in
+> compliance with the License. You should have received a copy of the
+> Erlang Public License along with this software. If not, it can be
+> retrieved online at http://www.erlang.org/.
+>
+> Software distributed under the License is distributed on an "AS IS"
+> basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+> the License for the specific language governing rights and limitations
+> under the License.
+>
+> %CopyrightEnd%
diff --git a/lib/odbc/test/odbc.dynspec b/lib/odbc/test/odbc.dynspec
new file mode 100644
index 0000000000..bb15edceed
--- /dev/null
+++ b/lib/odbc/test/odbc.dynspec
@@ -0,0 +1,31 @@
+%% -*- erlang -*-
+%% You can test this file using this command.
+%% file:script("odbc.dynspec", [{'Os',"Unix"}]).
+
+Exists =
+fun() ->
+ case code:lib_dir(odbc) of
+ {error,bad_name} ->
+ false;
+ P ->
+ %% Make sure that the odbc directory really
+ %% contains the application (and not only documentation).
+ case filelib:is_file(filename:join(P, "ebin/odbc.beam")) of
+ false -> false;
+ true ->
+ %% We know that the host netsim200 does not have
+ %% any odbc libraries installed.
+ {ok,Host} = inet:gethostname(),
+ Host =/= "netsim200"
+ end
+ end
+end,
+case Exists() of
+ false ->
+ NoOdbc = "No odbc application",
+ [{skip, {odbc_connect_SUITE, NoOdbc}},
+ {skip, {odbc_data_type_SUITE, NoOdbc}},
+ {skip, {odbc_query_SUITE, NoOdbc}}];
+ true ->
+ []
+end.
diff --git a/lib/odbc/test/odbc.spec b/lib/odbc/test/odbc.spec
new file mode 100644
index 0000000000..acba9f8d98
--- /dev/null
+++ b/lib/odbc/test/odbc.spec
@@ -0,0 +1,9 @@
+{topcase, {dir, "../odbc_test"}}.
+{skip, {odbc_data_type_SUITE, varchar_upper_limit, "Known bug in database"}}.
+{skip, {odbc_data_type_SUITE, text_upper_limit, "Consumes too much resources"}}.
+{skip, {odbc_data_type_SUITE, bit_true , "Not supported by driver"}}.
+{skip, {odbc_data_type_SUITE, bit_false, "Not supported by driver"}}.
+{skip, {odbc_query_SUITE, multiple_select_result_sets,"Not supported by driver"}}.
+{skip, {odbc_query_SUITE, multiple_mix_result_sets, "Not supported by driver"}}.
+{skip, {odbc_query_SUITE, multiple_result_sets_error, "Not supported by driver"}}.
+{skip, {odbc_query_SUITE, param_insert_tiny_int, "Not supported by driver"}}. \ No newline at end of file
diff --git a/lib/odbc/test/odbc.spec.win b/lib/odbc/test/odbc.spec.win
new file mode 100644
index 0000000000..1fd349d2c3
--- /dev/null
+++ b/lib/odbc/test/odbc.spec.win
@@ -0,0 +1,5 @@
+{topcase, {dir, "../odbc_test"}}.
+{skip, {odbc_data_type_SUITE, big_int_lower_limit, "Not supported by sqlserver 7.0"}}.
+{skip, {odbc_data_type_SUITE, big_int_upper_limit, "Not supported by sqlserver7.0"}}.
+{skip, {odbc_data_type_SUITE, text_upper_limit, "Consumes too much resources"}}.
+
diff --git a/lib/odbc/test/odbc_connect_SUITE.erl b/lib/odbc/test/odbc_connect_SUITE.erl
new file mode 100644
index 0000000000..4d37a8f543
--- /dev/null
+++ b/lib/odbc/test/odbc_connect_SUITE.erl
@@ -0,0 +1,816 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2002-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(odbc_connect_SUITE).
+
+%% Note: This directive should only be used in test suites.
+-compile(export_all).
+
+-include("test_server.hrl").
+-include("test_server_line.hrl").
+-include("odbc_test.hrl").
+
+-define(MAX_SEQ_TIMEOUTS, 10).
+
+%%--------------------------------------------------------------------
+%% all(Arg) -> [Doc] | [Case] | {skip, Comment}
+%% Arg - doc | suite
+%% Doc - string()
+%% Case - atom()
+%% Name of a test case function.
+%% Comment - string()
+%% Description: Returns documentation/test cases in this test suite
+%% or a skip tuple if the platform is not supported.
+%%--------------------------------------------------------------------
+all(doc) ->
+ ["Tests the ability to connect and disconnet to/from the database"];
+all(suite) ->
+ case odbc_test_lib:odbc_check() of
+ ok -> all();
+ Other -> {skip, Other}
+ end.
+
+all() ->
+ [not_exist_db, commit, rollback, not_explicit_commit,
+ no_c_node, port_dies, control_process_dies, client_dies,
+ connect_timeout, timeout, many_timeouts, timeout_reset,
+ disconnect_on_timeout, connection_closed,
+ disable_scrollable_cursors, return_rows_as_lists, api_missuse].
+
+%%--------------------------------------------------------------------
+%% Function: init_per_suite(Config) -> Config
+%% Config - [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%% Description: Initialization before the whole suite
+%%
+%% Note: This function is free to add any key/value pairs to the Config
+%% variable, but should NOT alter/remove any existing entries.
+%%--------------------------------------------------------------------
+init_per_suite(Config) ->
+ application:start(odbc),
+ case odbc:connect(?RDBMS:connection_string(),
+ [{auto_commit, off}]) of
+ {ok, Ref} ->
+ odbc:disconnect(Ref),
+ [{tableName, odbc_test_lib:unique_table_name()} | Config];
+ _ ->
+ {skip, "ODBC is not properly setup"}
+ end.
+%%--------------------------------------------------------------------
+%% Function: end_per_suite(Config) -> _
+%% Config - [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%% Description: Cleanup after the whole suite
+%%--------------------------------------------------------------------
+end_per_suite(_Config) ->
+ application:stop(odbc),
+ ok.
+
+%%--------------------------------------------------------------------
+%% Function: init_per_testcase(Case, Config) -> Config
+%% Case - atom()
+%% Name of the test case that is about to be run.
+%% Config - [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%%
+%% Description: Initialization before each test case
+%%
+%% Note: This function is free to add any key/value pairs to the Config
+%% variable, but should NOT alter/remove any existing entries.
+%%--------------------------------------------------------------------
+init_per_testcase(_TestCase, Config) ->
+ test_server:format("ODBCINI = ~p~n", [os:getenv("ODBCINI")]),
+ Dog = test_server:timetrap(?default_timeout),
+ Temp = lists:keydelete(connection_ref, 1, Config),
+ NewConfig = lists:keydelete(watchdog, 1, Temp),
+ [{watchdog, Dog} | NewConfig].
+
+%%--------------------------------------------------------------------
+%% Function: end_per_testcase(Case, Config) -> _
+%% Case - atom()
+%% Name of the test case that is about to be run.
+%% Config - [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%% Description: Cleanup after each test case
+%%--------------------------------------------------------------------
+end_per_testcase(_TestCase, Config) ->
+ %% Clean up if needed
+ Table = ?config(tableName, Config),
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(), []),
+ Result = odbc:sql_query(Ref, "DROP TABLE " ++ Table),
+ io:format("Drop table: ~p ~p~n", [Table, Result]),
+ odbc:disconnect(Ref),
+ Dog = ?config(watchdog, Config),
+ test_server:timetrap_cancel(Dog),
+ ok.
+
+%%-------------------------------------------------------------------------
+%% Test cases starts here.
+%%-------------------------------------------------------------------------
+commit(doc)->
+ ["Test the use of explicit commit"];
+commit(suite) -> [];
+commit(Config) ->
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(),
+ [{auto_commit, off}]),
+
+ Table = ?config(tableName, Config),
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer, DATA varchar(10))"),
+
+ {updated, 1} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(1,'bar')"),
+
+ {updated, 1} =
+ odbc:sql_query(Ref, "UPDATE " ++ Table ++
+ " SET DATA = 'foo' WHERE ID = 1"),
+
+ ok = odbc:commit(Ref, commit),
+ UpdateResult = ?RDBMS:update_result(),
+ UpdateResult =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+ {updated, 1} =
+ odbc:sql_query(Ref, "UPDATE " ++ Table ++
+ " SET DATA = 'bar' WHERE ID = 1"),
+ ok = odbc:commit(Ref, commit, ?TIMEOUT),
+ InsertResult = ?RDBMS:insert_result(),
+ InsertResult =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+ {'EXIT', {function_clause, _}} =
+ (catch odbc:commit(Ref, commit, -1)),
+
+ ok = odbc:disconnect(Ref),
+
+ ok.
+%%-------------------------------------------------------------------------
+
+rollback(doc)->
+ ["Test the use of explicit rollback"];
+rollback(suite) -> [];
+rollback(Config) ->
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(),
+ [{auto_commit, off}]),
+
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer, DATA varchar(10))"),
+ {updated, 1} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++ " VALUES(1,'bar')"),
+ ok = odbc:commit(Ref, commit),
+
+ {updated, 1} =
+ odbc:sql_query(Ref, "UPDATE " ++ Table ++
+ " SET DATA = 'foo' WHERE ID = 1"),
+ ok = odbc:commit(Ref, rollback),
+ InsertResult = ?RDBMS:insert_result(),
+ InsertResult =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+ {updated, 1} =
+ odbc:sql_query(Ref, "UPDATE " ++ Table ++
+ " SET DATA = 'foo' WHERE ID = 1"),
+ ok = odbc:commit(Ref, rollback, ?TIMEOUT),
+ InsertResult = ?RDBMS:insert_result(),
+ InsertResult =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+
+ {'EXIT', {function_clause, _}} =
+ (catch odbc:commit(Ref, rollback, -1)),
+
+ ok = odbc:disconnect(Ref),
+ ok.
+
+%%-------------------------------------------------------------------------
+not_explicit_commit(doc) ->
+ ["Test what happens if you try using commit on a auto_commit connection."];
+not_explicit_commit(suite) -> [];
+not_explicit_commit(_Config) ->
+ {ok, Ref} =
+ odbc:connect(?RDBMS:connection_string(), [{auto_commit, on}]),
+ {error, _} = odbc:commit(Ref, commit),
+ ok = odbc:disconnect(Ref),
+ ok.
+
+%%-------------------------------------------------------------------------
+not_exist_db(doc) ->
+ ["Tests valid data format but invalid data in the connection parameters."];
+not_exist_db(suite) -> [];
+not_exist_db(_Config) ->
+ {error, _} = odbc:connect("DSN=foo;UID=bar;PWD=foobar", []),
+ %% So that the odbc control server can be stopped "in the correct way"
+ test_server:sleep(100),
+ ok.
+
+%%-------------------------------------------------------------------------
+no_c_node(doc) ->
+ "Test what happens if the port-program can not be found";
+no_c_node(suite) -> [];
+no_c_node(_Config) ->
+ process_flag(trap_exit, true),
+ Dir = filename:nativename(filename:join(code:priv_dir(odbc),
+ "bin")),
+ FileName1 = filename:nativename(os:find_executable("odbcserver",
+ Dir)),
+ FileName2 = filename:nativename(filename:join(Dir, "odbcsrv")),
+ ok = file:rename(FileName1, FileName2),
+ Result =
+ case catch odbc:connect(?RDBMS:connection_string(), []) of
+ {error, port_program_executable_not_found} ->
+ ok;
+ Else ->
+ Else
+ end,
+
+ ok = file:rename(FileName2, FileName1),
+ ok = Result.
+%%------------------------------------------------------------------------
+
+port_dies(doc) ->
+ "Tests what happens if the port program dies";
+port_dies(suite) -> [];
+port_dies(_Config) ->
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(), []),
+ {status, _} = process_info(Ref, status),
+ process_flag(trap_exit, true),
+ Port = lists:last(erlang:ports()),
+ exit(Port, kill),
+ %% Wait 5000 ms for an exit_status from the port (we will not get an
+ %% exit status in this case), then wait a little longer to make sure
+ %% the port and the control process have had time to terminate.
+ test_server:sleep(7000),
+ undefined = process_info(Ref, status),
+ ok.
+
+%%-------------------------------------------------------------------------
+control_process_dies(doc) ->
+ "Tests what happens if the Erlang control process dies";
+control_process_dies(suite) -> [];
+control_process_dies(_Config) ->
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(), []),
+ process_flag(trap_exit, true),
+ Port = lists:last(erlang:ports()),
+ {connected, Ref} = erlang:port_info(Port, connected),
+ exit(Ref, kill),
+ test_server:sleep(100),
+ undefined = erlang:port_info(Port, connected),
+ %% Check for c-program still running, how?
+ ok.
+
+%%-------------------------------------------------------------------------
+client_dies(doc) ->
+ ["Test that the odbc process is terminated when the client process "
+ "dies"];
+client_dies(suite) ->
+ [client_dies_normal, client_dies_timeout, client_dies_error].
+
+%%-------------------------------------------------------------------------
+client_dies_normal(doc) ->
+ ["Client dies with reason normal."];
+client_dies_normal(suite) -> [];
+client_dies_normal(Config) when is_list(Config) ->
+ Pid = spawn(?MODULE, client_normal, [self()]),
+
+ MonitorReference =
+ receive
+ {dbRef, Ref} ->
+ MRef = erlang:monitor(process, Ref),
+ Pid ! continue,
+ MRef
+ end,
+
+ receive
+ {'DOWN', MonitorReference, _Type, _Object, _Info} ->
+ ok
+ after 5000 ->
+ test_server:fail(control_process_not_stopped)
+ end.
+
+client_normal(Pid) ->
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(), []),
+ Pid ! {dbRef, Ref},
+ receive
+ continue ->
+ ok
+ end,
+ exit(self(), normal).
+
+
+%%-------------------------------------------------------------------------
+client_dies_timeout(doc) ->
+ ["Client dies with reason timeout."];
+client_dies_timeout(suite) -> [];
+client_dies_timeout(Config) when is_list(Config) ->
+ Pid = spawn(?MODULE, client_timeout, [self()]),
+
+ MonitorReference =
+ receive
+ {dbRef, Ref} ->
+ MRef = erlang:monitor(process, Ref),
+ Pid ! continue,
+ MRef
+ end,
+
+ receive
+ {'DOWN', MonitorReference, _Type, _Object, _Info} ->
+ ok
+ after 5000 ->
+ test_server:fail(control_process_not_stopped)
+ end.
+
+client_timeout(Pid) ->
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(), []),
+ Pid ! {dbRef, Ref},
+ receive
+ continue ->
+ ok
+ end,
+ exit(self(), timeout).
+
+
+%%-------------------------------------------------------------------------
+client_dies_error(doc) ->
+ ["Client dies with reason error."];
+client_dies_error(suite) -> [];
+client_dies_error(Config) when is_list(Config) ->
+ Pid = spawn(?MODULE, client_error, [self()]),
+
+ MonitorReference =
+ receive
+ {dbRef, Ref} ->
+ MRef = erlang:monitor(process, Ref),
+ Pid ! continue,
+ MRef
+ end,
+
+ receive
+ {'DOWN', MonitorReference, _Type, _Object, _Info} ->
+ ok
+ after 5000 ->
+ test_server:fail(control_process_not_stopped)
+ end.
+
+client_error(Pid) ->
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(), []),
+ Pid ! {dbRef, Ref},
+ receive
+ continue ->
+ ok
+ end,
+ exit(self(), error).
+
+
+%%-------------------------------------------------------------------------
+connect_timeout(doc) ->
+ ["Test the timeout for the connect function."];
+connect_timeout(suite) -> [];
+connect_timeout(Config) when is_list(Config) ->
+ {'EXIT',timeout} = (catch odbc:connect(?RDBMS:connection_string(),
+ [{timeout, 0}])),
+ ok.
+%%-------------------------------------------------------------------------
+timeout(doc) ->
+ ["Test that timeouts don't cause unwanted behavior sush as receiving"
+ " an anwser to a previously tiemed out query."];
+timeout(suite) -> [];
+timeout(Config) when is_list(Config) ->
+
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(),
+ [{auto_commit, off}]),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer, DATA varchar(10), PRIMARY KEY(ID))"),
+
+ {updated, 1} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++ " VALUES(1,'bar')"),
+
+ ok = odbc:commit(Ref, commit),
+
+ {updated, 1} =
+ odbc:sql_query(Ref, "UPDATE " ++ Table ++
+ " SET DATA = 'foo' WHERE ID = 1"),
+
+ {updated, 1} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++ " VALUES(2,'baz')"),
+
+ Pid = spawn_link(?MODULE, update_table_timeout, [Table, 5000, self()]),
+
+ receive
+ timout_occurred ->
+ ok = odbc:commit(Ref, commit),
+ Pid ! continue
+ end,
+
+ receive
+ altered ->
+ ok
+ end,
+
+ {selected, Fields, [{"foobar"}]} =
+ odbc:sql_query(Ref, "SELECT DATA FROM " ++ Table ++ " WHERE ID = 1"),
+ ["DATA"] = odbc_test_lib:to_upper(Fields),
+
+ ok = odbc:commit(Ref, commit),
+ ok = odbc:disconnect(Ref),
+ ok.
+
+
+update_table_timeout(Table, TimeOut, Pid) ->
+
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(),
+ [{auto_commit, off}]),
+ UpdateQuery = "UPDATE " ++ Table ++ " SET DATA = 'foobar' WHERE ID = 1",
+
+ case catch odbc:sql_query(Ref, UpdateQuery, TimeOut) of
+ {'EXIT', timeout} ->
+ Pid ! timout_occurred;
+ {updated, 1} ->
+ test_server:fail(database_locker_failed)
+ end,
+
+ receive
+ continue ->
+ ok
+ end,
+
+ %% Make sure we receive the correct result and not the answer
+ %% to the previous query.
+ {selected, Fields, [{"baz"}]} =
+ odbc:sql_query(Ref, "SELECT DATA FROM " ++ Table ++ " WHERE ID = 2"),
+ ["DATA"] = odbc_test_lib:to_upper(Fields),
+
+ {updated, 1} = odbc:sql_query(Ref, UpdateQuery, TimeOut),
+
+ ok = odbc:commit(Ref, commit),
+
+ Pid ! altered,
+
+ ok = odbc:disconnect(Ref),
+
+ ok.
+%%-------------------------------------------------------------------------
+many_timeouts(doc) ->
+ ["Tests that many consecutive timeouts lead to that the connection "
+ "is shutdown."];
+many_timeouts(suite) -> [];
+many_timeouts(Config) when is_list(Config) ->
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(),
+ [{auto_commit, off}]),
+
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer, DATA varchar(10), PRIMARY KEY(ID))"),
+
+ {updated, 1} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++ " VALUES(1,'bar')"),
+
+ ok = odbc:commit(Ref, commit),
+
+ {updated, 1} =
+ odbc:sql_query(Ref, "UPDATE " ++ Table ++
+ " SET DATA = 'foo' WHERE ID = 1"),
+
+ _Pid = spawn_link(?MODULE, update_table_many_timeouts,
+ [Table, 5000, self()]),
+
+ receive
+ many_timeouts_occurred ->
+ ok
+ end,
+
+ ok = odbc:commit(Ref, commit),
+ ok = odbc:disconnect(Ref),
+ ok.
+
+
+update_table_many_timeouts(Table, TimeOut, Pid) ->
+
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(),
+ [{auto_commit, off}]),
+ UpdateQuery = "UPDATE " ++ Table ++ " SET DATA = 'foobar' WHERE ID = 1",
+
+ ok = loop_many_timouts(Ref, UpdateQuery, TimeOut),
+
+ Pid ! many_timeouts_occurred,
+
+ ok = odbc:disconnect(Ref),
+ ok.
+
+
+loop_many_timouts(Ref, UpdateQuery, TimeOut) ->
+ case catch odbc:sql_query(Ref, UpdateQuery, TimeOut) of
+ {'EXIT',timeout} ->
+ loop_many_timouts(Ref, UpdateQuery, TimeOut);
+ {updated, 1} ->
+ test_server:fail(database_locker_failed);
+ {error, connection_closed} ->
+ ok
+ end.
+%%-------------------------------------------------------------------------
+timeout_reset(doc) ->
+ ["Check that the number of consecutive timouts is reset to 0 when "
+ "a successful call to the database is made."];
+timeout_reset(suite) -> [];
+timeout_reset(Config) when is_list(Config) ->
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(),
+ [{auto_commit, off}]),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer, DATA varchar(10), PRIMARY KEY(ID))"),
+
+ {updated, 1} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++ " VALUES(1,'bar')"),
+
+ ok = odbc:commit(Ref, commit),
+
+ {updated, 1} =
+ odbc:sql_query(Ref, "UPDATE " ++ Table ++
+ " SET DATA = 'foo' WHERE ID = 1"),
+
+ {updated, 1} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++ " VALUES(2,'baz')"),
+
+
+ Pid = spawn_link(?MODULE, update_table_timeout_reset,
+ [Table, 5000, self()]),
+
+ receive
+ many_timeouts_occurred ->
+ ok
+ end,
+
+ ok = odbc:commit(Ref, commit),
+ Pid ! continue,
+
+ receive
+ altered ->
+ ok
+ end,
+
+ {selected, Fields, [{"foobar"}]} =
+ odbc:sql_query(Ref, "SELECT DATA FROM " ++ Table ++ " WHERE ID = 1"),
+ ["DATA"] = odbc_test_lib:to_upper(Fields),
+
+ ok = odbc:commit(Ref, commit),
+ ok = odbc:disconnect(Ref),
+ ok.
+
+update_table_timeout_reset(Table, TimeOut, Pid) ->
+
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(),
+ [{auto_commit, off}]),
+ UpdateQuery = "UPDATE " ++ Table ++ " SET DATA = 'foobar' WHERE ID = 1",
+
+ ok = loop_timout_reset(Ref, UpdateQuery, TimeOut,
+ ?MAX_SEQ_TIMEOUTS-1),
+
+ Pid ! many_timeouts_occurred,
+
+ receive
+ continue ->
+ ok
+ end,
+
+ {selected, Fields, [{"baz"}]} =
+ odbc:sql_query(Ref, "SELECT DATA FROM " ++ Table ++ " WHERE ID = 2"),
+ ["DATA"] = odbc_test_lib:to_upper(Fields),
+
+ {updated,1} = odbc:sql_query(Ref, UpdateQuery, TimeOut),
+
+ ok = odbc:commit(Ref, commit),
+
+ Pid ! altered,
+
+ ok = odbc:disconnect(Ref),
+
+ ok.
+
+loop_timout_reset(_, _, _, 0) ->
+ ok;
+
+loop_timout_reset(Ref, UpdateQuery, TimeOut, NumTimeouts) ->
+ case catch odbc:sql_query(Ref, UpdateQuery, TimeOut) of
+ {'EXIT',timeout} ->
+ loop_timout_reset(Ref, UpdateQuery,
+ TimeOut, NumTimeouts - 1);
+ {updated, 1} ->
+ test_server:fail(database_locker_failed);
+ {error, connection_closed} ->
+ test_server:fail(connection_closed_premature)
+ end.
+
+%%-------------------------------------------------------------------------
+
+disconnect_on_timeout(doc) ->
+ ["Check that disconnect after a time out works properly"];
+disconnect_on_timeout(suite) -> [];
+disconnect_on_timeout(Config) when is_list(Config) ->
+
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(),
+ [{auto_commit, off}]),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer, DATA varchar(10), PRIMARY KEY(ID))"),
+
+ {updated, 1} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++ " VALUES(1,'bar')"),
+
+ ok = odbc:commit(Ref, commit),
+
+ {updated, 1} =
+ odbc:sql_query(Ref, "UPDATE " ++ Table ++
+ " SET DATA = 'foo' WHERE ID = 1"),
+
+
+ _Pid = spawn_link(?MODULE, update_table_disconnect_on_timeout,
+ [Table, 5000, self()]),
+ receive
+ ok ->
+ ok = odbc:commit(Ref, commit);
+ nok ->
+ test_server:fail(database_locker_failed)
+ end.
+
+update_table_disconnect_on_timeout(Table, TimeOut, Pid) ->
+
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(),
+ [{auto_commit, off}]),
+ UpdateQuery = "UPDATE " ++ Table ++ " SET DATA = 'foobar' WHERE ID = 1",
+
+ case catch odbc:sql_query(Ref, UpdateQuery, TimeOut) of
+ {'EXIT', timeout} ->
+ ok = odbc:disconnect(Ref),
+ Pid ! ok;
+ {updated, 1} ->
+ Pid ! nok
+ end.
+
+%%-------------------------------------------------------------------------
+connection_closed(doc) ->
+ ["Checks that you get an appropriate error message if you try to"
+ " use a connection that has been closed"];
+connection_closed(suite) -> [];
+connection_closed(Config) when is_list(Config) ->
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(), []),
+
+ Table = ?config(tableName, Config),
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer, DATA char(10), PRIMARY KEY(ID))"),
+
+ ok = odbc:disconnect(Ref),
+
+ {error, connection_closed} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++ " VALUES(1,'bar')"),
+ {error, connection_closed} =
+ odbc:select_count(Ref, "SELECT * FROM " ++ Table),
+ {error, connection_closed} = odbc:first(Ref),
+ {error, connection_closed} = odbc:last(Ref),
+ {error, connection_closed} = odbc:next(Ref),
+ {error, connection_closed} = odbc:prev(Ref),
+ {error, connection_closed} = odbc:select(Ref, next, 3),
+ {error, connection_closed} = odbc:commit(Ref, commit),
+ ok.
+
+%%-------------------------------------------------------------------------
+disable_scrollable_cursors(doc) ->
+ ["Test disabling of scrollable cursors."];
+disable_scrollable_cursors(suite) -> [];
+disable_scrollable_cursors(Config) when is_list(Config) ->
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(),
+ [{scrollable_cursors, off}]),
+
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer, DATA varchar(10), PRIMARY KEY(ID))"),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++ " VALUES(1,'bar')"),
+
+ {ok, _} = odbc:select_count(Ref, "SELECT ID FROM " ++ Table),
+
+ NextResult = ?RDBMS:selected_ID(1, next),
+
+ test_server:format("Expected: ~p~n", [NextResult]),
+
+ Result = odbc:next(Ref),
+ test_server:format("Got: ~p~n", [Result]),
+ NextResult = Result,
+
+ {error, scrollable_cursors_disabled} = odbc:first(Ref),
+ {error, scrollable_cursors_disabled} = odbc:last(Ref),
+ {error, scrollable_cursors_disabled} = odbc:prev(Ref),
+ {error, scrollable_cursors_disabled} =
+ odbc:select(Ref, {relative, 2}, 5),
+ {error, scrollable_cursors_disabled} =
+ odbc:select(Ref, {absolute, 2}, 5),
+
+ {selected, _ColNames,[]} = odbc:select(Ref, next, 1),
+ ok.
+
+%%-------------------------------------------------------------------------
+return_rows_as_lists(doc)->
+ ["Test the option that a row may be returned as a list instead "
+ "of a tuple. Too be somewhat backward compatible."];
+return_rows_as_lists(suite) -> [];
+return_rows_as_lists(Config) when is_list(Config) ->
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(),
+ [{tuple_row, off}]),
+
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer, DATA varchar(10), PRIMARY KEY(ID))"),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++ " VALUES(1,'bar')"),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++ " VALUES(2,'foo')"),
+
+ ListRows = ?RDBMS:selected_list_rows(),
+ ListRows =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+ {ok, _} = odbc:select_count(Ref, "SELECT * FROM " ++ Table),
+
+ First = ?RDBMS:first_list_rows(),
+ Last = ?RDBMS:last_list_rows(),
+ Prev = ?RDBMS:prev_list_rows(),
+ Next = ?RDBMS:next_list_rows(),
+
+ Last = odbc:last(Ref),
+ Prev = odbc:prev(Ref),
+ First = odbc:first(Ref),
+ Next = odbc:next(Ref),
+ ok.
+
+%%-------------------------------------------------------------------------
+
+api_missuse(doc)->
+ ["Test that behaviour of the control process if the api is abused"];
+api_missuse(suite) -> [];
+api_missuse(Config) when is_list(Config)->
+
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(), []),
+ %% Serious programming fault, the connection will be shut down
+ gen_server:call(Ref, {self(), foobar, 10}, infinity),
+ test_server:sleep(10),
+ undefined = process_info(Ref, status),
+
+ {ok, Ref2} = odbc:connect(?RDBMS:connection_string(), []),
+ %% Serious programming fault, the connection will be shut down
+ gen_server:cast(Ref2, {self(), foobar, 10}),
+ test_server:sleep(10),
+ undefined = process_info(Ref2, status),
+
+ {ok, Ref3} = odbc:connect(?RDBMS:connection_string(), []),
+ %% Could be an innocent mistake, the connection lives on.
+ Ref3 ! foobar,
+ test_server:sleep(10),
+ {status, _} = process_info(Ref3, status),
+ ok.
+
diff --git a/lib/odbc/test/odbc_data_type_SUITE.erl b/lib/odbc/test/odbc_data_type_SUITE.erl
new file mode 100644
index 0000000000..7d4a0ca15f
--- /dev/null
+++ b/lib/odbc/test/odbc_data_type_SUITE.erl
@@ -0,0 +1,1498 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2002-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(odbc_data_type_SUITE).
+
+%% Note: This directive should only be used in test suites.
+-compile(export_all).
+
+-include("test_server.hrl").
+-include_lib("stdlib/include/ms_transform.hrl").
+-include("test_server_line.hrl").
+-include("odbc_test.hrl").
+
+%%--------------------------------------------------------------------
+%% all(Arg) -> [Doc] | [Case] | {skip, Comment}
+%% Arg - doc | suite
+%% Doc - string()
+%% Case - atom()
+%% Name of a test case function.
+%% Comment - string()
+%% Description: Returns documentation/test cases in this test suite
+%% or a skip tuple if the platform is not supported.
+%%--------------------------------------------------------------------
+all(doc) ->
+ ["Tests data types"];
+all(suite) ->
+ case odbc_test_lib:odbc_check() of
+ ok -> all();
+ Other -> {skip,Other}
+ end.
+
+all() ->
+ [char, int, floats, dec_and_num, timestamp].
+
+%%--------------------------------------------------------------------
+%% Function: init_per_suite(Config) -> Config
+%% Config - [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%% Description: Initialization before the whole suite
+%%
+%% Note: This function is free to add any key/value pairs to the Config
+%% variable, but should NOT alter/remove any existing entries.
+%%--------------------------------------------------------------------
+init_per_suite(Config) ->
+ application:start(odbc),
+ [{tableName, odbc_test_lib:unique_table_name()} | Config].
+
+%%--------------------------------------------------------------------
+%% Function: end_per_suite(Config) -> _
+%% Config - [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%% Description: Cleanup after the whole suite
+%%--------------------------------------------------------------------
+end_per_suite(_Config) ->
+ application:stop(odbc),
+ ok.
+
+%%--------------------------------------------------------------------
+%% Function: init_per_testcase(Case, Config) -> Config
+%% Case - atom()
+%% Name of the test case that is about to be run.
+%% Config - [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%%
+%% Description: Initialization before each test case
+%%
+%% Note: This function is free to add any key/value pairs to the Config
+%% variable, but should NOT alter/remove any existing entries.
+%%--------------------------------------------------------------------
+init_per_testcase(Case, Config) ->
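+    %% Test cases whose names start with "binary", and the unicode case, need
+    %% the connection option {binary_strings, on} so that character data is
+    %% returned as Erlang binaries rather than as lists.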
+ case atom_to_list(Case) of
+ "binary" ++ _ ->
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(),
+ [{binary_strings, on}]);
+ "unicode" ->
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(),
+ [{binary_strings, on}]);
+ _ ->
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(), [])
+ end,
+ Dog = test_server:timetrap(?default_timeout),
+ Temp = lists:keydelete(connection_ref, 1, Config),
+ NewConfig = lists:keydelete(watchdog, 1, Temp),
+ [{watchdog, Dog}, {connection_ref, Ref} | NewConfig].
+
+%%--------------------------------------------------------------------
+%% Function: end_per_testcase(Case, Config) -> _
+%% Case - atom()
+%% Name of the test case that is about to be run.
+%% Config - [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%% Description: Cleanup after each test case
+%%--------------------------------------------------------------------
+end_per_testcase(_TestCase, Config) ->
+ Ref = ?config(connection_ref, Config),
+ ok = odbc:disconnect(Ref),
+ %% Clean up if needed
+ Table = ?config(tableName, Config),
+ {ok, NewRef} = odbc:connect(?RDBMS:connection_string(), []),
+ odbc:sql_query(NewRef, "DROP TABLE " ++ Table),
+ odbc:disconnect(NewRef),
+ Dog = ?config(watchdog, Config),
+ test_server:timetrap_cancel(Dog),
+ ok.
+
+%%-------------------------------------------------------------------------
+%% Test cases starts here.
+%%-------------------------------------------------------------------------
+char(doc) ->
+ ["Tests char data types"];
+
+char(suite) ->
+ [char_fixed_lower_limit, char_fixed_upper_limit,
+ char_fixed_padding, varchar_lower_limit, varchar_upper_limit,
+ varchar_no_padding, text_lower_limit, text_upper_limit, unicode
+ ].
+
+char_fixed_lower_limit(doc) ->
+ ["Tests fixed length char data type lower boundaries."];
+char_fixed_lower_limit(suite) ->
+ [];
+char_fixed_lower_limit(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ %% Below limit
+ {error, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_fixed_char_table(
+ (?RDBMS:fixed_char_min() - 1))),
+ %% Lower limit
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_fixed_char_table(
+ ?RDBMS:fixed_char_min())),
+
+ %% Right length data
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ string:chars($a, ?RDBMS:fixed_char_min())
+ ++ "')"),
+ %% Select data
+ {selected, Fields,[{"a"}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+
+ ["FIELD"] = odbc_test_lib:to_upper(Fields),
+
+ %% Too long data
+ {error, _} =
+ odbc:sql_query(Ref,"INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ string:chars($a,
+ (?RDBMS:fixed_char_min()
+ + 1))
+ ++ "')"),
+ ok.
+%%-------------------------------------------------------------------------
+
+char_fixed_upper_limit(doc) ->
+ ["Tests fixed length char data type upper boundaries."];
+char_fixed_upper_limit(suite) ->
+ [];
+char_fixed_upper_limit(Config) when is_list(Config) ->
+
+ case ?RDBMS of
+ postgres ->
+ {skip, "Limit unknown"};
+ _ ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ %% Upper limit
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_fixed_char_table(
+ ?RDBMS:fixed_char_max())),
+ {updated, _} =
+ odbc:sql_query(Ref,"INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ string:chars($a,
+ ?RDBMS:fixed_char_max())
+ ++ "')"),
+ %% Select data
+ {selected, Fields, [{CharStr}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ true = length(CharStr) == ?RDBMS:fixed_char_max(),
+
+ ["FIELD"] = odbc_test_lib:to_upper(Fields),
+
+ %% Too long data
+ {error, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ string:chars($a,
+ (?RDBMS:fixed_char_max()
+ + 1))
+ ++ "')"),
+ %% Clean up
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "DROP TABLE " ++ Table),
+
+ %% Above limit
+ {error, _} =
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_fixed_char_table(
+ (?RDBMS:fixed_char_max() + 1))),
+ ok
+ end.
+
+%%-------------------------------------------------------------------------
+
+char_fixed_padding(doc) ->
+ ["Tests that data that is shorter than the given size is padded "
+ "with blanks."];
+char_fixed_padding(suite) ->
+ [];
+char_fixed_padding(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ %% Data should be padded with blanks
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_fixed_char_table(
+ ?RDBMS:fixed_char_max())),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ string:chars($a,
+ ?RDBMS:fixed_char_min())
+ ++ "')"),
+
+ {selected, Fields, [{CharStr}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ true = length(CharStr) == ?RDBMS:fixed_char_max(),
+ ["FIELD"] = odbc_test_lib:to_upper(Fields),
+ ok.
+%%-------------------------------------------------------------------------
+
+varchar_lower_limit(doc) ->
+ ["Tests variable length char data type lower boundaries."];
+varchar_lower_limit(suite) ->
+ [];
+varchar_lower_limit(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ %% Below limit
+ {error, _} =
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_var_char_table(
+ ?RDBMS:var_char_min() - 1)),
+ %% Lower limit
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_var_char_table(
+ ?RDBMS:var_char_min())),
+
+ %% Right length data
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ string:chars($a, ?RDBMS:var_char_min())
+ ++ "')"),
+ %% Select data
+ {selected, Fields, [{"a"}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+
+ ["FIELD"] = odbc_test_lib:to_upper(Fields),
+
+ %% Too long data
+ {error, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ string:chars($a,
+ (?RDBMS:var_char_min()+1))
+ ++ "')"),
+ ok.
+
+%%-------------------------------------------------------------------------
+
+varchar_upper_limit(doc) ->
+ ["Tests variable length char data type upper boundaries."];
+varchar_upper_limit(suite) ->
+ [];
+varchar_upper_limit(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ case ?RDBMS of
+ oracle ->
+ {skip, "Known bug in database"};
+ postgres ->
+ {skip, "Limit unknown"};
+ _ ->
+ %% Upper limit
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_var_char_table(
+ ?RDBMS:var_char_max())),
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ string:chars($a,
+ ?RDBMS:var_char_max())
+ ++ "')"),
+
+ {selected, Fields, [{CharStr}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ true = length(CharStr) == ?RDBMS:var_char_max(),
+
+ ["FIELD"] = odbc_test_lib:to_upper(Fields),
+
+ %% Too long data
+ {error, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ string:chars($a,
+ (?RDBMS:var_char_max()+1))
+ ++ "')"),
+ %% Clean up
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "DROP TABLE " ++ Table),
+
+ %% Above limit
+ {error, _} =
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_var_char_table(
+ (?RDBMS:var_char_max() + 1))),
+ ok
+ end.
+%%-------------------------------------------------------------------------
+
+varchar_no_padding(doc) ->
+ ["Tests that data that is shorter than the given max size is not padded "
+ "with blanks."];
+varchar_no_padding(suite) ->
+ [];
+varchar_no_padding(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ %% Data should NOT be padded with blanks
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_var_char_table(
+ ?RDBMS:var_char_max())),
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ string:chars($a, ?RDBMS:var_char_min())
+ ++ "')"),
+
+ {selected, Fields, [{CharStr}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ true = length(CharStr) /= ?RDBMS:var_char_max(),
+ ["FIELD"] = odbc_test_lib:to_upper(Fields),
+ ok.
+
+%%-------------------------------------------------------------------------
+
+text_lower_limit(doc) ->
+ ["Tests 'long' char data type lower boundaries."];
+text_lower_limit(suite) ->
+ [];
+text_lower_limit(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_text_table()),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ string:chars($a, ?RDBMS:text_min())
+ ++ "')"),
+
+ {selected, Fields, [{"a"}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ ["FIELD"] = odbc_test_lib:to_upper(Fields),
+ ok.
+
+%%-------------------------------------------------------------------------
+
+text_upper_limit(doc) ->
+ [];
+text_upper_limit(suite) ->
+ [];
+text_upper_limit(Config) when is_list(Config) ->
+
+    {skip, "Consumes too many resources"}.
+%% Ref = ?config(connection_ref, Config),
+%% Table = ?config(tableName, Config),
+
+%% {updated, _} = % Value == 0 || -1 driver dependent!
+%% odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+%% ?RDBMS:create_text_table()),
+%% {updated, _} =
+%% odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+%% "'" ++ string:chars($a, ?RDBMS:text_max())
+%% ++ "')"),
+
+%% {selected, Fields, [{CharStr}]} =
+%% odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+%% length(CharStr) == ?RDBMS:text_max(),
+%% ["FIELD"] = odbc_test_lib:to_upper(Fields),
+
+%% {error, _} =
+%% odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+%% "'" ++ string:chars($a, (?RDBMS:text_max()+1))
+%% ++ "')"),
+%% ok.
+
+%%-------------------------------------------------------------------------
+binary_char(doc) ->
+    ["Tests char data types returned as Erlang binaries"];
+
+binary_char(suite) ->
+ [binary_char_fixed_lower_limit, binary_char_fixed_upper_limit,
+ binary_char_fixed_padding, binary_varchar_lower_limit, binary_varchar_upper_limit,
+ binary_varchar_no_padding, binary_text_lower_limit, binary_text_upper_limit, unicode
+ ].
+
+binary_char_fixed_lower_limit(doc) ->
+ ["Tests fixed length char data type lower boundaries."];
+binary_char_fixed_lower_limit(suite) ->
+ [];
+binary_char_fixed_lower_limit(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ %% Below limit
+ {error, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_fixed_char_table(
+ (?RDBMS:fixed_char_min() - 1))),
+ %% Lower limit
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_fixed_char_table(
+ ?RDBMS:fixed_char_min())),
+
+ %% Right length data
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ string:chars($a, ?RDBMS:fixed_char_min())
+ ++ "')"),
+ %% Select data
+ {selected, Fields,[{<<"a">>}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+
+ ["FIELD"] = odbc_test_lib:to_upper(Fields),
+
+ %% Too long data
+ {error, _} =
+ odbc:sql_query(Ref,"INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ string:chars($a,
+ (?RDBMS:fixed_char_min()
+ + 1))
+ ++ "')"),
+ ok.
+%%-------------------------------------------------------------------------
+
+binary_char_fixed_upper_limit(doc) ->
+ ["Tests fixed length char data type upper boundaries."];
+binary_char_fixed_upper_limit(suite) ->
+ [];
+binary_char_fixed_upper_limit(Config) when is_list(Config) ->
+
+ case ?RDBMS of
+ postgres ->
+ {skip, "Limit unknown"};
+ _ ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ %% Upper limit
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_fixed_char_table(
+ ?RDBMS:fixed_char_max())),
+ {updated, _} =
+ odbc:sql_query(Ref,"INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ string:chars($a,
+ ?RDBMS:fixed_char_max())
+ ++ "')"),
+ %% Select data
+ {selected, Fields, [{CharBin}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ true = size(CharBin) == ?RDBMS:fixed_char_max(),
+
+ ["FIELD"] = odbc_test_lib:to_upper(Fields),
+
+ %% Too long data
+ {error, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ string:chars($a,
+ (?RDBMS:fixed_char_max()
+ + 1))
+ ++ "')"),
+ %% Clean up
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "DROP TABLE " ++ Table),
+
+ %% Above limit
+ {error, _} =
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_fixed_char_table(
+ (?RDBMS:fixed_char_max() + 1))),
+ ok
+ end.
+
+%%-------------------------------------------------------------------------
+
+binary_char_fixed_padding(doc) ->
+ ["Tests that data that is shorter than the given size is padded "
+ "with blanks."];
+binary_char_fixed_padding(suite) ->
+ [];
+binary_char_fixed_padding(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ %% Data should be padded with blanks
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_fixed_char_table(
+ ?RDBMS:fixed_char_max())),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ string:chars($a,
+ ?RDBMS:fixed_char_min())
+ ++ "')"),
+
+ {selected, Fields, [{CharBin}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ true = size(CharBin) == ?RDBMS:fixed_char_max(),
+ ["FIELD"] = odbc_test_lib:to_upper(Fields),
+ ok.
+%%-------------------------------------------------------------------------
+
+binary_varchar_lower_limit(doc) ->
+ ["Tests variable length char data type lower boundaries."];
+binary_varchar_lower_limit(suite) ->
+ [];
+binary_varchar_lower_limit(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ %% Below limit
+ {error, _} =
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_var_char_table(
+ ?RDBMS:var_char_min() - 1)),
+ %% Lower limit
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_var_char_table(
+ ?RDBMS:var_char_min())),
+
+ %% Right length data
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ string:chars($a, ?RDBMS:var_char_min())
+ ++ "')"),
+ %% Select data
+ {selected, Fields, [{<<"a">>}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+
+ ["FIELD"] = odbc_test_lib:to_upper(Fields),
+
+ %% Too long data
+ {error, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ string:chars($a,
+ (?RDBMS:var_char_min()+1))
+ ++ "')"),
+ ok.
+
+%%-------------------------------------------------------------------------
+
+binary_varchar_upper_limit(doc) ->
+ ["Tests variable length char data type upper boundaries."];
+binary_varchar_upper_limit(suite) ->
+ [];
+binary_varchar_upper_limit(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ case ?RDBMS of
+ oracle ->
+ {skip, "Known bug in database"};
+ postgres ->
+ {skip, "Limit unknown"};
+ _ ->
+ %% Upper limit
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_var_char_table(
+ ?RDBMS:var_char_max())),
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ string:chars($a,
+ ?RDBMS:var_char_max())
+ ++ "')"),
+
+ {selected, Fields, [{CharBin}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ true = size(CharBin) == ?RDBMS:var_char_max(),
+
+ ["FIELD"] = odbc_test_lib:to_upper(Fields),
+
+ %% Too long data
+ {error, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ string:chars($a,
+ (?RDBMS:var_char_max()+1))
+ ++ "')"),
+ %% Clean up
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "DROP TABLE " ++ Table),
+
+ %% Above limit
+ {error, _} =
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_var_char_table(
+ (?RDBMS:var_char_max() + 1))),
+ ok
+ end.
+%%-------------------------------------------------------------------------
+
+binary_varchar_no_padding(doc) ->
+ ["Tests that data that is shorter than the given max size is not padded "
+ "with blanks."];
+binary_varchar_no_padding(suite) ->
+ [];
+binary_varchar_no_padding(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ %% Data should NOT be padded with blanks
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_var_char_table(
+ ?RDBMS:var_char_max())),
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ string:chars($a, ?RDBMS:var_char_min())
+ ++ "')"),
+
+ {selected, Fields, [{CharBin}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ true = size(CharBin) /= ?RDBMS:var_char_max(),
+ ["FIELD"] = odbc_test_lib:to_upper(Fields),
+ ok.
+
+%%-------------------------------------------------------------------------
+
+binary_text_lower_limit(doc) ->
+ ["Tests 'long' char data type lower boundaries."];
+binary_text_lower_limit(suite) ->
+ [];
+binary_text_lower_limit(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_text_table()),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ string:chars($a, ?RDBMS:text_min())
+ ++ "')"),
+
+ {selected, Fields, [{<<"a">>}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ ["FIELD"] = odbc_test_lib:to_upper(Fields),
+ ok.
+
+%%-------------------------------------------------------------------------
+
+binary_text_upper_limit(doc) ->
+ [];
+binary_text_upper_limit(suite) ->
+ [];
+binary_text_upper_limit(Config) when is_list(Config) ->
+
+    {skip, "Consumes too many resources"}.
+%% Ref = ?config(connection_ref, Config),
+%% Table = ?config(tableName, Config),
+
+%% {updated, _} = % Value == 0 || -1 driver dependent!
+%% odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+%% ?RDBMS:create_text_table()),
+%% {updated, _} =
+%% odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+%% "'" ++ string:chars($a, ?RDBMS:text_max())
+%% ++ "')"),
+
+%% {selected, Fields, [{CharBin}]} =
+%% odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+%% size(CharBin) == ?RDBMS:text_max(),
+%% ["FIELD"] = odbc_test_lib:to_upper(Fields),
+
+%% {error, _} =
+%% odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+%% "'" ++ string:chars($a, (?RDBMS:text_max()+1))
+%% ++ "')"),
+%% ok.
+
+
+%%-------------------------------------------------------------------------
+
+int(doc) ->
+ ["Tests integer data types"];
+
+int(suite) ->
+ [tiny_int_lower_limit, tiny_int_upper_limit, small_int_lower_limit,
+ small_int_upper_limit, int_lower_limit, int_upper_limit,
+ big_int_lower_limit, big_int_upper_limit, bit_false, bit_true].
+
+%%-------------------------------------------------------------------------
+
+tiny_int_lower_limit(doc) ->
+ ["Tests integer of type tinyint."];
+tiny_int_lower_limit(suite) ->
+ [];
+tiny_int_lower_limit(Config) when is_list(Config) ->
+ case ?RDBMS of
+ postgres ->
+	    {skip, "Type tinyint not supported"};
+ _ ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_tiny_int_table()),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ integer_to_list(?RDBMS:tiny_int_min())
+ ++ "')"),
+
+ SelectResult = ?RDBMS:tiny_int_min_selected(),
+ SelectResult =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+
+ {error, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ integer_to_list(?RDBMS:tiny_int_min()
+ - 1)
+ ++ "')"),
+ ok
+ end.
+
+%%-------------------------------------------------------------------------
+
+tiny_int_upper_limit(doc) ->
+ ["Tests integer of type tinyint."];
+tiny_int_upper_limit(suite) ->
+ [];
+tiny_int_upper_limit(Config) when is_list(Config) ->
+ case ?RDBMS of
+ postgres ->
+	    {skip, "Type tinyint not supported"};
+ _ ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_tiny_int_table()),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ integer_to_list(?RDBMS:tiny_int_max())
+ ++ "')"),
+
+ SelectResult = ?RDBMS:tiny_int_max_selected(),
+ SelectResult =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+
+ {error, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ integer_to_list(?RDBMS:tiny_int_max()
+ + 1)
+ ++ "')"),
+ ok
+ end.
+
+%%-------------------------------------------------------------------------
+
+small_int_lower_limit(doc) ->
+ ["Tests integer of type smallint."];
+small_int_lower_limit(suite) ->
+ [];
+small_int_lower_limit(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_small_int_table()),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ integer_to_list(?RDBMS:small_int_min())
+ ++ "')"),
+
+ SelectResult = ?RDBMS:small_int_min_selected(),
+ SelectResult =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+
+ {error, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ integer_to_list(?RDBMS:small_int_min()
+ - 1)
+ ++ "')"),
+ ok.
+
+%%-------------------------------------------------------------------------
+
+small_int_upper_limit(doc) ->
+ ["Tests integer of type smallint."];
+small_int_upper_limit(suite) ->
+ [];
+small_int_upper_limit(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_small_int_table()),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ integer_to_list(?RDBMS:small_int_max())
+ ++ "')"),
+
+ SelectResult = ?RDBMS:small_int_max_selected(),
+ SelectResult =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+
+ {error, _} =
+ odbc:sql_query(Ref,"INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ integer_to_list(?RDBMS:small_int_max()
+ + 1)
+ ++ "')"),
+ ok.
+
+%%-------------------------------------------------------------------------
+int_lower_limit(doc) ->
+ ["Tests integer of type int."];
+int_lower_limit(suite) ->
+ [];
+int_lower_limit(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_int_table()),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ integer_to_list(?RDBMS:int_min())
+ ++ "')"),
+
+ SelectResult = ?RDBMS:int_min_selected(),
+ SelectResult =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+
+ {error, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ integer_to_list(?RDBMS:int_min() - 1)
+ ++ "')"),
+ ok.
+
+%%-------------------------------------------------------------------------
+
+int_upper_limit(doc) ->
+ ["Tests integer of type int."];
+int_upper_limit(suite) ->
+ [];
+int_upper_limit(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_int_table()),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ integer_to_list(?RDBMS:int_max())
+ ++ "')"),
+
+ SelectResult = ?RDBMS:int_max_selected(),
+ SelectResult =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+
+ {error, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ integer_to_list(?RDBMS:int_max() + 1)
+ ++ "')"),
+ ok.
+
+
+%%-------------------------------------------------------------------------
+big_int_lower_limit(doc) ->
+ ["Tests integer of type bigint"];
+big_int_lower_limit(suite) ->
+ [];
+big_int_lower_limit(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_big_int_table()),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ integer_to_list(?RDBMS:big_int_min())
+ ++ "')"),
+
+ SelectResult = ?RDBMS:big_int_min_selected(),
+ SelectResult =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+
+ {error, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ integer_to_list(?RDBMS:big_int_min()
+ - 1)
+ ++ "')"),
+ ok.
+
+%%-------------------------------------------------------------------------
+
+big_int_upper_limit(doc) ->
+ ["Tests integer of type bigint."];
+big_int_upper_limit(suite) ->
+ [];
+big_int_upper_limit(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_big_int_table()),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ integer_to_list(?RDBMS:big_int_max())
+ ++ "')"),
+
+ SelectResult = ?RDBMS:big_int_max_selected(),
+ SelectResult =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+
+ {error, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ integer_to_list(?RDBMS:big_int_max()
+ + 1)
+ ++ "')"),
+ ok.
+%%-------------------------------------------------------------------------
+
+bit_false(doc) ->
+ [""];
+bit_false(suite) ->
+ [];
+bit_false(Config) when is_list(Config) ->
+ case ?RDBMS of
+ oracle ->
+ {skip, "Not supported by driver"};
+ _ ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_bit_table()),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(" ++
+ "'" ++ integer_to_list(?RDBMS:bit_false())
+ ++ "')"),
+
+ SelectResult = ?RDBMS:bit_false_selected(),
+ SelectResult =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+
+ {error, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ integer_to_list(-1)
+ ++ "')"),
+ ok
+ end.
+
+%%-------------------------------------------------------------------------
+
+bit_true(doc) ->
+ [""];
+bit_true(suite) ->
+ [];
+bit_true(Config) when is_list(Config) ->
+ case ?RDBMS of
+ oracle ->
+ {skip, "Not supported by driver"};
+ _ ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_bit_table()),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ integer_to_list(?RDBMS:bit_true())
+ ++ "')"),
+
+ SelectResult = ?RDBMS:bit_true_selected(),
+ SelectResult =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+
+ {error, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ integer_to_list(-1)
+ ++ "')"),
+ ok
+ end.
+
+%%-------------------------------------------------------------------------
+
+floats(doc) ->
+ ["Test the datatype float."];
+floats(suite) ->
+ [float_lower_limit, float_upper_limit, float_zero, real_zero].
+
+%%-------------------------------------------------------------------------
+float_lower_limit(doc) ->
+ [""];
+float_lower_limit(suite) ->
+ [];
+float_lower_limit(Config) when is_list(Config) ->
+
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_float_table()),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ float_to_list(
+ ?RDBMS:float_min())
+ ++ "')"),
+ {selected,[_ColName],[{MinFloat}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+
+ true = ?RDBMS:float_min() == MinFloat,
+
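+    %% Inserting a value that underflows the float type: Oracle silently
+    %% stores zero, while the other databases reject the insert.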
+ case ?RDBMS of
+ oracle ->
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "DROP TABLE " ++ Table),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_float_table()),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "INSERT INTO " ++ Table ++" VALUES(" ++
+ ?RDBMS:float_underflow() ++ ")"),
+
+ SelectResult = ?RDBMS:float_zero_selected(),
+ SelectResult =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table);
+ _ ->
+ {error, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ ?RDBMS:float_underflow() ++ ")")
+ end,
+ ok.
+
+
+%%-------------------------------------------------------------------------
+float_upper_limit(doc) ->
+ [""];
+float_upper_limit(suite) ->
+ [];
+float_upper_limit(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_float_table()),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ "'" ++ float_to_list(
+ ?RDBMS:float_max())
+ ++ "')"),
+
+
+ {selected,[_ColName],[{MaxFloat}]}
+ = odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+
+ true = ?RDBMS:float_max() == MaxFloat,
+
+ {error, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(" ++
+ ?RDBMS:float_overflow() ++ ")"),
+ ok.
+
+%%-------------------------------------------------------------------------
+float_zero(doc) ->
+ ["Test the float value zero."];
+float_zero(suite) ->
+ [];
+float_zero(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_float_table()),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES('0')"),
+
+ SelectResult = ?RDBMS:float_zero_selected(),
+ SelectResult =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ ok.
+%%-------------------------------------------------------------------------
+real_zero(doc) ->
+ ["Test the real value zero."];
+real_zero(suite) ->
+ [];
+real_zero(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ case ?RDBMS of
+ oracle ->
+ {skip, "Not supported in Oracle"};
+ _ ->
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_real_table()),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES('0')"),
+
+ SelectResult = ?RDBMS:real_zero_selected(),
+ SelectResult =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ ok
+ end.
+%%-------------------------------------------------------------------------
+dec_and_num(doc) ->
+ ["Tests decimal and numeric datatypes."];
+dec_and_num(suite) ->
+ [dec_long, dec_double, dec_bignum, num_long, num_double, num_bignum].
+%%------------------------------------------------------------------------
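+%% The cases below assert how DECIMAL/NUMERIC columns are mapped depending on
+%% the declared precision: up to 9 digits are returned as integers, 10-15 as
+%% floats, and 16 or more as strings.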
+dec_long(doc) ->
+ [""];
+dec_long(suite) ->
+ [];
+dec_long(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ "(FIELD DECIMAL (9,0))"),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(1.6)"),
+
+ {selected, Fields, [{2}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ ["FIELD"] = odbc_test_lib:to_upper(Fields),
+ ok.
+%%------------------------------------------------------------------------
+dec_double(doc) ->
+ [""];
+dec_double(suite) ->
+ [];
+dec_double(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ "(FIELD DECIMAL (10,0))"),
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(1.6)"),
+
+ {selected, Fields, [{2.00000}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ ["FIELD"] = odbc_test_lib:to_upper(Fields),
+
+ %% Clean up
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "DROP TABLE " ++ Table),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ "(FIELD DECIMAL (15,0))"),
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(1.6)"),
+
+ {selected, Fields1, [{2.00000}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ ["FIELD"] = odbc_test_lib:to_upper(Fields1),
+
+ %% Clean up
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "DROP TABLE " ++ Table),
+
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ "(FIELD DECIMAL (15, 1))"),
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(1.6)"),
+
+ {selected, Fields2, [{1.60000}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ ["FIELD"] = odbc_test_lib:to_upper(Fields2),
+ ok.
+
+%%------------------------------------------------------------------------
+dec_bignum(doc) ->
+ [""];
+dec_bignum(suite) ->
+ [];
+dec_bignum(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ "(FIELD DECIMAL (16,0))"),
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(1.6)"),
+
+ {selected, Fields, [{"2"}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ ["FIELD"] = odbc_test_lib:to_upper(Fields),
+
+ %% Clean up
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "DROP TABLE " ++ Table),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ "(FIELD DECIMAL (16,1))"),
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(1.6)"),
+
+ {selected, Fields1, [{"1.6"}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ ["FIELD"] = odbc_test_lib:to_upper(Fields1),
+ ok.
+%%------------------------------------------------------------------------
+num_long(doc) ->
+ [""];
+num_long(suite) ->
+ [];
+num_long(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ "(FIELD DECIMAL (9,0))"),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(1.5)"),
+
+ {selected, Fields, [{2}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ ["FIELD"] = odbc_test_lib:to_upper(Fields),
+ ok.
+%%------------------------------------------------------------------------
+num_double(doc) ->
+ [""];
+num_double(suite) ->
+ [];
+num_double(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ "(FIELD DECIMAL (10,0))"),
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(1.6)"),
+
+ {selected, Fields, [{2.0000}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ ["FIELD"] = odbc_test_lib:to_upper(Fields),
+
+ %% Clean up
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "DROP TABLE " ++ Table),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ "(FIELD DECIMAL (15,0))"),
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(1.6)"),
+
+ {selected, Fields1, [{2.0000}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ ["FIELD"] = odbc_test_lib:to_upper(Fields1),
+
+ %% Clean up
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "DROP TABLE " ++ Table),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ "(FIELD DECIMAL (15,1))"),
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(1.6)"),
+
+ {selected, Fields2, [{1.6000}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ ["FIELD"] = odbc_test_lib:to_upper(Fields2),
+ ok.
+%%------------------------------------------------------------------------
+num_bignum(doc) ->
+ [""];
+num_bignum(suite) ->
+ [];
+num_bignum(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ "(FIELD DECIMAL (16,0))"),
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(1.6)"),
+
+ {selected, Fields, [{"2"}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ ["FIELD"] = odbc_test_lib:to_upper(Fields),
+
+ %% Clean up
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "DROP TABLE " ++ Table),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ "(FIELD DECIMAL (16,1))"),
+ {updated, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++" VALUES(1.6)"),
+
+ {selected, Fields1, [{"1.6"}]} =
+ odbc:sql_query(Ref,"SELECT FIELD FROM " ++ Table),
+ ["FIELD"] = odbc_test_lib:to_upper(Fields1),
+ ok.
+
+%%------------------------------------------------------------------------
+unicode(doc) ->
+ ["Test unicode support"];
+unicode(suite) ->
+ [];
+unicode(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_unicode_table()),
+
+ Latin1Data = ["���������",
+ "testasdf",
+ "Row 3",
+ "Row 4",
+ "Row 5",
+ "Row 6",
+ "Row 7",
+ "Row 8",
+ "Row 9",
+ "Row 10",
+ "Row 11",
+ "Row 12"],
+
+ case ?RDBMS of
+ sqlserver ->
+ w_char_support_win(Ref, Table, Latin1Data);
+ postgres ->
+ direct_utf8(Ref, Table, Latin1Data);
+ oracle ->
+ {skip, "not currently supported"}
+ end.
+
+w_char_support_win(Ref, Table, Latin1Data) ->
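+    %% SQL Server stores wide character data as UTF-16 little endian, so the
+    %% Latin-1 strings are converted to UTF-16LE binaries before being bound
+    %% as sql_wvarchar parameters.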
+ UnicodeIn = lists:map(fun(S) ->
+ unicode:characters_to_binary(S,latin1,{utf16,little})
+ end,
+ Latin1Data),
+
+ test_server:format("UnicodeIn (utf 16): ~p ~n",[UnicodeIn]),
+
+ {updated, _} = odbc:param_query(Ref, "INSERT INTO " ++ Table ++ "(FIELD) values(?)",
+ [{{sql_wvarchar,50},UnicodeIn}]),
+
+ {selected,_,UnicodeOut} = odbc:sql_query(Ref,"SELECT * FROM " ++ Table),
+
+ test_server:format("UnicodeOut: ~p~n", [UnicodeOut]),
+
+ Result = lists:map(fun({Unicode}) ->
+ unicode:characters_to_list(Unicode,{utf16,little})
+ end,
+ UnicodeOut),
+ Latin1Data = Result.
+
+
+direct_utf8(Ref, Table, Latin1Data) ->
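+    %% PostgreSQL accepts UTF-8 encoded data directly in ordinary varchar
+    %% columns, so the strings are converted to UTF-8 binaries and bound as
+    %% sql_varchar parameters.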
+ UnicodeIn = lists:map(fun(String) ->
+ unicode:characters_to_binary(String,latin1,utf8)
+ end,
+ Latin1Data),
+
+ test_server:format("UnicodeIn: ~p ~n",[UnicodeIn]),
+ {updated, _} = odbc:param_query(Ref,"INSERT INTO " ++ Table ++ "(FIELD) values(?)",
+ [{{sql_varchar,50}, UnicodeIn}]),
+
+ {selected,_,UnicodeOut} = odbc:sql_query(Ref,"SELECT * FROM " ++ Table),
+
+ test_server:format("UnicodeOut: ~p~n", [UnicodeOut]),
+
+ Result = lists:map(fun({Char}) ->
+ unicode:characters_to_list(Char,utf8)
+ end, UnicodeOut),
+
+ test_server:format("Result: ~p ~n", [Result]),
+
+ Latin1Data = Result.
+
+%%------------------------------------------------------------------------
+timestamp(doc) ->
+ [""];
+timestamp(suite) ->
+ [];
+timestamp(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_timestamp_table()),
+
+ Data = [calendar:local_time(),
+ {{2009,6,17},{20,54,59}},
+ {{2009,6,18},{20,54,59}},
+ {{2009,6,19},{20,54,59}},
+ {{2009,6,20},{20,54,59}},
+ {{2009,6,21},{20,54,59}}],
+
+ {updated, _} = odbc:param_query(Ref,"INSERT INTO " ++ Table ++ "(FIELD) values(?)",
+ [{sql_timestamp,Data}]),
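+    %% param_query/3 binds the whole list of timestamps in one call, inserting
+    %% one row per element of Data.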
+
+    %%% Create the list of expected database table rows
+ TimeStamps = lists:map(fun(Value) -> {Value} end, Data),
+
+ {selected,_, TimeStamps} = odbc:sql_query(Ref, "SELECT * FROM " ++ Table).
diff --git a/lib/odbc/test/odbc_query_SUITE.erl b/lib/odbc/test/odbc_query_SUITE.erl
new file mode 100644
index 0000000000..12b39be3b7
--- /dev/null
+++ b/lib/odbc/test/odbc_query_SUITE.erl
@@ -0,0 +1,1453 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2002-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(odbc_query_SUITE).
+
+%% Note: This directive should only be used in test suites.
+-compile(export_all).
+
+-include("test_server.hrl").
+-include("test_server_line.hrl").
+-include("odbc_test.hrl").
+
+%%--------------------------------------------------------------------
+%% all(Arg) -> [Doc] | [Case] | {skip, Comment}
+%% Arg - doc | suite
+%% Doc - string()
+%% Case - atom()
+%% Name of a test case function.
+%% Comment - string()
+%% Description: Returns documentation/test cases in this test suite
+%% or a skip tuple if the platform is not supported.
+%%--------------------------------------------------------------------
+all(doc) ->
+ ["Tests SQL queries"];
+all(suite) ->
+ case odbc_test_lib:odbc_check() of
+ ok -> all();
+ Other -> {skip, Other}
+ end.
+
+all() ->
+ [sql_query, first, last, next, prev, select_count,select_next,
+ select_relative, select_absolute, create_table_twice,
+ delete_table_twice, duplicate_key, not_connection_owner,
+ no_result_set, query_error, multiple_select_result_sets,
+ multiple_mix_result_sets, multiple_result_sets_error,
+ parameterized_queries, describe_table,
+ delete_nonexisting_row].
+
+
+%%--------------------------------------------------------------------
+%% Function: init_per_suite(Config) -> Config
+%% Config - [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%% Description: Initialization before the whole suite
+%%
+%% Note: This function is free to add any key/value pairs to the Config
+%% variable, but should NOT alter/remove any existing entries.
+%%--------------------------------------------------------------------
+init_per_suite(Config) when is_list(Config) ->
+ application:start(odbc),
+ [{tableName, odbc_test_lib:unique_table_name()}| Config].
+
+%%--------------------------------------------------------------------
+%% Function: end_per_suite(Config) -> _
+%% Config - [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%% Description: Cleanup after the whole suite
+%%--------------------------------------------------------------------
+end_per_suite(_Config) ->
+ application:stop(odbc),
+ ok.
+
+%%--------------------------------------------------------------------
+%% Function: init_per_testcase(Case, Config) -> Config
+%% Case - atom()
+%% Name of the test case that is about to be run.
+%% Config - [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%%
+%% Description: Initialization before each test case
+%%
+%% Note: This function is free to add any key/value pairs to the Config
+%% variable, but should NOT alter/remove any existing entries.
+%%--------------------------------------------------------------------
+init_per_testcase(_Case, Config) ->
+ {ok, Ref} = odbc:connect(?RDBMS:connection_string(), []),
+ Dog = test_server:timetrap(?default_timeout),
+ Temp = lists:keydelete(connection_ref, 1, Config),
+ NewConfig = lists:keydelete(watchdog, 1, Temp),
+ [{watchdog, Dog}, {connection_ref, Ref} | NewConfig].
+
+%%--------------------------------------------------------------------
+%% Function: end_per_testcase(Case, Config) -> _
+%% Case - atom()
+%% Name of the test case that is about to be run.
+%% Config - [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%% Description: Cleanup after each test case
+%%--------------------------------------------------------------------
+end_per_testcase(_Case, Config) ->
+ Ref = ?config(connection_ref, Config),
+ ok = odbc:disconnect(Ref),
+ %% Clean up if needed
+ Table = ?config(tableName, Config),
+ {ok, NewRef} = odbc:connect(?RDBMS:connection_string(), []),
+ odbc:sql_query(NewRef, "DROP TABLE " ++ Table),
+ odbc:disconnect(NewRef),
+ Dog = ?config(watchdog, Config),
+ test_server:timetrap_cancel(Dog),
+ ok.
+
+%%-------------------------------------------------------------------------
+%% Test cases starts here.
+%%-------------------------------------------------------------------------
+sql_query(doc)->
+ ["Test the common cases"];
+sql_query(suite) -> [];
+sql_query(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer, DATA varchar(10))"),
+
+ {updated, Count} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++ " VALUES(1,'bar')"),
+
+ true = odbc_test_lib:check_row_count(1, Count),
+
+ InsertResult = ?RDBMS:insert_result(),
+ InsertResult =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+ {updated, NewCount} =
+ odbc:sql_query(Ref, "UPDATE " ++ Table ++
+ " SET DATA = 'foo' WHERE ID = 1"),
+
+ true = odbc_test_lib:check_row_count(1, NewCount),
+
+ UpdateResult = ?RDBMS:update_result(),
+ UpdateResult =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+ {updated, NewCount1} = odbc:sql_query(Ref, "DELETE FROM " ++ Table ++
+ " WHERE ID = 1"),
+
+ true = odbc_test_lib:check_row_count(1, NewCount1),
+
+ {selected, Fields, []} =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+ ["ID","DATA"] = odbc_test_lib:to_upper(Fields),
+ ok.
+
+%%-------------------------------------------------------------------------
+select_count(doc) ->
+    ["Tests the timeout of select_count/[2,3]; "
+     "select_count's functionality is better exercised by other test cases, "
+     "such as first."];
+select_count(suite) -> [];
+select_count(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer)"),
+
+ {updated, Count} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(1)"),
+ true = odbc_test_lib:check_row_count(1, Count),
+ {ok, _} =
+ odbc:select_count(Ref, "SELECT * FROM " ++ Table, ?TIMEOUT),
+ {'EXIT', {function_clause, _}} =
+ (catch odbc:select_count(Ref, "SELECT * FROM ", -1)),
+ ok.
+%%-------------------------------------------------------------------------
+first(doc) ->
+ ["Tests first/[1,2]"];
+first(suite) -> [];
+first(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer)"),
+
+ {updated, Count} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(1)"),
+ true = odbc_test_lib:check_row_count(1, Count),
+ {updated, NewCount} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(2)"),
+ true = odbc_test_lib:check_row_count(1, NewCount),
+ {ok, _} = odbc:select_count(Ref, "SELECT * FROM " ++ Table),
+
+
+ FirstResult = ?RDBMS:selected_ID(1, first),
+ FirstResult = odbc:first(Ref),
+ FirstResult = odbc:first(Ref, ?TIMEOUT),
+ {'EXIT', {function_clause, _}} = (catch odbc:first(Ref, -1)),
+ ok.
+
+%%-------------------------------------------------------------------------
+last(doc) ->
+ ["Tests last/[1,2]"];
+last(suite) -> [];
+last(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer)"),
+
+ {updated, Count} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(1)"),
+ true = odbc_test_lib:check_row_count(1, Count),
+ {updated, NewCount} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(2)"),
+ true = odbc_test_lib:check_row_count(1, NewCount),
+ {ok, _} = odbc:select_count(Ref, "SELECT * FROM " ++ Table),
+
+ LastResult = ?RDBMS:selected_ID(2, last),
+ LastResult = odbc:last(Ref),
+
+ LastResult = odbc:last(Ref, ?TIMEOUT),
+ {'EXIT', {function_clause, _}} = (catch odbc:last(Ref, -1)),
+ ok.
+
+%%-------------------------------------------------------------------------
+next(doc) ->
+ ["Tests next/[1,2]"];
+next(suite) -> [];
+next(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer)"),
+
+ {updated, Count} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(1)"),
+ true = odbc_test_lib:check_row_count(1, Count),
+ {updated, NewCount} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(2)"),
+ true = odbc_test_lib:check_row_count(1, NewCount),
+ {ok, _} = odbc:select_count(Ref, "SELECT * FROM " ++ Table),
+
+ NextResult = ?RDBMS:selected_ID(1, next),
+ NextResult = odbc:next(Ref),
+ NextResult2 = ?RDBMS:selected_ID(2, next),
+ NextResult2 = odbc:next(Ref, ?TIMEOUT),
+ {'EXIT', {function_clause, _}} = (catch odbc:next(Ref, -1)),
+ ok.
+%%-------------------------------------------------------------------------
+prev(doc) ->
+ ["Tests prev/[1,2]"];
+prev(suite) -> [];
+prev(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer)"),
+
+ {updated, Count} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(1)"),
+ true = odbc_test_lib:check_row_count(1, Count),
+ {updated, NewCount} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(2)"),
+ true = odbc_test_lib:check_row_count(1, NewCount),
+
+ {ok, _} = odbc:select_count(Ref, "SELECT * FROM " ++ Table),
+
+ odbc:last(Ref), % Position cursor last so there will be a prev
+ PrevResult = ?RDBMS:selected_ID(1, prev),
+ PrevResult = odbc:prev(Ref),
+
+ odbc:last(Ref), % Position cursor last so there will be a prev
+ PrevResult = odbc:prev(Ref, ?TIMEOUT),
+ {'EXIT', {function_clause, _}} = (catch odbc:prev(Ref, -1)),
+ ok.
+%%-------------------------------------------------------------------------
+select_next(doc) ->
+ ["Tests select/[4,5] with CursorRelation = next "];
+select_next(suite) -> [];
+select_next(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer)"),
+
+ {updated, 1} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(1)"),
+ {updated, 1} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(2)"),
+ {updated, 1} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(3)"),
+ {updated, 1} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(4)"),
+ {updated, 1} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(5)"),
+
+ {ok, _} = odbc:select_count(Ref, "SELECT * FROM " ++ Table),
+
+ SelectResult1 = ?RDBMS:selected_next_N(1),
+ SelectResult1 = odbc:select(Ref, next, 3),
+
+ %% Test that selecting stops at the end of the result set
+ SelectResult2 = ?RDBMS:selected_next_N(2),
+ SelectResult2 = odbc:select(Ref, next, 3, ?TIMEOUT),
+ {'EXIT',{function_clause, _}} =
+ (catch odbc:select(Ref, next, 2, -1)),
+
+    %% If you try fetching data beyond the end of the result set,
+ %% you get an empty list.
+ {selected, Fields, []} = odbc:select(Ref, next, 1),
+
+ ["ID"] = odbc_test_lib:to_upper(Fields),
+ ok.
+
+%%-------------------------------------------------------------------------
+select_relative(doc) ->
+ ["Tests select/[4,5] with CursorRelation = relative "];
+select_relative(suite) -> [];
+select_relative(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer)"),
+
+ {updated, 1} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(1)"),
+ {updated, 1} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(2)"),
+ {updated, 1} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(3)"),
+ {updated, 1} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(4)"),
+ {updated, 1} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(5)"),
+ {updated, 1} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(6)"),
+ {updated, 1} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(7)"),
+ {updated, 1} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(8)"),
+
+ {ok, _} = odbc:select_count(Ref, "SELECT * FROM " ++ Table),
+
+ SelectResult1 = ?RDBMS:selected_relative_N(1),
+ SelectResult1 = odbc:select(Ref, {relative, 2}, 3),
+
+ %% Test that selecting stops at the end of the result set
+ SelectResult2 = ?RDBMS:selected_relative_N(2),
+ SelectResult2 = odbc:select(Ref, {relative, 3}, 3, ?TIMEOUT),
+ {'EXIT',{function_clause, _}} =
+ (catch odbc:select(Ref, {relative, 3} , 2, -1)),
+ ok.
+
+%%-------------------------------------------------------------------------
+select_absolute(doc) ->
+ ["Tests select/[4,5] with CursorRelation = absolute "];
+select_absolute(suite) -> [];
+select_absolute(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer)"),
+
+ {updated, 1} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(1)"),
+ {updated, 1} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(2)"),
+ {updated, 1} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(3)"),
+ {updated, 1} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(4)"),
+ {updated, 1} = odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(5)"),
+ {ok, _} = odbc:select_count(Ref, "SELECT * FROM " ++ Table),
+
+ SelectResult1 = ?RDBMS:selected_absolute_N(1),
+ SelectResult1 = odbc:select(Ref, {absolute, 1}, 3),
+
+ %% Test that selecting stops at the end of the result set
+ SelectResult2 = ?RDBMS:selected_absolute_N(2),
+ SelectResult2 = odbc:select(Ref, {absolute, 1}, 6, ?TIMEOUT),
+ {'EXIT',{function_clause, _}} =
+ (catch odbc:select(Ref, {absolute, 1}, 2, -1)),
+ ok.
+
+%%-------------------------------------------------------------------------
+create_table_twice(doc) ->
+ ["Test what happens if you try to create the same table twice."];
+create_table_twice(suite) -> [];
+create_table_twice(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer, DATA varchar(10))"),
+ {error, Error} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer, DATA varchar(10))"),
+ is_driver_error(Error),
+ ok.
+
+%%-------------------------------------------------------------------------
+delete_table_twice(doc) ->
+ ["Test what happens if you try to delete the same table twice."];
+delete_table_twice(suite) -> [];
+delete_table_twice(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer, DATA varchar(10))"),
+ {updated, _} = odbc:sql_query(Ref, "DROP TABLE " ++ Table),
+ {error, Error} = odbc:sql_query(Ref, "DROP TABLE " ++ Table),
+ is_driver_error(Error),
+ ok.
+
+%%-------------------------------------------------------------------------
+duplicate_key(doc) ->
+ ["Test what happens if you try to use the same key twice"];
+duplicate_key(suite) -> [];
+duplicate_key(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer, DATA char(10), PRIMARY KEY(ID))"),
+
+ {updated, 1} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++ " VALUES(1,'bar')"),
+
+ {error, Error} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++ " VALUES(1,'foo')"),
+ is_driver_error(Error),
+ ok.
+
+%%-------------------------------------------------------------------------
+not_connection_owner(doc) ->
+ ["Test what happens if a process that did not start the connection"
+ " tries to acess it."];
+not_connection_owner(suite) -> [];
+not_connection_owner(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ spawn_link(?MODULE, not_owner, [self(), Ref, Table]),
+
+ receive
+ continue ->
+ ok
+ end.
+
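+%% Runs in a separate process: only the process that created the
+%% connection may use it, so both calls below are expected to return
+%% {error, process_not_owner_of_odbc_connection}.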
+not_owner(Pid, Ref, Table) ->
+ {error, process_not_owner_of_odbc_connection} =
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++ " (ID integer)"),
+
+ {error, process_not_owner_of_odbc_connection} =
+ odbc:disconnect(Ref),
+
+ Pid ! continue.
+
+%%-------------------------------------------------------------------------
+no_result_set(doc) ->
+ ["Tests what happens if you try to use a function that needs an "
+ "associated result set when there is none."];
+no_result_set(suite) -> [];
+no_result_set(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+
+ {error, result_set_does_not_exist} = odbc:first(Ref),
+ {error, result_set_does_not_exist} = odbc:last(Ref),
+ {error, result_set_does_not_exist} = odbc:next(Ref),
+ {error, result_set_does_not_exist} = odbc:prev(Ref),
+ {error, result_set_does_not_exist} = odbc:select(Ref, next, 1),
+ {error, result_set_does_not_exist} =
+ odbc:select(Ref, {absolute, 2}, 1),
+ {error, result_set_does_not_exist} =
+ odbc:select(Ref, {relative, 2}, 1),
+ ok.
+%%-------------------------------------------------------------------------
+query_error(doc) ->
+ ["Test what happens if there is an error in the query."];
+query_error(suite) ->
+ [];
+query_error(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer, DATA char(10), PRIMARY KEY(ID))"),
+ {updated, 1} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++ " VALUES(1,'bar')"),
+
+ {error, _} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++ " VALUES(1,'bar')"),
+
+ {error, _} =
+ odbc:sql_query(Ref, "INSERT ONTO " ++ Table ++ " VALUES(1,'bar')"),
+ ok.
+
+%%-------------------------------------------------------------------------
+multiple_select_result_sets(doc) ->
+ ["Test what happens if you have a batch of select queries."];
+multiple_select_result_sets(suite) ->
+ [];
+multiple_select_result_sets(Config) when is_list(Config) ->
+ case ?RDBMS of
+ sqlserver ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer, DATA varchar(10), "
+ "PRIMARY KEY(ID))"),
+ {updated, 1} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(1,'bar')"),
+
+ {updated, 1} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(2, 'foo')"),
+
+ MultipleResult = ?RDBMS:multiple_select(),
+
+ MultipleResult =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table ++
+ "; SELECT DATA FROM "++ Table ++
+ " WHERE ID=2"),
+ ok;
+ _ ->
+ {skip, "multiple result_set not supported"}
+ end.
+
+%%-------------------------------------------------------------------------
+multiple_mix_result_sets(doc) ->
+ ["Test what happens if you have a batch of select and other type of"
+ " queries."];
+multiple_mix_result_sets(suite) ->
+ [];
+multiple_mix_result_sets(Config) when is_list(Config) ->
+ case ?RDBMS of
+ sqlserver ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer, DATA varchar(10), "
+ "PRIMARY KEY(ID))"),
+ {updated, 1} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(1,'bar')"),
+
+ MultipleResult = ?RDBMS:multiple_mix(),
+
+ MultipleResult =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(2,'foo'); UPDATE " ++ Table ++
+ " SET DATA = 'foobar' WHERE ID =1;SELECT "
+ "* FROM "
+ ++ Table ++ ";DELETE FROM " ++ Table ++
+ " WHERE ID =1; SELECT DATA FROM " ++ Table),
+ ok;
+ _ ->
+ {skip, "multiple result_set not supported"}
+ end.
+%%-------------------------------------------------------------------------
+multiple_result_sets_error(doc) ->
+ ["Test what happens if one of the batched queries fails."];
+multiple_result_sets_error(suite) ->
+ [];
+multiple_result_sets_error(Config) when is_list(Config) ->
+ case ?RDBMS of
+ sqlserver ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID integer, DATA varchar(10), "
+ "PRIMARY KEY(ID))"),
+ {updated, 1} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(1,'bar')"),
+
+ {error, Error} =
+ odbc:sql_query(Ref, "INSERT INTO " ++ Table ++
+ " VALUES(1,'foo'); SELECT * FROM " ++ Table),
+ is_driver_error(Error),
+
+ {error, NewError} =
+ odbc:sql_query(Ref, "SELECT * FROM "
+ ++ Table ++ ";INSERT INTO " ++ Table ++
+ " VALUES(1,'foo')"),
+ is_driver_error(NewError),
+ ok;
+ _ ->
+ {skip, "multiple result_set not supported"}
+ end.
+
+%%-------------------------------------------------------------------------
+parameterized_queries(doc)->
+ ["Tests diffrent variants of parameterized queries."];
+parameterized_queries(suite) ->
+ %% Note timestamps are inserted with param_query in odbc_data_type_SUITE
+ %% so no need to test this again.
+ [param_integers,
+ param_insert_decimal, param_insert_numeric,
+ param_insert_string,
+ param_insert_float, param_insert_real, param_insert_double,
+ param_insert_mix, param_update, param_delete, param_select].
+
+%%-------------------------------------------------------------------------
+param_integers(doc)->
+ ["Test insertion of integers by parameterized queries."];
+param_integers(suite) ->
+ [param_insert_tiny_int,
+ param_insert_small_int, param_insert_int, param_insert_integer].
+%%-------------------------------------------------------------------------
+param_insert_tiny_int(doc)->
+ ["Test insertion of tiny ints by parameterized queries."];
+param_insert_tiny_int(suite) ->
+ [];
+param_insert_tiny_int(Config) when is_list(Config) ->
+ case ?RDBMS of
+ sqlserver ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (FIELD TINYINT)"),
+
+ {updated, Count} =
+ odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{sql_tinyint, [1, 2]}],
+ ?TIMEOUT), %% Make sure to test timeout clause
+
+ true = odbc_test_lib:check_row_count(2, Count),
+
+ InsertResult = ?RDBMS:param_select_tiny_int(),
+
+ InsertResult =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+ {'EXIT',{badarg,odbc,param_query,'Params'}} =
+ (catch odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{sql_tinyint, [1, "2"]}])),
+ ok;
+ _ ->
+ {skip, "Type tiniyint not supported"}
+ end.
+%%-------------------------------------------------------------------------
+param_insert_small_int(doc)->
+ ["Test insertion of small ints by parameterized queries."];
+param_insert_small_int(suite) ->
+ [];
+param_insert_small_int(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (FIELD SMALLINT)"),
+
+ {updated, Count} =
+ odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)", [{sql_smallint, [1, 2]}],
+ ?TIMEOUT), %% Make sure to test timeout clause
+
+ true = odbc_test_lib:check_row_count(2, Count),
+
+ InsertResult = ?RDBMS:param_select_small_int(),
+
+ InsertResult =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+ {'EXIT',{badarg,odbc,param_query,'Params'}} =
+ (catch odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{sql_smallint, [1, "2"]}])),
+ ok.
+
+%%-------------------------------------------------------------------------
+param_insert_int(doc)->
+ ["Test insertion of ints by parameterized queries."];
+param_insert_int(suite) ->
+ [];
+param_insert_int(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (FIELD INT)"),
+
+ Int = ?RDBMS:small_int_max() + 1,
+
+ {updated, Count} = odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{sql_integer, [1, Int]}]),
+ true = odbc_test_lib:check_row_count(2, Count),
+
+ InsertResult = ?RDBMS:param_select_int(),
+
+ InsertResult =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+ {'EXIT',{badarg,odbc,param_query,'Params'}} =
+ (catch odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{sql_integer, [1, "2"]}])),
+ ok.
+
+%%-------------------------------------------------------------------------
+param_insert_integer(doc)->
+ ["Test insertion of integers by parameterized queries."];
+param_insert_integer(suite) ->
+ [];
+param_insert_integer(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (FIELD INTEGER)"),
+
+ Int = ?RDBMS:small_int_max() + 1,
+
+ {updated, Count} = odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{sql_integer, [1, Int]}]),
+ true = odbc_test_lib:check_row_count(2, Count),
+
+ InsertResult = ?RDBMS:param_select_int(),
+
+ InsertResult =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+ {'EXIT',{badarg,odbc,param_query,'Params'}} =
+ (catch odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{sql_integer, [1, 2.3]}])),
+ ok.
+
+%%-------------------------------------------------------------------------
+param_insert_decimal(doc)->
+ ["Test insertion of decimal numbers by parameterized queries."];
+param_insert_decimal(suite) ->
+ [];
+param_insert_decimal(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (FIELD DECIMAL (3,0))"),
+
+ {updated, Count} = odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{{sql_decimal, 3, 0}, [1, 2]}]),
+ true = odbc_test_lib:check_row_count(2, Count),
+
+ InsertResult = ?RDBMS:param_select_decimal(),
+
+ InsertResult =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+ {'EXIT',{badarg,odbc,param_query,'Params'}} =
+ (catch odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{{sql_decimal, 3, 0}, [1, "2"]}])),
+
+
+ odbc:sql_query(Ref, "DROP TABLE " ++ Table),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (FIELD DECIMAL (3,1))"),
+
+ {updated, NewCount} = odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{{sql_decimal, 3, 1}, [0.25]}]),
+ true = odbc_test_lib:check_row_count(1, NewCount),
+
+ {selected, Fields, [{Value}]} =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+ ["FIELD"] = odbc_test_lib:to_upper(Fields),
+
+ odbc_test_lib:match_float(Value, 0.3, 0.01),
+
+ ok.
+
+%%-------------------------------------------------------------------------
+param_insert_numeric(doc)->
+ ["Test insertion of numeric numbers by parameterized queries."];
+param_insert_numeric(suite) ->
+ [];
+param_insert_numeric(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (FIELD NUMERIC (3,0))"),
+
+ {updated, Count} = odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{{sql_numeric,3,0}, [1, 2]}]),
+
+ true = odbc_test_lib:check_row_count(2, Count),
+
+ InsertResult = ?RDBMS:param_select_numeric(),
+
+ InsertResult =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+ {'EXIT',{badarg,odbc,param_query,'Params'}} =
+ (catch odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{{sql_decimal, 3, 0}, [1, "2"]}])),
+
+ odbc:sql_query(Ref, "DROP TABLE " ++ Table),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (FIELD NUMERIC (3,1))"),
+
+ {updated, NewCount} = odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{{sql_numeric, 3, 1}, [0.25]}]),
+
+ true = odbc_test_lib:check_row_count(1, NewCount),
+
+ {selected, Fields, [{Value}]} =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+ ["FIELD"] = odbc_test_lib:to_upper(Fileds),
+
+ odbc_test_lib:match_float(Value, 0.3, 0.01),
+ ok.
+
+%%-------------------------------------------------------------------------
+param_insert_string(doc) ->
+ ["Test insertion of strings by parameterized queries."];
+param_insert_string(suite) ->
+ [param_insert_char, param_insert_character, param_insert_char_varying,
+ param_insert_character_varying].
+
+%%-------------------------------------------------------------------------
+param_insert_char(doc)->
+ ["Test insertion of fixed length string by parameterized queries."];
+param_insert_char(suite) ->
+ [];
+param_insert_char(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (FIELD CHAR (10))"),
+
+ {updated, Count} = odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{{sql_char, 10},
+ ["foofoofoof", "0123456789"]}]),
+ true = odbc_test_lib:check_row_count(2, Count),
+
+ {selected,Fields,[{"foofoofoof"}, {"0123456789"}]} =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+ ["FIELD"] = odbc_test_lib:to_upper(Fileds),
+
+ {error, _} = odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{{sql_char, 10},
+ ["foo", "01234567890"]}]),
+
+ {'EXIT',{badarg,odbc,param_query,'Params'}} =
+ (catch odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{{sql_char, 10}, ["1", 2.3]}])),
+ ok.
+
+%%-------------------------------------------------------------------------
+param_insert_character(doc)->
+ ["Test insertion of fixed length string by parameterized queries."];
+param_insert_character(suite) ->
+ [];
+param_insert_character(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (FIELD CHARACTER (10))"),
+
+ {updated, Count} = odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{{sql_char, 10},
+ ["foofoofoof", "0123456789"]}]),
+
+ true = odbc_test_lib:check_row_count(2, Count),
+
+ {selected, Fileds, [{"foofoofoof"}, {"0123456789"}]} =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+ ["FIELD"] = odbc_test_lib:to_upper(Fileds),
+
+ {error, _} = odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{{sql_char, 10},
+ ["foo", "01234567890"]}]),
+
+ {'EXIT',{badarg,odbc,param_query,'Params'}} =
+ (catch odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{{sql_char, 10}, ["1", 2]}])),
+ ok.
+
+%%------------------------------------------------------------------------
+param_insert_char_varying(doc)->
+ ["Test insertion of variable length strings by parameterized queries."];
+param_insert_char_varying(suite) ->
+ [];
+param_insert_char_varying(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (FIELD CHAR VARYING(10))"),
+
+ {updated, Count} = odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{{sql_varchar, 10},
+ ["foo", "0123456789"]}]),
+
+ true = odbc_test_lib:check_row_count(2, Count),
+
+ {selected, Fileds, [{"foo"}, {"0123456789"}]} =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+ ["FIELD"] = odbc_test_lib:to_upper(Fileds),
+
+ {error, _} = odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{{sql_varchar, 10},
+ ["foo", "01234567890"]}]),
+
+ {'EXIT',{badarg,odbc,param_query,'Params'}} =
+ (catch odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{{sql_varchar, 10}, ["1", 2.3]}])),
+ ok.
+
+%%-------------------------------------------------------------------------
+param_insert_character_varying(doc)->
+ ["Test insertion of variable length strings by parameterized queries."];
+param_insert_character_varying(suite) ->
+ [];
+param_insert_character_varying(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (FIELD CHARACTER VARYING(10))"),
+
+
+ {updated, Count} = odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{{sql_varchar, 10},
+ ["foo", "0123456789"]}]),
+
+ true = odbc_test_lib:check_row_count(2, Count),
+
+ {selected, Fileds, [{"foo"}, {"0123456789"}]} =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+ ["FIELD"] = odbc_test_lib:to_upper(Fileds),
+
+ {error, _} = odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{{sql_varchar, 10},
+ ["foo", "01234567890"]}]),
+
+ {'EXIT',{badarg,odbc,param_query,'Params'}} =
+ (catch odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{{sql_varchar, 10}, ["1", 2]}])),
+ ok.
+%%-------------------------------------------------------------------------
+param_insert_float(doc)->
+ ["Test insertion of floats by parameterized queries."];
+param_insert_float(suite) ->
+ [];
+param_insert_float(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (FIELD FLOAT(5))"),
+
+ {updated, Count} = odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{{sql_float,5}, [1.3, 1.2]}]),
+
+ true = odbc_test_lib:check_row_count(2, Count),
+
+ {selected, Fields, [{Float1},{Float2}]} =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+ ["FIELD"] = odbc_test_lib:to_upper(Fileds),
+
+ case (odbc_test_lib:match_float(Float1, 1.3, 0.000001) and
+ odbc_test_lib:match_float(Float2, 1.2, 0.000001)) of
+ true ->
+ ok;
+ false ->
+ test_server:fail(float_numbers_do_not_match)
+ end,
+
+ {'EXIT',{badarg,odbc,param_query,'Params'}} =
+ (catch odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{{sql_float, 5}, [1.0, "2"]}])),
+ ok.
+
+%%-------------------------------------------------------------------------
+param_insert_real(doc)->
+ ["Test insertion of real numbers by parameterized queries."];
+param_insert_real(suite) ->
+ [];
+param_insert_real(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (FIELD REAL)"),
+
+ {updated, Count} = odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{sql_real, [1.3, 1.2]}]),
+
+ true = odbc_test_lib:check_row_count(2, Count),
+
+ %_InsertResult = ?RDBMS:param_select_real(),
+
+ {selected, Fields, [{Real1},{Real2}]} =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+ ["FIELD"] = odbc_test_lib:to_upper(Fileds),
+
+ case (odbc_test_lib:match_float(Real1, 1.3, 0.000001) and
+ odbc_test_lib:match_float(Real2, 1.2, 0.000001)) of
+ true ->
+ ok;
+ false ->
+ test_server:fail(real_numbers_do_not_match)
+ end,
+
+ {'EXIT',{badarg,odbc,param_query,'Params'}} =
+ (catch odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{sql_real,[1.0, "2"]}])),
+ ok.
+
+%%-------------------------------------------------------------------------
+param_insert_double(doc)->
+ ["Test insertion of doubles by parameterized queries."];
+param_insert_double(suite) ->
+ [];
+param_insert_double(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (FIELD DOUBLE PRECISION)"),
+
+ {updated, Count} = odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{sql_double, [1.3, 1.2]}]),
+
+ true = odbc_test_lib:check_row_count(2, Count),
+
+ {selected, Fields, [{Double1},{Double2}]} =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+
+ ["FIELD"] = odbc_test_lib:to_upper(Fileds),
+
+ case (odbc_test_lib:match_float(Double1, 1.3, 0.000001) and
+ odbc_test_lib:match_float(Double2, 1.2, 0.000001)) of
+ true ->
+ ok;
+ false ->
+ test_server:fail(double_numbers_do_not_match)
+ end,
+
+ {'EXIT',{badarg,odbc,param_query,'Params'}} =
+ (catch odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(FIELD) VALUES(?)",
+ [{sql_double, [1.0, "2"]}])),
+ ok.
+
+%%-------------------------------------------------------------------------
+param_insert_mix(doc)->
+ ["Test insertion of a mixture of datatypes by parameterized queries."];
+param_insert_mix(suite) ->
+ [];
+param_insert_mix(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID INTEGER, DATA CHARACTER VARYING(10),"
+ " PRIMARY KEY(ID))"),
+
+ {updated, Count} = odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(ID, DATA) VALUES(?, ?)",
+ [{sql_integer, [1, 2]},
+ {{sql_varchar, 10}, ["foo", "bar"]}]),
+
+ true = odbc_test_lib:check_row_count(2, Count),
+
+ InsertResult = ?RDBMS:param_select_mix(),
+
+ InsertResult =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+ ok.
+%%-------------------------------------------------------------------------
+param_update(doc)->
+ ["Test parameterized update query."];
+param_update(suite) ->
+ [];
+param_update(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID INTEGER, DATA CHARACTER VARYING(10),"
+ " PRIMARY KEY(ID))"),
+
+ {updated, Count} = odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(ID, DATA) VALUES(?, ?)",
+ [{sql_integer, [1, 2, 3]},
+ {{sql_varchar, 10},
+ ["foo", "bar", "baz"]}]),
+
+ true = odbc_test_lib:check_row_count(3, Count),
+
+ {updated, NewCount} = odbc:param_query(Ref, "UPDATE " ++ Table ++
+ " SET DATA = 'foobar' WHERE ID = ?",
+ [{sql_integer, [1, 2]}]),
+
+ true = odbc_test_lib:check_row_count(2, NewCount),
+
+ UpdateResult = ?RDBMS:param_update(),
+
+ UpdateResult =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+ ok.
+
+%%-------------------------------------------------------------------------
+delete_nonexisting_row(doc) -> % OTP-5759
+ ["Make a delete...where with false conditions (0 rows deleted). ",
+ "This used to give an error message (see ticket OTP-5759)."];
+delete_nonexisting_row(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table
+ ++ " (ID INTEGER, DATA CHARACTER VARYING(10))"),
+ {updated, Count} =
+ odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(ID, DATA) VALUES(?, ?)",
+ [{sql_integer, [1, 2, 3]},
+ {{sql_varchar, 10}, ["foo", "bar", "baz"]}]),
+
+ true = odbc_test_lib:check_row_count(3, Count),
+
+ {updated, NewCount} =
+ odbc:sql_query(Ref, "DELETE FROM " ++ Table ++ " WHERE ID = 8"),
+
+ true = odbc_test_lib:check_row_count(0, NewCount),
+
+ {updated, _} =
+ odbc:sql_query(Ref, "DROP TABLE "++ Table),
+
+ ok.
+
+%%-------------------------------------------------------------------------
+param_delete(doc) ->
+ ["Test parameterized delete query."];
+param_delete(suite) ->
+ [];
+param_delete(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID INTEGER, DATA CHARACTER VARYING(10),"
+ " PRIMARY KEY(ID))"),
+
+ {updated, Count} = odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(ID, DATA) VALUES(?, ?)",
+ [{sql_integer, [1, 2, 3]},
+ {{sql_varchar, 10},
+ ["foo", "bar", "baz"]}]),
+ true = odbc_test_lib:check_row_count(3, Count),
+
+ {updated, NewCount} = odbc:param_query(Ref, "DELETE FROM " ++ Table ++
+ " WHERE ID = ?",
+ [{sql_integer, [1, 2]}]),
+
+ true = odbc_test_lib:check_row_count(2, NewCount),
+
+ UpdateResult = ?RDBMS:param_delete(),
+
+ UpdateResult =
+ odbc:sql_query(Ref, "SELECT * FROM " ++ Table),
+ ok.
+
+
+%%-------------------------------------------------------------------------
+param_select(doc) ->
+ ["Test parameterized select query."];
+param_select(suite) ->
+ [];
+param_select(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (ID INTEGER, DATA CHARACTER VARYING(10),"
+ " PRIMARY KEY(ID))"),
+
+ {updated, Count} = odbc:param_query(Ref, "INSERT INTO " ++ Table ++
+ "(ID, DATA) VALUES(?, ?)",
+ [{sql_integer, [1, 2, 3]},
+ {{sql_varchar, 10},
+ ["foo", "bar", "foo"]}]),
+
+ true = odbc_test_lib:check_row_count(3, Count),
+
+ SelectResult = ?RDBMS:param_select(),
+
+ SelectResult = odbc:param_query(Ref, "SELECT * FROM " ++ Table ++
+ " WHERE DATA = ?",
+ [{{sql_varchar, 10}, ["foo"]}]),
+ ok.
+
+%%-------------------------------------------------------------------------
+describe_table(doc) ->
+ ["Test describe_table/[2,3]"];
+describe_table(suite) ->
+ [describe_integer, describe_string, describe_floating, describe_dec_num,
+ describe_no_such_table].
+
+%%-------------------------------------------------------------------------
+describe_integer(doc) ->
+ ["Test describe_table/[2,3] for integer columns."];
+describe_integer(suite) ->
+ [];
+describe_integer(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (int1 SMALLINT, int2 INT, int3 INTEGER)"),
+
+ Decs = ?RDBMS:describe_integer(),
+ %% Make sure to test timeout clause
+ Decs = odbc:describe_table(Ref, Table, ?TIMEOUT),
+ ok.
+
+%%-------------------------------------------------------------------------
+describe_string(doc) ->
+ ["Test describe_table/[2,3] for string columns."];
+describe_string(suite) ->
+ [];
+describe_string(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (str1 char(10), str2 character(10), "
+ "str3 CHAR VARYING(10), str4 "
+ "CHARACTER VARYING(10))"),
+
+ Decs = ?RDBMS:describe_string(),
+
+ Decs = odbc:describe_table(Ref, Table),
+ ok.
+
+%%-------------------------------------------------------------------------
+describe_floating(doc) ->
+ ["Test describe_table/[2,3] for floting columns."];
+describe_floating(suite) ->
+ [];
+describe_floating(Config) when is_list(Config) ->
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (f FLOAT(5), r REAL, "
+ "d DOUBLE PRECISION)"),
+
+ Decs = ?RDBMS:describe_floating(),
+
+ Decs = odbc:describe_table(Ref, Table),
+ ok.
+
+%%-------------------------------------------------------------------------
+describe_dec_num(doc) ->
+ ["Test describe_table/[2,3] for decimal and numerical columns"];
+describe_dec_num(suite) ->
+ [];
+describe_dec_num(Config) when is_list(Config) ->
+
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} =
+ odbc:sql_query(Ref,
+ "CREATE TABLE " ++ Table ++
+ " (dec DECIMAL(9,3), num NUMERIC(9,2))"),
+
+ Decs = ?RDBMS:describe_dec_num(),
+
+ Decs = odbc:describe_table(Ref, Table),
+ ok.
+
+
+%%-------------------------------------------------------------------------
+describe_timestamp(doc) ->
+ ["Test describe_table/[2,3] for tinmestap columns"];
+describe_timestamp(suite) ->
+ [];
+describe_timestamp(Config) when is_list(Config) ->
+
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {updated, _} = % Value == 0 || -1 driver dependent!
+ odbc:sql_query(Ref, "CREATE TABLE " ++ Table ++
+ ?RDBMS:create_timestamp_table()),
+
+ Decs = ?RDBMS:describe_timestamp(),
+
+ Decs = odbc:describe_table(Ref, Table),
+ ok.
+
+%%-------------------------------------------------------------------------
+describe_no_such_table(doc) ->
+ ["Test what happens if you try to describe a table that does not exist."];
+describe_no_such_table(suite) ->
+ [];
+describe_no_such_table(Config) when is_list(Config) ->
+
+ Ref = ?config(connection_ref, Config),
+ Table = ?config(tableName, Config),
+
+ {error, _ } = odbc:describe_table(Ref, Table),
+ ok.
+
+%%-------------------------------------------------------------------------
+%% Internal functions
+%%-------------------------------------------------------------------------
+
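+%% Driver errors are reported as strings; log and accept any such error,
+%% otherwise fail the test case.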
+is_driver_error(Error) ->
+ case is_list(Error) of
+ true ->
+ test_server:format("Driver error ~p~n", [Error]),
+ ok;
+ false ->
+ test_server:fail(Error)
+ end.
diff --git a/lib/odbc/test/odbc_start_SUITE.erl b/lib/odbc/test/odbc_start_SUITE.erl
new file mode 100644
index 0000000000..2cca8e4546
--- /dev/null
+++ b/lib/odbc/test/odbc_start_SUITE.erl
@@ -0,0 +1,147 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2007-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(odbc_start_SUITE).
+
+%% Note: This directive should only be used in test suites.
+-compile(export_all).
+
+-include("test_server.hrl").
+-include("test_server_line.hrl").
+-include("odbc_test.hrl").
+
+%% Test server callback functions
+%%--------------------------------------------------------------------
+%% Function: init_per_suite(Config) -> Config
+%% Config - [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%% Description: Initialization before the whole suite
+%%
+%% Note: This function is free to add any key/value pairs to the Config
+%% variable, but should NOT alter/remove any existing entries.
+%%--------------------------------------------------------------------
+init_per_suite(Config) ->
+ case code:which(odbc) of
+ non_existing ->
+ {skip, "No ODBC built"};
+ _ ->
+ [{tableName, odbc_test_lib:unique_table_name()} | Config]
+ end.
+
+%%--------------------------------------------------------------------
+%% Function: end_per_suite(Config) -> _
+%% Config - [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%% Description: Cleanup after the whole suite
+%%--------------------------------------------------------------------
+end_per_suite(_Config) ->
+ ok.
+%%--------------------------------------------------------------------
+%% Function: init_per_testcase(TestCase, Config) -> Config
+%% Case - atom()
+%% Name of the test case that is about to be run.
+%% Config - [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%%
+%% Description: Initialization before each test case
+%%
+%% Note: This function is free to add any key/value pairs to the Config
+%% variable, but should NOT alter/remove any existing entries.
+%%--------------------------------------------------------------------
+init_per_testcase(_TestCase, Config0) ->
+ test_server:format("ODBCINI = ~p~n", [os:getenv("ODBCINI")]),
+ Config = lists:keydelete(watchdog, 1, Config0),
+ Dog = test_server:timetrap(?TIMEOUT),
+ [{watchdog, Dog} | Config].
+
+%%--------------------------------------------------------------------
+%% Function: end_per_testcase(TestCase, Config) -> _
+%% Case - atom()
+%% Name of the test case that is about to be run.
+%% Config - [tuple()]
+%% A list of key/value pairs, holding the test case configuration.
+%% Description: Cleanup after each test case
+%%--------------------------------------------------------------------
+end_per_testcase(_TestCase, Config) ->
+ Dog = ?config(watchdog, Config),
+ case Dog of
+ undefined ->
+ ok;
+ _ ->
+ test_server:timetrap_cancel(Dog)
+ end.
+
+%%--------------------------------------------------------------------
+%% Function: all(Clause) -> TestCases
+%% Clause - atom() - suite | doc
+%% TestCases - [Case]
+%% Case - atom()
+%% Name of a test case.
+%% Description: Returns a list of all test cases in this test suite
+%%--------------------------------------------------------------------
+all(doc) ->
+ ["Test start/stop of odbc"];
+
+all(suite) ->
+ case odbc_test_lib:odbc_check() of
+ ok -> all();
+ Other -> {skip, Other}
+ end.
+
+all() ->
+ [start].
+
+
+%% Test cases starts here.
+%%--------------------------------------------------------------------
+
+start(doc) ->
+ ["Test start/stop of odbc"];
+start(suite) ->
+ [];
+start(Config) when is_list(Config) ->
+ {error,odbc_not_started} = odbc:connect(?RDBMS:connection_string(), []),
+ odbc:start(),
+ case odbc:connect(?RDBMS:connection_string(), []) of
+ {ok, Ref0} ->
+ ok = odbc:disconnect(Ref0),
+ odbc:stop(),
+ {error,odbc_not_started} =
+ odbc:connect(?RDBMS:connection_string(), []),
+ start_odbc(transient),
+ start_odbc(permanent);
+ {error, odbc_not_started} ->
+ test_server:fail(start_failed);
+ Error ->
+ test_server:format("Connection failed: ~p~n", [Error]),
+ {skip, "ODBC is not properly setup"}
+ end.
+
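+%% Helper: start the odbc application with the given start type
+%% (transient | permanent) and verify that a connection can be set up
+%% and torn down.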
+start_odbc(Type) ->
+ ok = odbc:start(Type),
+ case odbc:connect(?RDBMS:connection_string(), []) of
+ {ok, Ref} ->
+ ok = odbc:disconnect(Ref),
+ odbc:stop();
+ {error, odbc_not_started} ->
+ test_server:fail(start_failed)
+ end.
diff --git a/lib/odbc/test/odbc_test.hrl b/lib/odbc/test/odbc_test.hrl
new file mode 100644
index 0000000000..87f50043db
--- /dev/null
+++ b/lib/odbc/test/odbc_test.hrl
@@ -0,0 +1,37 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2002-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+
+% Default timetrap timeout (set in init_per_testcase).
+% This should be set relatively high (10-15 times the expected
+% max testcase time).
+-define(default_timeout, ?t:minutes(10)).
+
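+%% ?RDBMS expands to the callback module that provides the connection
+%% string and the expected results for the database used on this
+%% platform (postgres on Solaris/Linux, sqlserver on Windows).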
+-define(RDBMS, case os:type() of
+ {unix, sunos} ->
+ postgres;
+ {unix,linux} ->
+ postgres;
+ {win32, _} ->
+ sqlserver
+ end).
+
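+%% Timeout, in milliseconds, passed to the odbc calls that take an
+%% explicit timeout argument.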
+-define(TIMEOUT, 100000).
+
+
diff --git a/lib/odbc/test/odbc_test_lib.erl b/lib/odbc/test/odbc_test_lib.erl
new file mode 100644
index 0000000000..92e895eb87
--- /dev/null
+++ b/lib/odbc/test/odbc_test_lib.erl
@@ -0,0 +1,77 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2002-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(odbc_test_lib).
+
+%% Note: This directive should only be used in test suites.
+-compile(export_all).
+
+-include("odbc_test.hrl").
+-include("test_server.hrl").
+
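+%% Build a per-node table name from the node name, rewriting '@'
+%% (not legal in an SQL identifier) as "At". For example, a hypothetical
+%% node 'test@host' gives "testAthost".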
+unique_table_name() ->
+ lists:reverse(lists:foldl(fun($@, Acc) -> [$t, $A |Acc] ;
+ (X, Acc) -> [X |Acc] end,
+ [], atom_to_list(node()))).
+
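+%% True if Float is within Delta of Match; used to compare floating
+%% point results that may differ slightly between drivers.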
+match_float(Float, Match, Delta) ->
+ (Float < Match + Delta) and (Float > Match - Delta).
+
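+%% Returns ok if the ODBC tests are expected to work on this platform
+%% and word size, otherwise a string describing why they should be
+%% skipped.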
+odbc_check() ->
+ case erlang:system_info(wordsize) of
+ 4 ->
+ case test_server:os_type() of
+ {unix, sunos} ->
+ ok;
+ {unix, linux} ->
+ ok;
+ {win32, _} ->
+ ok;
+ Other ->
+ lists:flatten(
+ io_lib:format("Platform not supported: ~w",
+ [Other]))
+ end;
+ Other ->
+ case test_server:os_type() of
+ {unix, linux} ->
+ ok;
+ Platform ->
+ lists:flatten(
+ io_lib:format("Word on platform ~w size"
+ " ~w not supported", [Other,
+ Platform]))
+ end
+ end.
+
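+%% True on an exact row count match, and also when the count is
+%% undefined (presumably drivers that cannot report row counts);
+%% otherwise false.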
+check_row_count(Count, Count) ->
+ test_server:format("Correct row count Count: ~p~n", [Count]),
+ true;
+check_row_count(_, undefined) ->
+ test_server:format("Undefined row count ~n", []),
+ true;
+check_row_count(Expected, Count) ->
+ test_server:format("Incorrect row count Expected ~p Got ~p~n",
+ [Expected, Count]),
+ false.
+
+to_upper(List) ->
+ lists:map(fun(Str) -> string:to_upper(Str) end, List).
diff --git a/lib/odbc/test/oracle.erl b/lib/odbc/test/oracle.erl
new file mode 100644
index 0000000000..ebf6dbb6bf
--- /dev/null
+++ b/lib/odbc/test/oracle.erl
@@ -0,0 +1,246 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2002-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(oracle).
+
+%% Note: This directive should only be used in test suites.
+-compile(export_all).
+
+%-------------------------------------------------------------------------
+connection_string() ->
+ "DSN=Oracle8;UID=odbctest".
+
+%-------------------------------------------------------------------------
+insert_result() ->
+ {selected,["ID","DATA"],[{"1","bar"}]}.
+
+update_result() ->
+ {selected,["ID","DATA"],[{"1","foo"}]}.
+
+selected_ID(N, next) ->
+ {selected,["ID"],[{integer_to_list(N)}]};
+
+selected_ID(_, _) ->
+ {error, driver_does_not_support_function}.
+
+selected_next_N(1)->
+ {selected,["ID"],
+ [{"1"},
+ {"2"},
+ {"3"}]};
+
+selected_next_N(2)->
+ {selected,["ID"],
+ [{"4"},
+ {"5"}]}.
+
+selected_relative_N(_)->
+ {error, driver_does_not_support_function}.
+
+selected_absolute_N(_)->
+ {error, driver_does_not_support_function}.
+
+selected_list_rows() ->
+ {selected,["ID", "DATA"],[["1", "bar"],["2","foo"]]}.
+
+first_list_rows() ->
+ {error, driver_does_not_support_function}.
+last_list_rows() ->
+ {error, driver_does_not_support_function}.
+prev_list_rows() ->
+ {error, driver_does_not_support_function}.
+next_list_rows() ->
+ {selected,["ID","DATA"],[["1","bar"]]}.
+
+%% In case we get a better Oracle driver that supports this some day ...
+multiple_select()->
+ [{selected,["ID", "DATA"],[{"1", "bar"},{"2", "foo"}]},
+ {selected,["ID"],[{"foo"}]}].
+
+multiple_mix()->
+ [{updated, 1},{updated, 1},
+ {selected,["ID", "DATA"],[{"1", "foobar"},{"2", "foo"}]},
+ {updated, 1}, {selected,["DATA"],[{"foo"}]}].
+
+%-------------------------------------------------------------------------
+fixed_char_min() ->
+ 1.
+fixed_char_max() ->
+ 2000. %% Should be 255 according to the manual, but empirical tests say 2000
+
+create_fixed_char_table(Size) ->
+ " (FIELD char(" ++ integer_to_list(Size) ++ "))".
+
+%-------------------------------------------------------------------------
+var_char_min() ->
+ 1.
+var_char_max() ->
+ 2000.
+
+create_var_char_table(Size) ->
+ " (FIELD varchar2(" ++ integer_to_list(Size) ++ "))".
+
+%-------------------------------------------------------------------------
+text_min() ->
+ 1.
+text_max() ->
+ 2147483646. % 2147483647. %% 2^31 - 1
+
+create_text_table() ->
+ " (FIELD long)". %Oracle long is variable length char data
+
+%-------------------------------------------------------------------------
+create_unicode_table() ->
+ " (FIELD nvarchar(50))".
+
+%-------------------------------------------------------------------------
+create_timestamp_table() ->
+ " (FIELD DATETIME)".
+
+%-------------------------------------------------------------------------
+tiny_int_min() ->
+ -999.
+tiny_int_max() ->
+ 999.
+
+create_tiny_int_table() ->
+ " (FIELD number(3, 0))".
+
+tiny_int_min_selected() ->
+ {selected,["FIELD"],[{-999}]}.
+
+tiny_int_max_selected() ->
+ {selected,["FIELD"], [{999}]}.
+
+%-------------------------------------------------------------------------
+small_int_min() ->
+ -99999.
+small_int_max() ->
+ 99999.
+
+create_small_int_table() ->
+ " (FIELD number(5, 0))".
+
+small_int_min_selected() ->
+ {selected,["FIELD"],[{-99999}]}.
+
+small_int_max_selected() ->
+ {selected,["FIELD"], [{99999}]}.
+
+%-------------------------------------------------------------------------
+int_min() ->
+ -999999999.
+int_max() ->
+ 999999999.
+
+create_int_table() ->
+ " (FIELD number(9, 0))".
+
+int_min_selected() ->
+ {selected,["FIELD"],[{-999999999}]}.
+
+int_max_selected() ->
+ {selected,["FIELD"], [{999999999}]}.
+
+%-------------------------------------------------------------------------
+big_int_min() ->
+ -99999999999999999999999999999999999999.
+
+big_int_max() ->
+ 99999999999999999999999999999999999999.
+
+create_big_int_table() ->
+ " (FIELD number(38,0))".
+
+big_int_min_selected() ->
+ {selected,["FIELD"], [{"-99999999999999999999999999999999999999"}]}.
+
+big_int_max_selected() ->
+ {selected,["FIELD"], [{"99999999999999999999999999999999999999"}]}.
+
+%-------------------------------------------------------------------------
+float_min() ->
+ 1.40129846432481707e-45.
+
+float_max() ->
+ 3.40282346638528860e+38.
+
+create_float_table() ->
+ " (FIELD float(32))".
+
+float_underflow() ->
+ "'4.94065645841246544e-324'".
+float_overflow() ->
+ "'1.79769313486231570e+308'".
+
+float_zero_selected() ->
+ {selected,["FIELD"],[{0.00000e+0}]}.
+
+%-------------------------------------------------------------------------
+param_select_small_int() ->
+ {selected,["FIELD"],[{"1"}, {"2"}]}.
+
+param_select_int() ->
+ Int = small_int_max() + 1,
+ {selected,["FIELD"],[{"1"}, {integer_to_list(Int)}]}.
+
+param_select_decimal() ->
+ {selected,["FIELD"],[{1},{2}]}.
+
+param_select_numeric() ->
+ {selected,["FIELD"],[{1},{2}]}.
+
+param_select_float() ->
+ {selected,["FIELD"],[{1.30000},{1.20000}]}.
+
+param_select_real() ->
+ {selected,["FIELD"],[{1.30000},{1.20000}]}.
+
+param_select_double() ->
+ {selected,["FIELD"],[{1.30000},{1.20000}]}.
+
+param_select_mix() ->
+ {selected,["ID","DATA"],[{"1", "foo"}, {"2", "bar"}]}.
+
+param_update() ->
+ {selected,["ID","DATA"],[{"1", "foobar"}, {"2", "foobar"}, {"3", "baz"}]}.
+
+param_delete() ->
+ {selected,["ID","DATA"],[{"3", "baz"}]}.
+
+param_select() ->
+ {selected,["ID","DATA"],[{"1", "foo"},{"3", "foo"}]}.
+
+%-------------------------------------------------------------------------
+describe_integer() ->
+ {ok,[{"INT1",{sql_decimal,38,0}},{"INT2",{sql_decimal,38,0}},
+ {"INT3",{sql_decimal,38,0}}]}.
+
+describe_string() ->
+ {ok,[{"STR1",{sql_char,10}},
+ {"STR2",{sql_char,10}},
+ {"STR3",{sql_varchar,10}},
+ {"STR4",{sql_varchar,10}}]}.
+
+describe_floating() ->
+ {ok,[{"F",sql_double},{"R",sql_double},{"D",sql_double}]}.
+describe_dec_num() ->
+ {ok,[{"DEC",{sql_decimal,9,3}},{"NUM",{sql_decimal,9,2}}]}.
diff --git a/lib/odbc/test/postgres.erl b/lib/odbc/test/postgres.erl
new file mode 100644
index 0000000000..169ca26e43
--- /dev/null
+++ b/lib/odbc/test/postgres.erl
@@ -0,0 +1,294 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2006-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(postgres).
+
+%% Note: This directive should only be used in test suites.
+-compile(export_all).
+
+%-------------------------------------------------------------------------
+connection_string() ->
+ case test_server:os_type() of
+ {unix, sunos} ->
+ "DSN=Postgres;UID=odbctest";
+ {unix, linux} ->
+ Size = erlang:system_info(wordsize),
+ linux_dist_connection_string(Size)
+ end.
+
+linux_dist_connection_string(4) ->
+ case linux_dist() of
+ "ubuntu" ->
+ "DSN=PostgresLinuxUbuntu;UID=odbctest";
+ _ ->
+ "DSN=PostgresLinux;UID=odbctest"
+ end;
+
+linux_dist_connection_string(_) ->
+ "DSN=PostgresLinux64;UID=odbctest".
+
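+%% Determine the Linux distribution from the first word of /etc/issue;
+%% returns the lowercased name, or 'other' if the file cannot be read.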
+linux_dist() ->
+ case file:read_file("/etc/issue") of
+ {ok, Binary} ->
+ [Dist | _ ] = string:tokens(binary_to_list(Binary), " "),
+ string:to_lower(Dist);
+ {error, _} ->
+ other
+ end.
+
+
+%-------------------------------------------------------------------------
+insert_result() ->
+ {selected,["id","data"],[{1,"bar"}]}.
+
+update_result() ->
+ {selected,["id","data"],[{1,"foo"}]}.
+
+selected_ID(N, next) ->
+ {selected,["id"],[{N}]};
+
+selected_ID(_, _) ->
+ {error, driver_does_not_support_function}.
+
+selected_next_N(1)->
+ {selected,["id"],
+ [{1},
+ {2},
+ {3}]};
+
+selected_next_N(2)->
+ {selected,["id"],
+ [{4},
+ {5}]}.
+
+selected_relative_N(_)->
+ {error, driver_does_not_support_function}.
+
+selected_absolute_N(_)->
+ {error, driver_does_not_support_function}.
+
+selected_list_rows() ->
+ {selected,["id", "data"],[[1, "bar"],[2,"foo"]]}.
+
+first_list_rows() ->
+ {error, driver_does_not_support_function}.
+last_list_rows() ->
+ {error, driver_does_not_support_function}.
+prev_list_rows() ->
+ {error, driver_does_not_support_function}.
+next_list_rows() ->
+ {selected,["id","data"],[[1,"bar"]]}.
+
+%% In case we get a better postgres driver that supports this some day ...
+multiple_select()->
+ [{selected,["id", "data"],[{1, "bar"},{2, "foo"}]},
+ {selected,["id"],[{"foo"}]}].
+
+multiple_mix()->
+ [{updated, 1},{updated, 1},
+ {selected,["id", "data"],[{1, "foobar"},{2, "foo"}]},
+ {updated, 1}, {selected,["data"],[{"foo"}]}].
+
+%-------------------------------------------------------------------------
+fixed_char_min() ->
+ 1.
+fixed_char_max() ->
+ 2000.
+
+create_fixed_char_table(Size) ->
+ " (FIELD char(" ++ integer_to_list(Size) ++ "))".
+
+%-------------------------------------------------------------------------
+var_char_min() ->
+ 1.
+var_char_max() ->
+ 2000.
+
+create_var_char_table(Size) ->
+ " (FIELD varchar(" ++ integer_to_list(Size) ++ "))".
+
+%-------------------------------------------------------------------------
+text_min() ->
+ 1.
+text_max() ->
+ 2147483646. % 2147483647. %% 2^31 - 1
+
+create_text_table() ->
+ " (FIELD text)".
+
+%-------------------------------------------------------------------------
+create_unicode_table() ->
+ " (FIELD text)".
+
+%-------------------------------------------------------------------------
+create_timestamp_table() ->
+ " (FIELD TIMESTAMP)".
+
+%-------------------------------------------------------------------------
+small_int_min() ->
+ -32768.
+small_int_max() ->
+ 32767.
+
+create_small_int_table() ->
+ " (FIELD smallint)".
+
+small_int_min_selected() ->
+ {selected,["field"],[{-32768}]}.
+
+small_int_max_selected() ->
+ {selected,["field"], [{32767}]}.
+
+%-------------------------------------------------------------------------
+int_min() ->
+ -2147483648.
+int_max() ->
+ 2147483647.
+
+create_int_table() ->
+ " (FIELD int)".
+
+int_min_selected() ->
+ {selected,["field"],[{-2147483648}]}.
+
+int_max_selected() ->
+ {selected,["field"], [{2147483647}]}.
+
+%-------------------------------------------------------------------------
+big_int_min() ->
+ -9223372036854775808.
+
+big_int_max() ->
+ 9223372036854775807.
+
+create_big_int_table() ->
+ " (FIELD bigint )".
+
+big_int_min_selected() ->
+ {selected,["field"], [{"-9223372036854775808"}]}.
+
+big_int_max_selected() ->
+ {selected,["field"], [{"9223372036854775807"}]}.
+
+%-------------------------------------------------------------------------
+bit_false() ->
+ 0.
+bit_true() ->
+ 1.
+
+create_bit_table() ->
+ " (FIELD bit)".
+
+bit_false_selected() ->
+ {selected,["field"],[{"0"}]}.
+
+bit_true_selected() ->
+ {selected,["field"], [{"1"}]}.
+
+%-------------------------------------------------------------------------
+float_min() ->
+ 1.79e-307.
+float_max() ->
+ 1.79e+308.
+
+create_float_table() ->
+ " (FIELD float)".
+
+float_underflow() ->
+ "1.80e-308".
+float_overflow() ->
+ "1.80e+308".
+
+float_zero_selected() ->
+ {selected,["field"],[{0.00000e+0}]}.
+
+%-------------------------------------------------------------------------
+real_min() ->
+ -3.40e+38.
+real_max() ->
+ 3.40e+38.
+
+real_underflow() ->
+ "-3.41e+38".
+
+real_overflow() ->
+ "3.41e+38".
+
+create_real_table() ->
+ " (FIELD real)".
+
+real_zero_selected() ->
+ {selected,["field"],[{0.00000e+0}]}.
+
+%-------------------------------------------------------------------------
+param_select_small_int() ->
+ {selected,["field"],[{1}, {2}]}.
+
+param_select_int() ->
+ Int = small_int_max() + 1,
+ {selected,["field"],[{1}, {Int}]}.
+
+param_select_decimal() ->
+ {selected,["field"],[{1},{2}]}.
+
+param_select_numeric() ->
+ {selected,["field"],[{1},{2}]}.
+
+param_select_float() ->
+ {selected,["field"],[{1.30000},{1.20000}]}.
+
+param_select_real() ->
+ {selected,["field"],[{1.30000},{1.20000}]}.
+
+param_select_double() ->
+ {selected,["field"],[{1.30000},{1.20000}]}.
+
+param_select_mix() ->
+ {selected,["id","data"],[{1, "foo"}, {2, "bar"}]}.
+
+param_update() ->
+ {selected,["id","data"],[{3, "baz"},{1, "foobar"}, {2, "foobar"}]}.
+
+param_delete() ->
+ {selected,["id","data"],[{3, "baz"}]}.
+
+param_select() ->
+ {selected,["id","data"],[{1, "foo"},{3, "foo"}]}.
+
+%-------------------------------------------------------------------------
+describe_integer() ->
+ {ok,[{"int1",sql_smallint},
+ {"int2",sql_integer},
+ {"int3",sql_integer}]}.
+
+describe_string() ->
+ {ok,[{"str1",{sql_char,10}},
+ {"str2",{sql_char,10}},
+ {"str3",{sql_varchar,10}},
+ {"str4",{sql_varchar,10}}]}.
+
+describe_floating() ->
+ {ok,[{"f",sql_real},{"r",sql_real},{"d",{sql_float,15}}]}.
+describe_dec_num() ->
+ {ok,[{"dec",{sql_numeric,9,3}},{"num",{sql_numeric,9,2}}]}.
+
+describe_timestamp() ->
+ {ok, [{"field", sql_timestamp}]}.
diff --git a/lib/odbc/test/sqlserver.erl b/lib/odbc/test/sqlserver.erl
new file mode 100644
index 0000000000..e3fe30e0bc
--- /dev/null
+++ b/lib/odbc/test/sqlserver.erl
@@ -0,0 +1,298 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2002-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(sqlserver).
+
+%% Note: This directive should only be used in test suites.
+-compile(export_all).
+
+%-------------------------------------------------------------------------
+connection_string() ->
+ "DSN=sql-server;UID=odbctest;PWD=gurka".
+
+%-------------------------------------------------------------------------
+insert_result() ->
+ {selected,["ID","DATA"],[{1,"bar"}]}.
+
+update_result() ->
+ {selected,["ID","DATA"],[{1,"foo"}]}.
+
+selected_ID(N, _) ->
+ {selected,["ID"],[{N}]}.
+
+selected_next_N(1)->
+ {selected,["ID"],
+ [{1},
+ {2},
+ {3}]};
+
+selected_next_N(2)->
+ {selected,["ID"],
+ [{4},
+ {5}]}.
+
+selected_relative_N(1)->
+ {selected,["ID"],
+ [{2},
+ {3},
+ {4}]};
+
+selected_relative_N(2)->
+ {selected,["ID"],
+ [{7},
+ {8}]}.
+
+selected_absolute_N(1)->
+ {selected,["ID"],
+ [{1},
+ {2},
+ {3}]};
+
+selected_absolute_N(2)->
+ {selected,["ID"],
+ [{1},
+ {2},
+ {3},
+ {4},
+ {5}]}.
+
+selected_list_rows() ->
+ {selected,["ID", "DATA"],[[1, "bar"],[2,"foo"]]}.
+
+first_list_rows() ->
+ {selected,["ID", "DATA"],[[1, "bar"]]}.
+last_list_rows() ->
+ {selected,["ID", "DATA"],[[2, "foo"]]}.
+prev_list_rows() ->
+ {selected,["ID", "DATA"],[[1, "bar"]]}.
+next_list_rows() ->
+ {selected,["ID", "DATA"],[[2, "foo"]]}.
+
+multiple_select()->
+ [{selected,["ID", "DATA"],[{1, "bar"},{2, "foo"}]},
+ {selected,["DATA"],[{"foo"}]}].
+
+multiple_mix()->
+ [{updated, 1},{updated, 1},
+ {selected,["ID", "DATA"],[{1, "foobar"},{2, "foo"}]},
+ {updated, 1}, {selected,["DATA"],[{"foo"}]}].
+
+%-------------------------------------------------------------------------
+fixed_char_min() ->
+ 1.
+
+fixed_char_max() ->
+ 8000.
+
+create_fixed_char_table(Size) ->
+ " (FIELD char(" ++ integer_to_list(Size) ++ "))".
+
+%-------------------------------------------------------------------------
+var_char_min() ->
+ 1.
+var_char_max() ->
+ 8000.
+
+create_var_char_table(Size) ->
+ " (FIELD varchar(" ++ integer_to_list(Size) ++ "))".
+%-------------------------------------------------------------------------
+text_min() ->
+ 1.
+text_max() ->
+ 2147483647. %% 2^31 - 1
+
+create_text_table() ->
+ " (FIELD text)".
+
+%-------------------------------------------------------------------------
+create_unicode_table() ->
+ " (FIELD nvarchar(50))".
+
+%-------------------------------------------------------------------------
+create_timestamp_table() ->
+ " (FIELD DATETIME)".
+
+%-------------------------------------------------------------------------
+tiny_int_min() ->
+ 0.
+tiny_int_max() ->
+ 255.
+
+create_tiny_int_table() ->
+ " (FIELD tinyint)".
+
+tiny_int_min_selected() ->
+ {selected,["FIELD"],[{tiny_int_min()}]}.
+
+tiny_int_max_selected() ->
+ {selected,["FIELD"], [{tiny_int_max()}]}.
+
+%-------------------------------------------------------------------------
+small_int_min() ->
+ -32768. % -2^15
+small_int_max() ->
+ 32767. % 2^15-1
+
+create_small_int_table() ->
+ " (FIELD smallint)".
+
+small_int_min_selected() ->
+ {selected,["FIELD"],[{small_int_min()}]}.
+
+small_int_max_selected() ->
+ {selected,["FIELD"], [{small_int_max()}]}.
+
+%-------------------------------------------------------------------------
+int_min() ->
+ -2147483648. % -2^31
+int_max() ->
+ 2147483647. % 2^31-1
+
+create_int_table() ->
+ " (FIELD int)".
+
+int_min_selected() ->
+ {selected,["FIELD"],[{int_min()}]}.
+
+int_max_selected() ->
+ {selected,["FIELD"], [{int_max()}]}.
+
+%-------------------------------------------------------------------------
+big_int_min() ->
+ -9223372036854775808. % -2^63
+big_int_max() ->
+ 9223372036854775807. % 2^63-1
+
+create_big_int_table() ->
+ " (FIELD bigint)".
+
+big_int_min_selected() ->
+ {selected,["FIELD"],[{integer_to_list(big_int_min())}]}.
+
+big_int_max_selected() ->
+ {selected,["FIELD"], [{integer_to_list(big_int_max())}]}.
+
+%-------------------------------------------------------------------------
+bit_false() ->
+ 0.
+bit_true() ->
+ 1.
+
+create_bit_table() ->
+ " (FIELD bit)".
+
+bit_false_selected() ->
+ {selected,["FIELD"],[{false}]}.
+
+bit_true_selected() ->
+ {selected,["FIELD"], [{true}]}.
+%-------------------------------------------------------------------------
+float_min() ->
+ -1.79e+308.
+float_max() ->
+ 1.79e+308.
+
+float_underflow() ->
+ "'-1.80e+308'".
+
+float_overflow() ->
+ "'-1.80e+308'".
+
+create_float_table() ->
+ " (FIELD float)".
+
+float_zero_selected() ->
+ {selected,["FIELD"],[{0.00000e+0}]}.
+%-------------------------------------------------------------------------
+real_min() ->
+ -3.40e+38.
+real_max() ->
+ 3.40e+38.
+
+real_underflow() ->
+ -3.41e+38.
+
+real_overflow() ->
+ 3.41e+38.
+
+create_real_table() ->
+ " (FIELD real)".
+
+real_zero_selected() ->
+ {selected,["FIELD"],[{0.00000e+0}]}.
+%-------------------------------------------------------------------------
+param_select_tiny_int() ->
+ {selected,["FIELD"],[{1}, {2}]}.
+
+param_select_small_int() ->
+ {selected,["FIELD"],[{1}, {2}]}.
+
+param_select_int() ->
+ Int = small_int_max() + 1,
+ {selected,["FIELD"],[{1}, {Int}]}.
+
+param_select_decimal() ->
+ {selected,["FIELD"],[{1},{2}]}.
+
+param_select_numeric() ->
+ {selected,["FIELD"],[{1},{2}]}.
+
+param_select_float() ->
+ {selected,["FIELD"],[{1.30000},{1.20000}]}.
+
+param_select_real() ->
+ {selected,["FIELD"],[{1.30000},{1.20000}]}.
+
+param_select_double() ->
+ {selected,["FIELD"],[{1.30000},{1.20000}]}.
+
+param_select_mix() ->
+ {selected,["ID","DATA"],[{1, "foo"}, {2, "bar"}]}.
+
+param_update() ->
+ {selected,["ID","DATA"],[{1, "foobar"}, {2, "foobar"}, {3, "baz"}]}.
+
+param_delete() ->
+ {selected,["ID","DATA"],[{3, "baz"}]}.
+
+param_select() ->
+ {selected,["ID","DATA"],[{1, "foo"},{3, "foo"}]}.
+
+%-------------------------------------------------------------------------
+
+describe_integer() ->
+ {ok,[{"int1", sql_smallint},{"int2", sql_integer},
+ {"int3", sql_integer}]}.
+
+describe_string() ->
+ {ok,[{"str1",{sql_char,10}},
+ {"str2",{sql_char,10}},
+ {"str3",{sql_varchar,10}},
+ {"str4",{sql_varchar,10}}]}.
+
+describe_floating() ->
+ {ok,[{"f", sql_real},{"r", sql_real}, {"d", {sql_float, 53}}]}.
+
+describe_dec_num() ->
+ {ok,[{"dec",{sql_decimal,9,3}},{"num",{sql_numeric,9,2}}]}.
+
+describe_timestamp() ->
+ {ok, [{"field", sql_timestamp}]}.
diff --git a/lib/parsetools/src/leex.erl b/lib/parsetools/src/leex.erl
index fd494eaf06..d0b4b9efe7 100644
--- a/lib/parsetools/src/leex.erl
+++ b/lib/parsetools/src/leex.erl
@@ -35,7 +35,7 @@
-export([compile/3,file/1,file/2,format_error/1]).
-import(lists, [member/2,reverse/1,sort/1,delete/2,
- keysearch/3,keysort/2,keydelete/3,keyfind/3,
+ keysort/2,keydelete/3,keyfind/3,
map/2,foldl/3,foreach/2,flatmap/2]).
-import(string, [substr/2,substr/3,span/2]).
-import(ordsets, [is_element/2,add_element/2,union/2]).
@@ -182,18 +182,18 @@ options(Options0, [Key|Keys], L) when is_list(Options0) ->
false ->
Options0
end,
- V = case keysearch(Key, 1, Options) of
- {value, {Key, Filename0}} when Key =:= includefile;
- Key =:= scannerfile ->
+ V = case lists:keyfind(Key, 1, Options) of
+ {Key, Filename0} when Key =:= includefile;
+ Key =:= scannerfile ->
case is_filename(Filename0) of
no ->
badarg;
Filename ->
{ok,[{Key,Filename}]}
end;
- {value,{Key,Bool}} when Bool; not Bool ->
- {ok,[{Key, Bool}]};
- {value,{Key, _}} ->
+ {Key, Bool} = KB when is_boolean(Bool) ->
+ {ok, [KB]};
+ {Key, _} ->
badarg;
false ->
{ok,[{Key,default_option(Key)}]}
@@ -231,8 +231,7 @@ atom_option(verbose) -> {verbose,true};
atom_option(Key) -> Key.
is_filename(T) ->
- try filename:flatten(T) of
- Filename -> Filename
+ try filename:flatten(T)
catch error: _ -> no
end.
@@ -320,10 +319,10 @@ filenames(File, Opts, St0) ->
St1 = St0#leex{xfile=Xfile,
opts=Opts,
module=Module},
- {value,{includefile,Ifile0}} = keysearch(includefile, 1, Opts),
+ {includefile,Ifile0} = lists:keyfind(includefile, 1, Opts),
Ifile = inc_file_name(Ifile0),
%% Test for explicit scanner file.
- {value,{scannerfile,Ofile}} = keysearch(scannerfile, 1, Opts),
+ {scannerfile,Ofile} = lists:keyfind(scannerfile, 1, Opts),
if
Ofile =:= [] ->
St1#leex{efile=filename:join(Dir, Efile),
@@ -495,7 +494,7 @@ parse_rule(S, Line, Atoks, Ms, N, St) ->
end.
var_used(Name, Toks) ->
- case keyfind(Name, 3, Toks) of
+ case lists:keyfind(Name, 3, Toks) of
{var,_,Name} -> true; %It's the var we want
_ -> false
end.
@@ -629,7 +628,7 @@ re_seq(Cs0, Sn0, St) ->
{Rs,Sn1,Cs1} -> {{seq,Rs},Sn1,Cs1}
end.
-re_seq1([C|_]=Cs0, Sn0, St) when C /= $|, C /= $) ->
+re_seq1([C|_]=Cs0, Sn0, St) when C =/= $|, C =/= $) ->
{L,Sn1,Cs1} = re_repeat(Cs0, Sn0, St),
{Rs,Sn2,Cs2} = re_seq1(Cs1, Sn1, St),
{[L|Rs],Sn2,Cs2};
@@ -751,9 +750,9 @@ re_char_class("[:" ++ Cs0, Cc, #leex{posix=true}=St) ->
{Pcl,":]" ++ Cs1} -> re_char_class(Cs1, [{posix,Pcl}|Cc], St);
{_,Cs1} -> parse_error({posix_cc,string_between(Cs0, Cs1)})
end;
-re_char_class([C1|Cs0], Cc, St) when C1 /= $] ->
+re_char_class([C1|Cs0], Cc, St) when C1 =/= $] ->
case re_char(C1, Cs0) of
- {Cf,[$-,C2|Cs1]} when C2 /= $] ->
+ {Cf,[$-,C2|Cs1]} when C2 =/= $] ->
case re_char(C2, Cs1) of
{Cl,Cs2} when Cf < Cl ->
re_char_class(Cs2, [{range,Cf,Cl}|Cc], St);
@@ -998,7 +997,7 @@ pack_crs([{C1,C2},{C3,C4}|Crs]) when C2 >= C3, C2 < C4 ->
%% C1 C2
%% C3 C4
pack_crs([{C1,C4}|Crs]);
-pack_crs([{C1,C2},{C3,C4}|Crs]) when C2 + 1 == C3 ->
+pack_crs([{C1,C2},{C3,C4}|Crs]) when C2 + 1 =:= C3 ->
%% C1 C2
%% C3 C4
pack_crs([{C1,C4}|Crs]);
@@ -1055,7 +1054,7 @@ build_dfa(Set, Us, N, Ts, Ms, NFA) ->
%% List of all transition sets.
Crs0 = [Cr || S <- Set,
{Crs,_St} <- (element(S, NFA))#nfa_state.edges,
- Crs /= epsilon, % Not an epsilon transition
+ Crs =/= epsilon, % Not an epsilon transition
Cr <- Crs ],
Crs1 = lists:usort(Crs0), % Must remove duplicates!
%% Build list of disjoint test ranges.
@@ -1072,7 +1071,7 @@ disjoint_crs([{_C1,C2}=Cr1,{C3,_C4}=Cr2|Crs]) when C2 < C3 ->
%% C1 C2
%% C3 C4
[Cr1|disjoint_crs([Cr2|Crs])];
-disjoint_crs([{C1,C2},{C3,C4}|Crs]) when C1 == C3 ->
+disjoint_crs([{C1,C2},{C3,C4}|Crs]) when C1 =:= C3 ->
%% C1 C2
%% C3 C4
[{C1,C2}|disjoint_crs(add_element({C2+1,C4}, Crs))];
@@ -1080,7 +1079,7 @@ disjoint_crs([{C1,C2},{C3,C4}|Crs]) when C1 < C3, C2 >= C3, C2 < C4 ->
%% C1 C2
%% C3 C4
[{C1,C3-1}|disjoint_crs(union([{C3,C2},{C2+1,C4}], Crs))];
-disjoint_crs([{C1,C2},{C3,C4}|Crs]) when C1 < C3, C2 == C4 ->
+disjoint_crs([{C1,C2},{C3,C4}|Crs]) when C1 < C3, C2 =:= C4 ->
%% C1 C2
%% C3 C4
[{C1,C3-1}|disjoint_crs(add_element({C3,C4}, Crs))];
@@ -1093,7 +1092,7 @@ disjoint_crs([]) -> [].
build_dfa([Cr|Crs], Set, Us, N, Ts, Ms, NFA) ->
case eclosure(move(Set, Cr, NFA), NFA) of
- S when S /= [] ->
+ S when S =/= [] ->
case dfa_state_exist(S, Us, Ms) of
{yes,T} ->
build_dfa(Crs, Set, Us, N, store(Cr, T, Ts), Ms, NFA);
@@ -1110,11 +1109,11 @@ build_dfa([], _, Us, N, Ts, _, _) ->
%% dfa_state_exist(Set, Unmarked, Marked) -> {yes,State} | no.
dfa_state_exist(S, Us, Ms) ->
- case keysearch(S, #dfa_state.nfa, Us) of
- {value,#dfa_state{no=T}} -> {yes,T};
+ case lists:keyfind(S, #dfa_state.nfa, Us) of
+ #dfa_state{no=T} -> {yes,T};
false ->
- case keysearch(S, #dfa_state.nfa, Ms) of
- {value,#dfa_state{no=T}} -> {yes,T};
+ case lists:keyfind(S, #dfa_state.nfa, Ms) of
+ #dfa_state{no=T} -> {yes,T};
false -> no
end
end.
@@ -1129,7 +1128,7 @@ eclosure(Sts, NFA) -> eclosure(Sts, NFA, []).
eclosure([St|Sts], NFA, Ec) ->
#nfa_state{edges=Es} = element(St, NFA),
eclosure([ N || {epsilon,N} <- Es,
- not is_element(N, Ec) ] ++ Sts,
+ not is_element(N, Ec) ] ++ Sts,
NFA, add_element(St, Ec));
eclosure([], _, Ec) -> Ec.
@@ -1137,7 +1136,7 @@ move(Sts, Cr, NFA) ->
%% io:fwrite("move1: ~p\n", [{Sts,Cr}]),
[ St || N <- Sts,
{Crs,St} <- (element(N, NFA))#nfa_state.edges,
- Crs /= epsilon, % Not an epsilon transition
+ Crs =/= epsilon, % Not an epsilon transition
in_crs(Cr, Crs) ].
in_crs({C1,C2}, [{C3,C4}|_Crs]) when C1 >= C3, C2 =< C4 -> true;
@@ -1436,7 +1435,7 @@ pack_trans([{{$\n,Cl},S}|Trs], Pt) ->
pack_trans([{{Cf,Cl},S}|Trs], Pt) when Cf < $\n, Cl > $\n ->
pack_trans([{{Cf,$\n-1},S},{{$\n+1,Cl},S}|Trs], [{$\n,S}|Pt]);
%% Small ranges become singletons.
-pack_trans([{{Cf,Cl},S}|Trs], Pt) when Cl == Cf + 1 ->
+pack_trans([{{Cf,Cl},S}|Trs], Pt) when Cl =:= Cf + 1 ->
pack_trans(Trs, [{Cf,S},{Cl,S}|Pt]);
pack_trans([Tr|Trs], Pt) -> % The default uninteresting case
pack_trans(Trs, Pt ++ [Tr]);
diff --git a/lib/parsetools/vsn.mk b/lib/parsetools/vsn.mk
index b1354e89d8..f3e2dc0fb4 100644
--- a/lib/parsetools/vsn.mk
+++ b/lib/parsetools/vsn.mk
@@ -1 +1 @@
-PARSETOOLS_VSN = 2.0.2
+PARSETOOLS_VSN = 2.0.3
diff --git a/lib/public_key/asn1/OTP-PKIX.asn1 b/lib/public_key/asn1/OTP-PKIX.asn1
index 2bcacc0990..c0cf440496 100644
--- a/lib/public_key/asn1/OTP-PKIX.asn1
+++ b/lib/public_key/asn1/OTP-PKIX.asn1
@@ -313,7 +313,7 @@ SupportedPublicKeyAlgorithms PUBLIC-KEY-ALGORITHM-CLASS ::= {
dsa-with-sha1 SIGNATURE-ALGORITHM-CLASS ::= {
ID id-dsa-with-sha1
- TYPE NULL } -- XXX Must be empty and not NULL
+ TYPE Dss-Parms }
--
-- RSA Keys and Signatures
diff --git a/lib/public_key/doc/src/notes.xml b/lib/public_key/doc/src/notes.xml
index 33a424f432..13a9151869 100644
--- a/lib/public_key/doc/src/notes.xml
+++ b/lib/public_key/doc/src/notes.xml
@@ -33,6 +33,35 @@
<rev>A</rev>
<file>notes.xml</file>
</header>
+
+<section><title>Public_Key 0.7</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Certificates without any extensions could not be handled
+ by public_key.</p>
+ <p>
+ Own Id: OTP-8626</p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Code cleanup and minor bugfixes.</p>
+ <p>
+ Own Id: OTP-8649</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Public_Key 0.6</title>
<section><title>Improvements and New Features</title>
diff --git a/lib/public_key/src/pubkey_cert.erl b/lib/public_key/src/pubkey_cert.erl
index 799e3820d1..0651dcec29 100644
--- a/lib/public_key/src/pubkey_cert.erl
+++ b/lib/public_key/src/pubkey_cert.erl
@@ -29,7 +29,7 @@
validate_issuer/4, validate_names/6,
validate_revoked_status/3, validate_extensions/4,
validate_unknown_extensions/3,
- normalize_general_name/1, digest_type/1, digest/2, is_self_signed/1,
+ normalize_general_name/1, digest_type/1, is_self_signed/1,
is_issuer/2, issuer_id/2, is_fixed_dh_cert/1]).
-define(NULL, 0).
@@ -130,7 +130,7 @@ validate_signature(OtpCert, DerCert, Key, KeyParams,
validate_names(OtpCert, Permit, Exclude, Last, AccErr, Verify) ->
case is_self_signed(OtpCert) andalso (not Last) of
true ->
- ok;
+ AccErr;
false ->
TBSCert = OtpCert#'OTPCertificate'.tbsCertificate,
Subject = TBSCert#'OTPTBSCertificate'.subject,
@@ -197,7 +197,7 @@ normalize_general_name({rdnSequence, Issuer}) ->
normalize_general_name(Issuer) ->
Normalize = fun([{Description, Type, {printableString, Value}}]) ->
NewValue = string:to_lower(strip_spaces(Value)),
- {Description, Type, {printableString, NewValue}};
+ [{Description, Type, {printableString, NewValue}}];
(Atter) ->
Atter
end,
@@ -275,13 +275,6 @@ digest_type(?md5WithRSAEncryption) ->
digest_type(?'id-dsa-with-sha1') ->
sha.
-digest(?sha1WithRSAEncryption, Msg) ->
- crypto:sha(Msg);
-digest(?md5WithRSAEncryption, Msg) ->
- crypto:md5(Msg);
-digest(?'id-dsa-with-sha1', Msg) ->
- crypto:sha(Msg).
-
public_key_info(PublicKeyInfo,
#path_validation_state{working_public_key_algorithm =
WorkingAlgorithm,
@@ -332,12 +325,6 @@ is_dir_name([[{'AttributeTypeAndValue', Type, What1}]|Rest1],
true -> is_dir_name(Rest1,Rest2,Exact);
false -> false
end;
-is_dir_name([{'AttributeTypeAndValue', Type, What1}|Rest1],
- [{'AttributeTypeAndValue', Type, What2}|Rest2], Exact) ->
- case is_dir_name2(What1,What2) of
- true -> is_dir_name(Rest1,Rest2,Exact);
- false -> false
- end;
is_dir_name(_,[],false) ->
true;
is_dir_name(_,_,_) ->
diff --git a/lib/public_key/src/pubkey_cert_records.erl b/lib/public_key/src/pubkey_cert_records.erl
index c7d4080adb..ac04e1c2cb 100644
--- a/lib/public_key/src/pubkey_cert_records.erl
+++ b/lib/public_key/src/pubkey_cert_records.erl
@@ -25,8 +25,6 @@
-export([decode_cert/2, encode_cert/1, encode_tbs_cert/1, transform/2]).
--export([old_decode_cert/2, old_encode_cert/1]). %% Debugging and testing new code.
-
%%====================================================================
%% Internal application API
%%====================================================================
@@ -35,77 +33,25 @@ decode_cert(DerCert, plain) ->
'OTP-PUB-KEY':decode('Certificate', DerCert);
decode_cert(DerCert, otp) ->
{ok, Cert} = 'OTP-PUB-KEY':decode('OTPCertificate', DerCert),
- {ok, decode_all_otp(Cert)}.
-
-old_decode_cert(DerCert, otp) ->
- {ok, Cert} = 'OTP-PUB-KEY':decode('Certificate', DerCert),
- {ok, plain_to_otp(Cert)}.
-
-old_encode_cert(Cert) ->
- PlainCert = otp_to_plain(Cert),
- {ok, EncCert} = 'OTP-PUB-KEY':encode('Certificate', PlainCert),
- list_to_binary(EncCert).
-
+ #'OTPCertificate'{tbsCertificate = TBS} = Cert,
+ {ok, Cert#'OTPCertificate'{tbsCertificate = decode_tbs(TBS)}}.
encode_cert(Cert = #'Certificate'{}) ->
{ok, EncCert} = 'OTP-PUB-KEY':encode('Certificate', Cert),
list_to_binary(EncCert);
-encode_cert(C = #'OTPCertificate'{tbsCertificate = TBS =
- #'OTPTBSCertificate'{
- issuer=Issuer0,
- subject=Subject0,
- subjectPublicKeyInfo=Spki0,
- extensions=Exts0}
- }) ->
- Issuer = transform(Issuer0,encode),
- Subject = transform(Subject0,encode),
- Spki = encode_supportedPublicKey(Spki0),
- Exts = encode_extensions(Exts0),
- %% io:format("Extensions ~p~n",[Exts]),
- Cert = C#'OTPCertificate'{tbsCertificate=
- TBS#'OTPTBSCertificate'{
- issuer=Issuer, subject=Subject,
- subjectPublicKeyInfo=Spki,
- extensions=Exts}},
+encode_cert(C = #'OTPCertificate'{tbsCertificate = TBS}) ->
+ Cert = C#'OTPCertificate'{tbsCertificate=encode_tbs(TBS)},
{ok, EncCert} = 'OTP-PUB-KEY':encode('OTPCertificate', Cert),
list_to_binary(EncCert).
-encode_tbs_cert(TBS = #'OTPTBSCertificate'{
- issuer=Issuer0,
- subject=Subject0,
- subjectPublicKeyInfo=Spki0,
- extensions=Exts0}) ->
- Issuer = transform(Issuer0,encode),
- Subject = transform(Subject0,encode),
- Spki = encode_supportedPublicKey(Spki0),
- Exts = encode_extensions(Exts0),
- TBSCert = TBS#'OTPTBSCertificate'{issuer=Issuer,subject=Subject,
- subjectPublicKeyInfo=Spki,extensions=Exts},
- {ok, EncTBSCert} = 'OTP-PUB-KEY':encode('OTPTBSCertificate', TBSCert),
+encode_tbs_cert(TBS) ->
+ {ok, EncTBSCert} = 'OTP-PUB-KEY':encode('OTPTBSCertificate', encode_tbs(TBS)),
list_to_binary(EncTBSCert).
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
-decode_all_otp(C = #'OTPCertificate'{tbsCertificate = TBS =
- #'OTPTBSCertificate'{
- issuer=Issuer0,
- subject=Subject0,
- subjectPublicKeyInfo=Spki0,
- extensions=Exts0}
- }) ->
- Issuer = transform(Issuer0,decode),
- Subject = transform(Subject0,decode),
- Spki = decode_supportedPublicKey(Spki0),
- Exts = decode_extensions(Exts0),
- %% io:format("Extensions ~p~n",[Exts]),
- C#'OTPCertificate'{tbsCertificate=
- TBS#'OTPTBSCertificate'{
- issuer=Issuer, subject=Subject,
- subjectPublicKeyInfo=Spki,extensions=Exts}}.
-
-
%%% SubjectPublicKey
supportedPublicKeyAlgorithms(?'rsaEncryption') -> 'RSAPublicKey';
supportedPublicKeyAlgorithms(?'id-dsa') -> 'DSAPublicKey';
@@ -204,15 +150,35 @@ transform({rdnSequence, SeqList},Func) when is_list(SeqList) ->
lists:map(fun(Seq) ->
lists:map(fun(Element) -> transform(Element,Func) end, Seq)
end, SeqList)};
-%% transform(List = [{rdnSequence, _}|_],Func) ->
-%% lists:map(fun(Element) -> transform(Element,Func) end, List);
transform(#'NameConstraints'{permittedSubtrees=Permitted, excludedSubtrees=Excluded}, Func) ->
- Res = #'NameConstraints'{permittedSubtrees=transform_sub_tree(Permitted,Func),
- excludedSubtrees=transform_sub_tree(Excluded,Func)},
-%% io:format("~p~n",[Res]),
- Res;
+ #'NameConstraints'{permittedSubtrees=transform_sub_tree(Permitted,Func),
+ excludedSubtrees=transform_sub_tree(Excluded,Func)};
+
transform(Other,_) ->
Other.
+
+encode_tbs(TBS=#'OTPTBSCertificate'{issuer=Issuer0,
+ subject=Subject0,
+ subjectPublicKeyInfo=Spki0,
+ extensions=Exts0}) ->
+ Issuer = transform(Issuer0,encode),
+ Subject = transform(Subject0,encode),
+ Spki = encode_supportedPublicKey(Spki0),
+ Exts = encode_extensions(Exts0),
+ TBS#'OTPTBSCertificate'{issuer=Issuer, subject=Subject,
+ subjectPublicKeyInfo=Spki,extensions=Exts}.
+
+decode_tbs(TBS = #'OTPTBSCertificate'{issuer=Issuer0,
+ subject=Subject0,
+ subjectPublicKeyInfo=Spki0,
+ extensions=Exts0}) ->
+ Issuer = transform(Issuer0,decode),
+ Subject = transform(Subject0,decode),
+ Spki = decode_supportedPublicKey(Spki0),
+ Exts = decode_extensions(Exts0),
+ TBS#'OTPTBSCertificate'{issuer=Issuer, subject=Subject,
+ subjectPublicKeyInfo=Spki,extensions=Exts}.
+
transform_sub_tree(asn1_NOVALUE,_) -> asn1_NOVALUE;
transform_sub_tree(TreeList,Func) ->
[Tree#'GeneralSubtree'{base=transform(Name,Func)} ||
@@ -236,303 +202,3 @@ attribute_type(?'id-at-pseudonym') -> 'X520Pseudonym';
attribute_type(?'id-domainComponent') -> 'DomainComponent';
attribute_type(?'id-emailAddress') -> 'EmailAddress';
attribute_type(Type) -> Type.
-
-%%% Old code transforms
-
-plain_to_otp(#'Certificate'{tbsCertificate = TBSCert,
- signatureAlgorithm = SigAlg,
- signature = Signature} = Cert) ->
- Cert#'Certificate'{tbsCertificate = plain_to_otp(TBSCert),
- signatureAlgorithm = plain_to_otp(SigAlg),
- signature = plain_to_otp(Signature)};
-
-plain_to_otp(#'TBSCertificate'{signature = Signature,
- issuer = Issuer,
- subject = Subject,
- subjectPublicKeyInfo = SPubKeyInfo,
- extensions = Extensions} = TBSCert) ->
-
- TBSCert#'TBSCertificate'{signature = plain_to_otp(Signature),
- issuer = plain_to_otp(Issuer),
- subject =
- plain_to_otp(Subject),
- subjectPublicKeyInfo =
- plain_to_otp(SPubKeyInfo),
- extensions =
- plain_to_otp_extensions(Extensions)
- };
-
-plain_to_otp(#'AlgorithmIdentifier'{algorithm = Algorithm,
- parameters = Params}) ->
- SignAlgAny =
- #'SignatureAlgorithm-Any'{algorithm = Algorithm,
- parameters = Params},
- {ok, AnyEnc} = 'OTP-PUB-KEY':encode('SignatureAlgorithm-Any',
- SignAlgAny),
- {ok, SignAlg} = 'OTP-PUB-KEY':decode('SignatureAlgorithm',
- list_to_binary(AnyEnc)),
- SignAlg;
-
-plain_to_otp({rdnSequence, SeqList}) when is_list(SeqList) ->
- {rdnSequence,
- lists:map(fun(Seq) ->
- lists:map(fun(Element) ->
- plain_to_otp(Element)
- end,
- Seq)
- end, SeqList)};
-
-plain_to_otp(#'AttributeTypeAndValue'{} = ATAV) ->
- {ok, ATAVEnc} =
- 'OTP-PUB-KEY':encode('AttributeTypeAndValue', ATAV),
- {ok, ATAVDec} = 'OTP-PUB-KEY':decode('OTPAttributeTypeAndValue',
- list_to_binary(ATAVEnc)),
- #'AttributeTypeAndValue'{type = ATAVDec#'OTPAttributeTypeAndValue'.type,
- value =
- ATAVDec#'OTPAttributeTypeAndValue'.value};
-
-plain_to_otp(#'SubjectPublicKeyInfo'{algorithm =
- #'AlgorithmIdentifier'{algorithm
- = Algo,
- parameters =
- Params},
- subjectPublicKey = PublicKey}) ->
-
- AnyAlgo = #'PublicKeyAlgorithm'{algorithm = Algo,
- parameters = Params},
- {0, AnyKey} = PublicKey,
- AnyDec = #'OTPSubjectPublicKeyInfo-Any'{algorithm = AnyAlgo,
- subjectPublicKey = AnyKey},
- {ok, AnyEnc} =
- 'OTP-PUB-KEY':encode('OTPSubjectPublicKeyInfo-Any', AnyDec),
- {ok, InfoDec} = 'OTP-PUB-KEY':decode('OTPOLDSubjectPublicKeyInfo',
- list_to_binary(AnyEnc)),
-
- AlgorithmDec = InfoDec#'OTPOLDSubjectPublicKeyInfo'.algorithm,
- AlgoDec = AlgorithmDec#'OTPOLDSubjectPublicKeyInfo_algorithm'.algo,
- NewParams = AlgorithmDec#'OTPOLDSubjectPublicKeyInfo_algorithm'.parameters,
- PublicKeyDec = InfoDec#'OTPOLDSubjectPublicKeyInfo'.subjectPublicKey,
- NewAlgorithmDec =
- #'SubjectPublicKeyInfoAlgorithm'{algorithm = AlgoDec,
- parameters = NewParams},
- #'SubjectPublicKeyInfo'{algorithm = NewAlgorithmDec,
- subjectPublicKey = PublicKeyDec
- };
-
-plain_to_otp(#'Extension'{extnID = ExtID,
- critical = Critical,
- extnValue = Value})
- when ExtID == ?'id-ce-authorityKeyIdentifier';
- ExtID == ?'id-ce-subjectKeyIdentifier';
- ExtID == ?'id-ce-keyUsage';
- ExtID == ?'id-ce-privateKeyUsagePeriod';
- ExtID == ?'id-ce-certificatePolicies';
- ExtID == ?'id-ce-policyMappings';
- ExtID == ?'id-ce-subjectAltName';
- ExtID == ?'id-ce-issuerAltName';
- ExtID == ?'id-ce-subjectDirectoryAttributes';
- ExtID == ?'id-ce-basicConstraints';
- ExtID == ?'id-ce-nameConstraints';
- ExtID == ?'id-ce-policyConstraints';
- ExtID == ?'id-ce-extKeyUsage';
- ExtID == ?'id-ce-cRLDistributionPoints';
- ExtID == ?'id-ce-inhibitAnyPolicy';
- ExtID == ?'id-ce-freshestCRL' ->
- ExtAny = #'Extension-Any'{extnID = ExtID,
- critical = Critical,
- extnValue = Value},
- {ok, AnyEnc} = 'OTP-PUB-KEY':encode('Extension-Any', ExtAny),
- {ok, ExtDec} = 'OTP-PUB-KEY':decode('OTPExtension',
- list_to_binary(AnyEnc)),
-
- ExtValue = plain_to_otp_extension_value(ExtID,
- ExtDec#'OTPExtension'.extnValue),
- #'Extension'{extnID = ExtID,
- critical = ExtDec#'OTPExtension'.critical,
- extnValue = ExtValue};
-
-plain_to_otp(#'Extension'{} = Ext) ->
- Ext;
-
-plain_to_otp(#'AuthorityKeyIdentifier'{} = Ext) ->
- CertIssuer = Ext#'AuthorityKeyIdentifier'.authorityCertIssuer,
- Ext#'AuthorityKeyIdentifier'{authorityCertIssuer =
- plain_to_otp(CertIssuer)};
-
-
-plain_to_otp([{directoryName, Value}]) ->
- [{directoryName, plain_to_otp(Value)}];
-
-plain_to_otp(Value) ->
- Value.
-
-otp_to_plain(#'Certificate'{tbsCertificate = TBSCert,
- signatureAlgorithm = SigAlg,
- signature = Signature} = Cert) ->
- Cert#'Certificate'{tbsCertificate = otp_to_plain(TBSCert),
- signatureAlgorithm =
- otp_to_plain(SigAlg),
- signature = otp_to_plain(Signature)};
-
-otp_to_plain(#'TBSCertificate'{signature = Signature,
- issuer = Issuer,
- subject = Subject,
- subjectPublicKeyInfo = SPubKeyInfo,
- extensions = Extensions} = TBSCert) ->
-
- TBSCert#'TBSCertificate'{signature = otp_to_plain(Signature),
- issuer = otp_to_plain(Issuer),
- subject =
- otp_to_plain(Subject),
- subjectPublicKeyInfo =
- otp_to_plain(SPubKeyInfo),
- extensions = otp_to_plain_extensions(Extensions)
- };
-
-otp_to_plain(#'SignatureAlgorithm'{} = SignAlg) ->
- {ok, EncSignAlg} = 'OTP-PUB-KEY':encode('SignatureAlgorithm', SignAlg),
- {ok, #'SignatureAlgorithm-Any'{algorithm = Algorithm,
- parameters = Params}} =
- 'OTP-PUB-KEY':decode('SignatureAlgorithm-Any',
- list_to_binary(EncSignAlg)),
- #'AlgorithmIdentifier'{algorithm = Algorithm,
- parameters = Params};
-
-otp_to_plain({rdnSequence, SeqList}) when is_list(SeqList) ->
- {rdnSequence,
- lists:map(fun(Seq) ->
- lists:map(fun(Element) ->
- otp_to_plain(Element)
- end,
- Seq)
- end, SeqList)};
-
-otp_to_plain(#'AttributeTypeAndValue'{type = Type, value = Value}) ->
- {ok, ATAVEnc} =
- 'OTP-PUB-KEY':encode('OTPAttributeTypeAndValue',
- #'OTPAttributeTypeAndValue'{type = Type,
- value = Value}),
- {ok, ATAVDec} = 'OTP-PUB-KEY':decode('AttributeTypeAndValue',
- list_to_binary(ATAVEnc)),
- ATAVDec;
-
-otp_to_plain(#'SubjectPublicKeyInfo'{algorithm =
- #'SubjectPublicKeyInfoAlgorithm'{
- algorithm = Algo,
- parameters =
- Params},
- subjectPublicKey = PublicKey}) ->
-
- OtpAlgo = #'OTPOLDSubjectPublicKeyInfo_algorithm'{algo = Algo,
- parameters = Params},
- OtpDec = #'OTPOLDSubjectPublicKeyInfo'{algorithm = OtpAlgo,
- subjectPublicKey = PublicKey},
- {ok, OtpEnc} =
- 'OTP-PUB-KEY':encode('OTPOLDSubjectPublicKeyInfo', OtpDec),
-
- {ok, AnyDec} = 'OTP-PUB-KEY':decode('OTPSubjectPublicKeyInfo-Any',
- list_to_binary(OtpEnc)),
-
- #'OTPSubjectPublicKeyInfo-Any'{algorithm = #'PublicKeyAlgorithm'{
- algorithm = NewAlgo,
- parameters = NewParams},
- subjectPublicKey = Bin} = AnyDec,
-
- #'SubjectPublicKeyInfo'{algorithm =
- #'AlgorithmIdentifier'{
- algorithm = NewAlgo,
- parameters = plain_key_params(NewParams)},
- subjectPublicKey =
- {0, Bin}
- };
-
-otp_to_plain(#'Extension'{extnID = ExtID,
- extnValue = Value} = Ext) ->
- ExtValue =
- otp_to_plain_extension_value(ExtID, Value),
-
- Ext#'Extension'{extnValue = ExtValue};
-
-otp_to_plain(#'AuthorityKeyIdentifier'{} = Ext) ->
- CertIssuer = Ext#'AuthorityKeyIdentifier'.authorityCertIssuer,
- Ext#'AuthorityKeyIdentifier'{authorityCertIssuer =
- otp_to_plain(CertIssuer)};
-
-otp_to_plain([{directoryName, Value}]) ->
- [{directoryName, otp_to_plain(Value)}];
-
-otp_to_plain(Value) ->
- Value.
-
-plain_key_params('NULL') ->
- <<5,0>>;
-plain_key_params(Value) ->
- Value.
-
-plain_to_otp_extension_value(?'id-ce-authorityKeyIdentifier', Value) ->
- plain_to_otp(Value);
-plain_to_otp_extension_value(_, Value) ->
- Value.
-
-plain_to_otp_extensions(Exts) when is_list(Exts) ->
- lists:map(fun(Ext) -> plain_to_otp(Ext) end, Exts).
-
-otp_to_plain_extension_value(?'id-ce-authorityKeyIdentifier', Value) ->
- {ok, Enc} = 'OTP-PUB-KEY':encode('AuthorityKeyIdentifier',
- otp_to_plain(Value)),
- otp_to_plain_extension_value_format(Enc);
-otp_to_plain_extension_value(?'id-ce-subjectKeyIdentifier', Value) ->
- {ok, Enc} = 'OTP-PUB-KEY':encode('SubjectKeyIdentifier', Value),
- otp_to_plain_extension_value_format(Enc);
-otp_to_plain_extension_value(?'id-ce-keyUsage', Value) ->
- {ok, Enc} = 'OTP-PUB-KEY':encode('KeyUsage', Value),
- otp_to_plain_extension_value_format(Enc);
-otp_to_plain_extension_value(?'id-ce-privateKeyUsagePeriod', Value) ->
- {ok, Enc} = 'OTP-PUB-KEY':encode('PrivateKeyUsagePeriod', Value),
- otp_to_plain_extension_value_format(Enc);
-otp_to_plain_extension_value(?'id-ce-certificatePolicies', Value) ->
- {ok, Enc} = 'OTP-PUB-KEY':encode('CertificatePolicies', Value),
- otp_to_plain_extension_value_format(Enc);
-otp_to_plain_extension_value(?'id-ce-policyMappings', Value) ->
- {ok, Enc} = 'OTP-PUB-KEY':encode('PolicyMappings', Value),
- otp_to_plain_extension_value_format(Enc);
-otp_to_plain_extension_value(?'id-ce-subjectAltName', Value) ->
- {ok, Enc} = 'OTP-PUB-KEY':encode('SubjectAltName', Value),
- otp_to_plain_extension_value_format(Enc);
-otp_to_plain_extension_value(?'id-ce-issuerAltName', Value) ->
- {ok, Enc} = 'OTP-PUB-KEY':encode('IssuerAltName', Value),
- otp_to_plain_extension_value_format(Enc);
-otp_to_plain_extension_value(?'id-ce-subjectDirectoryAttributes', Value) ->
- {ok, Enc} = 'OTP-PUB-KEY':encode('SubjectDirectoryAttributes', Value),
- otp_to_plain_extension_value_format(Enc);
-otp_to_plain_extension_value(?'id-ce-basicConstraints', Value) ->
- {ok, Enc} = 'OTP-PUB-KEY':encode('BasicConstraints', Value),
- otp_to_plain_extension_value_format(Enc);
-otp_to_plain_extension_value(?'id-ce-nameConstraints', Value) ->
- {ok, Enc} = 'OTP-PUB-KEY':encode('NameConstraints', Value),
- otp_to_plain_extension_value_format(Enc);
-otp_to_plain_extension_value(?'id-ce-policyConstraints', Value) ->
- {ok, Enc} = 'OTP-PUB-KEY':encode('PolicyConstraints', Value),
- otp_to_plain_extension_value_format(Enc);
-otp_to_plain_extension_value(?'id-ce-extKeyUsage', Value) ->
- {ok, Enc} = 'OTP-PUB-KEY':encode('ExtKeyUsage', Value),
- otp_to_plain_extension_value_format(Enc);
-otp_to_plain_extension_value(?'id-ce-cRLDistributionPoints', Value) ->
- {ok, Enc} = 'OTP-PUB-KEY':encode('CRLDistributionPoints', Value),
- otp_to_plain_extension_value_format(Enc);
-otp_to_plain_extension_value(?'id-ce-inhibitAnyPolicy', Value) ->
- {ok, Enc} = 'OTP-PUB-KEY':encode('InhibitAnyPolicy', Value),
- otp_to_plain_extension_value_format(Enc);
-otp_to_plain_extension_value(?'id-ce-freshestCRL', Value) ->
- {ok, Enc} = 'OTP-PUB-KEY':encode('FreshestCRL', Value),
- otp_to_plain_extension_value_format(Enc);
-otp_to_plain_extension_value(_Id, Value) ->
- Value.
-
-otp_to_plain_extension_value_format(Value) ->
- list_to_binary(Value).
-
-otp_to_plain_extensions(Exts) when is_list(Exts) ->
- lists:map(fun(Ext) ->
- otp_to_plain(Ext)
- end, Exts).
diff --git a/lib/public_key/src/pubkey_crypto.erl b/lib/public_key/src/pubkey_crypto.erl
index 4ab655e977..7b7abb1c56 100644
--- a/lib/public_key/src/pubkey_crypto.erl
+++ b/lib/public_key/src/pubkey_crypto.erl
@@ -106,6 +106,11 @@ sign(DigestType, PlainText, #'RSAPrivateKey'{modulus = N, publicExponent = E,
crypto:mpint(N),
crypto:mpint(D)]);
+sign(none, Hash, #'DSAPrivateKey'{p = P, q = Q, g = G, x = X}) ->
+ crypto:dss_sign(none, Hash,
+ [crypto:mpint(P), crypto:mpint(Q),
+ crypto:mpint(G), crypto:mpint(X)]);
+
sign(sha, PlainText, #'DSAPrivateKey'{p = P, q = Q, g = G, x = X}) ->
crypto:dss_sign(sized_binary(PlainText),
[crypto:mpint(P), crypto:mpint(Q),
@@ -128,6 +133,12 @@ verify(DigestType, PlainText, Signature,
sized_binary(Signature),
[crypto:mpint(Exp), crypto:mpint(Mod)]);
+verify(none, Hash, Signature, Key, #'Dss-Parms'{p = P, q = Q, g = G}) ->
+ crypto:dss_verify(none, Hash,
+ sized_binary(Signature),
+ [crypto:mpint(P), crypto:mpint(Q),
+ crypto:mpint(G), crypto:mpint(Key)]);
+
verify(sha, PlainText, Signature, Key, #'Dss-Parms'{p = P, q = Q, g = G}) ->
crypto:dss_verify(sized_binary(PlainText),
sized_binary(Signature),
diff --git a/lib/public_key/src/pubkey_pem.erl b/lib/public_key/src/pubkey_pem.erl
index 9fc17b6f73..65879f1bbe 100644
--- a/lib/public_key/src/pubkey_pem.erl
+++ b/lib/public_key/src/pubkey_pem.erl
@@ -124,25 +124,31 @@ decode_file2([L|Rest], RLs, Ens, Tag, Info0) ->
decode_file2([], _, Ens, _, _) ->
{ok, lists:reverse(Ens)}.
-%% TODO Support same as decode_file
+%% Supports the same entry types as decode_file
encode_file(Ds) ->
lists:map(
- fun({cert, Bin}) ->
+ fun({cert, Bin, not_encrypted}) ->
%% PKIX (X.509)
["-----BEGIN CERTIFICATE-----\n",
b64encode_and_split(Bin),
"-----END CERTIFICATE-----\n\n"];
- ({cert_req, Bin}) ->
+ ({cert_req, Bin, not_encrypted}) ->
%% PKCS#10
["-----BEGIN CERTIFICATE REQUEST-----\n",
b64encode_and_split(Bin),
"-----END CERTIFICATE REQUEST-----\n\n"];
- ({rsa_private_key, Bin}) ->
+ ({rsa_private_key, Bin, not_encrypted}) ->
%% PKCS#?
["XXX Following key assumed not encrypted\n",
"-----BEGIN RSA PRIVATE KEY-----\n",
b64encode_and_split(Bin),
- "-----END RSA PRIVATE KEY-----\n\n"]
+ "-----END RSA PRIVATE KEY-----\n\n"];
+ ({dsa_private_key, Bin, not_encrypted}) ->
+ %% PKCS#?
+ ["XXX Following key assumed not encrypted\n",
+ "-----BEGIN DSA PRIVATE KEY-----\n",
+ b64encode_and_split(Bin),
+ "-----END DSA PRIVATE KEY-----\n\n"]
end, Ds).
dek_info(Line0, Info) ->
diff --git a/lib/public_key/src/public_key.appup.src b/lib/public_key/src/public_key.appup.src
index 46e5ecca33..2eb5750923 100644
--- a/lib/public_key/src/public_key.appup.src
+++ b/lib/public_key/src/public_key.appup.src
@@ -1,39 +1,43 @@
%% -*- erlang -*-
{"%VSN%",
[
- {"0.5",
+ {"0.6",
[
+ {update, 'OTP-PUB-KEY', soft, soft_purge, soft_purge, []},
{update, public_key, soft, soft_purge, soft_purge, []},
- {update, pubkey_crypto, soft, soft_purge, soft_purge, []},
{update, pubkey_pem, soft, soft_purge, soft_purge, []},
+ {update, pubkey_cert_records, soft, soft_purge, soft_purge, []},
{update, pubkey_cert, soft, soft_purge, soft_purge, []}
]
},
- {"0.4",
+ {"0.5",
[
+ {update, 'OTP-PUB-KEY', soft, soft_purge, soft_purge, []},
{update, public_key, soft, soft_purge, soft_purge, []},
- {update, pubkey_cert_records, soft, soft_purge, soft_purge, []},
{update, pubkey_crypto, soft, soft_purge, soft_purge, []},
{update, pubkey_pem, soft, soft_purge, soft_purge, []},
+ {update, pubkey_cert_records, soft, soft_purge, soft_purge, []},
{update, pubkey_cert, soft, soft_purge, soft_purge, []}
- ]
+ ]
}
],
[
- {"0.5",
+ {"0.6",
[
+ {update, 'OTP-PUB-KEY', soft, soft_purge, soft_purge, []},
{update, public_key, soft, soft_purge, soft_purge, []},
- {update, pubkey_crypto, soft, soft_purge, soft_purge, []},
{update, pubkey_pem, soft, soft_purge, soft_purge, []},
+ {update, pubkey_cert_records, soft, soft_purge, soft_purge, []},
{update, pubkey_cert, soft, soft_purge, soft_purge, []}
]
},
- {"0.4",
+ {"0.5",
[
+ {update, 'OTP-PUB-KEY', soft, soft_purge, soft_purge, []},
{update, public_key, soft, soft_purge, soft_purge, []},
- {update, pubkey_cert_records, soft, soft_purge, soft_purge, []},
{update, pubkey_crypto, soft, soft_purge, soft_purge, []},
{update, pubkey_pem, soft, soft_purge, soft_purge, []},
+ {update, pubkey_cert_records, soft, soft_purge, soft_purge, []},
{update, pubkey_cert, soft, soft_purge, soft_purge, []}
]
}
diff --git a/lib/public_key/src/public_key.erl b/lib/public_key/src/public_key.erl
index 157e76bb21..12354eee5d 100644
--- a/lib/public_key/src/public_key.erl
+++ b/lib/public_key/src/public_key.erl
@@ -113,13 +113,13 @@ decrypt_public(CipherText, Key, Options) ->
encrypt_public(PlainText, Key) ->
encrypt_public(PlainText, Key, []).
encrypt_public(PlainText, Key, Options) ->
- Padding = proplists:get_value(rsa_pad, Options, rsa_pkcs1_oaep_padding),
+ Padding = proplists:get_value(rsa_pad, Options, rsa_pkcs1_padding),
pubkey_crypto:encrypt_public(PlainText, Key, Padding).
encrypt_private(PlainText, Key) ->
encrypt_private(PlainText, Key, []).
encrypt_private(PlainText, Key, Options) ->
- Padding = proplists:get_value(rsa_pad, Options, rsa_pkcs1_oaep_padding),
+ Padding = proplists:get_value(rsa_pad, Options, rsa_pkcs1_padding),
pubkey_crypto:encrypt_private(PlainText, Key, Padding).
%%--------------------------------------------------------------------
@@ -360,7 +360,9 @@ verify_signature(PlainText, DigestType, Signature, #'RSAPublicKey'{} = Key,
pubkey_crypto:verify(DigestType, PlainText, Signature, Key, KeyParams);
verify_signature(PlainText, sha, Signature, Key, #'Dss-Parms'{} = KeyParams)
when is_binary(PlainText), is_binary(Signature), is_integer(Key) ->
- pubkey_crypto:verify(sha, PlainText, Signature, Key, KeyParams).
+ pubkey_crypto:verify(sha, PlainText, Signature, Key, KeyParams);
+verify_signature(Hash, none, Signature, Key, KeyParams) ->
+ pubkey_crypto:verify(none, Hash, Signature, Key, KeyParams).
verify_signature(DerCert, Key, #'Dss-Parms'{} = KeyParams)
when is_binary(DerCert), is_integer(Key) ->
diff --git a/lib/public_key/test/Makefile b/lib/public_key/test/Makefile
index c7215020c7..5544339ff2 100644
--- a/lib/public_key/test/Makefile
+++ b/lib/public_key/test/Makefile
@@ -28,6 +28,7 @@ INCLUDES= -I. -I ../include
# ----------------------------------------------------
MODULES= \
+ pkey_test \
public_key_SUITE \
pkits_SUITE
@@ -40,6 +41,9 @@ TARGET_FILES= \
SPEC_FILES = public_key.spec
+COVER_FILE = public_key.cover
+
+
# ----------------------------------------------------
# Release directory specification
# ----------------------------------------------------
@@ -74,7 +78,7 @@ release_spec: opt
release_tests_spec: opt
$(INSTALL_DIR) $(RELSYSDIR)
- $(INSTALL_DATA) $(SPEC_FILES) $(ERL_FILES) $(HRL_FILES)$(RELSYSDIR)
+ $(INSTALL_DATA) $(SPEC_FILES) $(ERL_FILES) $(COVER_FILE) $(HRL_FILES) $(RELSYSDIR)
$(INSTALL_DATA) $(TARGET_FILES) $(RELSYSDIR)
chmod -f -R u+w $(RELSYSDIR)
@tar cf - *_SUITE_data | (cd $(RELSYSDIR); tar xf -)
diff --git a/lib/public_key/test/pkey_test.erl b/lib/public_key/test/pkey_test.erl
new file mode 100644
index 0000000000..4cf20f0174
--- /dev/null
+++ b/lib/public_key/test/pkey_test.erl
@@ -0,0 +1,412 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%% Create test certificates
+
+-module(pkey_test).
+-include_lib("public_key/include/public_key.hrl").
+
+-export([make_cert/1, gen_rsa/1, verify_signature/3, write_pem/3]).
+-compile(export_all).
+
+%%--------------------------------------------------------------------
+%% @doc Create and return a DER-encoded certificate
+%% Option Default
+%% -------------------------------------------------------
+%% digest sha1
+%% validity {date(), date() + week()}
+%% version 3
+%% subject [] list of the following content
+%% {name, Name}
+%% {email, Email}
+%% {city, City}
+%% {state, State}
+%% {org, Org}
+%% {org_unit, OrgUnit}
+%% {country, Country}
+%% {serial, Serial}
+%% {title, Title}
+%% {dnQualifer, DnQ}
+%% issuer = {Issuer, IssuerKey} true (i.e. a CA cert is created)
+%% (obs: IssuerKey might be {Key, Password})
+%% key = KeyFile|KeyBin|rsa|dsa Subject public key; rsa or dsa generates a new key
+%%
+%%
+%% (OBS: The generated keys are for testing only)
+%% @spec ([{::atom(), ::term()}]) -> {Cert::binary(), Key::binary()}
+%% @end
+%%--------------------------------------------------------------------
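+%% A minimal usage sketch (illustrative only; the option values below are
+%% hypothetical, and the generated keys are weak test keys):
+%%
+%%   {CaCert, CaKey} = pkey_test:make_cert([]),
+%%   {Cert, _Key} = pkey_test:make_cert([{key, dsa},
+%%                                       {issuer, {CaCert, CaKey}},
+%%                                       {subject, [{name, "test"}]}]),
+%%   true = pkey_test:verify_signature(Cert, CaKey, undef).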
+
+make_cert(Opts) ->
+ SubjectPrivateKey = get_key(Opts),
+ {TBSCert, IssuerKey} = make_tbs(SubjectPrivateKey, Opts),
+ Cert = public_key:sign(TBSCert, IssuerKey),
+ true = verify_signature(Cert, IssuerKey, undef), %% verify that the keys were ok
+ {Cert, encode_key(SubjectPrivateKey)}.
+
+%%--------------------------------------------------------------------
+%% @doc Writes pem files in Dir with FileName ++ ".pem" and FileName ++ "_key.pem"
+%% @spec (::string(), ::string(), {Cert,Key}) -> ok
+%% @end
+%%--------------------------------------------------------------------
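+%% For example (the directory and base name are hypothetical):
+%%   pkey_test:write_pem("/tmp", "ca", pkey_test:make_cert([])).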
+write_pem(Dir, FileName, {Cert, Key = {_,_,not_encrypted}}) when is_binary(Cert) ->
+ ok = public_key:der_to_pem(filename:join(Dir, FileName ++ ".pem"), [{cert, Cert, not_encrypted}]),
+ ok = public_key:der_to_pem(filename:join(Dir, FileName ++ "_key.pem"), [Key]).
+
+%%--------------------------------------------------------------------
+%% @doc Creates an RSA key (OBS: for testing only);
+%% the size is in bytes
+%% @spec (::integer()) -> {::atom(), ::binary(), ::opaque()}
+%% @end
+%%--------------------------------------------------------------------
+gen_rsa(Size) when is_integer(Size) ->
+ Key = gen_rsa2(Size),
+ {Key, encode_key(Key)}.
+
+%%--------------------------------------------------------------------
+%% @doc Creates a DSA key (OBS: for testing only);
+%% the sizes are in bytes
+%% @spec (::integer()) -> {::atom(), ::binary(), ::opaque()}
+%% @end
+%%--------------------------------------------------------------------
+gen_dsa(LSize,NSize) when is_integer(LSize), is_integer(NSize) ->
+ Key = gen_dsa2(LSize, NSize),
+ {Key, encode_key(Key)}.
+
+%%--------------------------------------------------------------------
+%% @doc Verifies cert signatures
+%% @spec (::binary(), ::tuple(), ::term()) -> ::boolean()
+%% @end
+%%--------------------------------------------------------------------
+verify_signature(DerEncodedCert, DerKey, KeyParams) ->
+ Key = decode_key(DerKey),
+ case Key of
+ #'RSAPrivateKey'{modulus=Mod, publicExponent=Exp} ->
+ public_key:verify_signature(DerEncodedCert,
+ #'RSAPublicKey'{modulus=Mod, publicExponent=Exp},
+ 'NULL');
+ #'DSAPrivateKey'{p=P, q=Q, g=G, y=Y} ->
+ public_key:verify_signature(DerEncodedCert, Y, #'Dss-Parms'{p=P, q=Q, g=G});
+
+ _ ->
+ public_key:verify_signature(DerEncodedCert, Key, KeyParams)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%% Implementation %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+get_key(Opts) ->
+ case proplists:get_value(key, Opts) of
+ undefined -> make_key(rsa, Opts);
+ rsa -> make_key(rsa, Opts);
+ dsa -> make_key(dsa, Opts);
+ Key ->
+ Password = proplists:get_value(password, Opts, no_passwd),
+ decode_key(Key, Password)
+ end.
+
+decode_key({Key, Pw}) ->
+ decode_key(Key, Pw);
+decode_key(Key) ->
+ decode_key(Key, no_passwd).
+
+
+decode_key(#'RSAPublicKey'{} = Key,_) ->
+ Key;
+decode_key(#'RSAPrivateKey'{} = Key,_) ->
+ Key;
+decode_key(#'DSAPrivateKey'{} = Key,_) ->
+ Key;
+decode_key(Der = {_,_,_}, Pw) ->
+ {ok, Key} = public_key:decode_private_key(Der, Pw),
+ Key;
+decode_key(FileOrDer, Pw) ->
+ {ok, [KeyInfo]} = public_key:pem_to_der(FileOrDer),
+ decode_key(KeyInfo, Pw).
+
+encode_key(Key = #'RSAPrivateKey'{}) ->
+ {ok, Der} = 'OTP-PUB-KEY':encode('RSAPrivateKey', Key),
+ {rsa_private_key, list_to_binary(Der), not_encrypted};
+encode_key(Key = #'DSAPrivateKey'{}) ->
+ {ok, Der} = 'OTP-PUB-KEY':encode('DSAPrivateKey', Key),
+ {dsa_private_key, list_to_binary(Der), not_encrypted}.
+
+make_tbs(SubjectKey, Opts) ->
+ Version = list_to_atom("v"++integer_to_list(proplists:get_value(version, Opts, 3))),
+ {Issuer, IssuerKey} = issuer(Opts, SubjectKey),
+
+ {Algo, Parameters} = sign_algorithm(IssuerKey, Opts),
+
+ SignAlgo = #'SignatureAlgorithm'{algorithm = Algo,
+ parameters = Parameters},
+
+ {#'OTPTBSCertificate'{serialNumber = trunc(random:uniform()*100000000)*10000 + 1,
+ signature = SignAlgo,
+ issuer = Issuer,
+ validity = validity(Opts),
+ subject = subject(proplists:get_value(subject, Opts),false),
+ subjectPublicKeyInfo = publickey(SubjectKey),
+ version = Version,
+ extensions = extensions(Opts)
+ }, IssuerKey}.
+
+issuer(Opts, SubjectKey) ->
+ IssuerProp = proplists:get_value(issuer, Opts, true),
+ case IssuerProp of
+ true -> %% Self signed
+ {subject(proplists:get_value(subject, Opts), true), SubjectKey};
+ {Issuer, IssuerKey} when is_binary(Issuer) ->
+ {issuer_der(Issuer), decode_key(IssuerKey)};
+ {File, IssuerKey} when is_list(File) ->
+ {ok, [{cert, Cert, _}|_]} = public_key:pem_to_der(File),
+ {issuer_der(Cert), decode_key(IssuerKey)}
+ end.
+
+issuer_der(Issuer) ->
+ {ok, Decoded} = public_key:pkix_decode_cert(Issuer, otp),
+ #'OTPCertificate'{tbsCertificate=Tbs} = Decoded,
+ #'OTPTBSCertificate'{subject=Subject} = Tbs,
+ Subject.
+
+subject(undefined, IsCA) ->
+ User = if IsCA -> "CA"; true -> os:getenv("USER") end,
+ Opts = [{email, User ++ "@erlang.org"},
+ {name, User},
+ {city, "Stockholm"},
+ {country, "SE"},
+ {org, "erlang"},
+ {org_unit, "testing dep"}],
+ subject(Opts);
+subject(Opts, _) ->
+ subject(Opts).
+
+subject(SubjectOpts) when is_list(SubjectOpts) ->
+ Encode = fun(Opt) ->
+ {Type,Value} = subject_enc(Opt),
+ [#'AttributeTypeAndValue'{type=Type, value=Value}]
+ end,
+ {rdnSequence, [Encode(Opt) || Opt <- SubjectOpts]}.
+
+%% Map subject options onto {AttributeType, Value} pairs
+subject_enc({name, Name}) -> {?'id-at-commonName', {printableString, Name}};
+subject_enc({email, Email}) -> {?'id-emailAddress', Email};
+subject_enc({city, City}) -> {?'id-at-localityName', {printableString, City}};
+subject_enc({state, State}) -> {?'id-at-stateOrProvinceName', {printableString, State}};
+subject_enc({org, Org}) -> {?'id-at-organizationName', {printableString, Org}};
+subject_enc({org_unit, OrgUnit}) -> {?'id-at-organizationalUnitName', {printableString, OrgUnit}};
+subject_enc({country, Country}) -> {?'id-at-countryName', Country};
+subject_enc({serial, Serial}) -> {?'id-at-serialNumber', Serial};
+subject_enc({title, Title}) -> {?'id-at-title', {printableString, Title}};
+subject_enc({dnQualifer, DnQ}) -> {?'id-at-dnQualifier', DnQ};
+subject_enc(Other) -> Other.
+
+
+extensions(Opts) ->
+ case proplists:get_value(extensions, Opts, []) of
+ false ->
+ asn1_NOVALUE;
+ Exts ->
+ lists:flatten([extension(Ext) || Ext <- default_extensions(Exts)])
+ end.
+
+default_extensions(Exts) ->
+ Def = [{key_usage,undefined},
+ {subject_altname, undefined},
+ {issuer_altname, undefined},
+ {basic_constraints, default},
+ {name_constraints, undefined},
+ {policy_constraints, undefined},
+ {ext_key_usage, undefined},
+ {inhibit_any, undefined},
+ {auth_key_id, undefined},
+ {subject_key_id, undefined},
+ {policy_mapping, undefined}],
+ Filter = fun({Key, _}, D) -> lists:keydelete(Key, 1, D) end,
+ Exts ++ lists:foldl(Filter, Def, Exts).
+
+extension({_, undefined}) -> [];
+extension({basic_constraints, Data}) ->
+ case Data of
+ default ->
+ #'Extension'{extnID = ?'id-ce-basicConstraints',
+ extnValue = #'BasicConstraints'{cA=true},
+ critical=true};
+ false ->
+ [];
+ Len when is_integer(Len) ->
+ #'Extension'{extnID = ?'id-ce-basicConstraints',
+ extnValue = #'BasicConstraints'{cA=true, pathLenConstraint=Len},
+ critical=true};
+ _ ->
+ #'Extension'{extnID = ?'id-ce-basicConstraints',
+ extnValue = Data}
+ end;
+extension({Id, Data, Critical}) ->
+ #'Extension'{extnID = Id, extnValue = Data, critical = Critical}.
+
+
+publickey(#'RSAPrivateKey'{modulus=N, publicExponent=E}) ->
+ Public = #'RSAPublicKey'{modulus=N, publicExponent=E},
+ Algo = #'PublicKeyAlgorithm'{algorithm= ?rsaEncryption, parameters='NULL'},
+ #'OTPSubjectPublicKeyInfo'{algorithm = Algo,
+ subjectPublicKey = Public};
+publickey(#'DSAPrivateKey'{p=P, q=Q, g=G, y=Y}) ->
+ Algo = #'PublicKeyAlgorithm'{algorithm= ?'id-dsa',
+ parameters=#'Dss-Parms'{p=P, q=Q, g=G}},
+ #'OTPSubjectPublicKeyInfo'{algorithm = Algo, subjectPublicKey = Y}.
+
+validity(Opts) ->
+ DefFrom0 = calendar:gregorian_days_to_date(calendar:date_to_gregorian_days(date())-1),
+ DefTo0 = calendar:gregorian_days_to_date(calendar:date_to_gregorian_days(date())+7),
+ {DefFrom, DefTo} = proplists:get_value(validity, Opts, {DefFrom0, DefTo0}),
+ Format = fun({Y,M,D}) -> lists:flatten(io_lib:format("~w~2..0w~2..0w000000Z",[Y,M,D])) end,
+ #'Validity'{notBefore={generalTime, Format(DefFrom)},
+ notAfter ={generalTime, Format(DefTo)}}.
+
+sign_algorithm(#'RSAPrivateKey'{}, Opts) ->
+ Type = case proplists:get_value(digest, Opts, sha1) of
+ sha1 -> ?'sha1WithRSAEncryption';
+ sha512 -> ?'sha512WithRSAEncryption';
+ sha384 -> ?'sha384WithRSAEncryption';
+ sha256 -> ?'sha256WithRSAEncryption';
+ md5 -> ?'md5WithRSAEncryption';
+ md2 -> ?'md2WithRSAEncryption'
+ end,
+ {Type, 'NULL'};
+sign_algorithm(#'DSAPrivateKey'{p=P, q=Q, g=G}, _Opts) ->
+ {?'id-dsa-with-sha1', #'Dss-Parms'{p=P, q=Q, g=G}}.
+
+make_key(rsa, _Opts) ->
+ %% (OBS: for testing only)
+ gen_rsa2(64);
+make_key(dsa, _Opts) ->
+ gen_dsa2(128, 20). %% Bytes i.e. {1024, 160}
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% RSA key generation (OBS: for testing only)
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+-define(SMALL_PRIMES, [65537,97,89,83,79,73,71,67,61,59,53,
+ 47,43,41,37,31,29,23,19,17,13,11,7,5,3]).
+
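+%% Textbook RSA key generation (test keys only): pick two random probable
+%% primes P and Q of Size bytes, let N = P*Q and Tot = (P-1)*(Q-1), choose
+%% the first small prime E that does not divide Tot as the public exponent,
+%% and derive the private exponent D via the extended Euclidean algorithm.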
+gen_rsa2(Size) ->
+ P = prime(Size),
+ Q = prime(Size),
+ N = P*Q,
+ Tot = (P - 1) * (Q - 1),
+ [E|_] = lists:dropwhile(fun(Candidate) -> (Tot rem Candidate) == 0 end, ?SMALL_PRIMES),
+ {D1,D2} = extended_gcd(E, Tot),
+ D = erlang:max(D1,D2),
+ case D < E of
+ true ->
+ gen_rsa2(Size);
+ false ->
+ {Co1,Co2} = extended_gcd(Q, P),
+ Co = erlang:max(Co1,Co2),
+ #'RSAPrivateKey'{version = 'two-prime',
+ modulus = N,
+ publicExponent = E,
+ privateExponent = D,
+ prime1 = P,
+ prime2 = Q,
+ exponent1 = D rem (P-1),
+ exponent2 = D rem (Q-1),
+ coefficient = Co
+ }
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% DSA key generation (OBS: for testing only)
+%% See http://en.wikipedia.org/wiki/Digital_Signature_Algorithm
+%% and the fips_186-3.pdf
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+gen_dsa2(LSize, NSize) ->
+ Q = prime(NSize), %% Choose N-bit prime Q
+ X0 = prime(LSize),
+ P0 = prime((LSize div 2) +1),
+
+ %% Choose L-bit prime modulus P such that p-1 is a multiple of q.
+ case dsa_search(X0 div (2*Q*P0), P0, Q, 1000) of
+ error ->
+ gen_dsa2(LSize, NSize);
+ P ->
+ G = crypto:mod_exp(2, (P-1) div Q, P), % Choose G, a number whose multiplicative order modulo p is q.
+ %% This may be done by setting g = h^((p-1)/q) mod p; commonly h = 2 is used.
+
+ X = prime(20), %% Choose x by some random method, where 0 < x < q.
+ Y = crypto:mod_exp(G, X, P), %% Calculate y = g^x mod p.
+
+ #'DSAPrivateKey'{version=0, p=P, q=Q, g=G, y=Y, x=X}
+ end.
+
+%% See fips_186-3.pdf
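+%% Searches for a prime modulus of the form P = 2*T*Q*P0 + 1 (so that P-1
+%% is a multiple of Q), incrementing T until the Fermat test accepts P or
+%% the iteration budget is exhausted.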
+dsa_search(T, P0, Q, Iter) when Iter > 0 ->
+ P = 2*T*Q*P0 + 1,
+ case is_prime(crypto:mpint(P), 50) of
+ true -> P;
+ false -> dsa_search(T+1, P0, Q, Iter-1)
+ end;
+dsa_search(_,_,_,_) ->
+ error.
+
+
+%%%%%%% Crypto Math %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
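+%% prime/1 draws a random odd mpint of ByteSize bytes and steps upwards in
+%% increments of two until the Fermat test accepts a probable prime.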
+prime(ByteSize) ->
+ Rand = odd_rand(ByteSize),
+ crypto:erlint(prime_odd(Rand, 0)).
+
+prime_odd(Rand, N) ->
+ case is_prime(Rand, 50) of
+ true ->
+ Rand;
+ false ->
+ NotPrime = crypto:erlint(Rand),
+ prime_odd(crypto:mpint(NotPrime+2), N+1)
+ end.
+
+%% see http://en.wikipedia.org/wiki/Fermat_primality_test
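+%% One round: a Candidate passes if CoPrime^Candidate mod Candidate equals
+%% CoPrime for a random base; 50 rounds yield a probable (not proven) prime.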
+is_prime(_, 0) -> true;
+is_prime(Candidate, Test) ->
+ CoPrime = odd_rand(<<0,0,0,4, 10000:32>>, Candidate),
+ case crypto:mod_exp(CoPrime, Candidate, Candidate) of
+ CoPrime -> is_prime(Candidate, Test-1);
+ _ -> false
+ end.
+
+odd_rand(Size) ->
+ Min = 1 bsl (Size*8-1),
+ Max = (1 bsl (Size*8))-1,
+ odd_rand(crypto:mpint(Min), crypto:mpint(Max)).
+
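+%% Draws a random mpint in [Min,Max) and, if it is even, adds one so that
+%% the primality search always starts from an odd number.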
+odd_rand(Min,Max) ->
+ Rand = <<Sz:32, _/binary>> = crypto:rand_uniform(Min,Max),
+ BitSkip = (Sz+4)*8-1,
+ case Rand of
+ Odd = <<_:BitSkip, 1:1>> -> Odd;
+ Even = <<_:BitSkip, 0:1>> ->
+ crypto:mpint(crypto:erlint(Even)+1)
+ end.
+
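+%% Extended Euclidean algorithm; gen_rsa2/1 uses the returned coefficients
+%% to derive the private exponent D and the CRT coefficient.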
+extended_gcd(A, B) ->
+ case A rem B of
+ 0 ->
+ {0, 1};
+ N ->
+ {X, Y} = extended_gcd(B, N),
+ {Y, X-Y*(A div B)}
+ end.
diff --git a/lib/public_key/test/public_key.cover b/lib/public_key/test/public_key.cover
new file mode 100644
index 0000000000..8477c76ef6
--- /dev/null
+++ b/lib/public_key/test/public_key.cover
@@ -0,0 +1,2 @@
+
+{exclude, ['OTP-PUB-KEY']}.
\ No newline at end of file
diff --git a/lib/public_key/test/public_key_SUITE.erl b/lib/public_key/test/public_key_SUITE.erl
index 8cc36e490d..dc1015969a 100644
--- a/lib/public_key/test/public_key_SUITE.erl
+++ b/lib/public_key/test/public_key_SUITE.erl
@@ -101,14 +101,13 @@ all(doc) ->
all(suite) ->
[app,
+ dh,
pem_to_der,
- decode_private_key
-%% encrypt_decrypt,
-%% rsa_verify
-%% dsa_verify_sign,
-%% pkix_encode_decode,
-%% pkix_verify_sign,
-%% pkix_path_validation
+ decode_private_key,
+ encrypt_decrypt,
+ sign_verify,
+ pkix,
+ pkix_path_validation
].
%% Test cases starts here.
@@ -118,20 +117,35 @@ app(doc) ->
"Test that the public_key app file is ok";
app(suite) ->
[];
-app(Config) when list(Config) ->
+app(Config) when is_list(Config) ->
ok = test_server:app_test(public_key).
+dh(doc) ->
+ "Test diffie-hellman functions file is ok";
+dh(suite) ->
+ [];
+dh(Config) when is_list(Config) ->
+ Datadir = ?config(data_dir, Config),
+ {ok,[DerDHparams = {dh_params, _, _}]} =
+ public_key:pem_to_der(filename:join(Datadir, "dh.pem")),
+ {ok, DHps = #'DHParameter'{prime=P,base=G}} = public_key:decode_dhparams(DerDHparams),
+ DHKeys = {Private,_Public} = public_key:gen_key(DHps),
+ test_server:format("DHparams = ~p~nDH Keys~p~n", [DHps, DHKeys]),
+ {_Private,_Public2} = pubkey_crypto:gen_key(diffie_hellman, [crypto:erlint(Private), P, G]),
+ ok.
+
+
pem_to_der(doc) ->
["Check that supported PEM files are decoded into the expected entry type"];
pem_to_der(suite) ->
[];
pem_to_der(Config) when is_list(Config) ->
Datadir = ?config(data_dir, Config),
- {ok,[{dsa_private_key, _, not_encrypted}]} =
+ {ok,DSAKey =[{dsa_private_key, _, not_encrypted}]} =
public_key:pem_to_der(filename:join(Datadir, "dsa.pem")),
{ok,[{rsa_private_key, _, _}]} =
public_key:pem_to_der(filename:join(Datadir, "client_key.pem")),
- {ok,[{rsa_private_key, _, _}]} =
+ {ok, [{rsa_private_key, _, _}]} =
public_key:pem_to_der(filename:join(Datadir, "rsa.pem")),
{ok,[{rsa_private_key, _, _}]} =
public_key:pem_to_der(filename:join(Datadir, "rsa.pem"), "abcd1234"),
@@ -144,12 +158,18 @@ pem_to_der(Config) when is_list(Config) ->
public_key:pem_to_der(filename:join(Datadir, "client_cert.pem")),
{ok,[{cert_req, _, _}]} =
public_key:pem_to_der(filename:join(Datadir, "req.pem")),
- {ok,[{cert, _, _}, {cert, _, _}]} =
+ {ok, Certs = [{cert, _, _}, {cert, _, _}]} =
public_key:pem_to_der(filename:join(Datadir, "cacerts.pem")),
- {ok, Bin1} = file:read_file(filename:join(Datadir, "cacerts.pem")),
+ {ok, Bin1} = file:read_file(filename:join(Datadir, "cacerts.pem")),
{ok, [{cert, _, _}, {cert, _, _}]} = public_key:pem_to_der(Bin1),
+
+ ok = public_key:der_to_pem(filename:join(Datadir, "wcacerts.pem"), Certs),
+ ok = public_key:der_to_pem(filename:join(Datadir, "wdsa.pem"), DSAKey),
+ {ok, Certs} = public_key:pem_to_der(filename:join(Datadir, "wcacerts.pem")),
+ {ok, DSAKey} = public_key:pem_to_der(filename:join(Datadir, "wdsa.pem")),
+
ok.
%%--------------------------------------------------------------------
decode_private_key(doc) ->
@@ -178,84 +198,148 @@ encrypt_decrypt(doc) ->
encrypt_decrypt(suite) ->
[];
encrypt_decrypt(Config) when is_list(Config) ->
- RSAPrivateKey = #'RSAPrivateKey'{publicExponent = 17,
- modulus = 3233,
- privateExponent = 2753,
- prime1 = 61,
- prime2 = 53,
- version = 'two-prime'},
- Msg = <<0,123>>,
- {ok, Encrypted} = public_key:encrypt(Msg, RSAPrivateKey, [{block_type, 2}]),
- test_server:format("Expected 855, Encrypted ~p ~n", [Encrypted]),
+ {PrivateKey, _DerKey} = pkey_test:gen_rsa(64),
+ #'RSAPrivateKey'{modulus=Mod, publicExponent=Exp} = PrivateKey,
+ PublicKey = #'RSAPublicKey'{modulus=Mod, publicExponent=Exp},
+ Msg = list_to_binary(lists:duplicate(5, "Foo bar 100")),
+ RsaEncrypted = public_key:encrypt_private(Msg, PrivateKey),
+ Msg = public_key:decrypt_public(RsaEncrypted, PublicKey),
+ Msg = public_key:decrypt_public(RsaEncrypted, PrivateKey),
+ RsaEncrypted2 = public_key:encrypt_public(Msg, PublicKey),
+ RsaEncrypted3 = public_key:encrypt_public(Msg, PrivateKey),
+ Msg = public_key:decrypt_private(RsaEncrypted2, PrivateKey),
+ Msg = public_key:decrypt_private(RsaEncrypted3, PrivateKey),
+
ok.
+
+%%--------------------------------------------------------------------
+sign_verify(doc) ->
+ ["Checks that we can sign and verify signatures."];
+sign_verify(suite) ->
+ [];
+sign_verify(Config) when is_list(Config) ->
+ %% Make cert signs and validates the signature using RSA and DSA
+ Ca = {_, CaKey} = pkey_test:make_cert([]),
+ {ok, PrivateRSA = #'RSAPrivateKey'{modulus=Mod, publicExponent=Exp}} =
+ public_key:decode_private_key(CaKey),
+
+ CertInfo = {Cert1,CertKey1} = pkey_test:make_cert([{key, dsa}, {issuer, Ca}]),
+
+ PublicRSA = #'RSAPublicKey'{modulus=Mod, publicExponent=Exp},
+ true = public_key:verify_signature(Cert1, PublicRSA, undefined),
+
+ {Cert2,_CertKey} = pkey_test:make_cert([{issuer, CertInfo}]),
+
+ {ok, #'DSAPrivateKey'{p=P, q=Q, g=G, y=Y, x=_X}} =
+ public_key:decode_private_key(CertKey1),
+ true = public_key:verify_signature(Cert2, Y, #'Dss-Parms'{p=P, q=Q, g=G}),
+ %% RSA sign
+ Msg0 = lists:duplicate(5, "Foo bar 100"),
+ Msg = list_to_binary(Msg0),
+ RSASign = public_key:sign(sha, Msg0, PrivateRSA),
+ RSASign = public_key:sign(Msg, PrivateRSA),
+ true = public_key:verify_signature(Msg, sha, RSASign, PublicRSA),
+ false = public_key:verify_signature(<<1:8, Msg/binary>>, sha, RSASign, PublicRSA),
+ false = public_key:verify_signature(Msg, sha, <<1:8, RSASign/binary>>, PublicRSA),
+ RSASign = public_key:sign(sha, Msg, PrivateRSA),
+ RSASign1 = public_key:sign(md5, Msg, PrivateRSA),
+ true = public_key:verify_signature(Msg, md5, RSASign1, PublicRSA),
+
+ %% DSA sign
+ Datadir = ?config(data_dir, Config),
+ {ok,[DsaKey = {dsa_private_key, _, _}]} =
+ public_key:pem_to_der(filename:join(Datadir, "dsa.pem")),
+ {ok, DSAPrivateKey} = public_key:decode_private_key(DsaKey),
+ #'DSAPrivateKey'{p=P1, q=Q1, g=G1, y=Y1, x=_X1} = DSAPrivateKey,
+ DSASign = public_key:sign(Msg, DSAPrivateKey),
+ DSAPublicKey = Y1,
+ DSAParams = #'Dss-Parms'{p=P1, q=Q1, g=G1},
+ true = public_key:verify_signature(Msg, sha, DSASign, DSAPublicKey, DSAParams),
+ false = public_key:verify_signature(<<1:8, Msg/binary>>, sha, DSASign, DSAPublicKey, DSAParams),
+ false = public_key:verify_signature(Msg, sha, <<1:8, DSASign/binary>>, DSAPublicKey, DSAParams),
+
+ ok.
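
A trimmed sketch of the RSA half of the test above, assuming the caller already holds an #'RSAPrivateKey'{} record (the sign_and_verify/1 wrapper and the messages are illustrative):

sign_and_verify(#'RSAPrivateKey'{modulus = Mod, publicExponent = Exp} = Priv) ->
    Pub = #'RSAPublicKey'{modulus = Mod, publicExponent = Exp},
    Msg = <<"message to sign">>,
    Sig = public_key:sign(sha, Msg, Priv),
    %% the signature verifies against the untouched message only
    true  = public_key:verify_signature(Msg, sha, Sig, Pub),
    false = public_key:verify_signature(<<"tampered">>, sha, Sig, Pub),
    ok.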
+pkix(doc) ->
+ "Misc pkix tests not covered elsewhere";
+pkix(suite) ->
+ [];
+pkix(Config) when is_list(Config) ->
+ Datadir = ?config(data_dir, Config),
+ {ok,Certs0} = public_key:pem_to_der(filename:join(Datadir, "cacerts.pem")),
+ {ok,Certs1} = public_key:pem_to_der(filename:join(Datadir, "client_cert.pem")),
+ TestTransform = fun({cert, CertDer, not_encrypted}) ->
+ {ok, PlainCert} = public_key:pkix_decode_cert(CertDer, plain),
+ {ok, OtpCert} = public_key:pkix_decode_cert(CertDer, otp),
+ CertDer = public_key:pkix_encode_cert(OtpCert),
+ CertDer = public_key:pkix_encode_cert(PlainCert),
+ OTPSubj = (OtpCert#'OTPCertificate'.tbsCertificate)#'OTPTBSCertificate'.subject,
+ Subj = public_key:pkix_transform(OTPSubj, encode),
+ {ok, DNEncoded} = 'OTP-PUB-KEY':encode('Name', Subj),
+ Subj2 = (PlainCert#'Certificate'.tbsCertificate)#'TBSCertificate'.subject,
+ {ok, DNEncoded} = 'OTP-PUB-KEY':encode('Name', Subj2),
+ OTPSubj = public_key:pkix_transform(Subj2, decode),
+ false = public_key:pkix_is_fixed_dh_cert(CertDer)
+ end,
+ [TestTransform(Cert) || Cert <- Certs0 ++ Certs1],
+ true = public_key:pkix_is_self_signed(element(2,hd(Certs0))),
+ false = public_key:pkix_is_self_signed(element(2,hd(Certs1))),
+ CaIds = [element(2, public_key:pkix_issuer_id(Cert, self)) || {cert, Cert, _} <- Certs0],
+ {ok, IssuerId = {_, IssuerName}} = public_key:pkix_issuer_id(element(2,hd(Certs1)), other),
+ true = lists:member(IssuerId, CaIds),
+ %% Should be normalized already
+ TestStr = {rdnSequence, [[{'AttributeTypeAndValue', {2,5,4,3},{printableString,"ERLANGCA"}}],
+ [{'AttributeTypeAndValue', {2,5,4,3},{printableString," erlang ca "}}]]},
+ VerifyStr = {rdnSequence, [[{'AttributeTypeAndValue', {2,5,4,3},{printableString,"erlang ca"}}],
+ [{'AttributeTypeAndValue', {2,5,4,3},{printableString,"erlangca"}}]]},
+ VerifyStr = public_key:pkix_normalize_general_name(TestStr),
-%% Datadir = ?config(data_dir, Config),
-%% {ok,[{rsa_private_key, EncKey}]} =
-%% public_key:pem_to_der(filename:join(Datadir, "server_key.pem")),
-%% {ok, Key} = public_key:decode_private_key(EncKey, rsa),
-%% RSAPublicKey = #'RSAPublicKey'{publicExponent =
-%% Key#'RSAPrivateKey'.publicExponent,
-%% modulus = Key#'RSAPrivateKey'.modulus},
-%% {ok, Msg} = file:read_file(filename:join(Datadir, "msg.txt")),
-%% Hash = crypto:sha(Msg),
-%% {ok, Encrypted} = public_key:encrypt(Hash, Key, [{block_type, 2}]),
-%% test_server:format("Encrypted ~p", [Encrypted]),
-%% {ok, Decrypted} = public_key:decrypt(Encrypted,
-%% RSAPublicKey, [{block_type, 1}]),
-%% test_server:format("Encrypted ~p", [Decrypted]),
-%% true = Encrypted == Decrypted.
-
-%%--------------------------------------------------------------------
-rsa_verify(doc) ->
- ["Cheks that we can verify an rsa signature."];
-rsa_verify(suite) ->
+ ok.
+
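The TestTransform fun above carries the core of the test; the same decode/encode symmetry in isolation, assuming CertDer is a DER binary taken from a {cert, CertDer, not_encrypted} entry (the decode_both_ways/1 wrapper is illustrative):

decode_both_ways(CertDer) ->
    {ok, Plain} = public_key:pkix_decode_cert(CertDer, plain),
    {ok, Otp}   = public_key:pkix_decode_cert(CertDer, otp),
    %% re-encoding either representation reproduces the original DER
    CertDer = public_key:pkix_encode_cert(Plain),
    CertDer = public_key:pkix_encode_cert(Otp),
    ok.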
+pkix_path_validation(doc) ->
+ "Misc pkix tests not covered elsewhere";
+pkix_path_validation(suite) ->
[];
-rsa_verify(Config) when is_list(Config) ->
- Datadir = ?config(data_dir, Config),
+pkix_path_validation(Config) when is_list(Config) ->
+ CaK = {Trusted,_} =
+ pkey_test:make_cert([{key, dsa},
+ {subject, [
+ {name, "Public Key"},
+ {?'id-at-name', {printableString, "public_key"}},
+ {?'id-at-pseudonym', {printableString, "pubkey"}},
+ {city, "Stockholm"},
+ {country, "SE"},
+ {org, "erlang"},
+ {org_unit, "testing dep"}
+ ]}
+ ]),
+ ok = pkey_test:write_pem("./", "public_key_cacert", CaK),
+
+ CertK1 = {Cert1, _} = pkey_test:make_cert([{issuer, CaK}]),
+ CertK2 = {Cert2,_} = pkey_test:make_cert([{issuer, CertK1}, {digest, md5}, {extensions, false}]),
+ ok = pkey_test:write_pem("./", "public_key_cert", CertK2),
- {ok,[{cert, DerCert}]} =
- public_key:pem_to_der(filename:join(Datadir, "server_cert.pem")),
+ {ok, _} = public_key:pkix_path_validation(Trusted, [Cert1], []),
- {ok, OTPCert} = public_key:pkix_decode_cert(DerCert, otp),
+ {error, {bad_cert,invalid_issuer}} = public_key:pkix_path_validation(Trusted, [Cert2], []),
+ %%{error, {bad_cert,invalid_issuer}} = public_key:pkix_path_validation(Trusted, [Cert2], [{verify,false}]),
- {0, Signature} = OTPCert#'Certificate'.signature,
- TBSCert = OTPCert#'Certificate'.tbsCertificate,
+ {ok, _} = public_key:pkix_path_validation(Trusted, [Cert1, Cert2], []),
+ {error, issuer_not_found} = public_key:pkix_issuer_id(Cert2, other),
- #'TBSCertificate'{subjectPublicKeyInfo = Info} = TBSCert,
-
- #'SubjectPublicKeyInfo'{subjectPublicKey = RSAPublicKey} = Info,
+ CertK3 = {Cert3,_} = pkey_test:make_cert([{issuer, CertK1}, {extensions, [{basic_constraints, false}]}]),
+ {Cert4,_} = pkey_test:make_cert([{issuer, CertK3}]),
+ {error, E={bad_cert,missing_basic_constraint}} =
+ public_key:pkix_path_validation(Trusted, [Cert1, Cert3,Cert4], []),
- EncTBSCert = encoded_tbs_cert(DerCert),
- Digest = crypto:sha(EncTBSCert),
-
- public_key:verify_signature(Digest, Signature, RSAPublicKey).
-
-
-%% Signature is generated in the following way (in datadir):
-%% openssl dgst -sha1 -binary -out rsa_signature -sign server_key.pem msg.txt
-%%{ok, Signature} = file:read_file(filename:join(Datadir, "rsa_signature")),
-%%{ok, Signature} = file:read_file(filename:join(Datadir, "rsa_signature")),
-%% {ok, Msg} = file:read_file(filename:join(Datadir, "msg.txt")),
-%% Digest = crypto:sha(Msg),
-%% {ok,[{rsa_private_key, EncKey}]} =
-%% public_key:pem_to_der(filename:join(Datadir, "server_key.pem")),
-%% {ok, Key} = public_key:decode_private_key(EncKey, rsa),
-%% RSAPublicKey = #'RSAPublicKey'{publicExponent =
-%% Key#'RSAPrivateKey'.publicExponent,
-%% modulus = Key#'RSAPrivateKey'.modulus},
-
-encoded_tbs_cert(Cert) ->
- {ok, PKIXCert} =
- 'OTP-PUB-KEY':decode_TBSCert_exclusive(Cert),
- {'Certificate',
- {'Certificate_tbsCertificate', EncodedTBSCert}, _, _} = PKIXCert,
- EncodedTBSCert.
+ {ok, {_,_,[E]}} = public_key:pkix_path_validation(Trusted, [Cert1, Cert3,Cert4], [{verify,false}]),
+ % test_server:format("PV ~p ~n", [Result]),
+ ok.
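
A small sketch of how a caller might wrap the path-validation calls tested above; Trusted and the chain certificates are DER binaries as in the test (the validate_chain/2 wrapper is illustrative):

validate_chain(Trusted, Chain) ->
    case public_key:pkix_path_validation(Trusted, Chain, []) of
        {ok, Info} ->
            {valid, Info};
        {error, {bad_cert, Reason}} ->
            {invalid, Reason}
    end.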
diff --git a/lib/public_key/vsn.mk b/lib/public_key/vsn.mk
index 8c4e4127b2..4b3071a85b 100644
--- a/lib/public_key/vsn.mk
+++ b/lib/public_key/vsn.mk
@@ -1,6 +1,6 @@
PUBLIC_KEY_VSN = 0.7
-TICKETS = OTP-8626
+TICKETS = OTP-8626 OTP-8649
#TICKETS_0.6 = OTP-7046 \
# OTP-8553
diff --git a/lib/reltool/vsn.mk b/lib/reltool/vsn.mk
index b0561a6110..44bbd136e1 100644
--- a/lib/reltool/vsn.mk
+++ b/lib/reltool/vsn.mk
@@ -16,7 +16,7 @@
#
# %CopyrightEnd%
-RELTOOL_VSN = 0.5.3
+RELTOOL_VSN = 0.5.4
TICKETS = OTP-8057
TICKETS_0_5_2 = OTP-8254
diff --git a/lib/runtime_tools/vsn.mk b/lib/runtime_tools/vsn.mk
index 4bbdef19de..9e87d5b144 100644
--- a/lib/runtime_tools/vsn.mk
+++ b/lib/runtime_tools/vsn.mk
@@ -1 +1 @@
-RUNTIME_TOOLS_VSN = 1.8.3
+RUNTIME_TOOLS_VSN = 1.8.4
diff --git a/lib/snmp/doc/src/notes.xml b/lib/snmp/doc/src/notes.xml
index eb7c9db6ba..3f4954cfbd 100644
--- a/lib/snmp/doc/src/notes.xml
+++ b/lib/snmp/doc/src/notes.xml
@@ -33,6 +33,61 @@
</header>
<section>
+ <title>SNMP Development Toolkit 4.17</title>
+ <p>Version 4.17 supports code replacement in runtime from/to
+ version 4.16.2, 4.16.1, 4.16, 4.15, 4.14 and 4.13.5.</p>
+
+ <section>
+ <title>Improvements and new features</title>
+ <!--
+ <p>-</p>
+ -->
+ <list type="bulleted">
+ <item>
+ <p>[agent] Added very basic support for multiple SNMPv3
+ EngineIDs in a single agent. See
+ <seealso marker="snmpa#send_notification">send_notification/7</seealso>,
+ <seealso marker="snmpa_mpd#process_packet">process_packet/7</seealso>,
+ <seealso marker="snmpa_mpd#generate_response_msg">generate_response_msg/6</seealso> or
+ <seealso marker="snmpa_mpd#generate_msg">generate_msg/6</seealso>
+ for more info. </p>
+
+ <p>Own Id: OTP-8478</p>
+ </item>
+
+ </list>
+
+ </section>
+
+ <section>
+ <title>Reported Fixed Bugs and Malfunctions</title>
+ <p>-</p>
+
+ <!--
+ <list type="bulleted">
+ <item>
+ <p>The config utility
+ (<seealso marker="snmp#config">snmp:config/0</seealso>)
+ generated a default notify.conf
+ with a bad name for the standard trap entry (was "stadard trap",
+ but should have been "standard trap"). This has been corrected. </p>
+ <p>Kenji Rikitake</p>
+ <p>Own Id: OTP-8433</p>
+ </item>
+
+ </list>
+ -->
+
+ </section>
+
+ <section>
+ <title>Incompatibilities</title>
+ <p>-</p>
+ </section>
+ </section> <!-- 4.17 -->
+
+
+ <section>
<title>SNMP Development Toolkit 4.16.2</title>
<p>Version 4.16.2 supports code replacement in runtime from/to
version 4.16.1, 4.16, 4.15, 4.14 and 4.13.5.</p>
@@ -60,6 +115,12 @@
<p>Own Id: OTP-8594</p>
</item>
+ <item>
+ <p>[agent] Changed the default value for the MIB server cache
+ auto GC: it is now on by default. </p>
+ <p>Own Id: OTP-8648</p>
+ </item>
+
</list>
</section>
@@ -83,6 +144,15 @@
<p>Own Id: OTP-8595</p>
</item>
+ <item>
+ <p>[manager] Race condition causing the manager server process to
+ crash. Unregistering an agent while traffic (set/get-operations)
+ is ongoing could cause a crash in the manager server process
+ (race condition). </p>
+ <p>Own Id: OTP-8646</p>
+ <p>Aux Id: Seq 11585</p>
+ </item>
+
</list>
</section>
diff --git a/lib/snmp/doc/src/snmp_app.xml b/lib/snmp/doc/src/snmp_app.xml
index 57eb87a759..694e619da1 100644
--- a/lib/snmp/doc/src/snmp_app.xml
+++ b/lib/snmp/doc/src/snmp_app.xml
@@ -346,7 +346,7 @@
<p>Defines if the mib server shall perform cache gc automatically or
leave it to the user (see
<seealso marker="snmpa#gc_mibs_cache">gc_mibs_cache/0,1,2,3</seealso>). </p>
- <p>Default is <c>false</c>.</p>
+ <p>Default is <c>true</c>.</p>
</item>
<tag><c><![CDATA[mibs_cache_age() = integer() > 0 <optional>]]></c></tag>
diff --git a/lib/snmp/doc/src/snmp_config.xml b/lib/snmp/doc/src/snmp_config.xml
index 5bd36305fc..769b908adc 100644
--- a/lib/snmp/doc/src/snmp_config.xml
+++ b/lib/snmp/doc/src/snmp_config.xml
@@ -343,7 +343,7 @@
<p>Defines if the mib server shall perform cache gc automatically or
leave it to the user (see
<seealso marker="snmpa#gc_mibs_cache">gc_mibs_cache/0,1,2,3</seealso>). </p>
- <p>Default is <c>false</c>.</p>
+ <p>Default is <c>true</c>.</p>
</item>
<tag><c><![CDATA[mibs_cache_age() = integer() > 0 <optional>]]></c></tag>
diff --git a/lib/snmp/doc/src/snmpa.xml b/lib/snmp/doc/src/snmpa.xml
index b3661ae9b0..1be6abe6dd 100644
--- a/lib/snmp/doc/src/snmpa.xml
+++ b/lib/snmp/doc/src/snmpa.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>2004</year><year>2009</year>
+ <year>2004</year><year>2010</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -13,12 +13,12 @@
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at http://www.erlang.org/.
-
+
Software distributed under the License is distributed on an "AS IS"
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
-
+
</legalnotice>
<title>snmpa</title>
@@ -648,6 +648,20 @@ notification_delivery_info() = #snmpa_notification_delivery_info{}
<desc>
<p>Disable the mib server cache. </p>
+ <marker id="which_mibs_cache_size"></marker>
+ </desc>
+ </func>
+
+ <func>
+ <name>which_mibs_cache_size() -> void()</name>
+ <name>which_mibs_cache_size(Agent) -> void()</name>
+ <fsummary>The size of the mib server cache</fsummary>
+ <type>
+ <v>Agent = pid() | atom()</v>
+ </type>
+ <desc>
+ <p>Retrieve the size of the mib server cache. </p>
+
<marker id="gc_mibs_cache"></marker>
</desc>
</func>
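
A sketch of how the new function might be used from application code; the arity-0 call goes to the default snmp_master_agent, as the snmpa.erl hunk further down shows (the report_cache_size/0 wrapper is illustrative):

report_cache_size() ->
    %% asks the default master agent; which_mibs_cache_size(Agent) names one
    Size = snmpa:which_mibs_cache_size(),
    error_logger:info_msg("mib server cache size: ~p~n", [Size]),
    Size.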
@@ -867,6 +881,7 @@ snmp_agent:register_subagent(SA1,[1,2,3], SA2).
<name>send_notification(Agent, Notification, Receiver, Varbinds)</name>
<name>send_notification(Agent, Notification, Receiver, NotifyName, Varbinds)</name>
<name>send_notification(Agent, Notification, Receiver, NotifyName, ContextName, Varbinds) -> void() </name>
+ <name>send_notification(Agent, Notification, Receiver, NotifyName, ContextName, Varbinds, LocalEngineID) -> void() </name>
<fsummary>Send a notification</fsummary>
<type>
<v>Agent = pid() | atom()</v>
@@ -888,6 +903,7 @@ snmp_agent:register_subagent(SA1,[1,2,3], SA2).
<v>OID = oid()</v>
<v>Value = term()</v>
<v>RowIndex = [int()]</v>
+ <v>LocalEngineID = string()</v>
</type>
<desc>
<p>Sends the notification <c>Notification</c> to the
@@ -1027,6 +1043,7 @@ snmp_agent:register_subagent(SA1,[1,2,3], SA2).
<item><c>{?sysLocation_instance, "upstairs"}</c> (provided
that the generated <c>.hrl</c> file is included)</item>
</list>
+
<p>If a variable in the notification is a table element, the
<c>RowIndex</c> for the element must be given in the
<c>Varbinds</c> list. In this case, the OBJECT IDENTIFIER sent
@@ -1034,15 +1051,27 @@ snmp_agent:register_subagent(SA1,[1,2,3], SA2).
element. This OBJECT IDENTIFIER could be used in a get
operation later.
</p>
+
<p>This function is asynchronous, and does not return any
information. If an error occurs, <c>user_err/2</c> of the error
report module is called and the notification is discarded.
</p>
+ <note>
+ <p>Note that the use of the LocalEngineID argument is only intended
+ for special cases, when the agent is to "emulate" multiple EngineIDs.
+ By default, the agent uses the value of <c>SnmpEngineID</c>
+ (see SNMP-FRAMEWORK-MIB). </p>
+ </note>
+
+<!--
<marker id="send_trap"></marker>
+-->
+ <marker id="discovery"></marker>
</desc>
</func>
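
A hedged sketch of the two calling styles side by side; the linkUp notification name, the empty NotifyName/ContextName strings and the engine-ID value are illustrative, not part of the patch:

notify(Agent, Receiver, Varbinds) ->
    %% default: the agent stamps its own SnmpEngineID
    snmpa:send_notification(Agent, linkUp, Receiver, "", "", Varbinds),
    %% new arity: "emulate" another engine for this notification
    LocalEngineID = "some other engine id",
    snmpa:send_notification(Agent, linkUp, Receiver, "", "",
                            Varbinds, LocalEngineID).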
+<!--
<func>
<name>send_trap(Agent,Trap,Community)</name>
<name>send_trap(Agent,Trap,Community,Varbinds) -> void()</name>
@@ -1114,6 +1143,7 @@ snmp_agent:register_subagent(SA1,[1,2,3], SA2).
<marker id="discovery"></marker>
</desc>
</func>
+-->
<func>
<name>discovery(TargetName, Notification) -> {ok, ManagerEngineID} | {error, Reason}</name>
diff --git a/lib/snmp/doc/src/snmpa_mpd.xml b/lib/snmp/doc/src/snmpa_mpd.xml
index ea5bde8956..202e6b5661 100644
--- a/lib/snmp/doc/src/snmpa_mpd.xml
+++ b/lib/snmp/doc/src/snmpa_mpd.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>1999</year><year>2009</year>
+ <year>1999</year><year>2010</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -13,12 +13,12 @@
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at http://www.erlang.org/.
-
+
Software distributed under the License is distributed on an "AS IS"
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
-
+
</legalnotice>
<title>snmpa_mpd</title>
@@ -63,15 +63,19 @@
</func>
<func>
- <name>process_packet(Packet, TDomain, TAddress, State) -> {ok, Vsn, Pdu, PduMS, ACMData} | {discarded, Reason} | {discovery, DiscoPacket}</name>
+ <name>process_packet(Packet, TDomain, TAddress, State, NoteStore, Log) -> {ok, Vsn, Pdu, PduMS, ACMData} | {discarded, Reason} | {discovery, DiscoPacket}</name>
+ <name>process_packet(Packet, TDomain, TAddress, LocalEngineID, State, NoteStore, Log) -> {ok, Vsn, Pdu, PduMS, ACMData} | {discarded, Reason} | {discovery, DiscoPacket}</name>
<fsummary>Process a packet received from the network</fsummary>
<type>
<v>Packet = binary()</v>
<v>TDomain = snmpUDPDomain</v>
<v>TAddress = {Ip, Udp}</v>
+ <v>LocalEngineID = string()</v>
<v>Ip = {integer(), integer(), integer(), integer()}</v>
<v>Udp = integer()</v>
<v>State = mpd_state()</v>
+ <v>NoteStore = pid()</v>
+ <v>Log = snmp_log()</v>
<v>Vsn = 'version-1' | 'version-2' | 'version-3'</v>
<v>Pdu = #pdu</v>
<v>PduMs = integer()</v>
@@ -84,18 +88,27 @@
decryption as necessary. The return values should be passed to the
agent.</p>
+ <note>
+ <p>Note that the use of the LocalEngineID argument is only intended
+ for special cases, when the agent is to "emulate" multiple EngineIDs.
+ By default, the agent uses the value of <c>SnmpEngineID</c>
+ (see SNMP-FRAMEWORK-MIB). </p>
+ </note>
+
<marker id="generate_response_msg"></marker>
</desc>
</func>
<func>
- <name>generate_response_msg(Vsn, RePdu, Type, ACMData) -> {ok, Packet} | {discarded, Reason}</name>
+ <name>generate_response_msg(Vsn, RePdu, Type, ACMData, Log) -> {ok, Packet} | {discarded, Reason}</name>
+ <name>generate_response_msg(Vsn, RePdu, Type, ACMData, LocalEngineID, Log) -> {ok, Packet} | {discarded, Reason}</name>
<fsummary>Generate a response packet to be sent to the network</fsummary>
<type>
<v>Vsn = 'version-1' | 'version-2' | 'version-3'</v>
<v>RePdu = #pdu</v>
<v>Type = atom()</v>
<v>ACMData = acm_data()</v>
+ <v>LocalEngineID = string()</v>
<v>Packet = binary()</v>
</type>
<desc>
@@ -103,17 +116,27 @@
network. <c>Type</c> is the <c>#pdu.type</c> of the original
request.</p>
+ <note>
+ <p>Note that the use of the LocalEngineID argument is only intended
+ for special cases, when the agent is to "emulate" multiple EngineIDs.
+ By default, the agent uses the value of <c>SnmpEngineID</c>
+ (see SNMP-FRAMEWORK-MIB). </p>
+ </note>
+
<marker id="generate_msg"></marker>
</desc>
</func>
<func>
- <name>generate_msg(Vsn, Pdu, MsgData, To) -> {ok, PacketsAndAddresses} | {discarded, Reason}</name>
+ <name>generate_msg(Vsn, NoteStore, Pdu, MsgData, To) -> {ok, PacketsAndAddresses} | {discarded, Reason}</name>
+ <name>generate_msg(Vsn, NoteStore, Pdu, MsgData, LocalEngineID, To) -> {ok, PacketsAndAddresses} | {discarded, Reason}</name>
<fsummary>Generate a request message to be sent to the network</fsummary>
<type>
<v>Vsn = 'version-1' | 'version-2' | 'version-3'</v>
+ <v>NoteStore = pid()</v>
<v>Pdu = #pdu</v>
<v>MsgData = msg_data()</v>
+ <v>LocalEngineID = string()</v>
<v>To = [dest_addrs()]</v>
<v>PacketsAndAddresses = [{TDomain, TAddress, Packet}]</v>
<v>TDomain = snmpUDPDomain</v>
@@ -136,6 +159,13 @@
also received from the requests mentioned above.
</p>
+ <note>
+ <p>Note that the use of the LocalEngineID argument is only intended
+ for special cases, when the agent is to "emulate" multiple EngineIDs.
+ By default, the agent uses the value of <c>SnmpEngineID</c>
+ (see SNMP-FRAMEWORK-MIB). </p>
+ </note>
+
<marker id="discarded_pdu"></marker>
</desc>
</func>
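
For a custom net-if module the change is mechanical: the new arities take the engine ID to act as, while the old ones keep defaulting to the agent's own SnmpEngineID. A sketch, assuming State, NoteStore and Log are the values such a process already carries (the handle_incoming/6 wrapper is illustrative):

handle_incoming(Packet, TAddress, LocalEngineID, State, NoteStore, Log) ->
    snmpa_mpd:process_packet(Packet, snmpUDPDomain, TAddress,
                             LocalEngineID, State, NoteStore, Log).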
diff --git a/lib/snmp/src/agent/snmpa.erl b/lib/snmp/src/agent/snmpa.erl
index a113bba3a7..87b191caed 100644
--- a/lib/snmp/src/agent/snmpa.erl
+++ b/lib/snmp/src/agent/snmpa.erl
@@ -47,6 +47,7 @@
mib_of/1, mib_of/2,
me_of/1, me_of/2,
invalidate_mibs_cache/0, invalidate_mibs_cache/1,
+ which_mibs_cache_size/0, which_mibs_cache_size/1,
enable_mibs_cache/0, enable_mibs_cache/1,
disable_mibs_cache/0, disable_mibs_cache/1,
gc_mibs_cache/0, gc_mibs_cache/1, gc_mibs_cache/2, gc_mibs_cache/3,
@@ -60,7 +61,7 @@
register_subagent/3, unregister_subagent/2,
send_notification/3, send_notification/4, send_notification/5,
- send_notification/6,
+ send_notification/6, send_notification/7,
send_trap/3, send_trap/4,
discovery/2, discovery/3, discovery/4, discovery/5, discovery/6,
@@ -302,6 +303,13 @@ invalidate_mibs_cache(Agent) ->
snmpa_agent:invalidate_mibs_cache(Agent).
+which_mibs_cache_size() ->
+ which_mibs_cache_size(snmp_master_agent).
+
+which_mibs_cache_size(Agent) ->
+ snmpa_agent:which_mibs_cache_size(Agent).
+
+
enable_mibs_cache() ->
enable_mibs_cache(snmp_master_agent).
@@ -415,14 +423,23 @@ send_notification(Agent, Notification, Recv, Varbinds) ->
send_notification(Agent, Notification, Recv, NotifyName, Varbinds) ->
send_notification(Agent, Notification, Recv, NotifyName, "", Varbinds).
-send_notification(Agent, Notification, Recv,
- NotifyName, ContextName, Varbinds)
+send_notification(Agent, Notification, Recv, NotifyName,
+ ContextName, Varbinds)
when (is_list(NotifyName) andalso
is_list(ContextName) andalso
is_list(Varbinds)) ->
snmpa_agent:send_trap(Agent, Notification, NotifyName,
ContextName, Recv, Varbinds).
+send_notification(Agent, Notification, Recv,
+ NotifyName, ContextName, Varbinds, LocalEngineID)
+ when (is_list(NotifyName) andalso
+ is_list(ContextName) andalso
+ is_list(Varbinds) andalso
+ is_list(LocalEngineID)) ->
+ snmpa_agent:send_trap(Agent, Notification, NotifyName,
+ ContextName, Recv, Varbinds, LocalEngineID).
+
%% Kept for backwards compatibility
send_trap(Agent, Trap, Community) ->
send_notification(Agent, Trap, no_receiver, Community, "", []).
diff --git a/lib/snmp/src/agent/snmpa_agent.erl b/lib/snmp/src/agent/snmpa_agent.erl
index fb04fca632..f70885b2ec 100644
--- a/lib/snmp/src/agent/snmpa_agent.erl
+++ b/lib/snmp/src/agent/snmpa_agent.erl
@@ -30,7 +30,7 @@
-export([subagent_set/2,
load_mibs/2, unload_mibs/2, which_mibs/1, whereis_mib/2, info/1,
register_subagent/3, unregister_subagent/2,
- send_trap/6,
+ send_trap/6, send_trap/7,
register_notification_filter/5,
unregister_notification_filter/2,
which_notification_filter/1,
@@ -48,6 +48,7 @@
get/2, get/3, get_next/2, get_next/3]).
-export([mib_of/1, mib_of/2, me_of/1, me_of/2,
invalidate_mibs_cache/1,
+ which_mibs_cache_size/1,
enable_mibs_cache/1, disable_mibs_cache/1,
gc_mibs_cache/1, gc_mibs_cache/2, gc_mibs_cache/3,
enable_mibs_cache_autogc/1, disable_mibs_cache_autogc/1,
@@ -64,7 +65,7 @@
%% Internal exports
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3, tr_var/2, tr_varbind/1,
- handle_pdu/7, worker/2, worker_loop/1, do_send_trap/6]).
+ handle_pdu/7, worker/2, worker_loop/1, do_send_trap/7]).
-ifndef(default_verbosity).
-define(default_verbosity,silence).
@@ -245,6 +246,10 @@ disable_mibs_cache(Agent) ->
call(Agent, {mibs_cache_request, disable_cache}).
+which_mibs_cache_size(Agent) ->
+ call(Agent, {mibs_cache_request, cache_size}).
+
+
enable_mibs_cache_autogc(Agent) ->
call(Agent, {mibs_cache_request, enable_autogc}).
@@ -524,14 +529,15 @@ which_notification_filter(Agent) ->
send_trap(Agent, Trap, NotifyName, CtxName, Recv, Varbinds) ->
?d("send_trap -> entry with"
- "~n self(): ~p"
- "~n Agent: ~p [~p]"
- "~n Trap: ~p"
- "~n NotifyName: ~p"
- "~n CtxName: ~p"
- "~n Recv: ~p"
- "~n Varbinds: ~p",
- [self(), Agent, wis(Agent), Trap, NotifyName, CtxName, Recv, Varbinds]),
+ "~n self(): ~p"
+ "~n Agent: ~p [~p]"
+ "~n Trap: ~p"
+ "~n NotifyName: ~p"
+ "~n CtxName: ~p"
+ "~n Recv: ~p"
+ "~n Varbinds: ~p",
+ [self(), Agent, wis(Agent),
+ Trap, NotifyName, CtxName, Recv, Varbinds]),
Msg = {send_trap, Trap, NotifyName, CtxName, Recv, Varbinds},
case (wis(Agent) =:= self()) of
false ->
@@ -540,6 +546,27 @@ send_trap(Agent, Trap, NotifyName, CtxName, Recv, Varbinds) ->
Agent ! Msg
end.
+send_trap(Agent, Trap, NotifyName, CtxName, Recv, Varbinds, LocalEngineID) ->
+ ?d("send_trap -> entry with"
+ "~n self(): ~p"
+ "~n Agent: ~p [~p]"
+ "~n Trap: ~p"
+ "~n NotifyName: ~p"
+ "~n CtxName: ~p"
+ "~n Recv: ~p"
+ "~n Varbinds: ~p"
+ "~n LocalEngineID: ~p",
+ [self(), Agent, wis(Agent),
+ Trap, NotifyName, CtxName, Recv, Varbinds, LocalEngineID]),
+ Msg =
+ {send_trap, Trap, NotifyName, CtxName, Recv, Varbinds, LocalEngineID},
+ case (wis(Agent) =:= self()) of
+ false ->
+ call(Agent, Msg);
+ true ->
+ Agent ! Msg
+ end.
+
%% -- Discovery functions --
@@ -626,6 +653,7 @@ wis(Pid) when is_pid(Pid) ->
wis(Atom) when is_atom(Atom) ->
whereis(Atom).
+
forward_trap(Agent, TrapRecord, NotifyName, CtxName, Recv, Varbinds) ->
Agent ! {forward_trap, TrapRecord, NotifyName, CtxName, Recv, Varbinds}.
@@ -719,14 +747,15 @@ handle_info(worker_available, S) ->
handle_info({send_trap, Trap, NotifyName, ContextName, Recv, Varbinds}, S) ->
?vlog("[handle_info] send trap request:"
- "~n Trap: ~p"
- "~n NotifyName: ~p"
- "~n ContextName: ~p"
- "~n Recv: ~p"
- "~n Varbinds: ~p",
- [Trap,NotifyName,ContextName,Recv,Varbinds]),
+ "~n Trap: ~p"
+ "~n NotifyName: ~p"
+ "~n ContextName: ~p"
+ "~n Recv: ~p"
+ "~n Varbinds: ~p",
+ [Trap, NotifyName, ContextName, Recv, Varbinds]),
+ LocalEngineID = ?DEFAULT_LOCAL_ENGINE_ID,
case catch handle_send_trap(S, Trap, NotifyName, ContextName,
- Recv, Varbinds) of
+ Recv, Varbinds, LocalEngineID) of
{ok, NewS} ->
{noreply, NewS};
{'EXIT', R} ->
@@ -736,17 +765,39 @@ handle_info({send_trap, Trap, NotifyName, ContextName, Recv, Varbinds}, S) ->
{noreply, S}
end;
-handle_info({forward_trap, TrapRecord, NotifyName, ContextName,
- Recv, Varbinds},S) ->
+handle_info({send_trap, Trap, NotifyName, ContextName, Recv, Varbinds,
+ LocalEngineID}, S) ->
+ ?vlog("[handle_info] send trap request:"
+ "~n Trap: ~p"
+ "~n NotifyName: ~p"
+ "~n ContextName: ~p"
+ "~n Recv: ~p"
+ "~n Varbinds: ~p"
+ "~n LocalEngineID: ~p",
+ [Trap, NotifyName, ContextName, Recv, Varbinds, LocalEngineID]),
+ case catch handle_send_trap(S, Trap, NotifyName, ContextName,
+ Recv, Varbinds, LocalEngineID) of
+ {ok, NewS} ->
+ {noreply, NewS};
+ {'EXIT', R} ->
+ ?vinfo("Trap not sent:~n ~p", [R]),
+ {noreply, S};
+ _ ->
+ {noreply, S}
+ end;
+
+handle_info({forward_trap, TrapRecord, NotifyName, ContextName,
+ Recv, Varbinds}, S) ->
?vlog("[handle_info] forward trap request:"
- "~n TrapRecord: ~p"
- "~n NotifyName: ~p"
- "~n ContextName: ~p"
- "~n Recv: ~p"
- "~n Varbinds: ~p",
- [TrapRecord,NotifyName,ContextName,Recv,Varbinds]),
+ "~n TrapRecord: ~p"
+ "~n NotifyName: ~p"
+ "~n ContextName: ~p"
+ "~n Recv: ~p"
+ "~n Varbinds: ~p",
+ [TrapRecord, NotifyName, ContextName, Recv, Varbinds]),
+ LocalEngineID = ?DEFAULT_LOCAL_ENGINE_ID,
case (catch maybe_send_trap(S, TrapRecord, NotifyName, ContextName,
- Recv, Varbinds)) of
+ Recv, Varbinds, LocalEngineID)) of
{ok, NewS} ->
{noreply, NewS};
{'EXIT', R} ->
@@ -856,17 +907,52 @@ handle_call(restart_set_worker, _From, #state{set_worker = Pid} = S) ->
ok
end,
{reply, ok, S};
+
handle_call({send_trap, Trap, NotifyName, ContextName, Recv, Varbinds},
_From, S) ->
?vlog("[handle_call] send trap request:"
- "~n Trap: ~p"
- "~n NotifyName: ~p"
- "~n ContextName: ~p"
- "~n Recv: ~p"
- "~n Varbinds: ~p",
- [Trap,NotifyName,ContextName,Recv,Varbinds]),
+ "~n Trap: ~p"
+ "~n NotifyName: ~p"
+ "~n ContextName: ~p"
+ "~n Recv: ~p"
+ "~n Varbinds: ~p",
+ [Trap, NotifyName, ContextName, Recv, Varbinds]),
+ LocalEngineID =
+ case S#state.type of
+ master_agent ->
+ ?DEFAULT_LOCAL_ENGINE_ID;
+ _ ->
+ %% subagent -
+ %% we don't need this, eventually the trap sent request
+ %% will reach the master-agent and then it will look up
+ %% the proper engine id.
+ ignore
+ end,
+ case (catch handle_send_trap(S, Trap, NotifyName, ContextName,
+ Recv, Varbinds, LocalEngineID)) of
+ {ok, NewS} ->
+ {reply, ok, NewS};
+ {'EXIT', Reason} ->
+ ?vinfo("Trap not sent:~n ~p", [Reason]),
+ {reply, {error, {send_failed, Reason}}, S};
+ _ ->
+ ?vinfo("Trap not sent", []),
+ {reply, {error, send_failed}, S}
+ end;
+
+handle_call({send_trap, Trap, NotifyName,
+ ContextName, Recv, Varbinds, LocalEngineID},
+ _From, S) ->
+ ?vlog("[handle_call] send trap request:"
+ "~n Trap: ~p"
+ "~n NotifyName: ~p"
+ "~n ContextName: ~p"
+ "~n Recv: ~p"
+ "~n Varbinds: ~p"
+ "~n LocalEngineID: ~p",
+ [Trap, NotifyName, ContextName, Recv, Varbinds, LocalEngineID]),
case (catch handle_send_trap(S, Trap, NotifyName, ContextName,
- Recv, Varbinds)) of
+ Recv, Varbinds, LocalEngineID)) of
{ok, NewS} ->
{reply, ok, NewS};
{'EXIT', Reason} ->
@@ -876,8 +962,10 @@ handle_call({send_trap, Trap, NotifyName, ContextName, Recv, Varbinds},
?vinfo("Trap not sent", []),
{reply, {error, send_failed}, S}
end;
+
handle_call({discovery,
- TargetName, Notification, ContextName, Vbs, DiscoHandler, ExtraInfo},
+ TargetName, Notification, ContextName, Vbs, DiscoHandler,
+ ExtraInfo},
From,
#state{disco = undefined} = S) ->
?vlog("[handle_call] initiate discovery process:"
@@ -1219,6 +1307,8 @@ handle_mibs_cache_request(MibServer, Req) ->
snmpa_mib:gc_cache(MibServer, Age);
{gc_cache, Age, GcLimit} ->
snmpa_mib:gc_cache(MibServer, Age, GcLimit);
+ cache_size ->
+ snmpa_mib:which_cache_size(MibServer);
enable_cache ->
snmpa_mib:enable_cache(MibServer);
disable_cache ->
@@ -1432,17 +1522,20 @@ spawn_thread(Vsn, Pdu, PduMS, ACMData, Address, Extra) ->
Args = [Vsn, Pdu, PduMS, ACMData, Address, Extra, Dict],
proc_lib:spawn_link(?MODULE, handle_pdu, Args).
-spawn_trap_thread(TrapRec, NotifyName, ContextName, Recv, V) ->
+spawn_trap_thread(TrapRec, NotifyName, ContextName, Recv, Vbs,
+ LocalEngineID) ->
Dict = get(),
proc_lib:spawn_link(?MODULE, do_send_trap,
- [TrapRec, NotifyName, ContextName, Recv, V, Dict]).
+ [TrapRec, NotifyName, ContextName,
+ Recv, Vbs, LocalEngineID, Dict]).
-do_send_trap(TrapRec, NotifyName, ContextName, Recv, V, Dict) ->
+do_send_trap(TrapRec, NotifyName, ContextName, Recv, Vbs,
+ LocalEngineID, Dict) ->
lists:foreach(fun({Key, Val}) -> put(Key, Val) end, Dict),
put(sname,trap_sender_short_name(get(sname))),
?vlog("starting",[]),
- snmpa_trap:send_trap(TrapRec, NotifyName, ContextName, Recv, V,
- get(net_if)).
+ snmpa_trap:send_trap(TrapRec, NotifyName, ContextName, Recv, Vbs,
+ LocalEngineID, get(net_if)).
worker(Master, Dict) ->
lists:foreach(fun({Key, Val}) -> put(Key, Val) end, Dict),
@@ -1457,17 +1550,22 @@ worker_loop(Master) ->
handle_pdu(Vsn, Pdu, PduMS, ACMData, Address, Extra),
Master ! worker_available;
- %% Old style message
- {MibView, Vsn, Pdu, PduMS, ACMData, AgentData, Extra} ->
- ?vtrace("worker_loop -> received (old) request", []),
- do_handle_pdu(MibView, Vsn, Pdu, PduMS, ACMData, AgentData, Extra),
+ %% We don't trap exits!
+ {TrapRec, NotifyName, ContextName, Recv, Vbs} ->
+ ?vtrace("worker_loop -> send trap:"
+ "~n ~p", [TrapRec]),
+ snmpa_trap:send_trap(TrapRec, NotifyName,
+ ContextName, Recv, Vbs, get(net_if)),
Master ! worker_available;
- {TrapRec, NotifyName, ContextName, Recv, V} -> % We don't trap exits!
+ %% We don't trap exits!
+ {send_trap,
+ TrapRec, NotifyName, ContextName, Recv, Vbs, LocalEngineID} ->
?vtrace("worker_loop -> send trap:"
"~n ~p", [TrapRec]),
snmpa_trap:send_trap(TrapRec, NotifyName,
- ContextName, Recv, V, get(net_if)),
+ ContextName, Recv, Vbs, LocalEngineID,
+ get(net_if)),
Master ! worker_available;
{verbosity, Verbosity} ->
@@ -1616,13 +1714,15 @@ handle_acm_error(Vsn, Reason, Pdu, ACMData, Address, Extra) ->
end.
-handle_send_trap(S, TrapName, NotifyName, ContextName, Recv, Varbinds) ->
+handle_send_trap(S, TrapName, NotifyName, ContextName, Recv, Varbinds,
+ LocalEngineID) ->
?vtrace("handle_send_trap -> entry with"
- "~n S#state.type: ~p"
- "~n TrapName: ~p"
- "~n NotifyName: ~p"
- "~n ContextName: ~p",
- [S#state.type, TrapName, NotifyName, ContextName]),
+ "~n S#state.type: ~p"
+ "~n TrapName: ~p"
+ "~n NotifyName: ~p"
+ "~n ContextName: ~p"
+ "~n LocalEngineID: ~p",
+ [S#state.type, TrapName, NotifyName, ContextName, LocalEngineID]),
case snmpa_trap:construct_trap(TrapName, Varbinds) of
{ok, TrapRecord, VarList} ->
?vtrace("handle_send_trap -> construction complete: "
@@ -1639,7 +1739,8 @@ handle_send_trap(S, TrapName, NotifyName, ContextName, Recv, Varbinds) ->
?vtrace("handle_send_trap -> "
"[master] handle send trap",[]),
maybe_send_trap(S, TrapRecord, NotifyName,
- ContextName, Recv, VarList)
+ ContextName, Recv, VarList,
+ LocalEngineID)
end;
error ->
error
@@ -1676,7 +1777,8 @@ maybe_forward_trap(#state{parent = Parent, nfilters = NFs} = S,
maybe_send_trap(#state{nfilters = NFs} = S,
- TrapRec, NotifyName, ContextName, Recv, Varbinds) ->
+ TrapRec, NotifyName, ContextName, Recv, Varbinds,
+ LocalEngineID) ->
?vtrace("maybe_send_trap -> entry with"
"~n NFs: ~p", [NFs]),
case filter_notification(NFs, [], TrapRec) of
@@ -1693,39 +1795,45 @@ maybe_send_trap(#state{nfilters = NFs} = S,
?vtrace("maybe_send_trap -> send trap:"
"~n ~p", [TrapRec2]),
do_handle_send_trap(S, TrapRec2,
- NotifyName, ContextName, Recv, Varbinds);
+ NotifyName, ContextName, Recv, Varbinds,
+ LocalEngineID);
{send, Removed, TrapRec2} ->
?vtrace("maybe_send_trap -> send trap:"
"~n ~p", [TrapRec2]),
NFs2 = del_notification_filter(Removed, NFs),
do_handle_send_trap(S#state{nfilters = NFs2}, TrapRec2,
- NotifyName, ContextName, Recv, Varbinds)
+ NotifyName, ContextName, Recv, Varbinds,
+ LocalEngineID)
end.
-do_handle_send_trap(S, TrapRec, NotifyName, ContextName, Recv, Varbinds) ->
- V = snmpa_trap:try_initialise_vars(get(mibserver), Varbinds),
+do_handle_send_trap(S, TrapRec, NotifyName, ContextName, Recv, Varbinds,
+ LocalEngineID) ->
+ Vbs = snmpa_trap:try_initialise_vars(get(mibserver), Varbinds),
case S#state.type of
subagent ->
forward_trap(S#state.parent, TrapRec, NotifyName, ContextName,
- Recv, V),
+ Recv, Vbs),
{ok, S};
master_agent when S#state.multi_threaded =:= false ->
?vtrace("do_handle_send_trap -> send trap:"
"~n ~p", [TrapRec]),
snmpa_trap:send_trap(TrapRec, NotifyName, ContextName,
- Recv, V, get(net_if)),
+ Recv, Vbs, LocalEngineID, get(net_if)),
{ok, S};
master_agent when S#state.worker_state =:= busy ->
%% Main worker busy => create new worker
?vtrace("do_handle_send_trap -> main worker busy: "
"spawn a trap sender", []),
- spawn_trap_thread(TrapRec, NotifyName, ContextName, Recv, V),
+ spawn_trap_thread(TrapRec, NotifyName, ContextName, Recv, Vbs,
+ LocalEngineID),
{ok, S};
master_agent ->
%% Send to main worker
?vtrace("do_handle_send_trap -> send to main worker",[]),
- S#state.worker ! {TrapRec, NotifyName, ContextName, Recv, V},
+ S#state.worker ! {send_trap,
+ TrapRec, NotifyName, ContextName, Recv, Vbs,
+ LocalEngineID},
{ok, S#state{worker_state = busy}}
end.
diff --git a/lib/snmp/src/agent/snmpa_internal.hrl b/lib/snmp/src/agent/snmpa_internal.hrl
index a33a6809dc..9fa874f119 100644
--- a/lib/snmp/src/agent/snmpa_internal.hrl
+++ b/lib/snmp/src/agent/snmpa_internal.hrl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2006-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -22,6 +22,8 @@
-include_lib("snmp/src/app/snmp_internal.hrl").
+-define(DEFAULT_LOCAL_ENGINE_ID, snmp_framework_mib:get_engine_id()).
+
-define(snmpa_info(F, A), ?snmp_info("agent", F, A)).
-define(snmpa_warning(F, A), ?snmp_warning("agent", F, A)).
-define(snmpa_error(F, A), ?snmp_error("agent", F, A)).
diff --git a/lib/snmp/src/agent/snmpa_mib.erl b/lib/snmp/src/agent/snmpa_mib.erl
index 370989d0be..ce90db18b3 100644
--- a/lib/snmp/src/agent/snmpa_mib.erl
+++ b/lib/snmp/src/agent/snmpa_mib.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(snmpa_mib).
@@ -55,7 +55,7 @@
-define(NO_CACHE, no_mibs_cache).
-define(DEFAULT_CACHE_USAGE, true).
-define(CACHE_GC_TICKTIME, timer:minutes(1)).
--define(DEFAULT_CACHE_AUTOGC, false).
+-define(DEFAULT_CACHE_AUTOGC, true).
-define(DEFAULT_CACHE_GCLIMIT, 100).
-define(DEFAULT_CACHE_AGE, timer:minutes(10)).
-define(CACHE_GC_TRIGGER, cache_gc_trigger).
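
Deployments that relied on the old default can switch the automatic GC back off at runtime; a sketch, assuming the usual arity-0 wrapper that targets snmp_master_agent like the other cache functions in this patch:

restore_manual_cache_gc() ->
    %% turn automatic MIB-cache GC off again and fall back to explicit
    %% snmpa:gc_mibs_cache/0 calls when needed
    snmpa:disable_mibs_cache_autogc().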
diff --git a/lib/snmp/src/agent/snmpa_mpd.erl b/lib/snmp/src/agent/snmpa_mpd.erl
index 2e09286b87..fd75b98f84 100644
--- a/lib/snmp/src/agent/snmpa_mpd.erl
+++ b/lib/snmp/src/agent/snmpa_mpd.erl
@@ -1,27 +1,28 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(snmpa_mpd).
-export([init/1, reset/0, inc/1, counters/0,
discarded_pdu/1,
- process_packet/6,
- generate_response_msg/5, generate_msg/5,
+ process_packet/6, process_packet/7,
+ generate_response_msg/5, generate_response_msg/6,
+ generate_msg/5, generate_msg/6,
generate_discovery_msg/4,
process_taddrs/1,
generate_req_id/0]).
@@ -34,6 +35,7 @@
-define(VMODULE,"MPD").
-include("snmp_verbosity.hrl").
+-include("snmpa_internal.hrl").
-define(empty_msg_size, 24).
@@ -120,6 +122,12 @@ reset() ->
%% section 4.2.1 in rfc2272)
%%-----------------------------------------------------------------
process_packet(Packet, TDomain, TAddress, State, NoteStore, Log) ->
+ LocalEngineID = ?DEFAULT_LOCAL_ENGINE_ID,
+ process_packet(Packet, TDomain, TAddress, LocalEngineID,
+ State, NoteStore, Log).
+
+process_packet(Packet, TDomain, TAddress, LocalEngineID,
+ State, NoteStore, Log) ->
inc(snmpInPkts),
case catch snmp_pdus:dec_message_only(binary_to_list(Packet)) of
@@ -127,15 +135,17 @@ process_packet(Packet, TDomain, TAddress, State, NoteStore, Log) ->
when State#state.v1 =:= true ->
?vlog("v1, community: ~s", [Community]),
HS = ?empty_msg_size + length(Community),
- v1_v2c_proc('version-1', NoteStore, Community, TDomain, TAddress,
- Data, HS, Log, Packet);
+ v1_v2c_proc('version-1', NoteStore, Community,
+ TDomain, TAddress,
+ LocalEngineID, Data, HS, Log, Packet);
#message{version = 'version-2', vsn_hdr = Community, data = Data}
when State#state.v2c =:= true ->
?vlog("v2c, community: ~s", [Community]),
HS = ?empty_msg_size + length(Community),
- v1_v2c_proc('version-2', NoteStore, Community, TDomain, TAddress,
- Data, HS, Log, Packet);
+ v1_v2c_proc('version-2', NoteStore, Community,
+ TDomain, TAddress,
+ LocalEngineID, Data, HS, Log, Packet);
#message{version = 'version-3', vsn_hdr = V3Hdr, data = Data}
when State#state.v3 =:= true ->
@@ -143,7 +153,9 @@ process_packet(Packet, TDomain, TAddress, State, NoteStore, Log) ->
[V3Hdr#v3_hdr.msgID,
V3Hdr#v3_hdr.msgFlags,
V3Hdr#v3_hdr.msgSecurityModel]),
- v3_proc(NoteStore, Packet, TDomain, TAddress, V3Hdr, Data, Log);
+ v3_proc(NoteStore, Packet,
+ TDomain, TAddress,
+ LocalEngineID, V3Hdr, Data, Log);
{'EXIT', {bad_version, Vsn}} ->
?vtrace("exit: bad version: ~p",[Vsn]),
@@ -170,10 +182,11 @@ discarded_pdu(Variable) -> inc(Variable).
%%-----------------------------------------------------------------
%% Handles a Community based message (v1 or v2c).
%%-----------------------------------------------------------------
-v1_v2c_proc(Vsn, NoteStore, Community, snmpUDPDomain, {Ip, Udp},
+v1_v2c_proc(Vsn, NoteStore, Community, snmpUDPDomain,
+ {Ip, Udp}, LocalEngineID,
Data, HS, Log, Packet) ->
TAddress = tuple_to_list(Ip) ++ [Udp div 256, Udp rem 256],
- AgentMS = snmp_framework_mib:get_engine_max_message_size(),
+ AgentMS = get_engine_max_message_size(LocalEngineID),
MgrMS = snmp_community_mib:get_target_addr_ext_mms(?snmpUDPDomain,
TAddress),
PduMS = case MgrMS of
@@ -220,10 +233,10 @@ v1_v2c_proc(Vsn, NoteStore, Community, snmpUDPDomain, {Ip, Udp},
{discarded, trap_pdu}
end;
v1_v2c_proc(_Vsn, _NoteStore, _Community, snmpUDPDomain, TAddress,
- _Data, _HS, _Log, _Packet) ->
+ _LocalEngineID, _Data, _HS, _Log, _Packet) ->
{discarded, {badarg, TAddress}};
v1_v2c_proc(_Vsn, _NoteStore, _Community, TDomain, _TAddress,
- _Data, _HS, _Log, _Packet) ->
+ _LocalEngineID, _Data, _HS, _Log, _Packet) ->
{discarded, {badarg, TDomain}}.
sec_model('version-1') -> ?SEC_V1;
@@ -234,15 +247,19 @@ sec_model('version-2') -> ?SEC_V2C.
%% Handles a SNMPv3 Message, following the procedures in rfc2272,
%% section 4.2 and 7.2
%%-----------------------------------------------------------------
-v3_proc(NoteStore, Packet, _TDomain, _TAddress, V3Hdr, Data, Log) ->
- case (catch v3_proc(NoteStore, Packet, V3Hdr, Data, Log)) of
+v3_proc(NoteStore, Packet, _TDomain, _TAddress, LocalEngineID,
+ V3Hdr, Data, Log) ->
+ case (catch v3_proc(NoteStore, Packet, LocalEngineID, V3Hdr, Data, Log)) of
{'EXIT', Reason} ->
exit(Reason);
Result ->
Result
end.
-v3_proc(NoteStore, Packet, V3Hdr, Data, Log) ->
+v3_proc(NoteStore, Packet, LocalEngineID, V3Hdr, Data, Log) ->
+ ?vtrace("v3_proc -> entry with"
+ "~n LocalEngineID: ~p",
+ [LocalEngineID]),
%% 7.2.3
#v3_hdr{msgID = MsgID,
msgMaxSize = MMS,
@@ -250,7 +267,7 @@ v3_proc(NoteStore, Packet, V3Hdr, Data, Log) ->
msgSecurityModel = MsgSecurityModel,
msgSecurityParameters = SecParams,
hdr_size = HdrSize} = V3Hdr,
- ?vdebug("v3_proc -> version 3 message header:"
+ ?vdebug("v3_proc -> version 3 message header [7.2.3]:"
"~n msgID = ~p"
"~n msgMaxSize = ~p"
"~n msgFlags = ~p"
@@ -263,17 +280,19 @@ v3_proc(NoteStore, Packet, V3Hdr, Data, Log) ->
SecLevel = check_sec_level(MsgFlags),
IsReportable = snmp_misc:is_reportable(MsgFlags),
%% 7.2.6
- ?vtrace("v3_proc -> "
+ ?vtrace("v3_proc -> [7.2.6]"
"~n SecModule = ~p"
"~n SecLevel = ~p"
"~n IsReportable = ~p",
- [SecModule,SecLevel,IsReportable]),
+ [SecModule, SecLevel, IsReportable]),
SecRes = (catch SecModule:process_incoming_msg(Packet, Data,
- SecParams, SecLevel)),
+ SecParams, SecLevel,
+ LocalEngineID)),
?vtrace("v3_proc -> message processing result: "
"~n SecRes: ~p", [SecRes]),
{SecEngineID, SecName, ScopedPDUBytes, SecData, DiscoOrPlain} =
- check_sec_module_result(SecRes, V3Hdr, Data, IsReportable, Log),
+ check_sec_module_result(SecRes, V3Hdr, Data,
+ LocalEngineID, IsReportable, Log),
?vtrace("v3_proc -> "
"~n DiscoOrPlain: ~w"
"~n SecEngineID: ~w"
@@ -311,7 +330,7 @@ v3_proc(NoteStore, Packet, V3Hdr, Data, Log) ->
Log(PDU#pdu.type, Packet)
end,
%% Make sure a get_bulk doesn't get too big.
- AgentMS = snmp_framework_mib:get_engine_max_message_size(),
+ AgentMS = get_engine_max_message_size(LocalEngineID),
%% PduMMS is supposed to be the maximum total length of the response
%% PDU we can send. From the MMS, we need to subtract everything before
%% the PDU, i.e. Message and ScopedPDU.
@@ -415,8 +434,8 @@ v3_proc(NoteStore, Packet, V3Hdr, Data, Log) ->
throw({discarded, received_v2_trap});
Type ->
%% 7.2.13
- SnmpEngineID = snmp_framework_mib:get_engine_id(),
- ?vtrace("v3_proc -> SnmpEngineID = ~w", [SnmpEngineID]),
+ SnmpEngineID = LocalEngineID,
+ ?vtrace("v3_proc -> 7.2.13", []),
case SecEngineID of
SnmpEngineID when (DiscoOrPlain =:= discovery) ->
%% This is a discovery step 2 message!
@@ -429,6 +448,7 @@ v3_proc(NoteStore, Packet, V3Hdr, Data, Log) ->
ContextName,
SecData,
PDU,
+ LocalEngineID,
Log);
SnmpEngineID when (DiscoOrPlain =:= plain) ->
@@ -444,17 +464,18 @@ v3_proc(NoteStore, Packet, V3Hdr, Data, Log) ->
%% 4.2.2.1.2
NIsReportable = snmp_misc:is_reportable_pdu(Type),
Val = inc(snmpUnknownPDUHandlers),
- ErrorInfo = {#varbind{oid = ?snmpUnknownPDUHandlers,
- variabletype = 'Counter32',
- value = Val},
- SecName,
- [{securityLevel, SecLevel},
- {contextEngineID, ContextEngineID},
- {contextName, ContextName}]},
+ ErrorInfo =
+ {#varbind{oid = ?snmpUnknownPDUHandlers,
+ variabletype = 'Counter32',
+ value = Val},
+ SecName,
+ [{securityLevel, SecLevel},
+ {contextEngineID, ContextEngineID},
+ {contextName, ContextName}]},
case generate_v3_report_msg(MsgID,
MsgSecurityModel,
- Data, ErrorInfo,
- Log) of
+ Data, LocalEngineID,
+ ErrorInfo, Log) of
{ok, Report} when NIsReportable =:= true ->
{discarded, snmpUnknownPDUHandlers, Report};
_ ->
@@ -473,6 +494,7 @@ v3_proc(NoteStore, Packet, V3Hdr, Data, Log) ->
ContextName,
SecData,
PDU,
+ LocalEngineID,
Log);
_ ->
@@ -501,7 +523,7 @@ check_sec_level(Unknown) ->
inc(snmpInvalidMsgs),
throw({discarded, snmpInvalidMsgs}).
-check_sec_module_result(Res, V3Hdr, Data, IsReportable, Log) ->
+check_sec_module_result(Res, V3Hdr, Data, LocalEngineID, IsReportable, Log) ->
case Res of
{ok, X} ->
X;
@@ -516,7 +538,7 @@ check_sec_module_result(Res, V3Hdr, Data, IsReportable, Log) ->
#v3_hdr{msgID = MsgID, msgSecurityModel = MsgSecModel} = V3Hdr,
Pdu = get_scoped_pdu(Data),
case generate_v3_report_msg(MsgID, MsgSecModel, Pdu,
- ErrorInfo, Log) of
+ LocalEngineID, ErrorInfo, Log) of
{ok, Report} ->
throw({discarded, {securityError, Reason}, Report});
{discarded, _SomeOtherReason} ->
@@ -545,8 +567,15 @@ get_scoped_pdu(D) ->
generate_response_msg(Vsn, RePdu, Type, ACMData, Log) ->
generate_response_msg(Vsn, RePdu, Type, ACMData, Log, 1).
+generate_response_msg(Vsn, RePdu, Type, ACMData, Log, N) when is_integer(N) ->
+ LocalEngineID = ?DEFAULT_LOCAL_ENGINE_ID,
+ generate_response_msg(Vsn, RePdu, Type, ACMData, LocalEngineID, Log, N);
+generate_response_msg(Vsn, RePdu, Type, ACMData, LocalEngineID, Log) ->
+ generate_response_msg(Vsn, RePdu, Type, ACMData, LocalEngineID, Log, 1).
+
generate_response_msg(Vsn, RePdu, Type,
{community, _SecModel, Community, _IpUdp},
+ LocalEngineID,
Log, _) ->
case catch snmp_pdus:enc_pdu(RePdu) of
{'EXIT', Reason} ->
@@ -555,8 +584,9 @@ generate_response_msg(Vsn, RePdu, Type,
[RePdu, Community, Reason]),
{discarded, Reason};
PduBytes ->
- Message = #message{version = Vsn, vsn_hdr = Community,
- data = PduBytes},
+ Message = #message{version = Vsn,
+ vsn_hdr = Community,
+ data = PduBytes},
case catch list_to_binary(
snmp_pdus:enc_message_only(Message)) of
{'EXIT', Reason} ->
@@ -565,7 +595,7 @@ generate_response_msg(Vsn, RePdu, Type,
[RePdu, Community, Reason]),
{discarded, Reason};
Packet ->
- MMS = snmp_framework_mib:get_engine_max_message_size(),
+ MMS = get_engine_max_message_size(LocalEngineID),
case size(Packet) of
Len when Len =< MMS ->
Log(Type, Packet),
@@ -584,6 +614,7 @@ generate_response_msg(Vsn, RePdu, Type,
generate_response_msg(Vsn, RePdu, Type,
{v3, MsgID, MsgSecurityModel, SecName, SecLevel,
ContextEngineID, ContextName, SecData},
+ LocalEngineID,
Log, N) ->
%% rfc2272: 7.1 steps 6-8
ScopedPDU = #scopedPdu{contextEngineID = ContextEngineID,
@@ -596,7 +627,7 @@ generate_response_msg(Vsn, RePdu, Type,
[RePdu, ContextName, Reason]),
{discarded, Reason};
ScopedPDUBytes ->
- AgentMS = snmp_framework_mib:get_engine_max_message_size(),
+ AgentMS = get_engine_max_message_size(LocalEngineID),
V3Hdr = #v3_hdr{msgID = MsgID,
msgMaxSize = AgentMS,
msgFlags = snmp_misc:mk_msg_flags(Type, SecLevel),
@@ -611,13 +642,14 @@ generate_response_msg(Vsn, RePdu, Type,
?SEC_USM ->
snmpa_usm
end,
- SecEngineID = snmp_framework_mib:get_engine_id(),
+ SecEngineID = LocalEngineID,
?vtrace("generate_response_msg -> SecEngineID: ~w", [SecEngineID]),
case (catch SecModule:generate_outgoing_msg(Message,
SecEngineID,
SecName,
SecData,
- SecLevel)) of
+ SecLevel,
+ LocalEngineID)) of
{'EXIT', Reason} ->
config_err("~p (message: ~p)", [Reason, Message]),
{discarded, Reason};
@@ -668,12 +700,14 @@ generate_response_msg(Vsn, RePdu, Type,
SecName, SecLevel,
ContextEngineID,
ContextName,
- SecData}, Log, N+1)
+ SecData},
+ LocalEngineID, Log, N+1)
end
end
end.
-generate_v3_report_msg(MsgID, MsgSecurityModel, Data, ErrorInfo, Log) ->
+generate_v3_report_msg(MsgID, MsgSecurityModel, Data, LocalEngineID,
+ ErrorInfo, Log) ->
{Varbind, SecName, Opts} = ErrorInfo,
ReqId =
if
@@ -689,7 +723,7 @@ generate_v3_report_msg(MsgID, MsgSecurityModel, Data, ErrorInfo, Log) ->
error_index = 0,
varbinds = [Varbind]},
SecLevel = snmp_misc:get_option(securityLevel, Opts, 0),
- SnmpEngineID = snmp_framework_mib:get_engine_id(),
+ SnmpEngineID = LocalEngineID,
ContextEngineID =
snmp_misc:get_option(contextEngineID, Opts, SnmpEngineID),
ContextName = snmp_misc:get_option(contextName, Opts, ""),
@@ -697,7 +731,8 @@ generate_v3_report_msg(MsgID, MsgSecurityModel, Data, ErrorInfo, Log) ->
generate_response_msg('version-3', Pdu, report,
{v3, MsgID, MsgSecurityModel, SecName, SecLevel,
- ContextEngineID, ContextName, SecData}, Log).
+ ContextEngineID, ContextName, SecData},
+ LocalEngineID, Log).
%% req_id(#scopedPdu{data = #pdu{request_id = ReqId}}) ->
%% ?vtrace("Report ReqId: ~p",[ReqId]),
@@ -719,7 +754,8 @@ generate_discovery1_report_msg(MsgID, MsgSecurityModel,
SecName, SecLevel,
ContextEngineID, ContextName,
{SecData, Oid, Value},
- #pdu{request_id = ReqId}, Log) ->
+ #pdu{request_id = ReqId},
+ LocalEngineID, Log) ->
?vtrace("generate_discovery1_report_msg -> entry with"
"~n ReqId: ~p"
"~n Value: ~p", [ReqId, Value]),
@@ -734,7 +770,8 @@ generate_discovery1_report_msg(MsgID, MsgSecurityModel,
varbinds = [Varbind]},
case generate_response_msg('version-3', PduOut, report,
{v3, MsgID, MsgSecurityModel, SecName, SecLevel,
- ContextEngineID, ContextName, SecData}, Log) of
+ ContextEngineID, ContextName, SecData},
+ LocalEngineID, Log) of
{ok, Packet} ->
{discovery, Packet};
Error ->
@@ -745,7 +782,8 @@ generate_discovery1_report_msg(MsgID, MsgSecurityModel,
generate_discovery2_report_msg(MsgID, MsgSecurityModel,
SecName, SecLevel,
ContextEngineID, ContextName,
- SecData, #pdu{request_id = ReqId}, Log) ->
+ SecData, #pdu{request_id = ReqId},
+ LocalEngineID, Log) ->
?vtrace("generate_discovery2_report_msg -> entry with"
"~n ReqId: ~p", [ReqId]),
SecModule = get_security_module(MsgSecurityModel),
@@ -757,7 +795,8 @@ generate_discovery2_report_msg(MsgID, MsgSecurityModel,
varbinds = [Vb]},
case generate_response_msg('version-3', PduOut, report,
{v3, MsgID, MsgSecurityModel, SecName, SecLevel,
- ContextEngineID, ContextName, SecData}, Log) of
+ ContextEngineID, ContextName, SecData},
+ LocalEngineID, Log) of
{ok, Packet} ->
{discovery, Packet};
Error ->
@@ -816,7 +855,11 @@ set_vb_null([]) ->
%% Executed when a message that isn't a response is generated, i.e.
%% a trap or an inform.
%%-----------------------------------------------------------------
-generate_msg(Vsn, _NoteStore, Pdu, {community, Community}, To) ->
+generate_msg(Vsn, NoteStore, Pdu, ACMData, To) ->
+ LocalEngineID = ?DEFAULT_LOCAL_ENGINE_ID,
+ generate_msg(Vsn, NoteStore, Pdu, ACMData, LocalEngineID, To).
+
+generate_msg(Vsn, _NoteStore, Pdu, {community, Community}, LocalEngineID, To) ->
Message = #message{version = Vsn, vsn_hdr = Community, data = Pdu},
case catch list_to_binary(snmp_pdus:enc_message(Message)) of
{'EXIT', Reason} ->
@@ -825,7 +868,7 @@ generate_msg(Vsn, _NoteStore, Pdu, {community, Community}, To) ->
[Pdu, Community, Reason]),
{discarded, Reason};
Packet ->
- AgentMax = snmp_framework_mib:get_engine_max_message_size(),
+ AgentMax = get_engine_max_message_size(LocalEngineID),
case size(Packet) of
Len when Len =< AgentMax ->
{ok, mk_v1_v2_packet_list(To, Packet, Len, Pdu)};
@@ -838,9 +881,9 @@ generate_msg(Vsn, _NoteStore, Pdu, {community, Community}, To) ->
end
end;
generate_msg('version-3', NoteStore, Pdu,
- {v3, ContextEngineID, ContextName}, To) ->
- %% rfc2272: 7.1.6
- ScopedPDU = #scopedPdu{contextEngineID = ContextEngineID,
+ {v3, ContextEngineID, ContextName}, LocalEngineID, To) ->
+ %% rfc2272: 7.1 step 6
+ ScopedPDU = #scopedPdu{contextEngineID = LocalEngineID,
contextName = ContextName,
data = Pdu},
case (catch snmp_pdus:enc_scoped_pdu(ScopedPDU)) of
@@ -851,7 +894,8 @@ generate_msg('version-3', NoteStore, Pdu,
{discarded, Reason};
ScopedPDUBytes ->
{ok, mk_v3_packet_list(NoteStore, To, ScopedPDUBytes, Pdu,
- ContextEngineID, ContextName)}
+ ContextEngineID, ContextName,
+ LocalEngineID)}
end.
@@ -1094,17 +1138,21 @@ mk_msg_flags(PduType, SecLevel) ->
mk_v3_packet_entry(NoteStore, Domain, Addr,
{SecModel, SecName, SecLevel, TargetAddrName},
- ScopedPDUBytes, Pdu, ContextEngineID, ContextName) ->
- %% 7.1.7
- ?vtrace("mk_v3_packet_entry -> entry - 7.1.7", []),
- MsgID = generate_msg_id(),
- PduType = Pdu#pdu.type,
- MsgFlags = mk_msg_flags(PduType, SecLevel),
+ ScopedPDUBytes, Pdu, _ContextEngineID, ContextName,
+ LocalEngineID) ->
+ %% rfc2272 7.1 step 7
+ ?vtrace("mk_v3_packet_entry -> entry - RFC2272-7.1:7", []),
+ MsgVersion = 'version-3', % 7.1:7a
+ MsgID = generate_msg_id(), % 7.1:7b
+ MaxMsgSz = get_max_message_size(), % 7.1:7c
+ PduType = Pdu#pdu.type,
+ MsgFlags = mk_msg_flags(PduType, SecLevel), % 7.1:7d
+ MsgSecModel = SecModel, % 7.1:7e
V3Hdr = #v3_hdr{msgID = MsgID,
- msgMaxSize = get_max_message_size(),
+ msgMaxSize = MaxMsgSz,
msgFlags = MsgFlags,
- msgSecurityModel = SecModel},
- Message = #message{version = 'version-3',
+ msgSecurityModel = MsgSecModel},
+ Message = #message{version = MsgVersion,
vsn_hdr = V3Hdr,
data = ScopedPDUBytes},
SecModule =
@@ -1113,12 +1161,21 @@ mk_v3_packet_entry(NoteStore, Domain, Addr,
snmpa_usm
end,
+ %%
+ %% 7.1:8 - If the PDU is from the Response Class or the Internal Class
+ %% securityEngineID = snmpEngineID (local/source)
+ %% 7.1:9 - If the PDU is from the Unconfirmed Class
+ %% securityEngineID = snmpEngineID (local/source)
+ %% else
+ %% securityEngineID = targetEngineID (remote/destination)
+ %%
+
%% 7.1.9a
?vtrace("mk_v3_packet_entry -> sec engine id - 7.1.9a", []),
SecEngineID =
case PduType of
'snmpv2-trap' ->
- snmp_framework_mib:get_engine_id();
+ LocalEngineID;
_ ->
%% This is the implementation dependent target engine id
%% procedure.
@@ -1141,8 +1198,9 @@ mk_v3_packet_entry(NoteStore, Domain, Addr,
?vdebug("mk_v3_packet_entry -> secEngineID: ~p", [SecEngineID]),
%% 7.1.9b
- case catch SecModule:generate_outgoing_msg(Message, SecEngineID,
- SecName, [], SecLevel) of
+ case (catch SecModule:generate_outgoing_msg(Message, SecEngineID,
+ SecName, [], SecLevel,
+ LocalEngineID)) of
{'EXIT', Reason} ->
config_err("~p (message: ~p)", [Reason, Message]),
skip;
@@ -1169,7 +1227,7 @@ mk_v3_packet_entry(NoteStore, Domain, Addr,
sec_model = SecModel,
sec_name = SecName,
sec_level = SecLevel,
- ctx_engine_id = ContextEngineID,
+ ctx_engine_id = LocalEngineID,
ctx_name = ContextName,
disco = false,
req_id = Pdu#pdu.request_id},
@@ -1180,15 +1238,16 @@ mk_v3_packet_entry(NoteStore, Domain, Addr,
mk_v3_packet_list(NoteStore, To,
- ScopedPDUBytes, Pdu, ContextEngineID, ContextName) ->
+ ScopedPDUBytes, Pdu, ContextEngineID, ContextName,
+ LocalEngineID) ->
mk_v3_packet_list(NoteStore, To,
ScopedPDUBytes, Pdu,
- ContextEngineID, ContextName, []).
+ ContextEngineID, ContextName, LocalEngineID, []).
mk_v3_packet_list(_, [],
_ScopedPDUBytes, _Pdu,
_ContextEngineID, _ContextName,
- Acc) ->
+ _LocalEngineID, Acc) ->
lists:reverse(Acc);
%% This clause is for backward compatibility reasons
@@ -1196,20 +1255,21 @@ mk_v3_packet_list(_, [],
mk_v3_packet_list(NoteStore,
[{{?snmpUDPDomain, [A,B,C,D,U1,U2]}, SecData} | T],
ScopedPDUBytes, Pdu, ContextEngineID, ContextName,
- Acc) ->
+ LocalEngineID, Acc) ->
case mk_v3_packet_entry(NoteStore,
snmpUDPDomain, {{A,B,C,D}, U1 bsl 8 + U2}, SecData,
ScopedPDUBytes, Pdu,
- ContextEngineID, ContextName) of
+ ContextEngineID, ContextName, LocalEngineID) of
skip ->
mk_v3_packet_list(NoteStore, T,
ScopedPDUBytes, Pdu,
- ContextEngineID, ContextName,
+ ContextEngineID, ContextName, LocalEngineID,
Acc);
{ok, Entry} ->
mk_v3_packet_list(NoteStore, T,
ScopedPDUBytes, Pdu,
- ContextEngineID, ContextName, [Entry | Acc])
+ ContextEngineID, ContextName, LocalEngineID,
+ [Entry | Acc])
end;
%% This is the new clause
@@ -1218,11 +1278,11 @@ mk_v3_packet_list(NoteStore,
mk_v3_packet_list(NoteStore,
[{{Domain, Addr}, SecData} | T],
ScopedPDUBytes, Pdu, ContextEngineID, ContextName,
- Acc) ->
+ LocalEngineID, Acc) ->
case mk_v3_packet_entry(NoteStore,
Domain, Addr, SecData,
ScopedPDUBytes, Pdu,
- ContextEngineID, ContextName) of
+ ContextEngineID, ContextName, LocalEngineID) of
skip ->
mk_v3_packet_list(NoteStore, T,
ScopedPDUBytes, Pdu,
@@ -1230,7 +1290,8 @@ mk_v3_packet_list(NoteStore,
{ok, Entry} ->
mk_v3_packet_list(NoteStore, T,
ScopedPDUBytes, Pdu,
- ContextEngineID, ContextName, [Entry | Acc])
+ ContextEngineID, ContextName,
+ LocalEngineID, [Entry | Acc])
end.
@@ -1253,6 +1314,9 @@ gen(Id) ->
get_target_engine_id(TargetAddrName) ->
snmp_target_mib:get_target_engine_id(TargetAddrName).
+get_engine_max_message_size(_LocalEngineID) ->
+ snmp_framework_mib:get_engine_max_message_size().
+
sec_module(?SEC_USM) ->
snmpa_usm.
diff --git a/lib/snmp/src/agent/snmpa_trap.erl b/lib/snmp/src/agent/snmpa_trap.erl
index b1096b1135..450cb2e9f4 100644
--- a/lib/snmp/src/agent/snmpa_trap.erl
+++ b/lib/snmp/src/agent/snmpa_trap.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(snmpa_trap).
@@ -23,14 +23,18 @@
%%%-----------------------------------------------------------------
%% External exports
-export([construct_trap/2,
- try_initialise_vars/2, send_trap/6]).
+ try_initialise_vars/2,
+ send_trap/6, send_trap/7]).
-export([send_discovery/5]).
%% Internal exports
--export([init_v2_inform/9, init_v3_inform/9, send_inform/6]).
+-export([init_v2_inform/9,
+ init_v3_inform/9, init_v3_inform/10,
+ send_inform/6]).
-export([init_discovery_inform/12, send_discovery_inform/5]).
-include("snmp_types.hrl").
+-include("snmpa_internal.hrl").
-include("SNMPv2-MIB.hrl").
-include("SNMPv2-TM.hrl").
-include("SNMPv2-TC.hrl").
@@ -331,13 +335,20 @@ make_varbind_list(Varbinds) ->
%% SnmpTargetAddrTable (using the Tag).
%%-----------------------------------------------------------------
send_trap(TrapRec, NotifyName, ContextName, Recv, Vbs, NetIf) ->
- (catch do_send_trap(TrapRec, NotifyName, ContextName, Recv, Vbs, NetIf)).
+ LocalEngineID = ?DEFAULT_LOCAL_ENGINE_ID,
+ send_trap(TrapRec, NotifyName, ContextName, Recv, Vbs,
+ LocalEngineID, NetIf).
+
+send_trap(TrapRec, NotifyName, ContextName, Recv, Vbs, LocalEngineID, NetIf) ->
+ (catch do_send_trap(TrapRec, NotifyName, ContextName, Recv, Vbs,
+ LocalEngineID, NetIf)).
-do_send_trap(TrapRec, NotifyName, ContextName, Recv, Vbs, NetIf) ->
+do_send_trap(TrapRec, NotifyName, ContextName, Recv, Vbs,
+ LocalEngineID, NetIf) ->
VarbindList = make_varbind_list(Vbs),
Dests = find_dests(NotifyName),
send_trap_pdus(Dests, ContextName, {TrapRec, VarbindList}, [], [], [],
- Recv, NetIf).
+ Recv, LocalEngineID, NetIf).
send_discovery(TargetName, Record, ContextName, Vbs, NetIf) ->
case find_dest(TargetName) of
@@ -619,7 +630,9 @@ send_discovery_inform(Parent, Timeout, Retry, Msg, NetIf) ->
%%-----------------------------------------------------------------
send_trap_pdus([{DestAddr, TargetName, {MpModel, SecModel, SecName, SecLevel},
Type} | T],
- ContextName,{TrapRec, Vbs}, V1Res, V2Res, V3Res, Recv, NetIf) ->
+ ContextName,
+ {TrapRec, Vbs}, V1Res, V2Res, V3Res, Recv,
+ LocalEngineID, NetIf) ->
?vdebug("send trap pdus: "
"~n Destination address: ~p"
"~n Target name: ~p"
@@ -634,7 +647,7 @@ send_trap_pdus([{DestAddr, TargetName, {MpModel, SecModel, SecName, SecLevel},
case check_all_varbinds(TrapRec, Vbs, MibView) of
true when MpModel =:= ?MP_V1 ->
?vtrace("send_trap_pdus -> v1 mp model",[]),
- ContextEngineId = snmp_framework_mib:get_engine_id(),
+ ContextEngineId = LocalEngineID,
case snmp_community_mib:vacm2community({SecName,
ContextEngineId,
ContextName},
@@ -644,16 +657,18 @@ send_trap_pdus([{DestAddr, TargetName, {MpModel, SecModel, SecName, SecLevel},
[element(2, DestAddr)]),
send_trap_pdus(T, ContextName, {TrapRec, Vbs},
[{DestAddr, Community} | V1Res],
- V2Res, V3Res, Recv, NetIf);
+ V2Res, V3Res, Recv,
+ LocalEngineID, NetIf);
undefined ->
?vdebug("No community found for v1 dest: ~p",
[element(2, DestAddr)]),
send_trap_pdus(T, ContextName, {TrapRec, Vbs},
- V1Res, V2Res, V3Res, Recv, NetIf)
+ V1Res, V2Res, V3Res, Recv,
+ LocalEngineID, NetIf)
end;
true when MpModel =:= ?MP_V2C ->
?vtrace("send_trap_pdus -> v2c mp model",[]),
- ContextEngineId = snmp_framework_mib:get_engine_id(),
+ ContextEngineId = LocalEngineID,
case snmp_community_mib:vacm2community({SecName,
ContextEngineId,
ContextName},
@@ -664,12 +679,13 @@ send_trap_pdus([{DestAddr, TargetName, {MpModel, SecModel, SecName, SecLevel},
send_trap_pdus(T, ContextName, {TrapRec, Vbs},
V1Res,
[{DestAddr, Community, Type}|V2Res],
- V3Res, Recv, NetIf);
+ V3Res, Recv, LocalEngineID, NetIf);
undefined ->
?vdebug("No community found for v2c dest: ~p",
[element(2, DestAddr)]),
send_trap_pdus(T, ContextName, {TrapRec, Vbs},
- V1Res, V2Res, V3Res, Recv, NetIf)
+ V1Res, V2Res, V3Res, Recv,
+ LocalEngineID, NetIf)
end;
true when MpModel =:= ?MP_V3 ->
?vtrace("send_trap_pdus -> v3 mp model",[]),
@@ -678,18 +694,20 @@ send_trap_pdus([{DestAddr, TargetName, {MpModel, SecModel, SecName, SecLevel},
send_trap_pdus(T, ContextName, {TrapRec, Vbs},
V1Res, V2Res,
[{DestAddr, MsgData, Type} | V3Res],
- Recv, NetIf);
+ Recv, LocalEngineID, NetIf);
true ->
?vlog("bad MpModel ~p for dest ~p",
[MpModel, element(2, DestAddr)]),
send_trap_pdus(T, ContextName, {TrapRec, Vbs},
- V1Res, V2Res, V3Res, Recv, NetIf);
+ V1Res, V2Res, V3Res, Recv,
+ LocalEngineID, NetIf);
_ ->
?vlog("no access for dest: "
"~n ~p in target ~p",
[element(2, DestAddr), TargetName]),
send_trap_pdus(T, ContextName, {TrapRec, Vbs},
- V1Res, V2Res, V3Res, Recv, NetIf)
+ V1Res, V2Res, V3Res, Recv,
+ LocalEngineID, NetIf)
end;
{discarded, Reason} ->
?vlog("mib view error ~p for"
@@ -697,10 +715,10 @@ send_trap_pdus([{DestAddr, TargetName, {MpModel, SecModel, SecName, SecLevel},
"~n SecName: ~w",
[Reason, element(2, DestAddr), SecName]),
send_trap_pdus(T, ContextName, {TrapRec, Vbs},
- V1Res, V2Res, V3Res, Recv, NetIf)
+ V1Res, V2Res, V3Res, Recv, LocalEngineID, NetIf)
end;
send_trap_pdus([], ContextName, {TrapRec, Vbs}, V1Res, V2Res, V3Res,
- Recv, NetIf) ->
+ Recv, LocalEngineID, NetIf) ->
SysUpTime = snmp_standard_mib:sys_up_time(),
?vdebug("send trap pdus with sysUpTime ~p", [SysUpTime]),
InformRecvs = get_inform_recvs(V2Res ++ V3Res),
@@ -708,7 +726,8 @@ send_trap_pdus([], ContextName, {TrapRec, Vbs}, V1Res, V2Res, V3Res,
deliver_recv(Recv, snmp_targets, InformTargets),
send_v1_trap(TrapRec, V1Res, Vbs, NetIf, SysUpTime),
send_v2_trap(TrapRec, V2Res, Vbs, Recv, NetIf, SysUpTime),
- send_v3_trap(TrapRec, V3Res, Vbs, Recv, NetIf, SysUpTime, ContextName).
+ send_v3_trap(TrapRec, V3Res, Vbs, Recv, LocalEngineID, NetIf,
+ SysUpTime, ContextName).
send_v1_trap(_TrapRec, [], _Vbs, _NetIf, _SysUpTime) ->
ok;
@@ -762,21 +781,25 @@ send_v2_trap(TrapRec, V2Res, Vbs, Recv, NetIf, SysUpTime) ->
do_send_v2_trap(TrapRecvs, IVbs, NetIf),
do_send_v2_inform(InformRecvs, IVbs, Recv, NetIf).
-send_v3_trap(_TrapRec, [], _Vbs, _Recv, _NetIf, _SysUpTime, _ContextName) ->
+send_v3_trap(_TrapRec, [], _Vbs, _Recv, _LocalEngineID,
+ _NetIf, _SysUpTime, _ContextName) ->
ok;
-send_v3_trap(TrapRec, V3Res, Vbs, Recv, NetIf, SysUpTime, ContextName) ->
+send_v3_trap(TrapRec, V3Res, Vbs, Recv, LocalEngineID,
+ NetIf, SysUpTime, ContextName) ->
?vdebug("prepare to send v3 trap",[]),
{_Oid, IVbs} = mk_v2_trap(TrapRec, Vbs, SysUpTime), % v2 refers to SMIv2;
- TrapRecvs = get_trap_recvs(V3Res), % same SMI for v3
+ TrapRecvs = get_trap_recvs(V3Res), % same SMI for v3
InformRecvs = get_inform_recvs(V3Res),
do_send_v3_trap(TrapRecvs, ContextName, IVbs, NetIf),
- do_send_v3_inform(InformRecvs, ContextName, IVbs, Recv, NetIf).
+ do_send_v3_inform(InformRecvs, ContextName, IVbs, Recv,
+ LocalEngineID, NetIf).
mk_v2_trap(#notification{oid = Oid}, Vbs, SysUpTime) ->
?vtrace("make v2 notification '~p'",[Oid]),
mk_v2_notif(Oid, Vbs, SysUpTime);
-mk_v2_trap(#trap{enterpriseoid = Enter, specificcode = Spec}, Vbs, SysUpTime) ->
+mk_v2_trap(#trap{enterpriseoid = Enter, specificcode = Spec},
+ Vbs, SysUpTime) ->
%% Use alg. in rfc1908 to map a v1 trap to a v2 trap
?vtrace("make v2 trap for '~p' with ~p",[Enter,Spec]),
{Oid,Enterp} =
@@ -845,16 +868,16 @@ do_send_v3_trap(Recvs, ContextName, Vbs, NetIf) ->
end, Recvs),
ok.
-do_send_v3_inform([], _ContextName, _Vbs, _Recv, _NetIf) ->
+do_send_v3_inform([], _ContextName, _Vbs, _Recv, _LocalEngineID, _NetIf) ->
ok;
-do_send_v3_inform(Recvs, ContextName, Vbs, Recv, NetIf) ->
+do_send_v3_inform(Recvs, ContextName, Vbs, Recv, LocalEngineID, NetIf) ->
lists:foreach(
fun({Addr, MsgData, Timeout, Retry}) ->
?vtrace("~n start inform sender to send v3 inform to ~p",
[Addr]),
proc_lib:spawn_link(?MODULE, init_v3_inform,
[{Addr, MsgData}, Timeout, Retry, Vbs,
- Recv, NetIf, ContextName,
+ Recv, LocalEngineID, NetIf, ContextName,
get(verbosity), get(sname)])
end,
Recvs).
@@ -874,7 +897,13 @@ init_v2_inform(Addr, Timeout, Retry, Vbs, Recv, NetIf, Community,V,S) ->
%% New process
-init_v3_inform(Addr, Timeout, Retry, Vbs, Recv, NetIf, ContextName,V,S) ->
+init_v3_inform(Addr, Timeout, Retry, Vbs, Recv, NetIf, ContextName, V, S) ->
+ LocalEngineID = ?DEFAULT_LOCAL_ENGINE_ID,
+ init_v3_inform(Addr, Timeout, Retry, Vbs, Recv, LocalEngineID,
+ NetIf, ContextName, V, S).
+
+init_v3_inform(Addr, Timeout, Retry, Vbs, Recv, LocalEngineID,
+ NetIf, ContextName, V, S) ->
%% Make a new Inform for each recipient; they need unique
%% request-ids!
put(verbosity,V),
@@ -882,7 +911,7 @@ init_v3_inform(Addr, Timeout, Retry, Vbs, Recv, NetIf, ContextName,V,S) ->
?vdebug("~n starting with timeout = ~p and retry = ~p",
[Timeout,Retry]),
InformPdu = make_v2_notif_pdu(Vbs, 'inform-request'), % Yes, v2
- ContextEngineId = snmp_framework_mib:get_engine_id(),
+ ContextEngineId = LocalEngineID,
Msg = {send_pdu_req, 'version-3', InformPdu,
{v3, ContextEngineId, ContextName}, [Addr], self()},
?MODULE:send_inform(Addr, Timeout*10, Retry, Msg, Recv, NetIf).
diff --git a/lib/snmp/src/agent/snmpa_usm.erl b/lib/snmp/src/agent/snmpa_usm.erl
index b94294844b..ae584bb3c1 100644
--- a/lib/snmp/src/agent/snmpa_usm.erl
+++ b/lib/snmp/src/agent/snmpa_usm.erl
@@ -19,8 +19,8 @@
-module(snmpa_usm).
-export([
- process_incoming_msg/4,
- generate_outgoing_msg/5,
+ process_incoming_msg/4, process_incoming_msg/5,
+ generate_outgoing_msg/5, generate_outgoing_msg/6,
generate_discovery_msg/4, generate_discovery_msg/5,
current_statsNotInTimeWindows_vb/0
]).
@@ -33,6 +33,7 @@
-define(VMODULE,"A-USM").
-include("snmp_verbosity.hrl").
+-include("snmpa_internal.hrl").
%%-----------------------------------------------------------------
@@ -58,7 +59,11 @@
%%-----------------------------------------------------------------
process_incoming_msg(Packet, Data, SecParams, SecLevel) ->
- TermDiscoEnabled = is_terminating_discovery_enabled(),
+ LocalEngineID = ?DEFAULT_LOCAL_ENGINE_ID,
+ process_incoming_msg(Packet, Data, SecParams, SecLevel, LocalEngineID).
+
+process_incoming_msg(Packet, Data, SecParams, SecLevel, LocalEngineID) ->
+ TermDiscoEnabled = is_terminating_discovery_enabled(),
TermTriggerUsername = terminating_trigger_username(),
%% 3.2.1
?vtrace("process_incoming_msg -> check security parms: 3.2.1",[]),
@@ -124,7 +129,7 @@ process_incoming_msg(Packet, Data, SecParams, SecLevel) ->
"~n ~p",[UsmUser]),
DiscoOrPlain = authenticate_incoming(Packet,
UsmSecParams, UsmUser,
- SecLevel),
+ SecLevel, LocalEngineID),
%% 3.2.8
?vtrace("process_incoming_msg -> "
"decrypt scoped data: 3.2.8",[]),
@@ -166,7 +171,8 @@ process_discovery_msg(MsgAuthEngineID, Data, SecLevel) ->
end.
-authenticate_incoming(Packet, UsmSecParams, UsmUser, SecLevel) ->
+authenticate_incoming(Packet, UsmSecParams, UsmUser, SecLevel,
+ LocalEngineID) ->
%% 3.2.6
?vtrace("authenticate_incoming -> 3.2.6", []),
AuthProtocol = element(?usmUserAuthProtocol, UsmUser),
@@ -190,7 +196,8 @@ authenticate_incoming(Packet, UsmSecParams, UsmUser, SecLevel) ->
SecName,
MsgAuthEngineID,
MsgAuthEngineBoots,
- MsgAuthEngineTime) of
+ MsgAuthEngineTime,
+ LocalEngineID) of
discovery ->
discovery;
true ->
@@ -205,15 +212,15 @@ authenticate_incoming(Packet, UsmSecParams, UsmUser, SecLevel) ->
plain
end.
-authoritative(SecName, MsgAuthEngineBoots, MsgAuthEngineTime) ->
+authoritative(SecName, MsgAuthEngineBoots, MsgAuthEngineTime, LocalEngineID) ->
?vtrace("authoritative -> entry with"
"~n SecName: ~p"
"~n MsgAuthEngineBoots: ~p"
"~n MsgAuthEngineTime: ~p",
[SecName, MsgAuthEngineBoots, MsgAuthEngineTime]),
- SnmpEngineBoots = snmp_framework_mib:get_engine_boots(),
+ SnmpEngineBoots = get_local_engine_boots(LocalEngineID),
?vtrace("authoritative -> SnmpEngineBoots: ~p", [SnmpEngineBoots]),
- SnmpEngineTime = snmp_framework_mib:get_engine_time(),
+ SnmpEngineTime = get_local_engine_time(LocalEngineID),
?vtrace("authoritative -> SnmpEngineTime: ~p", [SnmpEngineTime]),
InTimeWindow =
if
@@ -320,11 +327,12 @@ non_authoritative(SecName,
end.
-is_auth(?usmNoAuthProtocol, _, _, _, SecName, _, _, _) -> % 3.2.5
+is_auth(?usmNoAuthProtocol, _, _, _, SecName, _, _, _, _) -> % 3.2.5
error(usmStatsUnsupportedSecLevels,
?usmStatsUnsupportedSecLevels_instance, SecName); % OTP-5464
is_auth(AuthProtocol, AuthKey, AuthParams, Packet, SecName,
- MsgAuthEngineID, MsgAuthEngineBoots, MsgAuthEngineTime) ->
+ MsgAuthEngineID, MsgAuthEngineBoots, MsgAuthEngineTime,
+ LocalEngineID) ->
TermDiscoEnabled = is_terminating_discovery_enabled(),
TermDiscoStage2 = terminating_discovery_stage2(),
IsAuth = auth_in(AuthProtocol, AuthKey, AuthParams, Packet),
@@ -334,7 +342,7 @@ is_auth(AuthProtocol, AuthKey, AuthParams, Packet, SecName,
%% 3.2.7
?vtrace("is_auth -> "
"retrieve EngineBoots and EngineTime: 3.2.7",[]),
- SnmpEngineID = snmp_framework_mib:get_engine_id(),
+ SnmpEngineID = LocalEngineID,
?vtrace("is_auth -> SnmpEngineID: ~p", [SnmpEngineID]),
case MsgAuthEngineID of
SnmpEngineID when ((MsgAuthEngineBoots =:= 0) andalso
@@ -351,12 +359,14 @@ is_auth(AuthProtocol, AuthKey, AuthParams, Packet, SecName,
%% This will *always* result in the manager *not*
%% being in the time window
authoritative(SecName,
- MsgAuthEngineBoots, MsgAuthEngineTime);
+ MsgAuthEngineBoots, MsgAuthEngineTime,
+ LocalEngineID);
SnmpEngineID -> %% 3.2.7a
?vtrace("is_auth -> we are authoritative: 3.2.7a", []),
authoritative(SecName,
- MsgAuthEngineBoots, MsgAuthEngineTime);
+ MsgAuthEngineBoots, MsgAuthEngineTime,
+ LocalEngineID);
_ -> %% 3.2.7b - we're non-authoritative
?vtrace("is_auth -> we are non-authoritative: 3.2.7b",[]),
@@ -418,12 +428,19 @@ try_decrypt(?usmAesCfb128Protocol,
generate_outgoing_msg(Message, SecEngineID, SecName, SecData, SecLevel) ->
+ LocalEngineID = ?DEFAULT_LOCAL_ENGINE_ID,
+ generate_outgoing_msg(Message, SecEngineID, SecName, SecData, SecLevel,
+ LocalEngineID).
+
+generate_outgoing_msg(Message, SecEngineID, SecName, SecData, SecLevel,
+ LocalEngineID) ->
%% 3.1.1
?vtrace("generate_outgoing_msg -> [3.1.1] entry with"
- "~n SecEngineID: ~p"
- "~n SecName: ~p"
- "~n SecLevel: ~w",
- [SecEngineID, SecName, SecLevel]),
+ "~n SecEngineID: ~p"
+ "~n SecName: ~p"
+ "~n SecLevel: ~w"
+ "~n LocalEngineID: ~p",
+ [SecEngineID, SecName, SecLevel, LocalEngineID]),
{UserName, AuthProtocol, PrivProtocol, AuthKey, PrivKey} =
case SecData of
[] -> % 3.1.1b
@@ -439,7 +456,7 @@ generate_outgoing_msg(Message, SecEngineID, SecName, SecData, SecLevel) ->
element(?usmUserPrivKey, User)};
{_, Name,_,_,_,_,_,_,_,_,_,_,_, RowStatus,_,_} ->
?vdebug("generate_outgoing_msg -> "
- "found user ~p with wrong row status: ~p",
+ "found not active user ~p: ~p",
[Name, RowStatus]),
error(unknownSecurityName);
_ ->
@@ -460,7 +477,7 @@ generate_outgoing_msg(Message, SecEngineID, SecName, SecData, SecLevel) ->
ScopedPduBytes = Message#message.data,
{ScopedPduData, MsgPrivParams} =
encrypt(ScopedPduBytes, PrivProtocol, PrivKey, SecLevel),
- SnmpEngineID = snmp_framework_mib:get_engine_id(),
+ SnmpEngineID = LocalEngineID,
?vtrace("generate_outgoing_msg -> SnmpEngineID: ~p [3.1.6]",
[SnmpEngineID]),
%% 3.1.6
@@ -474,8 +491,8 @@ generate_outgoing_msg(Message, SecEngineID, SecName, SecData, SecLevel) ->
{get_engine_boots(SecEngineID),
get_engine_time(SecEngineID)};
_ ->
- {snmp_framework_mib:get_engine_boots(),
- snmp_framework_mib:get_engine_time()}
+ {get_local_engine_boots(SnmpEngineID),
+ get_local_engine_time(SnmpEngineID)}
end,
%% 3.1.5 - 3.1.7
?vtrace("generate_outgoing_msg -> [3.1.5 - 3.1.7]",[]),
@@ -681,6 +698,19 @@ current_statsNotInTimeWindows_vb() ->
value = get_counter(usmStatsNotInTimeWindows)}.
+
+%%-----------------------------------------------------------------
+%% Future proofing...
+%%-----------------------------------------------------------------
+
+get_local_engine_boots(_LocalEngineID) ->
+ snmp_framework_mib:get_engine_boots().
+
+get_local_engine_time(_LocalEngineID) ->
+ snmp_framework_mib:get_engine_time().
+
+
+
%%-----------------------------------------------------------------
%% We cache the local values of all non-auth engines we know.
%% Keep the values in the snmp_agent_table.
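Several of the hunks above repeat the same backward-compatibility pattern: the old arity stays exported and simply delegates to the new arity, passing a default local engine ID. A minimal standalone sketch of that pattern follows; the macro body is an assumption (the real definition lives in snmpa_internal.hrl), and the module name and return value are illustrative only.

-module(local_engine_id_sketch).
-export([generate_outgoing_msg/5, generate_outgoing_msg/6]).

%% Assumed to mirror snmpa_internal.hrl; the exact definition may differ.
-define(DEFAULT_LOCAL_ENGINE_ID, snmp_framework_mib:get_engine_id()).

%% Old arity: kept for backward compatibility, delegates with the default.
generate_outgoing_msg(Message, SecEngineID, SecName, SecData, SecLevel) ->
    LocalEngineID = ?DEFAULT_LOCAL_ENGINE_ID,
    generate_outgoing_msg(Message, SecEngineID, SecName, SecData, SecLevel,
                          LocalEngineID).

%% New arity: the caller can supply an explicit local engine ID.
generate_outgoing_msg(Message, _SecEngineID, _SecName, _SecData, _SecLevel,
                      LocalEngineID) ->
    %% The real implementation secures the message here, using LocalEngineID
    %% wherever the local snmpEngineID used to be fetched directly.
    {ok, {Message, LocalEngineID}}.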
diff --git a/lib/snmp/src/app/snmp.appup.src b/lib/snmp/src/app/snmp.appup.src
index aa3410fea3..9ad16ffad2 100644
--- a/lib/snmp/src/app/snmp.appup.src
+++ b/lib/snmp/src/app/snmp.appup.src
@@ -22,53 +22,91 @@
%% ----- U p g r a d e -------------------------------------------------------
[
+ {"4.16.2",
+ [
+ {load_module, snmp_log, soft_purge, soft_purge, []},
+ {load_module, snmpa, soft_purge, soft_purge, [snmpa_agent]},
+ {load_module, snmpa_mpd, soft_purge, soft_purge, [snmpa_usm]},
+ {load_module, snmpa_usm, soft_purge, soft_purge, []},
+ {update, snmpa_agent, soft, soft_purge, soft_purge, []},
+
+ {load_module, snmpm_mpd, soft_purge, soft_purge, []}
+ ]
+ },
{"4.16.1",
[
- {load_module, snmpa_usm, soft_purge, soft_purge, [snmp_usm]},
+ {load_module, snmp_log, soft_purge, soft_purge, []},
+ {load_module, snmp_pdus, soft_purge, soft_purge, []},
{load_module, snmp_usm, soft_purge, soft_purge, []},
- {load_module, snmp_pdus, soft_purge, soft_purge, []}
+
+ {load_module, snmpa, soft_purge, soft_purge, [snmpa_agent]},
+ {load_module, snmpa_mpd, soft_purge, soft_purge, [snmpa_usm]},
+ {load_module, snmpa_usm, soft_purge, soft_purge, [snmp_usm]},
+ {update, snmpa_mib, soft, soft_purge, soft_purge, []},
+ {update, snmpa_agent, soft, soft_purge, soft_purge, [snmpa_mib]},
+
+ {load_module, snmpm_mpd, soft_purge, soft_purge, []},
+ {update, snmpm_server, soft, soft_purge, soft_purge, []}
]
},
{"4.16",
[
- {load_module, snmpa_usm, soft_purge, soft_purge, [snmp_usm]},
- {load_module, snmp_usm, soft_purge, soft_purge, []},
+ {load_module, snmp_log, soft_purge, soft_purge, []},
{load_module, snmp_pdus, soft_purge, soft_purge, []},
+ {load_module, snmp_usm, soft_purge, soft_purge, []},
+
+ {load_module, snmpa, soft_purge, soft_purge, [snmpa_agent]},
{load_module, snmpa_general_db, soft_purge, soft_purge, []},
- {update, snmpm_net_if, soft, soft_purge, soft_purge, []}
+ {load_module, snmpa_mpd, soft_purge, soft_purge, [snmpa_usm]},
+ {load_module, snmpa_usm, soft_purge, soft_purge, [snmp_usm]},
+ {update, snmpa_mib, soft, soft_purge, soft_purge, []},
+ {update, snmpa_agent, soft, soft_purge, soft_purge, [snmpa_mib]},
+
+ {load_module, snmpm_mpd, soft_purge, soft_purge, []},
+ {update, snmpm_net_if, soft, soft_purge, soft_purge, []},
+ {update, snmpm_server, soft, soft_purge, soft_purge, []}
]
},
{"4.15",
[
- {load_module, snmp_pdus, soft_purge, soft_purge, []},
- {load_module, snmpa, soft_purge, soft_purge, [snmp_log]},
{load_module, snmp_config, soft_purge, soft_purge, []},
{load_module, snmp_log, soft_purge, soft_purge, []},
+ {load_module, snmp_pdus, soft_purge, soft_purge, []},
+ {load_module, snmp_usm, soft_purge, soft_purge, []},
+
+ {load_module, snmpa, soft_purge, soft_purge, [snmp_log, snmpa_agent]},
{load_module, snmpa_general_db, soft_purge, soft_purge, []},
- {update, snmpm_net_if, {advanced, upgrade_from_pre_4_16},
- soft_purge, soft_purge, [snmpm_config, snmp_log]},
- {update, snmpa_net_if, {advanced, upgrade_from_pre_4_16},
- soft_purge, soft_purge, [snmpa_agent, snmp_log]},
- {update, snmpa_agent, soft, soft_purge, soft_purge, []},
+ {load_module, snmpa_mpd, soft_purge, soft_purge, [snmpa_usm]},
{load_module, snmpa_usm, soft_purge, soft_purge, [snmp_usm]},
- {load_module, snmp_usm, soft_purge, soft_purge, []},
+ {update, snmpa_net_if, {advanced, upgrade_from_pre_4_16},
+ soft_purge, soft_purge, [snmpa_agent, snmp_log]},
+ {update, snmpa_mib, soft, soft_purge, soft_purge, []},
+ {update, snmpa_agent, soft, soft_purge, soft_purge, [snmpa_mib]},
- {update, snmpm_config, soft, soft_purge, soft_purge, []}
+ {load_module, snmpm_mpd, soft_purge, soft_purge, []},
+ {update, snmpm_net_if, {advanced, upgrade_from_pre_4_16},
+ soft_purge, soft_purge, [snmpm_config, snmp_log]},
+ {update, snmpm_config, soft, soft_purge, soft_purge, []},
+ {update, snmpm_server, soft, soft_purge, soft_purge, []}
]
},
{"4.14",
[
- {load_module, snmp_pdus, soft_purge, soft_purge, []},
- {load_module, snmpa, soft_purge, soft_purge, [snmp_log]},
{load_module, snmp_config, soft_purge, soft_purge, []},
{load_module, snmp_log, soft_purge, soft_purge, []},
+ {load_module, snmp_pdus, soft_purge, soft_purge, []},
+ {load_module, snmp_usm, soft_purge, soft_purge, []},
+
+ {load_module, snmpa, soft_purge, soft_purge, [snmp_log, snmpa_agent]},
{load_module, snmpa_general_db, soft_purge, soft_purge, []},
- {update, snmpa_net_if, {advanced, upgrade_from_pre_4_16},
- soft_purge, soft_purge, [snmpa_agent, snmp_log]},
- {update, snmpa_agent, soft, soft_purge, soft_purge, []},
+ {load_module, snmpa_mpd, soft_purge, soft_purge, [snmpa_usm]},
{load_module, snmpa_usm, soft_purge, soft_purge, [snmp_usm]},
- {load_module, snmp_usm, soft_purge, soft_purge, []},
+ {update, snmpa_net_if, {advanced, upgrade_from_pre_4_16},
+ soft_purge, soft_purge, [snmp_log, snmpa_agent]},
+ {update, snmpa_mib, soft, soft_purge, soft_purge, []},
+ {update, snmpa_agent, soft, soft_purge, soft_purge, [snmpa_mib]},
+ {load_module, snmpm_mpd, soft_purge, soft_purge, []},
{load_module, snmpm_user, soft_purge, soft_purge, []},
{load_module, snmpm_user_default, soft_purge, soft_purge, [snmpm_user]},
{update, snmpm_net_if, {advanced, upgrade_from_pre_4_16},
@@ -80,18 +118,22 @@
},
{"4.13.5",
[
- {load_module, snmp_pdus, soft_purge, soft_purge, []},
- {load_module, snmpa_mib_data, soft_purge, soft_purge, []},
- {load_module, snmpa, soft_purge, soft_purge, [snmp_log]},
{load_module, snmp_config, soft_purge, soft_purge, []},
{load_module, snmp_log, soft_purge, soft_purge, []},
+ {load_module, snmp_pdus, soft_purge, soft_purge, []},
+ {load_module, snmp_usm, soft_purge, soft_purge, []},
+
+ {load_module, snmpa, soft_purge, soft_purge, [snmp_log, snmpa_agent]},
{load_module, snmpa_general_db, soft_purge, soft_purge, []},
+ {load_module, snmpa_mib_data, soft_purge, soft_purge, []},
+ {load_module, snmpa_mpd, soft_purge, soft_purge, [snmpa_usm]},
+ {load_module, snmpa_usm, soft_purge, soft_purge, [snmp_usm]},
{update, snmpa_net_if, {advanced, upgrade_from_pre_4_16},
soft_purge, soft_purge, [snmpa_agent, snmp_log]},
- {update, snmpa_agent, soft, soft_purge, soft_purge, []},
- {load_module, snmpa_usm, soft_purge, soft_purge, [snmp_usm]},
- {load_module, snmp_usm, soft_purge, soft_purge, []},
+ {update, snmpa_mib, soft, soft_purge, soft_purge, [snmpa_mib_data]},
+ {update, snmpa_agent, soft, soft_purge, soft_purge, [snmpa_mib]},
+ {load_module, snmpm_mpd, soft_purge, soft_purge, []},
{load_module, snmpm_user, soft_purge, soft_purge, []},
{load_module, snmpm_user_default, soft_purge, soft_purge, [snmpm_user]},
{update, snmpm_net_if, {advanced, upgrade_from_pre_4_14},
@@ -107,54 +149,92 @@
%% ------D o w n g r a d e ---------------------------------------------------
[
+ {"4.16.2",
+ [
+ {load_module, snmp_log, soft_purge, soft_purge, []},
+
+ {load_module, snmpa, soft_purge, soft_purge, [snmpa_agent]},
+ {load_module, snmpa_mpd, soft_purge, soft_purge, [snmpa_usm]},
+ {load_module, snmpa_usm, soft_purge, soft_purge, []},
+ {update, snmpa_agent, soft, soft_purge, soft_purge, []},
+
+ {load_module, snmpm_mpd, soft_purge, soft_purge, []}
+ ]
+ },
{"4.16.1",
[
- {load_module, snmpa_usm, soft_purge, soft_purge, [snmp_usm]},
+ {load_module, snmp_log, soft_purge, soft_purge, []},
+ {load_module, snmp_pdus, soft_purge, soft_purge, []},
{load_module, snmp_usm, soft_purge, soft_purge, []},
- {load_module, snmp_pdus, soft_purge, soft_purge, []}
+
+ {load_module, snmpa, soft_purge, soft_purge, [snmpa_agent]},
+ {load_module, snmpa_mpd, soft_purge, soft_purge, [snmpa_usm]},
+ {load_module, snmpa_usm, soft_purge, soft_purge, [snmp_usm]},
+ {update, snmpa_mib, soft, soft_purge, soft_purge, []},
+ {update, snmpa_agent, soft, soft_purge, soft_purge, [snmpa_mib]},
+
+ {load_module, snmpm_mpd, soft_purge, soft_purge, []},
+ {update, snmpm_server, soft, soft_purge, soft_purge, []}
]
},
{"4.16",
[
- {load_module, snmpa_usm, soft_purge, soft_purge, [snmp_usm]},
- {load_module, snmp_usm, soft_purge, soft_purge, []},
+ {load_module, snmp_log, soft_purge, soft_purge, []},
{load_module, snmp_pdus, soft_purge, soft_purge, []},
+ {load_module, snmp_usm, soft_purge, soft_purge, []},
+
+ {load_module, snmpa, soft_purge, soft_purge, [snmpa_agent]},
{load_module, snmpa_general_db, soft_purge, soft_purge, []},
- {update, snmpm_net_if, soft, soft_purge, soft_purge, []}
+ {load_module, snmpa_mpd, soft_purge, soft_purge, [snmpa_usm]},
+ {load_module, snmpa_usm, soft_purge, soft_purge, [snmp_usm]},
+ {update, snmpa_mib, soft, soft_purge, soft_purge, []},
+ {update, snmpa_agent, soft, soft_purge, soft_purge, [snmpa_mib]},
+
+ {load_module, snmpm_mpd, soft_purge, soft_purge, []},
+ {update, snmpm_net_if, soft, soft_purge, soft_purge, []},
+ {update, snmpm_server, soft, soft_purge, soft_purge, []}
]
},
{"4.15",
[
- {load_module, snmp_pdus, soft_purge, soft_purge, []},
- {load_module, snmpa, soft_purge, soft_purge, [snmp_log]},
{load_module, snmp_config, soft_purge, soft_purge, []},
{load_module, snmp_log, soft_purge, soft_purge, []},
- {load_module, snmpa_general_db, soft_purge, soft_purge, []},
- {update, snmpa_net_if, {advanced, downgrade_to_pre_4_16},
- soft_purge, soft_purge, [snmpa_agent, snmp_log]},
- {update, snmpa_agent, soft, soft_purge, soft_purge, []},
- {load_module, snmpa_usm, soft_purge, soft_purge, [snmp_usm]},
+ {load_module, snmp_pdus, soft_purge, soft_purge, []},
{load_module, snmp_usm, soft_purge, soft_purge, []},
+ {load_module, snmpa, soft_purge, soft_purge, [snmpa_agent, snmp_log]},
{load_module, snmpa_general_db, soft_purge, soft_purge, []},
- {update, snmpm_net_if, {advanced, downgrade_to_pre_4_16},
+ {load_module, snmpa_mpd, soft_purge, soft_purge, [snmpa_usm]},
+ {load_module, snmpa_usm, soft_purge, soft_purge, [snmp_usm]},
+ {update, snmpa_net_if, {advanced, downgrade_to_pre_4_16},
+ soft_purge, soft_purge, [snmpa_agent, snmp_log]},
+ {update, snmpa_mib, soft, soft_purge, soft_purge, []},
+ {update, snmpa_agent, soft, soft_purge, soft_purge, [snmpa_mib]},
+
+ {load_module, snmpm_mpd, soft_purge, soft_purge, []},
+ {update, snmpm_net_if, {advanced, downgrade_to_pre_4_16},
soft_purge, soft_purge, [snmpm_config, snmp_log]},
- {update, snmpm_config, soft, soft_purge, soft_purge, []}
+ {update, snmpm_config, soft, soft_purge, soft_purge, []},
+ {update, snmpm_server, soft, soft_purge, soft_purge, []}
]
},
{"4.14",
[
- {load_module, snmp_pdus, soft_purge, soft_purge, []},
- {load_module, snmpa, soft_purge, soft_purge, [snmp_log]},
{load_module, snmp_config, soft_purge, soft_purge, []},
{load_module, snmp_log, soft_purge, soft_purge, []},
+ {load_module, snmp_pdus, soft_purge, soft_purge, []},
+ {load_module, snmp_usm, soft_purge, soft_purge, []},
+
+ {load_module, snmpa, soft_purge, soft_purge, [snmpa_agent, snmp_log]},
{load_module, snmpa_general_db, soft_purge, soft_purge, []},
- {update, snmpa_net_if, {advanced, downgrade_to_pre_4_16},
- soft_purge, soft_purge, [snmpa_agent, snmp_log]},
- {update, snmpa_agent, soft, soft_purge, soft_purge, []},
+ {load_module, snmpa_mpd, soft_purge, soft_purge, [snmpa_usm]},
{load_module, snmpa_usm, soft_purge, soft_purge, [snmp_usm]},
- {load_module, snmp_usm, soft_purge, soft_purge, []},
+ {update, snmpa_net_if, {advanced, downgrade_to_pre_4_16},
+ soft_purge, soft_purge, [snmpa_agent, snmp_log]},
+ {update, snmpa_mib, soft, soft_purge, soft_purge, []},
+ {update, snmpa_agent, soft, soft_purge, soft_purge, [snmpa_mib]},
+ {load_module, snmpm_mpd, soft_purge, soft_purge, []},
{load_module, snmpm_user, soft_purge, soft_purge, []},
{load_module, snmpm_user_default, soft_purge, soft_purge, [snmpm_user]},
{update, snmpm_net_if, {advanced, downgrade_to_pre_4_16},
@@ -166,18 +246,22 @@
},
{"4.13.5",
[
- {load_module, snmp_pdus, soft_purge, soft_purge, []},
- {load_module, snmpa_mib_data, soft_purge, soft_purge, []},
{load_module, snmp_config, soft_purge, soft_purge, []},
- {load_module, snmpa, soft_purge, soft_purge, [snmp_log]},
{load_module, snmp_log, soft_purge, soft_purge, []},
+ {load_module, snmp_pdus, soft_purge, soft_purge, []},
+ {load_module, snmp_usm, soft_purge, soft_purge, []},
+
+ {load_module, snmpa, soft_purge, soft_purge, [snmp_log, snmpa_agent]},
{load_module, snmpa_general_db, soft_purge, soft_purge, []},
+ {load_module, snmpa_mib_data, soft_purge, soft_purge, []},
+ {load_module, snmpa_mpd, soft_purge, soft_purge, [snmpa_usm]},
+ {load_module, snmpa_usm, soft_purge, soft_purge, [snmp_usm]},
{update, snmpa_net_if, {advanced, downgrade_to_pre_4_16},
soft_purge, soft_purge, [snmpa_agent, snmp_log]},
- {update, snmpa_agent, soft, soft_purge, soft_purge, []},
- {load_module, snmpa_usm, soft_purge, soft_purge, [snmp_usm]},
- {load_module, snmp_usm, soft_purge, soft_purge, []},
+ {update, snmpa_mib, soft, soft_purge, soft_purge, [snmpa_mib_data]},
+ {update, snmpa_agent, soft, soft_purge, soft_purge, [snmpa_mib]},
+ {load_module, snmpm_mpd, soft_purge, soft_purge, []},
{load_module, snmpm_user, soft_purge, soft_purge, []},
{load_module, snmpm_user_default, soft_purge, soft_purge, [snmpm_user]},
{update, snmpm_net_if, {advanced, downgrade_to_pre_4_14},
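For orientation, an .appup file holds a single term of the form {Vsn, UpFromInstructions, DownToInstructions}; the entries added above all follow this shape. A generic sketch with placeholder versions and module names (not the actual snmp entries):

{"1.2.3",
 %% Upgrade instructions, keyed by the version being upgraded from.
 [{"1.2.2",
   [{load_module, some_module, soft_purge, soft_purge, []},
    {update, some_server, soft, soft_purge, soft_purge, [some_module]}]}],
 %% Downgrade instructions, keyed by the version being downgraded to.
 [{"1.2.2",
   [{load_module, some_module, soft_purge, soft_purge, []},
    {update, some_server, soft, soft_purge, soft_purge, [some_module]}]}]}.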
diff --git a/lib/snmp/src/manager/snmpm_mpd.erl b/lib/snmp/src/manager/snmpm_mpd.erl
index d76ad20051..7712370d28 100644
--- a/lib/snmp/src/manager/snmpm_mpd.erl
+++ b/lib/snmp/src/manager/snmpm_mpd.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2004-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2004-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -257,11 +257,11 @@ process_v3_msg(NoteStore, Msg, Hdr, Data, Addr, Port, Log) ->
end,
?vlog("7.2.7"
- "~n ContextEngineID: \"~s\" "
+ "~n ContextEngineID: ~p "
"~n context: \"~s\" ",
[CtxEngineID, CtxName]),
if
- SecLevel == 3 -> % encrypted message - log decrypted pdu
+ SecLevel =:= 3 -> % encrypted message - log decrypted pdu
Log({Hdr, ScopedPDUBytes});
true -> % otherwise, log binary
Log(Msg)
@@ -338,7 +338,8 @@ process_v3_msg(NoteStore, Msg, Hdr, Data, Addr, Port, Log) ->
SnmpEngineID = get_engine_id(),
case SecEngineID of
SnmpEngineID -> % 7.2.13.b
- ?vtrace("valid securityEngineID: ~p", [SecEngineID]),
+ ?vtrace("7.2.13d - valid securityEngineID: ~p",
+ [SecEngineID]),
%% 4.2.2.1.1 - we don't handle proxys yet => we only
%% handle CtxEngineID to ourselves
%% Check that we actually know of an agent with this
@@ -353,7 +354,9 @@ process_v3_msg(NoteStore, Msg, Hdr, Data, Addr, Port, Log) ->
{MsgID, MsgSecModel, SecName, SecLevel,
CtxEngineID, CtxName, SecData},
{ok, 'version-3', PDU, PduMMS, ACMData};
- _ ->
+ UnknownEngineID ->
+ ?vtrace("4.2.2.1.2 - UnknownEngineId: ~p",
+ [UnknownEngineID]),
%% 4.2.2.1.2
NIsReportable = snmp_misc:is_reportable_pdu(Type),
Val = inc(snmpUnknownPDUHandlers),
@@ -377,7 +380,8 @@ process_v3_msg(NoteStore, Msg, Hdr, Data, Addr, Port, Log) ->
end
end;
_ -> % 7.2.13.a
- ?vinfo("invalid securityEngineID: ~p",[SecEngineID]),
+ ?vinfo("7.2.13a - invalid securityEngineID: ~p",
+ [SecEngineID]),
discard({badSecurityEngineID, SecEngineID})
end;
diff --git a/lib/snmp/src/manager/snmpm_server.erl b/lib/snmp/src/manager/snmpm_server.erl
index 30aacc0ec3..d64b5b1d53 100644
--- a/lib/snmp/src/manager/snmpm_server.erl
+++ b/lib/snmp/src/manager/snmpm_server.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2004-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2004-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -2804,16 +2804,16 @@ agent_data(TargetName, CtxName) ->
agent_data(TargetName, CtxName, Config) ->
case snmpm_config:agent_info(TargetName, all) of
{ok, Info} ->
- {value, {_, Version}} = lists:keysearch(version, 1, Info),
+ Version = agent_data_item(version, Info),
MsgData =
case Version of
v3 ->
DefSecModel = agent_data_item(sec_model, Info),
DefSecName = agent_data_item(sec_name, Info),
DefSecLevel = agent_data_item(sec_level, Info),
-
+
EngineId = agent_data_item(engine_id, Info),
-
+
SecModel = agent_data_item(sec_model,
Config,
DefSecModel),
@@ -2829,7 +2829,7 @@ agent_data(TargetName, CtxName, Config) ->
_ ->
DefComm = agent_data_item(community, Info),
DefSecModel = agent_data_item(sec_model, Info),
-
+
Comm = agent_data_item(community,
Config,
DefComm),
@@ -2848,8 +2848,12 @@ agent_data(TargetName, CtxName, Config) ->
end.
agent_data_item(Item, Info) ->
- {value, {_, Val}} = lists:keysearch(Item, 1, Info),
- Val.
+ case lists:keysearch(Item, 1, Info) of
+ {value, {_, Val}} ->
+ Val;
+ false ->
+ throw({error, {not_found, Item, Info}})
+ end.
agent_data_item(Item, Info, Default) ->
case lists:keysearch(Item, 1, Info) of
diff --git a/lib/snmp/test/snmp_agent_test.erl b/lib/snmp/test/snmp_agent_test.erl
index af0581150a..9d2e9969c4 100644
--- a/lib/snmp/test/snmp_agent_test.erl
+++ b/lib/snmp/test/snmp_agent_test.erl
@@ -1046,7 +1046,7 @@ v1_cases() ->
sparse_table,
cnt_64,
opaque,
-
+
change_target_addr_config
].
@@ -1977,7 +1977,8 @@ inform_i(Config) ->
?P1("unload TestTrap & TestTrapv2..."),
?line unload_master("TestTrap"),
- ?line unload_master("TestTrapv2").
+ ?line unload_master("TestTrapv2"),
+ ok.
v3_inform_i(X) ->
%% <CONDITIONAL-SKIP>
@@ -3446,7 +3447,7 @@ do_mul_set_err() ->
?line ?v1_2(expect(2, noSuchName, 1, any),
expect(2, [{[friendsEntry, [2,3]], noSuchInstance}])),
g([NewKeyc4]),
- ?line ?v1_2(expect(3, noSuchName, 1, any),
+ ?line ?v1_2(expect(3, noSuchName, 1, any),
expect(3, [{NewKeyc4, noSuchInstance}])).
%% Req. SA-MIB
@@ -3457,10 +3458,10 @@ sa_mib() ->
?line expect(2, [{[sa, [1,0]], "sa_test"}]).
ma_trap1(MA) ->
- snmpa:send_trap(MA, testTrap2, "standard trap"),
+ ok = snmpa:send_trap(MA, testTrap2, "standard trap"),
?line expect(1, trap, [system], 6, 1, [{[system, [4,0]],
"{mbj,eklas}@erlang.ericsson.se"}]),
- snmpa:send_trap(MA, testTrap1, "standard trap"),
+ ok = snmpa:send_trap(MA, testTrap1, "standard trap"),
?line expect(2, trap, [1,2,3] , 1, 0, [{[system, [4,0]],
"{mbj,eklas}@erlang.ericsson.se"}]).
@@ -3509,7 +3510,8 @@ ma_v2_trap1(MA) ->
?DBG("ma_v2_traps -> send standard trap: testTrapv21",[]),
snmpa:send_trap(MA, testTrapv21, "standard trap"),
?line expect(2, v2trap, [{[sysUpTime, 0], any},
- {[snmpTrapOID, 0], ?snmp ++ [1]}]).
+ {[snmpTrapOID, 0], ?snmp ++ [1]}]),
+ ok.
ma_v2_trap2(MA) ->
snmpa:send_trap(MA,testTrapv22,"standard trap",[{sysContact,"pelle"}]),
@@ -3517,7 +3519,7 @@ ma_v2_trap2(MA) ->
{[snmpTrapOID, 0], ?system ++ [0,1]},
{[system, [4,0]], "pelle"}]).
-%% Note: This test case takes a while... actually a couple of minutes.
+%% Note: This test case takes a while... actually a couple of minutes.
ma_v2_inform1(MA) ->
?DBG("ma_v2_inform1 -> entry with"
"~n MA = ~p => "
@@ -5258,7 +5260,35 @@ otp_1131_2(X) -> ?P(otp_1131_2), otp_1131(X).
otp_1131_3(X) ->
%% <CONDITIONAL-SKIP>
- Skippable = [{unix, [darwin]}],
+ %% This is intended to catch Montavista Linux 4.0/ppc (2.6.5)
+ %% Montavista Linux looks like a Debian distro (/etc/issue)
+ LinuxVersionVerify =
+ fun() ->
+ case os:cmd("uname -m") of
+ "ppc" ++ _ ->
+ case file:read_file_info("/etc/issue") of
+ {ok, _} ->
+ case os:cmd("grep -i montavista /etc/issue") of
+ Info when (is_list(Info) andalso
+ (length(Info) > 0)) ->
+ case os:version() of
+ {2, 6, 10} ->
+ true;
+ _ ->
+ false
+ end;
+ _ -> % Maybe plain Debian or Ubuntu
+ false
+ end;
+ _ ->
+ %% Not a Debian based distro
+ false
+ end;
+ _ ->
+ false
+ end
+ end,
+ Skippable = [{unix, [darwin, {linux, LinuxVersionVerify}]}],
Condition = fun() -> ?OS_BASED_SKIP(Skippable) end,
?NON_PC_TC_MAYBE_SKIP(X, Condition),
%% </CONDITIONAL-SKIP>
@@ -6219,12 +6249,15 @@ verify_old_info([Key|Keys], Info) ->
is(S) -> [length(S) | S].
try_test(Func) ->
+ ?P2("try test ~w...", [Func]),
snmp_agent_test_lib:try_test(?MODULE, Func).
try_test(Func, A) ->
+ ?P2("try test ~w...", [Func]),
snmp_agent_test_lib:try_test(?MODULE, Func, A).
try_test(Func, A, Opts) ->
+ ?P2("try test ~w...", [Func]),
snmp_agent_test_lib:try_test(?MODULE, Func, A, Opts).
diff --git a/lib/snmp/test/snmp_agent_test_lib.erl b/lib/snmp/test/snmp_agent_test_lib.erl
index 31b375efa9..9e89aa889c 100644
--- a/lib/snmp/test/snmp_agent_test_lib.erl
+++ b/lib/snmp/test/snmp_agent_test_lib.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2005-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2005-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -421,7 +421,7 @@ start_agent(Config, Vsns, Opts) ->
?LOG("start_agent -> entry (~p) with"
"~n Config: ~p"
"~n Vsns: ~p"
- "~n Opts: ~p",[node(), Config, Vsns, Opts]),
+ "~n Opts: ~p", [node(), Config, Vsns, Opts]),
?line AgentDir = ?config(agent_dir, Config),
?line SaNode = ?config(snmp_sa, Config),
diff --git a/lib/snmp/test/snmp_manager_test.erl b/lib/snmp/test/snmp_manager_test.erl
index 518b8b34de..cef96417dc 100644
--- a/lib/snmp/test/snmp_manager_test.erl
+++ b/lib/snmp/test/snmp_manager_test.erl
@@ -795,6 +795,35 @@ notify_started02(suite) -> [];
notify_started02(Config) when is_list(Config) ->
process_flag(trap_exit, true),
put(tname,ns02),
+
+ %% <CONDITIONAL-SKIP>
+ %% The point of this is to catch machines running
+ %% SLES9 (2.6.5)
+ LinuxVersionVerify =
+ fun() ->
+ case os:cmd("uname -m") of
+ "i686" ++ _ ->
+%% io:format("found an i686 machine, "
+%% "now check version~n", []),
+ case os:version() of
+ {2, 6, Rev} when Rev >= 16 ->
+ true;
+ {2, Min, _} when Min > 6 ->
+ true;
+ {Maj, _, _} when Maj > 2 ->
+ true;
+ _ ->
+ false
+ end;
+ _ ->
+ true
+ end
+ end,
+ Skippable = [{unix, [{linux, LinuxVersionVerify}]}],
+ Condition = fun() -> ?OS_BASED_SKIP(Skippable) end,
+ ?NON_PC_TC_MAYBE_SKIP(Config, Condition),
+ %% </CONDITIONAL-SKIP>
+
p("starting with Config: ~n~p", [Config]),
ConfDir = ?config(manager_conf_dir, Config),
diff --git a/lib/snmp/test/snmp_manager_user_test.erl b/lib/snmp/test/snmp_manager_user_test.erl
index 24ed3b0b73..0f47d70873 100644
--- a/lib/snmp/test/snmp_manager_user_test.erl
+++ b/lib/snmp/test/snmp_manager_user_test.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2004-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2004-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -822,10 +822,39 @@ register_monitor_and_crash3(doc) ->
"Start a single user process, "
"register-monitor one user and register one user, "
"crash the single user process.";
-register_monitor_and_crash3(Conf) when is_list(Conf) ->
+register_monitor_and_crash3(Conf) when is_list(Conf) ->
+ process_flag(trap_exit, true),
put(tname,rlac3),
+
+ %% <CONDITIONAL-SKIP>
+ %% The point of this is to catch machines running
+ %% SLES9 (2.6.5)
+ LinuxVersionVerify =
+ fun() ->
+ case os:cmd("uname -m") of
+ "i686" ++ _ ->
+%% io:format("found an i686 machine, "
+%% "now check version~n", []),
+ case os:version() of
+ {2, 6, Rev} when Rev >= 16 ->
+ true;
+ {2, Min, _} when Min > 6 ->
+ true;
+ {Maj, _, _} when Maj > 2 ->
+ true;
+ _ ->
+ false
+ end;
+ _ ->
+ true
+ end
+ end,
+ Skippable = [{unix, [{linux, LinuxVersionVerify}]}],
+ Condition = fun() -> ?OS_BASED_SKIP(Skippable) end,
+ ?NON_PC_TC_MAYBE_SKIP(Conf, Condition),
+ %% </CONDITIONAL-SKIP>
+
p("start"),
- process_flag(trap_exit, true),
ConfDir = ?config(manager_conf_dir, Conf),
DbDir = ?config(manager_db_dir, Conf),
diff --git a/lib/snmp/test/snmp_test_lib.erl b/lib/snmp/test/snmp_test_lib.erl
index 2586b66a13..54839d989b 100644
--- a/lib/snmp/test/snmp_test_lib.erl
+++ b/lib/snmp/test/snmp_test_lib.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2002-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2002-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -172,7 +172,17 @@ os_based_skip(Skippable) when is_list(Skippable) ->
{value, {OsFam, OsName}} ->
true;
{value, {OsFam, OsNames}} when is_list(OsNames) ->
- lists:member(OsName, OsNames);
+ case lists:member(OsName, OsNames) of
+ true ->
+ true;
+ false ->
+ case lists:keysearch(OsName, 1, OsNames) of
+ {value, {OsName, Check}} when is_function(Check) ->
+ Check();
+ _ ->
+ false
+ end
+ end;
_ ->
false
end
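With this change a skippable entry may pair an OS name with a verification fun, as the test suites above already do. A condensed sketch of the accepted shape, assuming os_based_skip/1 is exported (the suites normally reach it via the ?OS_BASED_SKIP macro); the fun body is hypothetical:

%% Plain names skip unconditionally; {Name, Fun} skips only if Fun() returns true.
Skippable = [{unix, [darwin,
                     {linux, fun() ->
                                     %% Hypothetical check: skip on 2.6.5 kernels.
                                     os:version() =:= {2, 6, 5}
                             end}]}],
snmp_test_lib:os_based_skip(Skippable).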
diff --git a/lib/snmp/vsn.mk b/lib/snmp/vsn.mk
index 2fccc733e6..4ca1fb7901 100644
--- a/lib/snmp/vsn.mk
+++ b/lib/snmp/vsn.mk
@@ -17,162 +17,41 @@
#
# %CopyrightEnd%
-SNMP_VSN = 4.16.2
+SNMP_VSN = 4.17
PRE_VSN =
APP_VSN = "snmp-$(SNMP_VSN)$(PRE_VSN)"
-TICKETS = OTP-8563 OTP-8574 OTP-8594 OTP-8595
+TICKETS = OTP-8478
-TICKETS_4_16_1 = OTP-8480 OTP-8481
+TICKETS_4_16_2 = \
+ OTP-8563 \
+ OTP-8574 \
+ OTP-8594 \
+ OTP-8595 \
+ OTP-8646 \
+ OTP-8648
+
+TICKETS_4_16_1 = \
+ OTP-8480 \
+ OTP-8481
TICKETS_4_16 = \
OTP-8395 \
OTP-8433 \
OTP-8442
-TICKETS_4_15 = OTP-8229 OTP-8249
-
-TICKETS_4_14 = OTP-8223 OTP-8228 OTP-8237
-
-TICKETS_4_13_5 = OTP-8116 OTP-8120 OTP-8181 OTP-8182
-
-TICKETS_4_13_4 = OTP-8044 OTP-8062 OTP-8098
-
-TICKETS_4_13_3 = OTP-8015 OTP-8020
-
-TICKETS_4_13_2 = OTP-7961 OTP-7977 OTP-7983 OTP-7989
-
-TICKETS_4_13_1 = OTP-7902
-
-TICKETS_4_13 = OTP-7571 OTP-7735 OTP-7836 OTP-7851
-
-TICKETS_4_12_2 = OTP-7868
-
-TICKETS_4_12_1 = OTP-7695 OTP-7698
-
-TICKETS_4_12 = OTP-7346 OTP-7525
-
-TICKETS_4_11_2 = OTP-7570 OTP-7575
-
-TICKETS_4_11_1 = OTP-7390 OTP-7412 OTP-7426 OTP-7432
-
-TICKETS_4_11 = OTP-7201 OTP-7287 OTP-7319 OTP-7369 OTP-7371 OTP-7377 OTP-7381
-
-TICKETS_4_10_3 = OTP-7219
-
-TICKETS_4_10_2 = OTP-7152 OTP-7153 OTP-7157 OTP-7158 OTP-7159 OTP-7160
-
-TICKETS_4_10_1 = OTP-7083 OTP-7109 OTP-7110 OTP-7119 OTP-7121 OTP-7123
-
-TICKETS_4_10 = OTP-6649 OTP-6841 OTP-6898 OTP-6945
-
-TICKETS_4_9_6 = OTP-6840 OTP-6843
-
-TICKETS_4_9_5 = OTP-6805 OTP-6815
-
-TICKETS_4_9_4 = OTP-6784 OTP-6771
-
-TICKETS_4_9_3 = OTP-6605 OTP-6712 OTP-6713
-
-TICKETS_4_9_2 = OTP-6571
-
-TICKETS_4_9_1 = OTP-6566 OTP-6569
-
-TICKETS_4_9 = \
- OTP-6317 \
- OTP-6318 \
- OTP-6383 \
- OTP-6487 \
- OTP-6515 \
- OTP-6518 \
- OTP-6529 \
- OTP-6532 \
- OTP-6533 \
- OTP-6540
-
-TICKETS_4_8_4 = OTP-6408
-
-TICKETS_4_8_3 = OTP-6337 OTP-6340
-
-TICKETS_4_8_2 = OTP-6214 OTP-6247 OTP-6293
-
-TICKETS_4_8_1 = OTP-6176 OTP-6177
-
-TICKETS_4_8 = OTP-6137 OTP-6149 OTP-6150 OTP-6164
-
-TICKETS_4_7_4 = \
- OTP-6042 \
- OTP-6044 \
- OTP-6049 \
- OTP-6062 \
- OTP-6068 \
- OTP-6074 \
- OTP-6077 \
- OTP-6081
-
-TICKETS_4_7_3 = \
- OTP-6031 \
- OTP-6032
-
-TICKETS_4_7_2 = \
- OTP-5992 \
- OTP-6024
-
-TICKETS_4_7_1 = \
- OTP-5963 \
- OTP-5968 \
- OTP-5969
-
-TICKETS_4_7 = \
- OTP-5870 \
- OTP-5934 \
- OTP-5935 \
- OTP-5937
-
-TICKETS_4_6_1 = \
- OTP-5834 \
- OTP-5838
-
-TICKETS_4_6 = \
- OTP-5763 \
- OTP-5771 \
- OTP-5787 \
- OTP-5797 \
- OTP-5829
-
-TICKETS_4_5 = \
- OTP-5581 \
- OTP-5726 \
- OTP-5727 \
- OTP-5732 \
- OTP-5733 \
- OTP-5740 \
- OTP-5742
-
-TICKETS_4_4_1 = \
- OTP-5719 \
- OTP-5720
-
-TICKETS_4_4 = \
- OTP-5666 \
- OTP-5668 \
- OTP-5669 \
- OTP-5675 \
- OTP-5676 \
- OTP-5678 \
- OTP-5703
+TICKETS_4_15 = \
+ OTP-8229 \
+ OTP-8249
-TICKETS_4_3 = \
- OTP-5636 \
- OTP-5637 \
- OTP-5490
+TICKETS_4_14 = \
+ OTP-8223 \
+ OTP-8228 \
+ OTP-8237
-TICKETS_4_2 = \
- OTP-5574 \
- OTP-5578 \
- OTP-5579 \
- OTP-5580 \
- OTP-5590 \
- OTP-5591 \
- OTP-5592
+TICKETS_4_13_5 = \
+ OTP-8116 \
+ OTP-8120 \
+ OTP-8181 \
+ OTP-8182
diff --git a/lib/ssh/doc/src/notes.xml b/lib/ssh/doc/src/notes.xml
index 67a226f726..7c8735cf56 100644
--- a/lib/ssh/doc/src/notes.xml
+++ b/lib/ssh/doc/src/notes.xml
@@ -37,6 +37,18 @@
<p>The function ssh:connect/4 was not exported.</p>
<p>Own Id: OTP-8550 Aux Id:</p>
</item>
+ <item>
+ <p>Aligned error message with used version (SSH_FX_FAILURE vs
+ SSH_FX_NOT_A_DIRECTORY, the latter introduced in version 6).</p>
+ <p>
+ *** POTENTIAL INCOMPATIBILITY ***</p>
+ <p>Own Id: OTP-8644 Aux Id: seq11574</p>
+ </item>
+ <item>
+ <p>Resolved race condition when another connection is started
+ before a channel is opened in the first connection.</p>
+ <p>Own Id: OTP-8645 Aux Id: seq11577</p>
+ </item>
</list>
</section>
diff --git a/lib/ssh/src/ssh_acceptor.erl b/lib/ssh/src/ssh_acceptor.erl
index 57229daa27..9060626ab3 100644
--- a/lib/ssh/src/ssh_acceptor.erl
+++ b/lib/ssh/src/ssh_acceptor.erl
@@ -79,11 +79,11 @@ acceptor_loop(Callback, Port, Address, Opts, ListenSocket, AcceptTimeout) ->
handle_connection(Callback, Address, Port, Options, Socket) ->
SystemSup = ssh_system_sup:system_supervisor(Address, Port),
- ssh_system_sup:start_subsystem(SystemSup, Options),
+ {ok, SubSysSup} = ssh_system_sup:start_subsystem(SystemSup, Options),
ConnectionSup = ssh_system_sup:connection_supervisor(SystemSup),
{ok, Pid} =
ssh_connection_controler:start_manager_child(ConnectionSup,
- [server, Socket, Options]),
+ [server, Socket, Options, SubSysSup]),
Callback:controlling_process(Socket, Pid),
SshOpts = proplists:get_value(ssh_opts, Options),
Pid ! {start_connection, server, [Address, Port, Socket, SshOpts]}.
diff --git a/lib/ssh/src/ssh_cli.erl b/lib/ssh/src/ssh_cli.erl
index 2764ea2e43..e3b6ffa125 100644
--- a/lib/ssh/src/ssh_cli.erl
+++ b/lib/ssh/src/ssh_cli.erl
@@ -327,7 +327,7 @@ window_change(Tty, OldTty, Buf)
{[], Buf};
window_change(Tty, OldTty, {Buf, BufTail, Col}) ->
M1 = move_cursor(Col, 0, OldTty),
- N = max(Tty#ssh_pty.width - OldTty#ssh_pty.width, 0) * 2,
+ N = erlang:max(Tty#ssh_pty.width - OldTty#ssh_pty.width, 0) * 2,
S = lists:reverse(Buf, [BufTail | lists:duplicate(N, $ )]),
M2 = move_cursor(length(Buf) + length(BufTail) + N, Col, Tty),
{[M1, S | M2], {Buf, BufTail, Col}}.
@@ -398,10 +398,6 @@ nthtail(0, A) -> A;
nthtail(N, [_ | A]) when N > 0 -> nthtail(N-1, A);
nthtail(_, _) -> [].
-%%% utils
-max(A, B) when A > B -> A;
-max(_A, B) -> B.
-
ifelse(Cond, A, B) ->
case Cond of
true -> A;
diff --git a/lib/ssh/src/ssh_connect.hrl b/lib/ssh/src/ssh_connect.hrl
index 57bb141c60..34d4ff8fc1 100755
--- a/lib/ssh/src/ssh_connect.hrl
+++ b/lib/ssh/src/ssh_connect.hrl
@@ -260,5 +260,6 @@
address,
port,
options,
- exec
+ exec,
+ sub_system_supervisor
}).
diff --git a/lib/ssh/src/ssh_connection.erl b/lib/ssh/src/ssh_connection.erl
index b9827c90ea..7b9e9185bf 100644
--- a/lib/ssh/src/ssh_connection.erl
+++ b/lib/ssh/src/ssh_connection.erl
@@ -946,13 +946,12 @@ encode_ip(Addr) when is_list(Addr) ->
end
end.
-start_channel(Address, Port, Cb, Id, Args) ->
- start_channel(Address, Port, Cb, Id, Args, undefined).
+start_channel(Cb, Id, Args, SubSysSup) ->
+ start_channel(Cb, Id, Args, SubSysSup, undefined).
-start_channel(Address, Port, Cb, Id, Args, Exec) ->
+start_channel(Cb, Id, Args, SubSysSup, Exec) ->
ChildSpec = child_spec(Cb, Id, Args, Exec),
- SystemSup = ssh_system_sup:system_supervisor(Address, Port),
- ChannelSup = ssh_system_sup:channel_supervisor(SystemSup),
+ ChannelSup = ssh_subsystem_sup:channel_supervisor(SubSysSup),
ssh_channel_sup:start_child(ChannelSup, ChildSpec).
%%--------------------------------------------------------------------
@@ -1017,18 +1016,19 @@ start_cli(#connection{address = Address, port = Port, cli_spec = {Fun, [Shell]},
{ok, Pid}
end;
-start_cli(#connection{address = Address, port = Port,
- cli_spec = {CbModule, Args}, exec = Exec}, ChannelId) ->
- start_channel(Address, Port, CbModule, ChannelId, Args, Exec).
+start_cli(#connection{cli_spec = {CbModule, Args}, exec = Exec,
+ sub_system_supervisor = SubSysSup}, ChannelId) ->
+ start_channel(CbModule, ChannelId, Args, SubSysSup, Exec).
start_subsytem(BinName, #connection{address = Address, port = Port,
- options = Options},
+ options = Options,
+ sub_system_supervisor = SubSysSup},
#channel{local_id = ChannelId, remote_id = RemoteChannelId},
ReplyMsg) ->
Name = binary_to_list(BinName),
case check_subsystem(Name, Options) of
{Callback, Opts} when is_atom(Callback), Callback =/= none ->
- start_channel(Address, Port, Callback, ChannelId, Opts);
+ start_channel(Callback, ChannelId, Opts, SubSysSup);
{Other, _} when Other =/= none ->
handle_backwards_compatibility(Other, self(),
ChannelId, RemoteChannelId,
diff --git a/lib/ssh/src/ssh_connection_controler.erl b/lib/ssh/src/ssh_connection_controler.erl
index 990541f8d6..636ecba532 100644
--- a/lib/ssh/src/ssh_connection_controler.erl
+++ b/lib/ssh/src/ssh_connection_controler.erl
@@ -99,8 +99,8 @@ terminate(_Reason, #state{}) ->
handle_call({handler, Pid, [Role, Socket, Opts]}, _From, State) ->
{ok, Handler} = ssh_connection_handler:start_link(Role, Pid, Socket, Opts),
{reply, {ok, Handler}, State#state{handler = Handler}};
-handle_call({manager, [server = Role, Socket, Opts]}, _From, State) ->
- {ok, Manager} = ssh_connection_manager:start_link([Role, Socket, Opts]),
+handle_call({manager, [server = Role, Socket, Opts, SubSysSup]}, _From, State) ->
+ {ok, Manager} = ssh_connection_manager:start_link([Role, Socket, Opts, SubSysSup]),
{reply, {ok, Manager}, State#state{manager = Manager}};
handle_call({manager, [client = Role | Opts]}, _From, State) ->
{ok, Manager} = ssh_connection_manager:start_link([Role, Opts]),
diff --git a/lib/ssh/src/ssh_connection_handler.erl b/lib/ssh/src/ssh_connection_handler.erl
index 822ef8f8f9..d46002c494 100644
--- a/lib/ssh/src/ssh_connection_handler.erl
+++ b/lib/ssh/src/ssh_connection_handler.erl
@@ -527,7 +527,7 @@ handle_info({Protocol, Socket, Data}, Statename,
%% Implementations SHOULD decrypt the length after receiving the
%% first 8 (or cipher block size, whichever is larger) bytes of a
%% packet. (RFC 4253: Section 6 - Binary Packet Protocol)
- case size(EncData0) + size(Data) >= max(8, BlockSize) of
+ case size(EncData0) + size(Data) >= erlang:max(8, BlockSize) of
true ->
{Ssh, SshPacketLen, DecData, EncData} =
@@ -758,11 +758,6 @@ after_new_keys(#state{renegotiate = false,
ssh_params = #ssh{role = server}} = State) ->
{userauth, State}.
-max(N, M) when N > M ->
- N;
-max(_, M) ->
- M.
-
handle_ssh_packet_data(RemainingSshPacketLen, DecData, EncData, StateName,
State) ->
EncSize = size(EncData),
diff --git a/lib/ssh/src/ssh_connection_manager.erl b/lib/ssh/src/ssh_connection_manager.erl
index a2effc177e..cffeade485 100644
--- a/lib/ssh/src/ssh_connection_manager.erl
+++ b/lib/ssh/src/ssh_connection_manager.erl
@@ -178,7 +178,7 @@ send_eof(ConnectionManager, ChannelId) ->
%% {stop, Reason}
%% Description: Initiates the server
%%--------------------------------------------------------------------
-init([server, _Socket, Opts]) ->
+init([server, _Socket, Opts, SubSysSup]) ->
process_flag(trap_exit, true),
ssh_bits:install_messages(ssh_connection:messages()),
Cache = ssh_channel:cache_create(),
@@ -187,7 +187,8 @@ init([server, _Socket, Opts]) ->
channel_id_seed = 0,
port_bindings = [],
requests = [],
- channel_pids = []},
+ channel_pids = [],
+ sub_system_supervisor = SubSysSup},
opts = Opts,
connected = false}};
@@ -400,7 +401,7 @@ handle_call({close, ChannelId}, _,
end;
handle_call(stop, _, #state{role = _client,
- client = ChannelPid,
+ client = _ChannelPid,
connection = Pid} = State) ->
DisconnectMsg =
#ssh_msg_disconnect{code = ?SSH_DISCONNECT_BY_APPLICATION,
diff --git a/lib/ssh/src/ssh_sftpd.erl b/lib/ssh/src/ssh_sftpd.erl
index dc789092dd..da91817fd7 100644
--- a/lib/ssh/src/ssh_sftpd.erl
+++ b/lib/ssh/src/ssh_sftpd.erl
@@ -242,7 +242,8 @@ handle_op(?SSH_FXP_REALPATH, ReqId,
end;
handle_op(?SSH_FXP_OPENDIR, ReqId,
<<?UINT32(RLen), RPath:RLen/binary>>,
- State0 = #state{file_handler = FileMod, file_state = FS0}) ->
+ State0 = #state{xf = #ssh_xfer{vsn = Vsn},
+ file_handler = FileMod, file_state = FS0}) ->
RelPath = binary_to_list(RPath),
AbsPath = relate_file_name(RelPath, State0),
@@ -250,10 +251,14 @@ handle_op(?SSH_FXP_OPENDIR, ReqId,
{IsDir, FS1} = FileMod:is_dir(AbsPath, FS0),
State1 = State0#state{file_state = FS1},
case IsDir of
- false ->
+ false when Vsn > 5 ->
ssh_xfer:xf_send_status(XF, ReqId, ?SSH_FX_NOT_A_DIRECTORY,
"Not a directory"),
State1;
+ false ->
+ ssh_xfer:xf_send_status(XF, ReqId, ?SSH_FX_FAILURE,
+ "Not a directory"),
+ State1;
true ->
add_handle(State1, XF, ReqId, directory, {RelPath,unread})
end;
diff --git a/lib/ssh/vsn.mk b/lib/ssh/vsn.mk
index 38a82ff32d..ccdbfe4f9a 100644
--- a/lib/ssh/vsn.mk
+++ b/lib/ssh/vsn.mk
@@ -7,7 +7,9 @@ TICKETS = OTP-8524 \
OTP-8534 \
OTP-8535 \
OTP-8550 \
- OTP-8596
+ OTP-8596 \
+ OTP-8644 \
+ OTP-8645
TICKETS_1.1.8 = OTP-8356 \
OTP-8401
diff --git a/lib/ssl/doc/src/new_ssl.xml b/lib/ssl/doc/src/new_ssl.xml
index ab6e112a35..69298759bd 100644
--- a/lib/ssl/doc/src/new_ssl.xml
+++ b/lib/ssl/doc/src/new_ssl.xml
@@ -22,7 +22,6 @@
The Initial Developer of the Original Code is Ericsson AB.
</legalnotice>
-
<title>ssl</title>
<prepared>Ingela Anderton Andin</prepared>
<responsible>Ingela Anderton Andin</responsible>
@@ -83,11 +82,15 @@
meaningless pid.</item>
<item>New API functions are
ssl:shutdown/2, ssl:cipher_suites/[0,1] and
- ssl:versions/0</item>
+ ssl:versions/0, ssl:renegotiate/1</item>
<item>CRL and policy certificate
extensions are not supported yet. </item>
<item>Supported SSL/TLS-versions are SSL-3.0 and TLS-1.0 </item>
<item>For security reasons sslv2 is not supported.</item>
+      <item>Ephemeral Diffie-Hellman cipher suites are supported,
+      but not Diffie-Hellman certificate cipher suites.</item>
+ <item>Export cipher suites are not supported as the
+ U.S. lifted its export restrictions in early 2000.</item>
</list>
</section>
@@ -148,25 +151,20 @@
<p><c>protocol() = sslv3 | tlsv1 </c></p>
- <p><c>ciphers() = [ciphersuite()] | sting() (according to old API)</c></p>
+ <p><c>ciphers() = [ciphersuite()] | string() (according to old API)</c></p>
<p><c>ciphersuite() =
- {key_exchange(), cipher(), hash(), exportable()}</c></p>
+ {key_exchange(), cipher(), hash()}</c></p>
- <p><c>key_exchange() = rsa | dh_dss | dh_rsa | dh_anon | dhe_dss
- | dhe_rsa | krb5 | KeyExchange_export
+ <p><c>key_exchange() = rsa | dhe_dss | dhe_rsa
</c></p>
- <p><c>cipher() = rc4_128 | idea_cbc | des_cbc | '3des_ede_cbc'
- des40_cbc | dh_dss | aes_128_cbc | aes_256_cbc |
- rc2_cbc_40 | rc4_40 </c></p>
+ <p><c>cipher() = rc4_128 | des_cbc | '3des_ede_cbc'
+ | aes_128_cbc | aes_256_cbc </c></p>
<p> <c>hash() = md5 | sha
</c></p>
- <p> <c>exportable() = export | no_export | ignore
- </c></p>
-
<p><c>ssl_imp() = new | old - default is old.</c></p>
</section>
@@ -409,6 +407,17 @@ end
</desc>
</func>
+ <func>
+ <name>format_error(Reason) -> string()</name>
+ <fsummary>Return an error string.</fsummary>
+ <type>
+ <v>Reason = term()</v>
+ </type>
+ <desc>
+ <p>Presents the error returned by an ssl function as a printable string.</p>
+ </desc>
+ </func>
+
<func>
<name>getopts(Socket) -> </name>
<name>getopts(Socket, OptionNames) ->
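
A minimal usage sketch of the format_error/1 function documented above; the host, port and options are placeholders, and {ssl_imp, new} selects the new implementation as described earlier on this manual page:

    case ssl:connect("example.net", 443, [{ssl_imp, new}], 5000) of
        {ok, SslSocket} ->
            ssl:close(SslSocket);
        {error, Reason} ->
            %% Turn the error term into something readable for logs.
            io:format("TLS connect failed: ~s~n", [ssl:format_error(Reason)])
    end.
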
diff --git a/lib/ssl/doc/src/notes.xml b/lib/ssl/doc/src/notes.xml
index 9d13427677..f213bd11ae 100644
--- a/lib/ssl/doc/src/notes.xml
+++ b/lib/ssl/doc/src/notes.xml
@@ -30,6 +30,67 @@
</header>
<p>This document describes the changes made to the SSL application.
</p>
+
+ <section><title>SSL 3.11.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Fixed handling of several ssl/tls packets arriving at the
+ same time. This was broken during a refactoring of the
+ code.</p>
+ <p>
+ Own Id: OTP-8679</p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Added missing checks for padding and Mac value. Removed
+ code for export ciphers and DH certificates as we decided
+ not to support them.</p>
+ <p>
+ Own Id: OTP-7047</p>
+ </item>
+ <item>
+ <p>
+	    New ssl will no longer return esslerrssl to be backwards
+	    compatible with old ssl, as this hides information from the
+	    user. format_error/1 has been updated to support new ssl.</p>
+ <p>
+ *** POTENTIAL INCOMPATIBILITY ***</p>
+ <p>
+ Own Id: OTP-7049</p>
+ </item>
+ <item>
+ <p>
+ New ssl now supports secure renegotiation as described by
+ RFC 5746.</p>
+ <p>
+ Own Id: OTP-8568</p>
+ </item>
+ <item>
+ <p>
+ Alert handling has been improved to better handle
+ unexpected but valid messages and the implementation is
+ also changed to avoid timing related issues that could
+ cause different error messages depending on network
+	    latency. Packet handling was partly broken, but would
+	    mostly work as expected when the socket was in binary mode.
+	    This has now been fixed.</p>
+ <p>
+ Own Id: OTP-8588</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>SSL 3.11</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/ssl/src/ssl.appup.src b/lib/ssl/src/ssl.appup.src
index e8ae6846aa..52a41617bb 100644
--- a/lib/ssl/src/ssl.appup.src
+++ b/lib/ssl/src/ssl.appup.src
@@ -1,6 +1,7 @@
%% -*- erlang -*-
{"%VSN%",
[
+ {"3.11", [{restart_application, ssl}]},
{"3.10", [{restart_application, ssl}]},
{"3.10.1", [{restart_application, ssl}]},
{"3.10.2", [{restart_application, ssl}]},
@@ -13,6 +14,7 @@
{"3.10.9", [{restart_application, ssl}]}
],
[
+ {"3.11", [{restart_application, ssl}]},
{"3.10", [{restart_application, ssl}]},
{"3.10.1", [{restart_application, ssl}]},
{"3.10.2", [{restart_application, ssl}]},
diff --git a/lib/ssl/src/ssl.erl b/lib/ssl/src/ssl.erl
index 95cd92ee60..185a1f755a 100644
--- a/lib/ssl/src/ssl.erl
+++ b/lib/ssl/src/ssl.erl
@@ -718,7 +718,10 @@ emulated_options([], Inet,Emulated) ->
cipher_suites(Version, []) ->
ssl_cipher:suites(Version);
-cipher_suites(Version, [{_,_,_,_}| _] = Ciphers0) ->
+cipher_suites(Version, [{_,_,_,_}| _] = Ciphers0) -> %% Backwards compatibility
+ Ciphers = [{KeyExchange, Cipher, Hash} || {KeyExchange, Cipher, Hash, _} <- Ciphers0],
+ cipher_suites(Version, Ciphers);
+cipher_suites(Version, [{_,_,_}| _] = Ciphers0) ->
Ciphers = [ssl_cipher:suite(C) || C <- Ciphers0],
cipher_suites(Version, Ciphers);
cipher_suites(Version, [Cipher0 | _] = Ciphers0) when is_binary(Cipher0) ->
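
The extra clause above keeps the old API working: 4-tuples carrying the dropped exportable() field are mapped onto the new 3-tuples before being encoded. Illustrated in the shell (the suite values are examples only):

    1> Old = [{rsa, rc4_128, sha, no_export}, {rsa, aes_128_cbc, sha, ignore}].
    [{rsa,rc4_128,sha,no_export},{rsa,aes_128_cbc,sha,ignore}]
    2> [{Kx, Cipher, Hash} || {Kx, Cipher, Hash, _} <- Old].
    [{rsa,rc4_128,sha},{rsa,aes_128_cbc,sha}]
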
diff --git a/lib/ssl/src/ssl_certificate.erl b/lib/ssl/src/ssl_certificate.erl
index 686e90a70c..9aa31ae8a4 100644
--- a/lib/ssl/src/ssl_certificate.erl
+++ b/lib/ssl/src/ssl_certificate.erl
@@ -34,7 +34,13 @@
-export([trusted_cert_and_path/3,
certificate_chain/2,
file_to_certificats/1,
- validate_extensions/6]).
+ validate_extensions/6,
+ is_valid_extkey_usage/2,
+ is_valid_key_usage/2,
+ select_extension/2,
+ extensions_list/1,
+ signature_type/1
+ ]).
%%====================================================================
%% Internal application API
@@ -67,7 +73,7 @@ trusted_cert_and_path(CertChain, CertDbRef, Verify) ->
%% The root CA was not sent and can not be found, we fail if verify = true
not_valid(?ALERT_REC(?FATAL, ?UNKNOWN_CA), Verify, {Cert, RestPath});
{{SerialNr, Issuer}, Path} ->
- case ssl_certificate_db:lookup_trusted_cert(CertDbRef,
+ case ssl_manager:lookup_trusted_cert(CertDbRef,
SerialNr, Issuer) of
{ok, {BinCert,_}} ->
{BinCert, Path, []};
@@ -85,7 +91,7 @@ certificate_chain(OwnCert, CertsDbRef) ->
{ok, ErlCert} = public_key:pkix_decode_cert(OwnCert, otp),
certificate_chain(ErlCert, OwnCert, CertsDbRef, [OwnCert]).
-file_to_certificats(File) ->
+file_to_certificats(File) ->
{ok, List} = ssl_manager:cache_pem_file(File),
[Bin || {cert, Bin, not_encrypted} <- List].
@@ -112,7 +118,28 @@ validate_extensions([Extension | Rest], ValidationState, UnknownExtensions,
Verify, AccErr, Role) ->
validate_extensions(Rest, ValidationState, [Extension | UnknownExtensions],
Verify, AccErr, Role).
-
+
+is_valid_key_usage(KeyUse, Use) ->
+ lists:member(Use, KeyUse).
+
+ select_extension(_, []) ->
+ undefined;
+select_extension(Id, [#'Extension'{extnID = Id} = Extension | _]) ->
+ Extension;
+select_extension(Id, [_ | Extensions]) ->
+ select_extension(Id, Extensions).
+
+extensions_list(asn1_NOVALUE) ->
+ [];
+extensions_list(Extensions) ->
+ Extensions.
+
+signature_type(RSA) when RSA == ?sha1WithRSAEncryption;
+ RSA == ?md5WithRSAEncryption ->
+ rsa;
+signature_type(?'id-dsa-with-sha1') ->
+ dsa.
+
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
@@ -148,7 +175,7 @@ certificate_chain(_CertsDbRef, Chain, _SerialNr, _Issuer, true) ->
{ok, lists:reverse(Chain)};
certificate_chain(CertsDbRef, Chain, SerialNr, Issuer, _SelfSigned) ->
- case ssl_certificate_db:lookup_trusted_cert(CertsDbRef,
+ case ssl_manager:lookup_trusted_cert(CertsDbRef,
SerialNr, Issuer) of
{ok, {IssuerCert, ErlCert}} ->
{ok, ErlCert} = public_key:pkix_decode_cert(IssuerCert, otp),
@@ -164,7 +191,7 @@ certificate_chain(CertsDbRef, Chain, SerialNr, Issuer, _SelfSigned) ->
end.
find_issuer(OtpCert, PrevCandidateKey) ->
- case ssl_certificate_db:issuer_candidate(PrevCandidateKey) of
+ case ssl_manager:issuer_candidate(PrevCandidateKey) of
no_more_candidates ->
{error, issuer_not_found};
{Key, {_Cert, ErlCertCandidate}} ->
@@ -188,9 +215,6 @@ is_valid_extkey_usage(KeyUse, server) ->
%% Server wants to verify client
is_valid_key_usage(KeyUse, ?'id-kp-clientAuth').
-is_valid_key_usage(KeyUse, Use) ->
- lists:member(Use, KeyUse).
-
not_valid_extension(Error, true, _) ->
throw(Error);
not_valid_extension(Error, false, AccErrors) ->
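
The newly exported helpers are internal to the ssl application, but the following hedged fragment shows how they compose. It assumes -include_lib("public_key/include/public_key.hrl") for the records and OIDs, and DerCert is a DER-encoded certificate binary supplied by the caller:

    {ok, OtpCert} = public_key:pkix_decode_cert(DerCert, otp),
    TBSCert = OtpCert#'OTPCertificate'.tbsCertificate,
    Exts = ssl_certificate:extensions_list(TBSCert#'OTPTBSCertificate'.extensions),
    case ssl_certificate:select_extension(?'id-ce-keyUsage', Exts) of
        undefined ->
            no_key_usage_restriction;
        #'Extension'{extnValue = KeyUse} ->
            %% true if the certificate may be used for digital signatures
            ssl_certificate:is_valid_key_usage(KeyUse, digitalSignature)
    end.
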
diff --git a/lib/ssl/src/ssl_cipher.erl b/lib/ssl/src/ssl_cipher.erl
index 3d3d11b7f3..2a71df8ee1 100644
--- a/lib/ssl/src/ssl_cipher.erl
+++ b/lib/ssl/src/ssl_cipher.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2007-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -28,12 +28,14 @@
-include("ssl_internal.hrl").
-include("ssl_record.hrl").
-include("ssl_cipher.hrl").
+-include("ssl_alert.hrl").
-include("ssl_debug.hrl").
+-include_lib("public_key/include/public_key.hrl").
-export([security_parameters/2, suite_definition/1,
- decipher/4, cipher/4,
+ decipher/5, cipher/4,
suite/1, suites/1,
- openssl_suite/1, openssl_suite_name/1]).
+ openssl_suite/1, openssl_suite_name/1, filter/2]).
-compile(inline).
@@ -48,7 +50,7 @@
%% cipher values has been updated according to <CipherSuite>
%%-------------------------------------------------------------------
security_parameters(CipherSuite, SecParams) ->
- { _, Cipher, Hash, Exportable} = suite_definition(CipherSuite),
+ { _, Cipher, Hash} = suite_definition(CipherSuite),
SecParams#security_parameters{
cipher_suite = CipherSuite,
bulk_cipher_algorithm = bulk_cipher_algorithm(Cipher),
@@ -58,8 +60,7 @@ security_parameters(CipherSuite, SecParams) ->
key_material_length = key_material(Cipher),
iv_size = iv_size(Cipher),
mac_algorithm = mac_algorithm(Hash),
- hash_size = hash_size(Hash),
- exportable = Exportable}.
+ hash_size = hash_size(Hash)}.
%%--------------------------------------------------------------------
%% Function: cipher(Method, CipherState, Mac, Data) ->
@@ -91,10 +92,10 @@ cipher(?DES, CipherState, Mac, Fragment) ->
block_cipher(fun(Key, IV, T) ->
crypto:des_cbc_encrypt(Key, IV, T)
end, block_size(des_cbc), CipherState, Mac, Fragment);
-cipher(?DES40, CipherState, Mac, Fragment) ->
- block_cipher(fun(Key, IV, T) ->
- crypto:des_cbc_encrypt(Key, IV, T)
- end, block_size(des_cbc), CipherState, Mac, Fragment);
+%% cipher(?DES40, CipherState, Mac, Fragment) ->
+%% block_cipher(fun(Key, IV, T) ->
+%% crypto:des_cbc_encrypt(Key, IV, T)
+%% end, block_size(des_cbc), CipherState, Mac, Fragment);
cipher(?'3DES', CipherState, Mac, Fragment) ->
block_cipher(fun(<<K1:8/binary, K2:8/binary, K3:8/binary>>, IV, T) ->
crypto:des3_cbc_encrypt(K1, K2, K3, IV, T)
@@ -104,15 +105,11 @@ cipher(?AES, CipherState, Mac, Fragment) ->
crypto:aes_cbc_128_encrypt(Key, IV, T);
(Key, IV, T) when byte_size(Key) =:= 32 ->
crypto:aes_cbc_256_encrypt(Key, IV, T)
- end, block_size(aes_128_cbc), CipherState, Mac, Fragment);
+ end, block_size(aes_128_cbc), CipherState, Mac, Fragment).
%% cipher(?IDEA, CipherState, Mac, Fragment) ->
%% block_cipher(fun(Key, IV, T) ->
%% crypto:idea_cbc_encrypt(Key, IV, T)
%% end, block_size(idea_cbc), CipherState, Mac, Fragment);
-cipher(?RC2, CipherState, Mac, Fragment) ->
- block_cipher(fun(Key, IV, T) ->
- crypto:rc2_40_cbc_encrypt(Key, IV, T)
- end, block_size(rc2_cbc_40), CipherState, Mac, Fragment).
block_cipher(Fun, BlockSz, #cipher_state{key=Key, iv=IV} = CS0,
Mac, Fragment) ->
@@ -128,7 +125,7 @@ block_cipher(Fun, BlockSz, #cipher_state{key=Key, iv=IV} = CS0,
{T, CS0#cipher_state{iv=NextIV}}.
%%--------------------------------------------------------------------
-%% Function: decipher(Method, CipherState, Mac, Data) ->
+%% Function: decipher(Method, CipherState, Mac, Data, Version) ->
%% {Decrypted, UpdateCipherState}
%%
%% Method - integer() (as defined in ssl_cipher.hrl)
@@ -138,9 +135,9 @@ block_cipher(Fun, BlockSz, #cipher_state{key=Key, iv=IV} = CS0,
%% Description: Decrypts the data and the mac using method, updating
%% the cipher state
%%-------------------------------------------------------------------
-decipher(?NULL, _HashSz, CipherState, Fragment) ->
+decipher(?NULL, _HashSz, CipherState, Fragment, _) ->
{Fragment, <<>>, CipherState};
-decipher(?RC4, HashSz, CipherState, Fragment) ->
+decipher(?RC4, HashSz, CipherState, Fragment, _) ->
?DBG_TERM(CipherState#cipher_state.key),
State0 = case CipherState#cipher_state.state of
undefined -> crypto:rc4_set_key(CipherState#cipher_state.key);
@@ -153,47 +150,47 @@ decipher(?RC4, HashSz, CipherState, Fragment) ->
GSC = generic_stream_cipher_from_bin(T, HashSz),
#generic_stream_cipher{content=Content, mac=Mac} = GSC,
{Content, Mac, CipherState#cipher_state{state=State1}};
-decipher(?DES, HashSz, CipherState, Fragment) ->
+decipher(?DES, HashSz, CipherState, Fragment, Version) ->
block_decipher(fun(Key, IV, T) ->
crypto:des_cbc_decrypt(Key, IV, T)
- end, CipherState, HashSz, Fragment);
-decipher(?DES40, HashSz, CipherState, Fragment) ->
- block_decipher(fun(Key, IV, T) ->
- crypto:des_cbc_decrypt(Key, IV, T)
- end, CipherState, HashSz, Fragment);
-decipher(?'3DES', HashSz, CipherState, Fragment) ->
+ end, CipherState, HashSz, Fragment, Version);
+%% decipher(?DES40, HashSz, CipherState, Fragment, Version) ->
+%% block_decipher(fun(Key, IV, T) ->
+%% crypto:des_cbc_decrypt(Key, IV, T)
+%% end, CipherState, HashSz, Fragment, Version);
+decipher(?'3DES', HashSz, CipherState, Fragment, Version) ->
block_decipher(fun(<<K1:8/binary, K2:8/binary, K3:8/binary>>, IV, T) ->
crypto:des3_cbc_decrypt(K1, K2, K3, IV, T)
- end, CipherState, HashSz, Fragment);
-decipher(?AES, HashSz, CipherState, Fragment) ->
+ end, CipherState, HashSz, Fragment, Version);
+decipher(?AES, HashSz, CipherState, Fragment, Version) ->
block_decipher(fun(Key, IV, T) when byte_size(Key) =:= 16 ->
crypto:aes_cbc_128_decrypt(Key, IV, T);
(Key, IV, T) when byte_size(Key) =:= 32 ->
crypto:aes_cbc_256_decrypt(Key, IV, T)
- end, CipherState, HashSz, Fragment);
-%% decipher(?IDEA, HashSz, CipherState, Fragment) ->
+ end, CipherState, HashSz, Fragment, Version).
+%% decipher(?IDEA, HashSz, CipherState, Fragment, Version) ->
%% block_decipher(fun(Key, IV, T) ->
%% crypto:idea_cbc_decrypt(Key, IV, T)
-%% end, CipherState, HashSz, Fragment);
-decipher(?RC2, HashSz, CipherState, Fragment) ->
- block_decipher(fun(Key, IV, T) ->
- crypto:rc2_40_cbc_decrypt(Key, IV, T)
- end, CipherState, HashSz, Fragment).
+%% end, CipherState, HashSz, Fragment, Version);
block_decipher(Fun, #cipher_state{key=Key, iv=IV} = CipherState0,
- HashSz, Fragment) ->
+ HashSz, Fragment, Version) ->
?DBG_HEX(Key),
?DBG_HEX(IV),
?DBG_HEX(Fragment),
T = Fun(Key, IV, Fragment),
?DBG_HEX(T),
GBC = generic_block_cipher_from_bin(T, HashSz),
-    ok = check_padding(GBC), %% TODO kolla också...
- Content = GBC#generic_block_cipher.content,
- Mac = GBC#generic_block_cipher.mac,
- CipherState1 = CipherState0#cipher_state{iv=next_iv(Fragment, IV)},
- {Content, Mac, CipherState1}.
-
+ case is_correct_padding(GBC, Version) of
+ true ->
+ Content = GBC#generic_block_cipher.content,
+ Mac = GBC#generic_block_cipher.mac,
+ CipherState1 = CipherState0#cipher_state{iv=next_iv(Fragment, IV)},
+ {Content, Mac, CipherState1};
+ false ->
+ ?ALERT_REC(?FATAL, ?BAD_RECORD_MAC)
+ end.
+
%%--------------------------------------------------------------------
%% Function: suites(Version) -> [Suite]
%%
@@ -209,293 +206,111 @@ suites({3, N}) when N == 1; N == 2 ->
%%--------------------------------------------------------------------
%% Function: suite_definition(CipherSuite) ->
-%% {KeyExchange, Cipher, Hash, Exportable}
+%% {KeyExchange, Cipher, Hash}
%%
%%
%% CipherSuite - as defined in ssl_cipher.hrl
-%% KeyExchange - rsa | dh_dss | dh_rsa | dh_anon | dhe_dss | dhe_rsa
-%% krb5 | *_export (old ssl)
+%% KeyExchange - rsa | dh_anon | dhe_dss | dhe_rsa | krb5
+%%
%% Cipher - null | rc4_128 | idea_cbc | des_cbc | '3des_ede_cbc'
-%% des40_cbc | dh_dss | aes_128_cbc | aes_256_cbc |
-%% rc2_cbc_40 | rc4_40
+%% des40_cbc | aes_128_cbc | aes_256_cbc
%% Hash - null | md5 | sha
-%% Exportable - export | no_export | ignore(?)
%%
-%% Description: Returns a security parameters record where the
+%% Description: Returns a security parameters tuple where the
%% cipher values has been updated according to <CipherSuite>
-%% Note: since idea is unsupported on the openssl version used by
-%% crypto (as of OTP R12B), we've commented away the idea stuff
+%% Note: Currently unsupported suites are commented out.
+%% They should be supported or removed in the future.
%%-------------------------------------------------------------------
%% TLS v1.1 suites
suite_definition(?TLS_NULL_WITH_NULL_NULL) ->
- {null, null, null, ignore};
-suite_definition(?TLS_RSA_WITH_NULL_MD5) ->
- {rsa, null, md5, ignore};
-suite_definition(?TLS_RSA_WITH_NULL_SHA) ->
- {rsa, null, sha, ignore};
-suite_definition(?TLS_RSA_WITH_RC4_128_MD5) -> % ok
- {rsa, rc4_128, md5, no_export};
-suite_definition(?TLS_RSA_WITH_RC4_128_SHA) -> % ok
- {rsa, rc4_128, sha, no_export};
-%% suite_definition(?TLS_RSA_WITH_IDEA_CBC_SHA) -> % unsupported
-%% {rsa, idea_cbc, sha, no_export};
-suite_definition(?TLS_RSA_WITH_DES_CBC_SHA) -> % ok
- {rsa, des_cbc, sha, no_export};
+ {null, null, null};
+%% suite_definition(?TLS_RSA_WITH_NULL_MD5) ->
+%% {rsa, null, md5};
+%% suite_definition(?TLS_RSA_WITH_NULL_SHA) ->
+%% {rsa, null, sha};
+suite_definition(?TLS_RSA_WITH_RC4_128_MD5) ->
+ {rsa, rc4_128, md5};
+suite_definition(?TLS_RSA_WITH_RC4_128_SHA) ->
+ {rsa, rc4_128, sha};
+%% suite_definition(?TLS_RSA_WITH_IDEA_CBC_SHA) ->
+%% {rsa, idea_cbc, sha};
+suite_definition(?TLS_RSA_WITH_DES_CBC_SHA) ->
+ {rsa, des_cbc, sha};
suite_definition(?TLS_RSA_WITH_3DES_EDE_CBC_SHA) ->
- {rsa, '3des_ede_cbc', sha, no_export};
-suite_definition(?TLS_DH_DSS_WITH_DES_CBC_SHA) ->
- {dh_dss, des_cbc, sha, no_export};
-suite_definition(?TLS_DH_DSS_WITH_3DES_EDE_CBC_SHA) ->
- {dh_dss, '3des_ede_cbc', sha, no_export};
-suite_definition(?TLS_DH_RSA_WITH_DES_CBC_SHA) ->
- {dh_rsa, des_cbc, sha, no_export};
-suite_definition(?TLS_DH_RSA_WITH_3DES_EDE_CBC_SHA) ->
- {dh_rsa, '3des_ede_cbc', sha, no_export};
+ {rsa, '3des_ede_cbc', sha};
suite_definition(?TLS_DHE_DSS_WITH_DES_CBC_SHA) ->
- {dhe_dss, des_cbc, sha, no_export};
+ {dhe_dss, des_cbc, sha};
suite_definition(?TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA) ->
- {dhe_dss, '3des_ede_cbc', sha, no_export};
+ {dhe_dss, '3des_ede_cbc', sha};
suite_definition(?TLS_DHE_RSA_WITH_DES_CBC_SHA) ->
- {dhe_rsa, des_cbc, sha, no_export};
+ {dhe_rsa, des_cbc, sha};
suite_definition(?TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA) ->
- {dhe_rsa, '3des_ede_cbc', sha, no_export};
-suite_definition(?TLS_DH_anon_WITH_RC4_128_MD5) ->
- {dh_anon, rc4_128, md5, no_export};
-suite_definition(?TLS_DH_anon_WITH_DES_CBC_SHA) ->
- {dh_anon, des40_cbc, sha, no_export};
-suite_definition(?TLS_DH_anon_WITH_3DES_EDE_CBC_SHA) ->
- {dh_anon, '3des_ede_cbc', sha, no_export};
+ {dhe_rsa, '3des_ede_cbc', sha};
%%% TSL V1.1 AES suites
-suite_definition(?TLS_RSA_WITH_AES_128_CBC_SHA) -> % ok
- {rsa, aes_128_cbc, sha, ignore};
-suite_definition(?TLS_DH_DSS_WITH_AES_128_CBC_SHA) ->
- {dh_dss, aes_128_cbc, sha, ignore};
-suite_definition(?TLS_DH_RSA_WITH_AES_128_CBC_SHA) ->
- {dh_rsa, aes_128_cbc, sha, ignore};
+suite_definition(?TLS_RSA_WITH_AES_128_CBC_SHA) ->
+ {rsa, aes_128_cbc, sha};
suite_definition(?TLS_DHE_DSS_WITH_AES_128_CBC_SHA) ->
- {dhe_dss, aes_128_cbc, sha, ignore};
+ {dhe_dss, aes_128_cbc, sha};
suite_definition(?TLS_DHE_RSA_WITH_AES_128_CBC_SHA) ->
- {dhe_rsa, aes_128_cbc, sha, ignore};
-suite_definition(?TLS_DH_anon_WITH_AES_128_CBC_SHA) ->
- {dh_anon, aes_128_cbc, sha, ignore};
-suite_definition(?TLS_RSA_WITH_AES_256_CBC_SHA) -> % ok
- {rsa, aes_256_cbc, sha, ignore};
-suite_definition(?TLS_DH_DSS_WITH_AES_256_CBC_SHA) ->
- {dh_dss, aes_256_cbc, sha, ignore};
-suite_definition(?TLS_DH_RSA_WITH_AES_256_CBC_SHA) ->
- {dh_rsa, aes_256_cbc, sha, ignore};
+ {dhe_rsa, aes_128_cbc, sha};
+suite_definition(?TLS_RSA_WITH_AES_256_CBC_SHA) ->
+ {rsa, aes_256_cbc, sha};
suite_definition(?TLS_DHE_DSS_WITH_AES_256_CBC_SHA) ->
- {dhe_dss, aes_256_cbc, sha, ignore};
+ {dhe_dss, aes_256_cbc, sha};
suite_definition(?TLS_DHE_RSA_WITH_AES_256_CBC_SHA) ->
- {dhe_rsa, aes_256_cbc, sha, ignore};
-suite_definition(?TLS_DH_anon_WITH_AES_256_CBC_SHA) ->
- {dh_anon, aes_256_cbc, sha, ignore};
-
-%% TSL V1.1 KRB SUITES
-suite_definition(?TLS_KRB5_WITH_DES_CBC_SHA) ->
- {krb5, des_cbc, sha, ignore};
-suite_definition(?TLS_KRB5_WITH_3DES_EDE_CBC_SHA) ->
- {krb5, '3des_ede_cbc', sha, ignore};
-suite_definition(?TLS_KRB5_WITH_RC4_128_SHA) ->
- {krb5, rc4_128, sha, ignore};
-%% suite_definition(?TLS_KRB5_WITH_IDEA_CBC_SHA) ->
-%% {krb5, idea_cbc, sha, ignore};
-suite_definition(?TLS_KRB5_WITH_DES_CBC_MD5) ->
- {krb5, des_cbc, md5, ignore};
-suite_definition(?TLS_KRB5_WITH_3DES_EDE_CBC_MD5) ->
- {krb5, '3des_ede_cbc', md5, ignore};
-suite_definition(?TLS_KRB5_WITH_RC4_128_MD5) ->
- {krb5, rc4_128, md5, ignore};
-%% suite_definition(?TLS_KRB5_WITH_IDEA_CBC_MD5) ->
-%% {krb5, idea_cbc, md5, ignore};
-
-suite_definition(?TLS_RSA_EXPORT1024_WITH_RC4_56_MD5) ->
- {rsa, rc4_56, md5, export};
-suite_definition(?TLS_RSA_EXPORT1024_WITH_RC2_CBC_56_MD5) ->
- {rsa, rc2_cbc_56, md5, export};
-suite_definition(?TLS_RSA_EXPORT1024_WITH_DES_CBC_SHA) ->
- {rsa, des_cbc, sha, export};
-suite_definition(?TLS_DHE_DSS_EXPORT1024_WITH_DES_CBC_SHA) ->
- {dhe_dss, des_cbc, sha, export};
-suite_definition(?TLS_RSA_EXPORT1024_WITH_RC4_56_SHA) ->
- {rsa, rc4_56, sha, export};
-suite_definition(?TLS_DHE_DSS_EXPORT1024_WITH_RC4_56_SHA) ->
- {dhe_dss, rc4_56, sha, export};
-suite_definition(?TLS_DHE_DSS_WITH_RC4_128_SHA) ->
- {dhe_dss, rc4_128, sha, export};
-
-%% Export suites TLS 1.0 OR SSLv3-only servers.
-suite_definition(?TLS_KRB5_EXPORT_WITH_DES_CBC_40_SHA) ->
- {krb5_export, des40_cbc, sha, export};
-suite_definition(?TLS_KRB5_EXPORT_WITH_RC2_CBC_40_SHA) ->
- {krb5_export, rc2_cbc_40, sha, export};
-suite_definition(?TLS_KRB5_EXPORT_WITH_RC4_40_SHA) ->
- {krb5_export, des40_cbc, sha, export};
-suite_definition(?TLS_KRB5_EXPORT_WITH_DES_CBC_40_MD5) ->
- {krb5_export, des40_cbc, md5, export};
-suite_definition(?TLS_KRB5_EXPORT_WITH_RC2_CBC_40_MD5) ->
- {krb5_export, rc2_cbc_40, md5, export};
-suite_definition(?TLS_KRB5_EXPORT_WITH_RC4_40_MD5) ->
- {krb5_export, rc2_cbc_40, md5, export};
-suite_definition(?TLS_RSA_EXPORT_WITH_RC4_40_MD5) -> % ok
- {rsa, rc4_40, md5, export};
-suite_definition(?TLS_RSA_EXPORT_WITH_RC2_CBC_40_MD5) -> % ok
- {rsa, rc2_cbc_40, md5, export};
-suite_definition(?TLS_RSA_EXPORT_WITH_DES40_CBC_SHA) ->
- {rsa, des40_cbc, sha, export};
-suite_definition(?TLS_DH_DSS_EXPORT_WITH_DES40_CBC_SHA) ->
- {dh_dss, des40_cbc, sha, export};
-suite_definition(?TLS_DH_RSA_EXPORT_WITH_DES40_CBC_SHA) ->
- {dh_rsa, des40_cbc, sha, export};
-suite_definition(?TLS_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA) ->
- {dhe_dss, des40_cbc, sha, export};
-suite_definition(?TLS_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA) ->
- {dhe_rsa, des40_cbc, sha, export};
-suite_definition(?TLS_DH_anon_EXPORT_WITH_RC4_40_MD5) ->
- {dh_anon, rc4_40, md5, export};
-suite_definition(?TLS_DH_anon_EXPORT_WITH_DES40_CBC_SHA) ->
- {dh_anon, des40_cbc, sha, export}.
+ {dhe_rsa, aes_256_cbc, sha}.
%% TLS v1.1 suites
-suite({rsa, null, md5, ignore}) ->
- ?TLS_RSA_WITH_NULL_MD5;
-suite({rsa, null, sha, ignore}) ->
- ?TLS_RSA_WITH_NULL_SHA;
-suite({rsa, rc4_128, md5, no_export}) ->
+%%suite({rsa, null, md5}) ->
+%% ?TLS_RSA_WITH_NULL_MD5;
+%%suite({rsa, null, sha}) ->
+%% ?TLS_RSA_WITH_NULL_SHA;
+suite({rsa, rc4_128, md5}) ->
?TLS_RSA_WITH_RC4_128_MD5;
-suite({rsa, rc4_128, sha, no_export}) ->
+suite({rsa, rc4_128, sha}) ->
?TLS_RSA_WITH_RC4_128_SHA;
-%% suite({rsa, idea_cbc, sha, no_export}) ->
+%% suite({rsa, idea_cbc, sha}) ->
%% ?TLS_RSA_WITH_IDEA_CBC_SHA;
-suite({rsa, des_cbc, sha, no_export}) ->
+suite({rsa, des_cbc, sha}) ->
?TLS_RSA_WITH_DES_CBC_SHA;
-suite({rsa, '3des_ede_cbc', sha, no_export}) ->
+suite({rsa, '3des_ede_cbc', sha}) ->
?TLS_RSA_WITH_3DES_EDE_CBC_SHA;
-suite({dh_dss, des_cbc, sha, no_export}) ->
- ?TLS_DH_DSS_WITH_DES_CBC_SHA;
-suite({dh_dss, '3des_ede_cbc', sha, no_export}) ->
- ?TLS_DH_DSS_WITH_3DES_EDE_CBC_SHA;
-suite({dh_rsa, des_cbc, sha, no_export}) ->
- ?TLS_DH_RSA_WITH_DES_CBC_SHA;
-suite({dh_rsa, '3des_ede_cbc', sha, no_export}) ->
- ?TLS_DH_RSA_WITH_3DES_EDE_CBC_SHA;
-suite({dhe_dss, des_cbc, sha, no_export}) ->
+suite({dhe_dss, des_cbc, sha}) ->
?TLS_DHE_DSS_WITH_DES_CBC_SHA;
-suite({dhe_dss, '3des_ede_cbc', sha, no_export}) ->
+suite({dhe_dss, '3des_ede_cbc', sha}) ->
?TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA;
-suite({dhe_rsa, des_cbc, sha, no_export}) ->
+suite({dhe_rsa, des_cbc, sha}) ->
?TLS_DHE_RSA_WITH_DES_CBC_SHA;
-suite({dhe_rsa, '3des_ede_cbc', sha, no_export}) ->
+suite({dhe_rsa, '3des_ede_cbc', sha}) ->
?TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA;
-suite({dh_anon, rc4_128, md5, no_export}) ->
- ?TLS_DH_anon_WITH_RC4_128_MD5;
-suite({dh_anon, des40_cbc, sha, no_export}) ->
- ?TLS_DH_anon_WITH_DES_CBC_SHA;
-suite({dh_anon, '3des_ede_cbc', sha, no_export}) ->
- ?TLS_DH_anon_WITH_3DES_EDE_CBC_SHA;
+%% suite({dh_anon, rc4_128, md5}) ->
+%% ?TLS_DH_anon_WITH_RC4_128_MD5;
+%% suite({dh_anon, des40_cbc, sha}) ->
+%% ?TLS_DH_anon_WITH_DES_CBC_SHA;
+%% suite({dh_anon, '3des_ede_cbc', sha}) ->
+%% ?TLS_DH_anon_WITH_3DES_EDE_CBC_SHA;
%%% TSL V1.1 AES suites
-suite({rsa, aes_128_cbc, sha, ignore}) ->
+suite({rsa, aes_128_cbc, sha}) ->
?TLS_RSA_WITH_AES_128_CBC_SHA;
-suite({dh_dss, aes_128_cbc, sha, ignore}) ->
- ?TLS_DH_DSS_WITH_AES_128_CBC_SHA;
-suite({dh_rsa, aes_128_cbc, sha, ignore}) ->
- ?TLS_DH_RSA_WITH_AES_128_CBC_SHA;
-suite({dhe_dss, aes_128_cbc, sha, ignore}) ->
+suite({dhe_dss, aes_128_cbc, sha}) ->
?TLS_DHE_DSS_WITH_AES_128_CBC_SHA;
-suite({dhe_rsa, aes_128_cbc, sha, ignore}) ->
+suite({dhe_rsa, aes_128_cbc, sha}) ->
?TLS_DHE_RSA_WITH_AES_128_CBC_SHA;
-suite({dh_anon, aes_128_cbc, sha, ignore}) ->
- ?TLS_DH_anon_WITH_AES_128_CBC_SHA;
-suite({rsa, aes_256_cbc, sha, ignore}) ->
+%% suite({dh_anon, aes_128_cbc, sha}) ->
+%% ?TLS_DH_anon_WITH_AES_128_CBC_SHA;
+suite({rsa, aes_256_cbc, sha}) ->
?TLS_RSA_WITH_AES_256_CBC_SHA;
-suite({dh_dss, aes_256_cbc, sha, ignore}) ->
- ?TLS_DH_DSS_WITH_AES_256_CBC_SHA;
-suite({dh_rsa, aes_256_cbc, sha, ignore}) ->
- ?TLS_DH_RSA_WITH_AES_256_CBC_SHA;
-suite({dhe_dss, aes_256_cbc, sha, ignore}) ->
+suite({dhe_dss, aes_256_cbc, sha}) ->
?TLS_DHE_DSS_WITH_AES_256_CBC_SHA;
-suite({dhe_rsa, aes_256_cbc, sha, ignore}) ->
- ?TLS_DHE_RSA_WITH_AES_256_CBC_SHA;
-suite({dh_anon, aes_256_cbc, sha, ignore}) ->
- ?TLS_DH_anon_WITH_AES_256_CBC_SHA;
-
-%% TSL V1.1 KRB SUITES
-suite({krb5, des_cbc, sha, ignore}) ->
- ?TLS_KRB5_WITH_DES_CBC_SHA;
-suite({krb5_cbc, '3des_ede_cbc', sha, ignore}) ->
- ?TLS_KRB5_WITH_3DES_EDE_CBC_SHA;
-suite({krb5, rc4_128, sha, ignore}) ->
- ?TLS_KRB5_WITH_RC4_128_SHA;
-%% suite({krb5_cbc, idea_cbc, sha, ignore}) ->
-%% ?TLS_KRB5_WITH_IDEA_CBC_SHA;
-suite({krb5_cbc, md5, ignore}) ->
- ?TLS_KRB5_WITH_DES_CBC_MD5;
-suite({krb5_ede_cbc, des_cbc, md5, ignore}) ->
- ?TLS_KRB5_WITH_3DES_EDE_CBC_MD5;
-suite({krb5_128, rc4_128, md5, ignore}) ->
- ?TLS_KRB5_WITH_RC4_128_MD5;
-%% suite({krb5, idea_cbc, md5, ignore}) ->
-%% ?TLS_KRB5_WITH_IDEA_CBC_MD5;
-
-%% Export suites TLS 1.0 OR SSLv3-only servers.
-suite({rsa, rc4_40, md5, export}) ->
- ?TLS_RSA_EXPORT_WITH_RC4_40_MD5;
-suite({rsa, rc2_cbc_40, md5, export}) ->
- ?TLS_RSA_EXPORT_WITH_RC2_CBC_40_MD5;
-suite({rsa, des40_cbc, sha, export}) ->
- ?TLS_RSA_EXPORT_WITH_DES40_CBC_SHA;
-suite({rsa, rc4_56, md5, export}) ->
- ?TLS_RSA_EXPORT1024_WITH_RC4_56_MD5;
-suite({rsa, rc2_cbc_56, md5, export}) ->
- ?TLS_RSA_EXPORT1024_WITH_RC2_CBC_56_MD5;
-suite({rsa, des_cbc, sha, export}) ->
- ?TLS_RSA_EXPORT1024_WITH_DES_CBC_SHA;
-suite({dhe_dss, des_cbc, sha, export}) ->
- ?TLS_DHE_DSS_EXPORT1024_WITH_DES_CBC_SHA;
-suite({rsa, rc4_56, sha, export}) ->
- ?TLS_RSA_EXPORT1024_WITH_RC4_56_SHA;
-suite({dhe_dss, rc4_56, sha, export}) ->
- ?TLS_DHE_DSS_EXPORT1024_WITH_RC4_56_SHA;
-suite({dhe_dss, rc4_128, sha, export}) ->
- ?TLS_DHE_DSS_WITH_RC4_128_SHA;
-suite({krb5_export, des40_cbc, sha, export}) ->
- ?TLS_KRB5_EXPORT_WITH_DES_CBC_40_SHA;
-suite({krb5_export, rc2_cbc_40, sha, export}) ->
- ?TLS_KRB5_EXPORT_WITH_RC2_CBC_40_SHA;
-suite({krb5_export, rc4_cbc_40, sha, export}) ->
- ?TLS_KRB5_EXPORT_WITH_RC4_40_SHA;
-suite({krb5_export, des40_cbc, md5, export}) ->
- ?TLS_KRB5_EXPORT_WITH_DES_CBC_40_MD5;
-suite({krb5_export, rc2_cbc_40, md5, export}) ->
- ?TLS_KRB5_EXPORT_WITH_RC2_CBC_40_MD5;
-suite({krb5_export, rc4_cbc_40, md5, export}) ->
- ?TLS_KRB5_EXPORT_WITH_RC4_40_MD5;
-suite({rsa_export, rc4_cbc_40, md5, export}) ->
- ?TLS_RSA_EXPORT_WITH_RC4_40_MD5;
-suite({rsa_export, rc2_cbc_40, md5, export}) ->
- ?TLS_RSA_EXPORT_WITH_RC2_CBC_40_MD5;
-suite({rsa_export, des40_cbc, sha, export}) ->
- ?TLS_RSA_EXPORT_WITH_DES40_CBC_SHA;
-suite({dh_dss_export, des40_cbc, sha, export}) ->
- ?TLS_DH_DSS_EXPORT_WITH_DES40_CBC_SHA;
-suite({dh_rsa_export, des40_cbc, sha, export}) ->
- ?TLS_DH_RSA_EXPORT_WITH_DES40_CBC_SHA;
-suite({dhe_dss_export, des40_cbc, sha, export}) ->
- ?TLS_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA;
-suite({dhe_rsa_export, des40_cbc, sha, export}) ->
- ?TLS_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA;
-suite({dh_anon_export, rc4_40, md5, export}) ->
- ?TLS_DH_anon_EXPORT_WITH_RC4_40_MD5;
-suite({dh_anon_export, des40_cbc, sha, export}) ->
- ?TLS_DH_anon_EXPORT_WITH_DES40_CBC_SHA.
+suite({dhe_rsa, aes_256_cbc, sha}) ->
+ ?TLS_DHE_RSA_WITH_AES_256_CBC_SHA.
+%% suite({dh_anon, aes_256_cbc, sha}) ->
+%% ?TLS_DH_anon_WITH_AES_256_CBC_SHA.
%% translate constants <-> openssl-strings
-%% TODO: Is there a pattern in the nameing
-%% that is useable to make a nicer function defention?
-
openssl_suite("DHE-RSA-AES256-SHA") ->
?TLS_DHE_RSA_WITH_AES_256_CBC_SHA;
openssl_suite("DHE-DSS-AES256-SHA") ->
@@ -514,45 +329,16 @@ openssl_suite("DHE-DSS-AES128-SHA") ->
?TLS_DHE_DSS_WITH_AES_128_CBC_SHA;
openssl_suite("AES128-SHA") ->
?TLS_RSA_WITH_AES_128_CBC_SHA;
-%% TODO: Do we want to support this?
-%% openssl_suite("DHE-DSS-RC4-SHA") ->
-%% ?TLS_DHE_DSS_WITH_RC4_128_SHA;
%%openssl_suite("IDEA-CBC-SHA") ->
%% ?TLS_RSA_WITH_IDEA_CBC_SHA;
openssl_suite("RC4-SHA") ->
?TLS_RSA_WITH_RC4_128_SHA;
openssl_suite("RC4-MD5") ->
?TLS_RSA_WITH_RC4_128_MD5;
-%% TODO: Do we want to support this?
-openssl_suite("EXP1024-RC4-MD5") ->
- ?TLS_RSA_EXPORT1024_WITH_RC4_56_MD5;
-openssl_suite("EXP1024-RC2-CBC-MD5") ->
- ?TLS_RSA_EXPORT1024_WITH_RC2_CBC_56_MD5;
-openssl_suite("EXP1024-DES-CBC-SHA") ->
- ?TLS_RSA_EXPORT1024_WITH_DES_CBC_SHA;
-openssl_suite("EXP1024-DHE-DSS-DES-CBC-SHA") ->
- ?TLS_DHE_DSS_EXPORT1024_WITH_DES_CBC_SHA;
-openssl_suite("EXP1024-RC4-SHA") ->
- ?TLS_RSA_EXPORT1024_WITH_RC4_56_SHA;
-openssl_suite("EXP1024-DHE-DSS-RC4-SHA") ->
- ?TLS_DHE_DSS_EXPORT1024_WITH_RC4_56_SHA;
-openssl_suite("DHE-DSS-RC4-SHA") ->
- ?TLS_DHE_DSS_WITH_RC4_128_SHA;
-
openssl_suite("EDH-RSA-DES-CBC-SHA") ->
?TLS_DHE_RSA_WITH_DES_CBC_SHA;
openssl_suite("DES-CBC-SHA") ->
- ?TLS_RSA_WITH_DES_CBC_SHA;
-openssl_suite("EXP-EDH-RSA-DES-CBC-SHA") ->
- ?TLS_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA;
-openssl_suite("EXP-EDH-DSS-DES-CBC-SHA") ->
- ?TLS_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA;
-openssl_suite("EXP-DES-CBC-SHA") ->
- ?TLS_RSA_EXPORT_WITH_DES40_CBC_SHA;
-openssl_suite("EXP-RC2-CBC-MD5") ->
- ?TLS_RSA_EXPORT_WITH_RC2_CBC_40_MD5;
-openssl_suite("EXP-RC4-MD5") ->
- ?TLS_RSA_EXPORT_WITH_RC4_40_MD5.
+ ?TLS_RSA_WITH_DES_CBC_SHA.
openssl_suite_name(?TLS_DHE_RSA_WITH_AES_256_CBC_SHA) ->
"DHE-RSA-AES256-SHA";
@@ -582,36 +368,22 @@ openssl_suite_name(?TLS_DHE_RSA_WITH_DES_CBC_SHA) ->
"EDH-RSA-DES-CBC-SHA";
openssl_suite_name(?TLS_RSA_WITH_DES_CBC_SHA) ->
"DES-CBC-SHA";
-openssl_suite_name(?TLS_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA) ->
- "EXP-EDH-RSA-DES-CBC-SHA";
-openssl_suite_name(?TLS_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA) ->
- "EXP-EDH-DSS-DES-CBC-SHA";
-openssl_suite_name(?TLS_RSA_EXPORT_WITH_DES40_CBC_SHA) ->
- "EXP-DES-CBC-SHA";
-openssl_suite_name(?TLS_RSA_EXPORT_WITH_RC2_CBC_40_MD5) ->
- "EXP-RC2-CBC-MD5";
-openssl_suite_name(?TLS_RSA_EXPORT_WITH_RC4_40_MD5) ->
- "EXP-RC4-MD5";
-
-openssl_suite_name(?TLS_RSA_EXPORT1024_WITH_RC4_56_MD5) ->
- "EXP1024-RC4-MD5";
-openssl_suite_name(?TLS_RSA_EXPORT1024_WITH_RC2_CBC_56_MD5) ->
- "EXP1024-RC2-CBC-MD5";
-openssl_suite_name(?TLS_RSA_EXPORT1024_WITH_DES_CBC_SHA) ->
- "EXP1024-DES-CBC-SHA";
-openssl_suite_name(?TLS_DHE_DSS_EXPORT1024_WITH_DES_CBC_SHA) ->
- "EXP1024-DHE-DSS-DES-CBC-SHA";
-openssl_suite_name(?TLS_RSA_EXPORT1024_WITH_RC4_56_SHA) ->
- "EXP1024-RC4-SHA";
-openssl_suite_name(?TLS_DHE_DSS_EXPORT1024_WITH_RC4_56_SHA) ->
- "EXP1024-DHE-DSS-RC4-SHA";
-openssl_suite_name(?TLS_DHE_DSS_WITH_RC4_128_SHA) ->
- "DHE-DSS-RC4-SHA";
-
 %% No openssl name
openssl_suite_name(Cipher) ->
suite_definition(Cipher).
+filter(undefined, Ciphers) ->
+ Ciphers;
+filter(DerCert, Ciphers) ->
+ {ok, OtpCert} = public_key:pkix_decode_cert(DerCert, otp),
+ SigAlg = OtpCert#'OTPCertificate'.signatureAlgorithm,
+ case ssl_certificate:signature_type(SigAlg#'SignatureAlgorithm'.algorithm) of
+ rsa ->
+ filter_rsa(OtpCert, Ciphers -- dsa_signed_suites());
+ dsa ->
+ Ciphers -- rsa_signed_suites()
+ end.
+
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
@@ -621,15 +393,10 @@ bulk_cipher_algorithm(null) ->
%% Not supported yet
%% bulk_cipher_algorithm(idea_cbc) ->
%% ?IDEA;
-bulk_cipher_algorithm(Cipher) when Cipher == rc2_cbc_40;
- Cipher == rc2_cbc_56 ->
- ?RC2;
-bulk_cipher_algorithm(Cipher) when Cipher == rc4_40;
- Cipher == rc4_56;
- Cipher == rc4_128 ->
+bulk_cipher_algorithm(rc4_128) ->
?RC4;
-bulk_cipher_algorithm(des40_cbc) ->
- ?DES40;
+%% bulk_cipher_algorithm(des40_cbc) ->
+%% ?DES40;
bulk_cipher_algorithm(des_cbc) ->
?DES;
bulk_cipher_algorithm('3des_ede_cbc') ->
@@ -639,14 +406,10 @@ bulk_cipher_algorithm(Cipher) when Cipher == aes_128_cbc;
?AES.
type(Cipher) when Cipher == null;
- Cipher == rc4_40;
- Cipher == rc4_56;
Cipher == rc4_128 ->
?STREAM;
type(Cipher) when Cipher == idea_cbc;
- Cipher == rc2_cbc_40;
- Cipher == rc2_cbc_56;
Cipher == des40_cbc;
Cipher == des_cbc;
Cipher == '3des_ede_cbc';
@@ -659,13 +422,8 @@ key_material(null) ->
key_material(Cipher) when Cipher == idea_cbc;
Cipher == rc4_128 ->
16;
-key_material(Cipher) when Cipher == rc2_cbc_56;
- Cipher == rc4_56 ->
- 7;
-key_material(Cipher) when Cipher == rc2_cbc_40;
- Cipher == rc4_40;
- Cipher == des40_cbc ->
- 5;
+%%key_material(des40_cbc) ->
+%% 5;
key_material(des_cbc) ->
8;
key_material('3des_ede_cbc') ->
@@ -678,10 +436,6 @@ key_material(aes_256_cbc) ->
expanded_key_material(null) ->
0;
expanded_key_material(Cipher) when Cipher == idea_cbc;
- Cipher == rc2_cbc_40;
- Cipher == rc2_cbc_56;
- Cipher == rc4_40;
- Cipher == rc4_56;
Cipher == rc4_128 ->
16;
expanded_key_material(Cipher) when Cipher == des_cbc;
@@ -696,13 +450,9 @@ expanded_key_material(Cipher) when Cipher == aes_128_cbc;
effective_key_bits(null) ->
0;
-effective_key_bits(Cipher) when Cipher == rc2_cbc_40;
- Cipher == rc4_40;
- Cipher == des40_cbc ->
- 40;
-effective_key_bits(Cipher) when Cipher == rc2_cbc_56;
- Cipher == rc4_56;
- Cipher == des_cbc ->
+%%effective_key_bits(des40_cbc) ->
+%% 40;
+effective_key_bits(des_cbc) ->
56;
effective_key_bits(Cipher) when Cipher == idea_cbc;
Cipher == rc4_128;
@@ -714,16 +464,12 @@ effective_key_bits(aes_256_cbc) ->
256.
iv_size(Cipher) when Cipher == null;
- Cipher == rc4_40;
- Cipher == rc4_56;
Cipher == rc4_128 ->
0;
iv_size(Cipher) ->
block_size(Cipher).
block_size(Cipher) when Cipher == idea_cbc;
- Cipher == rc2_cbc_40;
- Cipher == rc2_cbc_56;
Cipher == des40_cbc;
Cipher == des_cbc;
Cipher == '3des_ede_cbc' ->
@@ -763,9 +509,12 @@ generic_stream_cipher_from_bin(T, HashSz) ->
#generic_stream_cipher{content=Content,
mac=Mac}.
-check_padding(_GBC) ->
- ok.
+is_correct_padding(_, {3, 0}) ->
+ true;
+is_correct_padding(#generic_block_cipher{padding_length = Len, padding = Padding}, _) ->
+ list_to_binary(lists:duplicate(Len, Len)) == Padding.
+
get_padding(Length, BlockSize) ->
get_padding_aux(BlockSize, Length rem BlockSize).
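
The version argument exists because TLS 1.0 requires every padding octet to equal the padding length, whereas SSL 3.0 leaves the padding content unspecified, which is why the check is skipped for {3,0}. In the shell, the rule the new check enforces reduces to:

    1> PadLen = 3, Padding = <<3,3,3>>.
    <<3,3,3>>
    2> list_to_binary(lists:duplicate(PadLen, PadLen)) =:= Padding.
    true
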
@@ -782,3 +531,53 @@ next_iv(Bin, IV) ->
<<_:FirstPart/binary, NextIV:IVSz/binary>> = Bin,
NextIV.
+rsa_signed_suites() ->
+ dhe_rsa_suites() ++ rsa_suites().
+
+dhe_rsa_suites() ->
+ [?TLS_DHE_RSA_WITH_AES_256_CBC_SHA,
+ ?TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA,
+ ?TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
+ ?TLS_DHE_RSA_WITH_DES_CBC_SHA].
+
+rsa_suites() ->
+ [?TLS_RSA_WITH_AES_256_CBC_SHA,
+ ?TLS_RSA_WITH_3DES_EDE_CBC_SHA,
+ ?TLS_RSA_WITH_AES_128_CBC_SHA,
+ %%?TLS_RSA_WITH_IDEA_CBC_SHA,
+ ?TLS_RSA_WITH_RC4_128_SHA,
+ ?TLS_RSA_WITH_RC4_128_MD5,
+ ?TLS_RSA_WITH_DES_CBC_SHA].
+
+dsa_signed_suites() ->
+ dhe_dss_suites().
+
+dhe_dss_suites() ->
+ [?TLS_DHE_DSS_WITH_AES_256_CBC_SHA,
+ ?TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA,
+ ?TLS_DHE_DSS_WITH_AES_128_CBC_SHA,
+ ?TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA].
+
+filter_rsa(OtpCert, RsaCiphers) ->
+ TBSCert = OtpCert#'OTPCertificate'.tbsCertificate,
+ TBSExtensions = TBSCert#'OTPTBSCertificate'.extensions,
+ Extensions = ssl_certificate:extensions_list(TBSExtensions),
+ case ssl_certificate:select_extension(?'id-ce-keyUsage', Extensions) of
+ undefined ->
+ RsaCiphers;
+ #'Extension'{extnValue = KeyUse} ->
+ Result = filter_rsa_suites(keyEncipherment,
+ KeyUse, RsaCiphers, rsa_suites()),
+ filter_rsa_suites(digitalSignature,
+ KeyUse, Result, dhe_rsa_suites())
+ end.
+
+filter_rsa_suites(Use, KeyUse, CipherSuits, RsaSuites) ->
+ case ssl_certificate:is_valid_key_usage(KeyUse, Use) of
+ true ->
+ CipherSuits;
+ false ->
+ CipherSuits -- RsaSuites
+ end.
+
+
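
A hedged usage sketch for the new filter/2: given the server's DER certificate and the suites for the negotiated version, only suites the certificate can actually serve remain. DerCert is a placeholder binary and {3,1} denotes TLS 1.0:

    Version = {3, 1},
    Suites0 = ssl_cipher:suites(Version),
    Suites  = ssl_cipher:filter(DerCert, Suites0).
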
diff --git a/lib/ssl/src/ssl_cipher.hrl b/lib/ssl/src/ssl_cipher.hrl
index d282cbd780..80fe527f45 100644
--- a/lib/ssl/src/ssl_cipher.hrl
+++ b/lib/ssl/src/ssl_cipher.hrl
@@ -57,7 +57,7 @@
%% TLS_NULL_WITH_NULL_NULL = { 0x00,0x00 };
-define(TLS_NULL_WITH_NULL_NULL, <<?BYTE(16#00), ?BYTE(16#00)>>).
-%%% The following CipherSuite definitions require that the server
+%%% The following cipher suite definitions require that the server
%%% provide an RSA certificate that can be used for key exchange. The
%%% server may request either an RSA or a DSS signature-capable
%%% certificate in the certificate request message.
@@ -68,24 +68,15 @@
%% TLS_RSA_WITH_NULL_SHA = { 0x00,0x02 };
-define(TLS_RSA_WITH_NULL_SHA, <<?BYTE(16#00), ?BYTE(16#02)>>).
-%% TLS_RSA_EXPORT_WITH_RC4_40_MD5 = { 0x00,0x03 };
--define(TLS_RSA_EXPORT_WITH_RC4_40_MD5, <<?BYTE(16#00), ?BYTE(16#03)>>).
-
%% TLS_RSA_WITH_RC4_128_MD5 = { 0x00,0x04 };
-define(TLS_RSA_WITH_RC4_128_MD5, <<?BYTE(16#00), ?BYTE(16#04)>>).
%% TLS_RSA_WITH_RC4_128_SHA = { 0x00,0x05 };
-define(TLS_RSA_WITH_RC4_128_SHA, <<?BYTE(16#00), ?BYTE(16#05)>>).
-%% TLS_RSA_EXPORT_WITH_RC2_CBC_40_MD5 = { 0x00,0x06 };
--define(TLS_RSA_EXPORT_WITH_RC2_CBC_40_MD5, <<?BYTE(16#00), ?BYTE(16#06)>>).
-
%% TLS_RSA_WITH_IDEA_CBC_SHA = { 0x00,0x07 };
-define(TLS_RSA_WITH_IDEA_CBC_SHA, <<?BYTE(16#00), ?BYTE(16#07)>>).
-%% TLS_RSA_EXPORT_WITH_DES40_CBC_SHA = { 0x00,0x08 };
--define(TLS_RSA_EXPORT_WITH_DES40_CBC_SHA, <<?BYTE(16#00), ?BYTE(16#08)>>).
-
%% TLS_RSA_WITH_DES_CBC_SHA = { 0x00,0x09 };
-define(TLS_RSA_WITH_DES_CBC_SHA, <<?BYTE(16#00), ?BYTE(16#09)>>).
@@ -106,51 +97,33 @@
%%% provided by the client must use the parameters (group and
%%% generator) described by the server.
-%% TLS_DH_DSS_EXPORT_WITH_DES40_CBC_SHA = { 0x00,0x0B };
--define(TLS_DH_DSS_EXPORT_WITH_DES40_CBC_SHA, <<?BYTE(16#00), ?BYTE(16#0B)>>).
-
%% TLS_DH_DSS_WITH_DES_CBC_SHA = { 0x00,0x0C };
-define(TLS_DH_DSS_WITH_DES_CBC_SHA, <<?BYTE(16#00), ?BYTE(16#0C)>>).
%% TLS_DH_DSS_WITH_3DES_EDE_CBC_SHA = { 0x00,0x0D };
-define(TLS_DH_DSS_WITH_3DES_EDE_CBC_SHA, <<?BYTE(16#00), ?BYTE(16#0D)>>).
-%% TLS_DH_RSA_EXPORT_WITH_DES40_CBC_SHA = { 0x00,0x0E };
--define(TLS_DH_RSA_EXPORT_WITH_DES40_CBC_SHA, <<?BYTE(16#00), ?BYTE(16#0E)>>).
-
%% TLS_DH_RSA_WITH_DES_CBC_SHA = { 0x00,0x0F };
-define(TLS_DH_RSA_WITH_DES_CBC_SHA, <<?BYTE(16#00), ?BYTE(16#0F)>>).
%% TLS_DH_RSA_WITH_3DES_EDE_CBC_SHA = { 0x00,0x10 };
-define(TLS_DH_RSA_WITH_3DES_EDE_CBC_SHA, <<?BYTE(16#00), ?BYTE(16#10)>>).
-%% TLS_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA = { 0x00,0x11 };
--define(TLS_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA, <<?BYTE(16#00), ?BYTE(16#11)>>).
-
%% TLS_DHE_DSS_WITH_DES_CBC_SHA = { 0x00,0x12 };
-define(TLS_DHE_DSS_WITH_DES_CBC_SHA, <<?BYTE(16#00), ?BYTE(16#12)>>).
%% TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA = { 0x00,0x13 };
-define(TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA, <<?BYTE(16#00), ?BYTE(16#13)>>).
-%% TLS_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA = { 0x00,0x14 };
--define(TLS_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA, <<?BYTE(16#00), ?BYTE(16#14)>>).
-
%% TLS_DHE_RSA_WITH_DES_CBC_SHA = { 0x00,0x15 };
-define(TLS_DHE_RSA_WITH_DES_CBC_SHA, <<?BYTE(16#00), ?BYTE(16#15)>>).
%% TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA = { 0x00,0x16 };
-define(TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA, <<?BYTE(16#00), ?BYTE(16#16)>>).
-%% TLS_DH_anon_EXPORT_WITH_RC4_40_MD5 = { 0x00,0x17 };
--define(TLS_DH_anon_EXPORT_WITH_RC4_40_MD5, <<?BYTE(16#00), ?BYTE(16#17)>>).
-
%% TLS_DH_anon_WITH_RC4_128_MD5 = { 0x00,0x18 };
-define(TLS_DH_anon_WITH_RC4_128_MD5, <<?BYTE(16#00),?BYTE(16#18)>>).
-%% TLS_DH_anon_EXPORT_WITH_DES40_CBC_SHA = { 0x00,0x19 };
--define(TLS_DH_anon_EXPORT_WITH_DES40_CBC_SHA, <<?BYTE(16#00), ?BYTE(16#19)>>).
-
%% TLS_DH_anon_WITH_DES_CBC_SHA = { 0x00,0x1A };
-define(TLS_DH_anon_WITH_DES_CBC_SHA, <<?BYTE(16#00), ?BYTE(16#1A)>>).
@@ -222,35 +195,7 @@
%% TLS_KRB5_WITH_IDEA_CBC_MD5 = { 0x00,0x25 };
-define(TLS_KRB5_WITH_IDEA_CBC_MD5, <<?BYTE(16#00), ?BYTE(16#25)>>).
-%% TLS_KRB5_EXPORT_WITH_DES_CBC_40_SHA = { 0x00,0x26 };
--define(TLS_KRB5_EXPORT_WITH_DES_CBC_40_SHA, <<?BYTE(16#00), ?BYTE(16#26)>>).
-
-%% TLS_KRB5_EXPORT_WITH_RC2_CBC_40_SHA = { 0x00,0x27 };
--define(TLS_KRB5_EXPORT_WITH_RC2_CBC_40_SHA, <<?BYTE(16#00), ?BYTE(16#27)>>).
-
-%% TLS_KRB5_EXPORT_WITH_RC4_40_SHA = { 0x00,0x28 };
--define(TLS_KRB5_EXPORT_WITH_RC4_40_SHA, <<?BYTE(16#00), ?BYTE(16#28)>>).
-
-%% TLS_KRB5_EXPORT_WITH_DES_CBC_40_MD5 = { 0x00,0x29 };
--define(TLS_KRB5_EXPORT_WITH_DES_CBC_40_MD5, <<?BYTE(16#00), ?BYTE(16#29)>>).
-
-%% TLS_KRB5_EXPORT_WITH_RC2_CBC_40_MD5 = { 0x00,0x2A };
--define(TLS_KRB5_EXPORT_WITH_RC2_CBC_40_MD5, <<?BYTE(16#00), ?BYTE(16#2A)>>).
-
-%% TLS_KRB5_EXPORT_WITH_RC4_40_MD5 = { 0x00,0x2B };
--define(TLS_KRB5_EXPORT_WITH_RC4_40_MD5, <<?BYTE(16#00), ?BYTE(16#2B)>>).
-
-%% Additional TLS ciphersuites from draft-ietf-tls-56-bit-ciphersuites-00.txt
-
--define(TLS_RSA_EXPORT1024_WITH_RC4_56_MD5, <<?BYTE(16#00), ?BYTE(16#60)>>).
--define(TLS_RSA_EXPORT1024_WITH_RC2_CBC_56_MD5, <<?BYTE(16#00), ?BYTE(16#61)>>).
--define(TLS_RSA_EXPORT1024_WITH_DES_CBC_SHA, <<?BYTE(16#00), ?BYTE(16#62)>>).
--define(TLS_DHE_DSS_EXPORT1024_WITH_DES_CBC_SHA, <<?BYTE(16#00), ?BYTE(16#63)>>).
--define(TLS_RSA_EXPORT1024_WITH_RC4_56_SHA, <<?BYTE(16#00), ?BYTE(16#64)>>).
--define(TLS_DHE_DSS_EXPORT1024_WITH_RC4_56_SHA, <<?BYTE(16#00), ?BYTE(16#65)>>).
--define(TLS_DHE_DSS_WITH_RC4_128_SHA, <<?BYTE(16#00), ?BYTE(16#66)>>).
-
-%% RFC 5746 - Not a real ciphersuite used to signal empty "renegotiation_info" extension
+%% RFC 5746 - Not a real cipher suite used to signal empty "renegotiation_info" extension
%% to avoid handshake failure from old servers that do not ignore
%% hello extension data as they should.
-define(TLS_EMPTY_RENEGOTIATION_INFO_SCSV, <<?BYTE(16#00), ?BYTE(16#FF)>>).
diff --git a/lib/ssl/src/ssl_connection.erl b/lib/ssl/src/ssl_connection.erl
index a4eaf03086..abd1b59011 100644
--- a/lib/ssl/src/ssl_connection.erl
+++ b/lib/ssl/src/ssl_connection.erl
@@ -39,7 +39,7 @@
-include_lib("public_key/include/public_key.hrl").
%% Internal application API
--export([send/2, send/3, recv/3, connect/7, ssl_accept/6, handshake/2,
+-export([send/2, recv/3, connect/7, ssl_accept/6, handshake/2,
socket_control/3, close/1, shutdown/2,
new_user/2, get_opts/2, set_opts/2, info/1, session_info/1,
peer_certificate/1, sockname/1, peername/1, renegotiation/1]).
@@ -65,13 +65,14 @@
ssl_options, % #ssl_options{}
socket_options, % #socket_options{}
connection_states, % #connection_states{} from ssl_record.hrl
+	  tls_packets = [],         % Decoded ssl/tls packets not yet handled.
tls_record_buffer, % binary() buffer of incomplete records
tls_handshake_buffer, % binary() buffer of incomplete handshakes
%% {{md5_hash, sha_hash}, {prev_md5, prev_sha}} (binary())
tls_handshake_hashes, % see above
tls_cipher_texts, % list() received but not deciphered yet
own_cert, % binary()
- session, % #session{} from ssl_handshake.erl
+ session, % #session{} from ssl_handshake.hrl
session_cache, %
session_cache_cb, %
negotiated_version, % #protocol_version{}
@@ -87,7 +88,6 @@
from, % term(), where to reply
bytes_to_read, % integer(), # bytes to read in passive mode
user_data_buffer, % binary()
-%% tls_buffer, % Keeps a lookahead one packet if available
log_alert, % boolean()
renegotiation, % {boolean(), From | internal | peer}
recv_during_renegotiation, %boolean()
@@ -108,9 +108,9 @@
%% Description: Sends data over the ssl connection
%%--------------------------------------------------------------------
send(Pid, Data) ->
- sync_send_all_state_event(Pid, {application_data, erlang:iolist_to_binary(Data)}, infinity).
-send(Pid, Data, Timeout) ->
- sync_send_all_state_event(Pid, {application_data, erlang:iolist_to_binary(Data)}, Timeout).
+ sync_send_all_state_event(Pid, {application_data,
+ erlang:iolist_to_binary(Data)}, infinity).
+
%%--------------------------------------------------------------------
%% Function: recv(Socket, Length Timeout) -> {ok, Data} | {error, reason}
%%
@@ -211,8 +211,6 @@ peername(ConnectionPid) ->
%%
%% Description: Same as inet:getopts/2
%%--------------------------------------------------------------------
-get_opts({ListenSocket, {_SslOpts, SockOpts}, _}, OptTags) ->
- get_socket_opts(ListenSocket, OptTags, SockOpts, []);
get_opts(ConnectionPid, OptTags) ->
sync_send_all_state_event(ConnectionPid, {get_opts, OptTags}).
%%--------------------------------------------------------------------
@@ -283,12 +281,12 @@ start_link(Role, Host, Port, Socket, Options, User, CbInfo) ->
%% gen_fsm:start_link/3,4, this function is called by the new process to
%% initialize.
%%--------------------------------------------------------------------
-init([Role, Host, Port, Socket, {SSLOpts, _} = Options,
+init([Role, Host, Port, Socket, {SSLOpts0, _} = Options,
User, CbInfo]) ->
State0 = initial_state(Role, Host, Port, Socket, Options, User, CbInfo),
Hashes0 = ssl_handshake:init_hashes(),
- try ssl_init(SSLOpts, Role) of
+ try ssl_init(SSLOpts0, Role) of
{ok, Ref, CacheRef, OwnCert, Key, DHParams} ->
State = State0#state{tls_handshake_hashes = Hashes0,
own_cert = OwnCert,
@@ -320,10 +318,14 @@ hello(start, #state{host = Host, port = Port, role = client,
ssl_options = SslOpts,
transport_cb = Transport, socket = Socket,
connection_states = ConnectionStates,
+ own_cert = Cert,
renegotiation = {Renegotiation, _}}
= State0) ->
+
Hello = ssl_handshake:client_hello(Host, Port,
- ConnectionStates, SslOpts, Renegotiation),
+ ConnectionStates,
+ SslOpts, Cert,
+ Renegotiation),
Version = Hello#client_hello.client_version,
Hashes0 = ssl_handshake:init_hashes(),
@@ -361,7 +363,7 @@ hello(#server_hello{cipher_suite = CipherSuite,
case ssl_handshake:hello(Hello, SslOptions, ConnectionStates0, Renegotiation) of
{Version, NewId, ConnectionStates1} ->
- {KeyAlgorithm, _, _, _} =
+ {KeyAlgorithm, _, _} =
ssl_cipher:suite_definition(CipherSuite),
PremasterSecret = make_premaster_secret(ReqVersion, KeyAlgorithm),
@@ -404,10 +406,11 @@ hello(Hello = #client_hello{client_version = ClientVersion},
renegotiation = {Renegotiation, _},
session_cache = Cache,
session_cache_cb = CacheCb,
- ssl_options = SslOpts}) ->
+ ssl_options = SslOpts,
+ own_cert = Cert}) ->
case ssl_handshake:hello(Hello, SslOpts, {Port, Session0, Cache, CacheCb,
- ConnectionStates0}, Renegotiation) of
+ ConnectionStates0, Cert}, Renegotiation) of
{Version, {Type, Session}, ConnectionStates} ->
do_server_hello(Type, State#state{connection_states =
ConnectionStates,
@@ -425,7 +428,7 @@ abbreviated(#hello_request{}, State0) ->
{Record, State} = next_record(State0),
next_state(hello, Record, State);
-abbreviated(Finished = #finished{verify_data = Data},
+abbreviated(#finished{verify_data = Data} = Finished,
#state{role = server,
negotiated_version = Version,
tls_handshake_hashes = Hashes,
@@ -443,7 +446,7 @@ abbreviated(Finished = #finished{verify_data = Data},
{stop, normal, State}
end;
-abbreviated(Finished = #finished{verify_data = Data},
+abbreviated(#finished{verify_data = Data} = Finished,
#state{role = client, tls_handshake_hashes = Hashes0,
session = #session{master_secret = MasterSecret},
negotiated_version = Version,
@@ -507,7 +510,7 @@ certify(#certificate{} = Cert,
certify(#server_key_exchange{} = KeyExchangeMsg,
#state{role = client, negotiated_version = Version,
key_algorithm = Alg} = State0)
- when Alg == dhe_dss; Alg == dhe_rsa ->%%Not imp:Alg == dh_anon;Alg == krb5 ->
+ when Alg == dhe_dss; Alg == dhe_rsa ->
case handle_server_key(KeyExchangeMsg, State0) of
#state{} = State1 ->
{Record, State} = next_record(State1),
@@ -518,13 +521,9 @@ certify(#server_key_exchange{} = KeyExchangeMsg,
{stop, normal, State0}
end;
-certify(#server_key_exchange{},
- State = #state{role = client, negotiated_version = Version,
- key_algorithm = Alg})
- when Alg == rsa; Alg == dh_dss; Alg == dh_rsa ->
- Alert = ?ALERT_REC(?FATAL, ?UNEXPECTED_MESSAGE),
- handle_own_alert(Alert, Version, certify_server_key_exchange, State),
- {stop, normal, State};
+certify(#server_key_exchange{} = Msg,
+ #state{role = client, key_algorithm = rsa} = State) ->
+ handle_unexpected_message(Msg, certify_server_keyexchange, State);
certify(#certificate_request{}, State0) ->
{Record, State} = next_record(State0#state{client_certificate_requested = true}),
@@ -568,17 +567,12 @@ certify(#server_hello_done{},
{stop, normal, State0}
end;
-certify(#client_key_exchange{},
- State = #state{role = server,
- client_certificate_requested = true,
- ssl_options = #ssl_options{fail_if_no_peer_cert = true},
- negotiated_version = Version}) ->
+certify(#client_key_exchange{} = Msg,
+ #state{role = server,
+ client_certificate_requested = true,
+ ssl_options = #ssl_options{fail_if_no_peer_cert = true}} = State) ->
%% We expect a certificate here
- Alert = ?ALERT_REC(?FATAL, ?UNEXPECTED_MESSAGE),
- handle_own_alert(Alert, Version,
- certify_server_waiting_certificate, State),
- {stop, normal, State};
-
+ handle_unexpected_message(Msg, certify_client_key_exchange, State);
certify(#client_key_exchange{exchange_keys
= #encrypted_premaster_secret{premaster_secret
@@ -712,13 +706,14 @@ connection(#hello_request{}, #state{host = Host, port = Port,
socket = Socket,
ssl_options = SslOpts,
negotiated_version = Version,
+ own_cert = Cert,
transport_cb = Transport,
connection_states = ConnectionStates0,
renegotiation = {Renegotiation, _},
tls_handshake_hashes = Hashes0} = State0) ->
Hello = ssl_handshake:client_hello(Host, Port,
- ConnectionStates0, SslOpts, Renegotiation),
+ ConnectionStates0, SslOpts, Cert, Renegotiation),
{BinMsg, ConnectionStates1, Hashes1} =
encode_handshake(Hello, Version, ConnectionStates0, Hashes0),
@@ -818,10 +813,22 @@ handle_sync_event(start, From, StateName, State) ->
handle_sync_event(close, _, _StateName, State) ->
{stop, normal, ok, State};
-handle_sync_event({shutdown, How}, _, StateName,
- #state{transport_cb = CbModule,
+handle_sync_event({shutdown, How0}, _, StateName,
+ #state{transport_cb = Transport,
+ negotiated_version = Version,
+ connection_states = ConnectionStates,
socket = Socket} = State) ->
- case CbModule:shutdown(Socket, How) of
+ case How0 of
+ How when How == write; How == both ->
+ Alert = ?ALERT_REC(?WARNING, ?CLOSE_NOTIFY),
+ {BinMsg, _} =
+ encode_alert(Alert, Version, ConnectionStates),
+ Transport:send(Socket, BinMsg);
+ _ ->
+ ok
+ end,
+
+ case Transport:shutdown(Socket, How0) of
ok ->
{reply, ok, StateName, State};
Error ->
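
With the change above, shutting down the write (or both) direction now sends a close_notify warning alert before the transport socket itself is shut down, as TLS requires. Seen from the API, a hedged sketch (SslSocket is a placeholder for an established connection):

    %% Tell the peer we are done writing while keeping the read side open.
    ok = ssl:shutdown(SslSocket, write).
    %% The peer's ssl:recv/2,3 returns {error, closed} once it has consumed
    %% the data that was already in flight.
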
@@ -1056,16 +1063,9 @@ init_certificates(#ssl_options{cacertfile = CACertFile,
case ssl_manager:connection_init(CACertFile, Role) of
{ok, CertDbRef, CacheRef} ->
init_certificates(CertDbRef, CacheRef, CertFile, Role);
- {error, {badmatch, _Error}} ->
- Report = io_lib:format("SSL: Error ~p Initializing: ~p ~n",
- [_Error, CACertFile]),
- error_logger:error_report(Report),
- throw(ecacertfile);
- {error, _Error} ->
- Report = io_lib:format("SSL: Error ~p Initializing: ~p ~n",
- [_Error, CACertFile]),
- error_logger:error_report(Report),
- throw(ecacertfile)
+ {error, Reason} ->
+ handle_file_error(?LINE, error, Reason, CACertFile, ecacertfile,
+ erlang:get_stacktrace())
end.
init_certificates(CertDbRef, CacheRef, CertFile, client) ->
@@ -1081,59 +1081,56 @@ init_certificates(CertDbRef, CacheRef, CertFile, server) ->
[OwnCert] = ssl_certificate:file_to_certificats(CertFile),
{ok, CertDbRef, CacheRef, OwnCert}
catch
- _E:{badmatch, _R={error,_}} ->
- Report = io_lib:format("SSL: ~p: ~p:~p ~s~n ~p~n",
- [?LINE, _E,_R, CertFile,
- erlang:get_stacktrace()]),
- error_logger:error_report(Report),
- throw(ecertfile);
- _E:_R ->
- Report = io_lib:format("SSL: ~p: ~p:~p ~s~n ~p~n",
- [?LINE, _E,_R, CertFile,
- erlang:get_stacktrace()]),
- error_logger:error_report(Report),
- throw(ecertfile)
+ Error:Reason ->
+ handle_file_error(?LINE, Error, Reason, CertFile, ecertfile,
+ erlang:get_stacktrace())
end.
init_private_key(undefined, "", _Password, client) ->
undefined;
init_private_key(undefined, KeyFile, Password, _) ->
- try
- {ok, List} = ssl_manager:cache_pem_file(KeyFile),
- [Der] = [Der || Der = {PKey, _ , _} <- List,
- PKey =:= rsa_private_key orelse
- PKey =:= dsa_private_key],
- {ok, Decoded} = public_key:decode_private_key(Der,Password),
- Decoded
- catch
- _E:{badmatch, _R={error,_}} ->
- Report = io_lib:format("SSL: ~p: ~p:~p ~s~n ~p~n",
- [?LINE, _E,_R, KeyFile,
- erlang:get_stacktrace()]),
- error_logger:error_report(Report),
- throw(ekeyfile);
- _E:_R ->
- Report = io_lib:format("SSL: ~p: ~p:~p ~s~n ~p~n",
- [?LINE, _E,_R, KeyFile,
- erlang:get_stacktrace()]),
- error_logger:error_report(Report),
- throw(ekeyfile)
+ case ssl_manager:cache_pem_file(KeyFile) of
+ {ok, List} ->
+ [Der] = [Der || Der = {PKey, _ , _} <- List,
+ PKey =:= rsa_private_key orelse
+ PKey =:= dsa_private_key],
+ {ok, Decoded} = public_key:decode_private_key(Der,Password),
+ Decoded;
+ {error, Reason} ->
+ handle_file_error(?LINE, error, Reason, KeyFile, ekeyfile,
+ erlang:get_stacktrace())
end;
+
init_private_key(PrivateKey, _, _,_) ->
PrivateKey.
+handle_file_error(Line, Error, {badmatch, Reason}, File, Throw, Stack) ->
+ file_error(Line, Error, Reason, File, Throw, Stack);
+handle_file_error(Line, Error, Reason, File, Throw, Stack) ->
+ file_error(Line, Error, Reason, File, Throw, Stack).
+
+file_error(Line, Error, Reason, File, Throw, Stack) ->
+ Report = io_lib:format("SSL: ~p: ~p:~p ~s~n ~p~n",
+ [Line, Error, Reason, File, Stack]),
+ error_logger:error_report(Report),
+ throw(Throw).
+
init_diffie_hellman(_, client) ->
undefined;
init_diffie_hellman(undefined, _) ->
?DEFAULT_DIFFIE_HELLMAN_PARAMS;
init_diffie_hellman(DHParamFile, server) ->
- {ok, List} = ssl_manager:cache_pem_file(DHParamFile),
- case [Der || Der = {dh_params, _ , _} <- List] of
- [Der] ->
- {ok, Decoded} = public_key:decode_dhparams(Der),
- Decoded;
- [] ->
- ?DEFAULT_DIFFIE_HELLMAN_PARAMS
+ case ssl_manager:cache_pem_file(DHParamFile) of
+ {ok, List} ->
+ case [Der || Der = {dh_params, _ , _} <- List] of
+ [Der] ->
+ {ok, Decoded} = public_key:decode_dhparams(Der),
+ Decoded;
+ [] ->
+ ?DEFAULT_DIFFIE_HELLMAN_PARAMS
+ end;
+ {error, Reason} ->
+ handle_file_error(?LINE, error, Reason, DHParamFile, edhfile, erlang:get_stacktrace())
end.
sync_send_all_state_event(FsmPid, Event) ->
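
Both error shapes, a failed match ({badmatch, {error, Reason}}) and a plain {error, Reason} return, now funnel into one helper that logs a report and throws a single tag (ecacertfile, ecertfile, ekeyfile or edhfile). Assuming those tags still surface as error returns from the API, as for the other file errors, a caller might see something like the following (file names and handler atoms are hypothetical):

    case ssl:listen(9999, [{certfile, "server.pem"}, {keyfile, "missing.pem"}]) of
        {ok, ListenSocket} -> {ok, ListenSocket};
        {error, ekeyfile}  -> handle_missing_key_file;
        {error, _Other}    -> handle_other_error
    end.
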
@@ -1147,6 +1144,8 @@ sync_send_all_state_event(FsmPid, Event, Timeout) ->
exit:{timeout, _} ->
{error, timeout};
exit:{normal, _} ->
+ {error, closed};
+ exit:{shutdown, _} ->
{error, closed}
end.
@@ -1191,15 +1190,18 @@ verify_client_cert(#state{client_certificate_requested = true, role = client,
case ssl_handshake:client_certificate_verify(OwnCert, MasterSecret,
Version, KeyAlg,
PrivateKey, Hashes0) of
- ignore -> %% No key or cert or fixed_diffie_hellman
- State;
- Verified ->
+ #certificate_verify{} = Verified ->
{BinVerified, ConnectionStates1, Hashes1} =
encode_handshake(Verified, KeyAlg, Version,
ConnectionStates0, Hashes0),
Transport:send(Socket, BinVerified),
State#state{connection_states = ConnectionStates1,
- tls_handshake_hashes = Hashes1}
+ tls_handshake_hashes = Hashes1};
+ ignore ->
+ State;
+ #alert{} = Alert ->
+ handle_own_alert(Alert, Version, certify, State)
+
end;
verify_client_cert(#state{client_certificate_requested = false} = State) ->
State.
@@ -1290,8 +1292,7 @@ server_hello(ServerHello, #state{transport_cb = Transport,
connection_states = ConnectionStates0,
tls_handshake_hashes = Hashes0} = State) ->
CipherSuite = ServerHello#server_hello.cipher_suite,
- {KeyAlgorithm, _, _, _} = ssl_cipher:suite_definition(CipherSuite),
- %% Version = ServerHello#server_hello.server_version, TODO should be checked
+ {KeyAlgorithm, _, _} = ssl_cipher:suite_definition(CipherSuite),
{BinMsg, ConnectionStates1, Hashes1} =
encode_handshake(ServerHello, Version, ConnectionStates0, Hashes0),
Transport:send(Socket, BinMsg),
@@ -1333,19 +1334,8 @@ certify_server(#state{transport_cb = Transport,
throw(Alert)
end.
-key_exchange(#state{role = server, key_algorithm = Algo} = State)
- when Algo == rsa;
- Algo == dh_dss;
- Algo == dh_rsa ->
+key_exchange(#state{role = server, key_algorithm = rsa} = State) ->
State;
-
-%% Remove or uncomment when we decide if to support export cipher suites
-%%key_exchange(#state{role = server, key_algorithm = rsa_export} = State) ->
- %% TODO when the public key in the server certificate is
- %% less than or equal to 512 bits in length dont send key_exchange
- %% but do it otherwise
-%% State;
-
key_exchange(#state{role = server, key_algorithm = Algo,
diffie_hellman_params = Params,
private_key = PrivateKey,
@@ -1396,7 +1386,6 @@ key_exchange(#state{role = client,
Transport:send(Socket, BinMsg),
State#state{connection_states = ConnectionStates1,
tls_handshake_hashes = Hashes1};
-
key_exchange(#state{role = client,
connection_states = ConnectionStates0,
key_algorithm = Algorithm,
@@ -1415,9 +1404,6 @@ key_exchange(#state{role = client,
State#state{connection_states = ConnectionStates1,
tls_handshake_hashes = Hashes1}.
-%% key_algorithm = dh_rsa | dh_dss are not supported. If we want to
-%% support it we need a key_exchange clause for it here.
-
rsa_key_exchange(PremasterSecret, PublicKeyInfo = {Algorithm, _, _})
when Algorithm == ?rsaEncryption;
Algorithm == ?md2WithRSAEncryption;
@@ -1429,20 +1415,6 @@ rsa_key_exchange(PremasterSecret, PublicKeyInfo = {Algorithm, _, _})
rsa_key_exchange(_, _) ->
throw (?ALERT_REC(?FATAL,?HANDSHAKE_FAILURE)).
-%% Uncomment if we decide to support cipher suites with key_algorithm
-%% dh_rsa and dh_dss. Could also be removed if we decide support for
-%% this will not be needed. Not supported by openssl!
-%% dh_key_exchange(OwnCert, DhKeys, true) ->
-%% case public_key:pkix_is_fixed_dh_cert(OwnCert) of
-%% true ->
-%% ssl_handshake:key_exchange(client, fixed_diffie_hellman);
-%% false ->
-%% {DhPubKey, _} = DhKeys,
-%% ssl_handshake:key_exchange(client, {dh, DhPubKey})
-%% end;
-%% dh_key_exchange(_, {DhPubKey, _}, false) ->
-%% ssl_handshake:key_exchange(client, {dh, DhPubKey}).
-
request_client_cert(#state{ssl_options = #ssl_options{verify = verify_peer},
connection_states = ConnectionStates0,
cert_db_ref = CertDbRef,
@@ -1520,15 +1492,15 @@ handle_server_key(
SecParams = ConnectionState#connection_state.security_parameters,
#security_parameters{client_random = ClientRandom,
server_random = ServerRandom} = SecParams,
- Hash = ssl_handshake:server_key_exchange_hash(KeyAlgo,
- <<ClientRandom/binary,
+ Plain = ssl_handshake:server_key_exchange_plain(KeyAlgo,
+ <<ClientRandom/binary,
ServerRandom/binary,
- ?UINT16(PLen), P/binary,
- ?UINT16(GLen), G/binary,
- ?UINT16(YLen),
+ ?UINT16(PLen), P/binary,
+ ?UINT16(GLen), G/binary,
+ ?UINT16(YLen),
ServerPublicDhKey/binary>>),
-
- case verify_dh_params(Signed, Hash, PubKeyInfo) of
+
+ case verify_dh_params(Signed, Plain, PubKeyInfo) of
true ->
PMpint = mpint_binary(P),
GMpint = mpint_binary(G),
@@ -1552,14 +1524,18 @@ handle_server_key(
?ALERT_REC(?FATAL,?HANDSHAKE_FAILURE)
end.
-verify_dh_params(Signed, Hash, {?rsaEncryption, PubKey, _PubKeyparams}) ->
+
+verify_dh_params(Signed, Hashes, {?rsaEncryption, PubKey, _PubKeyParams}) ->
case public_key:decrypt_public(Signed, PubKey,
[{rsa_pad, rsa_pkcs1_padding}]) of
- Hash ->
+ Hashes ->
true;
_ ->
false
- end.
+ end;
+verify_dh_params(Signed, Plain, {?'id-dsa', PublicKey, PublicKeyParams}) ->
+ public_key:verify_signature(Plain, sha, Signed, PublicKey, PublicKeyParams).
+
encode_alert(#alert{} = Alert, Version, ConnectionStates) ->
?DBG_TERM(Alert),
@@ -1749,13 +1725,7 @@ header(N, Binary) ->
<<?BYTE(ByteN), NewBinary/binary>> = Binary,
[ByteN | header(N-1, NewBinary)].
-%% tcp_closed
-send_or_reply(false, _Pid, undefined, _Data) ->
- Report = io_lib:format("SSL(debug): Unexpected Data ~p ~n",[_Data]),
- error_logger:error_report(Report),
- erlang:error({badarg, _Pid, undefined, _Data}),
- ok;
-send_or_reply(false, _Pid, From, Data) ->
+send_or_reply(false, _Pid, From, Data) when From =/= undefined ->
gen_fsm:reply(From, Data);
send_or_reply(_, Pid, _From, Data) ->
send_user(Pid, Data).
@@ -1768,6 +1738,23 @@ opposite_role(server) ->
send_user(Pid, Msg) ->
Pid ! Msg.
+handle_tls_handshake(Handle, StateName, #state{tls_packets = [Packet]} = State) ->
+ FsmReturn = {next_state, StateName, State#state{tls_packets = []}},
+ Handle(Packet, FsmReturn);
+
+handle_tls_handshake(Handle, StateName, #state{tls_packets = [Packet | Packets]} = State0) ->
+ FsmReturn = {next_state, StateName, State0#state{tls_packets = Packets}},
+ case Handle(Packet, FsmReturn) of
+ {next_state, NextStateName, State} ->
+ handle_tls_handshake(Handle, NextStateName, State);
+ {stop, _,_} = Stop ->
+ Stop
+ end.
+
+next_state(_, #alert{} = Alert, #state{negotiated_version = Version} = State) ->
+ handle_own_alert(Alert, Version, decipher_error, State),
+ {stop, normal, State};
+
next_state(Next, no_record, State) ->
{next_state, Next, State};
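
handle_tls_handshake above replaces a lists:foldl over the decoded handshake packets with explicit recursion, so that a {stop, _, _} return ends processing immediately instead of being threaded through the remaining packets. A small stand-alone sketch of that control flow (module name illustrative):

-module(tls_packet_sketch).
-export([handle_packets/3]).

%% Apply Handle to each packet; stop as soon as a handler says stop.
handle_packets(_Handle, FsmReturn, []) ->
    FsmReturn;
handle_packets(Handle, FsmReturn, [Packet | Packets]) ->
    case Handle(Packet, FsmReturn) of
        {next_state, _StateName, _State} = Next ->
            handle_packets(Handle, Next, Packets);
        {stop, _, _} = Stop ->
            Stop    %% unlike lists:foldl/3, no further packets are touched
    end.
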
@@ -1802,8 +1789,8 @@ next_state(StateName, #ssl_tls{type = ?HANDSHAKE, fragment = Data},
end,
try
{Packets, Buf} = ssl_handshake:get_tls_handshake(Data,Buf0, KeyAlg,Version),
- Start = {next_state, StateName, State0#state{tls_handshake_buffer = Buf}},
- lists:foldl(Handle, Start, Packets)
+ State = State0#state{tls_packets = Packets, tls_handshake_buffer = Buf},
+ handle_tls_handshake(Handle, StateName, State)
catch throw:#alert{} = Alert ->
handle_own_alert(Alert, Version, StateName, State0),
{stop, normal, State0}
@@ -1840,13 +1827,19 @@ next_tls_record(Data, #state{tls_record_buffer = Buf0,
Alert
end.
-next_record(#state{tls_cipher_texts = [], socket = Socket} = State) ->
+next_record(#state{tls_packets = [], tls_cipher_texts = [], socket = Socket} = State) ->
inet:setopts(Socket, [{active,once}]),
{no_record, State};
-next_record(#state{tls_cipher_texts = [CT | Rest],
+next_record(#state{tls_packets = [], tls_cipher_texts = [CT | Rest],
connection_states = ConnStates0} = State) ->
- {Plain, ConnStates} = ssl_record:decode_cipher_text(CT, ConnStates0),
- {Plain, State#state{tls_cipher_texts = Rest, connection_states = ConnStates}}.
+ case ssl_record:decode_cipher_text(CT, ConnStates0) of
+ {Plain, ConnStates} ->
+ {Plain, State#state{tls_cipher_texts = Rest, connection_states = ConnStates}};
+ #alert{} = Alert ->
+ {Alert, State}
+ end;
+next_record(State) ->
+ {no_record, State}.
next_record_if_active(State =
#state{socket_options =
@@ -2016,34 +2009,19 @@ handle_alerts(_, {stop, _, _} = Stop) ->
handle_alerts([Alert | Alerts], {next_state, StateName, State}) ->
handle_alerts(Alerts, handle_alert(Alert, StateName, State)).
-handle_alert(#alert{level = ?FATAL} = Alert, connection,
- #state{from = From, user_application = {_Mon, Pid},
- log_alert = Log,
- host = Host, port = Port, session = Session,
- role = Role, socket_options = Opts} = State) ->
- invalidate_session(Role, Host, Port, Session),
- log_alert(Log, connection, Alert),
- alert_user(Opts#socket_options.active, Pid, From, Alert, Role),
- {stop, normal, State};
-
-handle_alert(#alert{level = ?WARNING, description = ?CLOSE_NOTIFY} = Alert,
- connection, #state{from = From,
- role = Role,
- user_application = {_Mon, Pid},
- socket_options = Opts} = State) ->
- alert_user(Opts#socket_options.active, Pid, From, Alert, Role),
- {stop, normal, State};
-
handle_alert(#alert{level = ?FATAL} = Alert, StateName,
#state{from = From, host = Host, port = Port, session = Session,
- log_alert = Log, role = Role} = State) ->
+ user_application = {_Mon, Pid},
+ log_alert = Log, role = Role, socket_options = Opts} = State) ->
invalidate_session(Role, Host, Port, Session),
log_alert(Log, StateName, Alert),
- alert_user(From, Alert, Role),
+ alert_user(StateName, Opts, Pid, From, Alert, Role),
{stop, normal, State};
+
handle_alert(#alert{level = ?WARNING, description = ?CLOSE_NOTIFY} = Alert,
- _, #state{from = From, role = Role} = State) ->
- alert_user(From, Alert, Role),
+ StateName, #state{from = From, role = Role,
+ user_application = {_Mon, Pid}, socket_options = Opts} = State) ->
+ alert_user(StateName, Opts, Pid, From, Alert, Role),
{stop, normal, State};
handle_alert(#alert{level = ?WARNING, description = ?NO_RENEGOTIATION} = Alert, StateName,
@@ -2066,6 +2044,11 @@ handle_alert(#alert{level = ?WARNING, description = ?USER_CANCELED} = Alert, Sta
{Record, State} = next_record(State0),
next_state(StateName, Record, State).
+alert_user(connection, Opts, Pid, From, Alert, Role) ->
+ alert_user(Opts#socket_options.active, Pid, From, Alert, Role);
+alert_user(_, _, _, From, Alert, Role) ->
+ alert_user(From, Alert, Role).
+
alert_user(From, Alert, Role) ->
alert_user(false, no_pid, From, Alert, Role).
@@ -2085,13 +2068,13 @@ alert_user(Active, Pid, From, Alert, Role) ->
{ssl_error, sslsocket(), ReasonCode})
end.
-log_alert(true, StateName, Alert) ->
+log_alert(true, Info, Alert) ->
Txt = ssl_alert:alert_txt(Alert),
- error_logger:format("SSL: ~p: ~s\n", [StateName, Txt]);
+ error_logger:format("SSL: ~p: ~s\n", [Info, Txt]);
log_alert(false, _, _) ->
ok.
-handle_own_alert(Alert, Version, StateName,
+handle_own_alert(Alert, Version, Info,
#state{transport_cb = Transport,
socket = Socket,
from = User,
@@ -2101,25 +2084,24 @@ handle_own_alert(Alert, Version, StateName,
try %% Try to tell the other side
{BinMsg, _} =
encode_alert(Alert, Version, ConnectionStates),
+ linux_workaround_transport_delivery_problems(Alert, Socket),
Transport:send(Socket, BinMsg)
catch _:_ -> %% Can crash if we are in a uninitialized state
ignore
end,
try %% Try to tell the local user
- log_alert(Log, StateName, Alert),
+ log_alert(Log, Info, Alert),
alert_user(User, Alert, Role)
catch _:_ ->
ok
end.
-handle_unexpected_message(_Msg, StateName, #state{negotiated_version = Version} = State) ->
+handle_unexpected_message(Msg, Info, #state{negotiated_version = Version} = State) ->
Alert = ?ALERT_REC(?FATAL,?UNEXPECTED_MESSAGE),
- handle_own_alert(Alert, Version, StateName, State),
+ handle_own_alert(Alert, Version, {Info, Msg}, State),
{stop, normal, State}.
-make_premaster_secret({MajVer, MinVer}, Alg) when Alg == rsa;
- Alg == dh_dss;
- Alg == dh_rsa ->
+make_premaster_secret({MajVer, MinVer}, rsa) ->
Rand = crypto:rand_bytes(?NUM_OF_PREMASTERSECRET_BYTES-2),
<<?BYTE(MajVer), ?BYTE(MinVer), Rand/binary>>;
make_premaster_secret(_, _) ->
@@ -2176,6 +2158,19 @@ notify_renegotiater(_) ->
ok.
workaround_transport_delivery_problems(Socket, Transport) ->
+ %% Standard trick to try to make sure all
+ %% data sent to the tcp port is really sent
+ %% before the tcp port is closed.
inet:setopts(Socket, [{active, false}]),
Transport:shutdown(Socket, write),
Transport:recv(Socket, 0).
+
+linux_workaround_transport_delivery_problems(#alert{level = ?FATAL}, Socket) ->
+ case os:type() of
+ {unix, linux} ->
+ inet:setopts(Socket, [{nodelay, true}]);
+ _ ->
+ ok
+ end;
+linux_workaround_transport_delivery_problems(_, _) ->
+ ok.
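
The two workaround functions at the end of this file deal with TCP delivery: the first flushes pending output by half-closing the socket and draining it before close, the second disables Nagle on Linux so a final fatal alert is pushed out immediately. A hedged, stand-alone sketch of both tricks for a plain gen_tcp socket (names illustrative):

-module(tcp_flush_sketch).
-export([flush_and_close/1, nodelay_before_fatal_send/1]).

%% Make sure queued output reaches the peer before the port is closed.
flush_and_close(Socket) ->
    inet:setopts(Socket, [{active, false}]),  %% stop message delivery
    gen_tcp:shutdown(Socket, write),          %% half-close: our side is flushed
    gen_tcp:recv(Socket, 0),                  %% drain until the peer closes
    gen_tcp:close(Socket).

%% On Linux, turn off Nagle so a last small message is not left coalescing.
nodelay_before_fatal_send(Socket) ->
    case os:type() of
        {unix, linux} -> inet:setopts(Socket, [{nodelay, true}]);
        _             -> ok
    end.
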
diff --git a/lib/ssl/src/ssl_handshake.erl b/lib/ssl/src/ssl_handshake.erl
index 54938e0fbc..c8245e2fb4 100644
--- a/lib/ssl/src/ssl_handshake.erl
+++ b/lib/ssl/src/ssl_handshake.erl
@@ -31,11 +31,11 @@
-include("ssl_debug.hrl").
-include_lib("public_key/include/public_key.hrl").
--export([master_secret/4, client_hello/5, server_hello/4, hello/4,
+-export([master_secret/4, client_hello/6, server_hello/4, hello/4,
hello_request/0, certify/7, certificate/3,
client_certificate_verify/6,
certificate_verify/6, certificate_request/2,
- key_exchange/2, server_key_exchange_hash/2, finished/4,
+ key_exchange/2, server_key_exchange_plain/2, finished/4,
verify_connection/5,
get_tls_handshake/4,
server_hello_done/0, sig_alg/1,
@@ -46,7 +46,7 @@
%% Internal application API
%%====================================================================
%%--------------------------------------------------------------------
-%% Function: client_hello(Host, Port, ConnectionStates, SslOpts) ->
+%% Function: client_hello(Host, Port, ConnectionStates, SslOpts, Cert, Renegotiation) ->
%% #client_hello{}
%% Host
%% Port
@@ -56,8 +56,8 @@
%% Description: Creates a client hello message.
%%--------------------------------------------------------------------
client_hello(Host, Port, ConnectionStates, #ssl_options{versions = Versions,
- ciphers = Ciphers}
- = SslOpts, Renegotiation) ->
+ ciphers = UserSuites}
+ = SslOpts, Cert, Renegotiation) ->
Fun = fun(Version) ->
ssl_record:protocol_version(Version)
@@ -65,7 +65,8 @@ client_hello(Host, Port, ConnectionStates, #ssl_options{versions = Versions,
Version = ssl_record:highest_protocol_version(lists:map(Fun, Versions)),
Pending = ssl_record:pending_connection_state(ConnectionStates, read),
SecParams = Pending#connection_state.security_parameters,
-
+ Ciphers = available_suites(Cert, UserSuites, Version),
+
Id = ssl_manager:client_session_id(Host, Port, SslOpts),
#client_hello{session_id = Id,
@@ -128,16 +129,21 @@ hello(#server_hello{cipher_suite = CipherSuite, server_version = Version,
session_id = SessionId, renegotiation_info = Info},
#ssl_options{secure_renegotiate = SecureRenegotation},
ConnectionStates0, Renegotiation) ->
-
- case handle_renegotiation_info(client, Info, ConnectionStates0,
- Renegotiation, SecureRenegotation, []) of
- {ok, ConnectionStates1} ->
- ConnectionStates =
- hello_pending_connection_states(client, CipherSuite, Random,
- Compression, ConnectionStates1),
- {Version, SessionId, ConnectionStates};
- #alert{} = Alert ->
- Alert
+
+ case ssl_record:is_acceptable_version(Version) of
+ true ->
+ case handle_renegotiation_info(client, Info, ConnectionStates0,
+ Renegotiation, SecureRenegotation, []) of
+ {ok, ConnectionStates1} ->
+ ConnectionStates =
+ hello_pending_connection_states(client, CipherSuite, Random,
+ Compression, ConnectionStates1),
+ {Version, SessionId, ConnectionStates};
+ #alert{} = Alert ->
+ Alert
+ end;
+ false ->
+ ?ALERT_REC(?FATAL, ?PROTOCOL_VERSION)
end;
hello(#client_hello{client_version = ClientVersion, random = Random,
@@ -145,14 +151,14 @@ hello(#client_hello{client_version = ClientVersion, random = Random,
renegotiation_info = Info} = Hello,
#ssl_options{versions = Versions,
secure_renegotiate = SecureRenegotation} = SslOpts,
- {Port, Session0, Cache, CacheCb, ConnectionStates0}, Renegotiation) ->
+ {Port, Session0, Cache, CacheCb, ConnectionStates0, Cert}, Renegotiation) ->
Version = select_version(ClientVersion, Versions),
case ssl_record:is_acceptable_version(Version) of
true ->
{Type, #session{cipher_suite = CipherSuite,
compression_method = Compression} = Session}
= select_session(Hello, Port, Session0, Version,
- SslOpts, Cache, CacheCb),
+ SslOpts, Cache, CacheCb, Cert),
case CipherSuite of
no_suite ->
?ALERT_REC(?FATAL, ?INSUFFICIENT_SECURITY);
@@ -282,7 +288,7 @@ client_certificate_verify(OwnCert, MasterSecret, Version, Algorithm,
PrivateKey, {Hashes0, _}) ->
case public_key:pkix_is_fixed_dh_cert(OwnCert) of
true ->
- ignore;
+ ?ALERT_REC(?FATAL, ?UNSUPPORTED_CERTIFICATE);
false ->
Hashes =
calc_certificate_verify(Version, MasterSecret,
@@ -302,7 +308,6 @@ client_certificate_verify(OwnCert, MasterSecret, Version, Algorithm,
certificate_verify(Signature, {_, PublicKey, _}, Version,
MasterSecret, Algorithm, {_, Hashes0})
when Algorithm == rsa;
- Algorithm == dh_rsa;
Algorithm == dhe_rsa ->
Hashes = calc_certificate_verify(Version, MasterSecret,
Algorithm, Hashes0),
@@ -312,8 +317,12 @@ certificate_verify(Signature, {_, PublicKey, _}, Version,
valid;
_ ->
?ALERT_REC(?FATAL, ?BAD_CERTIFICATE)
- end.
-%% TODO dsa clause
+ end;
+certificate_verify(Signature, {_, PublicKey, PublicKeyParams}, Version,
+ MasterSecret, dhe_dss = Algorithm, {_, Hashes0}) ->
+ Hashes = calc_certificate_verify(Version, MasterSecret,
+ Algorithm, Hashes0),
+ public_key:verify_signature(Hashes, sha, Signature, PublicKey, PublicKeyParams).
%%--------------------------------------------------------------------
%% Function: certificate_request(ConnectionStates, CertDbRef) ->
@@ -346,20 +355,13 @@ key_exchange(client, {premaster_secret, Secret, {_, PublicKey, _}}) ->
encrypted_premaster_secret(Secret, PublicKey),
#client_key_exchange{exchange_keys = EncPremasterSecret};
-%% Uncomment if dh_rsa and dh_dss cipher suites should
-%% be supported.
-%% key_exchange(client, fixed_diffie_hellman) ->
-%% #client_key_exchange{exchange_keys =
-%% #client_diffie_hellman_public{
-%% dh_public = <<>>
-%% }};
key_exchange(client, {dh, <<?UINT32(Len), PublicKey:Len/binary>>}) ->
#client_key_exchange{
exchange_keys = #client_diffie_hellman_public{
dh_public = PublicKey}
};
-key_exchange(server, {dh, {<<?UINT32(_), PublicKey/binary>>, _},
+key_exchange(server, {dh, {<<?UINT32(Len), PublicKey:Len/binary>>, _},
#'DHParameter'{prime = P, base = G},
KeyAlgo, ClientRandom, ServerRandom, PrivateKey}) ->
<<?UINT32(_), PBin/binary>> = crypto:mpint(P),
@@ -368,15 +370,14 @@ key_exchange(server, {dh, {<<?UINT32(_), PublicKey/binary>>, _},
GLen = byte_size(GBin),
YLen = byte_size(PublicKey),
ServerDHParams = #server_dh_params{dh_p = PBin,
- dh_g = GBin, dh_y = PublicKey},
-
- Hash =
- server_key_exchange_hash(KeyAlgo, <<ClientRandom/binary,
- ServerRandom/binary,
- ?UINT16(PLen), PBin/binary,
- ?UINT16(GLen), GBin/binary,
- ?UINT16(YLen), PublicKey/binary>>),
- Signed = digitally_signed(Hash, PrivateKey),
+ dh_g = GBin, dh_y = PublicKey},
+ Plain =
+ server_key_exchange_plain(KeyAlgo, <<ClientRandom/binary,
+ ServerRandom/binary,
+ ?UINT16(PLen), PBin/binary,
+ ?UINT16(GLen), GBin/binary,
+ ?UINT16(YLen), PublicKey/binary>>),
+ Signed = digitally_signed(Plain, PrivateKey),
#server_key_exchange{params = ServerDHParams,
signed_params = Signed}.
@@ -527,18 +528,12 @@ path_validation_alert(_, _) ->
?ALERT_REC(?FATAL, ?HANDSHAKE_FAILURE).
select_session(Hello, Port, Session, Version,
- #ssl_options{ciphers = UserSuites} = SslOpts, Cache, CacheCb) ->
+ #ssl_options{ciphers = UserSuites} = SslOpts, Cache, CacheCb, Cert) ->
SuggestedSessionId = Hello#client_hello.session_id,
SessionId = ssl_manager:server_session_id(Port, SuggestedSessionId,
SslOpts),
- Suites = case UserSuites of
- [] ->
- ssl_cipher:suites(Version);
- _ ->
- UserSuites
- end,
-
+ Suites = available_suites(Cert, UserSuites, Version),
case ssl_session:is_new(SuggestedSessionId, SessionId) of
true ->
CipherSuite =
@@ -552,7 +547,14 @@ select_session(Hello, Port, Session, Version,
{resumed, CacheCb:lookup(Cache, {Port, SessionId})}
end.
-
+available_suites(Cert, UserSuites, Version) ->
+ case UserSuites of
+ [] ->
+ ssl_cipher:filter(Cert, ssl_cipher:suites(Version));
+ _ ->
+ ssl_cipher:filter(Cert, UserSuites)
+ end.
+
cipher_suites(Suites, false) ->
[?TLS_EMPTY_RENEGOTIATION_INFO_SCSV | Suites];
cipher_suites(Suites, true) ->
@@ -725,12 +727,11 @@ master_secret(Version, MasterSecret, #security_parameters{
hash_size = HashSize,
key_material_length = KML,
expanded_key_material_length = EKML,
- iv_size = IVS,
- exportable = Exportable},
+ iv_size = IVS},
ConnectionStates, Role) ->
{ClientWriteMacSecret, ServerWriteMacSecret, ClientWriteKey,
ServerWriteKey, ClientIV, ServerIV} =
- setup_keys(Version, Exportable, MasterSecret, ServerRandom,
+ setup_keys(Version, MasterSecret, ServerRandom,
ClientRandom, HashSize, KML, EKML, IVS),
?DBG_HEX(ClientWriteKey),
?DBG_HEX(ClientIV),
@@ -812,18 +813,11 @@ dec_hs(?SERVER_HELLO, <<?BYTE(Major), ?BYTE(Minor), Random:32/binary,
renegotiation_info = RenegotiationInfo};
dec_hs(?CERTIFICATE, <<?UINT24(ACLen), ASN1Certs:ACLen/binary>>, _, _) ->
#certificate{asn1_certificates = certs_to_list(ASN1Certs)};
-%% Uncomment if support for export ciphers is added.
-%% dec_hs(?SERVER_KEY_EXCHANGE, <<?UINT16(ModLen), Mod:ModLen/binary,
-%% ?UINT16(ExpLen), Exp:ExpLen/binary,
-%% ?UINT16(_), Sig/binary>>,
-%% ?KEY_EXCHANGE_RSA, _) ->
-%% #server_key_exchange{params = #server_rsa_params{rsa_modulus = Mod,
-%% rsa_exponent = Exp},
-%% signed_params = Sig};
+
dec_hs(?SERVER_KEY_EXCHANGE, <<?UINT16(PLen), P:PLen/binary,
?UINT16(GLen), G:GLen/binary,
?UINT16(YLen), Y:YLen/binary,
- ?UINT16(_), Sig/binary>>,
+ ?UINT16(Len), Sig:Len/binary>>,
?KEY_EXCHANGE_DIFFIE_HELLMAN, _) ->
#server_key_exchange{params = #server_dh_params{dh_p = P,dh_g = G,
dh_y = Y},
@@ -831,7 +825,6 @@ dec_hs(?SERVER_KEY_EXCHANGE, <<?UINT16(PLen), P:PLen/binary,
dec_hs(?CERTIFICATE_REQUEST,
<<?BYTE(CertTypesLen), CertTypes:CertTypesLen/binary,
?UINT16(CertAuthsLen), CertAuths:CertAuthsLen/binary>>, _, _) ->
- %% TODO: maybe we should chop up CertAuths into a list?
#certificate_request{certificate_types = CertTypes,
certificate_authorities = CertAuths};
dec_hs(?SERVER_HELLO_DONE, <<>>, _, _) ->
@@ -846,8 +839,7 @@ dec_hs(?CLIENT_KEY_EXCHANGE, <<?UINT16(_), PKEPMS/binary>>,
PreSecret = #encrypted_premaster_secret{premaster_secret = PKEPMS},
#client_key_exchange{exchange_keys = PreSecret};
dec_hs(?CLIENT_KEY_EXCHANGE, <<>>, ?KEY_EXCHANGE_DIFFIE_HELLMAN, _) ->
- %% TODO: Should check whether the cert already contains a suitable DH-key (7.4.7.2)
- throw(?ALERT_REC(?FATAL, implicit_public_value_encoding));
+ throw(?ALERT_REC(?FATAL, ?UNSUPPORTED_CERTIFICATE));
dec_hs(?CLIENT_KEY_EXCHANGE, <<?UINT16(DH_YLen), DH_Y:DH_YLen/binary>>,
?KEY_EXCHANGE_DIFFIE_HELLMAN, _) ->
#client_key_exchange{exchange_keys =
@@ -953,17 +945,6 @@ enc_hs(#certificate{asn1_certificates = ASN1CertList}, _Version, _) ->
ASN1Certs = certs_from_list(ASN1CertList),
ACLen = erlang:iolist_size(ASN1Certs),
{?CERTIFICATE, <<?UINT24(ACLen), ASN1Certs:ACLen/binary>>};
-%% Uncomment if support for export ciphers is added.
-%% enc_hs(#server_key_exchange{params = #server_rsa_params{rsa_modulus = Mod,
-%% rsa_exponent = Exp},
-%% signed_params = SignedParams}, _Version, _) ->
-%% ModLen = byte_size(Mod),
-%% ExpLen = byte_size(Exp),
-%% SignedLen = byte_size(SignedParams),
-%% {?SERVER_KEY_EXCHANGE, <<?UINT16(ModLen),Mod/binary,
-%% ?UINT16(ExpLen), Exp/binary,
-%% ?UINT16(SignedLen), SignedParams/binary>>
-%% };
enc_hs(#server_key_exchange{params = #server_dh_params{
dh_p = P, dh_g = G, dh_y = Y},
signed_params = SignedParams}, _Version, _) ->
@@ -1073,16 +1054,11 @@ from_2bytes(<<?UINT16(N), Rest/binary>>, Acc) ->
certificate_types({KeyExchange, _, _, _})
when KeyExchange == rsa;
- KeyExchange == dh_dss;
- KeyExchange == dh_rsa;
KeyExchange == dhe_dss;
KeyExchange == dhe_rsa ->
<<?BYTE(?RSA_SIGN), ?BYTE(?DSS_SIGN)>>;
certificate_types(_) ->
- %%TODO: Is this a good default,
- %% is there a case where we like to request
- %% a RSA_FIXED_DH or DSS_FIXED_DH
<<?BYTE(?RSA_SIGN)>>.
certificate_authorities(CertDbRef) ->
@@ -1101,7 +1077,7 @@ certificate_authorities_from_db(CertDbRef) ->
certificate_authorities_from_db(CertDbRef, no_candidate, []).
certificate_authorities_from_db(CertDbRef, PrevKey, Acc) ->
- case ssl_certificate_db:issuer_candidate(PrevKey) of
+ case ssl_manager:issuer_candidate(PrevKey) of
no_more_candidates ->
lists:reverse(Acc);
{{CertDbRef, _, _} = Key, Cert} ->
@@ -1114,9 +1090,8 @@ certificate_authorities_from_db(CertDbRef, PrevKey, Acc) ->
digitally_signed(Hashes, #'RSAPrivateKey'{} = Key) ->
public_key:encrypt_private(Hashes, Key,
[{rsa_pad, rsa_pkcs1_padding}]);
-digitally_signed(Hashes, #'DSAPrivateKey'{} = Key) ->
- public_key:sign(Hashes, Key).
-
+digitally_signed(Plain, #'DSAPrivateKey'{} = Key) ->
+ public_key:sign(Plain, Key).
calc_master_secret({3,0}, PremasterSecret, ClientRandom, ServerRandom) ->
ssl_ssl3:master_secret(PremasterSecret, ClientRandom, ServerRandom);
@@ -1125,20 +1100,15 @@ calc_master_secret({3,N},PremasterSecret, ClientRandom, ServerRandom)
when N == 1; N == 2 ->
ssl_tls1:master_secret(PremasterSecret, ClientRandom, ServerRandom).
-setup_keys({3,0}, Exportable, MasterSecret,
+setup_keys({3,0}, MasterSecret,
ServerRandom, ClientRandom, HashSize, KML, EKML, IVS) ->
- ssl_ssl3:setup_keys(Exportable, MasterSecret, ServerRandom,
+ ssl_ssl3:setup_keys(MasterSecret, ServerRandom,
ClientRandom, HashSize, KML, EKML, IVS);
-setup_keys({3,1}, _Exportable, MasterSecret,
+setup_keys({3,1}, MasterSecret,
ServerRandom, ClientRandom, HashSize, KML, _EKML, IVS) ->
ssl_tls1:setup_keys(MasterSecret, ServerRandom, ClientRandom, HashSize,
- KML, IVS);
-
-setup_keys({3,2}, _Exportable, MasterSecret,
- ServerRandom, ClientRandom, HashSize, KML, _EKML, _IVS) ->
- ssl_tls1:setup_keys(MasterSecret, ServerRandom,
- ClientRandom, HashSize, KML).
+ KML, IVS).
calc_finished({3, 0}, Role, MasterSecret, Hashes) ->
ssl_ssl3:finished(Role, MasterSecret, Hashes);
@@ -1152,32 +1122,21 @@ calc_certificate_verify({3, N}, _, Algorithm, Hashes)
when N == 1; N == 2 ->
ssl_tls1:certificate_verify(Algorithm, Hashes).
-server_key_exchange_hash(Algorithm, Value) when Algorithm == rsa;
- Algorithm == dh_rsa;
+server_key_exchange_plain(Algorithm, Value) when Algorithm == rsa;
Algorithm == dhe_rsa ->
- MD5Context = crypto:md5_init(),
- NewMD5Context = crypto:md5_update(MD5Context, Value),
- MD5 = crypto:md5_final(NewMD5Context),
-
- SHAContext = crypto:sha_init(),
- NewSHAContext = crypto:sha_update(SHAContext, Value),
- SHA = crypto:sha_final(NewSHAContext),
-
+ MD5 = crypto:md5(Value),
+ SHA = crypto:sha(Value),
<<MD5/binary, SHA/binary>>;
-server_key_exchange_hash(Algorithm, Value) when Algorithm == dh_dss;
- Algorithm == dhe_dss ->
-
- SHAContext = crypto:sha_init(),
- NewSHAContext = crypto:sha_update(SHAContext, Value),
- crypto:sha_final(NewSHAContext).
-
+server_key_exchange_plain(dhe_dss, Value) ->
+ %% Hash will be done by crypto.
+ Value.
sig_alg(dh_anon) ->
?SIGNATURE_ANONYMOUS;
-sig_alg(Alg) when Alg == dhe_rsa; Alg == rsa; Alg == dh_rsa ->
+sig_alg(Alg) when Alg == dhe_rsa; Alg == rsa ->
?SIGNATURE_RSA;
-sig_alg(Alg) when Alg == dh_dss; Alg == dhe_dss ->
+sig_alg(dhe_dss) ->
?SIGNATURE_DSA;
sig_alg(_) ->
?NULL.
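
server_key_exchange_hash/2 is renamed to server_key_exchange_plain/2 above because the DSS case no longer hashes: the DSS sign and verify primitives hash with SHA-1 themselves, so only the RSA case still builds the MD5 ++ SHA digest. A minimal sketch of that distinction, assuming the handshake parameters are already serialized into a binary (module name illustrative):

-module(key_exchange_plain_sketch).
-export([plain_to_sign/2]).

%% RSA signatures cover MD5(Params) followed by SHA(Params);
%% DSS gets the plain parameters, since crypto hashes them itself.
plain_to_sign(Algo, Params) when Algo == rsa; Algo == dhe_rsa ->
    MD5 = crypto:md5(Params),
    SHA = crypto:sha(Params),
    <<MD5/binary, SHA/binary>>;
plain_to_sign(dhe_dss, Params) ->
    Params.
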
diff --git a/lib/ssl/src/ssl_manager.erl b/lib/ssl/src/ssl_manager.erl
index 0151426d43..19bdcfa1f5 100644
--- a/lib/ssl/src/ssl_manager.erl
+++ b/lib/ssl/src/ssl_manager.erl
@@ -27,7 +27,7 @@
%% Internal application API
-export([start_link/0, start_link/1,
connection_init/2, cache_pem_file/1,
- lookup_trusted_cert/3, client_session_id/3, server_session_id/3,
+ lookup_trusted_cert/3, issuer_candidate/1, client_session_id/3, server_session_id/3,
register_session/2, register_session/3, invalidate_session/2,
invalidate_session/3]).
@@ -85,13 +85,20 @@ cache_pem_file(File) ->
%% Function:
%% Description:
%%--------------------------------------------------------------------
-lookup_trusted_cert(SerialNumber, Issuer, Ref) ->
+lookup_trusted_cert(Ref, SerialNumber, Issuer) ->
ssl_certificate_db:lookup_trusted_cert(Ref, SerialNumber, Issuer).
%%--------------------------------------------------------------------
%% Function:
%% Description:
%%--------------------------------------------------------------------
+issuer_candidate(PrevCandidateKey) ->
+ ssl_certificate_db:issuer_candidate(PrevCandidateKey).
+
+%%--------------------------------------------------------------------
+%% Function:
+%% Description:
+%%--------------------------------------------------------------------
client_session_id(Host, Port, SslOpts) ->
call({client_session_id, Host, Port, SslOpts}).
@@ -133,19 +140,19 @@ invalidate_session(Port, Session) ->
%% {stop, Reason}
%% Description: Initiates the server
%%--------------------------------------------------------------------
-init(Opts) ->
+init([Opts]) ->
process_flag(trap_exit, true),
- CacheCb = proplists:get_value(session_cache, Opts, ssl_session_cache),
+ CacheCb = proplists:get_value(session_cb, Opts, ssl_session_cache),
SessionLifeTime =
proplists:get_value(session_lifetime, Opts, ?'24H_in_sec'),
CertDb = ssl_certificate_db:create(),
- SessionCache = CacheCb:init(),
+ SessionCache = CacheCb:init(proplists:get_value(session_cb_init_args, Opts, [])),
Timer = erlang:send_after(SessionLifeTime * 1000,
self(), validate_sessions),
{ok, #state{certificate_db = CertDb,
session_cache = SessionCache,
session_cache_cb = CacheCb,
- session_lifetime = SessionLifeTime ,
+ session_lifetime = SessionLifeTime,
session_validation_timer = Timer}}.
%%--------------------------------------------------------------------
@@ -172,10 +179,8 @@ handle_call({{connection_init, TrustedcertsFile, _Role}, Pid}, _From,
{ok, Ref} = ssl_certificate_db:add_trusted_certs(Pid, TrustedcertsFile, Db),
{ok, Ref, Cache}
catch
- _:{badmatch, Error} ->
- {error, Error};
- _E:_R ->
- {error, {_R,erlang:get_stacktrace()}}
+ _:Reason ->
+ {error, Reason}
end,
{reply, Result, State};
@@ -197,14 +202,10 @@ handle_call({{cache_pem, File},Pid}, _, State = #state{certificate_db = Db}) ->
try ssl_certificate_db:cache_pem_file(Pid,File,Db) of
Result ->
{reply, Result, State}
- catch _:{badmatch, Reason} ->
- {reply, Reason, State};
- _:Reason ->
+ catch
+ _:Reason ->
{reply, {error, Reason}, State}
- end;
-
-handle_call(_,_, State) ->
- {reply, ok, State}.
+ end.
%%--------------------------------------------------------------------
%% Function: handle_cast(Msg, State) -> {noreply, State} |
%% {noreply, State, Timeout} |
@@ -332,7 +333,7 @@ init_session_validator([Cache, CacheCb, LifeTime]) ->
CacheCb:foldl(fun session_validation/2,
LifeTime, Cache).
-session_validation({{Host, Port, _}, Session}, LifeTime) ->
+session_validation({{{Host, Port}, _}, Session}, LifeTime) ->
validate_session(Host, Port, Session, LifeTime),
LifeTime;
session_validation({{Port, _}, Session}, LifeTime) ->
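
In the handle_call clauses above, the old badmatch-specific catches are reduced to a single _:Reason clause, using try ... of so that only the database call itself is protected. A small sketch of that idiom (function names illustrative):

-module(try_of_sketch).
-export([guarded_call/1]).

%% Only the expression between 'try' and 'of' is protected; the success
%% branch runs outside the catch, so its own errors are not masked.
guarded_call(Fun) ->
    try Fun() of
        Result ->
            {reply, Result}
    catch
        _:Reason ->
            {reply, {error, Reason}}
    end.
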
diff --git a/lib/ssl/src/ssl_record.erl b/lib/ssl/src/ssl_record.erl
index f9f915f13d..6b7cffaa7d 100644
--- a/lib/ssl/src/ssl_record.erl
+++ b/lib/ssl/src/ssl_record.erl
@@ -29,6 +29,7 @@
-include("ssl_internal.hrl").
-include("ssl_alert.hrl").
-include("ssl_handshake.hrl").
+-include("ssl_cipher.hrl").
-include("ssl_debug.hrl").
%% Connection state handling
@@ -410,16 +411,14 @@ protocol_version(tlsv1) ->
{3, 1};
protocol_version(sslv3) ->
{3, 0};
-protocol_version(sslv2) ->
+protocol_version(sslv2) -> %% Backwards compatibility
{2, 0};
protocol_version({3, 2}) ->
'tlsv1.1';
protocol_version({3, 1}) ->
tlsv1;
protocol_version({3, 0}) ->
- sslv3;
-protocol_version({2, 0}) ->
- sslv2.
+ sslv3.
%%--------------------------------------------------------------------
%% Function: protocol_version(Version1, Version2) -> #protocol_version{}
%% Version1 = Version2 = #protocol_version{}
@@ -467,7 +466,7 @@ highest_protocol_version(_, [Version | Rest]) ->
%%--------------------------------------------------------------------
supported_protocol_versions() ->
Fun = fun(Version) ->
- protocol_version(Version)
+ protocol_version(Version)
end,
case application:get_env(ssl, protocol_version) of
undefined ->
@@ -475,11 +474,18 @@ supported_protocol_versions() ->
{ok, []} ->
lists:map(Fun, ?DEFAULT_SUPPORTED_VERSIONS);
{ok, Vsns} when is_list(Vsns) ->
- lists:map(Fun, Vsns);
+ Versions = lists:filter(fun is_acceptable_version/1, lists:map(Fun, Vsns)),
+ supported_protocol_versions(Versions);
{ok, Vsn} ->
- [Fun(Vsn)]
+ Versions = lists:filter(fun is_acceptable_version/1, [Fun(Vsn)]),
+ supported_protocol_versions(Versions)
end.
+supported_protocol_versions([]) ->
+ ?DEFAULT_SUPPORTED_VERSIONS;
+supported_protocol_versions([_|_] = Vsns) ->
+ Vsns.
+
%%--------------------------------------------------------------------
%% Function: is_acceptable_version(Version) -> true | false
%% Version = #protocol_version{}
@@ -511,13 +517,17 @@ decode_cipher_text(CipherText, ConnnectionStates0) ->
#connection_state{compression_state = CompressionS0,
security_parameters = SecParams} = ReadState0,
CompressAlg = SecParams#security_parameters.compression_algorithm,
- {Compressed, ReadState1} = decipher(CipherText, ReadState0),
- {Plain, CompressionS1} = uncompress(CompressAlg,
- Compressed, CompressionS0),
- ConnnectionStates = ConnnectionStates0#connection_states{
- current_read = ReadState1#connection_state{
- compression_state = CompressionS1}},
- {Plain, ConnnectionStates}.
+ case decipher(CipherText, ReadState0) of
+ {Compressed, ReadState1} ->
+ {Plain, CompressionS1} = uncompress(CompressAlg,
+ Compressed, CompressionS0),
+ ConnnectionStates = ConnnectionStates0#connection_states{
+ current_read = ReadState1#connection_state{
+ compression_state = CompressionS1}},
+ {Plain, ConnnectionStates};
+ #alert{} = Alert ->
+ Alert
+ end.
%%--------------------------------------------------------------------
%%% Internal functions
@@ -532,12 +542,10 @@ initial_connection_state(ConnectionEnd) ->
}.
initial_security_params(ConnectionEnd) ->
- #security_parameters{connection_end = ConnectionEnd,
- bulk_cipher_algorithm = ?NULL,
- mac_algorithm = ?NULL,
- compression_algorithm = ?NULL,
- cipher_type = ?NULL
- }.
+ SecParams = #security_parameters{connection_end = ConnectionEnd,
+ compression_algorithm = ?NULL},
+ ssl_cipher:security_parameters(?TLS_NULL_WITH_NULL_NULL,
+ SecParams).
empty_connection_state(ConnectionEnd) ->
SecParams = empty_security_params(ConnectionEnd),
@@ -643,29 +651,37 @@ encode_tls_cipher_text(Type, {MajVer, MinVer}, Fragment) ->
cipher(Type, Version, Fragment, CS0) ->
Length = erlang:iolist_size(Fragment),
- {Hash, CS1=#connection_state{cipher_state = CipherS0,
+ {MacHash, CS1=#connection_state{cipher_state = CipherS0,
security_parameters=
#security_parameters{bulk_cipher_algorithm =
BCA}
}} =
hash_and_bump_seqno(CS0, Type, Version, Length, Fragment),
?DBG_HEX(Fragment),
- {Ciphered, CipherS1} = ssl_cipher:cipher(BCA, CipherS0, Hash, Fragment),
+ {Ciphered, CipherS1} = ssl_cipher:cipher(BCA, CipherS0, MacHash, Fragment),
?DBG_HEX(Ciphered),
CS2 = CS1#connection_state{cipher_state=CipherS1},
{Ciphered, CS2}.
decipher(TLS=#ssl_tls{type=Type, version=Version, fragment=Fragment}, CS0) ->
SP = CS0#connection_state.security_parameters,
- BCA = SP#security_parameters.bulk_cipher_algorithm, % or Cipher?
+ BCA = SP#security_parameters.bulk_cipher_algorithm,
HashSz = SP#security_parameters.hash_size,
CipherS0 = CS0#connection_state.cipher_state,
- {T, Mac, CipherS1} = ssl_cipher:decipher(BCA, HashSz, CipherS0, Fragment),
- CS1 = CS0#connection_state{cipher_state = CipherS1},
- TLength = size(T),
- {Hash, CS2} = hash_and_bump_seqno(CS1, Type, Version, TLength, Fragment),
- ok = check_hash(Hash, Mac),
- {TLS#ssl_tls{fragment = T}, CS2}.
+ case ssl_cipher:decipher(BCA, HashSz, CipherS0, Fragment, Version) of
+ {T, Mac, CipherS1} ->
+ CS1 = CS0#connection_state{cipher_state = CipherS1},
+ TLength = size(T),
+ {MacHash, CS2} = hash_and_bump_seqno(CS1, Type, Version, TLength, T),
+ case is_correct_mac(Mac, MacHash) of
+ true ->
+ {TLS#ssl_tls{fragment = T}, CS2};
+ false ->
+ ?ALERT_REC(?FATAL, ?BAD_RECORD_MAC)
+ end;
+ #alert{} = Alert ->
+ Alert
+ end.
uncompress(?NULL, Data = #ssl_tls{type = _Type,
version = _Version,
@@ -686,10 +702,12 @@ hash_and_bump_seqno(#connection_state{sequence_number = SeqNo,
Length, Fragment),
{Hash, CS0#connection_state{sequence_number = SeqNo+1}}.
-check_hash(_, _) ->
- ok. %% TODO check this
+is_correct_mac(Mac, Mac) ->
+ true;
+is_correct_mac(_M,_H) ->
+ false.
-mac_hash(?NULL, {_,_}, _MacSecret, _SeqNo, _Type,
+mac_hash({_,_}, ?NULL, _MacSecret, _SeqNo, _Type,
_Length, _Fragment) ->
<<>>;
mac_hash({3, 0}, MacAlg, MacSecret, SeqNo, Type, Length, Fragment) ->
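
is_correct_mac/2 above replaces the old check_hash stub that always returned ok. Binding the same variable twice in the clause head makes the first clause match only when the computed and received MACs are identical binaries. A tiny runnable sketch:

-module(mac_check_sketch).
-export([is_correct_mac/2, demo/0]).

is_correct_mac(Mac, Mac) -> true;   %% both arguments must be the same term
is_correct_mac(_, _)     -> false.

demo() ->
    true  = is_correct_mac(<<1,2,3>>, <<1,2,3>>),
    false = is_correct_mac(<<1,2,3>>, <<3,2,1>>),
    ok.
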
diff --git a/lib/ssl/src/ssl_session_cache.erl b/lib/ssl/src/ssl_session_cache.erl
index 4a60892235..1f2d1fc7d3 100644
--- a/lib/ssl/src/ssl_session_cache.erl
+++ b/lib/ssl/src/ssl_session_cache.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2008-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -22,8 +22,8 @@
-behaviour(ssl_session_cache_api).
--export([init/0, terminate/1, lookup/2, update/3, delete/2, foldl/3,
- select_session/2]).
+-export([init/1, terminate/1, lookup/2, update/3, delete/2, foldl/3,
+ select_session/2]).
%%--------------------------------------------------------------------
%% Function: init() -> Cache
@@ -32,7 +32,7 @@
%%
%% Description: Return table reference. Called by ssl_manager process.
%%--------------------------------------------------------------------
-init() ->
+init(_) ->
ets:new(cache_name(), [set, protected]).
%%--------------------------------------------------------------------
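
The default cache above now exports init/1 instead of init/0, so a user-supplied cache callback can receive configuration at start-up. Below is a hedged sketch of a custom callback module with the same export list; the return conventions (a session or undefined from lookup/2, a list of sessions from select_session/2) and the stored key layout are assumptions for illustration, not taken from this patch.

-module(my_session_cache).
-behaviour(ssl_session_cache_api).

-export([init/1, terminate/1, lookup/2, update/3, delete/2, foldl/3,
         select_session/2]).

%% InitArgs comes from the session_cb_init_args application environment.
init(_InitArgs) ->
    ets:new(my_ssl_sessions, [set, protected]).

terminate(Cache) ->
    ets:delete(Cache).

lookup(Cache, Key) ->
    case ets:lookup(Cache, Key) of
        [{Key, Session}] -> Session;
        []               -> undefined
    end.

update(Cache, Key, Session) ->
    ets:insert(Cache, {Key, Session}).

delete(Cache, Key) ->
    ets:delete(Cache, Key).

foldl(Fun, Acc0, Cache) ->
    ets:foldl(Fun, Acc0, Cache).

%% Assumes entries are stored as {{PartialKey, SessionId}, Session}.
select_session(Cache, PartialKey) ->
    ets:select(Cache, [{{{PartialKey, '_'}, '$1'}, [], ['$1']}]).
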
diff --git a/lib/ssl/src/ssl_session_cache_api.erl b/lib/ssl/src/ssl_session_cache_api.erl
index d2e846e9fd..f8416bf327 100644
--- a/lib/ssl/src/ssl_session_cache_api.erl
+++ b/lib/ssl/src/ssl_session_cache_api.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2008-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -25,7 +25,7 @@
behaviour_info(callbacks) ->
[
- {init, 0},
+ {init, 1},
{terminate, 1},
{lookup, 2},
{update, 3},
diff --git a/lib/ssl/src/ssl_ssl3.erl b/lib/ssl/src/ssl_ssl3.erl
index df809ce275..400298a322 100644
--- a/lib/ssl/src/ssl_ssl3.erl
+++ b/lib/ssl/src/ssl_ssl3.erl
@@ -30,7 +30,7 @@
-include("ssl_record.hrl"). % MD5 and SHA
-export([master_secret/3, finished/3, certificate_verify/3,
- mac_hash/6, setup_keys/8,
+ mac_hash/6, setup_keys/7,
suites/0]).
-compile(inline).
@@ -76,7 +76,7 @@ finished(Role, MasterSecret, {MD5Hash, SHAHash}) ->
<<MD5/binary, SHA/binary>>.
certificate_verify(Algorithm, MasterSecret, {MD5Hash, SHAHash})
- when Algorithm == rsa; Algorithm == dh_rsa; Algorithm == dhe_rsa ->
+ when Algorithm == rsa; Algorithm == dhe_rsa ->
%% md5_hash
%% MD5(master_secret + pad_2 +
%% MD5(handshake_messages + master_secret + pad_1));
@@ -88,8 +88,7 @@ certificate_verify(Algorithm, MasterSecret, {MD5Hash, SHAHash})
SHA = handshake_hash(?SHA, MasterSecret, undefined, SHAHash),
<<MD5/binary, SHA/binary>>;
-certificate_verify(Algorithm, MasterSecret, {_, SHAHash})
- when Algorithm == dh_dss; Algorithm == dhe_dss ->
+certificate_verify(dhe_dss, MasterSecret, {_, SHAHash}) ->
%% sha_hash
%% SHA(master_secret + pad_2 +
%% SHA(handshake_messages + master_secret + pad_1));
@@ -114,9 +113,7 @@ mac_hash(Method, Mac_write_secret, Seq_num, Type, Length, Fragment) ->
?DBG_HEX(Mac),
Mac.
-setup_keys(Exportable, MasterSecret, ServerRandom, ClientRandom,
- HS, KML, _EKML, IVS)
- when Exportable == no_export; Exportable == ignore ->
+setup_keys(MasterSecret, ServerRandom, ClientRandom, HS, KML, _EKML, IVS) ->
KeyBlock = generate_keyblock(MasterSecret, ServerRandom, ClientRandom,
2*(HS+KML+IVS)),
%% draft-ietf-tls-ssl-version3-00 - 6.2.2
@@ -137,79 +134,23 @@ setup_keys(Exportable, MasterSecret, ServerRandom, ClientRandom,
?DBG_HEX(ClientIV),
?DBG_HEX(ServerIV),
{ClientWriteMacSecret, ServerWriteMacSecret, ClientWriteKey,
- ServerWriteKey, ClientIV, ServerIV};
-
-setup_keys(export, MasterSecret, ServerRandom, ClientRandom,
- HS, KML, EKML, IVS) ->
- KeyBlock = generate_keyblock(MasterSecret, ServerRandom, ClientRandom,
- 2*(HS+KML)),
- %% draft-ietf-tls-ssl-version3-00 - 6.2.2
- %% Exportable encryption algorithms (for which
- %% CipherSpec.is_exportable is true) require additional processing as
- %% follows to derive their final write keys:
-
- %% final_client_write_key = MD5(client_write_key +
- %% ClientHello.random +
- %% ServerHello.random);
- %% final_server_write_key = MD5(server_write_key +
- %% ServerHello.random +
- %% ClientHello.random);
-
- %% Exportable encryption algorithms derive their IVs from the random
- %% messages:
- %% client_write_IV = MD5(ClientHello.random + ServerHello.random);
- %% server_write_IV = MD5(ServerHello.random + ClientHello.random);
-
- <<ClientWriteMacSecret:HS/binary, ServerWriteMacSecret:HS/binary,
- ClientWriteKey:KML/binary, ServerWriteKey:KML/binary>> = KeyBlock,
- <<ClientIV:IVS/binary, _/binary>> =
- hash(?MD5, [ClientRandom, ServerRandom]),
- <<ServerIV:IVS/binary, _/binary>> =
- hash(?MD5, [ServerRandom, ClientRandom]),
- <<FinalClientWriteKey:EKML/binary, _/binary>> =
- hash(?MD5, [ClientWriteKey, ClientRandom, ServerRandom]),
- <<FinalServerWriteKey:EKML/binary, _/binary>> =
- hash(?MD5, [ServerWriteKey, ServerRandom, ClientRandom]),
- ?DBG_HEX(ClientWriteMacSecret),
- ?DBG_HEX(ServerWriteMacSecret),
- ?DBG_HEX(FinalClientWriteKey),
- ?DBG_HEX(FinalServerWriteKey),
- ?DBG_HEX(ClientIV),
- ?DBG_HEX(ServerIV),
- {ClientWriteMacSecret, ServerWriteMacSecret, FinalClientWriteKey,
- FinalServerWriteKey, ClientIV, ServerIV}.
+ ServerWriteKey, ClientIV, ServerIV}.
suites() ->
[
- %% TODO: uncomment when supported
?TLS_DHE_RSA_WITH_AES_256_CBC_SHA,
- %% ?TLS_DHE_DSS_WITH_AES_256_CBC_SHA,
+ ?TLS_DHE_DSS_WITH_AES_256_CBC_SHA,
?TLS_RSA_WITH_AES_256_CBC_SHA,
?TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA,
- %% ?TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA,
+ ?TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA,
?TLS_RSA_WITH_3DES_EDE_CBC_SHA,
?TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
- %% ?TLS_DHE_DSS_WITH_AES_128_CBC_SHA,
+ ?TLS_DHE_DSS_WITH_AES_128_CBC_SHA,
?TLS_RSA_WITH_AES_128_CBC_SHA,
- %%?TLS_DHE_DSS_WITH_RC4_128_SHA, TODO: Support this?
- %% ?TLS_RSA_WITH_IDEA_CBC_SHA, Not supported: in later openssl version than OTP requires
-
+ ?TLS_RSA_WITH_IDEA_CBC_SHA,
?TLS_RSA_WITH_RC4_128_SHA,
?TLS_RSA_WITH_RC4_128_MD5,
- %%?TLS_RSA_EXPORT1024_WITH_RC4_56_MD5,
- %%?TLS_RSA_EXPORT1024_WITH_RC2_CBC_56_MD5,
- %%?TLS_RSA_EXPORT1024_WITH_DES_CBC_SHA,
- %%?TLS_DHE_DSS_EXPORT1024_WITH_DES_CBC_SHA,
- %%?TLS_RSA_EXPORT1024_WITH_RC4_56_SHA,
- %%?TLS_DHE_DSS_EXPORT1024_WITH_RC4_56_SHA,
- %%?TLS_DHE_DSS_WITH_RC4_128_SHA,
-
?TLS_RSA_WITH_DES_CBC_SHA
- %% ?TLS_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA,
- %% ?TLS_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA,
- %% ?TLS_RSA_EXPORT_WITH_DES40_CBC_SHA,
- %%?TLS_RSA_EXPORT_WITH_RC2_CBC_40_MD5,
- %%?TLS_RSA_EXPORT_WITH_RC4_40_MD5
].
%%--------------------------------------------------------------------
@@ -269,8 +210,7 @@ handshake_hash(Method, MasterSecret, Sender, HandshakeHash) ->
hash(Method, [MasterSecret, pad_2(Method), InnerHash]).
get_sender(client) -> "CLNT";
-get_sender(server) -> "SRVR";
-get_sender(none) -> "".
+get_sender(server) -> "SRVR".
generate_keyblock(MasterSecret, ServerRandom, ClientRandom, WantedLength) ->
gen(MasterSecret, [MasterSecret, ServerRandom, ClientRandom],
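
With the export cipher branch gone, setup_keys/7 above derives one key block and slices it with a single binary match. A small sketch of that partitioning step, assuming the usual client-MAC, server-MAC, client-key, server-key, client-IV, server-IV layout of the key block:

-module(keyblock_sketch).
-export([partition/4]).

%% HS, KML and IVS are the hash, key-material and IV sizes in bytes.
partition(KeyBlock, HS, KML, IVS) ->
    <<ClientWriteMacSecret:HS/binary, ServerWriteMacSecret:HS/binary,
      ClientWriteKey:KML/binary, ServerWriteKey:KML/binary,
      ClientIV:IVS/binary, ServerIV:IVS/binary, _/binary>> = KeyBlock,
    {ClientWriteMacSecret, ServerWriteMacSecret,
     ClientWriteKey, ServerWriteKey, ClientIV, ServerIV}.
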
diff --git a/lib/ssl/src/ssl_sup.erl b/lib/ssl/src/ssl_sup.erl
index bd5a02417a..b7cb5c3ab3 100644
--- a/lib/ssl/src/ssl_sup.erl
+++ b/lib/ssl/src/ssl_sup.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1998-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1998-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -40,8 +40,7 @@ start_link() ->
%%%=========================================================================
%% init([]) -> {ok, {SupFlags, [ChildSpec]}}
%%
-init([]) ->
-
+init([]) ->
%% OLD ssl - moved start to ssl.erl only if old
%% ssl is actually run!
%%Child1 = {ssl_server, {ssl_server, start_link, []},
@@ -67,7 +66,7 @@ init([]) ->
session_and_cert_manager_child_spec() ->
Opts = manager_opts(),
Name = ssl_manager,
- StartFunc = {ssl_manager, start_link, Opts},
+ StartFunc = {ssl_manager, start_link, [Opts]},
Restart = permanent,
Shutdown = 4000,
Modules = [ssl_manager],
@@ -86,11 +85,12 @@ connection_manager_child_spec() ->
manager_opts() ->
CbOpts = case application:get_env(ssl, session_cb) of
- {ok, Cb} when is_atom(Cb) ->
- [{session_cb, Cb}];
- _ ->
- []
- end,
+ {ok, Cb} when is_atom(Cb) ->
+ InitArgs = session_cb_init_args(),
+ [{session_cb, Cb}, {session_cb_init_args, InitArgs}];
+ _ ->
+ []
+ end,
case application:get_env(ssl, session_lifetime) of
{ok, Time} when is_integer(Time) ->
[{session_lifetime, Time}| CbOpts];
@@ -98,3 +98,10 @@ manager_opts() ->
CbOpts
end.
+session_cb_init_args() ->
+ case application:get_env(ssl, session_cb_init_args) of
+ {ok, Args} when is_list(Args) ->
+ Args;
+ _ ->
+ []
+ end.
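
session_cb_init_args/0 above reads an extra application environment variable, and the supervisor forwards it, together with session_cb and session_lifetime, to ssl_manager. An illustrative sys.config entry wiring these together; the callback module and its argument are hypothetical (for example the my_session_cache sketch shown earlier):

%% sys.config
[{ssl, [{session_cb,           my_session_cache},
        {session_cb_init_args, [{max_entries, 1000}]},
        {session_lifetime,     600}]}].
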
diff --git a/lib/ssl/src/ssl_tls1.erl b/lib/ssl/src/ssl_tls1.erl
index ce9a135168..70db632835 100644
--- a/lib/ssl/src/ssl_tls1.erl
+++ b/lib/ssl/src/ssl_tls1.erl
@@ -30,7 +30,7 @@
-include("ssl_debug.hrl").
-export([master_secret/3, finished/3, certificate_verify/2, mac_hash/7,
- setup_keys/5, setup_keys/6, suites/0]).
+ setup_keys/6, suites/0]).
%%====================================================================
%% Internal application API
@@ -58,14 +58,12 @@ finished(Role, MasterSecret, {MD5Hash, SHAHash}) ->
certificate_verify(Algorithm, {MD5Hash, SHAHash}) when Algorithm == rsa;
- Algorithm == dh_rsa;
Algorithm == dhe_rsa ->
MD5 = hash_final(?MD5, MD5Hash),
SHA = hash_final(?SHA, SHAHash),
<<MD5/binary, SHA/binary>>;
-certificate_verify(Algorithm, {_, SHAHash}) when Algorithm == dh_dss;
- Algorithm == dhe_dss ->
+certificate_verify(dhe_dss, {_, SHAHash}) ->
hash_final(?SHA, SHAHash).
setup_keys(MasterSecret, ServerRandom, ClientRandom, HashSize,
@@ -92,26 +90,27 @@ setup_keys(MasterSecret, ServerRandom, ClientRandom, HashSize,
{ClientWriteMacSecret, ServerWriteMacSecret, ClientWriteKey,
ServerWriteKey, ClientIV, ServerIV}.
-setup_keys(MasterSecret, ServerRandom, ClientRandom, HashSize, KeyMatLen) ->
- %% RFC 4346 - 6.3. Key calculation
- %% key_block = PRF(SecurityParameters.master_secret,
- %% "key expansion",
- %% SecurityParameters.server_random +
- %% SecurityParameters.client_random);
- %% Then the key_block is partitioned as follows:
- %% client_write_MAC_secret[SecurityParameters.hash_size]
- %% server_write_MAC_secret[SecurityParameters.hash_size]
- %% client_write_key[SecurityParameters.key_material_length]
- %% server_write_key[SecurityParameters.key_material_length]
- WantedLength = 2 * (HashSize + KeyMatLen),
- KeyBlock = prf(MasterSecret, "key expansion",
- [ServerRandom, ClientRandom], WantedLength),
- <<ClientWriteMacSecret:HashSize/binary,
- ServerWriteMacSecret:HashSize/binary,
- ClientWriteKey:KeyMatLen/binary, ServerWriteKey:KeyMatLen/binary>>
- = KeyBlock,
- {ClientWriteMacSecret, ServerWriteMacSecret, ClientWriteKey,
- ServerWriteKey, undefined, undefined}.
+%% TLS v1.1 uncomment when supported.
+%% setup_keys(MasterSecret, ServerRandom, ClientRandom, HashSize, KeyMatLen) ->
+%% %% RFC 4346 - 6.3. Key calculation
+%% %% key_block = PRF(SecurityParameters.master_secret,
+%% %% "key expansion",
+%% %% SecurityParameters.server_random +
+%% %% SecurityParameters.client_random);
+%% %% Then the key_block is partitioned as follows:
+%% %% client_write_MAC_secret[SecurityParameters.hash_size]
+%% %% server_write_MAC_secret[SecurityParameters.hash_size]
+%% %% client_write_key[SecurityParameters.key_material_length]
+%% %% server_write_key[SecurityParameters.key_material_length]
+%% WantedLength = 2 * (HashSize + KeyMatLen),
+%% KeyBlock = prf(MasterSecret, "key expansion",
+%% [ServerRandom, ClientRandom], WantedLength),
+%% <<ClientWriteMacSecret:HashSize/binary,
+%% ServerWriteMacSecret:HashSize/binary,
+%% ClientWriteKey:KeyMatLen/binary, ServerWriteKey:KeyMatLen/binary>>
+%% = KeyBlock,
+%% {ClientWriteMacSecret, ServerWriteMacSecret, ClientWriteKey,
+%% ServerWriteKey, undefined, undefined}.
mac_hash(Method, Mac_write_secret, Seq_num, Type, {Major, Minor},
Length, Fragment) ->
@@ -135,35 +134,20 @@ mac_hash(Method, Mac_write_secret, Seq_num, Type, {Major, Minor},
suites() ->
[
- %% TODO: uncomment when supported
?TLS_DHE_RSA_WITH_AES_256_CBC_SHA,
- %%?TLS_DHE_DSS_WITH_AES_256_CBC_SHA,
+ ?TLS_DHE_DSS_WITH_AES_256_CBC_SHA,
?TLS_RSA_WITH_AES_256_CBC_SHA,
?TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA,
- %% ?TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA,
+ ?TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA,
?TLS_RSA_WITH_3DES_EDE_CBC_SHA,
?TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
- %% ?TLS_DHE_DSS_WITH_AES_128_CBC_SHA,
+ ?TLS_DHE_DSS_WITH_AES_128_CBC_SHA,
?TLS_RSA_WITH_AES_128_CBC_SHA,
- %%?TLS_DHE_DSS_WITH_RC4_128_SHA, TODO: Support this?
- %% ?TLS_RSA_WITH_IDEA_CBC_SHA,
+ %%?TLS_RSA_WITH_IDEA_CBC_SHA,
?TLS_RSA_WITH_RC4_128_SHA,
?TLS_RSA_WITH_RC4_128_MD5,
- %%?TLS_RSA_EXPORT1024_WITH_RC4_56_MD5,
- %%?TLS_RSA_EXPORT1024_WITH_RC2_CBC_56_MD5,
- %%?TLS_RSA_EXPORT1024_WITH_DES_CBC_SHA,
- %%?TLS_DHE_DSS_EXPORT1024_WITH_DES_CBC_SHA,
- %%?TLS_RSA_EXPORT1024_WITH_RC4_56_SHA,
- %%?TLS_DHE_DSS_EXPORT1024_WITH_RC4_56_SHA,
- %%?TLS_DHE_DSS_WITH_RC4_128_SHA,
- %%?TLS_DHE_RSA_WITH_DES_CBC_SHA,
- %% EDH-DSS-DES-CBC-SHA TODO: ??
+ ?TLS_DHE_RSA_WITH_DES_CBC_SHA,
?TLS_RSA_WITH_DES_CBC_SHA
- %% ?TLS_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA,
- %% ?TLS_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA,
- %%?TLS_RSA_EXPORT_WITH_DES40_CBC_SHA,
- %%?TLS_RSA_EXPORT_WITH_RC2_CBC_40_MD5,
- %%?TLS_RSA_EXPORT_WITH_RC4_40_MD5
].
%%--------------------------------------------------------------------
@@ -245,7 +229,3 @@ hash_final(?MD5, Conntext) ->
crypto:md5_final(Conntext);
hash_final(?SHA, Conntext) ->
crypto:sha_final(Conntext).
-
-
-
-
diff --git a/lib/ssl/test/Makefile b/lib/ssl/test/Makefile
index bd86120c98..d35cafc47b 100644
--- a/lib/ssl/test/Makefile
+++ b/lib/ssl/test/Makefile
@@ -1,19 +1,19 @@
#
# %CopyrightBegin%
-#
-# Copyright Ericsson AB 1999-2009. All Rights Reserved.
-#
+#
+# Copyright Ericsson AB 1999-2010. All Rights Reserved.
+#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
# compliance with the License. You should have received a copy of the
# Erlang Public License along with this software. If not, it can be
# retrieved online at http://www.erlang.org/.
-#
+#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
# the License for the specific language governing rights and limitations
# under the License.
-#
+#
# %CopyrightEnd%
#
@@ -50,7 +50,8 @@ MODULES = \
old_ssl_protocol_SUITE \
old_transport_accept_SUITE \
old_ssl_dist_SUITE \
- make_certs
+ make_certs\
+ erl_make_certs
ERL_FILES = $(MODULES:%=%.erl)
diff --git a/lib/ssl/test/erl_make_certs.erl b/lib/ssl/test/erl_make_certs.erl
new file mode 100644
index 0000000000..1d2cea6c72
--- /dev/null
+++ b/lib/ssl/test/erl_make_certs.erl
@@ -0,0 +1,412 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%% Create test certificates
+
+-module(erl_make_certs).
+-include_lib("public_key/include/public_key.hrl").
+
+-export([make_cert/1, gen_rsa/1, verify_signature/3, write_pem/3]).
+-compile(export_all).
+
+%%--------------------------------------------------------------------
+%% @doc Create and return a der encoded certificate
+%% Option Default
+%% -------------------------------------------------------
+%% digest sha1
+%% validity {date(), date() + week()}
+%% version 3
+%% subject [] list of the following content
+%% {name, Name}
+%% {email, Email}
+%% {city, City}
+%% {state, State}
+%% {org, Org}
+%% {org_unit, OrgUnit}
+%% {country, Country}
+%% {serial, Serial}
+%% {title, Title}
+%% {dnQualifer, DnQ}
+%% issuer = {Issuer, IssuerKey} true (i.e. a ca cert is created)
+%% (note: IssuerKey might be {Key, Password})
+%% key = KeyFile|KeyBin|rsa|dsa Subject key; rsa or dsa generates a new key
+%%
+%%
+%% (NOTE: The generated keys are for testing only)
+%% @spec ([{::atom(), ::term()}]) -> {Cert::binary(), Key::binary()}
+%% @end
+%%--------------------------------------------------------------------
+
+make_cert(Opts) ->
+ SubjectPrivateKey = get_key(Opts),
+ {TBSCert, IssuerKey} = make_tbs(SubjectPrivateKey, Opts),
+ Cert = public_key:sign(TBSCert, IssuerKey),
+ true = verify_signature(Cert, IssuerKey, undef), %% verify that the keys were ok
+ {Cert, encode_key(SubjectPrivateKey)}.
+
+%%--------------------------------------------------------------------
+%% @doc Writes pem files in Dir with FileName ++ ".pem" and FileName ++ "_key.pem"
+%% @spec (::string(), ::string(), {Cert,Key}) -> ok
+%% @end
+%%--------------------------------------------------------------------
+write_pem(Dir, FileName, {Cert, Key = {_,_,not_encrypted}}) when is_binary(Cert) ->
+ ok = public_key:der_to_pem(filename:join(Dir, FileName ++ ".pem"), [{cert, Cert, not_encrypted}]),
+ ok = public_key:der_to_pem(filename:join(Dir, FileName ++ "_key.pem"), [Key]).
+
+%%--------------------------------------------------------------------
+%% @doc Creates an RSA key (NOTE: for testing only)
+%% the size is in bytes
+%% @spec (::integer()) -> {::atom(), ::binary(), ::opaque()}
+%% @end
+%%--------------------------------------------------------------------
+gen_rsa(Size) when is_integer(Size) ->
+ Key = gen_rsa2(Size),
+ {Key, encode_key(Key)}.
+
+%%--------------------------------------------------------------------
+%% @doc Creates a DSA key (NOTE: for testing only)
+%% the sizes are in bytes
+%% @spec (::integer()) -> {::atom(), ::binary(), ::opaque()}
+%% @end
+%%--------------------------------------------------------------------
+gen_dsa(LSize,NSize) when is_integer(LSize), is_integer(NSize) ->
+ Key = gen_dsa2(LSize, NSize),
+ {Key, encode_key(Key)}.
+
+%%--------------------------------------------------------------------
+%% @doc Verifies cert signatures
+%% @spec (::binary(), ::tuple()) -> ::boolean()
+%% @end
+%%--------------------------------------------------------------------
+verify_signature(DerEncodedCert, DerKey, KeyParams) ->
+ Key = decode_key(DerKey),
+ case Key of
+ #'RSAPrivateKey'{modulus=Mod, publicExponent=Exp} ->
+ public_key:verify_signature(DerEncodedCert,
+ #'RSAPublicKey'{modulus=Mod, publicExponent=Exp},
+ 'NULL');
+ #'DSAPrivateKey'{p=P, q=Q, g=G, y=Y} ->
+ public_key:verify_signature(DerEncodedCert, Y, #'Dss-Parms'{p=P, q=Q, g=G});
+
+ _ ->
+ public_key:verify_signature(DerEncodedCert, Key, KeyParams)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%% Implementation %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+get_key(Opts) ->
+ case proplists:get_value(key, Opts) of
+ undefined -> make_key(rsa, Opts);
+ rsa -> make_key(rsa, Opts);
+ dsa -> make_key(dsa, Opts);
+ Key ->
+ Password = proplists:get_value(password, Opts, no_passwd),
+ decode_key(Key, Password)
+ end.
+
+decode_key({Key, Pw}) ->
+ decode_key(Key, Pw);
+decode_key(Key) ->
+ decode_key(Key, no_passwd).
+
+
+decode_key(#'RSAPublicKey'{} = Key,_) ->
+ Key;
+decode_key(#'RSAPrivateKey'{} = Key,_) ->
+ Key;
+decode_key(#'DSAPrivateKey'{} = Key,_) ->
+ Key;
+decode_key(Der = {_,_,_}, Pw) ->
+ {ok, Key} = public_key:decode_private_key(Der, Pw),
+ Key;
+decode_key(FileOrDer, Pw) ->
+ {ok, [KeyInfo]} = public_key:pem_to_der(FileOrDer),
+ decode_key(KeyInfo, Pw).
+
+encode_key(Key = #'RSAPrivateKey'{}) ->
+ {ok, Der} = 'OTP-PUB-KEY':encode('RSAPrivateKey', Key),
+ {rsa_private_key, list_to_binary(Der), not_encrypted};
+encode_key(Key = #'DSAPrivateKey'{}) ->
+ {ok, Der} = 'OTP-PUB-KEY':encode('DSAPrivateKey', Key),
+ {dsa_private_key, list_to_binary(Der), not_encrypted}.
+
+make_tbs(SubjectKey, Opts) ->
+ Version = list_to_atom("v"++integer_to_list(proplists:get_value(version, Opts, 3))),
+ {Issuer, IssuerKey} = issuer(Opts, SubjectKey),
+
+ {Algo, Parameters} = sign_algorithm(IssuerKey, Opts),
+
+ SignAlgo = #'SignatureAlgorithm'{algorithm = Algo,
+ parameters = Parameters},
+
+ {#'OTPTBSCertificate'{serialNumber = trunc(random:uniform()*100000000)*10000 + 1,
+ signature = SignAlgo,
+ issuer = Issuer,
+ validity = validity(Opts),
+ subject = subject(proplists:get_value(subject, Opts),false),
+ subjectPublicKeyInfo = publickey(SubjectKey),
+ version = Version,
+ extensions = extensions(Opts)
+ }, IssuerKey}.
+
+issuer(Opts, SubjectKey) ->
+ IssuerProp = proplists:get_value(issuer, Opts, true),
+ case IssuerProp of
+ true -> %% Self signed
+ {subject(proplists:get_value(subject, Opts), true), SubjectKey};
+ {Issuer, IssuerKey} when is_binary(Issuer) ->
+ {issuer_der(Issuer), decode_key(IssuerKey)};
+ {File, IssuerKey} when is_list(File) ->
+ {ok, [{cert, Cert, _}|_]} = public_key:pem_to_der(File),
+ {issuer_der(Cert), decode_key(IssuerKey)}
+ end.
+
+issuer_der(Issuer) ->
+ {ok, Decoded} = public_key:pkix_decode_cert(Issuer, otp),
+ #'OTPCertificate'{tbsCertificate=Tbs} = Decoded,
+ #'OTPTBSCertificate'{subject=Subject} = Tbs,
+ Subject.
+
+subject(undefined, IsCA) ->
+ User = if IsCA -> "CA"; true -> os:getenv("USER") end,
+ Opts = [{email, User ++ "@erlang.org"},
+ {name, User},
+ {city, "Stockholm"},
+ {country, "SE"},
+ {org, "erlang"},
+ {org_unit, "testing dep"}],
+ subject(Opts);
+subject(Opts, _) ->
+ subject(Opts).
+
+subject(SubjectOpts) when is_list(SubjectOpts) ->
+ Encode = fun(Opt) ->
+ {Type,Value} = subject_enc(Opt),
+ [#'AttributeTypeAndValue'{type=Type, value=Value}]
+ end,
+ {rdnSequence, [Encode(Opt) || Opt <- SubjectOpts]}.
+
+%% Fill in the blanks
+subject_enc({name, Name}) -> {?'id-at-commonName', {printableString, Name}};
+subject_enc({email, Email}) -> {?'id-emailAddress', Email};
+subject_enc({city, City}) -> {?'id-at-localityName', {printableString, City}};
+subject_enc({state, State}) -> {?'id-at-stateOrProvinceName', {printableString, State}};
+subject_enc({org, Org}) -> {?'id-at-organizationName', {printableString, Org}};
+subject_enc({org_unit, OrgUnit}) -> {?'id-at-organizationalUnitName', {printableString, OrgUnit}};
+subject_enc({country, Country}) -> {?'id-at-countryName', Country};
+subject_enc({serial, Serial}) -> {?'id-at-serialNumber', Serial};
+subject_enc({title, Title}) -> {?'id-at-title', {printableString, Title}};
+subject_enc({dnQualifer, DnQ}) -> {?'id-at-dnQualifier', DnQ};
+subject_enc(Other) -> Other.
+
+
+extensions(Opts) ->
+ case proplists:get_value(extensions, Opts, []) of
+ false ->
+ asn1_NOVALUE;
+ Exts ->
+ lists:flatten([extension(Ext) || Ext <- default_extensions(Exts)])
+ end.
+
+default_extensions(Exts) ->
+ Def = [{key_usage,undefined},
+ {subject_altname, undefined},
+ {issuer_altname, undefined},
+ {basic_constraints, default},
+ {name_constraints, undefined},
+ {policy_constraints, undefined},
+ {ext_key_usage, undefined},
+ {inhibit_any, undefined},
+ {auth_key_id, undefined},
+ {subject_key_id, undefined},
+ {policy_mapping, undefined}],
+ Filter = fun({Key, _}, D) -> lists:keydelete(Key, 1, D) end,
+ Exts ++ lists:foldl(Filter, Def, Exts).
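+%% For example (illustrative), default_extensions([{basic_constraints, false}])
+%% keeps the caller's basic_constraints entry and appends the remaining
+%% defaults, whose undefined values are later dropped by extension/1.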
+
+extension({_, undefined}) -> [];
+extension({basic_constraints, Data}) ->
+ case Data of
+ default ->
+ #'Extension'{extnID = ?'id-ce-basicConstraints',
+ extnValue = #'BasicConstraints'{cA=true},
+ critical=true};
+ false ->
+ [];
+ Len when is_integer(Len) ->
+ #'Extension'{extnID = ?'id-ce-basicConstraints',
+ extnValue = #'BasicConstraints'{cA=true, pathLenConstraint=Len},
+ critical=true};
+ _ ->
+ #'Extension'{extnID = ?'id-ce-basicConstraints',
+ extnValue = Data}
+ end;
+extension({Id, Data, Critical}) ->
+ #'Extension'{extnID = Id, extnValue = Data, critical = Critical}.
+
+
+publickey(#'RSAPrivateKey'{modulus=N, publicExponent=E}) ->
+ Public = #'RSAPublicKey'{modulus=N, publicExponent=E},
+ Algo = #'PublicKeyAlgorithm'{algorithm= ?rsaEncryption, parameters='NULL'},
+ #'OTPSubjectPublicKeyInfo'{algorithm = Algo,
+ subjectPublicKey = Public};
+publickey(#'DSAPrivateKey'{p=P, q=Q, g=G, y=Y}) ->
+ Algo = #'PublicKeyAlgorithm'{algorithm= ?'id-dsa',
+ parameters=#'Dss-Parms'{p=P, q=Q, g=G}},
+ #'OTPSubjectPublicKeyInfo'{algorithm = Algo, subjectPublicKey = Y}.
+
+validity(Opts) ->
+ DefFrom0 = date(),
+ DefTo0 = calendar:gregorian_days_to_date(calendar:date_to_gregorian_days(date())+7),
+ {DefFrom, DefTo} = proplists:get_value(validity, Opts, {DefFrom0, DefTo0}),
+ Format = fun({Y,M,D}) -> lists:flatten(io_lib:format("~w~2..0w~2..0w000000Z",[Y,M,D])) end,
+ #'Validity'{notBefore={generalTime, Format(DefFrom)},
+ notAfter ={generalTime, Format(DefTo)}}.
+
+sign_algorithm(#'RSAPrivateKey'{}, Opts) ->
+ Type = case proplists:get_value(digest, Opts, sha1) of
+ sha1 -> ?'sha1WithRSAEncryption';
+ sha512 -> ?'sha512WithRSAEncryption';
+ sha384 -> ?'sha384WithRSAEncryption';
+ sha256 -> ?'sha256WithRSAEncryption';
+ md5 -> ?'md5WithRSAEncryption';
+ md2 -> ?'md2WithRSAEncryption'
+ end,
+ {Type, 'NULL'};
+sign_algorithm(#'DSAPrivateKey'{p=P, q=Q, g=G}, _Opts) ->
+ {?'id-dsa-with-sha1', #'Dss-Parms'{p=P, q=Q, g=G}}.
+
+make_key(rsa, _Opts) ->
+ %% (OBS: for testing only)
+ gen_rsa2(64);
+make_key(dsa, _Opts) ->
+ gen_dsa2(128, 20). %% Bytes i.e. {1024, 160}
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% RSA key generation (OBS: for testing only)
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+-define(SMALL_PRIMES, [65537,97,89,83,79,73,71,67,61,59,53,
+ 47,43,41,37,31,29,23,19,17,13,11,7,5,3]).
+
+gen_rsa2(Size) ->
+ P = prime(Size),
+ Q = prime(Size),
+ N = P*Q,
+ Tot = (P - 1) * (Q - 1),
+ [E|_] = lists:dropwhile(fun(Candidate) -> (Tot rem Candidate) == 0 end, ?SMALL_PRIMES),
+ {D1,D2} = extended_gcd(E, Tot),
+ D = erlang:max(D1,D2),
+ case D < E of
+ true ->
+ gen_rsa2(Size);
+ false ->
+ {Co1,Co2} = extended_gcd(Q, P),
+ Co = erlang:max(Co1,Co2),
+ #'RSAPrivateKey'{version = 'two-prime',
+ modulus = N,
+ publicExponent = E,
+ privateExponent = D,
+ prime1 = P,
+ prime2 = Q,
+ exponent1 = D rem (P-1),
+ exponent2 = D rem (Q-1),
+ coefficient = Co
+ }
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% DSA key generation (OBS: for testing only)
+%% See http://en.wikipedia.org/wiki/Digital_Signature_Algorithm
+%% and the fips_186-3.pdf
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+gen_dsa2(LSize, NSize) ->
+ Q = prime(NSize), %% Choose N-bit prime Q
+ X0 = prime(LSize),
+ P0 = prime((LSize div 2) +1),
+
+    %% Choose an L-bit prime modulus P such that P-1 is a multiple of Q.
+ case dsa_search(X0 div (2*Q*P0), P0, Q, 1000) of
+ error ->
+ gen_dsa2(LSize, NSize);
+ P ->
+	    G = crypto:mod_exp(2, (P-1) div Q, P), %% Choose G, a number whose multiplicative order modulo P is Q.
+	    %% This may be done by setting G = H^((P-1) div Q) mod P; commonly H = 2 is used.
+
+ X = prime(20), %% Choose x by some random method, where 0 < x < q.
+ Y = crypto:mod_exp(G, X, P), %% Calculate y = g^x mod p.
+
+ #'DSAPrivateKey'{version=0, p=P, q=Q, g=G, y=Y, x=X}
+ end.
+
+%% See fips_186-3.pdf
+dsa_search(T, P0, Q, Iter) when Iter > 0 ->
+ P = 2*T*Q*P0 + 1,
+ case is_prime(crypto:mpint(P), 50) of
+ true -> P;
+ false -> dsa_search(T+1, P0, Q, Iter-1)
+ end;
+dsa_search(_,_,_,_) ->
+ error.
+
+
+%%%%%%% Crypto Math %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+prime(ByteSize) ->
+ Rand = odd_rand(ByteSize),
+ crypto:erlint(prime_odd(Rand, 0)).
+
+prime_odd(Rand, N) ->
+ case is_prime(Rand, 50) of
+ true ->
+ Rand;
+ false ->
+ NotPrime = crypto:erlint(Rand),
+ prime_odd(crypto:mpint(NotPrime+2), N+1)
+ end.
+
+%% see http://en.wikipedia.org/wiki/Fermat_primality_test
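+%% One round picks a random base and declares Candidate probably prime when
+%% Base^Candidate mod Candidate == Base; is_prime/2 runs 50 such rounds
+%% (a testing shortcut, not a rigorous primality proof).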
+is_prime(_, 0) -> true;
+is_prime(Candidate, Test) ->
+ CoPrime = odd_rand(<<0,0,0,4, 10000:32>>, Candidate),
+ case crypto:mod_exp(CoPrime, Candidate, Candidate) of
+ CoPrime -> is_prime(Candidate, Test-1);
+ _ -> false
+ end.
+
+odd_rand(Size) ->
+ Min = 1 bsl (Size*8-1),
+ Max = (1 bsl (Size*8))-1,
+ odd_rand(crypto:mpint(Min), crypto:mpint(Max)).
+
+odd_rand(Min,Max) ->
+ Rand = <<Sz:32, _/binary>> = crypto:rand_uniform(Min,Max),
+ BitSkip = (Sz+4)*8-1,
+ case Rand of
+ Odd = <<_:BitSkip, 1:1>> -> Odd;
+ Even = <<_:BitSkip, 0:1>> ->
+ crypto:mpint(crypto:erlint(Even)+1)
+ end.
+
+extended_gcd(A, B) ->
+ case A rem B of
+ 0 ->
+ {0, 1};
+ N ->
+ {X, Y} = extended_gcd(B, N),
+ {Y, X-Y*(A div B)}
+ end.
diff --git a/lib/ssl/test/ssl_basic_SUITE.erl b/lib/ssl/test/ssl_basic_SUITE.erl
index 9afcbd9113..0d9a912e30 100644
--- a/lib/ssl/test/ssl_basic_SUITE.erl
+++ b/lib/ssl/test/ssl_basic_SUITE.erl
@@ -27,17 +27,17 @@
-include("test_server.hrl").
-include("test_server_line.hrl").
-include_lib("public_key/include/public_key.hrl").
+-include("ssl_alert.hrl").
-define('24H_in_sec', 86400).
-define(TIMEOUT, 60000).
-define(EXPIRE, 10).
-define(SLEEP, 500).
-
-behaviour(ssl_session_cache_api).
%% For the session cache tests
--export([init/0, terminate/1, lookup/2, update/3,
+-export([init/1, terminate/1, lookup/2, update/3,
delete/2, foldl/3, select_session/2]).
%% Test server callback functions
@@ -53,11 +53,15 @@
init_per_suite(Config) ->
crypto:start(),
ssl:start(),
+
+    %% Make rsa certs using openssl
Result =
(catch make_certs:all(?config(data_dir, Config),
?config(priv_dir, Config))),
test_server:format("Make certs ~p~n", [Result]),
- ssl_test_lib:cert_options(Config).
+
+ NewConfig = ssl_test_lib:make_dsa_cert(Config),
+ ssl_test_lib:cert_options(NewConfig).
%%--------------------------------------------------------------------
%% Function: end_per_suite(Config) -> _
@@ -83,11 +87,11 @@ end_per_suite(_Config) ->
%% Description: Initialization before each test case
%%--------------------------------------------------------------------
init_per_testcase(session_cache_process_list, Config) ->
- init_customized_session_cache(Config);
+ init_customized_session_cache(list, Config);
init_per_testcase(session_cache_process_mnesia, Config) ->
mnesia:start(),
- init_customized_session_cache(Config);
+ init_customized_session_cache(mnesia, Config);
init_per_testcase(reuse_session_expired, Config0) ->
Config = lists:keydelete(watchdog, 1, Config0),
@@ -98,17 +102,50 @@ init_per_testcase(reuse_session_expired, Config0) ->
ssl:start(),
[{watchdog, Dog} | Config];
+init_per_testcase(no_authority_key_identifier, Config) ->
+    %% Clear the cache so that the root cert will not
+    %% be found.
+ ssl:stop(),
+ ssl:start(),
+ Config;
+
+init_per_testcase(TestCase, Config) when TestCase == ciphers_rsa_signed_certs_ssl3;
+ TestCase == ciphers_rsa_signed_certs_openssl_names_ssl3;
+ TestCase == ciphers_dsa_signed_certs_ssl3;
+ TestCase == ciphers_dsa_signed_certs_openssl_names_ssl3 ->
+ ssl:stop(),
+ application:load(ssl),
+ application:set_env(ssl, protocol_version, sslv3),
+ ssl:start(),
+ Config;
+
+init_per_testcase(protocol_versions, Config) ->
+ ssl:stop(),
+ application:load(ssl),
+ %% For backwards compatibility sslv2 should be filtered out.
+ application:set_env(ssl, protocol_version, [sslv2, sslv3, tlsv1]),
+ ssl:start(),
+ Config;
+
+init_per_testcase(empty_protocol_versions, Config) ->
+ ssl:stop(),
+ application:load(ssl),
+ application:set_env(ssl, protocol_version, []),
+ ssl:start(),
+ Config;
+
init_per_testcase(_TestCase, Config0) ->
Config = lists:keydelete(watchdog, 1, Config0),
Dog = test_server:timetrap(?TIMEOUT),
- [{watchdog, Dog} | Config].
+ [{watchdog, Dog} | Config].
-init_customized_session_cache(Config0) ->
+init_customized_session_cache(Type, Config0) ->
Config = lists:keydelete(watchdog, 1, Config0),
Dog = test_server:timetrap(?TIMEOUT),
ssl:stop(),
application:load(ssl),
application:set_env(ssl, session_cb, ?MODULE),
+ application:set_env(ssl, session_cb_init_args, [Type]),
ssl:start(),
[{watchdog, Dog} | Config].
@@ -125,11 +162,22 @@ end_per_testcase(session_cache_process_list, Config) ->
end_per_testcase(default_action, Config);
end_per_testcase(session_cache_process_mnesia, Config) ->
application:unset_env(ssl, session_cb),
+ application:unset_env(ssl, session_cb_init_args),
mnesia:stop(),
+ ssl:stop(),
+ ssl:start(),
end_per_testcase(default_action, Config);
end_per_testcase(reuse_session_expired, Config) ->
application:unset_env(ssl, session_lifetime),
end_per_testcase(default_action, Config);
+end_per_testcase(TestCase, Config) when TestCase == ciphers_rsa_signed_certs_ssl3;
+ TestCase == ciphers_rsa_signed_certs_openssl_names_ssl3;
+ TestCase == ciphers_dsa_signed_certs_ssl3;
+ TestCase == ciphers_dsa_signed_certs_openssl_names_ssl3;
+ TestCase == protocol_versions;
+ TestCase == empty_protocol_versions->
+ application:unset_env(ssl, protocol_version),
+ end_per_testcase(default_action, Config);
end_per_testcase(_TestCase, Config) ->
Dog = ?config(watchdog, Config),
case Dog of
@@ -151,30 +199,38 @@ all(doc) ->
["Test the basic ssl functionality"];
all(suite) ->
- [app, connection_info, controlling_process, controller_dies,
- client_closes_socket,
- peercert, connect_dist,
- peername, sockname, socket_options, misc_ssl_options, versions, cipher_suites,
- upgrade, upgrade_with_timeout, tcp_connect,
- ipv6, ekeyfile, ecertfile, ecacertfile, eoptions, shutdown,
- shutdown_write, shutdown_both, shutdown_error, ciphers,
- send_close, close_transport_accept, dh_params,
- server_verify_peer_passive,
+ [app, alerts, connection_info, protocol_versions,
+ empty_protocol_versions, controlling_process, controller_dies,
+ client_closes_socket, peercert, connect_dist, peername, sockname,
+ socket_options, misc_ssl_options, versions, cipher_suites,
+ upgrade, upgrade_with_timeout, tcp_connect, ipv6, ekeyfile,
+ ecertfile, ecacertfile, eoptions, shutdown, shutdown_write,
+ shutdown_both, shutdown_error,
+ ciphers_rsa_signed_certs, ciphers_rsa_signed_certs_ssl3,
+ ciphers_rsa_signed_certs_openssl_names,
+ ciphers_rsa_signed_certs_openssl_names_ssl3,
+ ciphers_dsa_signed_certs,
+ ciphers_dsa_signed_certs_ssl3,
+ ciphers_dsa_signed_certs_openssl_names,
+ ciphers_dsa_signed_certs_openssl_names_ssl3,
+ send_close,
+ close_transport_accept, dh_params, server_verify_peer_passive,
server_verify_peer_active, server_verify_peer_active_once,
- server_verify_none_passive, server_verify_none_active,
+ server_verify_none_passive, server_verify_none_active,
server_verify_none_active_once, server_verify_no_cacerts,
server_require_peer_cert_ok, server_require_peer_cert_fail,
server_verify_client_once_passive,
server_verify_client_once_active,
- server_verify_client_once_active_once,
- client_verify_none_passive,
- client_verify_none_active, client_verify_none_active_once
- %%, session_cache_process_list, session_cache_process_mnesia
- ,reuse_session, reuse_session_expired, server_does_not_want_to_reuse_session,
- client_renegotiate, server_renegotiate, client_renegotiate_reused_session,
- server_renegotiate_reused_session,
- client_no_wrap_sequence_number, server_no_wrap_sequence_number,
- extended_key_usage, validate_extensions_fun
+ server_verify_client_once_active_once, client_verify_none_passive,
+ client_verify_none_active, client_verify_none_active_once,
+ session_cache_process_list, session_cache_process_mnesia,
+ reuse_session, reuse_session_expired,
+ server_does_not_want_to_reuse_session, client_renegotiate,
+ server_renegotiate, client_renegotiate_reused_session,
+ server_renegotiate_reused_session, client_no_wrap_sequence_number,
+ server_no_wrap_sequence_number, extended_key_usage,
+ validate_extensions_fun, no_authority_key_identifier,
+ invalid_signature_client, invalid_signature_server, cert_expired
].
%% Test cases starts here.
@@ -185,7 +241,31 @@ app(suite) ->
[];
app(Config) when is_list(Config) ->
ok = test_server:app_test(ssl).
-
+%%--------------------------------------------------------------------
+alerts(doc) ->
+ "Test ssl_alert:alert_txt/1";
+alerts(suite) ->
+ [];
+alerts(Config) when is_list(Config) ->
+ Descriptions = [?CLOSE_NOTIFY, ?UNEXPECTED_MESSAGE, ?BAD_RECORD_MAC,
+ ?DECRYPTION_FAILED, ?RECORD_OVERFLOW, ?DECOMPRESSION_FAILURE,
+ ?HANDSHAKE_FAILURE, ?BAD_CERTIFICATE, ?UNSUPPORTED_CERTIFICATE,
+ ?CERTIFICATE_REVOKED,?CERTIFICATE_EXPIRED, ?CERTIFICATE_UNKNOWN,
+ ?ILLEGAL_PARAMETER, ?UNKNOWN_CA, ?ACCESS_DENIED, ?DECODE_ERROR,
+ ?DECRYPT_ERROR, ?EXPORT_RESTRICTION, ?PROTOCOL_VERSION,
+ ?INSUFFICIENT_SECURITY, ?INTERNAL_ERROR, ?USER_CANCELED,
+ ?NO_RENEGOTIATION],
+ Alerts = [?ALERT_REC(?WARNING, ?CLOSE_NOTIFY) |
+ [?ALERT_REC(?FATAL, Desc) || Desc <- Descriptions]],
+ lists:foreach(fun(Alert) ->
+ case ssl_alert:alert_txt(Alert) of
+ Txt when is_list(Txt) ->
+ ok;
+ Other ->
+ test_server:fail({unexpected, Other})
+ end
+ end, Alerts).
+%%--------------------------------------------------------------------
connection_info(doc) ->
["Test the API function ssl:connection_info/1"];
connection_info(suite) ->
@@ -214,7 +294,7 @@ connection_info(Config) when is_list(Config) ->
Version =
ssl_record:protocol_version(ssl_record:highest_protocol_version([])),
- ServerMsg = ClientMsg = {ok, {Version, {rsa,rc4_128,sha,no_export}}},
+ ServerMsg = ClientMsg = {ok, {Version, {rsa,rc4_128,sha}}},
ssl_test_lib:check_result(Server, ServerMsg, Client, ClientMsg),
@@ -226,6 +306,49 @@ connection_info_result(Socket) ->
%%--------------------------------------------------------------------
+protocol_versions(doc) ->
+ ["Test to set a list of protocol versions in app environment."];
+
+protocol_versions(suite) ->
+ [];
+
+protocol_versions(Config) when is_list(Config) ->
+ basic_test(Config).
+
+empty_protocol_versions(doc) ->
+ ["Test to set an empty list of protocol versions in app environment."];
+
+empty_protocol_versions(suite) ->
+ [];
+
+empty_protocol_versions(Config) when is_list(Config) ->
+ basic_test(Config).
+
+
+basic_test(Config) ->
+ ClientOpts = ?config(client_opts, Config),
+ ServerOpts = ?config(server_opts, Config),
+
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {?MODULE, send_recv_result_active, []}},
+ {options, ServerOpts}]),
+ Port = ssl_test_lib:inet_port(Server),
+ Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {mfa, {?MODULE, send_recv_result_active, []}},
+ {options, ClientOpts}]),
+
+ ssl_test_lib:check_result(Server, ok, Client, ok),
+
+ ssl_test_lib:close(Server),
+ ssl_test_lib:close(Client).
+
+%%--------------------------------------------------------------------
+
controlling_process(doc) ->
["Test API function controlling_process/2"];
@@ -283,7 +406,7 @@ controlling_process_result(Socket, Pid, Msg) ->
ssl:send(Socket, Msg),
no_result_msg.
-
+%%--------------------------------------------------------------------
controller_dies(doc) ->
["Test that the socket is closed after controlling process dies"];
controller_dies(suite) -> [];
@@ -598,9 +721,12 @@ cipher_suites(suite) ->
[];
cipher_suites(Config) when is_list(Config) ->
- MandatoryCipherSuite = {rsa,'3des_ede_cbc',sha,no_export},
+ MandatoryCipherSuite = {rsa,'3des_ede_cbc',sha},
[_|_] = Suites = ssl:cipher_suites(),
- true = lists:member(MandatoryCipherSuite, Suites).
+ true = lists:member(MandatoryCipherSuite, Suites),
+ Suites = ssl:cipher_suites(erlang),
+    [_|_] = ssl:cipher_suites(openssl).
+
%%--------------------------------------------------------------------
socket_options(doc) ->
["Test API function getopts/2 and setopts/2"];
@@ -635,9 +761,16 @@ socket_options(Config) when is_list(Config) ->
{options, ClientOpts}]),
ssl_test_lib:check_result(Server, ok, Client, ok),
-
+
ssl_test_lib:close(Server),
- ssl_test_lib:close(Client).
+ ssl_test_lib:close(Client),
+
+ {ok, Listen} = ssl:listen(0, ServerOpts),
+ {ok,[{mode,list}]} = ssl:getopts(Listen, [mode]),
+ ok = ssl:setopts(Listen, [{mode, binary}]),
+ {ok,[{mode, binary}]} = ssl:getopts(Listen, [mode]),
+ {ok,[{recbuf, _}]} = ssl:getopts(Listen, [recbuf]),
+ ssl:close(Listen).
socket_options_result(Socket, Options, DefaultValues, NewOptions, NewValues) ->
%% Test get/set emulated opts
@@ -646,6 +779,8 @@ socket_options_result(Socket, Options, DefaultValues, NewOptions, NewValues) ->
{ok, NewValues} = ssl:getopts(Socket, NewOptions),
%% Test get/set inet opts
{ok,[{nodelay,false}]} = ssl:getopts(Socket, [nodelay]),
+ ssl:setopts(Socket, [{nodelay, true}]),
+ {ok,[{nodelay, true}]} = ssl:getopts(Socket, [nodelay]),
ok.
%%--------------------------------------------------------------------
@@ -1272,20 +1407,130 @@ shutdown_error(Config) when is_list(Config) ->
ok = ssl:close(Listen),
{error, closed} = ssl:shutdown(Listen, read_write).
-%%--------------------------------------------------------------------
-ciphers(doc) ->
- [""];
+%%-------------------------------------------------------------------
+ciphers_rsa_signed_certs(doc) ->
+    ["Test all rsa ssl cipher suites in the highest supported ssl/tls version"];
+
+ciphers_rsa_signed_certs(suite) ->
+ [];
+
+ciphers_rsa_signed_certs(Config) when is_list(Config) ->
+ Version =
+ ssl_record:protocol_version(ssl_record:highest_protocol_version([])),
+
+ Ciphers = ssl_test_lib:rsa_suites(),
+ test_server:format("tls1 erlang cipher suites ~p~n", [Ciphers]),
+ run_suites(Ciphers, Version, Config, rsa).
+
+ciphers_rsa_signed_certs_ssl3(doc) ->
+ ["Test all rsa ssl cipher suites in ssl3"];
+
+ciphers_rsa_signed_certs_ssl3(suite) ->
+ [];
+
+ciphers_rsa_signed_certs_ssl3(Config) when is_list(Config) ->
+ Version =
+ ssl_record:protocol_version({3,0}),
+
+ Ciphers = ssl_test_lib:rsa_suites(),
+ test_server:format("ssl3 erlang cipher suites ~p~n", [Ciphers]),
+ run_suites(Ciphers, Version, Config, rsa).
+
+ciphers_rsa_signed_certs_openssl_names(doc) ->
+    ["Test all rsa ssl cipher suites (openssl names) in the highest supported ssl/tls version"];
-ciphers(suite) ->
+ciphers_rsa_signed_certs_openssl_names(suite) ->
[];
-ciphers(Config) when is_list(Config) ->
+ciphers_rsa_signed_certs_openssl_names(Config) when is_list(Config) ->
Version =
ssl_record:protocol_version(ssl_record:highest_protocol_version([])),
+ Ciphers = ssl_test_lib:openssl_rsa_suites(),
+ test_server:format("tls1 openssl cipher suites ~p~n", [Ciphers]),
+ run_suites(Ciphers, Version, Config, rsa).
+
+
+ciphers_rsa_signed_certs_openssl_names_ssl3(doc) ->
+    ["Test all rsa ssl cipher suites (openssl names) in ssl3"];
+
+ciphers_rsa_signed_certs_openssl_names_ssl3(suite) ->
+ [];
+
+ciphers_rsa_signed_certs_openssl_names_ssl3(Config) when is_list(Config) ->
+ Version = ssl_record:protocol_version({3,0}),
+ Ciphers = ssl_test_lib:openssl_rsa_suites(),
+ run_suites(Ciphers, Version, Config, rsa).
+
+
+ciphers_dsa_signed_certs(doc) ->
+    ["Test all dsa ssl cipher suites in the highest supported ssl/tls version"];
+
+ciphers_dsa_signed_certs(suite) ->
+ [];
+
+ciphers_dsa_signed_certs(Config) when is_list(Config) ->
+ Version =
+ ssl_record:protocol_version(ssl_record:highest_protocol_version([])),
+
+ Ciphers = ssl_test_lib:dsa_suites(),
+ test_server:format("tls1 erlang cipher suites ~p~n", [Ciphers]),
+ run_suites(Ciphers, Version, Config, dsa).
- Ciphers = ssl:cipher_suites(),
+ciphers_dsa_signed_certs_ssl3(doc) ->
+ ["Test all dsa ssl cipher suites in ssl3"];
+
+ciphers_dsa_signed_certs_ssl3(suite) ->
+ [];
+
+ciphers_dsa_signed_certs_ssl3(Config) when is_list(Config) ->
+ Version =
+ ssl_record:protocol_version({3,0}),
+
+ Ciphers = ssl_test_lib:dsa_suites(),
+ test_server:format("ssl3 erlang cipher suites ~p~n", [Ciphers]),
+ run_suites(Ciphers, Version, Config, dsa).
+
+
+ciphers_dsa_signed_certs_openssl_names(doc) ->
+    ["Test all dsa ssl cipher suites (openssl names) in the highest supported ssl/tls version"];
+
+ciphers_dsa_signed_certs_openssl_names(suite) ->
+ [];
+
+ciphers_dsa_signed_certs_openssl_names(Config) when is_list(Config) ->
+ Version =
+ ssl_record:protocol_version(ssl_record:highest_protocol_version([])),
+
+ Ciphers = ssl_test_lib:openssl_dsa_suites(),
+ test_server:format("tls1 openssl cipher suites ~p~n", [Ciphers]),
+ run_suites(Ciphers, Version, Config, dsa).
+
+
+ciphers_dsa_signed_certs_openssl_names_ssl3(doc) ->
+    ["Test all dsa ssl cipher suites (openssl names) in ssl3"];
+
+ciphers_dsa_signed_certs_openssl_names_ssl3(suite) ->
+ [];
+
+ciphers_dsa_signed_certs_openssl_names_ssl3(Config) when is_list(Config) ->
+ Version = ssl_record:protocol_version({3,0}),
+ Ciphers = ssl_test_lib:openssl_dsa_suites(),
+ run_suites(Ciphers, Version, Config, dsa).
+
+
+run_suites(Ciphers, Version, Config, Type) ->
+ {ClientOpts, ServerOpts} =
+ case Type of
+ rsa ->
+ {?config(client_opts, Config),
+ ?config(server_opts, Config)};
+ dsa ->
+ {?config(client_opts, Config),
+ ?config(server_dsa_opts, Config)}
+ end,
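+    %% For dsa suites the server uses the dsa-signed cert from server_dsa_opts
+    %% (created by ssl_test_lib:make_dsa_cert/1), while the client keeps its
+    %% rsa cert options.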
+
Result = lists:map(fun(Cipher) ->
- cipher(Cipher, Version, Config) end,
+ cipher(Cipher, Version, Config, ClientOpts, ServerOpts) end,
Ciphers),
case lists:flatten(Result) of
[] ->
@@ -1294,12 +1539,15 @@ ciphers(Config) when is_list(Config) ->
test_server:format("Cipher suite errors: ~p~n", [Error]),
test_server:fail(cipher_suite_failed_see_test_case_log)
end.
-
-cipher(CipherSuite, Version, Config) ->
+
+erlang_cipher_suite(Suite) when is_list(Suite)->
+ ssl_cipher:suite_definition(ssl_cipher:openssl_suite(Suite));
+erlang_cipher_suite(Suite) ->
+ Suite.
+
+cipher(CipherSuite, Version, Config, ClientOpts, ServerOpts) ->
process_flag(trap_exit, true),
test_server:format("Testing CipherSuite ~p~n", [CipherSuite]),
- ClientOpts = ?config(client_opts, Config),
- ServerOpts = ?config(server_opts, Config),
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
{from, self()},
@@ -1314,7 +1562,9 @@ cipher(CipherSuite, Version, Config) ->
[{ciphers,[CipherSuite]} |
ClientOpts]}]),
- ServerMsg = ClientMsg = {ok, {Version, CipherSuite}},
+ ErlangCipherSuite = erlang_cipher_suite(CipherSuite),
+
+ ServerMsg = ClientMsg = {ok, {Version, ErlangCipherSuite}},
Result = ssl_test_lib:wait_for_result(Server, ServerMsg,
Client, ClientMsg),
@@ -1333,7 +1583,7 @@ cipher(CipherSuite, Version, Config) ->
ok ->
[];
Error ->
- [{CipherSuite, Error}]
+ [{ErlangCipherSuite, Error}]
end.
%%--------------------------------------------------------------------
@@ -1933,9 +2183,7 @@ server_require_peer_cert_fail(Config) when is_list(Config) ->
{options, [{active, false} | BadClientOpts]}]),
ssl_test_lib:check_result(Server, {error, esslaccept},
- Client, {error, esslconnect}),
- ssl_test_lib:close(Server),
- ssl_test_lib:close(Client).
+ Client, {error, esslconnect}).
%%--------------------------------------------------------------------
@@ -2269,48 +2517,54 @@ extended_key_usage(suite) ->
[];
extended_key_usage(Config) when is_list(Config) ->
- ClientOpts = ?config(client_opts, Config),
- ServerOpts = ?config(server_opts, Config),
+ ClientOpts = ?config(client_verification_opts, Config),
+ ServerOpts = ?config(server_verification_opts, Config),
PrivDir = ?config(priv_dir, Config),
- CertFile = proplists:get_value(certfile, ServerOpts),
- KeyFile = proplists:get_value(keyfile, ServerOpts),
- NewCertFile = filename:join(PrivDir, "cert.pem"),
-
- {ok, [{cert, DerCert, _}]} = public_key:pem_to_der(CertFile),
-
+ KeyFile = filename:join(PrivDir, "otpCA/private/key.pem"),
{ok, [KeyInfo]} = public_key:pem_to_der(KeyFile),
-
{ok, Key} = public_key:decode_private_key(KeyInfo),
- {ok, OTPCert} = public_key:pkix_decode_cert(DerCert, otp),
-
- ExtKeyUsageExt = {'Extension', ?'id-ce-extKeyUsage', true, [?'id-kp-serverAuth']},
-
- OTPTbsCert = OTPCert#'OTPCertificate'.tbsCertificate,
-
- Extensions = OTPTbsCert#'OTPTBSCertificate'.extensions,
-
- NewOTPTbsCert = OTPTbsCert#'OTPTBSCertificate'{extensions = [ExtKeyUsageExt |Extensions]},
-
- NewDerCert = public_key:sign(NewOTPTbsCert, Key),
-
- public_key:der_to_pem(NewCertFile, [{cert, NewDerCert}]),
-
- NewServerOpts = [{certfile, NewCertFile} | proplists:delete(certfile, ServerOpts)],
+ ServerCertFile = proplists:get_value(certfile, ServerOpts),
+ NewServerCertFile = filename:join(PrivDir, "server/new_cert.pem"),
+ {ok, [{cert, ServerDerCert, _}]} = public_key:pem_to_der(ServerCertFile),
+ {ok, ServerOTPCert} = public_key:pkix_decode_cert(ServerDerCert, otp),
+ ServerExtKeyUsageExt = {'Extension', ?'id-ce-extKeyUsage', true, [?'id-kp-serverAuth']},
+ ServerOTPTbsCert = ServerOTPCert#'OTPCertificate'.tbsCertificate,
+ ServerExtensions = ServerOTPTbsCert#'OTPTBSCertificate'.extensions,
+ NewServerOTPTbsCert = ServerOTPTbsCert#'OTPTBSCertificate'{extensions =
+ [ServerExtKeyUsageExt |
+ ServerExtensions]},
+ NewServerDerCert = public_key:sign(NewServerOTPTbsCert, Key),
+ public_key:der_to_pem(NewServerCertFile, [{cert, NewServerDerCert, not_encrypted}]),
+ NewServerOpts = [{certfile, NewServerCertFile} | proplists:delete(certfile, ServerOpts)],
+
+ ClientCertFile = proplists:get_value(certfile, ClientOpts),
+ NewClientCertFile = filename:join(PrivDir, "client/new_cert.pem"),
+ {ok, [{cert, ClientDerCert, _}]} = public_key:pem_to_der(ClientCertFile),
+ {ok, ClientOTPCert} = public_key:pkix_decode_cert(ClientDerCert, otp),
+ ClientExtKeyUsageExt = {'Extension', ?'id-ce-extKeyUsage', true, [?'id-kp-clientAuth']},
+ ClientOTPTbsCert = ClientOTPCert#'OTPCertificate'.tbsCertificate,
+ ClientExtensions = ClientOTPTbsCert#'OTPTBSCertificate'.extensions,
+ NewClientOTPTbsCert = ClientOTPTbsCert#'OTPTBSCertificate'{extensions =
+ [ClientExtKeyUsageExt |
+ ClientExtensions]},
+ NewClientDerCert = public_key:sign(NewClientOTPTbsCert, Key),
+ public_key:der_to_pem(NewClientCertFile, [{cert, NewClientDerCert, not_encrypted}]),
+ NewClientOpts = [{certfile, NewClientCertFile} | proplists:delete(certfile, ClientOpts)],
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
{from, self()},
{mfa, {?MODULE, send_recv_result_active, []}},
- {options, NewServerOpts}]),
+ {options, [{verify, verify_peer} | NewServerOpts]}]),
Port = ssl_test_lib:inet_port(Server),
Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
{host, Hostname},
{from, self()},
{mfa, {?MODULE, send_recv_result_active, []}},
- {options, ClientOpts}]),
+ {options, [{verify, verify_peer} | NewClientOpts]}]),
ssl_test_lib:check_result(Server, ok, Client, ok),
@@ -2353,6 +2607,245 @@ validate_extensions_fun(Config) when is_list(Config) ->
ssl_test_lib:close(Client).
%%--------------------------------------------------------------------
+no_authority_key_identifier(doc) ->
+    ["Test a cert that does not have the authorityKeyIdentifier extension"];
+
+no_authority_key_identifier(suite) ->
+ [];
+no_authority_key_identifier(Config) when is_list(Config) ->
+ ClientOpts = ?config(client_opts, Config),
+ ServerOpts = ?config(server_opts, Config),
+ PrivDir = ?config(priv_dir, Config),
+
+ KeyFile = filename:join(PrivDir, "otpCA/private/key.pem"),
+ {ok, [KeyInfo]} = public_key:pem_to_der(KeyFile),
+ {ok, Key} = public_key:decode_private_key(KeyInfo),
+
+ CertFile = proplists:get_value(certfile, ServerOpts),
+ NewCertFile = filename:join(PrivDir, "server/new_cert.pem"),
+ {ok, [{cert, DerCert, _}]} = public_key:pem_to_der(CertFile),
+ {ok, OTPCert} = public_key:pkix_decode_cert(DerCert, otp),
+ OTPTbsCert = OTPCert#'OTPCertificate'.tbsCertificate,
+ Extensions = OTPTbsCert#'OTPTBSCertificate'.extensions,
+ NewExtensions = delete_authority_key_extension(Extensions, []),
+ NewOTPTbsCert = OTPTbsCert#'OTPTBSCertificate'{extensions = NewExtensions},
+
+ test_server:format("Extensions ~p~n, NewExtensions: ~p~n", [Extensions, NewExtensions]),
+
+ NewDerCert = public_key:sign(NewOTPTbsCert, Key),
+ public_key:der_to_pem(NewCertFile, [{cert, NewDerCert, not_encrypted}]),
+ NewServerOpts = [{certfile, NewCertFile} | proplists:delete(certfile, ServerOpts)],
+
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {?MODULE, send_recv_result_active, []}},
+ {options, NewServerOpts}]),
+ Port = ssl_test_lib:inet_port(Server),
+ Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {mfa, {?MODULE, send_recv_result_active, []}},
+ {options, [{verify, verify_peer} | ClientOpts]}]),
+
+ ssl_test_lib:check_result(Server, ok, Client, ok),
+
+ ssl_test_lib:close(Server),
+ ssl_test_lib:close(Client).
+
+delete_authority_key_extension([], Acc) ->
+ lists:reverse(Acc);
+delete_authority_key_extension([#'Extension'{extnID = ?'id-ce-authorityKeyIdentifier'} | Rest],
+ Acc) ->
+ delete_authority_key_extension(Rest, Acc);
+delete_authority_key_extension([Head | Rest], Acc) ->
+ delete_authority_key_extension(Rest, [Head | Acc]).
+
+%%--------------------------------------------------------------------
+
+invalid_signature_server(doc) ->
+ ["Test server with invalid signature"];
+
+invalid_signature_server(suite) ->
+ [];
+
+invalid_signature_server(Config) when is_list(Config) ->
+ ClientOpts = ?config(client_opts, Config),
+ ServerOpts = ?config(server_verification_opts, Config),
+ PrivDir = ?config(priv_dir, Config),
+
+ KeyFile = filename:join(PrivDir, "server/key.pem"),
+ {ok, [KeyInfo]} = public_key:pem_to_der(KeyFile),
+ {ok, Key} = public_key:decode_private_key(KeyInfo),
+
+ ServerCertFile = proplists:get_value(certfile, ServerOpts),
+ NewServerCertFile = filename:join(PrivDir, "server/invalid_cert.pem"),
+ {ok, [{cert, ServerDerCert, _}]} = public_key:pem_to_der(ServerCertFile),
+ {ok, ServerOTPCert} = public_key:pkix_decode_cert(ServerDerCert, otp),
+ ServerOTPTbsCert = ServerOTPCert#'OTPCertificate'.tbsCertificate,
+ NewServerDerCert = public_key:sign(ServerOTPTbsCert, Key),
+ public_key:der_to_pem(NewServerCertFile, [{cert, NewServerDerCert, not_encrypted}]),
+ NewServerOpts = [{certfile, NewServerCertFile} | proplists:delete(certfile, ServerOpts)],
+
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+
+ Server = ssl_test_lib:start_server_error([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {options, NewServerOpts}]),
+ Port = ssl_test_lib:inet_port(Server),
+ Client = ssl_test_lib:start_client_error([{node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {options, [{verify, verify_peer} | ClientOpts]}]),
+
+ ssl_test_lib:check_result(Server, {error, "bad certificate"},
+ Client, {error,"bad certificate"}).
+
+%%--------------------------------------------------------------------
+
+invalid_signature_client(doc) ->
+    ["Test client with invalid signature"];
+
+invalid_signature_client(suite) ->
+ [];
+
+invalid_signature_client(Config) when is_list(Config) ->
+ ClientOpts = ?config(client_verification_opts, Config),
+ ServerOpts = ?config(server_verification_opts, Config),
+ PrivDir = ?config(priv_dir, Config),
+
+ KeyFile = filename:join(PrivDir, "client/key.pem"),
+ {ok, [KeyInfo]} = public_key:pem_to_der(KeyFile),
+ {ok, Key} = public_key:decode_private_key(KeyInfo),
+
+ ClientCertFile = proplists:get_value(certfile, ClientOpts),
+ NewClientCertFile = filename:join(PrivDir, "client/invalid_cert.pem"),
+ {ok, [{cert, ClientDerCert, _}]} = public_key:pem_to_der(ClientCertFile),
+ {ok, ClientOTPCert} = public_key:pkix_decode_cert(ClientDerCert, otp),
+ ClientOTPTbsCert = ClientOTPCert#'OTPCertificate'.tbsCertificate,
+ NewClientDerCert = public_key:sign(ClientOTPTbsCert, Key),
+ public_key:der_to_pem(NewClientCertFile, [{cert, NewClientDerCert, not_encrypted}]),
+ NewClientOpts = [{certfile, NewClientCertFile} | proplists:delete(certfile, ClientOpts)],
+
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+
+ Server = ssl_test_lib:start_server_error([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {options, [{verify, verify_peer} | ServerOpts]}]),
+ Port = ssl_test_lib:inet_port(Server),
+ Client = ssl_test_lib:start_client_error([{node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {options, NewClientOpts}]),
+
+ tcp_delivery_workaround(Server, {error, "bad certificate"},
+ Client, {error,"bad certificate"}).
+
+tcp_delivery_workaround(Server, ServerMsg, Client, ClientMsg) ->
+ receive
+ {Server, ServerMsg} ->
+ receive
+ {Client, ClientMsg} ->
+ ok;
+ {Client, {error,closed}} ->
+ test_server:format("client got close");
+ Unexpected ->
+ test_server:fail(Unexpected)
+ end;
+ {Client, ClientMsg} ->
+ receive
+ {Server, ServerMsg} ->
+ ok;
+ Unexpected ->
+ test_server:fail(Unexpected)
+ end;
+ {Client, {error,closed}} ->
+ receive
+ {Server, ServerMsg} ->
+ ok;
+ Unexpected ->
+ test_server:fail(Unexpected)
+ end;
+ {Server, {error,closed}} ->
+ receive
+ {Client, ClientMsg} ->
+ ok;
+ {Client, {error,closed}} ->
+ test_server:format("client got close"),
+ ok;
+ Unexpected ->
+ test_server:fail(Unexpected)
+ end;
+ Unexpected ->
+ test_server:fail(Unexpected)
+ end.
+%%--------------------------------------------------------------------
+cert_expired(doc) ->
+    ["Test server with expired certificate"];
+
+cert_expired(suite) ->
+ [];
+
+cert_expired(Config) when is_list(Config) ->
+ ClientOpts = ?config(client_opts, Config),
+ ServerOpts = ?config(server_verification_opts, Config),
+ PrivDir = ?config(priv_dir, Config),
+
+ KeyFile = filename:join(PrivDir, "otpCA/private/key.pem"),
+ {ok, [KeyInfo]} = public_key:pem_to_der(KeyFile),
+ {ok, Key} = public_key:decode_private_key(KeyInfo),
+
+ ServerCertFile = proplists:get_value(certfile, ServerOpts),
+ NewServerCertFile = filename:join(PrivDir, "server/expired_cert.pem"),
+ {ok, [{cert, DerCert, _}]} = public_key:pem_to_der(ServerCertFile),
+ {ok, OTPCert} = public_key:pkix_decode_cert(DerCert, otp),
+ OTPTbsCert = OTPCert#'OTPCertificate'.tbsCertificate,
+
+ {Year, Month, Day} = date(),
+ {Hours, Min, Sec} = time(),
+ NotBeforeStr = lists:flatten(io_lib:format("~p~s~s~s~s~sZ",[Year-2,
+ two_digits_str(Month),
+ two_digits_str(Day),
+ two_digits_str(Hours),
+ two_digits_str(Min),
+ two_digits_str(Sec)])),
+ NotAfterStr = lists:flatten(io_lib:format("~p~s~s~s~s~sZ",[Year-1,
+ two_digits_str(Month),
+ two_digits_str(Day),
+ two_digits_str(Hours),
+ two_digits_str(Min),
+ two_digits_str(Sec)])),
+ NewValidity = {'Validity', {generalTime, NotBeforeStr}, {generalTime, NotAfterStr}},
+
+ test_server:format("Validity: ~p ~n NewValidity: ~p ~n",
+ [OTPTbsCert#'OTPTBSCertificate'.validity, NewValidity]),
+
+ NewOTPTbsCert = OTPTbsCert#'OTPTBSCertificate'{validity = NewValidity},
+ NewServerDerCert = public_key:sign(NewOTPTbsCert, Key),
+ public_key:der_to_pem(NewServerCertFile, [{cert, NewServerDerCert, not_encrypted}]),
+ NewServerOpts = [{certfile, NewServerCertFile} | proplists:delete(certfile, ServerOpts)],
+
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+
+ Server = ssl_test_lib:start_server_error([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {options, NewServerOpts}]),
+ Port = ssl_test_lib:inet_port(Server),
+ Client = ssl_test_lib:start_client_error([{node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {options, [{verify, verify_peer} | ClientOpts]}]),
+
+ ssl_test_lib:check_result(Server, {error, "certificate expired"},
+ Client, {error, "certificate expired"}).
+
+two_digits_str(N) when N < 10 ->
+ lists:flatten(io_lib:format("0~p", [N]));
+two_digits_str(N) ->
+ lists:flatten(io_lib:format("~p", [N])).
+
+%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
send_recv_result(Socket) ->
@@ -2411,128 +2904,34 @@ session_cache_process_mnesia(Config) when is_list(Config) ->
session_cache_process(mnesia,Config).
session_cache_process(Type,Config) when is_list(Config) ->
- process_flag(trap_exit, true),
- setup_session_cb(Type),
+ reuse_session(Config).
- ClientOpts = ?config(client_opts, Config),
- ServerOpts = ?config(server_opts, Config),
- {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
-
- Server =
- ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
- {from, self()},
- {mfa, {?MODULE, session_info_result, []}},
- {options,
- [{session_cache_cb, ?MODULE}|
- ServerOpts]}]),
- Port = ssl_test_lib:inet_port(Server),
- Client0 =
- ssl_test_lib:start_client([{node, ClientNode},
- {port, Port}, {host, Hostname},
- {mfa, {ssl_test_lib, no_result, []}},
- {from, self()}, {options, ClientOpts}]),
- SessionInfo =
- receive
- {Server, Info} ->
- Info
- end,
-
- Server ! listen,
-
- %% Make sure session is registered
- test_server:sleep(?SLEEP),
-
- Client1 =
- ssl_test_lib:start_client([{node, ClientNode},
- {port, Port}, {host, Hostname},
- {mfa, {?MODULE, session_info_result, []}},
- {from, self()}, {options, ClientOpts}]),
- receive
- {Client1, SessionInfo} ->
- ok;
- {Client1, Other} ->
- test_server:format("Expected: ~p, Unexpected: ~p~n",
- [SessionInfo, Other]),
- test_server:fail(session_not_reused)
- end,
-
- ssl_test_lib:close(Server),
- ssl_test_lib:close(Client0),
- ssl_test_lib:close(Client1),
-
- Server1 =
- ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
- {from, self()},
- {mfa, {?MODULE, session_info_result, []}},
- {options,
- [{reuse_sessions, false} | ServerOpts]}]),
- Port1 = ssl_test_lib:inet_port(Server1),
-
- Client3 =
- ssl_test_lib:start_client([{node, ClientNode},
- {port, Port1}, {host, Hostname},
- {mfa, {?MODULE, session_info_result, []}},
- {from, self()}, {options, ClientOpts}]),
-
- SessionInfo1 =
- receive
- {Server1, Info1} ->
- Info1
- end,
-
- Server1 ! listen,
-
- %% Make sure session is registered
- test_server:sleep(?SLEEP),
-
- Client4 =
- ssl_test_lib:start_client([{node, ClientNode},
- {port, Port1}, {host, Hostname},
- {mfa, {?MODULE, session_info_result, []}},
- {from, self()}, {options, ClientOpts}]),
-
- receive
- {Client4, SessionInfo1} ->
- test_server:fail(
- session_reused_when_session_reuse_disabled_by_server);
- {Client4, _Other} ->
- ok
- end,
-
- ssl_test_lib:close(Server1),
- ssl_test_lib:close(Client3),
- ssl_test_lib:close(Client4),
- process_flag(trap_exit, false).
-
-setup_session_cb(Type) ->
- ssl_test = ets:new(ssl_test,[named_table, set,public]),
- ets:insert(ssl_test, {type,Type}).
-
-session_cb() ->
- [{type,Type}] = ets:lookup(ssl_test, type),
- Type.
-
-init() ->
- io:format("~p~n",[?LINE]),
- case session_cb() of
+init([Type]) ->
+ ets:new(ssl_test, [named_table, public, set]),
+ ets:insert(ssl_test, {type, Type}),
+ case Type of
list ->
spawn(fun() -> session_loop([]) end);
mnesia ->
mnesia:start(),
- {atomic,ok} = mnesia:create_table(sess_cache, [])
+ {atomic,ok} = mnesia:create_table(sess_cache, []),
+ sess_cache
end.
+session_cb() ->
+ [{type, Type}] = ets:lookup(ssl_test, type),
+ Type.
+
terminate(Cache) ->
- io:format("~p~n",[?LINE]),
case session_cb() of
list ->
Cache ! terminate;
mnesia ->
- {atomic,ok} = mnesia:delete_table(sess_cache, [])
+ catch {atomic,ok} =
+ mnesia:delete_table(sess_cache)
end.
-lookup(Cache, Key) ->
- io:format("~p~n",[?LINE]),
+lookup(Cache, Key) ->
case session_cb() of
list ->
Cache ! {self(), lookup, Key},
@@ -2542,13 +2941,14 @@ lookup(Cache, Key) ->
mnesia:read(sess_cache,
Key, read)
end) of
- {atomic, [Session]} -> Session;
- _ -> undefined
+ {atomic, [{sess_cache, Key, Value}]} ->
+ Value;
+ _ ->
+ undefined
end
- end.
+ end.
update(Cache, Key, Value) ->
- io:format("~p~n",[?LINE]),
case session_cb() of
list ->
Cache ! {update, Key, Value};
@@ -2556,12 +2956,11 @@ update(Cache, Key, Value) ->
{atomic, ok} =
mnesia:transaction(fun() ->
mnesia:write(sess_cache,
- Key, Value)
+ {sess_cache, Key, Value}, write)
end)
end.
delete(Cache, Key) ->
- io:format("~p~n",[?LINE]),
case session_cb() of
list ->
Cache ! {delete, Key};
@@ -2573,7 +2972,6 @@ delete(Cache, Key) ->
end.
foldl(Fun, Acc, Cache) ->
- io:format("~p~n",[?LINE]),
case session_cb() of
list ->
Cache ! {self(),foldl,Fun,Acc},
@@ -2587,15 +2985,17 @@ foldl(Fun, Acc, Cache) ->
end.
select_session(Cache, PartialKey) ->
- io:format("~p~n",[?LINE]),
case session_cb() of
list ->
Cache ! {self(),select_session, PartialKey},
- receive {Cache, Res} -> Res end;
+ receive
+ {Cache, Res} ->
+ Res
+ end;
mnesia ->
Sel = fun() ->
mnesia:select(Cache,
- [{{{PartialKey,'$1'}, '$2'},
+ [{{sess_cache,{PartialKey,'$1'}, '$2'},
[],['$$']}])
end,
{atomic, Res} = mnesia:transaction(Sel),
@@ -2615,7 +3015,8 @@ session_loop(Sess) ->
end,
session_loop(Sess);
{update, Key, Value} ->
- session_loop([{Key,Value}|Sess]);
+ NewSess = [{Key,Value}| lists:keydelete(Key,1,Sess)],
+ session_loop(NewSess);
{delete, Key} ->
session_loop(lists:keydelete(Key,1,Sess));
{Pid,foldl,Fun,Acc} ->
@@ -2623,15 +3024,17 @@ session_loop(Sess) ->
Pid ! {self(), Res},
session_loop(Sess);
{Pid,select_session,PKey} ->
- Sel = fun({{Head, _},Session}, Acc) when Head =:= PKey ->
- [Session|Acc];
+ Sel = fun({{PKey0, Id},Session}, Acc) when PKey == PKey0 ->
+ [[Id, Session]|Acc];
(_,Acc) ->
Acc
- end,
- Pid ! {self(), lists:foldl(Sel, [], Sess)},
+ end,
+ Sessions = lists:foldl(Sel, [], Sess),
+ Pid ! {self(), Sessions},
session_loop(Sess)
end.
+
erlang_ssl_receive(Socket, Data) ->
receive
{ssl, Socket, Data} ->
@@ -2642,4 +3045,3 @@ erlang_ssl_receive(Socket, Data) ->
after ?SLEEP * 3 ->
test_server:fail({did_not_get, Data})
end.
-
diff --git a/lib/ssl/test/ssl_test_lib.erl b/lib/ssl/test/ssl_test_lib.erl
index 46b6eb401d..40715dbf30 100644
--- a/lib/ssl/test/ssl_test_lib.erl
+++ b/lib/ssl/test/ssl_test_lib.erl
@@ -318,6 +318,35 @@ cert_options(Config) ->
| Config].
+make_dsa_cert(Config) ->
+
+ {ServerCaCertFile, ServerCertFile, ServerKeyFile} = make_dsa_cert_files("server", Config),
+ {ClientCaCertFile, ClientCertFile, ClientKeyFile} = make_dsa_cert_files("client", Config),
+ [{server_dsa_opts, [{ssl_imp, new},{reuseaddr, true},
+ {cacertfile, ServerCaCertFile},
+ {certfile, ServerCertFile}, {keyfile, ServerKeyFile}]},
+ {client_dsa_opts, [{ssl_imp, new},{reuseaddr, true},
+ {cacertfile, ClientCaCertFile},
+ {certfile, ClientCertFile}, {keyfile, ClientKeyFile}]}
+ | Config].
+
+
+
+make_dsa_cert_files(RoleStr, Config) ->
+ CaInfo = {CaCert, _} = erl_make_certs:make_cert([{key, dsa}]),
+ {Cert, CertKey} = erl_make_certs:make_cert([{key, dsa}, {issuer, CaInfo}]),
+ CaCertFile = filename:join([?config(priv_dir, Config),
+ RoleStr, "dsa_cacerts.pem"]),
+ CertFile = filename:join([?config(priv_dir, Config),
+ RoleStr, "dsa_cert.pem"]),
+ KeyFile = filename:join([?config(priv_dir, Config),
+ RoleStr, "dsa_key.pem"]),
+
+ public_key:der_to_pem(CaCertFile, [{cert, CaCert, not_encrypted}]),
+ public_key:der_to_pem(CertFile, [{cert, Cert, not_encrypted}]),
+ public_key:der_to_pem(KeyFile, [CertKey]),
+ {CaCertFile, CertFile, KeyFile}.
+
start_upgrade_server(Args) ->
Result = spawn_link(?MODULE, run_upgrade_server, [Args]),
receive
@@ -529,3 +558,42 @@ send_selected_port(Pid, 0, Socket) ->
Pid ! {self(), {port, NewPort}};
send_selected_port(_,_,_) ->
ok.
+
+rsa_suites() ->
+ lists:filter(fun({dhe_dss, _, _}) ->
+ false;
+ (_) ->
+ true
+ end,
+ ssl:cipher_suites()).
+
+dsa_suites() ->
+ lists:filter(fun({dhe_dss, _, _}) ->
+ true;
+ (_) ->
+ false
+ end,
+ ssl:cipher_suites()).
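+
+%% The split above assumes that the dhe_dss suites are the only suites in
+%% ssl:cipher_suites() that require a dsa-signed certificate; all other
+%% suites are exercised with rsa certs.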
+
+
+openssl_rsa_suites() ->
+ Ciphers = ssl:cipher_suites(openssl),
+ lists:filter(fun(Str) ->
+ case re:run(Str,"DSS",[]) of
+ nomatch ->
+ true;
+ _ ->
+ false
+ end
+ end, Ciphers).
+
+openssl_dsa_suites() ->
+ Ciphers = ssl:cipher_suites(openssl),
+ lists:filter(fun(Str) ->
+ case re:run(Str,"DSS",[]) of
+ nomatch ->
+ false;
+ _ ->
+ true
+ end
+ end, Ciphers).
diff --git a/lib/ssl/test/ssl_to_openssl_SUITE.erl b/lib/ssl/test/ssl_to_openssl_SUITE.erl
index 03466aec6f..4981ac0424 100644
--- a/lib/ssl/test/ssl_to_openssl_SUITE.erl
+++ b/lib/ssl/test/ssl_to_openssl_SUITE.erl
@@ -33,6 +33,7 @@
-define(OPENSSL_RENEGOTIATE, "r\n").
-define(OPENSSL_QUIT, "Q\n").
-define(OPENSSL_GARBAGE, "P\n").
+-define(EXPIRE, 10).
%% Test server callback functions
%%--------------------------------------------------------------------
@@ -55,7 +56,8 @@ init_per_suite(Config) ->
(catch make_certs:all(?config(data_dir, Config),
?config(priv_dir, Config))),
test_server:format("Make certs ~p~n", [Result]),
- ssl_test_lib:cert_options(Config)
+ NewConfig = ssl_test_lib:make_dsa_cert(Config),
+ ssl_test_lib:cert_options(NewConfig)
end.
%%--------------------------------------------------------------------
@@ -81,6 +83,15 @@ end_per_suite(_Config) ->
%% variable, but should NOT alter/remove any existing entries.
%% Description: Initialization before each test case
%%--------------------------------------------------------------------
+init_per_testcase(expired_session, Config0) ->
+ Config = lists:keydelete(watchdog, 1, Config0),
+ Dog = ssl_test_lib:timetrap(?EXPIRE * 1000 * 5),
+ ssl:stop(),
+ application:load(ssl),
+ application:set_env(ssl, session_lifetime, ?EXPIRE),
+ ssl:start(),
+ [{watchdog, Dog} | Config];
+
init_per_testcase(TestCase, Config0) ->
Config = lists:keydelete(watchdog, 1, Config0),
Dog = ssl_test_lib:timetrap(?TIMEOUT),
@@ -103,14 +114,20 @@ special_init(_, Config) ->
%% A list of key/value pairs, holding the test case configuration.
%% Description: Cleanup after each test case
%%--------------------------------------------------------------------
-end_per_testcase(_TestCase, Config) ->
+end_per_testcase(reuse_session_expired, Config) ->
+ application:unset_env(ssl, session_lifetime),
+ end_per_testcase(default_action, Config);
+
+end_per_testcase(default_action, Config) ->
Dog = ?config(watchdog, Config),
case Dog of
undefined ->
ok;
_ ->
test_server:timetrap_cancel(Dog)
- end.
+ end;
+end_per_testcase(_, Config) ->
+ end_per_testcase(default_action, Config).
%%--------------------------------------------------------------------
%% Function: all(Clause) -> TestCases
@@ -126,6 +143,9 @@ all(doc) ->
all(suite) ->
[erlang_client_openssl_server,
erlang_server_openssl_client,
+     %% To be enabled when the new crypto sign functions are available:
+ %%erlang_client_openssl_server_dsa_cert,
+ %%erlang_server_openssl_client_dsa_cert,
erlang_server_openssl_client_reuse_session,
erlang_client_openssl_server_renegotiate,
erlang_client_openssl_server_no_wrap_sequence_number,
@@ -141,8 +161,11 @@ all(suite) ->
tls1_erlang_client_openssl_server_client_cert,
tls1_erlang_server_openssl_client_client_cert,
tls1_erlang_server_erlang_client_client_cert,
- ciphers,
- erlang_client_bad_openssl_server
+ ciphers_rsa_signed_certs,
+ ciphers_dsa_signed_certs,
+ erlang_client_bad_openssl_server,
+ expired_session,
+ ssl2_erlang_server_openssl_client
].
%% Test cases starts here.
@@ -229,6 +252,94 @@ erlang_server_openssl_client(Config) when is_list(Config) ->
%%--------------------------------------------------------------------
+erlang_client_openssl_server_dsa_cert(doc) ->
+    ["Test erlang client against an openssl server, using dsa certs"];
+erlang_client_openssl_server_dsa_cert(suite) ->
+ [];
+erlang_client_openssl_server_dsa_cert(Config) when is_list(Config) ->
+ process_flag(trap_exit, true),
+ ClientOpts = ?config(client_dsa_opts, Config),
+ ServerOpts = ?config(server_dsa_opts, Config),
+
+ {ClientNode, _, Hostname} = ssl_test_lib:run_where(Config),
+
+ Data = "From openssl to erlang",
+
+ Port = ssl_test_lib:inet_port(node()),
+ CaCertFile = proplists:get_value(cacertfile, ServerOpts),
+ CertFile = proplists:get_value(certfile, ServerOpts),
+ KeyFile = proplists:get_value(keyfile, ServerOpts),
+
+ Cmd = "openssl s_server -accept " ++ integer_to_list(Port) ++
+ " -cert " ++ CertFile ++ " -CAfile " ++ CaCertFile
+ ++ " -key " ++ KeyFile ++ " -Verify 2 -tls1 -msg",
+
+ test_server:format("openssl cmd: ~p~n", [Cmd]),
+
+ OpensslPort = open_port({spawn, Cmd}, [stderr_to_stdout]),
+
+ wait_for_openssl_server(),
+
+ Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {mfa, {?MODULE,
+ erlang_ssl_receive, [Data]}},
+ {options, ClientOpts}]),
+
+ port_command(OpensslPort, Data),
+
+ ssl_test_lib:check_result(Client, ok),
+
+ %% Clean close down! Server needs to be closed first !!
+ close_port(OpensslPort),
+
+ ssl_test_lib:close(Client),
+ process_flag(trap_exit, false),
+ ok.
+
+%%--------------------------------------------------------------------
+
+erlang_server_openssl_client_dsa_cert(doc) ->
+    ["Test erlang server with openssl client, using dsa certs"];
+erlang_server_openssl_client_dsa_cert(suite) ->
+ [];
+erlang_server_openssl_client_dsa_cert(Config) when is_list(Config) ->
+ process_flag(trap_exit, true),
+ ClientOpts = ?config(client_dsa_opts, Config),
+ ServerOpts = ?config(server_dsa_opts, Config),
+
+ {_, ServerNode, _} = ssl_test_lib:run_where(Config),
+
+ Data = "From openssl to erlang",
+ CaCertFile = proplists:get_value(cacertfile, ClientOpts),
+ CertFile = proplists:get_value(certfile, ClientOpts),
+ KeyFile = proplists:get_value(keyfile, ClientOpts),
+
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {?MODULE, erlang_ssl_receive, [Data]}},
+ {options, ServerOpts}]),
+ Port = ssl_test_lib:inet_port(Server),
+
+ Cmd = "openssl s_client -port " ++ integer_to_list(Port) ++
+ " -host localhost " ++ " -cert " ++ CertFile ++ " -CAfile " ++ CaCertFile
+ ++ " -key " ++ KeyFile ++ " -tls1 -msg",
+
+ test_server:format("openssl cmd: ~p~n", [Cmd]),
+
+ OpenSslPort = open_port({spawn, Cmd}, [stderr_to_stdout]),
+ port_command(OpenSslPort, Data),
+
+ ssl_test_lib:check_result(Server, ok),
+
+ ssl_test_lib:close(Server),
+
+ close_port(OpenSslPort),
+ process_flag(trap_exit, false),
+ ok.
+%%--------------------------------------------------------------------
+
erlang_server_openssl_client_reuse_session(doc) ->
["Test erlang server with openssl client that reconnects with the"
"same session id, to test reusing of sessions."];
@@ -863,19 +974,46 @@ tls1_erlang_server_erlang_client_client_cert(Config) when is_list(Config) ->
ok.
%%--------------------------------------------------------------------
-ciphers(doc) ->
- [""];
+ciphers_rsa_signed_certs(doc) ->
+    ["Test cipher suites that use rsa certs"];
-ciphers(suite) ->
+ciphers_rsa_signed_certs(suite) ->
[];
-ciphers(Config) when is_list(Config) ->
+ciphers_rsa_signed_certs(Config) when is_list(Config) ->
Version =
ssl_record:protocol_version(ssl_record:highest_protocol_version([])),
- Ciphers = ssl:cipher_suites(),
+ Ciphers = ssl_test_lib:rsa_suites(),
+ run_suites(Ciphers, Version, Config, rsa).
+
+
+ciphers_dsa_signed_certs(doc) ->
+    ["Test cipher suites that use dsa certs"];
+
+ciphers_dsa_signed_certs(suite) ->
+ [];
+
+ciphers_dsa_signed_certs(Config) when is_list(Config) ->
+ Version =
+ ssl_record:protocol_version(ssl_record:highest_protocol_version([])),
+
+ Ciphers = ssl_test_lib:dsa_suites(),
+ run_suites(Ciphers, Version, Config, dsa).
+
+run_suites(Ciphers, Version, Config, Type) ->
+ {ClientOpts, ServerOpts} =
+ case Type of
+ rsa ->
+ {?config(client_opts, Config),
+ ?config(server_opts, Config)};
+ dsa ->
+ {?config(client_opts, Config),
+ ?config(server_dsa_opts, Config)}
+ end,
+
Result = lists:map(fun(Cipher) ->
- cipher(Cipher, Version, Config) end,
+ cipher(Cipher, Version, Config, ClientOpts, ServerOpts) end,
Ciphers),
case lists:flatten(Result) of
[] ->
@@ -884,12 +1022,12 @@ ciphers(Config) when is_list(Config) ->
test_server:format("Cipher suite errors: ~p~n", [Error]),
test_server:fail(cipher_suite_failed_see_test_case_log)
end.
-
-cipher(CipherSuite, Version, Config) ->
+
+
+
+cipher(CipherSuite, Version, Config, ClientOpts, ServerOpts) ->
process_flag(trap_exit, true),
test_server:format("Testing CipherSuite ~p~n", [CipherSuite]),
- ClientOpts = ?config(client_opts, Config),
- ServerOpts = ?config(server_opts, Config),
{ClientNode, _ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Port = ssl_test_lib:inet_port(node()),
@@ -991,6 +1129,100 @@ erlang_client_bad_openssl_server(Config) when is_list(Config) ->
close_port(OpensslPort),
process_flag(trap_exit, false),
ok.
+
+%%--------------------------------------------------------------------
+
+expired_session(doc) ->
+ ["Test our ssl client handling of expired sessions. Will make"
+     " better code coverage of the ssl_manager module"];
+
+expired_session(suite) ->
+ [];
+
+expired_session(Config) when is_list(Config) ->
+ process_flag(trap_exit, true),
+ ClientOpts = ?config(client_opts, Config),
+ ServerOpts = ?config(server_opts, Config),
+ {ClientNode, _, Hostname} = ssl_test_lib:run_where(Config),
+
+ Port = ssl_test_lib:inet_port(node()),
+ CertFile = proplists:get_value(certfile, ServerOpts),
+ KeyFile = proplists:get_value(keyfile, ServerOpts),
+
+ Cmd = "openssl s_server -accept " ++ integer_to_list(Port) ++
+ " -cert " ++ CertFile ++ " -key " ++ KeyFile ++ "",
+
+ test_server:format("openssl cmd: ~p~n", [Cmd]),
+
+ OpensslPort = open_port({spawn, Cmd}, [stderr_to_stdout]),
+
+ wait_for_openssl_server(),
+
+ Client0 =
+ ssl_test_lib:start_client([{node, ClientNode},
+ {port, Port}, {host, Hostname},
+ {mfa, {ssl_test_lib, no_result, []}},
+ {from, self()}, {options, ClientOpts}]),
+
+ ssl_test_lib:close(Client0),
+
+ %% Make sure session is registered
+ test_server:sleep(?SLEEP),
+
+ Client1 =
+ ssl_test_lib:start_client([{node, ClientNode},
+ {port, Port}, {host, Hostname},
+ {mfa, {ssl_test_lib, no_result, []}},
+ {from, self()}, {options, ClientOpts}]),
+
+ ssl_test_lib:close(Client1),
+ %% Make sure session is unregistered due to expiration
+ test_server:sleep((?EXPIRE+1) * 1000),
+
+ Client2 =
+ ssl_test_lib:start_client([{node, ClientNode},
+ {port, Port}, {host, Hostname},
+ {mfa, {ssl_test_lib, no_result, []}},
+ {from, self()}, {options, ClientOpts}]),
+
+ close_port(OpensslPort),
+ ssl_test_lib:close(Client2),
+ process_flag(trap_exit, false).
+
+%%--------------------------------------------------------------------
+ssl2_erlang_server_openssl_client(doc) ->
+ ["Test that ssl v2 clients are rejected"];
+ssl2_erlang_server_openssl_client(suite) ->
+ [];
+ssl2_erlang_server_openssl_client(Config) when is_list(Config) ->
+ process_flag(trap_exit, true),
+ ServerOpts = ?config(server_opts, Config),
+
+ {_, ServerNode, _} = ssl_test_lib:run_where(Config),
+
+ Data = "From openssl to erlang",
+
+ Server = ssl_test_lib:start_server_error([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {options, ServerOpts}]),
+ Port = ssl_test_lib:inet_port(Server),
+
+ Cmd = "openssl s_client -port " ++ integer_to_list(Port) ++
+ " -host localhost -ssl2 -msg",
+
+ test_server:format("openssl cmd: ~p~n", [Cmd]),
+
+ OpenSslPort = open_port({spawn, Cmd}, [stderr_to_stdout]),
+ port_command(OpenSslPort, Data),
+
+ ssl_test_lib:check_result(Server, {error,"protocol version"}),
+
+ ssl_test_lib:close(Server),
+
+ close_port(OpenSslPort),
+ process_flag(trap_exit, false),
+ ok.
+
%%--------------------------------------------------------------------
erlang_ssl_receive(Socket, Data) ->
@@ -1030,8 +1262,7 @@ delayed_send(Socket, [ErlData, OpenSslData]) ->
erlang_ssl_receive(Socket, OpenSslData).
close_port(Port) ->
- port_command(Port, ?OPENSSL_QUIT),
- %%catch port_command(Port, "quit\n"),
+ catch port_command(Port, ?OPENSSL_QUIT),
close_loop(Port, 500, false).
close_loop(Port, Time, SentClose) ->
diff --git a/lib/ssl/vsn.mk b/lib/ssl/vsn.mk
index e3db7008e3..813ce91e32 100644
--- a/lib/ssl/vsn.mk
+++ b/lib/ssl/vsn.mk
@@ -19,9 +19,11 @@
SSL_VSN = 3.11.1
-TICKETS = OTP-8588 \
+TICKETS = OTP-8679 \
+ OTP-7047 \
+ OTP-7049 \
OTP-8568 \
- OTP-7049
+ OTP-8588
#TICKETS_3.11 = OTP-8517 \
# OTP-7046 \
diff --git a/lib/stdlib/doc/src/Makefile b/lib/stdlib/doc/src/Makefile
index 13b9b2ff18..b558697d63 100644
--- a/lib/stdlib/doc/src/Makefile
+++ b/lib/stdlib/doc/src/Makefile
@@ -1,19 +1,19 @@
#
# %CopyrightBegin%
-#
-# Copyright Ericsson AB 1997-2009. All Rights Reserved.
-#
+#
+# Copyright Ericsson AB 1997-2010. All Rights Reserved.
+#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
# compliance with the License. You should have received a copy of the
# Erlang Public License along with this software. If not, it can be
# retrieved online at http://www.erlang.org/.
-#
+#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
# the License for the specific language governing rights and limitations
# under the License.
-#
+#
# %CopyrightEnd%
#
include $(ERL_TOP)/make/target.mk
@@ -40,6 +40,7 @@ XML_REF3_FILES = \
array.xml \
base64.xml \
beam_lib.xml \
+ binary.xml \
c.xml \
calendar.xml \
dets.xml \
diff --git a/lib/stdlib/doc/src/binary.xml b/lib/stdlib/doc/src/binary.xml
new file mode 100644
index 0000000000..c5eb81a86a
--- /dev/null
+++ b/lib/stdlib/doc/src/binary.xml
@@ -0,0 +1,729 @@
+<?xml version="1.0" encoding="latin1" ?>
+<!DOCTYPE erlref SYSTEM "erlref.dtd">
+
+<erlref>
+ <header>
+ <copyright>
+ <year>2009</year>
+ <year>2010</year>
+ <holder>Ericsson AB, All Rights Reserved</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ The Initial Developer of the Original Code is Ericsson AB.
+ </legalnotice>
+
+ <title>binary</title>
+ <prepared>Patrik Nyblom</prepared>
+ <responsible>Kenneth Lundin</responsible>
+ <docno>1</docno>
+ <approved></approved>
+ <checked></checked>
+ <date>2010-05-05</date>
+ <rev>A</rev>
+ <file>binary.xml</file>
+ </header>
+ <module>binary</module>
+ <modulesummary>Library for handling binary data</modulesummary>
+ <description>
+
+ <p>This module contains functions for manipulating byte-oriented
+ binaries. Although the majority of functions could be implemented
+ using bit-syntax, the functions in this library are highly
+ optimized and are expected to either execute faster or consume
+ less memory (or both) than a counterpart written in pure Erlang.</p>
+
+ <p>The module is implemented according to the EEP (Erlang Enhancement Proposal) 31.</p>
+
+ <note>
+ <p>
+ The library handles byte-oriented data. Bitstrings that are not
+ binaries (that is, do not contain a whole number of octets) will
+ result in a <c>badarg</c> exception being raised by any of the
+ functions in this module.
+ </p>
+ </note>
+
+
+ </description>
+ <section>
+ <title>DATA TYPES</title>
+ <code type="none">
+ cp()
+ - Opaque data type representing a compiled search pattern. Guaranteed to be a tuple()
+ to allow programs to distinguish it from non-precompiled search patterns.
+ </code>
+ <code type="none">
+ part() = {Start,Length}
+ Start = int()
+ Length = int()
+ - A representation of a part (or range) in a binary. Start is a
+ zero-based offset into a binary() and Length is the length of
+ that part. As input to functions in this module, a reverse
+ part specification is allowed, constructed with a negative
+ Length, so that the part of the binary begins at Start +
+ Length and is -Length long. This is useful for referencing the
+ last N bytes of a binary as {size(Binary), -N}. The functions
+ in this module always return part()s with a positive Length.
+ </code>
+ </section>
+ <funcs>
+ <func>
+ <name>at(Subject, Pos) -> int()</name>
+ <fsummary>Returns the byte at a specific position in a binary</fsummary>
+ <type>
+ <v>Subject = binary()</v>
+ <v>Pos = int() >= 0</v>
+ </type>
+ <desc>
+
+ <p>Returns the byte at position <c>Pos</c> (zero-based) in the binary
+ <c>Subject</c> as an integer. If <c>Pos</c> &gt;= <c>byte_size(Subject)</c>,
+ a <c>badarg</c>
+ exception is raised.</p>
+
+ </desc>
+ </func>
+ <func>
+ <name>bin_to_list(Subject) -> list()</name>
+ <fsummary>Convert a binary to a list of integers</fsummary>
+ <type>
+ <v>Subject = binary()</v>
+ </type>
+ <desc>
+ <p>The same as <c>bin_to_list(Subject,{0,byte_size(Subject)})</c>.</p>
+ </desc>
+ </func>
+ <func>
+ <name>bin_to_list(Subject, PosLen) -> list()</name>
+ <fsummary>Convert a binary to a list of integers</fsummary>
+ <type>
+ <v>Subject = binary()</v>
+ <v>PosLen = part()</v>
+ </type>
+ <desc>
+
+ <p>Converts <c>Subject</c> to a list of <c>int()</c>s, each representing
+ the value of one byte. The <c>part()</c> denotes which part of the
+ <c>binary()</c> to convert. Example:</p>
+
+<code>
+1> binary:bin_to_list(&lt;&lt;"erlang"&gt;&gt;,{1,3}).
+"rla"
+%% or [114,108,97] in list notation.
+</code>
+ <p>If <c>PosLen</c> in any way references outside the binary, a <c>badarg</c> exception is raised.</p>
+ </desc>
+ </func>
+ <func>
+ <name>bin_to_list(Subject, Pos, Len) -> list()</name>
+ <fsummary>Convert a binary to a list of integers</fsummary>
+ <type>
+ <v>Subject = binary()</v>
+ <v>Pos = int()</v>
+ <v>Len = int()</v>
+ </type>
+ <desc>
+ <p>The same as<c> bin_to_list(Subject,{Pos,Len})</c>.</p>
+ </desc>
+ </func>
+ <func>
+ <name>compile_pattern(Pattern) -> cp()</name>
+ <fsummary>Pre-compiles a binary search pattern</fsummary>
+ <type>
+ <v>Pattern = binary() | [ binary() ]</v>
+ </type>
+ <desc>
+
+ <p>Builds an internal structure representing a compilation of a
+ search-pattern, later to be used in the <seealso marker="#match-3">match/3</seealso>,
+ <seealso marker="#matches-3">matches/3</seealso>,
+ <seealso marker="#split-3">split/3</seealso> or
+ <seealso marker="#replace-4">replace/4</seealso>
+ functions. The <c>cp()</c> returned is guaranteed to be a
+ <c>tuple()</c> to allow programs to distinguish it from
+ non-precompiled search patterns.</p>
+
+ <p>When a list of binaries is given, it denotes a set of
+ alternative binaries to search for. That is, if
+ <c>[&lt;&lt;"functional"&gt;&gt;,&lt;&lt;"programming"&gt;&gt;]</c>
+ is given as <c>Pattern</c>, this
+ means "either <c>&lt;&lt;"functional"&gt;&gt;</c> or
+ <c>&lt;&lt;"programming"&gt;&gt;</c>". The pattern is a set of
+ alternatives; when only a single binary is given, the set has
+ only one element. The order of alternatives in a pattern is not significant.</p>
+
+ <p>The list of binaries used for search alternatives shall be flat and proper.</p>
+
+ <p>If <c>Pattern</c> is not a binary or a flat proper list of binaries with length &gt; 0,
+ a <c>badarg</c> exception will be raised.</p>
+
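+ <p>A small usage sketch (added for illustration; the function and
+ pattern below are made-up examples):</p>
+
+<code>
+find_first(Subject) ->
+    Cp = binary:compile_pattern([&lt;&lt;"cd"&gt;&gt;,&lt;&lt;"xy"&gt;&gt;]),
+    %% The compiled pattern can be reused for any number of searches.
+    binary:match(Subject, Cp).
+
+%% find_first(&lt;&lt;"abcde"&gt;&gt;) returns {2,2}
+</code>
+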
+ </desc>
+ </func>
+ <func>
+ <name>copy(Subject) -> binary()</name>
+ <fsummary>Creates a duplicate of a binary</fsummary>
+ <type>
+ <v>Subject = binary()</v>
+ </type>
+ <desc>
+ <p>The same as <c>copy(Subject, 1)</c>.</p>
+ </desc>
+ </func>
+ <func>
+ <name>copy(Subject,N) -> binary()</name>
+ <fsummary>Duplicates a binary N times and creates a new</fsummary>
+ <type>
+ <v>Subject = binary()</v>
+ <v>N = int() >= 0</v>
+ </type>
+ <desc>
+ <p>Creates a binary with the content of <c>Subject</c> duplicated <c>N</c> times.</p>
+
+ <p>This function will always create a new binary, even if <c>N =
+ 1</c>. By using <c>copy/1</c> on a binary referencing a larger binary, one
+ might free up the larger binary for garbage collection.</p>
+
+ <note>
+ <p>By deliberately copying a single binary to avoid referencing
+ a larger binary, one might, instead of freeing up the larger
+ binary for later garbage collection, create much more binary
+ data than needed. Sharing binary data is usually good. Only in
+ special cases, when small parts reference large binaries and the
+ large binaries are no longer used in any process, deliberate
+ copying might be a good idea.</p> </note>
+
+ <p>If <c>N</c> &lt; <c>0</c>, a <c>badarg</c> exception is raised.</p>
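+
+ <p>A minimal sketch of the situation described in the note above,
+ where a small binary taken out of a large one is deliberately
+ copied so the large binary can be garbage collected (the function
+ name is hypothetical):</p>
+
+<code>
+keep_header(LargeBin) ->
+    &lt;&lt;Header:10/binary, _/binary&gt;&gt; = LargeBin,
+    %% The copy no longer references LargeBin.
+    binary:copy(Header).
+</code>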
+ </desc>
+ </func>
+ <func>
+ <name>decode_unsigned(Subject) -> Unsigned</name>
+ <fsummary>Decode a whole binary into an integer of arbitrary size</fsummary>
+ <type>
+ <v>Subject = binary()</v>
+ <v>Unsigned = int() >= 0</v>
+ </type>
+ <desc>
+ <p>The same as <c>decode_unsigned(Subject,big)</c>.</p>
+ </desc>
+ </func>
+ <func>
+ <name>decode_unsigned(Subject, Endianness) -> Unsigned</name>
+ <fsummary>Decode a whole binary into an integer of arbitrary size</fsummary>
+ <type>
+ <v>Subject = binary()</v>
+ <v>Endianness = big | little</v>
+ <v>Unsigned = int() >= 0</v>
+ </type>
+ <desc>
+
+ <p>Converts the binary digit representation of a positive integer in
+ <c>Subject</c>, in big or little endian byte order, to an Erlang <c>int()</c>.</p>
+
+ <p>Example:</p>
+
+ <code>
+1> binary:decode_unsigned(&lt;&lt;169,138,199&gt;&gt;,big).
+11111111
+ </code>
+ </desc>
+ </func>
+ <func>
+ <name>encode_unsigned(Unsigned) -> binary()</name>
+ <fsummary>Encodes an unsigned integer into the minimal binary</fsummary>
+ <type>
+ <v>Unsigned = int() >= 0</v>
+ </type>
+ <desc>
+ <p>The same as <c>encode_unsigned(Unsigned,big)</c>.</p>
+ </desc>
+ </func>
+ <func>
+ <name>encode_unsigned(Unsigned,Endianness) -> binary()</name>
+ <fsummary>Encodes an unsigned integer into the minimal binary</fsummary>
+ <type>
+ <v>Unsigned = int() >= 0</v>
+ <v>Endianness = big | little</v>
+ </type>
+ <desc>
+
+ <p>Converts a positive integer to its smallest possible binary
+ digit representation, in either big or little endian byte
+ order.</p>
+
+ <p>Example:</p>
+
+ <code>
+1> binary:encode_unsigned(11111111,big).
+&lt;&lt;169,138,199&gt;&gt;
+ </code>
+ </desc>
+ </func>
+ <func>
+ <name>first(Subject) -> int()</name>
+ <fsummary>Returns the first byte of a binary</fsummary>
+ <type>
+ <v>Subject = binary()</v>
+ </type>
+ <desc>
+
+ <p>Returns the first byte of the binary <c>Subject</c> as an integer. If the
+ size of <c>Subject</c> is zero, a <c>badarg</c> exception is raised.</p>
+
+ </desc>
+ </func>
+ <func>
+ <name>last(Subject) -> int()</name>
+ <fsummary>Returns the last byte of a binary</fsummary>
+ <type>
+ <v>Subject = binary()</v>
+ </type>
+ <desc>
+
+ <p>Returns the last byte of the binary <c>Subject</c> as an integer. If the
+ size of <c>Subject</c> is zero, a <c>badarg</c> exception is raised.</p>
+
+ </desc>
+ </func>
+ <func>
+ <name>list_to_bin(ByteList) -> binary()</name>
+ <fsummary>Convert a list of integers and binaries to a binary</fsummary>
+ <type>
+ <v>ByteList = iodata() (see module erlang)</v>
+ </type>
+ <desc>
+ <p>Works exactly as <c>erlang:list_to_binary/1</c>, added for completeness.</p>
+ </desc>
+ </func>
+ <func>
+ <name>longest_common_prefix(Binaries) -> int()</name>
+ <fsummary>Returns length of longest common prefix for a set of binaries</fsummary>
+ <type>
+ <v>Binaries = [ binary() ]</v>
+ </type>
+ <desc>
+
+ <p>Returns the length of the longest common prefix of the
+ binaries in the list <c>Binaries</c>. Example:</p>
+
+<code>
+1> binary:longest_common_prefix([&lt;&lt;"erlang"&gt;&gt;,&lt;&lt;"ergonomy"&gt;&gt;]).
+2
+2> binary:longest_common_prefix([&lt;&lt;"erlang"&gt;&gt;,&lt;&lt;"perl"&gt;&gt;]).
+0
+</code>
+
+ <p>If <c>Binaries</c> is not a flat list of binaries, a <c>badarg</c> exception is raised.</p>
+ </desc>
+ </func>
+ <func>
+ <name>longest_common_suffix(Binaries) -> int()</name>
+ <fsummary>Returns length of longest common suffix for a set of binaries</fsummary>
+ <type>
+ <v>Binaries = [ binary() ]</v>
+ </type>
+ <desc>
+
+ <p>Returns the length of the longest common suffix of the
+ binaries in the list <c>Binaries</c>. Example:</p>
+
+<code>
+1> binary:longest_common_suffix([&lt;&lt;"erlang"&gt;&gt;,&lt;&lt;"fang"&gt;&gt;]).
+3
+2> binary:longest_common_suffix([&lt;&lt;"erlang"&gt;&gt;,&lt;&lt;"perl"&gt;&gt;]).
+0
+</code>
+
+ <p>If <c>Binaries</c> is not a flat list of binaries, a <c>badarg</c> exception is raised.</p>
+
+ </desc>
+ </func>
+ <func>
+ <name>match(Subject, Pattern) -> Found | <c>nomatch</c></name>
+ <fsummary>Searches for the first match of a pattern in a binary</fsummary>
+ <type>
+ <v>Subject = binary()</v>
+ <v>Pattern = binary() | [ binary() ] | cp()</v>
+ <v>Found = part()</v>
+ </type>
+ <desc>
+ <p>The same as <c>match(Subject, Pattern, [])</c>.</p>
+ </desc>
+ </func>
+ <func>
+ <name>match(Subject,Pattern,Options) -> Found | <c>nomatch</c></name>
+ <fsummary>Searches for the first match of a pattern in a binary</fsummary>
+ <type>
+ <v>Subject = binary()</v>
+ <v>Pattern = binary() | [ binary() ] | cp()</v>
+ <v>Found = part()</v>
+ <v>Options = [ Option ]</v>
+ <v>Option = {scope, part()}</v>
+ </type>
+ <desc>
+
+ <p>Searches for the first occurrence of <c>Pattern</c> in <c>Subject</c> and
+ returns the position and length.</p>
+
+ <p>The function will return <c>{Pos,Length}</c> for the binary
+ in <c>Pattern</c> starting at the lowest position in
+ <c>Subject</c>. Example:</p>
+
+<code>
+1> binary:match(&lt;&lt;"abcde"&gt;&gt;, [&lt;&lt;"bcde"&gt;&gt;,&lt;&lt;"cd"&gt;&gt;],[]).
+{1,4}
+</code>
+
+ <p>Even though <c>&lt;&lt;"cd"&gt;&gt;</c> ends before
+ <c>&lt;&lt;"bcde"&gt;&gt;</c>, <c>&lt;&lt;"bcde"&gt;&gt;</c>
+ begins first and is therefore the first match. If two
+ overlapping matches begin at the same position, the longest is
+ returned.</p>
+
+ <p>Summary of the options:</p>
+
+ <taglist>
+ <tag>{scope, {Start, Length}}</tag>
+ <item><p>Only the given part is searched. Return values still have
+ offsets from the beginning of <c>Subject</c>. A negative <c>Length</c> is
+ allowed as described in the <c>DATA TYPES</c> section of this manual.</p></item>
+ </taglist>
+
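+ <p>As an added illustration, a scope restricts where a match may be
+ found, while returned offsets still count from the beginning of
+ <c>Subject</c>:</p>
+
+<code>
+1> binary:match(&lt;&lt;"abcde"&gt;&gt;,&lt;&lt;"cd"&gt;&gt;,[{scope,{1,3}}]).
+{2,2}
+2> binary:match(&lt;&lt;"abcde"&gt;&gt;,&lt;&lt;"cd"&gt;&gt;,[{scope,{0,2}}]).
+nomatch
+</code>
+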
+ <p>If none of the strings in
+ <c>Pattern</c> is found, the atom <c>nomatch</c> is returned.</p>
+
+ <p>For a description of <c>Pattern</c>, see
+ <seealso marker="#compile_pattern-1">compile_pattern/1</seealso>.</p>
+
+ <p>If <c>{scope, {Start,Length}}</c> is given in the options
+ such that <c>Start</c> is larger than the size of
+ <c>Subject</c>, <c>Start + Length</c> is less than zero or
+ <c>Start + Length</c> is larger than the size of
+ <c>Subject</c>, a <c>badarg</c> exception is raised.</p>
+
+ </desc>
+ </func>
+ <func>
+ <name>matches(Subject, Pattern) -> Found</name>
+ <fsummary>Searches for all matches of a pattern in a binary</fsummary>
+ <type>
+ <v>Subject = binary()</v>
+ <v>Pattern = binary() | [ binary() ] | cp()</v>
+ <v>Found = [ part() ] | []</v>
+ </type>
+ <desc>
+ <p>The same as <c>matches(Subject, Pattern, [])</c>.</p>
+ </desc>
+ </func>
+ <func>
+ <name>matches(Subject,Pattern,Options) -> Found</name>
+ <fsummary>Searches for all matches of a pattern in a binary</fsummary>
+ <type>
+ <v>Subject = binary()</v>
+ <v>Pattern = binary() | [ binary() ] | cp()</v>
+ <v>Found = [ part() ] | []</v>
+ <v>Options = [ Option ]</v>
+ <v>Option = {scope, part()}</v>
+ </type>
+ <desc>
+
+ <p>Works like <c>match/3</c>, but the <c>Subject</c> is searched until
+ exhausted and a list of all non-overlapping parts matching
+ <c>Pattern</c> is returned (in order). </p>
+
+ <p>The first and longest match is preferred to a shorter one,
+ as illustrated by the following example:</p>
+
+<code>
+1> binary:matches(&lt;&lt;"abcde"&gt;&gt;,
+ [&lt;&lt;"bcde"&gt;&gt;,&lt;&lt;"bc"&gt;&gt;,&lt;&lt;"de"&gt;&gt;],[]).
+[{1,4}]
+</code>
+
+ <p>The result shows that &lt;&lt;"bcde"&gt;&gt; is selected instead of the
+ shorter match &lt;&lt;"bc"&gt;&gt; (which would have given rise to one
+ more match, &lt;&lt;"de"&gt;&gt;). This corresponds to the behavior of POSIX
+ regular expressions (and programs like awk), but is not
+ consistent with alternative matches in re (and Perl), where
+ lexical ordering in the search pattern instead selects which
+ string matches.</p>
+
+ <p>If none of the strings in <c>Pattern</c> is found, an empty list is returned.</p>
+
+ <p>For a description of <c>Pattern</c>, see <seealso marker="#compile_pattern-1">compile_pattern/1</seealso> and for a
+ description of available options, see <seealso marker="#match-3">match/3</seealso>.</p>
+
+ <p>If <c>{scope, {Start,Length}}</c> is given in the options such that
+ <c>Start</c> is larger than the size of <c>Subject</c>, <c>Start + Length</c> is
+ less than zero or <c>Start + Length</c> is larger than the size of
+ <c>Subject</c>, a <c>badarg</c> exception is raised.</p>
+
+ </desc>
+ </func>
+ <func>
+ <name>part(Subject, PosLen) -> binary()</name>
+ <fsummary>Extracts a part of a binary</fsummary>
+ <type>
+ <v>Subject = binary()</v>
+ <v>PosLen = part()</v>
+ </type>
+ <desc>
+
+ <p>Extracts the part of the binary <c>Subject</c> described by <c>PosLen</c>.</p>
+
+ <p>Negative length can be used to extract bytes at the end of a binary:</p>
+
+<code>
+1> Bin = &lt;&lt;1,2,3,4,5,6,7,8,9,10&gt;&gt;.
+2> binary:part(Bin,{byte_size(Bin), -5}).
+&lt;&lt;6,7,8,9,10&gt;&gt;
+</code>
+
+ <note>
+ <p><seealso marker="#part-2">part/2</seealso> and <seealso
+ marker="#part-3">part/3</seealso> are also available in the
+ <c>erlang</c> module under the names <c>binary_part/2</c> and
+ <c>binary_part/3</c>. Those BIFs are allowed in guard tests.</p>
+ </note>
+
+ <p>If <c>PosLen</c> in any way references outside the binary, a <c>badarg</c> exception
+ is raised.</p>
+
+ </desc>
+ </func>
+ <func>
+ <name>part(Subject, Pos, Len) -> binary()</name>
+ <fsummary>Extracts a part of a binary</fsummary>
+ <type>
+ <v>Subject = binary()</v>
+ <v>Pos = int()</v>
+ <v>Len = int()</v>
+ </type>
+ <desc>
+ <p>The same as <c>part(Subject, {Pos, Len})</c>.</p>
+ </desc>
+ </func>
+ <func>
+ <name>referenced_byte_size(binary()) -> int()</name>
+ <fsummary>Determines the size of the actual binary pointed out by a sub-binary</fsummary>
+ <desc>
+
+ <p>If a binary references a larger binary (often described as
+ being a sub-binary), it can be useful to get the size of the
+ actual referenced binary. This function can be used in a program
+ to trigger the use of <c>copy/1</c>. By copying a binary, one can
+ release the reference to the original, possibly large, binary
+ that a smaller binary would otherwise keep alive.</p>
+
+ <p>Example:</p>
+
+ <code>
+store(Binary, GBSet) ->
+ NewBin =
+ case binary:referenced_byte_size(Binary) of
+ Large when Large > 2 * byte_size(Binary) ->
+ binary:copy(Binary);
+ _ ->
+ Binary
+ end,
+ gb_sets:insert(NewBin,GBSet).
+ </code>
+
+ <p>In this example, we chose to copy the binary content before
+ inserting it in the <c>gb_set()</c> if it references a binary more than
+ twice the size of the data we are going to keep. Different
+ programs will, of course, apply different rules for when to
+ copy.</p>
+
+ <p>Binary sharing occurs whenever binaries are taken apart.
+ This is the fundamental reason why binaries are fast:
+ decomposition can always be done with O(1) complexity. In rare
+ circumstances this data sharing is however undesirable, which is
+ why this function, together with <c>copy/1</c>, might be useful when
+ optimizing for memory use.</p>
+
+ <p>Example of binary sharing:</p>
+
+ <code>
+1> A = binary:copy(&lt;&lt;1&gt;&gt;,100).
+&lt;&lt;1,1,1,1,1 ...
+2> byte_size(A).
+100
+3> binary:referenced_byte_size(A).
+100
+4> &lt;&lt;_:10/binary,B:10/binary,_/binary&gt;&gt; = A.
+&lt;&lt;1,1,1,1,1 ...
+5> byte_size(B).
+10
+6> binary:referenced_byte_size(B).
+100
+ </code>
+
+ <note>
+ <p>Binary data is shared among processes. If another process
+ still references the larger binary, copying the part this
+ process uses only consumes more memory and will not free up the
+ larger binary for garbage collection. Use this kind of intrusive
+ larger binary for garbage collection. Use this kind of intrusive
+ function with extreme care, and only if a real problem is
+ </note>
+
+ </desc>
+ </func>
+ <func>
+ <name>replace(Subject,Pattern,Replacement) -> Result</name>
+ <fsummary>Replaces bytes in a binary according to a pattern</fsummary>
+ <type>
+ <v>Subject = binary()</v>
+ <v>Pattern = binary() | [ binary() ] | cp()</v>
+ <v>Replacement = binary()</v>
+ <v>Result = binary()</v>
+ </type>
+ <desc>
+ <p>The same as <c>replace(Subject,Pattern,Replacement,[])</c>.</p>
+ </desc>
+ </func>
+ <func>
+ <name>replace(Subject,Pattern,Replacement,Options) -> Result</name>
+ <fsummary>Replaces bytes in a binary according to a pattern</fsummary>
+ <type>
+ <v>Subject = binary()</v>
+ <v>Pattern = binary() | [ binary() ] | cp()</v>
+ <v>Replacement = binary()</v>
+ <v>Result = binary()</v>
+ <v>Options = [ Option ]</v>
+ <v>Option = global | {scope, part()} | {insert_replaced, InsPos}</v>
+ <v>InsPos = OnePos | [ OnePos ]</v>
+ <v>OnePos = int() =&lt; byte_size(Replacement)</v>
+ </type>
+ <desc>
+
+ <p>Constructs a new binary by replacing the parts in
+ <c>Subject</c> matching <c>Pattern</c> with the content of
+ <c>Replacement</c>.</p>
+
+ <p>If the matching sub-part of <c>Subject</c> giving rise to the
+ replacement is to be inserted in the result, the option
+ <c>{insert_replaced, InsPos}</c> will insert the matching part into
+ <c>Replacement</c> at the given position (or positions) before actually
+ inserting <c>Replacement</c> into the <c>Subject</c>. Example:</p>
+
+<code>
+1> binary:replace(&lt;&lt;"abcde"&gt;&gt;,&lt;&lt;"b"&gt;&gt;,&lt;&lt;"[]"&gt;&gt;,[{insert_replaced,1}]).
+&lt;&lt;"a[b]cde"&gt;&gt;
+2> binary:replace(&lt;&lt;"abcde"&gt;&gt;,[&lt;&lt;"b"&gt;&gt;,&lt;&lt;"d"&gt;&gt;],&lt;&lt;"[]"&gt;&gt;,
+ [global,{insert_replaced,1}]).
+&lt;&lt;"a[b]c[d]e"&gt;&gt;
+3> binary:replace(&lt;&lt;"abcde"&gt;&gt;,[&lt;&lt;"b"&gt;&gt;,&lt;&lt;"d"&gt;&gt;],&lt;&lt;"[]"&gt;&gt;,
+ [global,{insert_replaced,[1,1]}]).
+&lt;&lt;"a[bb]c[dd]e"&gt;&gt;
+4> binary:replace(&lt;&lt;"abcde"&gt;&gt;,[&lt;&lt;"b"&gt;&gt;,&lt;&lt;"d"&gt;&gt;],&lt;&lt;"[-]"&gt;&gt;,
+ [global,{insert_replaced,[1,2]}]).
+&lt;&lt;"a[b-b]c[d-d]e"&gt;&gt;
+</code>
+
+ <p>If any position given in <c>InsPos</c> is greater than the size of the replacement binary, a <c>badarg</c> exception is raised.</p>
+
+ <p>The options <c>global</c> and <c>{scope, part()}</c> work as for <seealso marker="#split-3">split/3</seealso>. The return type is always a <c>binary()</c>.</p>
+
+ <p>For a description of <c>Pattern</c>, see <seealso marker="#compile_pattern-1">compile_pattern/1</seealso>.</p>
+ </desc>
+ </func>
+ <func>
+ <name>split(Subject,Pattern) -> Parts</name>
+ <fsummary>Splits a binary according to a pattern</fsummary>
+ <type>
+ <v>Subject = binary()</v>
+ <v>Pattern = binary() | [ binary() ] | cp()</v>
+ <v>Parts = [ binary() ]</v>
+ </type>
+ <desc>
+ <p>The same as <c>split(Subject, Pattern, [])</c>.</p>
+ </desc>
+ </func>
+ <func>
+ <name>split(Subject,Pattern,Options) -> Parts</name>
+ <fsummary>Splits a binary according to a pattern</fsummary>
+ <type>
+ <v>Subject = binary()</v>
+ <v>Pattern = binary() | [ binary() ] | cp()</v>
+ <v>Parts = [ binary() ]</v>
+ <v>Options = [ Option ]</v>
+ <v>Option = {scope, part()} | trim | global</v>
+ </type>
+ <desc>
+
+ <p>Splits <c>Subject</c> into a list of binaries based on <c>Pattern</c>. If
+ the option <c>global</c> is not given, only the first occurrence of
+ <c>Pattern</c> in <c>Subject</c> will give rise to a split.</p>
+
+ <p>The parts of <c>Pattern</c> actually found in <c>Subject</c> are not included in the result.</p>
+
+ <p>Example:</p>
+
+<code>
+1> binary:split(&lt;&lt;1,255,4,0,0,0,2,3&gt;&gt;, [&lt;&lt;0,0,0&gt;&gt;,&lt;&lt;2&gt;&gt;],[]).
+[&lt;&lt;1,255,4&gt;&gt;, &lt;&lt;2,3&gt;&gt;]
+2> binary:split(&lt;&lt;0,1,0,0,4,255,255,9&gt;&gt;, [&lt;&lt;0,0&gt;&gt;, &lt;&lt;255,255&gt;&gt;],[global]).
+[&lt;&lt;0,1&gt;&gt;,&lt;&lt;4&gt;&gt;,&lt;&lt;9&gt;&gt;]
+</code>
+
+ <p>Summary of options:</p>
+ <taglist>
+
+ <tag>{scope, part()}</tag>
+
+ <item><p>Works as in <seealso marker="#match-3">match/3</seealso> and
+ <seealso marker="#matches-3">matches/3</seealso>. Note that
+ this only defines the scope of the search for matching strings;
+ it does not cut the binary before splitting. The bytes before
+ and after the scope will be kept in the result. See example
+ below.</p></item>
+
+ <tag>trim</tag>
+
+ <item><p>Removes trailing empty parts of the result (as does trim in <c>re:split/3</c>)</p></item>
+
+ <tag>global</tag>
+
+ <item><p>Repeats the split until the <c>Subject</c> is
+ exhausted. Conceptually the global option makes split work on
+ the positions returned by <seealso marker="#matches-3">matches/3</seealso>,
+ while it normally
+ works on the position returned by
+ <seealso marker="#match-3">match/3</seealso>.</p></item>
+
+ </taglist>
+
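+ <p>For instance (an added example), <c>trim</c> removes the trailing
+ empty part produced when the subject ends with a match:</p>
+
+<code>
+1> binary:split(&lt;&lt;"banana"&gt;&gt;,[&lt;&lt;"a"&gt;&gt;],[global]).
+[&lt;&lt;"b"&gt;&gt;,&lt;&lt;"n"&gt;&gt;,&lt;&lt;"n"&gt;&gt;,&lt;&lt;&gt;&gt;]
+2> binary:split(&lt;&lt;"banana"&gt;&gt;,[&lt;&lt;"a"&gt;&gt;],[global,trim]).
+[&lt;&lt;"b"&gt;&gt;,&lt;&lt;"n"&gt;&gt;,&lt;&lt;"n"&gt;&gt;]
+</code>
+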
+ <p>Example of the difference between a scope and taking the
+ binary apart before splitting:</p>
+
+<code>
+1> binary:split(&lt;&lt;"banana"&gt;&gt;,[&lt;&lt;"a"&gt;&gt;],[{scope,{2,3}}]).
+[&lt;&lt;"ban"&gt;&gt;,&lt;&lt;"na"&gt;&gt;]
+2> binary:split(binary:part(&lt;&lt;"banana"&gt;&gt;,{2,3}),[&lt;&lt;"a"&gt;&gt;],[]).
+[&lt;&lt;"n"&gt;&gt;,&lt;&lt;"n"&gt;&gt;]
+</code>
+
+ <p>The return type is always a list of binaries that are all
+ referencing <c>Subject</c>. This means that the data in <c>Subject</c> is not
+ actually copied to new binaries and that <c>Subject</c> cannot be
+ garbage collected until the results of the split are no longer
+ referenced.</p>
+
+ <p>For a description of <c>Pattern</c>, see <seealso marker="#compile_pattern-1">compile_pattern/1</seealso>.</p>
+
+ </desc>
+ </func>
+ </funcs>
+</erlref>
diff --git a/lib/stdlib/doc/src/gen_event.xml b/lib/stdlib/doc/src/gen_event.xml
index df09294de6..2234a62ac3 100644
--- a/lib/stdlib/doc/src/gen_event.xml
+++ b/lib/stdlib/doc/src/gen_event.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>1996</year><year>2009</year>
+ <year>1996</year><year>2010</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -13,12 +13,12 @@
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at http://www.erlang.org/.
-
+
Software distributed under the License is distributed on an "AS IS"
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
-
+
</legalnotice>
<title>gen_event</title>
@@ -630,12 +630,66 @@ gen_event:stop -----> Module:terminate/2
<p>The function should return the updated internal state.</p>
</desc>
</func>
+ <func>
+ <name>Module:format_status(Opt, [PDict, State]) -> Status</name>
+ <fsummary>Optional function for providing a term describing the
+ current event handler state.</fsummary>
+ <type>
+ <v>Opt = normal | terminate</v>
+ <v>PDict = [{Key, Value}]</v>
+ <v>State = term()</v>
+ <v>Status = term()</v>
+ </type>
+ <desc>
+ <note>
+ <p>This callback is optional, so event handler modules need
+ not export it. If a handler does not export this function,
+ the gen_event module uses the handler state directly for
+ the purposes described below.</p>
+ </note>
+ <p>This function is called by a gen_event process when:</p>
+ <list type="bulleted">
+ <item>One
+ of <seealso marker="sys#get_status/1">sys:get_status/1,2</seealso>
+ is invoked to get the gen_event status. <c>Opt</c> is set
+ to the atom <c>normal</c> for this case.</item>
+ <item>The event handler terminates abnormally and gen_event
+ logs an error. <c>Opt</c> is set to the
+ atom <c>terminate</c> for this case.</item>
+ </list>
+ <p>This function is useful for customising the form and
+ appearance of the event handler state for these cases. An
+ event handler callback module wishing to customise
+ the <c>sys:get_status/1,2</c> return value as well as how
+ its state appears in termination error logs exports an
+ instance of <c>format_status/2</c> that returns a term
+ describing the current state of the event handler.</p>
+ <p><c>PDict</c> is the current value of the gen_event's
+ process dictionary.</p>
+ <p><c>State</c> is the internal state of the event
+ handler.</p>
+ <p>The function should return <c>Status</c>, a term that
+ customises the details of the current state of the event
+ handler. Any term is allowed for <c>Status</c>. The
+ gen_event module uses <c>Status</c> as follows:</p>
+ <list type="bulleted">
+ <item>When <c>sys:get_status/1,2</c> is called, gen_event
+ ensures that its return value contains <c>Status</c> in
+ place of the event handler's actual state term.</item>
+ <item>When an event handler terminates abnormally, gen_event
+ logs <c>Status</c> in place of the event handler's actual
+ state term.</item>
+ </list>
+ <p>One use for this function is to return compact alternative
+ state representations to avoid having large state terms
+ printed in logfiles.</p>
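+ <p>A sketch of a possible callback (illustrative only; the record
+ and field names are hypothetical) that hides sensitive data both
+ from <c>sys:get_status/1,2</c> and from crash logs:</p>
+ <code type="none">
+format_status(_Opt, [_PDict, #state{token = _Secret} = State]) ->
+    %% Replace the secret token before the state is shown or logged.
+    State#state{token = removed}.
+ </code>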
+ </desc>
+ </func>
</funcs>
<section>
<title>SEE ALSO</title>
- <p><seealso marker="supervisor">supervisor(3)</seealso>,
+ <p><seealso marker="supervisor">supervisor(3)</seealso>,
<seealso marker="sys">sys(3)</seealso></p>
</section>
</erlref>
-
diff --git a/lib/stdlib/doc/src/gen_fsm.xml b/lib/stdlib/doc/src/gen_fsm.xml
index 739cd0bffd..d15383c621 100644
--- a/lib/stdlib/doc/src/gen_fsm.xml
+++ b/lib/stdlib/doc/src/gen_fsm.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>1996</year><year>2009</year>
+ <year>1996</year><year>2010</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -13,12 +13,12 @@
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at http://www.erlang.org/.
-
+
Software distributed under the License is distributed on an "AS IS"
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
-
+
</legalnotice>
<title>gen_fsm</title>
@@ -730,33 +730,58 @@ gen_fsm:sync_send_all_state_event -----> Module:handle_sync_event/4
</desc>
</func>
<func>
- <name>Module:format_status(normal, [PDict, StateData]) -> Status</name>
+ <name>Module:format_status(Opt, [PDict, StateData]) -> Status</name>
<fsummary>Optional function for providing a term describing the
current gen_fsm status.</fsummary>
<type>
+ <v>Opt = normal | terminate</v>
<v>PDict = [{Key, Value}]</v>
<v>StateData = term()</v>
- <v>Status = [term()]</v>
+ <v>Status = term()</v>
</type>
<desc>
- <p><em>This callback is optional, so callback modules need not
- export it. The gen_fsm module provides a default
- implementation of this function that returns the callback
- module state data.</em></p>
- <p>This function is called by a gen_fsm process when one
- of <seealso marker="sys#get_status/1">sys:get_status/1,2</seealso>
- is invoked to get the gen_fsm status. A callback module
- wishing to customise the <c>sys:get_status/1,2</c> return
- value exports an instance of <c>format_status/2</c> that
- returns a term describing the current status of the
- gen_fsm.</p>
+ <note>
+ <p>This callback is optional, so callback modules need not
+ export it. The gen_fsm module provides a default
+ implementation of this function that returns the callback
+ module state data.</p>
+ </note>
+ <p>This function is called by a gen_fsm process when:</p>
+ <list type="bulleted">
+ <item>One
+ of <seealso marker="sys#get_status/1">sys:get_status/1,2</seealso>
+ is invoked to get the gen_fsm status. <c>Opt</c> is set to
+ the atom <c>normal</c> for this case.</item>
+ <item>The gen_fsm terminates abnormally and logs an
+ error. <c>Opt</c> is set to the atom <c>terminate</c> for
+ this case.</item>
+ </list>
+ <p>This function is useful for customising the form and
+ appearance of the gen_fsm status for these cases. A callback
+ module wishing to customise the <c>sys:get_status/1,2</c>
+ return value as well as how its status appears in
+ termination error logs exports an instance
+ of <c>format_status/2</c> that returns a term describing the
+ current status of the gen_fsm.</p>
<p><c>PDict</c> is the current value of the gen_fsm's
process dictionary.</p>
<p><c>StateData</c> is the internal state data of the
gen_fsm.</p>
- <p>The function should return <c>Status</c>, a list of one or
- more terms that customise the details of the current state
- and status of the gen_fsm.</p>
+ <p>The function should return <c>Status</c>, a term that
+ customises the details of the current state and status of
+ the gen_fsm. There are no restrictions on the
+ form <c>Status</c> can take, but for
+ the <c>sys:get_status/1,2</c> case (when <c>Opt</c>
+ is <c>normal</c>), the recommended form for
+ the <c>Status</c> value is <c>[{data, [{"StateData",
+ Term}]}]</c> where <c>Term</c> provides relevant details of
+ the gen_fsm state data. Following this recommendation isn't
+ required, but doing so will make the callback module status
+ consistent with the rest of the <c>sys:get_status/1,2</c>
+ return value.</p>
+ <p>One use for this function is to return compact alternative
+ state data representations to avoid having large state terms
+ printed in logfiles.</p>
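+ <p>A sketch of a possible callback following the recommended form
+ (illustrative only; it assumes the state data is a large list of
+ queued events, which is a made-up example):</p>
+ <code type="none">
+format_status(_Opt, [_PDict, StateData]) ->
+    %% Log only the number of queued events, not the whole list.
+    [{data, [{"StateData", {queued_events, length(StateData)}}]}].
+ </code>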
</desc>
</func>
</funcs>
@@ -770,4 +795,3 @@ gen_fsm:sync_send_all_state_event -----> Module:handle_sync_event/4
<seealso marker="sys">sys(3)</seealso></p>
</section>
</erlref>
-
diff --git a/lib/stdlib/doc/src/gen_server.xml b/lib/stdlib/doc/src/gen_server.xml
index 30c04d1d52..1045766e01 100644
--- a/lib/stdlib/doc/src/gen_server.xml
+++ b/lib/stdlib/doc/src/gen_server.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>1996</year><year>2009</year>
+ <year>1996</year><year>2010</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -13,12 +13,12 @@
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at http://www.erlang.org/.
-
+
Software distributed under the License is distributed on an "AS IS"
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
-
+
</legalnotice>
<title>gen_server</title>
@@ -599,32 +599,57 @@ gen_server:abcast -----> Module:handle_cast/2
</desc>
</func>
<func>
- <name>Module:format_status(normal, [PDict, State]) -> Status</name>
+ <name>Module:format_status(Opt, [PDict, State]) -> Status</name>
<fsummary>Optional function for providing a term describing the
current gen_server status.</fsummary>
<type>
+ <v>Opt = normal | terminate</v>
<v>PDict = [{Key, Value}]</v>
<v>State = term()</v>
- <v>Status = [term()]</v>
+ <v>Status = term()</v>
</type>
<desc>
- <p><em>This callback is optional, so callback modules need not
- export it. The gen_server module provides a default
- implementation of this function that returns the callback
- module state.</em></p>
- <p>This function is called by a gen_server process when one
+ <note>
+ <p>This callback is optional, so callback modules need not
+ export it. The gen_server module provides a default
+ implementation of this function that returns the callback
+ module state.</p>
+ </note>
+ <p>This function is called by a gen_server process when:</p>
+ <list type="bulleted">
+ <item>One
of <seealso marker="sys#get_status/1">sys:get_status/1,2</seealso>
- is invoked to get the gen_server status. A callback module
- wishing to customise the <c>sys:get_status/1,2</c> return
- value exports an instance of <c>format_status/2</c> that
- returns a term describing the current status of the
- gen_server.</p>
+ is invoked to get the gen_server status. <c>Opt</c> is set
+ to the atom <c>normal</c> for this case.</item>
+ <item>The gen_server terminates abnormally and logs an
+ error. <c>Opt</c> is set to the atom <c>terminate</c> for this
+ case.</item>
+ </list>
+ <p>This function is useful for customising the form and
+ appearance of the gen_server status for these cases. A
+ callback module wishing to customise
+ the <c>sys:get_status/1,2</c> return value as well as how
+ its status appears in termination error logs exports an
+ instance of <c>format_status/2</c> that returns a term
+ describing the current status of the gen_server.</p>
<p><c>PDict</c> is the current value of the gen_server's
process dictionary.</p>
<p><c>State</c> is the internal state of the gen_server.</p>
- <p>The function should return <c>Status</c>, a list of one or
- more terms that customise the details of the current state
- and status of the gen_server.</p>
+ <p>The function should return <c>Status</c>, a term that
+ customises the details of the current state and status of
+ the gen_server. There are no restrictions on the
+ form <c>Status</c> can take, but for
+ the <c>sys:get_status/1,2</c> case (when <c>Opt</c>
+ is <c>normal</c>), the recommended form for
+ the <c>Status</c> value is <c>[{data, [{"State",
+ Term}]}]</c> where <c>Term</c> provides relevant details of
+ the gen_server state. Following this recommendation isn't
+ required, but doing so will make the callback module status
+ consistent with the rest of the <c>sys:get_status/1,2</c>
+ return value.</p>
+ <p>One use for this function is to return compact alternative
+ state representations to avoid having large state terms
+ printed in logfiles.</p>
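+ <p>A sketch of a possible callback following the recommended form
+ (illustrative only; it assumes the server state is a gb_trees
+ structure, which is a made-up example):</p>
+ <code type="none">
+format_status(_Opt, [_PDict, State]) ->
+    %% Report only the size of the tree instead of printing it all.
+    [{data, [{"State", {tree_size, gb_trees:size(State)}}]}].
+ </code>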
</desc>
</func>
</funcs>
diff --git a/lib/stdlib/doc/src/re.xml b/lib/stdlib/doc/src/re.xml
index 4d2a0e0995..80adc3e347 100644
--- a/lib/stdlib/doc/src/re.xml
+++ b/lib/stdlib/doc/src/re.xml
@@ -80,7 +80,11 @@
- a unicode_binary is allowed as the tail of the list</code>
<code type="none">
- mp() = Opaque datatype containing a compiled regular expression.</code>
+ mp() = Opaque datatype containing a compiled regular expression.
+ mp() = Opaque datatype containing a compiled regular expression.
+ - The mp() is guaranteed to be a tuple() having the atom
+ 're_pattern' as its first element, to allow for matching in
+ guards. However, the arity of the tuple() and the content of
+ its other fields are not to be trusted.</code>
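+ <p>For example (an added illustration), a function can require a
+ precompiled pattern in a guard:</p>
+ <code type="none">
+ run_all(Subjects, RE) when is_tuple(RE), element(1, RE) =:= re_pattern ->
+     [re:run(S, RE) || S &lt;- Subjects].</code>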
</section>
<funcs>
<func>
diff --git a/lib/stdlib/doc/src/ref_man.xml b/lib/stdlib/doc/src/ref_man.xml
index f6ae368e92..85aae6151d 100644
--- a/lib/stdlib/doc/src/ref_man.xml
+++ b/lib/stdlib/doc/src/ref_man.xml
@@ -4,7 +4,7 @@
<application xmlns:xi="http://www.w3.org/2001/XInclude">
<header>
<copyright>
- <year>1996</year><year>2009</year>
+ <year>1996</year><year>2010</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -13,12 +13,12 @@
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at http://www.erlang.org/.
-
+
Software distributed under the License is distributed on an "AS IS"
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
-
+
</legalnotice>
<title>STDLIB Reference Manual</title>
@@ -37,6 +37,7 @@
<xi:include href="array.xml"/>
<xi:include href="base64.xml"/>
<xi:include href="beam_lib.xml"/>
+ <xi:include href="binary.xml"/>
<xi:include href="c.xml"/>
<xi:include href="calendar.xml"/>
<xi:include href="dets.xml"/>
diff --git a/lib/stdlib/src/Makefile b/lib/stdlib/src/Makefile
index 237818c08b..600303d7e1 100644
--- a/lib/stdlib/src/Makefile
+++ b/lib/stdlib/src/Makefile
@@ -43,6 +43,7 @@ MODULES= \
array \
base64 \
beam_lib \
+ binary \
c \
calendar \
dets \
diff --git a/lib/stdlib/src/beam_lib.erl b/lib/stdlib/src/beam_lib.erl
index c71dad6163..91ff2438c6 100644
--- a/lib/stdlib/src/beam_lib.erl
+++ b/lib/stdlib/src/beam_lib.erl
@@ -41,6 +41,8 @@
terminate/2,code_change/3]).
-export([make_crypto_key/2, get_crypto_key/1]). %Utilities used by compiler
+-export_type([attrib_entry/0, compinfo_entry/0, labeled_entry/0]).
+
-import(lists, [append/1, delete/2, foreach/2, keysort/2,
member/2, reverse/1, sort/1, splitwith/2]).
diff --git a/lib/stdlib/src/binary.erl b/lib/stdlib/src/binary.erl
new file mode 100644
index 0000000000..f6489788b2
--- /dev/null
+++ b/lib/stdlib/src/binary.erl
@@ -0,0 +1,177 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(binary).
+%%
+%% The following functions are implemented as BIFs:
+%% binary:compile_pattern/1
+%% binary:match/{2,3}
+%% binary:matches/{2,3}
+%% binary:longest_common_prefix/1
+%% binary:longest_common_suffix/1
+%% binary:first/1
+%% binary:last/1
+%% binary:at/2
+%% binary:part/{2,3}
+%% binary:bin_to_list/{1,2,3}
+%% binary:list_to_bin/1
+%% binary:copy/{1,2}
+%% binary:referenced_byte_size/1
+%% binary:decode_unsigned/{1,2}
+%% - Not yet:
+%%
+%% Implemented in this module:
+-export([split/2,split/3,replace/3,replace/4]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% split
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+split(H,N) ->
+ split(H,N,[]).
+split(Haystack,Needles,Options) ->
+ try
+ {Part,Global,Trim} = get_opts_split(Options,{no,false,false}),
+ Moptlist = case Part of
+ no ->
+ [];
+ {A,B} ->
+ [{scope,{A,B}}]
+ end,
+ MList = if
+ Global ->
+ binary:matches(Haystack,Needles,Moptlist);
+ true ->
+ case binary:match(Haystack,Needles,Moptlist) of
+ nomatch -> [];
+ Match -> [Match]
+ end
+ end,
+ do_split(Haystack,MList,0,Trim)
+ catch
+ _:_ ->
+ erlang:error(badarg)
+ end.
+
+do_split(H,[],N,true) when N >= byte_size(H) ->
+ [];
+do_split(H,[],N,_) ->
+ [binary:part(H,{N,byte_size(H)-N})];
+do_split(H,[{A,B}|T],N,Trim) ->
+ case binary:part(H,{N,A-N}) of
+ <<>> ->
+ Rest = do_split(H,T,A+B,Trim),
+ case {Trim, Rest} of
+ {true,[]} ->
+ [];
+ _ ->
+ [<<>> | Rest]
+ end;
+ Oth ->
+ [Oth | do_split(H,T,A+B,Trim)]
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% replace
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+replace(H,N,R) ->
+ replace(H,N,R,[]).
+replace(Haystack,Needles,Replacement,Options) ->
+ try
+ true = is_binary(Replacement), % Make badarg instead of function clause
+ {Part,Global,Insert} = get_opts_replace(Options,{no,false,[]}),
+ Moptlist = case Part of
+ no ->
+ [];
+ {A,B} ->
+ [{scope,{A,B}}]
+ end,
+ MList = if
+ Global ->
+ binary:matches(Haystack,Needles,Moptlist);
+ true ->
+ case binary:match(Haystack,Needles,Moptlist) of
+ nomatch -> [];
+ Match -> [Match]
+ end
+ end,
+ ReplList = case Insert of
+ [] ->
+ Replacement;
+ Y when is_integer(Y) ->
+ splitat(Replacement,0,[Y]);
+ Li when is_list(Li) ->
+ splitat(Replacement,0,lists:sort(Li))
+ end,
+ erlang:iolist_to_binary(do_replace(Haystack,MList,ReplList,0))
+ catch
+ _:_ ->
+ erlang:error(badarg)
+ end.
+
+
+do_replace(H,[],_,N) ->
+ [binary:part(H,{N,byte_size(H)-N})];
+do_replace(H,[{A,B}|T],Replacement,N) ->
+ [binary:part(H,{N,A-N}),
+ if
+ is_list(Replacement) ->
+ do_insert(Replacement, binary:part(H,{A,B}));
+ true ->
+ Replacement
+ end
+ | do_replace(H,T,Replacement,A+B)].
+
+do_insert([X],_) ->
+ [X];
+do_insert([H|T],R) ->
+ [H,R|do_insert(T,R)].
+
+splitat(H,N,[]) ->
+ [binary:part(H,{N,byte_size(H)-N})];
+splitat(H,N,[I|T]) ->
+ [binary:part(H,{N,I-N})|splitat(H,I,T)].
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Simple helper functions
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+get_opts_split([],{Part,Global,Trim}) ->
+ {Part,Global,Trim};
+get_opts_split([{scope,{A,B}} | T],{_Part,Global,Trim}) ->
+ get_opts_split(T,{{A,B},Global,Trim});
+get_opts_split([global | T],{Part,_Global,Trim}) ->
+ get_opts_split(T,{Part,true,Trim});
+get_opts_split([trim | T],{Part,Global,_Trim}) ->
+ get_opts_split(T,{Part,Global,true});
+get_opts_split(_,_) ->
+ throw(badopt).
+
+get_opts_replace([],{Part,Global,Insert}) ->
+ {Part,Global,Insert};
+get_opts_replace([{scope,{A,B}} | T],{_Part,Global,Insert}) ->
+ get_opts_replace(T,{{A,B},Global,Insert});
+get_opts_replace([global | T],{Part,_Global,Insert}) ->
+ get_opts_replace(T,{Part,true,Insert});
+get_opts_replace([{insert_replaced,N} | T],{Part,Global,_Insert}) ->
+ get_opts_replace(T,{Part,Global,N});
+get_opts_replace(_,_) ->
+ throw(badopt).
+
diff --git a/lib/stdlib/src/dets.erl b/lib/stdlib/src/dets.erl
index 7f1c13770b..4584b8184f 100644
--- a/lib/stdlib/src/dets.erl
+++ b/lib/stdlib/src/dets.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(dets).
@@ -88,6 +88,7 @@
%% Not documented, or not ready for publication.
-export([lookup_keys/2]).
+-export_type([tab_name/0]).
-compile({inline, [{einval,2},{badarg,2},{undefined,1},
{badarg_exit,2},{lookup_reply,2}]}).
diff --git a/lib/stdlib/src/digraph.erl b/lib/stdlib/src/digraph.erl
index 9bdea671a9..b5f52da921 100644
--- a/lib/stdlib/src/digraph.erl
+++ b/lib/stdlib/src/digraph.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(digraph).
@@ -36,6 +36,8 @@
-export([get_short_path/3, get_short_cycle/2]).
+-export_type([d_type/0, vertex/0]).
+
-record(digraph, {vtab = notable :: ets:tab(),
etab = notable :: ets:tab(),
ntab = notable :: ets:tab(),
diff --git a/lib/stdlib/src/edlin.erl b/lib/stdlib/src/edlin.erl
index 6cb441dbed..026bd9038f 100644
--- a/lib/stdlib/src/edlin.erl
+++ b/lib/stdlib/src/edlin.erl
@@ -24,6 +24,7 @@
-export([init/0,start/1,edit_line/2,prefix_arg/1]).
-export([erase_line/1,erase_inp/1,redraw_line/1]).
-export([length_before/1,length_after/1,prompt/1]).
+-export([current_line/1]).
%%-export([expand/1]).
-export([edit_line1/2]).
@@ -421,6 +422,7 @@ over_paren_auto([], _, _, _) ->
%% length_before(Line)
%% length_after(Line)
%% prompt(Line)
+%% current_line(Line)
%% Various functions for accessing bits of a line.
erase_line({line,Pbs,{Bef,Aft},_}) ->
@@ -447,6 +449,9 @@ length_after({line,_,{_Bef,Aft},_}) ->
prompt({line,Pbs,_,_}) ->
Pbs.
+current_line({line,_,{Bef, Aft},_}) ->
+ reverse(Bef, Aft ++ "\n").
+
%% %% expand(CurrentBefore) ->
%% %% {yes,Expansion} | no
%% %% Try to expand the word before as either a module name or a function
diff --git a/lib/stdlib/src/epp.erl b/lib/stdlib/src/epp.erl
index f144cbb938..81b2431f40 100644
--- a/lib/stdlib/src/epp.erl
+++ b/lib/stdlib/src/epp.erl
@@ -111,6 +111,8 @@ format_error({bad,W}) ->
io_lib:format("badly formed '~s'", [W]);
format_error(missing_parenthesis) ->
io_lib:format("badly formed define: missing closing right parenthesis",[]);
+format_error(premature_end) ->
+ "premature end";
format_error({call,What}) ->
io_lib:format("illegal macro call '~s'",[What]);
format_error({undefined,M,none}) ->
@@ -163,7 +165,7 @@ parse_file(Epp) ->
case normalize_typed_record_fields(Fields) of
{typed, NewFields} ->
[{attribute, La, record, {Record, NewFields}},
- {attribute, La, type,
+ {attribute, La, type,
{{record, Record}, Fields, []}}
|parse_file(Epp)];
not_typed ->
@@ -188,7 +190,7 @@ normalize_typed_record_fields([], NewFields, Typed) ->
true -> {typed, lists:reverse(NewFields)};
false -> not_typed
end;
-normalize_typed_record_fields([{typed_record_field,Field,_}|Rest],
+normalize_typed_record_fields([{typed_record_field,Field,_}|Rest],
NewFields, _Typed) ->
normalize_typed_record_fields(Rest, [Field|NewFields], true);
normalize_typed_record_fields([Field|Rest], NewFields, Typed) ->
@@ -324,7 +326,7 @@ wait_req_scan(St) ->
wait_req_skip(St, Sis) ->
From = wait_request(St),
skip_toks(From, St, Sis).
-
+
%% enter_file(Path, FileName, IncludeToken, From, EppState)
%% leave_file(From, EppState)
%% Handle entering and leaving included files. Notify caller when the
@@ -380,16 +382,16 @@ file_name(N) when is_atom(N) ->
leave_file(From, St) ->
case St#epp.istk of
- [I|Cis] ->
+ [I|Cis] ->
epp_reply(From,
- {error,{St#epp.location,epp,
+ {error,{St#epp.location,epp,
{illegal,"unterminated",I}}}),
leave_file(wait_request(St),St#epp{istk=Cis});
[] ->
case St#epp.sstk of
[OldSt|Sts] ->
close_file(St),
- enter_file_reply(From, OldSt#epp.name,
+ enter_file_reply(From, OldSt#epp.name,
OldSt#epp.location, OldSt#epp.location),
Ms = dict:store({atom,'FILE'},
{none,
@@ -491,9 +493,9 @@ scan_extends(_Ts, _As, Ms) -> Ms.
%% scan_define(Tokens, DefineToken, From, EppState)
-scan_define([{'(',_Lp},{Type,_Lm,M}=Mac,{',',_Lc}|Toks], _Def, From, St)
+scan_define([{'(',_Lp},{Type,_Lm,M}=Mac,{',',Lc}|Toks], _Def, From, St)
when Type =:= atom; Type =:= var ->
- case catch macro_expansion(Toks) of
+ case catch macro_expansion(Toks, Lc) of
Expansion when is_list(Expansion) ->
case dict:find({atom,M}, St#epp.macs) of
{ok, Defs} when is_list(Defs) ->
@@ -608,7 +610,7 @@ scan_undef(_Toks, Undef, From, St) ->
%% scan_include(Tokens, IncludeToken, From, St)
-scan_include([{'(',_Llp},{string,_Lf,NewName0},{')',_Lrp},{dot,_Ld}], Inc,
+scan_include([{'(',_Llp},{string,_Lf,NewName0},{')',_Lrp},{dot,_Ld}], Inc,
From, St) ->
NewName = expand_var(NewName0),
enter_file(St#epp.path, NewName, Inc, From, St);
@@ -644,7 +646,7 @@ scan_include_lib([{'(',_Llp},{string,_Lf,NewName0},{')',_Lrp},{dot,_Ld}],
case file:open(LibName, [read]) of
{ok,NewF} ->
ExtraPath = [filename:dirname(LibName)],
- wait_req_scan(enter_file2(NewF, LibName, From,
+ wait_req_scan(enter_file2(NewF, LibName, From,
St, Loc, ExtraPath));
{error,_E2} ->
epp_reply(From,
@@ -773,7 +775,7 @@ scan_file(_Toks, Tf, From, St) ->
new_location(Ln, Le, Lf) when is_integer(Lf) ->
Ln+(Le-Lf);
-new_location(Ln, {Le,_}, {Lf,_}) ->
+new_location(Ln, {Le,_}, {Lf,_}) ->
{Ln+(Le-Lf),1}.
%% skip_toks(From, EppState, SkipIstack)
@@ -814,22 +816,23 @@ skip_else(_Else, From, St, Sis) ->
skip_toks(From, St, Sis).
%% macro_pars(Tokens, ArgStack)
-%% macro_expansion(Tokens)
+%% macro_expansion(Tokens, Line)
%% Extract the macro parameters and the expansion from a macro definition.
-macro_pars([{')',_Lp}, {',',_Ld}|Ex], Args) ->
- {ok, {lists:reverse(Args), macro_expansion(Ex)}};
-macro_pars([{var,_,Name}, {')',_Lp}, {',',_Ld}|Ex], Args) ->
+macro_pars([{')',_Lp}, {',',Ld}|Ex], Args) ->
+ {ok, {lists:reverse(Args), macro_expansion(Ex, Ld)}};
+macro_pars([{var,_,Name}, {')',_Lp}, {',',Ld}|Ex], Args) ->
false = lists:member(Name, Args), %Prolog is nice
- {ok, {lists:reverse([Name|Args]), macro_expansion(Ex)}};
+ {ok, {lists:reverse([Name|Args]), macro_expansion(Ex, Ld)}};
macro_pars([{var,_L,Name}, {',',_}|Ts], Args) ->
- false = lists:member(Name, Args),
+ false = lists:member(Name, Args),
macro_pars(Ts, [Name|Args]).
-macro_expansion([{')',_Lp},{dot,_Ld}]) -> [];
-macro_expansion([{dot,Ld}]) -> throw({error,Ld,missing_parenthesis});
-macro_expansion([T|Ts]) ->
- [T|macro_expansion(Ts)].
+macro_expansion([{')',_Lp},{dot,_Ld}], _L0) -> [];
+macro_expansion([{dot,Ld}], _L0) -> throw({error,Ld,missing_parenthesis});
+macro_expansion([T|Ts], _L0) ->
+ [T|macro_expansion(Ts, element(2, T))];
+macro_expansion([], L0) -> throw({error,L0,premature_end}).
%% expand_macros(Tokens, Macros)
%% expand_macro(Tokens, MacroToken, RestTokens)
@@ -1084,11 +1087,11 @@ epp_reply(From, Rep) ->
wait_epp_reply(Epp, Mref) ->
receive
- {epp_reply,Epp,Rep} ->
+ {epp_reply,Epp,Rep} ->
erlang:demonitor(Mref),
receive {'DOWN',Mref,_,_,_} -> ok after 0 -> ok end,
Rep;
- {'DOWN',Mref,_,_,E} ->
+ {'DOWN',Mref,_,_,E} ->
receive {epp_reply,Epp,Rep} -> Rep
after 0 -> exit(E)
end
@@ -1145,7 +1148,7 @@ get_line({Line,_Column}) ->
%% mainly aimed at yecc, the parser generator, which uses the -file
%% attribute to get correct lines in messages referring to code
%% supplied by the user (actions etc in .yrl files).
-%%
+%%
%% In a perfect world (read: perfectly implemented applications such
%% as Xref, Cover, Debugger, etc.) it would not be necessary to
%% distinguish -file attributes from epp and the input file. The
@@ -1165,7 +1168,7 @@ get_line({Line,_Column}) ->
%% have been output by epp (corresponding to -include and
%% -include_lib) are kept, but the user's -file attributes are
%% removed. This seems sufficient for now.
-%%
+%%
%% It turns out to be difficult to distinguish -file attributes in the
%% input file from the ones added by epp unless some action is taken.
%% The (less than perfect) solution employed is to let epp assign
@@ -1177,7 +1180,7 @@ get_line({Line,_Column}) ->
interpret_file_attribute(Forms) ->
interpret_file_attr(Forms, 0, []).
-interpret_file_attr([{attribute,Loc,file,{File,Line}}=Form | Forms],
+interpret_file_attr([{attribute,Loc,file,{File,Line}}=Form | Forms],
Delta, Fs) ->
{line, L} = erl_scan:attributes_info(Loc, line),
if
diff --git a/lib/stdlib/src/erl_compile.erl b/lib/stdlib/src/erl_compile.erl
index d9d15e05f8..abff37e4bc 100644
--- a/lib/stdlib/src/erl_compile.erl
+++ b/lib/stdlib/src/erl_compile.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(erl_compile).
@@ -23,6 +23,8 @@
-export([compile_cmdline/1]).
+-export_type([cmd_line_arg/0]).
+
%% Mapping from extension to {M,F} to run the correct compiler.
compiler(".erl") -> {compile, compile};
diff --git a/lib/stdlib/src/erl_expand_records.erl b/lib/stdlib/src/erl_expand_records.erl
index a38b7639d8..61ce41f714 100644
--- a/lib/stdlib/src/erl_expand_records.erl
+++ b/lib/stdlib/src/erl_expand_records.erl
@@ -95,8 +95,9 @@ forms([F | Fs0], St0) ->
forms([], St) -> {[],St}.
clauses([{clause,Line,H0,G0,B0} | Cs0], St0) ->
- {H,St1} = head(H0, St0),
- {G,St2} = guard(G0, St1),
+ {H1,St1} = head(H0, St0),
+ {G1,St2} = guard(G0, St1),
+ {H,G} = optimize_is_record(H1, G1, St2),
{B,St3} = exprs(B0, St2),
{Cs,St4} = clauses(Cs0, St3),
{[{clause,Line,H,G,B} | Cs],St4};
@@ -800,5 +801,137 @@ imported(F, A, St) ->
error -> no
end.
+%%%
+%%% Replace is_record/3 in guards with matching if possible.
+%%%
+
+optimize_is_record(H0, G0, #exprec{compile=Opts}) ->
+ case opt_rec_vars(G0) of
+ [] ->
+ {H0,G0};
+ Rs0 ->
+ case lists:member(no_is_record_optimization, Opts) of
+ true ->
+ {H0,G0};
+ false ->
+ {H,Rs} = opt_pattern_list(H0, Rs0),
+ G = opt_remove(G0, Rs),
+ {H,G}
+ end
+ end.
+
+
+%% opt_rec_vars(Guards) -> Vars.
+%% Search through the guard expression, looking for
+%% variables referenced in those is_record/3 calls that
+%%  will fail the entire guard if they evaluate to 'false'.
+%%
+%% In the following code
+%%
+%% f(X, Y, Z) when is_record(X, r1) andalso
+%% (is_record(Y, r2) orelse is_record(Z, r3))
+%%
+%% the entire guard will be false if the record test for
+%% X fails, and the clause can be rewritten to:
+%%
+%% f({r1,...}=X, Y, Z) when true andalso
+%% (is_record(Y, r2) or is_record(Z, r3))
+%%
+opt_rec_vars([G|Gs]) ->
+ Rs = opt_rec_vars_1(G, orddict:new()),
+ opt_rec_vars(Gs, Rs);
+opt_rec_vars([]) -> orddict:new().
+
+opt_rec_vars([G|Gs], Rs0) ->
+ Rs1 = opt_rec_vars_1(G, orddict:new()),
+ Rs = ordsets:intersection(Rs0, Rs1),
+ opt_rec_vars(Gs, Rs);
+opt_rec_vars([], Rs) -> Rs.
+
+opt_rec_vars_1([T|Ts], Rs0) ->
+ Rs = opt_rec_vars_2(T, Rs0),
+ opt_rec_vars_1(Ts, Rs);
+opt_rec_vars_1([], Rs) -> Rs.
+
+opt_rec_vars_2({op,_,'and',A1,A2}, Rs) ->
+ opt_rec_vars_1([A1,A2], Rs);
+opt_rec_vars_2({op,_,'andalso',A1,A2}, Rs) ->
+ opt_rec_vars_1([A1,A2], Rs);
+opt_rec_vars_2({op,_,'orelse',Arg,{atom,_,fail}}, Rs) ->
+ %% Since the second argument guarantees failure,
+ %% it is safe to inspect the first argument.
+ opt_rec_vars_2(Arg, Rs);
+opt_rec_vars_2({call,_,{remote,_,{atom,_,erlang},{atom,_,is_record}},
+ [{var,_,V},{atom,_,Tag},{integer,_,Sz}]}, Rs) ->
+ orddict:store(V, {Tag,Sz}, Rs);
+opt_rec_vars_2({call,_,{atom,_,is_record},
+ [{var,_,V},{atom,_,Tag},{integer,_,Sz}]}, Rs) ->
+ orddict:store(V, {Tag,Sz}, Rs);
+opt_rec_vars_2(_, Rs) -> Rs.
+
+opt_pattern_list(Ps, Rs) ->
+ opt_pattern_list(Ps, Rs, []).
+
+opt_pattern_list([P0|Ps], Rs0, Acc) ->
+ {P,Rs} = opt_pattern(P0, Rs0),
+ opt_pattern_list(Ps, Rs, [P|Acc]);
+opt_pattern_list([], Rs, Acc) ->
+ {reverse(Acc),Rs}.
+
+opt_pattern({var,_,V}=Var, Rs0) ->
+ case orddict:find(V, Rs0) of
+ {ok,{Tag,Sz}} ->
+ Rs = orddict:store(V, {remove,Tag,Sz}, Rs0),
+ {opt_var(Var, Tag, Sz),Rs};
+ _ ->
+ {Var,Rs0}
+ end;
+opt_pattern({cons,Line,H0,T0}, Rs0) ->
+ {H,Rs1} = opt_pattern(H0, Rs0),
+ {T,Rs} = opt_pattern(T0, Rs1),
+ {{cons,Line,H,T},Rs};
+opt_pattern({tuple,Line,Es0}, Rs0) ->
+ {Es,Rs} = opt_pattern_list(Es0, Rs0),
+ {{tuple,Line,Es},Rs};
+opt_pattern({match,Line,Pa0,Pb0}, Rs0) ->
+ {Pa,Rs1} = opt_pattern(Pa0, Rs0),
+ {Pb,Rs} = opt_pattern(Pb0, Rs1),
+ {{match,Line,Pa,Pb},Rs};
+opt_pattern(P, Rs) -> {P,Rs}.
+
+opt_var({var,Line,_}=Var, Tag, Sz) ->
+ Rp = record_pattern(2, -1, ignore, Sz, Line, [{atom,Line,Tag}]),
+ {match,Line,{tuple,Line,Rp},Var}.
+
+opt_remove(Gs, Rs) ->
+ [opt_remove_1(G, Rs) || G <- Gs].
+
+opt_remove_1(Ts, Rs) ->
+ [opt_remove_2(T, Rs) || T <- Ts].
+
+opt_remove_2({op,L,'and'=Op,A1,A2}, Rs) ->
+ {op,L,Op,opt_remove_2(A1, Rs),opt_remove_2(A2, Rs)};
+opt_remove_2({op,L,'andalso'=Op,A1,A2}, Rs) ->
+ {op,L,Op,opt_remove_2(A1, Rs),opt_remove_2(A2, Rs)};
+opt_remove_2({op,L,'orelse',A1,A2}, Rs) ->
+ {op,L,'orelse',opt_remove_2(A1, Rs),A2};
+opt_remove_2({call,Line,{remote,_,{atom,_,erlang},{atom,_,is_record}},
+ [{var,_,V},{atom,_,Tag},{integer,_,Sz}]}=A, Rs) ->
+ case orddict:find(V, Rs) of
+ {ok,{remove,Tag,Sz}} ->
+ {atom,Line,true};
+ _ ->
+ A
+ end;
+opt_remove_2({call,Line,{atom,_,is_record},
+ [{var,_,V},{atom,_,Tag},{integer,_,Sz}]}=A, Rs) ->
+ case orddict:find(V, Rs) of
+ {ok,{remove,Tag,Sz}} ->
+ {atom,Line,true};
+ _ ->
+ A
+ end;
+opt_remove_2(A, _) -> A.
+
neg_line(L) ->
erl_parse:set_line(L, fun(Line) -> -abs(Line) end).
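To make the new optimize_is_record/3 pass above concrete, a hedged before/after sketch in source form (module and record names invented; the real rewrite happens on the abstract format, and -compile(no_is_record_optimization) disables it, as the code shows):

    -module(rec_opt_demo).
    -export([f/3]).

    -record(r1, {a}).
    -record(r2, {b}).
    -record(r3, {c}).

    %% As written by the user:
    f(X, Y, Z) when is_record(X, r1) andalso
                    (is_record(Y, r2) orelse is_record(Z, r3)) ->
        {X#r1.a, Y, Z}.

    %% Roughly what opt_pattern/2 and opt_remove/2 turn the clause into, since
    %% the whole guard fails whenever the is_record/3 test on X fails:
    %%
    %%   f({r1,_}=X, Y, Z) when true andalso
    %%                          (is_record(Y, r2) orelse is_record(Z, r3)) ->
    %%       {X#r1.a, Y, Z}.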
diff --git a/lib/stdlib/src/erl_internal.erl b/lib/stdlib/src/erl_internal.erl
index 16173d8210..bf6e5bc5ca 100644
--- a/lib/stdlib/src/erl_internal.erl
+++ b/lib/stdlib/src/erl_internal.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1998-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1998-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(erl_internal).
@@ -48,7 +48,7 @@
%%
-export([bif/2,bif/3,guard_bif/2,
- type_test/2,new_type_test/2,old_type_test/2]).
+ type_test/2,new_type_test/2,old_type_test/2,old_bif/2]).
-export([arith_op/2,bool_op/2,comp_op/2,list_op/2,send_op/2,op_type/2]).
%%---------------------------------------------------------------------------
@@ -87,6 +87,8 @@ guard_bif(is_reference, 1) -> true;
guard_bif(is_tuple, 1) -> true;
guard_bif(is_record, 2) -> true;
guard_bif(is_record, 3) -> true;
+guard_bif(binary_part, 2) -> true;
+guard_bif(binary_part, 3) -> true;
guard_bif(Name, A) when is_atom(Name), is_integer(A) -> false.
%% Erlang type tests.
@@ -229,11 +231,14 @@ bif(apply, 2) -> true;
bif(apply, 3) -> true;
bif(atom_to_binary, 2) -> true;
bif(atom_to_list, 1) -> true;
+bif(binary_part, 2) -> true;
+bif(binary_part, 3) -> true;
bif(binary_to_atom, 2) -> true;
bif(binary_to_existing_atom, 2) -> true;
bif(binary_to_list, 1) -> true;
bif(binary_to_list, 3) -> true;
bif(binary_to_term, 1) -> true;
+bif(binary_to_term, 2) -> true;
bif(bitsize, 1) -> true;
bif(bit_size, 1) -> true;
bif(bitstring_to_list, 1) -> true;
@@ -294,6 +299,8 @@ bif(list_to_pid, 1) -> true;
bif(list_to_tuple, 1) -> true;
bif(load_module, 2) -> true;
bif(make_ref, 0) -> true;
+bif(max, 2) -> true;
+bif(min, 2) -> true;
bif(module_loaded, 1) -> true;
bif(monitor_node, 2) -> true;
bif(node, 0) -> true;
@@ -305,6 +312,7 @@ bif(open_port, 2) -> true;
bif(pid_to_list, 1) -> true;
bif(port_close, 1) -> true;
bif(port_command, 2) -> true;
+bif(port_command, 3) -> true;
bif(port_connect, 2) -> true;
bif(port_control, 3) -> true;
bif(pre_loaded, 0) -> true;
@@ -349,3 +357,134 @@ bif(unlink, 1) -> true;
bif(unregister, 1) -> true;
bif(whereis, 1) -> true;
bif(Name, A) when is_atom(Name), is_integer(A) -> false.
+
+-spec old_bif(Name::atom(), Arity::arity()) -> boolean().
+%% Returns true if erlang:Name/Arity is an old (pre R14) auto-imported BIF, false otherwise.
+%% Use erlang:is_builtin(Mod, Name, Arity) to find whether a function is a BIF
+%% (meaning implemented in C) or not.
+
+old_bif(abs, 1) -> true;
+old_bif(apply, 2) -> true;
+old_bif(apply, 3) -> true;
+old_bif(atom_to_binary, 2) -> true;
+old_bif(atom_to_list, 1) -> true;
+old_bif(binary_to_atom, 2) -> true;
+old_bif(binary_to_existing_atom, 2) -> true;
+old_bif(binary_to_list, 1) -> true;
+old_bif(binary_to_list, 3) -> true;
+old_bif(binary_to_term, 1) -> true;
+old_bif(bitsize, 1) -> true;
+old_bif(bit_size, 1) -> true;
+old_bif(bitstring_to_list, 1) -> true;
+old_bif(byte_size, 1) -> true;
+old_bif(check_process_code, 2) -> true;
+old_bif(concat_binary, 1) -> true;
+old_bif(date, 0) -> true;
+old_bif(delete_module, 1) -> true;
+old_bif(disconnect_node, 1) -> true;
+old_bif(element, 2) -> true;
+old_bif(erase, 0) -> true;
+old_bif(erase, 1) -> true;
+old_bif(exit, 1) -> true;
+old_bif(exit, 2) -> true;
+old_bif(float, 1) -> true;
+old_bif(float_to_list, 1) -> true;
+old_bif(garbage_collect, 0) -> true;
+old_bif(garbage_collect, 1) -> true;
+old_bif(get, 0) -> true;
+old_bif(get, 1) -> true;
+old_bif(get_keys, 1) -> true;
+old_bif(group_leader, 0) -> true;
+old_bif(group_leader, 2) -> true;
+old_bif(halt, 0) -> true;
+old_bif(halt, 1) -> true;
+old_bif(hd, 1) -> true;
+old_bif(integer_to_list, 1) -> true;
+old_bif(iolist_size, 1) -> true;
+old_bif(iolist_to_binary, 1) -> true;
+old_bif(is_alive, 0) -> true;
+old_bif(is_process_alive, 1) -> true;
+old_bif(is_atom, 1) -> true;
+old_bif(is_boolean, 1) -> true;
+old_bif(is_binary, 1) -> true;
+old_bif(is_bitstr, 1) -> true;
+old_bif(is_bitstring, 1) -> true;
+old_bif(is_float, 1) -> true;
+old_bif(is_function, 1) -> true;
+old_bif(is_function, 2) -> true;
+old_bif(is_integer, 1) -> true;
+old_bif(is_list, 1) -> true;
+old_bif(is_number, 1) -> true;
+old_bif(is_pid, 1) -> true;
+old_bif(is_port, 1) -> true;
+old_bif(is_reference, 1) -> true;
+old_bif(is_tuple, 1) -> true;
+old_bif(is_record, 2) -> true;
+old_bif(is_record, 3) -> true;
+old_bif(length, 1) -> true;
+old_bif(link, 1) -> true;
+old_bif(list_to_atom, 1) -> true;
+old_bif(list_to_binary, 1) -> true;
+old_bif(list_to_bitstring, 1) -> true;
+old_bif(list_to_existing_atom, 1) -> true;
+old_bif(list_to_float, 1) -> true;
+old_bif(list_to_integer, 1) -> true;
+old_bif(list_to_pid, 1) -> true;
+old_bif(list_to_tuple, 1) -> true;
+old_bif(load_module, 2) -> true;
+old_bif(make_ref, 0) -> true;
+old_bif(module_loaded, 1) -> true;
+old_bif(monitor_node, 2) -> true;
+old_bif(node, 0) -> true;
+old_bif(node, 1) -> true;
+old_bif(nodes, 0) -> true;
+old_bif(nodes, 1) -> true;
+old_bif(now, 0) -> true;
+old_bif(open_port, 2) -> true;
+old_bif(pid_to_list, 1) -> true;
+old_bif(port_close, 1) -> true;
+old_bif(port_command, 2) -> true;
+old_bif(port_connect, 2) -> true;
+old_bif(port_control, 3) -> true;
+old_bif(pre_loaded, 0) -> true;
+old_bif(process_flag, 2) -> true;
+old_bif(process_flag, 3) -> true;
+old_bif(process_info, 1) -> true;
+old_bif(process_info, 2) -> true;
+old_bif(processes, 0) -> true;
+old_bif(purge_module, 1) -> true;
+old_bif(put, 2) -> true;
+old_bif(register, 2) -> true;
+old_bif(registered, 0) -> true;
+old_bif(round, 1) -> true;
+old_bif(self, 0) -> true;
+old_bif(setelement, 3) -> true;
+old_bif(size, 1) -> true;
+old_bif(spawn, 1) -> true;
+old_bif(spawn, 2) -> true;
+old_bif(spawn, 3) -> true;
+old_bif(spawn, 4) -> true;
+old_bif(spawn_link, 1) -> true;
+old_bif(spawn_link, 2) -> true;
+old_bif(spawn_link, 3) -> true;
+old_bif(spawn_link, 4) -> true;
+old_bif(spawn_monitor, 1) -> true;
+old_bif(spawn_monitor, 3) -> true;
+old_bif(spawn_opt, 2) -> true;
+old_bif(spawn_opt, 3) -> true;
+old_bif(spawn_opt, 4) -> true;
+old_bif(spawn_opt, 5) -> true;
+old_bif(split_binary, 2) -> true;
+old_bif(statistics, 1) -> true;
+old_bif(term_to_binary, 1) -> true;
+old_bif(term_to_binary, 2) -> true;
+old_bif(throw, 1) -> true;
+old_bif(time, 0) -> true;
+old_bif(tl, 1) -> true;
+old_bif(trunc, 1) -> true;
+old_bif(tuple_size, 1) -> true;
+old_bif(tuple_to_list, 1) -> true;
+old_bif(unlink, 1) -> true;
+old_bif(unregister, 1) -> true;
+old_bif(whereis, 1) -> true;
+old_bif(Name, A) when is_atom(Name), is_integer(A) -> false.
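A small usage sketch of the classification functions this module now provides; the shell session is shown for illustration only, and the results follow directly from the clauses above (spawn/1 predates R14, while binary_part/3 is a new auto-imported guard BIF):

    1> erl_internal:old_bif(spawn, 1).
    true
    2> erl_internal:old_bif(binary_part, 3).
    false
    3> erl_internal:bif(binary_part, 3).
    true
    4> erl_internal:guard_bif(binary_part, 3).
    true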
diff --git a/lib/stdlib/src/erl_lint.erl b/lib/stdlib/src/erl_lint.erl
index 94ad560549..077621ac91 100644
--- a/lib/stdlib/src/erl_lint.erl
+++ b/lib/stdlib/src/erl_lint.erl
@@ -40,7 +40,7 @@
%% Value.
%% The option handling functions.
--spec bool_option(atom(), atom(), boolean(), [_]) -> boolean().
+-spec bool_option(atom(), atom(), boolean(), [compile:option()]) -> boolean().
bool_option(On, Off, Default, Opts) ->
foldl(fun (Opt, _Def) when Opt =:= On -> true;
@@ -72,6 +72,10 @@ value_option(Flag, Default, On, OnVal, Off, OffVal, Opts) ->
%%-define(DEBUGF(X,Y), io:format(X, Y)).
-define(DEBUGF(X,Y), void).
+-type line() :: erl_scan:line(). % a convenient alias
+-type fa() :: {atom(), arity()}. % function+arity
+-type ta() :: {atom(), arity()}. % type+arity
+
%% Usage of records, functions, and imports. The variable table, which
%% is passed on as an argument, holds the usage of variables.
-record(usage, {
@@ -94,9 +98,11 @@ value_option(Flag, Default, On, OnVal, Off, OffVal, Opts) ->
mod_imports=dict:new() :: dict(), %Module Imports
compile=[], %Compile flags
records=dict:new() :: dict(), %Record definitions
+ locals=gb_sets:empty() :: gb_set(), %All defined functions (prescanned)
+ no_auto=gb_sets:empty() :: gb_set(), %Functions explicitly not autoimported
 defined=gb_sets:empty() :: gb_set(), %Defined functions
- on_load=[] :: [{atom(),integer()}], %On-load function
- on_load_line=0 :: integer(), %Line for on_load
+ on_load=[] :: [fa()], %On-load function
+ on_load_line=0 :: line(), %Line for on_load
clashes=[], %Exported functions named as BIFs
not_deprecated=[], %Not considered deprecated
func=[], %Current function
@@ -110,10 +116,11 @@ value_option(Flag, Default, On, OnVal, Off, OffVal, Opts) ->
%outside any fun or lc
xqlc= false :: boolean(), %true if qlc.hrl included
new = false :: boolean(), %Has user-defined 'new/N'
- called= [], %Called functions
+ called= [] :: [{fa(),line()}], %Called functions
usage = #usage{} :: #usage{},
specs = dict:new() :: dict(), %Type specifications
- types = dict:new() :: dict() %Type definitions
+ types = dict:new() :: dict(), %Type definitions
+ exp_types=gb_sets:empty():: gb_set() %Exported types
}).
-type lint_state() :: #lint{}.
@@ -161,6 +168,9 @@ format_error({bad_nowarn_unused_function,{F,A}}) ->
io_lib:format("function ~w/~w undefined", [F,A]);
format_error({bad_nowarn_bif_clash,{F,A}}) ->
io_lib:format("function ~w/~w undefined", [F,A]);
+format_error(disallowed_nowarn_bif_clash) ->
+ io_lib:format("compile directive nowarn_bif_clash is no longer allowed,~n"
+ " - use explicit module names or -compile({no_auto_import, [F/A]})", []);
format_error({bad_nowarn_deprecated_function,{M,F,A}}) ->
io_lib:format("~w:~w/~w is not a deprecated function", [M,F,A]);
format_error({bad_on_load,Term}) ->
@@ -186,13 +196,21 @@ format_error({define_import,{F,A}}) ->
io_lib:format("defining imported function ~w/~w", [F,A]);
format_error({unused_function,{F,A}}) ->
io_lib:format("function ~w/~w is unused", [F,A]);
-format_error({redefine_bif,{F,A}}) ->
- io_lib:format("defining BIF ~w/~w", [F,A]);
format_error({call_to_redefined_bif,{F,A}}) ->
- io_lib:format("call to ~w/~w will call erlang:~w/~w; "
- "not ~w/~w in this module \n"
- " (add an explicit module name to the call to avoid this error)",
- [F,A,F,A,F,A]);
+ io_lib:format("ambiguous call of overridden auto-imported BIF ~w/~w~n"
+ " - use erlang:~w/~w or \"-compile({no_auto_import,[~w/~w]}).\" "
+ "to resolve name clash", [F,A,F,A,F,A]);
+format_error({call_to_redefined_old_bif,{F,A}}) ->
+ io_lib:format("ambiguous call of overridden pre R14 auto-imported BIF ~w/~w~n"
+ " - use erlang:~w/~w or \"-compile({no_auto_import,[~w/~w]}).\" "
+ "to resolve name clash", [F,A,F,A,F,A]);
+format_error({redefine_old_bif_import,{F,A}}) ->
+ io_lib:format("import directive overrides pre R14 auto-imported BIF ~w/~w~n"
+ " - use \"-compile({no_auto_import,[~w/~w]}).\" "
+ "to resolve name clash", [F,A,F,A]);
+format_error({redefine_bif_import,{F,A}}) ->
+ io_lib:format("import directive overrides auto-imported BIF ~w/~w~n"
+ " - use \"-compile({no_auto_import,[~w/~w]}).\" to resolve name clash", [F,A,F,A]);
format_error({deprecated, MFA, ReplacementMFA, Rel}) ->
io_lib:format("~s is deprecated and will be removed in ~s; use ~s",
@@ -213,6 +231,9 @@ format_error(illegal_pattern) -> "illegal pattern";
format_error(illegal_bin_pattern) ->
"binary patterns cannot be matched in parallel using '='";
format_error(illegal_expr) -> "illegal expression";
+format_error({illegal_guard_local_call, {F,A}}) ->
+ io_lib:format("call to local/imported function ~w/~w is illegal in guard",
+ [F,A]);
format_error(illegal_guard_expr) -> "illegal guard expression";
%% --- exports ---
format_error({explicit_export,F,A}) ->
@@ -242,10 +263,10 @@ format_error({untyped_record,T}) ->
format_error({unbound_var,V}) ->
io_lib:format("variable ~w is unbound", [V]);
format_error({unsafe_var,V,{What,Where}}) ->
- io_lib:format("variable ~w unsafe in ~w ~s",
+ io_lib:format("variable ~w unsafe in ~w ~s",
[V,What,format_where(Where)]);
format_error({exported_var,V,{What,Where}}) ->
- io_lib:format("variable ~w exported from ~w ~s",
+ io_lib:format("variable ~w exported from ~w ~s",
[V,What,format_where(Where)]);
format_error({shadowed_var,V,In}) ->
io_lib:format("variable ~w shadowed in ~w", [V,In]);
@@ -290,22 +311,24 @@ format_error({ill_defined_behaviour_callbacks,Behaviour}) ->
%% --- types and specs ---
format_error({singleton_typevar, Name}) ->
io_lib:format("type variable ~w is only used once (is unbound)", [Name]);
+format_error({duplicated_export_type, {T, A}}) ->
+ io_lib:format("type ~w/~w already exported", [T, A]);
format_error({undefined_type, {TypeName, Arity}}) ->
io_lib:format("type ~w~s undefined", [TypeName, gen_type_paren(Arity)]);
format_error({unused_type, {TypeName, Arity}}) ->
io_lib:format("type ~w~s is unused", [TypeName, gen_type_paren(Arity)]);
format_error({new_builtin_type, {TypeName, Arity}}) ->
io_lib:format("type ~w~s is a new builtin type; "
- "its (re)definition is allowed only until the next release",
+ "its (re)definition is allowed only until the next release",
[TypeName, gen_type_paren(Arity)]);
format_error({builtin_type, {TypeName, Arity}}) ->
- io_lib:format("type ~w~s is a builtin type; it cannot be redefined",
+ io_lib:format("type ~w~s is a builtin type; it cannot be redefined",
[TypeName, gen_type_paren(Arity)]);
format_error({renamed_type, OldName, NewName}) ->
io_lib:format("type ~w() is now called ~w(); "
"please use the new name instead", [OldName, NewName]);
format_error({redefine_type, {TypeName, Arity}}) ->
- io_lib:format("type ~w~s already defined",
+ io_lib:format("type ~w~s already defined",
[TypeName, gen_type_paren(Arity)]);
format_error({type_syntax, Constr}) ->
io_lib:format("bad ~w type", [Constr]);
@@ -354,7 +377,7 @@ pseudolocals() ->
%%
%% Used by erl_eval.erl to check commands.
-%%
+%%
exprs(Exprs, BindingsList) ->
exprs_opt(Exprs, BindingsList, []).
@@ -362,7 +385,7 @@ exprs_opt(Exprs, BindingsList, Opts) ->
{St0,Vs} = foldl(fun({{record,_SequenceNumber,_Name},Attr0}, {St1,Vs1}) ->
Attr = zip_file_and_line(Attr0, "none"),
{attribute_state(Attr, St1),Vs1};
- ({V,_}, {St1,Vs1}) ->
+ ({V,_}, {St1,Vs1}) ->
{St1,[{V,{bound,unused,[]}} | Vs1]}
end, {start("nofile",Opts),[]}, BindingsList),
Vt = orddict:from_list(Vs),
@@ -391,7 +414,7 @@ module(Forms) ->
Opts = compiler_options(Forms),
St = forms(Forms, start("nofile", Opts)),
return_status(St).
-
+
module(Forms, FileName) ->
Opts = compiler_options(Forms),
St = forms(Forms, start(FileName, Opts)),
@@ -506,7 +529,7 @@ pack_errors(Es) ->
%% Sort on line number.
pack_warnings(Ws) ->
- [{File,lists:sort([W || {F,W} <- Ws, F =:= File])} ||
+ [{File,lists:sort([W || {F,W} <- Ws, F =:= File])} ||
File <- lists:usort([F || {F,_} <- Ws])].
%% add_error(ErrorDescriptor, State) -> State'
@@ -516,13 +539,13 @@ pack_warnings(Ws) ->
add_error(E, St) -> St#lint{errors=[{St#lint.file,E}|St#lint.errors]}.
-add_error(FileLine, E, St) ->
+add_error(FileLine, E, St) ->
{File,Location} = loc(FileLine),
add_error({Location,erl_lint,E}, St#lint{file = File}).
add_warning(W, St) -> St#lint{warnings=[{St#lint.file,W}|St#lint.warnings]}.
-add_warning(FileLine, W, St) ->
+add_warning(FileLine, W, St) ->
{File,Location} = loc(FileLine),
add_warning({Location,erl_lint,W}, St#lint{file = File}).
@@ -538,8 +561,12 @@ loc(L) ->
forms(Forms0, St0) ->
Forms = eval_file_attribute(Forms0, St0),
+ Locals = local_functions(Forms),
+ AutoImportSuppressed = auto_import_suppressed(St0#lint.compile),
+ StDeprecated = disallowed_compile_flags(Forms,St0),
%% Line numbers are from now on pairs {File,Line}.
- St1 = includes_qlc_hrl(Forms, St0),
+ St1 = includes_qlc_hrl(Forms, StDeprecated#lint{locals = Locals,
+ no_auto = AutoImportSuppressed}),
St2 = bif_clashes(Forms, St1),
St3 = not_deprecated(Forms, St2),
St4 = foldl(fun form/2, pre_scan(Forms, St3), Forms),
@@ -561,7 +588,7 @@ pre_scan([_ | Fs], St) ->
pre_scan(Fs, St);
pre_scan([], St) ->
St.
-
+
includes_qlc_hrl(Forms, St) ->
%% QLC calls erl_lint several times, sometimes with the compile
%% attribute removed. The file attribute, however, is left as is.
@@ -667,6 +694,8 @@ attribute_state({attribute,L,extends,_M}, St) ->
add_error(L, invalid_extends, St);
attribute_state({attribute,L,export,Es}, St) ->
export(L, Es, St);
+attribute_state({attribute,L,export_type,Es}, St) ->
+ export_type(L, Es, St);
attribute_state({attribute,L,import,Is}, St) ->
import(L, Is, St);
attribute_state({attribute,L,record,{Name,Fields}}, St) ->
@@ -724,27 +753,38 @@ bif_clashes(Forms, St) ->
Clashes = ordsets:subtract(ordsets:from_list(Clashes0), Nowarn),
St#lint{clashes=Clashes}.
--spec is_bif_clash(atom(), byte(), lint_state()) -> boolean().
-
-is_bif_clash(_Name, _Arity, #lint{clashes=[]}) ->
- false;
-is_bif_clash(Name, Arity, #lint{clashes=Clashes}) ->
- ordsets:is_element({Name,Arity}, Clashes).
-
%% not_deprecated(Forms, State0) -> State
not_deprecated(Forms, St0) ->
%% There are no line numbers in St0#lint.compile.
- MFAsL = [{MFA,L} ||
+ MFAsL = [{MFA,L} ||
{attribute, L, compile, Args} <- Forms,
{nowarn_deprecated_function, MFAs0} <- lists:flatten([Args]),
MFA <- lists:flatten([MFAs0])],
Nowarn = [MFA || {MFA,_L} <- MFAsL],
- Bad = [MFAL || {{M,F,A},_L}=MFAL <- MFAsL,
+ Bad = [MFAL || {{M,F,A},_L}=MFAL <- MFAsL,
otp_internal:obsolete(M, F, A) =:= no],
St1 = func_line_warning(bad_nowarn_deprecated_function, Bad, St0),
St1#lint{not_deprecated = ordsets:from_list(Nowarn)}.
+%% The nowarn_bif_clash directive is not only deprecated, it's actually an error from R14A
+disallowed_compile_flags(Forms, St0) ->
+ %% There are (still) no line numbers in St0#lint.compile.
+ Errors0 = [ {St0#lint.file,{L,erl_lint,disallowed_nowarn_bif_clash}} ||
+ {attribute,[{line,{_,L}}],compile,nowarn_bif_clash} <- Forms ],
+ Errors1 = [ {St0#lint.file,{L,erl_lint,disallowed_nowarn_bif_clash}} ||
+ {attribute,[{line,{_,L}}],compile,{nowarn_bif_clash, {_,_}}} <- Forms ],
+ Disabled = (not is_warn_enabled(bif_clash, St0)),
+ Errors = if
+ Disabled andalso Errors0 =:= [] ->
+ [{St0#lint.file,{erl_lint,disallowed_nowarn_bif_clash}} | St0#lint.errors];
+ Disabled ->
+ Errors0 ++ Errors1 ++ St0#lint.errors;
+ true ->
+ Errors1 ++ St0#lint.errors
+ end,
+ St0#lint{errors=Errors}.
+
%% post_traversal_check(Forms, State0) -> State.
%% Do some further checking after the forms have been traversed and
%% data about calls etc. have been collected.
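For reference, a hedged sketch of what disallowed_compile_flags/2 above now rejects, and the supported replacement it points to (module name invented):

    -module(clash_opts).
    -export([spawn/1]).

    %% Rejected from R14A on; erl_lint now reports disallowed_nowarn_bif_clash:
    %%-compile(nowarn_bif_clash).

    %% Supported replacement: suppress the auto-import of the clashing BIF so
    %% that bare calls unambiguously mean the local spawn/1.
    -compile({no_auto_import, [spawn/1]}).

    spawn(Fun) -> erlang:spawn(Fun).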
@@ -862,7 +902,7 @@ check_deprecated(Forms, St0) ->
Bad = [{E,L} || {attribute, L, deprecated, Depr} <- Forms,
D <- lists:flatten([Depr]),
E <- depr_cat(D, X, Mod)],
- foldl(fun ({E,L}, St1) ->
+ foldl(fun ({E,L}, St1) ->
add_error(L, E, St1)
end, St0, Bad).
@@ -912,7 +952,7 @@ check_imports(Forms, St0) ->
true ->
Usage = St0#lint.usage,
Unused = ordsets:subtract(St0#lint.imports, Usage#usage.imported),
- Imports = [{{FA,list_to_atom(package_to_string(Mod))},L}
+ Imports = [{{FA,list_to_atom(package_to_string(Mod))},L}
|| {attribute,L,import,{Mod,Fs}} <- Forms,
FA <- lists:usort(Fs)],
Bad = [{FM,L} || FM <- Unused, {FM2,L} <- Imports, FM =:= FM2],
@@ -932,7 +972,7 @@ check_unused_functions(Forms, St0) ->
Opts = St1#lint.compile,
case member(export_all, Opts) orelse
not is_warn_enabled(unused_function, St1) of
- true ->
+ true ->
St1;
false ->
Nowarn = nowarn_function(nowarn_unused_function, Opts),
@@ -1003,12 +1043,13 @@ check_option_functions(Forms, Tag0, Type, St0) ->
{Tag, FAs0} <- lists:flatten([Args]),
Tag0 =:= Tag,
FA <- lists:flatten([FAs0])],
- DefFunctions = gb_sets:to_list(St0#lint.defined) -- pseudolocals(),
+ DefFunctions = (gb_sets:to_list(St0#lint.defined) -- pseudolocals()) ++
+ [{F,A} || {{F,A},_} <- orddict:to_list(St0#lint.imports)],
Bad = [{FA,L} || {FA,L} <- FAsL, not member(FA, DefFunctions)],
func_line_error(Type, Bad, St0).
nowarn_function(Tag, Opts) ->
- ordsets:from_list([FA || {Tag1,FAs} <- Opts,
+ ordsets:from_list([FA || {Tag1,FAs} <- Opts,
Tag1 =:= Tag,
FA <- lists:flatten([FAs])]).
@@ -1048,10 +1089,10 @@ check_unused_records(Forms, St0) ->
%% functions count.
Usage = St0#lint.usage,
UsedRecords = sets:to_list(Usage#usage.used_records),
- URecs = foldl(fun (Used, Recs) ->
- dict:erase(Used, Recs)
+ URecs = foldl(fun (Used, Recs) ->
+ dict:erase(Used, Recs)
end, St0#lint.records, UsedRecords),
- Unused = [{Name,FileLine} ||
+ Unused = [{Name,FileLine} ||
{Name,{FileLine,_Fields}} <- dict:to_list(URecs),
element(1, loc(FileLine)) =:= FirstFile],
foldl(fun ({N,L}, St) ->
@@ -1061,18 +1102,19 @@ check_unused_records(Forms, St0) ->
St0
end.
-%% For storing the import list we use the orddict module.
+%% For storing the import list we use the orddict module.
%% We know an empty set is [].
-%% export(Line, Exports, State) -> State.
+-spec export(line(), [fa()], lint_state()) -> lint_state().
%% Mark functions as exported, also as called from the export line.
export(Line, Es, #lint{exports = Es0, called = Called} = St0) ->
- {Es1,C1,St1} =
+ {Es1,C1,St1} =
foldl(fun (NA, {E,C,St2}) ->
St = case gb_sets:is_element(NA, E) of
true ->
- add_warning(Line, {duplicated_export, NA}, St2);
+ Warn = {duplicated_export,NA},
+ add_warning(Line, Warn, St2);
false ->
St2
end,
@@ -1081,8 +1123,27 @@ export(Line, Es, #lint{exports = Es0, called = Called} = St0) ->
{Es0,Called,St0}, Es),
St1#lint{exports = Es1, called = C1}.
-%% import(Line, Imports, State) -> State.
-%% imported(Name, Arity, State) -> {yes,Module} | no.
+-spec export_type(line(), [ta()], lint_state()) -> lint_state().
+%% Mark types as exported; also mark them as used from the export line.
+
+export_type(Line, ETs, #lint{usage = Usage, exp_types = ETs0} = St0) ->
+ UTs0 = Usage#usage.used_types,
+ {ETs1,UTs1,St1} =
+ foldl(fun (TA, {E,U,St2}) ->
+ St = case gb_sets:is_element(TA, E) of
+ true ->
+ Warn = {duplicated_export_type,TA},
+ add_warning(Line, Warn, St2);
+ false ->
+ St2
+ end,
+ {gb_sets:add_element(TA, E), dict:store(TA, Line, U), St}
+ end,
+ {ETs0,UTs0,St0}, ETs),
+ St1#lint{usage = Usage#usage{used_types = UTs1}, exp_types = ETs1}.
+
+-type import() :: {module(), [fa()]} | module().
+-spec import(line(), import(), lint_state()) -> lint_state().
import(Line, {Mod,Fs}, St) ->
Mod1 = package_to_string(Mod),
@@ -1094,11 +1155,41 @@ import(Line, {Mod,Fs}, St) ->
St#lint{imports=add_imports(list_to_atom(Mod1), Mfs,
St#lint.imports)};
Efs ->
- foldl(fun (Ef, St0) ->
- add_error(Line, {redefine_import,Ef},
- St0)
+ {Err, St1} =
+ foldl(fun ({bif,{F,A},_}, {Err,St0}) ->
+ %% BifClash - import directive
+ Warn = is_warn_enabled(bif_clash, St0)
+ and (not bif_clash_specifically_disabled(St0,{F,A})),
+ AutoImpSup = is_autoimport_suppressed(St0#lint.no_auto,{F,A}),
+ OldBif = erl_internal:old_bif(F,A),
+ {Err,if
+ Warn and (not AutoImpSup) and OldBif ->
+ add_error
+ (Line,
+ {redefine_old_bif_import, {F,A}},
+ St0);
+ Warn and (not AutoImpSup) ->
+ add_warning
+ (Line,
+ {redefine_bif_import, {F,A}},
+ St0);
+ true ->
+ St0
+ end};
+ (Ef, {_Err,St0}) ->
+ {true,add_error(Line,
+ {redefine_import,Ef},
+ St0)}
end,
- St, Efs)
+ {false,St}, Efs),
+ if
+ not Err ->
+ St1#lint{imports=
+ add_imports(list_to_atom(Mod1), Mfs,
+ St#lint.imports)};
+ true ->
+ St1
+ end
end;
false ->
add_error(Line, {bad_module_name, Mod1}, St)
@@ -1141,13 +1232,15 @@ check_imports(_Line, Fs, Is) ->
add_imports(Mod, Fs, Is) ->
foldl(fun (F, Is0) -> orddict:store(F, Mod, Is0) end, Is, Fs).
+-spec imported(atom(), arity(), lint_state()) -> {'yes',module()} | 'no'.
+
imported(F, A, St) ->
case orddict:find({F,A}, St#lint.imports) of
{ok,Mod} -> {yes,Mod};
error -> no
end.
-%% on_load(Line, Val, State) -> State.
+-spec on_load(line(), fa(), lint_state()) -> lint_state().
%% Check an on_load directive and remember it.
on_load(Line, {Name,Arity}=Fa, #lint{on_load=OnLoad0}=St0)
@@ -1179,7 +1272,7 @@ check_on_load(#lint{defined=Defined,on_load=[{_,0}=Fa],
end;
check_on_load(St) -> St.
-%% call_function(Line, Name, Arity, State) -> State.
+-spec call_function(line(), atom(), arity(), lint_state()) -> lint_state().
%% Add to both called and calls.
call_function(Line, F, A, #lint{usage=Usage0,called=Cd,func=Func}=St) ->
@@ -1191,12 +1284,6 @@ call_function(Line, F, A, #lint{usage=Usage0,called=Cd,func=Func}=St) ->
end,
St#lint{called=[{NA,Line}|Cd], usage=Usage}.
-%% is_function_exported(Name, Arity, State) -> false|true.
-
-is_function_exported(Name, Arity, #lint{exports=Exports,compile=Compile}) ->
- gb_sets:is_element({Name,Arity}, Exports) orelse
- member(export_all, Compile).
-
%% function(Line, Name, Arity, Clauses, State) -> State.
function(Line, instance, _Arity, _Cs, St) when St#lint.global_vt =/= [] ->
@@ -1205,7 +1292,7 @@ function(Line, Name, Arity, Cs, St0) ->
St1 = define_function(Line, Name, Arity, St0#lint{func={Name,Arity}}),
clauses(Cs, St1#lint.global_vt, St1).
-%% define_function(Line, Name, Arity, State) -> State.
+-spec define_function(line(), atom(), arity(), lint_state()) -> lint_state().
define_function(Line, Name, Arity, St0) ->
St1 = keyword_warning(Line, Name, St0),
@@ -1215,14 +1302,9 @@ define_function(Line, Name, Arity, St0) ->
add_error(Line, {redefine_function,NA}, St1);
false ->
St2 = St1#lint{defined=gb_sets:add_element(NA, St1#lint.defined)},
- St = case erl_internal:bif(Name, Arity) andalso
- not is_function_exported(Name, Arity, St2) of
- true -> add_warning(Line, {redefine_bif,NA}, St2);
- false -> St2
- end,
- case imported(Name, Arity, St) of
- {yes,_M} -> add_error(Line, {define_import,NA}, St);
- no -> St
+ case imported(Name, Arity, St2) of
+ {yes,_M} -> add_error(Line, {define_import,NA}, St2);
+ no -> St2
end
end.
@@ -1258,7 +1340,7 @@ head([P|Ps], Vt, Old, St0) ->
{vtmerge_pat(Pvt, Psvt),vtmerge_pat(Bvt1,Bvt2),St2};
head([], _Vt, _Env, St) -> {[],[],St}.
-%% pattern(Pattern, VarTable, Old, BinVarTable, State) ->
+%% pattern(Pattern, VarTable, Old, BinVarTable, State) ->
%% {UpdVarTable,BinVarTable,State}.
%% Check pattern return variables. Old is the set of variables used for
%% deciding whether an occurrence is a binding occurrence or a use, and
@@ -1276,7 +1358,7 @@ pattern(P, Vt, St) ->
pattern({var,_Line,'_'}, _Vt, _Old, _Bvt, St) ->
{[],[],St}; %Ignore anonymous variable
-pattern({var,Line,V}, _Vt, Old, Bvt, St) ->
+pattern({var,Line,V}, _Vt, Old, Bvt, St) ->
pat_var(V, Line, Old, Bvt, St);
pattern({char,_Line,_C}, _Vt, _Old, _Bvt, St) -> {[],[],St};
pattern({integer,_Line,_I}, _Vt, _Old, _Bvt, St) -> {[],[],St};
@@ -1294,7 +1376,7 @@ pattern({tuple,_Line,Ps}, Vt, Old, Bvt, St) ->
%%pattern({struct,_Line,_Tag,Ps}, Vt, Old, Bvt, St) ->
%% pattern_list(Ps, Vt, Old, Bvt, St);
pattern({record_index,Line,Name,Field}, _Vt, _Old, _Bvt, St) ->
- {Vt1,St1} =
+ {Vt1,St1} =
check_record(Line, Name, St,
fun (Dfs, St1) ->
pattern_field(Field, Name, Dfs, St1)
@@ -1309,7 +1391,7 @@ pattern({record_field,Line,_,_}=M, _Vt, _Old, _Bvt, St0) ->
end;
pattern({record,Line,Name,Pfs}, Vt, Old, Bvt, St) ->
case dict:find(Name, St#lint.records) of
- {ok,{_Line,Fields}} ->
+ {ok,{_Line,Fields}} ->
St1 = used_record(Name, St),
pattern_fields(Pfs, Name, Fields, Vt, Old, Bvt, St1);
error -> {[],[],add_error(Line, {undefined_record,Name}, St)}
@@ -1369,7 +1451,7 @@ reject_bin_alias({cons,_,H1,T1}, {cons,_,H2,T2}, St0) ->
reject_bin_alias(T1, T2, St);
reject_bin_alias({tuple,_,Es1}, {tuple,_,Es2}, St) ->
reject_bin_alias_list(Es1, Es2, St);
-reject_bin_alias({record,_,Name1,Pfs1}, {record,_,Name2,Pfs2},
+reject_bin_alias({record,_,Name1,Pfs1}, {record,_,Name2,Pfs2},
#lint{records=Recs}=St) ->
case {dict:find(Name1, Recs),dict:find(Name2, Recs)} of
{{ok,{_Line1,Fields1}},{ok,{_Line2,Fields2}}} ->
@@ -1451,7 +1533,7 @@ is_pattern_expr_1({op,_Line,Op,A1,A2}) ->
erl_internal:arith_op(Op, 2) andalso all(fun is_pattern_expr/1, [A1,A2]);
is_pattern_expr_1(_Other) -> false.
-%% pattern_bin([Element], VarTable, Old, BinVarTable, State) ->
+%% pattern_bin([Element], VarTable, Old, BinVarTable, State) ->
%% {UpdVarTable,UpdBinVarTable,State}.
%% Check a pattern group. BinVarTable are used binsize variables.
@@ -1498,7 +1580,7 @@ good_string_size_type(default, Ts) ->
end, Ts);
good_string_size_type(_, _) -> false.
-%% pat_bit_expr(Pattern, OldVarTable, BinVarTable,State) ->
+%% pat_bit_expr(Pattern, OldVarTable, BinVarTable,State) ->
%% {UpdVarTable,UpdBinVarTable,State}.
%% Check pattern bit expression, only allow really valid patterns!
@@ -1513,7 +1595,7 @@ pat_bit_expr(P, _Old, _Bvt, St) ->
false -> {[],[],add_error(element(2, P), illegal_pattern, St)}
end.
-%% pat_bit_size(Size, VarTable, BinVarTable, State) ->
+%% pat_bit_size(Size, VarTable, BinVarTable, State) ->
%% {Value,UpdVarTable,UpdBinVarTable,State}.
%% Check pattern size expression, only allow really valid sizes!
@@ -1596,7 +1678,7 @@ bit_size_check(Line, Size, #bittype{type=Type,unit=Unit}, St) ->
Sz = Unit * Size, %Total number of bits!
St2 = elemtype_check(Line, Type, Sz, St),
{Sz,St2}.
-
+
elemtype_check(_Line, float, 32, St) -> St;
elemtype_check(_Line, float, 64, St) -> St;
elemtype_check(Line, float, _Size, St) ->
@@ -1678,8 +1760,6 @@ gexpr({cons,_Line,H,T}, Vt, St) ->
gexpr_list([H,T], Vt, St);
gexpr({tuple,_Line,Es}, Vt, St) ->
gexpr_list(Es, Vt, St);
-%%gexpr({struct,_Line,_Tag,Es}, Vt, St) ->
-%% gexpr_list(Es, Vt, St);
gexpr({record_index,Line,Name,Field}, _Vt, St) ->
check_record(Line, Name, St,
fun (Dfs, St1) -> record_field(Field, Name, Dfs, St1) end );
@@ -1710,7 +1790,7 @@ gexpr({call,_Line,{atom,_Lr,is_record},[E,{atom,Ln,Name}]}, Vt, St0) ->
gexpr({call,Line,{atom,_Lr,is_record},[E,R]}, Vt, St0) ->
{Asvt,St1} = gexpr_list([E,R], Vt, St0),
{Asvt,add_error(Line, illegal_guard_expr, St1)};
-gexpr({call,Line,{remote,_Lr,{atom,_Lm,erlang},{atom,Lf,is_record}},[E,A]},
+gexpr({call,Line,{remote,_Lr,{atom,_Lm,erlang},{atom,Lf,is_record}},[E,A]},
Vt, St0) ->
gexpr({call,Line,{atom,Lf,is_record},[E,A]}, Vt, St0);
gexpr({call,_Line,{atom,_Lr,is_record},[E,{atom,_,_Name},{integer,_,_}]},
@@ -1725,14 +1805,22 @@ gexpr({call,Line,{remote,_,{atom,_,erlang},{atom,_,is_record}=Isr},[_,_,_]=Args}
gexpr({call,Line,{atom,_La,F},As}, Vt, St0) ->
{Asvt,St1} = gexpr_list(As, Vt, St0),
A = length(As),
- case erl_internal:guard_bif(F, A) of
+ %% BifClash - Function called in guard
+ case erl_internal:guard_bif(F, A) andalso no_guard_bif_clash(St1,{F,A}) of
true ->
%% Also check that it is auto-imported.
case erl_internal:bif(F, A) of
true -> {Asvt,St1};
false -> {Asvt,add_error(Line, {explicit_export,F,A}, St1)}
end;
- false -> {Asvt,add_error(Line, illegal_guard_expr, St1)}
+ false ->
+ case is_local_function(St1#lint.locals,{F,A}) orelse
+ is_imported_function(St1#lint.imports,{F,A}) of
+ true ->
+ {Asvt,add_error(Line, {illegal_guard_local_call,{F,A}}, St1)};
+ _ ->
+ {Asvt,add_error(Line, illegal_guard_expr, St1)}
+ end
end;
gexpr({call,Line,{remote,_Lr,{atom,_Lm,erlang},{atom,_Lf,F}},As}, Vt, St0) ->
{Asvt,St1} = gexpr_list(As, Vt, St0),
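A hedged sketch of the situation the new branch above distinguishes (module invented): once a guard BIF is shadowed by a local function, the bare call in a guard is reported as illegal_guard_local_call rather than as a generic illegal_guard_expr, while the fully qualified call keeps working:

    -module(guard_clash).
    -export([f/1, is_list/1]).

    is_list(_) -> not_really.             %% local function shadowing the guard BIF

    %% f(X) when is_list(X) -> list;      %% rejected: call to local/imported
    %%                                    %% function is_list/1 is illegal in guard
    f(X) when erlang:is_list(X) -> list;  %% explicit module name is still a guard test
    f(_) -> other.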
@@ -1777,7 +1865,7 @@ is_guard_test(E) ->
%% is_guard_test(Expression, Forms) -> boolean().
is_guard_test(Expression, Forms) ->
RecordAttributes = [A || A = {attribute, _, record, _D} <- Forms],
- St0 = foldl(fun(Attr0, St1) ->
+ St0 = foldl(fun(Attr0, St1) ->
Attr = zip_file_and_line(Attr0, "none"),
attribute_state(Attr, St1)
end, start(), RecordAttributes),
@@ -1798,7 +1886,7 @@ is_guard_test2(G, RDs) ->
%% is_guard_expr(Expression) -> boolean().
%% Test if an expression is a guard expression.
-is_guard_expr(E) -> is_gexpr(E, []).
+is_guard_expr(E) -> is_gexpr(E, []).
is_gexpr({var,_L,_V}, _RDs) -> true;
is_gexpr({char,_L,_C}, _RDs) -> true;
@@ -1820,7 +1908,7 @@ is_gexpr({record_field,_L,Rec,_Name,Field}, RDs) ->
is_gexpr({record,L,Name,Inits}, RDs) ->
is_gexpr_fields(Inits, L, Name, RDs);
is_gexpr({bin,_L,Fs}, RDs) ->
- all(fun ({bin_element,_Line,E,Sz,_Ts}) ->
+ all(fun ({bin_element,_Line,E,Sz,_Ts}) ->
is_gexpr(E, RDs) and (Sz =:= default orelse is_gexpr(Sz, RDs))
end, Fs);
is_gexpr({call,_L,{atom,_Lf,F},As}, RDs) ->
@@ -1895,15 +1983,13 @@ expr({bc,_Line,E,Qs}, Vt0, St0) ->
{vtold(Vt,Vt0),St}; %Don't export local variables
expr({tuple,_Line,Es}, Vt, St) ->
expr_list(Es, Vt, St);
-%%expr({struct,Line,Tag,Es}, Vt, St) ->
-%% expr_list(Es, Vt, St);
expr({record_index,Line,Name,Field}, _Vt, St) ->
check_record(Line, Name, St,
fun (Dfs, St1) -> record_field(Field, Name, Dfs, St1) end);
expr({record,Line,Name,Inits}, Vt, St) ->
check_record(Line, Name, St,
- fun (Dfs, St1) ->
- init_fields(Inits, Line, Name, Dfs, Vt, St1)
+ fun (Dfs, St1) ->
+ init_fields(Inits, Line, Name, Dfs, Vt, St1)
end);
expr({record_field,Line,_,_}=M, _Vt, St0) ->
case expand_package(M, St0) of
@@ -1958,8 +2044,11 @@ expr({'fun',Line,Body}, Vt, St) ->
{Bvt, St1} = fun_clauses(Cs, Vt, St),
{vtupdate(Bvt, Vt), St1};
{function,F,A} ->
+ %% BifClash - Fun expression
%% N.B. Only allows BIFs here as well, NO IMPORTS!!
- case erl_internal:bif(F, A) of
+ case ((not is_local_function(St#lint.locals,{F,A})) andalso
+ (erl_internal:bif(F, A) andalso
+ (not is_autoimport_suppressed(St#lint.no_auto,{F,A})))) of
true -> {[],St};
false -> {[],call_function(Line, F, A, St)}
end;
@@ -1969,7 +2058,7 @@ expr({'fun',Line,Body}, Vt, St) ->
expr({call,_Line,{atom,_Lr,is_record},[E,{atom,Ln,Name}]}, Vt, St0) ->
{Rvt,St1} = expr(E, Vt, St0),
{Rvt,exist_record(Ln, Name, St1)};
-expr({call,Line,{remote,_Lr,{atom,_Lm,erlang},{atom,Lf,is_record}},[E,A]},
+expr({call,Line,{remote,_Lr,{atom,_Lm,erlang},{atom,Lf,is_record}},[E,A]},
Vt, St0) ->
expr({call,Line,{atom,Lf,is_record},[E,A]}, Vt, St0);
expr({call,L,{tuple,Lt,[{atom,Lm,erlang},{atom,Lf,is_record}]},As}, Vt, St) ->
@@ -1992,29 +2081,54 @@ expr({call,Line,{atom,La,F},As}, Vt, St0) ->
St1 = keyword_warning(La, F, St0),
{Asvt,St2} = expr_list(As, Vt, St1),
A = length(As),
- case erl_internal:bif(F, A) of
+ IsLocal = is_local_function(St2#lint.locals,{F,A}),
+ IsAutoBif = erl_internal:bif(F, A),
+ AutoSuppressed = is_autoimport_suppressed(St2#lint.no_auto,{F,A}),
+ Warn = is_warn_enabled(bif_clash, St2) and (not bif_clash_specifically_disabled(St2,{F,A})),
+ Imported = imported(F, A, St2),
+ case ((not IsLocal) andalso (Imported =:= no) andalso
+ IsAutoBif andalso (not AutoSuppressed)) of
true ->
St3 = deprecated_function(Line, erlang, F, As, St2),
- {Asvt,case is_warn_enabled(bif_clash, St3) andalso
- is_bif_clash(F, A, St3) of
- false ->
- St3;
- true ->
- add_error(Line, {call_to_redefined_bif,{F,A}}, St3)
- end};
+ {Asvt,St3};
false ->
- {Asvt,case imported(F, A, St2) of
+ {Asvt,case Imported of
{yes,M} ->
St3 = check_remote_function(Line, M, F, As, St2),
U0 = St3#lint.usage,
Imp = ordsets:add_element({{F,A},M},U0#usage.imported),
St3#lint{usage=U0#usage{imported = Imp}};
no ->
- case {F,A} of
- {record_info,2} ->
+ case {F,A} of
+ {record_info,2} ->
check_record_info_call(Line,La,As,St2);
- N when N =:= St2#lint.func -> St2;
- _ -> call_function(Line, F, A, St2)
+ N ->
+ %% BifClash - function call
+ %% Issue these warnings/errors even if it's a recursive call
+ St3 = if
+ (not AutoSuppressed) andalso IsAutoBif andalso Warn ->
+ case erl_internal:old_bif(F,A) of
+ true ->
+ add_error
+ (Line,
+ {call_to_redefined_old_bif, {F,A}},
+ St2);
+ false ->
+ add_warning
+ (Line,
+ {call_to_redefined_bif, {F,A}},
+ St2)
+ end;
+ true ->
+ St2
+ end,
+ %% ...but don't lint recursive calls
+ if
+ N =:= St3#lint.func ->
+ St3;
+ true ->
+ call_function(Line, F, A, St3)
+ end
end
end}
end;
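Tying the branches above together, a hedged sketch (module invented) of the diagnostics a bare call can now trigger, and of the unambiguous spellings; adding -compile({no_auto_import, [spawn/1, min/2]}). would silence both diagnostics and make the bare calls refer to the local definitions:

    -module(call_clash).
    -export([go/0, spawn/1, min/2]).

    spawn(F) -> {my_spawn, F}.    %% clashes with a pre-R14 auto-imported BIF
    min(A, B) -> {A, B}.          %% clashes with a BIF auto-imported first in R14

    go() ->
        %% spawn(fun() -> ok end)  -- error:   call_to_redefined_old_bif
        %% min(1, 2)               -- warning: call_to_redefined_bif
        {erlang:spawn(fun() -> ok end),   %% explicit module name: always the BIF
         ?MODULE:min(1, 2)}.              %% explicit module name: the local function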
@@ -2155,7 +2269,7 @@ def_fields(Fs0, Name, St0) ->
foldl(fun ({record_field,Lf,{atom,La,F},V}, {Fs,St}) ->
case exist_field(F, Fs) of
true -> {Fs,add_error(Lf, {redefine_field,Name,F}, St)};
- false ->
+ false ->
St1 = St#lint{recdef_top = true},
{_,St2} = expr(V, [], St1),
%% Warnings and errors found are kept, but
@@ -2306,7 +2420,7 @@ init_fields(Ifs, Line, Name, Dfs, Vt0, St0) ->
Defs = init_fields(Ifs, Line, Dfs),
{_,St2} = check_fields(Defs, Name, Dfs, Vt1, St1, fun expr/3),
{Vt1,St1#lint{usage = St2#lint.usage}}.
-
+
ginit_fields(Ifs, Line, Name, Dfs, Vt0, St0) ->
{Vt1,St1} = check_fields(Ifs, Name, Dfs, Vt0, St0, fun gexpr/3),
Defs = init_fields(Ifs, Line, Dfs),
@@ -2316,7 +2430,7 @@ ginit_fields(Ifs, Line, Name, Dfs, Vt0, St0) ->
IllErrs = [E || {_File,{_Line,erl_lint,illegal_guard_expr}}=E <- Errors],
St4 = St1#lint{usage = Usage, errors = IllErrs ++ St1#lint.errors},
{Vt1,St4}.
-
+
%% Default initializations to be carried out
init_fields(Ifs, Line, Dfs) ->
[ {record_field,Lf,{atom,La,F},copy_expr(Di, Line)} ||
@@ -2394,7 +2508,7 @@ check_type({ann_type, _L, [_Var, Type]}, SeenVars, St) ->
check_type(Type, SeenVars, St);
check_type({paren_type, _L, [Type]}, SeenVars, St) ->
check_type(Type, SeenVars, St);
-check_type({remote_type, L, [{atom, _, Mod}, {atom, _, Name}, Args]},
+check_type({remote_type, L, [{atom, _, Mod}, {atom, _, Name}, Args]},
SeenVars, #lint{module=CurrentMod} = St) ->
St1 =
case (dict:is_key({Name, length(Args)}, default_types())
@@ -2432,7 +2546,7 @@ check_type({type, L, 'fun', [Dom, Range]}, SeenVars, St) ->
check_type({type, -1, product, [Dom, Range]}, SeenVars, St1);
check_type({type, L, range, [From, To]}, SeenVars, St) ->
St1 =
- case {From, To} of
+ case {erl_eval:partial_eval(From), erl_eval:partial_eval(To)} of
{{integer, _, X}, {integer, _, Y}} when X < Y -> St;
_ -> add_error(L, {type_syntax, range}, St)
end,
@@ -2441,8 +2555,8 @@ check_type({type, _L, tuple, any}, SeenVars, St) -> {SeenVars, St};
check_type({type, _L, any}, SeenVars, St) -> {SeenVars, St};
check_type({type, L, binary, [Base, Unit]}, SeenVars, St) ->
St1 =
- case {Base, Unit} of
- {{integer, _, BaseVal},
+ case {erl_eval:partial_eval(Base), erl_eval:partial_eval(Unit)} of
+ {{integer, _, BaseVal},
{integer, _, UnitVal}} when BaseVal >= 0, UnitVal >= 0 -> St;
_ -> add_error(L, {type_syntax, binary}, St)
end,
@@ -2467,7 +2581,13 @@ check_type({type, La, TypeName, Args}, SeenVars, #lint{usage=Usage} = St) ->
UsedTypes = dict:store({TypeName, Arity}, La, OldUsed),
St#lint{usage=Usage#usage{used_types=UsedTypes}}
end,
- check_type({type, -1, product, Args}, SeenVars, St1).
+ check_type({type, -1, product, Args}, SeenVars, St1);
+check_type(I, SeenVars, St) ->
+ case erl_eval:partial_eval(I) of
+ {integer,_ILn,_Integer} -> {SeenVars, St};
+ _Other ->
+ {SeenVars, add_error(element(2, I), {type_syntax, integer}, St)}
+ end.
check_record_types(Line, Name, Fields, SeenVars, St) ->
case dict:find(Name, St#lint.records) of
@@ -2475,12 +2595,12 @@ check_record_types(Line, Name, Fields, SeenVars, St) ->
case lists:all(fun({type, _, field_type, _}) -> true;
(_) -> false
end, Fields) of
- true ->
+ true ->
check_record_types(Fields, Name, DefFields, SeenVars, St, []);
false ->
{SeenVars, add_error(Line, {type_syntax, record}, St)}
end;
- error ->
+ error ->
{SeenVars, add_error(Line, {undefined_record, Name}, St)}
end.
@@ -2563,7 +2683,6 @@ default_types() ->
{set, 0},
{string, 0},
{term, 0},
- {tid, 0},
{timeout, 0},
{var, 1}],
dict:from_list([{T, -1} || T <- DefTypes]).
@@ -2585,7 +2704,6 @@ is_newly_introduced_builtin_type({gb_tree, 0}) -> true; % opaque
is_newly_introduced_builtin_type({iodata, 0}) -> true;
is_newly_introduced_builtin_type({queue, 0}) -> true; % opaque
is_newly_introduced_builtin_type({set, 0}) -> true; % opaque
-is_newly_introduced_builtin_type({tid, 0}) -> true; % opaque
%% R13B01
is_newly_introduced_builtin_type({boolean, 0}) -> true;
is_newly_introduced_builtin_type({Name, _}) when is_atom(Name) -> false.
@@ -2606,7 +2724,7 @@ spec_decl(Line, MFA0, TypeSpecs, St0 = #lint{specs = Specs, module = Mod}) ->
check_specs([FunType|Left], Arity, St0) ->
{FunType1, CTypes} =
case FunType of
- {type, _, bounded_fun, [FT = {type, _, 'fun', _}, Cs]} ->
+ {type, _, bounded_fun, [FT = {type, _, 'fun', _}, Cs]} ->
Types0 = [T || {type, _, constraint, [_, T]} <- Cs],
{FT, lists:append(Types0)};
{type, _, 'fun', _} = FT -> {FT, []}
@@ -2666,10 +2784,12 @@ add_missing_spec_warnings(Forms, St0, Type) ->
add_warning(L, {missing_spec,FA}, St)
end, St0, Warns).
-check_unused_types(Forms, St = #lint{usage=Usage, types=Types}) ->
+check_unused_types(Forms, #lint{usage=Usage, types=Ts, exp_types=ExpTs}=St) ->
case [File || {attribute,_L,file,{File,_Line}} <- Forms] of
[FirstFile|_] ->
- UsedTypes = Usage#usage.used_types,
+ D = Usage#usage.used_types,
+ L = gb_sets:to_list(ExpTs) ++ dict:fetch_keys(D),
+ UsedTypes = gb_sets:from_list(L),
FoldFun =
fun(_Type, -1, AccSt) ->
%% Default type
@@ -2677,19 +2797,18 @@ check_unused_types(Forms, St = #lint{usage=Usage, types=Types}) ->
(Type, FileLine, AccSt) ->
case loc(FileLine) of
{FirstFile, _} ->
- case dict:is_key(Type, UsedTypes) of
+ case gb_sets:is_member(Type, UsedTypes) of
true -> AccSt;
- false ->
- add_warning(FileLine,
- {unused_type, Type},
- AccSt)
+ false ->
+ Warn = {unused_type,Type},
+ add_warning(FileLine, Warn, AccSt)
end;
_ ->
- %% Don't warn about unused types in include file
+                                %% Don't warn about unused types in include files
AccSt
end
end,
- dict:fold(FoldFun, St, Types);
+ dict:fold(FoldFun, St, Ts);
[] ->
St
end.
@@ -2834,7 +2953,7 @@ fun_clause({clause,_Line,H,G,B}, Vt0, St0) ->
%%
%% used variable has been used
%% unused variable has been bound but not used
-%%
+%%
%% Lines is a list of line numbers where the variable was bound.
%%
%% Report variable errors/warnings as soon as possible and then change
@@ -2864,9 +2983,9 @@ pat_var(V, Line, Vt, Bvt, St) ->
case orddict:find(V, Bvt) of
{ok, {bound,_Usage,Ls}} ->
{[],[{V,{bound,used,Ls}}],St};
- error ->
+ error ->
case orddict:find(V, Vt) of
- {ok,{bound,_Usage,Ls}} ->
+ {ok,{bound,_Usage,Ls}} ->
{[{V,{bound,used,Ls}}],[],St};
{ok,{{unsafe,In},_Usage,Ls}} ->
{[{V,{bound,used,Ls}}],[],
@@ -2919,7 +3038,7 @@ pat_binsize_var(V, Line, Vt, Bvt, St) ->
expr_var(V, Line, Vt, St0) ->
case orddict:find(V, Vt) of
- {ok,{bound,_Usage,Ls}} ->
+ {ok,{bound,_Usage,Ls}} ->
{[{V,{bound,used,Ls}}],St0};
{ok,{{unsafe,In},_Usage,Ls}} ->
{[{V,{bound,used,Ls}}],
@@ -2957,7 +3076,7 @@ check_old_unused_vars(Vt, Vt0, St0) ->
warn_unused_vars(U, Vt, St0).
unused_vars(Vt, Vt0, _St0) ->
- U0 = orddict:filter(fun (V, {_State,unused,_Ls}) ->
+ U0 = orddict:filter(fun (V, {_State,unused,_Ls}) ->
case atom_to_list(V) of
"_"++_ -> false;
_ -> true
@@ -2973,7 +3092,7 @@ warn_unused_vars(U, Vt, St0) ->
false -> St0;
true ->
foldl(fun ({V,{_,unused,Ls}}, St) ->
- foldl(fun (L, St2) ->
+ foldl(fun (L, St2) ->
add_warning(L, {unused_var,V},
St2)
end, St, Ls)
@@ -3073,7 +3192,7 @@ vt_no_unsafe(Vt) -> [V || {_,{S,_U,_L}}=V <- Vt,
-ifdef(NOTUSED).
vunion(Vs1, Vs2) -> ordsets:union(vtnames(Vs1), vtnames(Vs2)).
-vunion(Vss) -> foldl(fun (Vs, Uvs) ->
+vunion(Vss) -> foldl(fun (Vs, Uvs) ->
ordsets:union(vtnames(Vs), Uvs)
end, [], Vss).
@@ -3103,7 +3222,7 @@ modify_line(T, F0) ->
%% Forms.
modify_line1({function,F,A}, _Mf) -> {function,F,A};
modify_line1({function,M,F,A}, _Mf) -> {function,M,F,A};
-modify_line1({attribute,L,record,{Name,Fields}}, Mf) ->
+modify_line1({attribute,L,record,{Name,Fields}}, Mf) ->
{attribute,Mf(L),record,{Name,modify_line1(Fields, Mf)}};
modify_line1({attribute,L,spec,{Fun,Types}}, Mf) ->
{attribute,Mf(L),spec,{Fun,modify_line1(Types, Mf)}};
@@ -3118,7 +3237,7 @@ modify_line1({warning,W}, _Mf) -> {warning,W};
modify_line1({error,W}, _Mf) -> {error,W};
%% Expressions.
modify_line1({clauses,Cs}, Mf) -> {clauses,modify_line1(Cs, Mf)};
-modify_line1({typed_record_field,Field,Type}, Mf) ->
+modify_line1({typed_record_field,Field,Type}, Mf) ->
{typed_record_field,modify_line1(Field, Mf),modify_line1(Type, Mf)};
modify_line1({Tag,L}, Mf) -> {Tag,Mf(L)};
modify_line1({Tag,L,E1}, Mf) ->
@@ -3154,7 +3273,7 @@ check_record_info_call(Line,_La,_As,St) ->
has_wildcard_field([{record_field,_Lf,{var,_La,'_'},_Val}|_Fs]) -> true;
has_wildcard_field([_|Fs]) -> has_wildcard_field(Fs);
has_wildcard_field([]) -> false.
-
+
%% check_remote_function(Line, ModuleName, FuncName, [Arg], State) -> State.
%% Perform checks on known remote calls.
@@ -3170,7 +3289,7 @@ check_remote_function(Line, M, F, As, St0) ->
check_qlc_hrl(Line, M, F, As, St) ->
Arity = length(As),
case As of
- [{lc,_L,_E,_Qs}|_] when M =:= qlc, F =:= q,
+ [{lc,_L,_E,_Qs}|_] when M =:= qlc, F =:= q,
Arity < 3, not St#lint.xqlc ->
add_warning(Line, {missing_qlc_hrl, Arity}, St);
_ ->
@@ -3355,11 +3474,11 @@ extract_sequence(3, [$.,_|Fmt], Need) ->
extract_sequence(4, Fmt, Need);
extract_sequence(3, Fmt, Need) ->
extract_sequence(4, Fmt, Need);
-extract_sequence(4, [$t, $c | Fmt], Need) ->
- extract_sequence(5, [$c|Fmt], Need);
-extract_sequence(4, [$t, $s | Fmt], Need) ->
- extract_sequence(5, [$s|Fmt], Need);
-extract_sequence(4, [$t, C | _Fmt], _Need) ->
+extract_sequence(4, [$t, $c | Fmt], Need) ->
+ extract_sequence(5, [$c|Fmt], Need);
+extract_sequence(4, [$t, $s | Fmt], Need) ->
+ extract_sequence(5, [$s|Fmt], Need);
+extract_sequence(4, [$t, C | _Fmt], _Need) ->
{error,"invalid control ~t" ++ [C]};
extract_sequence(4, Fmt, Need) ->
extract_sequence(5, Fmt, Need);
@@ -3437,3 +3556,56 @@ expand_package(M, St0) ->
{error, St1}
end
end.
+
+
+%% Prebuild set of local functions (to override auto-import)
+local_functions(Forms) ->
+ gb_sets:from_list([ {Func,Arity} || {function,_,Func,Arity,_} <- Forms ]).
+%% Predicate to find out if the function is locally defined
+is_local_function(LocalSet,{Func,Arity}) ->
+ gb_sets:is_element({Func,Arity},LocalSet).
+%% Predicate to see if a function is explicitly imported
+is_imported_function(ImportSet,{Func,Arity}) ->
+ case orddict:find({Func,Arity}, ImportSet) of
+ {ok,_Mod} -> true;
+ error -> false
+ end.
+%% Predicate to see if a function is explicitly imported from the erlang module
+is_imported_from_erlang(ImportSet,{Func,Arity}) ->
+ case orddict:find({Func,Arity}, ImportSet) of
+ {ok,erlang} -> true;
+ _ -> false
+ end.
+%% Build set of functions where auto-import is explicitly suppressed
+auto_import_suppressed(CompileFlags) ->
+ L0 = [ X || {no_auto_import,X} <- CompileFlags ],
+ L1 = [ {Y,Z} || {Y,Z} <- lists:flatten(L0), is_atom(Y), is_integer(Z) ],
+ gb_sets:from_list(L1).
+%% Predicate to find out if auto-import is explicitly suppressed for a function
+is_autoimport_suppressed(NoAutoSet,{Func,Arity}) ->
+ gb_sets:is_element({Func,Arity},NoAutoSet).
+%% Predicate to find out if a function-specific bif_clash suppression (the old, deprecated mechanism) is present
+bif_clash_specifically_disabled(St,{F,A}) ->
+ Nowarn = nowarn_function(nowarn_bif_clash, St#lint.compile),
+ lists:member({F,A},Nowarn).
+
+%% Predicate to find out whether an auto-imported guard BIF is not overridden in some way.
+%% A guard BIF used without a module name is disallowed if
+%% * it is overridden by a local function, or
+%% * it is overridden by an -import directive and that import is not from the
+%%   erlang module itself, or
+%% * its auto-import is suppressed and it is not re-imported from the erlang module.
+%% Otherwise it is OK (given that it actually is a guard BIF and is auto-imported).
+no_guard_bif_clash(St,{F,A}) ->
+ (
+ (not is_local_function(St#lint.locals,{F,A}))
+ andalso
+ (
+ (not is_imported_function(St#lint.imports,{F,A})) orelse
+ is_imported_from_erlang(St#lint.imports,{F,A})
+ )
+ andalso
+ (
+ (not is_autoimport_suppressed(St#lint.no_auto, {F,A})) orelse
+ is_imported_from_erlang(St#lint.imports,{F,A})
+ )
+ ).
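One subtlety of no_guard_bif_clash/2 worth spelling out with an invented module: suppressing the auto-import of a guard BIF does not make it unusable in guards as long as it is re-imported from the erlang module itself (the is_imported_from_erlang/2 escape hatch above):

    -module(guard_reimport).
    -export([f/1]).

    -compile({no_auto_import, [is_integer/1]}).
    -import(erlang, [is_integer/1]).   %% re-import from erlang keeps the guard legal

    f(X) when is_integer(X) -> integer;
    f(_) -> other.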
diff --git a/lib/stdlib/src/erl_parse.yrl b/lib/stdlib/src/erl_parse.yrl
index 5287f55e59..bb4b18cf9b 100644
--- a/lib/stdlib/src/erl_parse.yrl
+++ b/lib/stdlib/src/erl_parse.yrl
@@ -47,7 +47,7 @@ opt_bit_size_expr bit_size_expr opt_bit_type_list bit_type_list bit_type
top_type top_type_100 top_types type typed_expr typed_attr_val
type_sig type_sigs type_guard type_guards fun_type fun_type_100 binary_type
type_spec spec_fun typed_exprs typed_record_fields field_types field_type
-bin_base_type bin_unit_type int_type.
+bin_base_type bin_unit_type type_200 type_300 type_400 type_500.
Terminals
char integer float atom string var
@@ -61,7 +61,7 @@ char integer float atom string var
'++' '--'
'==' '/=' '=<' '<' '>=' '>' '=:=' '=/=' '<='
'<<' '>>'
-'!' '=' '::'
+'!' '=' '::' '..' '...'
'spec' % helper
dot.
@@ -120,8 +120,24 @@ top_types -> top_type ',' top_types : ['$1'|'$3'].
top_type -> var '::' top_type_100 : {ann_type, ?line('$1'), ['$1','$3']}.
top_type -> top_type_100 : '$1'.
-top_type_100 -> type : '$1'.
-top_type_100 -> type '|' top_type_100 : lift_unions('$1','$3').
+top_type_100 -> type_200 : '$1'.
+top_type_100 -> type_200 '|' top_type_100 : lift_unions('$1','$3').
+
+type_200 -> type_300 '..' type_300 : {type, ?line('$1'), range,
+ [skip_paren('$1'),
+ skip_paren('$3')]}.
+type_200 -> type_300 : '$1'.
+
+type_300 -> type_300 add_op type_400 : ?mkop2(skip_paren('$1'),
+ '$2', skip_paren('$3')).
+type_300 -> type_400 : '$1'.
+
+type_400 -> type_400 mult_op type_500 : ?mkop2(skip_paren('$1'),
+ '$2', skip_paren('$3')).
+type_400 -> type_500 : '$1'.
+
+type_500 -> prefix_op type : ?mkop1('$1', skip_paren('$2')).
+type_500 -> type : '$1'.
type -> '(' top_type ')' : {paren_type, ?line('$2'), ['$2']}.
type -> var : '$1'.
@@ -135,7 +151,7 @@ type -> atom ':' atom '(' top_types ')' : {remote_type, ?line('$1'),
['$1', '$3', '$5']}.
type -> '[' ']' : {type, ?line('$1'), nil, []}.
type -> '[' top_type ']' : {type, ?line('$1'), list, ['$2']}.
-type -> '[' top_type ',' '.' '.' '.' ']' : {type, ?line('$1'),
+type -> '[' top_type ',' '...' ']' : {type, ?line('$1'),
nonempty_list, ['$2']}.
type -> '{' '}' : {type, ?line('$1'), tuple, []}.
type -> '{' top_types '}' : {type, ?line('$1'), tuple, '$2'}.
@@ -143,19 +159,13 @@ type -> '#' atom '{' '}' : {type, ?line('$1'), record, ['$2']}.
type -> '#' atom '{' field_types '}' : {type, ?line('$1'),
record, ['$2'|'$4']}.
type -> binary_type : '$1'.
-type -> int_type : '$1'.
-type -> int_type '.' '.' int_type : {type, ?line('$1'), range,
- ['$1', '$4']}.
+type -> integer : '$1'.
type -> 'fun' '(' ')' : {type, ?line('$1'), 'fun', []}.
type -> 'fun' '(' fun_type_100 ')' : '$3'.
-int_type -> integer : '$1'.
-int_type -> '-' integer : abstract(-normalise('$2'),
- ?line('$2')).
-
-fun_type_100 -> '(' '.' '.' '.' ')' '->' top_type
+fun_type_100 -> '(' '...' ')' '->' top_type
: {type, ?line('$1'), 'fun',
- [{type, ?line('$1'), any}, '$7']}.
+ [{type, ?line('$1'), any}, '$5']}.
fun_type_100 -> fun_type : '$1'.
fun_type -> '(' ')' '->' top_type : {type, ?line('$1'), 'fun',
@@ -180,9 +190,9 @@ binary_type -> '<<' bin_unit_type '>>' : {type, ?line('$1'),binary,
binary_type -> '<<' bin_base_type ',' bin_unit_type '>>'
: {type, ?line('$1'), binary, ['$2', '$4']}.
-bin_base_type -> var ':' integer : build_bin_type(['$1'], '$3').
+bin_base_type -> var ':' type : build_bin_type(['$1'], '$3').
-bin_unit_type -> var ':' var '*' integer : build_bin_type(['$1', '$3'], '$5').
+bin_unit_type -> var ':' var '*' type : build_bin_type(['$1', '$3'], '$5').
attr_val -> expr : ['$1'].
attr_val -> expr ',' exprs : ['$1' | '$3'].
@@ -607,6 +617,11 @@ lift_unions(T1, {type, _La, union, List}) ->
lift_unions(T1, T2) ->
{type, ?line(T1), union, [T1, T2]}.
+skip_paren({paren_type,_L,[Type]}) ->
+ skip_paren(Type);
+skip_paren(Type) ->
+ Type.
+
build_gen_type({atom, La, tuple}) ->
{type, La, tuple, any};
build_gen_type({atom, La, Name}) ->
@@ -615,7 +630,7 @@ build_gen_type({atom, La, Name}) ->
build_bin_type([{var, _, '_'}|Left], Int) ->
build_bin_type(Left, Int);
build_bin_type([], Int) ->
- Int;
+ skip_paren(Int);
build_bin_type([{var, La, _}|_], _) ->
ret_err(La, "Bad binary type").
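
The erl_parse.yrl rules above give type expressions their own precedence levels (type_200 through type_500), turn '..' and '...' into single tokens, and allow prefix, additive and multiplicative operators inside types. A minimal sketch of declarations this grammar is expected to accept; the type names below are invented for illustration:

%% Illustrative declarations only; the type names are hypothetical.
-type octet()   :: 0..255.                %% range via the new '..' token
-type word()    :: 0..(1 bsl 16) - 1.     %% add_op/mult_op and parentheses in a type
-type offsets() :: [-128..127, ...].      %% prefix '-' and '...' in a nonempty list type
-type any_fun() :: fun((...) -> term()).  %% '...' as an any-arity argument list
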
diff --git a/lib/stdlib/src/erl_pp.erl b/lib/stdlib/src/erl_pp.erl
index 0859bf0466..df4a20b833 100644
--- a/lib/stdlib/src/erl_pp.erl
+++ b/lib/stdlib/src/erl_pp.erl
@@ -115,7 +115,7 @@ lattribute({attribute,_Line,Name,Arg}, Hook) ->
lattribute(module, {M,Vs}, _Hook) ->
attr("module",[{var,0,pname(M)},
- foldr(fun(V, C) -> {cons,0,{var,0,V},C}
+ foldr(fun(V, C) -> {cons,0,{var,0,V},C}
end, {nil,0}, Vs)]);
lattribute(module, M, _Hook) ->
attr("module", [{var,0,pname(M)}]);
@@ -140,7 +140,7 @@ typeattr(Tag, {TypeName,Type,Args}, _Hook) ->
ltype({ann_type,_Line,[V,T]}) ->
typed(lexpr(V, none), T);
ltype({paren_type,_Line,[T]}) ->
- [$(,ltype(T),$)];
+ [$(,ltype(T),$)];
ltype({type,_Line,union,Ts}) ->
{seq,[],[],[' |'],ltypes(Ts)};
ltype({type,_Line,list,[T]}) ->
@@ -153,7 +153,7 @@ ltype({type,Line,tuple,any}) ->
simple_type({atom,Line,tuple}, []);
ltype({type,_Line,tuple,Ts}) ->
tuple_type(Ts, fun ltype/1);
-ltype({type,_Line,record,[N|Fs]}) ->
+ltype({type,_Line,record,[{atom,_,N}|Fs]}) ->
record_type(N, Fs);
ltype({type,_Line,range,[_I1,_I2]=Es}) ->
expr_list(Es, '..', fun lexpr/2, none);
@@ -174,12 +174,15 @@ ltype({atom,_,T}) ->
ltype(E) ->
lexpr(E, 0, none).
-binary_type({integer,_,Int1}=I1, {integer,_,Int2}=I2) ->
- E1 = [[leaf("_:"),lexpr(I1, 0, none)] || Int1 =/= 0],
- E2 = [[leaf("_:_*"),lexpr(I2, 0, none)] || Int2 =/= 0],
+binary_type(I1, I2) ->
+ B = [[] || {integer,_,0} <- [I1]] =:= [],
+ U = [[] || {integer,_,0} <- [I2]] =:= [],
+ P = max_prec(),
+ E1 = [[leaf("_:"),lexpr(I1, P, none)] || B],
+ E2 = [[leaf("_:_*"),lexpr(I2, P, none)] || U],
{seq,'<<','>>',[$,],E1++E2}.
-record_type({atom,_,Name}, Fields) ->
+record_type(Name, Fields) ->
{first,[record_name(Name)],field_types(Fields)}.
field_types(Fs) ->
@@ -443,7 +446,7 @@ lexpr({op,_,Op,Arg}, Prec, Hook) ->
Ol = leaf(format("~s ", [Op])),
El = [Ol,lexpr(Arg, R, Hook)],
maybe_paren(P, Prec, El);
-lexpr({op,_,Op,Larg,Rarg}, Prec, Hook) when Op =:= 'orelse';
+lexpr({op,_,Op,Larg,Rarg}, Prec, Hook) when Op =:= 'orelse';
Op =:= 'andalso' ->
%% Breaks lines since R12B.
{L,P,R} = inop_prec(Op),
@@ -727,15 +730,15 @@ frmt(Item, I) ->
%%% and indentation are inserted between IPs.
%%% - {first,I,IP2}: IP2 follows after I, and is output with an indentation
%%% updated with the width of I.
-%%% - {seq,Before,After,Separator,IPs}: a sequence of Is separated by
-%%% Separator. Before is output before IPs, and the indentation of IPs
+%%% - {seq,Before,After,Separator,IPs}: a sequence of Is separated by
+%%% Separator. Before is output before IPs, and the indentation of IPs
%%% is updated with the width of Before. After follows after IPs.
%%% - {force_nl,ExtraInfo,I}: fun-info (a comment) forces linebreak before I.
%%% - {prefer_nl,Sep,IPs}: forces linebreak between Is unlesss negative
%%% indentation.
%%% - {string,S}: a string.
%%% - {hook,...}, {ehook,...}: hook expressions.
-%%%
+%%%
%%% list, first, seq, force_nl, and prefer_nl all accept IPs, where each
%%% element is either an item or a tuple {step|cstep,I1,I2}. step means
%%% that I2 is output after linebreak and an incremented indentation.
@@ -761,7 +764,7 @@ f({seq,Before,After,Sep,LItems}, I0, ST, WT) ->
{CharsL,SizeL} = unz(CharsSizeL),
{BCharsL,BSizeL} = unz1([BCharsSize]),
Sizes = BSizeL ++ SizeL,
- NSepChars = if
+ NSepChars = if
is_list(Sep), Sep =/= [] ->
erlang:max(0, length(CharsL)-1);
true ->
@@ -876,7 +879,7 @@ nl_indent(I, T) when I > 0 ->
[$\n|spaces(I, T)].
same_line(I0, SizeL, NSepChars) ->
- try
+ try
Size = lists:sum(SizeL) + NSepChars,
true = incr(I0, Size) =< ?MAXLINE,
{yes,Size}
@@ -956,9 +959,9 @@ write_a_string(S, N, Len) ->
-define(N_SPACES, 30).
spacetab() ->
- {[_|L],_} = mapfoldl(fun(_, A) -> {A,[$\s|A]}
+ {[_|L],_} = mapfoldl(fun(_, A) -> {A,[$\s|A]}
end, [], lists:seq(0, ?N_SPACES)),
- list_to_tuple(L).
+ list_to_tuple(L).
spaces(N, T) when N =< ?N_SPACES ->
element(N, T);
@@ -966,7 +969,7 @@ spaces(N, T) ->
[element(?N_SPACES, T)|spaces(N-?N_SPACES, T)].
wordtable() ->
- L = [begin {leaf,Sz,S} = leaf(W), {S,Sz} end ||
+ L = [begin {leaf,Sz,S} = leaf(W), {S,Sz} end ||
W <- [" ->"," =","<<",">>","[]","after","begin","case","catch",
"end","fun","if","of","receive","try","when"," ::","..",
" |"]],
diff --git a/lib/stdlib/src/erl_scan.erl b/lib/stdlib/src/erl_scan.erl
index 1013d54bdc..18f64c46d0 100644
--- a/lib/stdlib/src/erl_scan.erl
+++ b/lib/stdlib/src/erl_scan.erl
@@ -55,18 +55,13 @@
token_info/1,token_info/2,
attributes_info/1,attributes_info/2,set_attribute/3]).
-%%% Local record.
--record(erl_scan,
- {resword_fun=fun reserved_word/1,
- ws=false,
- comment=false,
- text=false}).
+-export_type([error_info/0, line/0, tokens_result/0]).
%%%
-%%% Exported functions
+%%% Defines and type definitions
%%%
--define(COLUMN(C), is_integer(C), C >= 1).
+-define(COLUMN(C), (is_integer(C) andalso C >= 1)).
%% Line numbers less than zero have always been allowed:
-define(ALINE(L), is_integer(L)).
-define(STRING(S), is_list(S)).
@@ -95,6 +90,15 @@
-type error_description() :: term().
-type error_info() :: {location(), module(), error_description()}.
+%%% Local record.
+-record(erl_scan,
+ {resword_fun = fun reserved_word/1 :: resword_fun(),
+ ws = false :: boolean(),
+ comment = false :: boolean(),
+ text = false :: boolean()}).
+
+%%----------------------------------------------------------------------------
+
-spec format_error(Error :: term()) -> string().
format_error({string,Quote,Head}) ->
lists:flatten(["unterminated " ++ string_thing(Quote) ++
@@ -307,10 +311,10 @@ options(Opt) ->
options([Opt]).
opts(Options, [Key|Keys], L) ->
- V = case lists:keysearch(Key, 1, Options) of
- {value,{reserved_word_fun,F}} when ?RESWORDFUN(F) ->
+ V = case lists:keyfind(Key, 1, Options) of
+ {reserved_word_fun,F} when ?RESWORDFUN(F) ->
{ok,F};
- {value,{Key,_}} ->
+ {Key,_} ->
badarg;
false ->
{ok,default_option(Key)}
@@ -333,12 +337,13 @@ expand_opt(O, Os) ->
[O|Os].
attr_info(Attrs, Item) ->
- case catch lists:keysearch(Item, 1, Attrs) of
- {value,{Item,Value}} ->
- {Item,Value};
+ try lists:keyfind(Item, 1, Attrs) of
+ {_Item, _Value} = T ->
+ T;
false ->
- undefined;
- _ ->
+ undefined
+ catch
+ _:_ ->
erlang:error(badarg, [Attrs, Item])
end.
@@ -442,6 +447,14 @@ scan1([$\%=C|Cs], St, Line, Col, Toks) ->
scan_comment(Cs, St, Line, Col, Toks, [C]);
scan1([C|Cs], St, Line, Col, Toks) when ?DIGIT(C) ->
scan_number(Cs, St, Line, Col, Toks, [C]);
+scan1("..."++Cs, St, Line, Col, Toks) ->
+ tok2(Cs, St, Line, Col, Toks, "...", '...', 3);
+scan1(".."=Cs, _St, Line, Col, Toks) ->
+ {more,{Cs,Col,Toks,Line,[],fun scan/6}};
+scan1(".."++Cs, St, Line, Col, Toks) ->
+ tok2(Cs, St, Line, Col, Toks, "..", '..', 2);
+scan1("."=Cs, _St, Line, Col, Toks) ->
+ {more,{Cs,Col,Toks,Line,[],fun scan/6}};
scan1([$.=C|Cs], St, Line, Col, Toks) ->
scan_dot(Cs, St, Line, Col, Toks, [C]);
scan1([$"|Cs], St, Line, Col, Toks) -> %" Emacs
@@ -644,8 +657,6 @@ scan_dot([$\n=C|Cs], St, Line, Col, Toks, Ncs) ->
scan_dot([C|Cs], St, Line, Col, Toks, Ncs) when ?WHITE_SPACE(C) ->
Attrs = attributes(Line, Col, St, Ncs++[C]),
{ok,[{dot,Attrs}|Toks],Cs,Line,incr_column(Col, 2)};
-scan_dot([]=Cs, _St, Line, Col, Toks, Ncs) ->
- {more,{Cs,Col,Toks,Line,Ncs,fun scan_dot/6}};
scan_dot(eof=Cs, St, Line, Col, Toks, Ncs) ->
Attrs = attributes(Line, Col, St, Ncs),
{ok,[{dot,Attrs}|Toks],Cs,Line,incr_column(Col, 1)};
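
With the new scan1 clauses above, '..' and '...' come back from the scanner as single tokens rather than runs of '.' tokens. A rough sketch of the effect (a snippet, not part of the patch; token attributes are elided with '_'):

%% Tokenising a range type expression after this change.
{ok, Tokens, _EndLocation} = erl_scan:string("0..255"),
[{integer, _, 0}, {'..', _}, {integer, _, 255}] = Tokens.
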
diff --git a/lib/stdlib/src/ets.erl b/lib/stdlib/src/ets.erl
index d7b5dbc636..1d033f6f7b 100644
--- a/lib/stdlib/src/ets.erl
+++ b/lib/stdlib/src/ets.erl
@@ -42,10 +42,15 @@
-export([i/0, i/1, i/2, i/3]).
-%%------------------------------------------------------------------------------
+-export_type([tab/0, tid/0]).
+
+%%-----------------------------------------------------------------------------
-type tab() :: atom() | tid().
+%% a similar definition is also in erl_types
+-opaque tid() :: integer().
+
-type ext_info() :: 'md5sum' | 'object_count'.
-type protection() :: 'private' | 'protected' | 'public'.
-type type() :: 'bag' | 'duplicate_bag' | 'ordered_set' | 'set'.
@@ -63,7 +68,7 @@
-type match_pattern() :: atom() | tuple().
-type match_specs() :: [{match_pattern(), [_], [_]}].
-%%------------------------------------------------------------------------------
+%%-----------------------------------------------------------------------------
%% The following functions used to be found in this module, but
%% are now BIFs (i.e. implemented in C).
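
Because tab/0 and tid/0 are now exported types, other modules can refer to them in their own specs instead of re-declaring them. A small sketch under that assumption; the helper below is hypothetical, only ets:tab() and ets:lookup/2 are real:

%% Hypothetical helper in some other module.
-spec count_objects(ets:tab(), term()) -> non_neg_integer().
count_objects(Tab, Key) ->
    length(ets:lookup(Tab, Key)).
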
diff --git a/lib/stdlib/src/file_sorter.erl b/lib/stdlib/src/file_sorter.erl
index e21a0c88f3..3875eca39d 100644
--- a/lib/stdlib/src/file_sorter.erl
+++ b/lib/stdlib/src/file_sorter.erl
@@ -191,7 +191,7 @@ options([{format, Format} | L], Opts) when Format =:= binary;
options([{format, binary_term} | L], Opts) ->
options(L, Opts#opts{format = binary_term_fun()});
options([{size, Size} | L], Opts) when is_integer(Size), Size >= 0 ->
- options(L, Opts#opts{size = max(Size, 1)});
+ options(L, Opts#opts{size = erlang:max(Size, 1)});
options([{no_files, NoFiles} | L], Opts) when is_integer(NoFiles),
NoFiles > 1 ->
options(L, Opts#opts{no_files = NoFiles});
@@ -997,10 +997,10 @@ close_read_fun(Fd, FileName, fsort) ->
file:delete(FileName).
read_objs(Fd, FileName, I, L, Bin0, Size0, LSz, W) ->
- Max = max(Size0, ?CHUNKSIZE),
+ Max = erlang:max(Size0, ?CHUNKSIZE),
BSz0 = byte_size(Bin0),
Min = Size0 - BSz0 + W#w.hdlen, % Min > 0
- NoBytes = max(Min, Max),
+ NoBytes = erlang:max(Min, Max),
case read(Fd, FileName, NoBytes, W) of
{ok, Bin} ->
BSz = byte_size(Bin),
@@ -1180,9 +1180,6 @@ make_key2([Kp], T) ->
make_key2([Kp | Kps], T) ->
[element(Kp, T) | make_key2(Kps, T)].
-max(A, B) when A < B -> B;
-max(A, _) -> A.
-
infun(W) ->
W1 = W#w{in = undefined},
try (W#w.in)(read) of
diff --git a/lib/stdlib/src/gen_event.erl b/lib/stdlib/src/gen_event.erl
index 27ff9441e6..b1e9e3a02f 100644
--- a/lib/stdlib/src/gen_event.erl
+++ b/lib/stdlib/src/gen_event.erl
@@ -677,12 +677,23 @@ report_error(Handler, Reason, State, LastIn, SName) ->
_ ->
Reason
end,
+ Mod = Handler#handler.module,
+ FmtState = case erlang:function_exported(Mod, format_status, 2) of
+ true ->
+ Args = [get(), State],
+ case catch Mod:format_status(terminate, Args) of
+ {'EXIT', _} -> State;
+ Else -> Else
+ end;
+ _ ->
+ State
+ end,
error_msg("** gen_event handler ~p crashed.~n"
"** Was installed in ~p~n"
"** Last event was: ~p~n"
"** When handler state == ~p~n"
"** Reason == ~p~n",
- [handler(Handler),SName,LastIn,State,Reason1]).
+ [handler(Handler),SName,LastIn,FmtState,Reason1]).
handler(Handler) when not Handler#handler.id ->
Handler#handler.module;
@@ -711,10 +722,20 @@ get_modules(MSL) ->
%%-----------------------------------------------------------------
%% Status information
%%-----------------------------------------------------------------
-format_status(_Opt, StatusData) ->
- [_PDict, SysState, Parent, _Debug, [ServerName, MSL, _Hib]] = StatusData,
+format_status(Opt, StatusData) ->
+ [PDict, SysState, Parent, _Debug, [ServerName, MSL, _Hib]] = StatusData,
Header = lists:concat(["Status for event handler ", ServerName]),
+ FmtMSL = [case erlang:function_exported(Mod, format_status, 2) of
+ true ->
+ Args = [PDict, State],
+ case catch Mod:format_status(Opt, Args) of
+ {'EXIT', _} -> MSL;
+ Else -> MS#handler{state = Else}
+ end;
+ _ ->
+ MS
+ end || #handler{module = Mod, state = State} = MS <- MSL],
[{header, Header},
{data, [{"Status", SysState},
{"Parent", Parent}]},
- {items, {"Installed handlers", MSL}}].
+ {items, {"Installed handlers", FmtMSL}}].
diff --git a/lib/stdlib/src/gen_fsm.erl b/lib/stdlib/src/gen_fsm.erl
index 9961646418..7d9960b912 100644
--- a/lib/stdlib/src/gen_fsm.erl
+++ b/lib/stdlib/src/gen_fsm.erl
@@ -542,7 +542,18 @@ terminate(Reason, Name, Msg, Mod, StateName, StateData, Debug) ->
{shutdown,_}=Shutdown ->
exit(Shutdown);
_ ->
- error_info(Reason, Name, Msg, StateName, StateData, Debug),
+ FmtStateData =
+ case erlang:function_exported(Mod, format_status, 2) of
+ true ->
+ Args = [get(), StateData],
+ case catch Mod:format_status(terminate, Args) of
+ {'EXIT', _} -> StateData;
+ Else -> Else
+ end;
+ _ ->
+ StateData
+ end,
+ error_info(Reason,Name,Msg,StateName,FmtStateData,Debug),
exit(Reason)
end
end.
@@ -603,22 +614,27 @@ get_msg(Msg) -> Msg.
format_status(Opt, StatusData) ->
[PDict, SysState, Parent, Debug, [Name, StateName, StateData, Mod, _Time]] =
StatusData,
- NameTag = if is_pid(Name) ->
- pid_to_list(Name);
- is_atom(Name) ->
- Name
- end,
- Header = lists:concat(["Status for state machine ", NameTag]),
+ StatusHdr = "Status for state machine",
+ Header = if
+ is_pid(Name) ->
+ lists:concat([StatusHdr, " ", pid_to_list(Name)]);
+ is_atom(Name); is_list(Name) ->
+ lists:concat([StatusHdr, " ", Name]);
+ true ->
+ {StatusHdr, Name}
+ end,
Log = sys:get_debug(log, Debug, []),
- Specfic =
+ DefaultStatus = [{data, [{"StateData", StateData}]}],
+ Specfic =
case erlang:function_exported(Mod, format_status, 2) of
true ->
case catch Mod:format_status(Opt,[PDict,StateData]) of
- {'EXIT', _} -> [{data, [{"StateData", StateData}]}];
- Else -> Else
+ {'EXIT', _} -> DefaultStatus;
+ StatusList when is_list(StatusList) -> StatusList;
+ Else -> [Else]
end;
_ ->
- [{data, [{"StateData", StateData}]}]
+ DefaultStatus
end,
[{header, Header},
{data, [{"Status", SysState},
diff --git a/lib/stdlib/src/gen_server.erl b/lib/stdlib/src/gen_server.erl
index 1c9e5270b6..ac81df9cab 100644
--- a/lib/stdlib/src/gen_server.erl
+++ b/lib/stdlib/src/gen_server.erl
@@ -705,7 +705,18 @@ terminate(Reason, Name, Msg, Mod, State, Debug) ->
{shutdown,_}=Shutdown ->
exit(Shutdown);
_ ->
- error_info(Reason, Name, Msg, State, Debug),
+ FmtState =
+ case erlang:function_exported(Mod, format_status, 2) of
+ true ->
+ Args = [get(), State],
+ case catch Mod:format_status(terminate, Args) of
+ {'EXIT', _} -> State;
+ Else -> Else
+ end;
+ _ ->
+ State
+ end,
+ error_info(Reason, Name, Msg, FmtState, Debug),
exit(Reason)
end
end.
@@ -829,22 +840,27 @@ name_to_pid(Name) ->
%%-----------------------------------------------------------------
format_status(Opt, StatusData) ->
[PDict, SysState, Parent, Debug, [Name, State, Mod, _Time]] = StatusData,
- NameTag = if is_pid(Name) ->
- pid_to_list(Name);
- is_atom(Name) ->
- Name
- end,
- Header = lists:concat(["Status for generic server ", NameTag]),
+ StatusHdr = "Status for generic server",
+ Header = if
+ is_pid(Name) ->
+ lists:concat([StatusHdr, " ", pid_to_list(Name)]);
+ is_atom(Name); is_list(Name) ->
+ lists:concat([StatusHdr, " ", Name]);
+ true ->
+ {StatusHdr, Name}
+ end,
Log = sys:get_debug(log, Debug, []),
- Specfic =
+ DefaultStatus = [{data, [{"State", State}]}],
+ Specfic =
case erlang:function_exported(Mod, format_status, 2) of
true ->
case catch Mod:format_status(Opt, [PDict, State]) of
- {'EXIT', _} -> [{data, [{"State", State}]}];
- Else -> Else
+ {'EXIT', _} -> DefaultStatus;
+ StatusList when is_list(StatusList) -> StatusList;
+ Else -> [Else]
end;
_ ->
- [{data, [{"State", State}]}]
+ DefaultStatus
end,
[{header, Header},
{data, [{"Status", SysState},
diff --git a/lib/stdlib/src/io.erl b/lib/stdlib/src/io.erl
index 1f8076e864..1d0f9374bc 100644
--- a/lib/stdlib/src/io.erl
+++ b/lib/stdlib/src/io.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(io).
@@ -32,6 +32,7 @@
parse_erl_form/1,parse_erl_form/2,parse_erl_form/3]).
-export([request/1,request/2,requests/1,requests/2]).
+-export_type([device/0, format/0]).
%%-------------------------------------------------------------------------
diff --git a/lib/stdlib/src/io_lib.erl b/lib/stdlib/src/io_lib.erl
index 26f6ec8931..4ca9d079b7 100644
--- a/lib/stdlib/src/io_lib.erl
+++ b/lib/stdlib/src/io_lib.erl
@@ -75,6 +75,8 @@
collect_line/2, collect_line/3, collect_line/4,
get_until/3, get_until/4]).
+-export_type([chars/0]).
+
%%----------------------------------------------------------------------
%% XXX: overapproximates a deep list of (unicode) characters
diff --git a/lib/stdlib/src/io_lib_fread.erl b/lib/stdlib/src/io_lib_fread.erl
index 74316dc730..33553692bc 100644
--- a/lib/stdlib/src/io_lib_fread.erl
+++ b/lib/stdlib/src/io_lib_fread.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(io_lib_fread).
@@ -22,6 +22,8 @@
-export([fread/2,fread/3]).
+-export_type([continuation/0, fread_2_ret/0, fread_3_ret/0]).
+
-import(lists, [reverse/1,reverse/2]).
%%-----------------------------------------------------------------------
diff --git a/lib/stdlib/src/lists.erl b/lib/stdlib/src/lists.erl
index 857eda8161..08ee595f4d 100644
--- a/lib/stdlib/src/lists.erl
+++ b/lib/stdlib/src/lists.erl
@@ -18,6 +18,9 @@
%%
-module(lists).
+-compile({no_auto_import,[max/2]}).
+-compile({no_auto_import,[min/2]}).
+
-export([append/2, append/1, subtract/2, reverse/1,
nth/2, nthtail/2, prefix/2, suffix/2, last/1,
seq/2, seq/3, sum/1, duplicate/2, min/1, max/1, sublist/2, sublist/3,
diff --git a/lib/stdlib/src/proc_lib.erl b/lib/stdlib/src/proc_lib.erl
index 9aa5e0a71e..4fb64a3353 100644
--- a/lib/stdlib/src/proc_lib.erl
+++ b/lib/stdlib/src/proc_lib.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(proc_lib).
@@ -34,6 +34,8 @@
%% Internal exports.
-export([wake_up/3]).
+-export_type([spawn_option/0]).
+
%%-----------------------------------------------------------------------------
-type priority_level() :: 'high' | 'low' | 'max' | 'normal'.
diff --git a/lib/stdlib/src/proplists.erl b/lib/stdlib/src/proplists.erl
index 35d14891f1..6a45e0f868 100644
--- a/lib/stdlib/src/proplists.erl
+++ b/lib/stdlib/src/proplists.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2001-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2001-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%% =====================================================================
@@ -49,6 +49,8 @@
%% ---------------------------------------------------------------------
+-export_type([property/0]).
+
-type property() :: atom() | tuple().
-type aliases() :: [{any(), any()}].
diff --git a/lib/stdlib/src/stdlib.app.src b/lib/stdlib/src/stdlib.app.src
index 3e52c48e42..9d15f01683 100644
--- a/lib/stdlib/src/stdlib.app.src
+++ b/lib/stdlib/src/stdlib.app.src
@@ -1,20 +1,20 @@
%% This is an -*- erlang -*- file.
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
{application, stdlib,
@@ -23,6 +23,7 @@
{modules, [array,
base64,
beam_lib,
+ binary,
c,
calendar,
dets,
diff --git a/lib/stdlib/src/supervisor.erl b/lib/stdlib/src/supervisor.erl
index 22269a8d1b..f5d5441184 100644
--- a/lib/stdlib/src/supervisor.erl
+++ b/lib/stdlib/src/supervisor.erl
@@ -21,7 +21,7 @@
-behaviour(gen_server).
%% External exports
--export([start_link/2,start_link/3,
+-export([start_link/2, start_link/3,
start_child/2, restart_child/2,
delete_child/2, terminate_child/2,
which_children/1, count_children/1,
@@ -33,25 +33,47 @@
-export([init/1, handle_call/3, handle_info/2, terminate/2, code_change/3]).
-export([handle_cast/2]).
+-export_type([child_spec/0, strategy/0]).
+
+%%--------------------------------------------------------------------------
+
+-type child_id() :: pid() | 'undefined'.
+-type mfargs() :: {module(), atom(), [term()]}.
+-type modules() :: [module()] | 'dynamic'.
+-type restart() :: 'permanent' | 'transient' | 'temporary'.
+-type shutdown() :: 'brutal_kill' | timeout().
+-type worker() :: 'worker' | 'supervisor'.
+-type sup_name() :: {'local', atom()} | {'global', atom()}.
+-type sup_ref() :: atom() | {atom(), atom()} | {'global', atom()} | pid().
+-type child_spec() :: {term(),mfargs(),restart(),shutdown(),worker(),modules()}.
+
+-type strategy() :: 'one_for_all' | 'one_for_one'
+ | 'rest_for_one' | 'simple_one_for_one'.
+
+%%--------------------------------------------------------------------------
+
+-record(child, {% pid is undefined when child is not running
+ pid = undefined :: child_id(),
+ name,
+ mfargs :: mfargs(),
+ restart_type :: restart(),
+ shutdown :: shutdown(),
+ child_type :: worker(),
+ modules = [] :: modules()}).
+-type child() :: #child{}.
+
-define(DICT, dict).
-record(state, {name,
- strategy,
- children = [],
- dynamics = ?DICT:new(),
- intensity,
- period,
+ strategy :: strategy(),
+ children = [] :: [child()],
+ dynamics = ?DICT:new() :: ?DICT(),
+ intensity :: non_neg_integer(),
+ period :: pos_integer(),
restarts = [],
module,
args}).
-
--record(child, {pid = undefined, % pid is undefined when child is not running
- name,
- mfa,
- restart_type,
- shutdown,
- child_type,
- modules = []}).
+-type state() :: #state{}.
-define(is_simple(State), State#state.strategy =:= simple_one_for_one).
@@ -65,21 +87,40 @@ behaviour_info(_Other) ->
%%% Servers/processes should/could also be built using gen_server.erl.
%%% SupName = {local, atom()} | {global, atom()}.
%%% ---------------------------------------------------
+
+-type startlink_err() :: {'already_started', pid()} | 'shutdown' | term().
+-type startlink_ret() :: {'ok', pid()} | 'ignore' | {'error', startlink_err()}.
+
+-spec start_link(module(), term()) -> startlink_ret().
start_link(Mod, Args) ->
gen_server:start_link(supervisor, {self, Mod, Args}, []).
+-spec start_link(sup_name(), module(), term()) -> startlink_ret().
start_link(SupName, Mod, Args) ->
gen_server:start_link(SupName, supervisor, {SupName, Mod, Args}, []).
%%% ---------------------------------------------------
%%% Interface functions.
%%% ---------------------------------------------------
+
+-type info() :: term().
+-type startchild_err() :: 'already_present'
+ | {'already_started', child_id()} | term().
+-type startchild_ret() :: {'ok', child_id()} | {'ok', child_id(), info()}
+ | {'error', startchild_err()}.
+
+-spec start_child(sup_ref(), child_spec() | [term()]) -> startchild_ret().
start_child(Supervisor, ChildSpec) ->
call(Supervisor, {start_child, ChildSpec}).
+-type restart_err() :: 'running' | 'not_found' | 'simple_one_for_one' | term().
+-spec restart_child(sup_ref(), term()) ->
+ {'ok', child_id()} | {'ok', child_id(), info()} | {'error', restart_err()}.
restart_child(Supervisor, Name) ->
call(Supervisor, {restart_child, Name}).
+-type del_err() :: 'running' | 'not_found' | 'simple_one_for_one'.
+-spec delete_child(sup_ref(), term()) -> 'ok' | {'error', del_err()}.
delete_child(Supervisor, Name) ->
call(Supervisor, {delete_child, Name}).
@@ -89,9 +130,13 @@ delete_child(Supervisor, Name) ->
%% Note that the child is *always* terminated in some
%% way (maybe killed).
%%-----------------------------------------------------------------
+
+-type term_err() :: 'not_found' | 'simple_one_for_one'.
+-spec terminate_child(sup_ref(), term()) -> 'ok' | {'error', term_err()}.
terminate_child(Supervisor, Name) ->
call(Supervisor, {terminate_child, Name}).
+-spec which_children(sup_ref()) -> [{term(), child_id(), worker(), modules()}].
which_children(Supervisor) ->
call(Supervisor, which_children).
@@ -101,6 +146,7 @@ count_children(Supervisor) ->
call(Supervisor, Req) ->
gen_server:call(Supervisor, Req, infinity).
+-spec check_childspecs([child_spec()]) -> 'ok' | {'error', term()}.
check_childspecs(ChildSpecs) when is_list(ChildSpecs) ->
case check_startspec(ChildSpecs) of
{ok, _} -> ok;
@@ -113,6 +159,14 @@ check_childspecs(X) -> {error, {badarg, X}}.
%%% Initialize the supervisor.
%%%
%%% ---------------------------------------------------
+
+-type stop_rsn() :: 'shutdown' | {'bad_return', {module(),'init', term()}}
+ | {'bad_start_spec', term()} | {'start_spec', term()}
+ | {'supervisor_data', term()}.
+
+-spec init({sup_name(), module(), [term()]}) ->
+ {'ok', state()} | 'ignore' | {'stop', stop_rsn()}.
+
init({SupName, Mod, Args}) ->
process_flag(trap_exit, true),
case Mod:init(Args) of
@@ -158,12 +212,12 @@ init_dynamic(_State, StartSpec) ->
%%-----------------------------------------------------------------
%% Func: start_children/2
-%% Args: Children = [#child] in start order
-%% SupName = {local, atom()} | {global, atom()} | {pid(),Mod}
+%% Args: Children = [child()] in start order
+%% SupName = {local, atom()} | {global, atom()} | {pid(), Mod}
%% Purpose: Start all children. The new list contains #child's
%% with pids.
%% Returns: {ok, NChildren} | {error, NChildren}
-%% NChildren = [#child] in termination order (reversed
+%% NChildren = [child()] in termination order (reversed
%% start order)
%%-----------------------------------------------------------------
start_children(Children, SupName) -> start_children(Children, [], SupName).
@@ -182,8 +236,8 @@ start_children([], NChildren, _SupName) ->
{ok, NChildren}.
do_start_child(SupName, Child) ->
- #child{mfa = {M, F, A}} = Child,
- case catch apply(M, F, A) of
+ #child{mfargs = {M, F, Args}} = Child,
+ case catch apply(M, F, Args) of
{ok, Pid} when is_pid(Pid) ->
NChild = Child#child{pid = Pid},
report_progress(NChild, SupName),
@@ -192,7 +246,7 @@ do_start_child(SupName, Child) ->
NChild = Child#child{pid = Pid},
report_progress(NChild, SupName),
{ok, Pid, Extra};
- ignore ->
+ ignore ->
{ok, undefined};
{error, What} -> {error, What};
What -> {error, What}
@@ -211,15 +265,17 @@ do_start_child_i(M, F, A) ->
What ->
{error, What}
end.
-
%%% ---------------------------------------------------
%%%
%%% Callback functions.
%%%
%%% ---------------------------------------------------
+-type call() :: 'which_children' | 'count_children' | {_, _}. % XXX: refine
+-spec handle_call(call(), term(), state()) -> {'reply', term(), state()}.
+
handle_call({start_child, EArgs}, _From, State) when ?is_simple(State) ->
- #child{mfa = {M, F, A}} = hd(State#state.children),
+ #child{mfargs = {M, F, A}} = hd(State#state.children),
Args = A ++ EArgs,
case do_start_child_i(M, F, Args) of
{ok, Pid} ->
@@ -235,7 +291,7 @@ handle_call({start_child, EArgs}, _From, State) when ?is_simple(State) ->
end;
%%% The requests terminate_child, delete_child and restart_child are
-%%% invalid for simple_one_for_one supervisors.
+%%% invalid for simple_one_for_one supervisors.
handle_call({_Req, _Data}, _From, State) when ?is_simple(State) ->
{reply, {error, simple_one_for_one}, State};
@@ -297,7 +353,7 @@ handle_call(which_children, _From, State) ->
Resp =
lists:map(fun(#child{pid = Pid, name = Name,
child_type = ChildType, modules = Mods}) ->
- {Name, Pid, ChildType, Mods}
+ {Name, Pid, ChildType, Mods}
end,
State#state.children),
{reply, Resp, State};
@@ -318,7 +374,6 @@ handle_call(count_children, _From, State) when ?is_simple(State) ->
{reply, Reply, State};
handle_call(count_children, _From, State) ->
-
%% Specs and children are together on the children list...
{Specs, Active, Supers, Workers} =
lists:foldl(fun(Child, Counts) ->
@@ -347,15 +402,19 @@ count_child(#child{pid = Pid, child_type = supervisor},
%%% Hopefully cause a function-clause as there is no API function
%%% that utilizes cast.
+-spec handle_cast('null', state()) -> {'noreply', state()}.
+
handle_cast(null, State) ->
error_logger:error_msg("ERROR: Supervisor received cast-message 'null'~n",
[]),
-
{noreply, State}.
%%
%% Take care of terminated children.
%%
+-spec handle_info(term(), state()) ->
+ {'noreply', state()} | {'stop', 'shutdown', state()}.
+
handle_info({'EXIT', Pid, Reason}, State) ->
case restart_child(Pid, Reason, State) of
{ok, State1} ->
@@ -368,9 +427,12 @@ handle_info(Msg, State) ->
error_logger:error_msg("Supervisor received unexpected message: ~p~n",
[Msg]),
{noreply, State}.
+
%%
%% Terminate this server.
%%
+-spec terminate(term(), state()) -> 'ok'.
+
terminate(_Reason, State) ->
terminate_children(State#state.children, State#state.name),
ok.
@@ -384,6 +446,9 @@ terminate(_Reason, State) ->
%% NOTE: This requires that the init function of the call-back module
%% does not have any side effects.
%%
+-spec code_change(term(), state(), term()) ->
+ {'ok', state()} | {'error', term()}.
+
code_change(_, State, _) ->
case (State#state.module):init(State#state.args) of
{ok, {SupFlags, StartSpec}} ->
@@ -411,7 +476,7 @@ check_flags({Strategy, MaxIntensity, Period}) ->
check_flags(What) ->
{bad_flags, What}.
-update_childspec(State, StartSpec) when ?is_simple(State) ->
+update_childspec(State, StartSpec) when ?is_simple(State) ->
case check_startspec(StartSpec) of
{ok, [Child]} ->
{ok, State#state{children = [Child]}};
@@ -437,7 +502,7 @@ update_childspec1([Child|OldC], Children, KeepOld) ->
update_childspec1(OldC, Children, [Child|KeepOld])
end;
update_childspec1([], Children, KeepOld) ->
- % Return them in (keeped) reverse start order.
+ %% Return them in (kept) reverse start order.
lists:reverse(Children ++ KeepOld).
update_chsp(OldCh, Children) ->
@@ -482,7 +547,7 @@ handle_start_child(Child, State) ->
%%% ---------------------------------------------------
%%% Restart. A process has terminated.
-%%% Returns: {ok, #state} | {shutdown, #state}
+%%% Returns: {ok, state()} | {shutdown, state()}
%%% ---------------------------------------------------
restart_child(Pid, Reason, State) when ?is_simple(State) ->
@@ -490,19 +555,19 @@ restart_child(Pid, Reason, State) when ?is_simple(State) ->
{ok, Args} ->
[Child] = State#state.children,
RestartType = Child#child.restart_type,
- {M, F, _} = Child#child.mfa,
- NChild = Child#child{pid = Pid, mfa = {M, F, Args}},
+ {M, F, _} = Child#child.mfargs,
+ NChild = Child#child{pid = Pid, mfargs = {M, F, Args}},
do_restart(RestartType, Reason, NChild, State);
error ->
{ok, State}
end;
restart_child(Pid, Reason, State) ->
Children = State#state.children,
- case lists:keysearch(Pid, #child.pid, Children) of
- {value, Child} ->
+ case lists:keyfind(Pid, #child.pid, Children) of
+ #child{} = Child ->
RestartType = Child#child.restart_type,
do_restart(RestartType, Reason, Child, State);
- _ ->
+ false ->
{ok, State}
end.
@@ -534,7 +599,7 @@ restart(Child, State) ->
end.
restart(simple_one_for_one, Child, State) ->
- #child{mfa = {M, F, A}} = Child,
+ #child{mfargs = {M, F, A}} = Child,
Dynamics = ?DICT:erase(Child#child.pid, State#state.dynamics),
case do_start_child_i(M, F, A) of
{ok, Pid} ->
@@ -580,9 +645,9 @@ restart(one_for_all, Child, State) ->
%%-----------------------------------------------------------------
%% Func: terminate_children/2
-%% Args: Children = [#child] in termination order
+%% Args: Children = [child()] in termination order
%% SupName = {local, atom()} | {global, atom()} | {pid(),Mod}
-%% Returns: NChildren = [#child] in
+%% Returns: NChildren = [child()] in
%% startup order (reversed termination order)
%%-----------------------------------------------------------------
terminate_children(Children, SupName) ->
@@ -617,7 +682,6 @@ do_terminate(Child, _SupName) ->
%% Returns: ok | {error, OtherReason} (this should be reported)
%%-----------------------------------------------------------------
shutdown(Pid, brutal_kill) ->
-
case monitor_child(Pid) of
ok ->
exit(Pid, kill),
@@ -630,9 +694,7 @@ shutdown(Pid, brutal_kill) ->
{error, Reason} ->
{error, Reason}
end;
-
shutdown(Pid, Time) ->
-
case monitor_child(Pid) of
ok ->
exit(Pid, shutdown), %% Try to shutdown gracefully
@@ -738,9 +800,9 @@ remove_child(Child, State) ->
%% MaxIntensity = integer()
%% Period = integer()
%% Mod :== atom()
-%% Arsg :== term()
+%% Args :== term()
%% Purpose: Check that Type is of correct type (!)
-%% Returns: {ok, #state} | Error
+%% Returns: {ok, state()} | Error
%%-----------------------------------------------------------------
init_state(SupName, Type, Mod, Args) ->
case catch init_state1(SupName, Type, Mod, Args) of
@@ -755,11 +817,11 @@ init_state1(SupName, {Strategy, MaxIntensity, Period}, Mod, Args) ->
validIntensity(MaxIntensity),
validPeriod(Period),
{ok, #state{name = supname(SupName,Mod),
- strategy = Strategy,
- intensity = MaxIntensity,
- period = Period,
- module = Mod,
- args = Args}};
+ strategy = Strategy,
+ intensity = MaxIntensity,
+ period = Period,
+ module = Mod,
+ args = Args}};
init_state1(_SupName, Type, _, _) ->
{invalid_type, Type}.
@@ -771,26 +833,26 @@ validStrategy(What) -> throw({invalid_strategy, What}).
validIntensity(Max) when is_integer(Max),
Max >= 0 -> true;
-validIntensity(What) -> throw({invalid_intensity, What}).
+validIntensity(What) -> throw({invalid_intensity, What}).
validPeriod(Period) when is_integer(Period),
Period > 0 -> true;
validPeriod(What) -> throw({invalid_period, What}).
-supname(self,Mod) -> {self(),Mod};
-supname(N,_) -> N.
+supname(self, Mod) -> {self(), Mod};
+supname(N, _) -> N.
%%% ------------------------------------------------------
%%% Check that the children start specification is valid.
%%% Shall be a six (6) tuple
%%% {Name, Func, RestartType, Shutdown, ChildType, Modules}
%%% where Name is an atom
-%%% Func is {Mod, Fun, Args} == {atom, atom, list}
+%%% Func is {Mod, Fun, Args} == {atom(), atom(), list()}
%%% RestartType is permanent | temporary | transient
%%% Shutdown = integer() | infinity | brutal_kill
%%% ChildType = supervisor | worker
%%% Modules = [atom()] | dynamic
-%%% Returns: {ok, [#child]} | Error
+%%% Returns: {ok, [child()]} | Error
%%% ------------------------------------------------------
check_startspec(Children) -> check_startspec(Children, []).
@@ -818,14 +880,14 @@ check_childspec(Name, Func, RestartType, Shutdown, ChildType, Mods) ->
validChildType(ChildType),
validShutdown(Shutdown, ChildType),
validMods(Mods),
- {ok, #child{name = Name, mfa = Func, restart_type = RestartType,
+ {ok, #child{name = Name, mfargs = Func, restart_type = RestartType,
shutdown = Shutdown, child_type = ChildType, modules = Mods}}.
validChildType(supervisor) -> true;
validChildType(worker) -> true;
validChildType(What) -> throw({invalid_child_type, What}).
-validName(_Name) -> true.
+validName(_Name) -> true.
validFunc({M, F, A}) when is_atom(M),
is_atom(F),
@@ -923,7 +985,7 @@ report_error(Error, Reason, Child, SupName) ->
extract_child(Child) ->
[{pid, Child#child.pid},
{name, Child#child.name},
- {mfa, Child#child.mfa},
+ {mfargs, Child#child.mfargs},
{restart_type, Child#child.restart_type},
{shutdown, Child#child.shutdown},
{child_type, Child#child.child_type}].
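
The supervisor diff above renames #child.mfa to mfargs and documents child_spec() as a six-tuple {Name, MFArgs, Restart, Shutdown, Type, Modules}. For orientation, a child specification matching that type, validated with the existing supervisor:check_childspecs/1; the worker module name is made up:

%% Illustrative only; db_worker is a hypothetical module.
ChildSpec = {db_worker,                    %% Name
             {db_worker, start_link, []},  %% mfargs()
             permanent, 5000, worker,      %% restart(), shutdown(), child type
             [db_worker]},                 %% modules()
ok = supervisor:check_childspecs([ChildSpec]).
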
diff --git a/lib/stdlib/test/Makefile b/lib/stdlib/test/Makefile
index 9beac93eb8..3bbd9ce318 100644
--- a/lib/stdlib/test/Makefile
+++ b/lib/stdlib/test/Makefile
@@ -9,6 +9,8 @@ MODULES= \
array_SUITE \
base64_SUITE \
beam_lib_SUITE \
+ binary_module_SUITE \
+ binref \
c_SUITE \
calendar_SUITE \
dets_SUITE \
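
binary_module_SUITE below drives the binary module and the binref reference implementation through the same calls. As orientation, a few example calls of the kind the suite checks; the longest_common_prefix case appears literally in the suite, the others follow the binary module's documented behaviour:

%% Sketch only, not part of the patch.
{1,2} = binary:match(<<1,2,3,4>>, <<2,3>>),
[<<"a">>,<<"b">>,<<"c">>] = binary:split(<<"a,b,c">>, <<",">>, [global]),
2 = binary:longest_common_prefix([<<1,2,4>>, <<1,2,3>>]).
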
diff --git a/lib/stdlib/test/binary_module_SUITE.erl b/lib/stdlib/test/binary_module_SUITE.erl
new file mode 100644
index 0000000000..16ed9a2c26
--- /dev/null
+++ b/lib/stdlib/test/binary_module_SUITE.erl
@@ -0,0 +1,1323 @@
+-module(binary_module_SUITE).
+
+-export([all/1, interesting/1,random_ref_comp/1,random_ref_sr_comp/1,
+ random_ref_fla_comp/1,parts/1, bin_to_list/1, list_to_bin/1,
+ copy/1, referenced/1,guard/1,encode_decode/1,badargs/1,longest_common_trap/1]).
+
+-export([random_number/1, make_unaligned/1]).
+
+
+
+%%-define(STANDALONE,1).
+
+-ifdef(STANDALONE).
+
+-define(line,erlang:display({?MODULE,?LINE}),).
+
+-else.
+
+-include("test_server.hrl").
+-export([init_per_testcase/2, fin_per_testcase/2]).
+% Default timetrap timeout (set in init_per_testcase).
+% Some of these testcases are really heavy...
+-define(default_timeout, ?t:minutes(20)).
+
+-endif.
+
+
+
+-ifdef(STANDALONE).
+-export([run/0]).
+
+run() ->
+ [ apply(?MODULE,X,[[]]) || X <- all(suite) ].
+
+-else.
+
+init_per_testcase(_Case, Config) ->
+ ?line Dog = ?t:timetrap(?default_timeout),
+ [{watchdog, Dog} | Config].
+
+fin_per_testcase(_Case, Config) ->
+ ?line Dog = ?config(watchdog, Config),
+ ?line test_server:timetrap_cancel(Dog),
+ ok.
+-endif.
+
+all(suite) -> [interesting,random_ref_fla_comp,random_ref_sr_comp,
+ random_ref_comp,parts,bin_to_list, list_to_bin, copy,
+ referenced,guard,encode_decode,badargs,longest_common_trap].
+
+-define(MASK_ERROR(EXPR),mask_error((catch (EXPR)))).
+
+
+badargs(doc) ->
+ ["Tests various badarg exceptions in the module"];
+badargs(Config) when is_list(Config) ->
+ ?line badarg = ?MASK_ERROR(binary:compile_pattern([<<1,2,3:3>>])),
+ ?line badarg = ?MASK_ERROR(binary:compile_pattern([<<1,2,3>>|<<1,2>>])),
+ ?line badarg = ?MASK_ERROR(binary:compile_pattern(<<1,2,3:3>>)),
+ ?line badarg = ?MASK_ERROR(binary:compile_pattern(<<>>)),
+ ?line badarg = ?MASK_ERROR(binary:match(<<1,2,3:3>>,<<1>>)),
+ ?line badarg = ?MASK_ERROR(binary:matches(<<1,2,3:3>>,<<1>>)),
+ ?line badarg = ?MASK_ERROR(binary:match(<<1,2,3>>,<<1>>,
+ [{scope,{0,1},1}])),
+ ?line badarg = ?MASK_ERROR(binary:match(<<1,2,3>>,<<1>>,
+ [{scape,{0,1}}])),
+ ?line badarg = ?MASK_ERROR(binary:match(<<1,2,3>>,<<1>>,
+ [{scope,{0,1,1}}])),
+ ?line badarg = ?MASK_ERROR(binary:match(<<1,2,3>>,<<1>>,[{scope,0,1}])),
+ ?line badarg = ?MASK_ERROR(binary:match(<<1,2,3>>,<<1>>,[{scope,[0,1]}])),
+ ?line badarg = ?MASK_ERROR(binary:match(<<1,2,3>>,<<1>>,
+ [{scope,{0.1,1}}])),
+ ?line badarg = ?MASK_ERROR(binary:match(<<1,2,3>>,<<1>>,
+ [{scope,{1,1.1}}])),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:match(<<1,2,3>>,<<1>>,
+ [{scope,{16#FF,
+ 16#FFFFFFFFFFFFFFFF}}])),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:match(<<1,2,3>>,<<1>>,
+ [{scope,{16#FFFFFFFFFFFFFFFF,
+ -16#7FFFFFFFFFFFFFFF-1}}])),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:match(<<1,2,3>>,<<1>>,
+ [{scope,{16#FFFFFFFFFFFFFFFF,
+ 16#7FFFFFFFFFFFFFFF}}])),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:part(<<1,2,3>>,{16#FF,
+ 16#FFFFFFFFFFFFFFFF})),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:part(<<1,2,3>>,{16#FFFFFFFFFFFFFFFF,
+ -16#7FFFFFFFFFFFFFFF-1})),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:part(<<1,2,3>>,{16#FFFFFFFFFFFFFFFF,
+ 16#7FFFFFFFFFFFFFFF})),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:part(make_unaligned(<<1,2,3>>),{1,1,1})),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary_part(make_unaligned(<<1,2,3>>),{1,1,1})),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary_part(make_unaligned(<<1,2,3>>),{16#FFFFFFFFFFFFFFFF,
+ -16#7FFFFFFFFFFFFFFF-1})),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary_part(make_unaligned(<<1,2,3>>),{16#FF,
+ 16#FFFFFFFFFFFFFFFF})),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary_part(make_unaligned(<<1,2,3>>),{16#FFFFFFFFFFFFFFFF,
+ 16#7FFFFFFFFFFFFFFF})),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary_part(make_unaligned(<<1,2,3>>),{16#FFFFFFFFFFFFFFFFFF,
+ -16#7FFF})),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary_part(make_unaligned(<<1,2,3>>),{16#FF,
+ -16#7FFF})),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:bin_to_list(<<1,2,3>>,{16#FF,
+ 16#FFFFFFFFFFFFFFFF})),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:bin_to_list(<<1,2,3>>,{16#FFFFFFFFFFFFFFFF,
+ -16#7FFFFFFFFFFFFFFF-1})),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:bin_to_list(<<1,2,3>>,{16#FFFFFFFFFFFFFFFF,
+ 16#7FFFFFFFFFFFFFFF})),
+ ?line [1,2,3] =
+ ?MASK_ERROR(
+ binary:bin_to_list(<<1,2,3>>)),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:bin_to_list(<<1,2,3>>,[])),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:bin_to_list(<<1,2,3>>,{1,2,3})),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:bin_to_list(<<1,2,3>>,{1.0,1})),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:bin_to_list(<<1,2,3>>,{1,1.0})),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:bin_to_list(<<1,2,3:3>>,{1,1})),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:bin_to_list(<<1,2,3:3>>)),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:bin_to_list([1,2,3])),
+
+ ?line nomatch =
+ ?MASK_ERROR(binary:match(<<1,2,3>>,<<1>>,[{scope,{0,0}}])),
+ ?line badarg =
+ ?MASK_ERROR(binary:match(<<1,2,3>>,{bm,<<>>},[{scope,{0,1}}])),
+ ?line badarg =
+ ?MASK_ERROR(binary:match(<<1,2,3>>,[],[{scope,{0,1}}])),
+ ?line badarg =
+ ?MASK_ERROR(binary:match(<<1,2,3>>,{ac,<<>>},[{scope,{0,1}}])),
+ ?line {bm,BMMagic} = binary:compile_pattern([<<1,2,3>>]),
+ ?line {ac,ACMagic} = binary:compile_pattern([<<1,2,3>>,<<4,5>>]),
+ ?line badarg =
+ ?MASK_ERROR(binary:match(<<1,2,3>>,{bm,ACMagic},[{scope,{0,1}}])),
+ ?line badarg =
+ ?MASK_ERROR(binary:match(<<1,2,3>>,{ac,BMMagic},[{scope,{0,1}}])),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:match(<<1,2,3>>,
+ {bm,ets:match_spec_compile([{'_',[],['$_']}])},
+ [{scope,{0,1}}])),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:match(<<1,2,3>>,
+ {ac,ets:match_spec_compile([{'_',[],['$_']}])},
+ [{scope,{0,1}}])),
+ ?line nomatch =
+ ?MASK_ERROR(binary:matches(<<1,2,3>>,<<1>>,[{scope,{0,0}}])),
+ ?line badarg =
+ ?MASK_ERROR(binary:matches(<<1,2,3>>,{bm,<<>>},[{scope,{0,1}}])),
+ ?line badarg =
+ ?MASK_ERROR(binary:matches(<<1,2,3>>,[],[{scope,{0,1}}])),
+ ?line badarg =
+ ?MASK_ERROR(binary:matches(<<1,2,3>>,{ac,<<>>},[{scope,{0,1}}])),
+ ?line badarg =
+ ?MASK_ERROR(binary:matches(<<1,2,3>>,{bm,ACMagic},[{scope,{0,1}}])),
+ ?line badarg =
+ ?MASK_ERROR(binary:matches(<<1,2,3>>,{ac,BMMagic},[{scope,{0,1}}])),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:matches(<<1,2,3>>,
+ {bm,ets:match_spec_compile([{'_',[],['$_']}])},
+ [{scope,{0,1}}])),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:matches(<<1,2,3>>,
+ {ac,ets:match_spec_compile([{'_',[],['$_']}])},
+ [{scope,{0,1}}])),
+ ?line badarg =
+ ?MASK_ERROR(binary:longest_common_prefix(
+ [<<0:10000,1,2,4,1:3>>,
+ <<0:10000,1,2,3>>])),
+ ?line badarg =
+ ?MASK_ERROR(binary:longest_common_suffix(
+ [<<0:10000,1,2,4,1:3>>,
+ <<0:10000,1,2,3>>])),
+ ?line badarg =
+ ?MASK_ERROR(binary:encode_unsigned(-1)),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:encode_unsigned(-16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF)),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:first(<<1,2,4,1:3>>)),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:first([1,2,4])),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:last(<<1,2,4,1:3>>)),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:last([1,2,4])),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:at(<<1,2,4,1:3>>,2)),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:at(<<>>,2)),
+ ?line badarg =
+ ?MASK_ERROR(
+ binary:at([1,2,4],2)),
+ ok.
+
+longest_common_trap(doc) ->
+ ["Whitebox test to force special trap conditions in longest_common_{prefix,suffix}"];
+longest_common_trap(Config) when is_list(Config) ->
+ ?line erts_debug:set_internal_state(available_internal_state,true),
+ ?line io:format("oldlimit: ~p~n",
+ [erts_debug:set_internal_state(binary_loop_limit,10)]),
+ erlang:bump_reductions(10000000),
+ ?line _ = binary:longest_common_prefix(
+ [<<0:10000,1,2,4>>,
+ <<0:10000,1,2,3>>,
+ <<0:10000,1,3,3>>,
+ <<0:10000,1,2,4>>,
+ <<0:10000,1,2,4>>,
+ <<0:10000,1,2,3>>,
+ <<0:10000,1,3,3>>,
+ <<0:10000,1,2,3>>,
+ <<0:10000,1,3,3>>,
+ <<0:10000,1,2,4>>,
+ <<0:10000,1,2,4>>,
+ <<0:10000,1,2,3>>,
+ <<0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0:10000,1,3,3>>,
+ <<0:10000,1,2,4>>]),
+ ?line _ = binary:longest_common_prefix(
+ [<<0:10000,1,2,4>>,
+ <<0:10000,1,2,3>>,
+ <<0:10000,1,3,3>>,
+ <<0:10000,1,2,4>>,
+ <<0:10000,1,2,4>>,
+ <<0:10000,1,2,3>>,
+ <<0:10000,1,3,3>>,
+ <<0:10000,1,2,3>>,
+ <<0:10000,1,3,3>>,
+ <<0:10000,1,2,4>>,
+ <<0:10000,1,2,4>>,
+ <<0:10000,1,2,3>>,
+ <<0,0,0,0,0,0,0,0,0,0,0,0,0,0>>,
+ <<0:10000,1,2,4>>]),
+ erlang:bump_reductions(10000000),
+ ?line _ = binary:longest_common_suffix(
+ [<<1,2,4,0:10000>>,
+ <<1,2,4,0:10000>>,
+ <<1,2,4,0:10000>>,
+ <<1,2,4,0:10000>>,
+ <<1,2,4,0:10000>>,
+ <<1,2,4,0:10000>>,
+ <<1,2,4,0:10000>>,
+ <<1,2,4,0:10000>>,
+ <<1,2,4,0:10000>>,
+ <<1,2,4,0:10000>>,
+ <<1,2,4,0:10000>>,
+ <<1,2,4,0:10000>>,
+ <<1,3,3,0:10000,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0>>,
+ <<1,2,4,0:10000>>]),
+ ?line _ = binary:longest_common_suffix(
+ [<<1,2,4,0:10000>>,
+ <<1,2,4,0:10000>>,
+ <<1,2,4,0:10000>>,
+ <<1,2,4,0:10000>>,
+ <<1,2,4,0:10000>>,
+ <<1,2,4,0:10000>>,
+ <<1,2,4,0:10000>>,
+ <<1,2,4,0:10000>>,
+ <<1,2,4,0:10000>>,
+ <<1,2,4,0:10000>>,
+ <<1,2,4,0:10000>>,
+ <<1,2,4,0:10000>>,
+ <<0,0,0,0,0,0,0,0,0,0,0,0,0,0>>,
+ <<1,2,4,0:10000>>]),
+ Subj = subj(),
+ Len = byte_size(Subj),
+ ?line Len = binary:longest_common_suffix(
+ [Subj,Subj,Subj]),
+ ?line io:format("limit was: ~p~n",
+ [erts_debug:set_internal_state(binary_loop_limit,
+ default)]),
+ ?line erts_debug:set_internal_state(available_internal_state,false),
+ ok.
+
+subj() ->
+ Me = self(),
+ spawn(fun() ->
+ X0 = iolist_to_binary([
+ "1234567890",
+ %lists:seq(16#21, 16#7e),
+ lists:duplicate(100, $x)
+ ]),
+ Me ! X0,
+ receive X -> X end
+ end),
+ X0 = receive A -> A end,
+ <<X1:32/binary,_/binary>> = X0,
+ Subject= <<X1/binary>>,
+ Subject.
+
+
+interesting(doc) ->
+ ["Try some interesting patterns"];
+interesting(Config) when is_list(Config) ->
+ X = do_interesting(binary),
+ X = do_interesting(binref).
+
+do_interesting(Module) ->
+ ?line {0,4} = Module:match(<<"123456">>,
+ Module:compile_pattern([<<"12">>,<<"1234">>,
+ <<"23">>,<<"3">>,
+ <<"34">>,<<"456">>,
+ <<"45">>,<<"6">>])),
+ ?line [{0,4},{5,1}] = Module:matches(<<"123456">>,
+ Module:compile_pattern([<<"12">>,<<"1234">>,
+ <<"23">>,<<"3">>,
+ <<"34">>,<<"456">>,
+ <<"45">>,<<"6">>])),
+ ?line [{0,4}] = Module:matches(<<"123456">>,
+ Module:compile_pattern([<<"12">>,<<"1234">>,
+ <<"23">>,<<"3">>,
+ <<"34">>,<<"456">>,
+ <<"45">>])),
+ ?line [{0,2},{2,2}] = Module:matches(<<"123456">>,
+ Module:compile_pattern([<<"12">>,
+ <<"23">>,<<"3">>,
+ <<"34">>,<<"456">>,
+ <<"45">>])),
+ ?line {1,4} = Module:match(<<"123456">>,
+ Module:compile_pattern([<<"34">>,<<"34">>,
+ <<"12347">>,<<"2345">>])),
+ ?line [{1,4}] = Module:matches(<<"123456">>,
+ Module:compile_pattern([<<"34">>,<<"34">>,
+ <<"12347">>,<<"2345">>])),
+ ?line [{2,2}] = Module:matches(<<"123456">>,
+ Module:compile_pattern([<<"34">>,<<"34">>,
+ <<"12347">>,<<"2346">>])),
+
+ ?line {0,4} = Module:match(<<"123456">>,
+ [<<"12">>,<<"1234">>,
+ <<"23">>,<<"3">>,
+ <<"34">>,<<"456">>,
+ <<"45">>,<<"6">>]),
+ ?line [{0,4},{5,1}] = Module:matches(<<"123456">>,
+ [<<"12">>,<<"1234">>,
+ <<"23">>,<<"3">>,
+ <<"34">>,<<"456">>,
+ <<"45">>,<<"6">>]),
+ ?line [{0,4}] = Module:matches(<<"123456">>,
+ [<<"12">>,<<"1234">>,
+ <<"23">>,<<"3">>,
+ <<"34">>,<<"456">>,
+ <<"45">>]),
+ ?line [{0,2},{2,2}] = Module:matches(<<"123456">>,
+ [<<"12">>,
+ <<"23">>,<<"3">>,
+ <<"34">>,<<"456">>,
+ <<"45">>]),
+ ?line {1,4} = Module:match(<<"123456">>,
+ [<<"34">>,<<"34">>,
+ <<"12347">>,<<"2345">>]),
+ ?line [{1,4}] = Module:matches(<<"123456">>,
+ [<<"34">>,<<"34">>,
+ <<"12347">>,<<"2345">>]),
+ ?line [{2,2}] = Module:matches(<<"123456">>,
+ [<<"34">>,<<"34">>,
+ <<"12347">>,<<"2346">>]),
+ ?line nomatch = Module:match(<<1,2,3,4>>,<<2>>,[{scope,{0,1}}]),
+ ?line {1,1} = Module:match(<<1,2,3,4>>,<<2>>,[{scope,{0,2}}]),
+ ?line nomatch = Module:match(<<1,2,3,4>>,<<2,3>>,[{scope,{0,2}}]),
+ ?line {1,2} = Module:match(<<1,2,3,4>>,<<2,3>>,[{scope,{0,3}}]),
+ ?line {1,2} = Module:match(<<1,2,3,4>>,<<2,3>>,[{scope,{0,4}}]),
+ ?line badarg = ?MASK_ERROR(Module:match(<<1,2,3,4>>,<<2,3>>,
+ [{scope,{0,5}}])),
+ ?line {1,2} = Module:match(<<1,2,3,4>>,<<2,3>>,[{scope,{4,-4}}]),
+ ?line {0,3} = Module:match(<<1,2,3,4>>,<<1,2,3>>,[{scope,{4,-4}}]),
+ ?line {0,4} = Module:match(<<1,2,3,4>>,<<1,2,3,4>>,[{scope,{4,-4}}]),
+ ?line badarg = ?MASK_ERROR(Module:match(<<1,2,3,4>>,<<1,2,3,4>>,
+ [{scope,{3,-4}}])),
+ ?line [] = Module:matches(<<1,2,3,4>>,<<2>>,[{scope,{0,1}}]),
+ ?line [{1,1}] = Module:matches(<<1,2,3,4>>,[<<2>>,<<3>>],[{scope,{0,2}}]),
+ ?line [] = Module:matches(<<1,2,3,4>>,<<2,3>>,[{scope,{0,2}}]),
+ ?line [{1,2}] = Module:matches(<<1,2,3,4>>,<<2,3>>,[{scope,{0,3}}]),
+ ?line [{1,2}] = Module:matches(<<1,2,3,4>>,<<2,3>>,[{scope,{0,4}}]),
+ ?line [{1,2}] = Module:matches(<<1,2,3,4>>,[<<2,3>>,<<4>>],
+ [{scope,{0,3}}]),
+ ?line [{1,2},{3,1}] = Module:matches(<<1,2,3,4>>,[<<2,3>>,<<4>>],
+ [{scope,{0,4}}]),
+ ?line badarg = ?MASK_ERROR(Module:matches(<<1,2,3,4>>,<<2,3>>,
+ [{scope,{0,5}}])),
+ ?line [{1,2}] = Module:matches(<<1,2,3,4>>,<<2,3>>,[{scope,{4,-4}}]),
+ ?line [{1,2},{3,1}] = Module:matches(<<1,2,3,4>>,[<<2,3>>,<<4>>],
+ [{scope,{4,-4}}]),
+ ?line [{0,3}] = Module:matches(<<1,2,3,4>>,<<1,2,3>>,[{scope,{4,-4}}]),
+ ?line [{0,4}] = Module:matches(<<1,2,3,4>>,<<1,2,3,4>>,[{scope,{4,-4}}]),
+ ?line badarg = ?MASK_ERROR(Module:matches(<<1,2,3,4>>,<<1,2,3,4>>,
+ [{scope,{3,-4}}])),
+ ?line badarg = ?MASK_ERROR(Module:matches(<<1,2,3,4>>,[<<1,2,3,4>>],
+ [{scope,{3,-4}}])),
+ ?line [<<1,2,3>>,<<6,7,8>>] = Module:split(<<1,2,3,4,5,6,7,8>>,<<4,5>>),
+ ?line [<<1,2,3>>,<<6,7,8>>] = Module:split(<<1,2,3,4,5,6,7,8>>,
+ [<<4,5>>,<<7>>]),
+ ?line [<<1,2,3>>,<<6>>,<<8>>] = Module:split(<<1,2,3,4,5,6,7,8>>,
+ [<<4,5>>,<<7>>],[global]),
+ ?line [<<1,2,3>>,<<6>>,<<>>,<<>>] = Module:split(<<1,2,3,4,5,6,7,8>>,
+ [<<4,5>>,<<7>>,<<8>>],
+ [global]),
+ ?line [<<1,2,3>>,<<6>>] = Module:split(<<1,2,3,4,5,6,7,8>>,
+ [<<4,5>>,<<7>>,<<8>>],
+ [global,trim]),
+ ?line [<<1,2,3,4,5,6,7,8>>] = Module:split(<<1,2,3,4,5,6,7,8>>,
+ [<<4,5>>,<<7>>,<<8>>],
+ [global,trim,{scope,{0,4}}]),
+ ?line [<<1,2,3>>,<<6,7,8>>] = Module:split(<<1,2,3,4,5,6,7,8>>,
+ [<<4,5>>,<<7>>,<<8>>],
+ [global,trim,{scope,{0,5}}]),
+ ?line badarg = ?MASK_ERROR(
+ Module:replace(<<1,2,3,4,5,6,7,8>>,
+ [<<4,5>>,<<7>>,<<8>>],<<99>>,
+ [global,trim,{scope,{0,5}}])),
+ ?line <<1,2,3,99,6,7,8>> = Module:replace(<<1,2,3,4,5,6,7,8>>,
+ [<<4,5>>,<<7>>,<<8>>],<<99>>,[]),
+ ?line <<1,2,3,99,6,99,99>> = Module:replace(<<1,2,3,4,5,6,7,8>>,
+ [<<4,5>>,<<7>>,<<8>>],<<99>>,
+ [global]),
+ ?line <<1,2,3,99,6,7,8>> = Module:replace(<<1,2,3,4,5,6,7,8>>,
+ [<<4,5>>,<<7>>,<<8>>],<<99>>,
+ [global,{scope,{0,5}}]),
+ ?line <<1,2,3,99,6,7,8>> = Module:replace(<<1,2,3,4,5,6,7,8>>,
+ [<<4,5>>,<<7>>,<<8>>],<<99>>,
+ [global,{scope,{0,5}}]),
+ ?line <<1,2,3,99,6,7,8>> = Module:replace(<<1,2,3,4,5,6,7,8>>,
+ [<<4,5>>,<<7>>,<<8>>],<<99>>,
+ [global,{scope,{0,5}}]),
+ ?line badarg = ?MASK_ERROR(Module:replace(<<1,2,3,4,5,6,7,8>>,
+ [<<4,5>>,<<7>>,<<8>>],<<99>>,
+ [global,{scope,{0,5}},
+ {insert,1}])),
+ ?line <<1,2,3,99,4,5,6,7,8>> = Module:replace(<<1,2,3,4,5,6,7,8>>,
+ [<<4,5>>,<<7>>,<<8>>],<<99>>,
+ [global,{scope,{0,5}},
+ {insert_replaced,1}]),
+ ?line <<1,2,3,9,4,5,9,6,7,8>> = Module:replace(<<1,2,3,4,5,6,7,8>>,
+ [<<4,5>>,<<7>>,<<8>>],
+ <<9,9>>,
+ [global,{scope,{0,5}},
+ {insert_replaced,1}]),
+ ?line badarg = ?MASK_ERROR(Module:replace(<<1,2,3,4,5,6,7,8>>,
+ [<<4,5>>,<<7>>,<<8>>],<<>>,
+ [global,{scope,{0,5}},
+ {insert_replaced,1}])),
+ ?line 2 = Module:longest_common_prefix([<<1,2,4>>,<<1,2,3>>]),
+ ?line 2 = Module:longest_common_prefix([<<1,2,4>>,<<1,2>>]),
+ ?line 1 = Module:longest_common_prefix([<<1,2,4>>,<<1>>]),
+ ?line 0 = Module:longest_common_prefix([<<1,2,4>>,<<>>]),
+ ?line 1 = Module:longest_common_prefix([<<1,2,4>>,<<1,2,3>>,<<1,3,3>>]),
+ ?line 1 = Module:longest_common_prefix([<<1,2,4>>,<<1,2,3>>,<<1,3,3>>,<<1,2,4>>]),
+ ?line 1251 = Module:longest_common_prefix([<<0:10000,1,2,4>>,
+ <<0:10000,1,2,3>>,
+ <<0:10000,1,3,3>>,
+ <<0:10000,1,2,4>>]),
+ ?line 12501 = Module:longest_common_prefix([<<0:100000,1,2,4>>,
+ <<0:100000,1,2,3>>,
+ <<0:100000,1,3,3>>,
+ <<0:100000,1,2,4>>]),
+ ?line 1251 = Module:longest_common_prefix(
+ [make_unaligned(<<0:10000,1,2,4>>),
+ <<0:10000,1,2,3>>,
+ make_unaligned(<<0:10000,1,3,3>>),
+ <<0:10000,1,2,4>>]),
+ ?line 12501 = Module:longest_common_prefix(
+ [<<0:100000,1,2,4>>,
+ make_unaligned(<<0:100000,1,2,3>>),
+ <<0:100000,1,3,3>>,
+ make_unaligned(<<0:100000,1,2,4>>)]),
+ ?line 1250001 = Module:longest_common_prefix([<<0:10000000,1,2,4>>,
+ <<0:10000000,1,2,3>>,
+ <<0:10000000,1,3,3>>,
+ <<0:10000000,1,2,4>>]),
+ if % Too cruel for the reference implementation
+ Module =:= binary ->
+ ?line erts_debug:set_internal_state(available_internal_state,true),
+ ?line io:format("oldlimit: ~p~n",
+ [erts_debug:set_internal_state(
+ binary_loop_limit,100)]),
+ ?line 1250001 = Module:longest_common_prefix(
+ [<<0:10000000,1,2,4>>,
+ <<0:10000000,1,2,3>>,
+ <<0:10000000,1,3,3>>,
+ <<0:10000000,1,2,4>>]),
+ ?line io:format("limit was: ~p~n",
+ [erts_debug:set_internal_state(binary_loop_limit,
+ default)]),
+ ?line erts_debug:set_internal_state(available_internal_state,
+ false);
+ true ->
+ ok
+ end,
+ ?line 1 = Module:longest_common_suffix([<<0:100000000,1,2,4,5>>,
+ <<0:100000000,1,2,3,5>>,
+ <<0:100000000,1,3,3,5>>,
+ <<0:100000000,1,2,4,5>>]),
+ ?line 1 = Module:longest_common_suffix([<<1,2,4,5>>,
+ <<0:100000000,1,2,3,5>>,
+ <<0:100000000,1,3,3,5>>,
+ <<0:100000000,1,2,4,5>>]),
+ ?line 1 = Module:longest_common_suffix([<<1,2,4,5,5>>,<<5,5>>,
+ <<0:100000000,1,3,3,5,5>>,
+ <<0:100000000,1,2,4,5>>]),
+ ?line 0 = Module:longest_common_suffix([<<1,2,4,5,5>>,<<5,5>>,
+ <<0:100000000,1,3,3,5,5>>,
+ <<0:100000000,1,2,4>>]),
+ ?line 2 = Module:longest_common_suffix([<<1,2,4,5,5>>,<<5,5>>,
+ <<0:100000000,1,3,3,5,5>>,
+ <<0:100000000,1,2,4,5,5>>]),
+ ?line 1 = Module:longest_common_suffix([<<1,2,4,5,5>>,<<5>>,
+ <<0:100000000,1,3,3,5,5>>,
+ <<0:100000000,1,2,4,5,5>>]),
+ ?line 0 = Module:longest_common_suffix([<<1,2,4,5,5>>,<<>>,
+ <<0:100000000,1,3,3,5,5>>,
+ <<0:100000000,1,2,4,5,5>>]),
+ ?line 0 = Module:longest_common_suffix([<<>>,<<0:100000000,1,3,3,5,5>>,
+ <<0:100000000,1,2,4,5,5>>]),
+ ?line 0 = Module:longest_common_suffix([<<>>,<<0:100000000,1,3,3,5,5>>,
+ <<0:100000000,1,2,4,5,5>>]),
+ ?line 2 = Module:longest_common_suffix([<<5,5>>,<<0:100000000,1,3,3,5,5>>,
+ <<0:100000000,1,2,4,5,5>>]),
+ ?line 2 = Module:longest_common_suffix([<<5,5>>,<<5,5>>,<<4,5,5>>]),
+ ?line 2 = Module:longest_common_suffix([<<5,5>>,<<5,5>>,<<5,5>>]),
+ ?line 3 = Module:longest_common_suffix([<<4,5,5>>,<<4,5,5>>,<<4,5,5>>]),
+ ?line 0 = Module:longest_common_suffix([<<>>]),
+ ?line badarg = ?MASK_ERROR(Module:longest_common_suffix([])),
+ ?line badarg = ?MASK_ERROR(Module:longest_common_suffix([apa])),
+ ?line badarg = ?MASK_ERROR(Module:longest_common_suffix([[<<>>]])),
+ ?line badarg = ?MASK_ERROR(Module:longest_common_suffix([[<<0>>,
+ <<1:9>>]])),
+ ?line 0 = Module:longest_common_prefix([<<>>]),
+ ?line badarg = ?MASK_ERROR(Module:longest_common_prefix([])),
+ ?line badarg = ?MASK_ERROR(Module:longest_common_prefix([apa])),
+ ?line badarg = ?MASK_ERROR(Module:longest_common_prefix([[<<>>]])),
+ ?line badarg = ?MASK_ERROR(Module:longest_common_prefix([[<<0>>,
+ <<1:9>>]])),
+
+ ?line <<1:6,Bin:3/binary,_:2>> = <<1:6,1,2,3,1:2>>,
+ ?line <<1,2,3>> = Bin,
+ ?line 1 = Module:first(Bin),
+ ?line 1 = Module:first(<<1>>),
+ ?line 1 = Module:first(<<1,2,3>>),
+ ?line badarg = ?MASK_ERROR(Module:first(<<>>)),
+ ?line badarg = ?MASK_ERROR(Module:first(apa)),
+ ?line 3 = Module:last(Bin),
+ ?line 1 = Module:last(<<1>>),
+ ?line 3 = Module:last(<<1,2,3>>),
+ ?line badarg = ?MASK_ERROR(Module:last(<<>>)),
+ ?line badarg = ?MASK_ERROR(Module:last(apa)),
+ ?line 1 = Module:at(Bin,0),
+ ?line 1 = Module:at(<<1>>,0),
+ ?line 1 = Module:at(<<1,2,3>>,0),
+ ?line 2 = Module:at(<<1,2,3>>,1),
+ ?line 3 = Module:at(<<1,2,3>>,2),
+ ?line badarg = ?MASK_ERROR(Module:at(<<1,2,3>>,3)),
+ ?line badarg = ?MASK_ERROR(Module:at(<<1,2,3>>,-1)),
+ ?line badarg = ?MASK_ERROR(Module:at(<<1,2,3>>,apa)),
+ ?line "hejsan" = [ Module:at(<<"hejsan">>,I) || I <- lists:seq(0,5) ],
+
+ ?line badarg = ?MASK_ERROR(Module:bin_to_list(<<1,2,3>>,3,-4)),
+ ?line [1,2,3] = ?MASK_ERROR(Module:bin_to_list(<<1,2,3>>,3,-3)),
+
+ ?line badarg = ?MASK_ERROR(Module:decode_unsigned(<<1,2,1:2>>,big)),
+ ?line badarg = ?MASK_ERROR(Module:decode_unsigned(<<1,2,1:2>>,little)),
+ ?line badarg = ?MASK_ERROR(Module:decode_unsigned(apa)),
+ ?line badarg = ?MASK_ERROR(Module:decode_unsigned(125,little)),
+ ?line 0 = ?MASK_ERROR(Module:decode_unsigned(<<>>,little)),
+ ?line 0 = ?MASK_ERROR(Module:decode_unsigned(<<>>,big)),
+ ?line 0 = ?MASK_ERROR(Module:decode_unsigned(<<0>>,little)),
+ ?line 0 = ?MASK_ERROR(Module:decode_unsigned(<<0>>,big)),
+ ?line 0 = ?MASK_ERROR(Module:decode_unsigned(make_unaligned(<<0>>),
+ little)),
+ ?line 0 = ?MASK_ERROR(Module:decode_unsigned(make_unaligned(<<0>>),big)),
+ ?line badarg = ?MASK_ERROR(Module:encode_unsigned(apa)),
+ ?line badarg = ?MASK_ERROR(Module:encode_unsigned(125.3,little)),
+ ?line badarg = ?MASK_ERROR(Module:encode_unsigned({1},little)),
+ ?line badarg = ?MASK_ERROR(Module:encode_unsigned([1],little)),
+ ?line <<0>> = ?MASK_ERROR(Module:encode_unsigned(0,little)),
+ ?line <<0>> = ?MASK_ERROR(Module:encode_unsigned(0,big)),
+ ok.
+
+encode_decode(doc) ->
+ ["test binary:encode_unsigned/1,2 and binary:decode_unsigned/1,2"];
+encode_decode(Config) when is_list(Config) ->
+ ?line random:seed({1271,769940,559934}),
+ ?line ok = encode_decode_loop({1,200},1000), % Need to be long enough
+ % to create offheap binaries
+ ok.
+
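+%% Cross-checks binary:encode_unsigned/decode_unsigned against the binref
+%% reference implementation for random numbers, in both endiannesses and
+%% with unaligned and zero-padded variants of the encoded binary.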
+encode_decode_loop(_Range,0) ->
+ ok;
+encode_decode_loop(Range, X) ->
+ ?line N = random_number(Range),
+ ?line A = binary:encode_unsigned(N),
+ ?line B = binary:encode_unsigned(N,big),
+ ?line C = binref:encode_unsigned(N),
+ ?line D = binref:encode_unsigned(N,big),
+ ?line E = binary:encode_unsigned(N,little),
+ ?line F = binref:encode_unsigned(N,little),
+ ?line G = binary:decode_unsigned(A),
+ ?line H = binary:decode_unsigned(A,big),
+ ?line I = binref:decode_unsigned(A),
+ ?line J = binary:decode_unsigned(E,little),
+ ?line K = binref:decode_unsigned(E,little),
+ ?line L = binary:decode_unsigned(make_unaligned(A)),
+ ?line M = binary:decode_unsigned(make_unaligned(E),little),
+ ?line PaddedBig = <<0:48,A/binary>>,
+ ?line PaddedLittle = <<E/binary,0:48>>,
+ ?line O = binary:decode_unsigned(PaddedBig),
+ ?line P = binary:decode_unsigned(make_unaligned(PaddedBig)),
+ ?line Q = binary:decode_unsigned(PaddedLittle,little),
+ ?line R = binary:decode_unsigned(make_unaligned(PaddedLittle),little),
+ ?line S = binref:decode_unsigned(PaddedLittle,little),
+ ?line T = binref:decode_unsigned(PaddedBig),
+ case (((A =:= B) and (B =:= C) and (C =:= D)) and
+ ((E =:= F)) and
+ ((N =:= G) and (G =:= H) and (H =:= I) and
+ (I =:= J) and (J =:= K) and (K =:= L) and (L =:= M)) and
+ ((M =:= O) and (O =:= P) and (P =:= Q) and (Q =:= R) and
+ (R =:= S) and (S =:= T)))of
+ true ->
+ encode_decode_loop(Range,X-1);
+ _ ->
+ io:format("Failed to encode/decode ~w~n(Results ~p)~n",
+ [N,[A,B,C,D,E,F,G,H,I,J,K,L,M,x,O,P,Q,R,S,T]]),
+ exit(mismatch)
+ end.
+
+guard(doc) ->
+ ["Smoke test of the guard BIFs binary_part/2,3"];
+guard(Config) when is_list(Config) ->
+ {comment, "Guard tests are run in emulator test suite"}.
+
+referenced(doc) ->
+    ["Test referenced_byte_size/1 bif."];
+referenced(Config) when is_list(Config) ->
+ ?line badarg = ?MASK_ERROR(binary:referenced_byte_size([])),
+ ?line badarg = ?MASK_ERROR(binary:referenced_byte_size(apa)),
+ ?line badarg = ?MASK_ERROR(binary:referenced_byte_size({})),
+ ?line badarg = ?MASK_ERROR(binary:referenced_byte_size(1)),
+ ?line A = <<1,2,3>>,
+ ?line B = binary:copy(A,1000),
+ ?line 3 = binary:referenced_byte_size(A),
+ ?line 3000 = binary:referenced_byte_size(B),
+ ?line <<_:8,C:2/binary>> = A,
+ ?line 3 = binary:referenced_byte_size(C),
+ ?line 2 = binary:referenced_byte_size(binary:copy(C)),
+ ?line <<_:7,D:2/binary,_:1>> = A,
+ ?line 2 = binary:referenced_byte_size(binary:copy(D)),
+ ?line 3 = binary:referenced_byte_size(D),
+ ?line <<_:8,E:2/binary,_/binary>> = B,
+ ?line 3000 = binary:referenced_byte_size(E),
+ ?line 2 = binary:referenced_byte_size(binary:copy(E)),
+ ?line <<_:7,F:2/binary,_:1,_/binary>> = B,
+ ?line 2 = binary:referenced_byte_size(binary:copy(F)),
+ ?line 3000 = binary:referenced_byte_size(F),
+ ok.
+
+
+
+list_to_bin(doc) ->
+ ["Test list_to_bin/1 bif"];
+list_to_bin(Config) when is_list(Config) ->
+    %% Just some smoke tests first, then go nuts with random cases
+ ?line badarg = ?MASK_ERROR(binary:list_to_bin({})),
+ ?line badarg = ?MASK_ERROR(binary:list_to_bin(apa)),
+ ?line badarg = ?MASK_ERROR(binary:list_to_bin(<<"apa">>)),
+ F1 = fun(L) ->
+ ?MASK_ERROR(binref:list_to_bin(L))
+ end,
+ F2 = fun(L) ->
+ ?MASK_ERROR(binary:list_to_bin(L))
+ end,
+ ?line random_iolist:run(1000,F1,F2),
+ ok.
+
+copy(doc) ->
+ ["Test copy/1,2 bif's"];
+copy(Config) when is_list(Config) ->
+ ?line <<1,2,3>> = binary:copy(<<1,2,3>>),
+ ?line RS = random_string({1,10000}),
+ ?line RS = RS2 = binary:copy(RS),
+ ?line false = erts_debug:same(RS,RS2),
+ ?line <<>> = ?MASK_ERROR(binary:copy(<<1,2,3>>,0)),
+ ?line badarg = ?MASK_ERROR(binary:copy(<<1,2,3:3>>,2)),
+ ?line badarg = ?MASK_ERROR(binary:copy([],0)),
+ ?line <<>> = ?MASK_ERROR(binary:copy(<<>>,0)),
+ ?line badarg = ?MASK_ERROR(binary:copy(<<1,2,3>>,1.0)),
+ ?line badarg = ?MASK_ERROR(binary:copy(<<1,2,3>>,
+ 16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFF)),
+ ?line <<>> = binary:copy(<<>>,10000),
+ ?line random:seed({1271,769940,559934}),
+ ?line ok = random_copy(3000),
+ ?line erts_debug:set_internal_state(available_internal_state,true),
+ ?line io:format("oldlimit: ~p~n",
+ [erts_debug:set_internal_state(binary_loop_limit,10)]),
+ ?line Subj = subj(),
+ ?line XX = binary:copy(Subj,1000),
+ ?line XX = binref:copy(Subj,1000),
+ ?line ok = random_copy(1000),
+ ?line kill_copy_loop(1000),
+ ?line io:format("limit was: ~p~n",
+ [erts_debug:set_internal_state(binary_loop_limit,
+ default)]),
+ ?line erts_debug:set_internal_state(available_internal_state,false),
+ ok.
+
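+%% Spawns processes that do random copies and kills them shortly afterwards,
+%% presumably to check that a copy operation interrupted while the
+%% binary_loop_limit is lowered does not leave anything behind.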
+kill_copy_loop(0) ->
+ ok;
+kill_copy_loop(N) ->
+ {Pid,Ref} = spawn_monitor(fun() ->
+ ok = random_copy(1000)
+ end),
+ receive
+ after 10 ->
+ ok
+ end,
+ exit(Pid,kill),
+ receive
+ {'DOWN',Ref,process,Pid,_} ->
+ kill_copy_loop(N-1)
+ after 1000 ->
+ exit(did_not_die)
+ end.
+
+random_copy(0) ->
+ ok;
+random_copy(N) ->
+ Str = random_string({0,N}),
+ Num = random:uniform(N div 10+1),
+ A = ?MASK_ERROR(binary:copy(Str,Num)),
+ B = ?MASK_ERROR(binref:copy(Str,Num)),
+ C = ?MASK_ERROR(binary:copy(make_unaligned(Str),Num)),
+ case {(A =:= B), (B =:= C)} of
+ {true,true} ->
+ random_copy(N-1);
+ _ ->
+	    io:format("Failed to copy ~s ~p times~n",
+ [Str,Num]),
+ io:format("A:~p,~nB:~p,~n,C:~p.~n",
+ [A,B,C]),
+ exit(mismatch)
+ end.
+
+bin_to_list(doc) ->
+ ["Test bin_to_list/1,2,3 bif's"];
+bin_to_list(Config) when is_list(Config) ->
+    %% Just some smoke tests first, then go nuts with random cases
+ ?line X = <<1,2,3,4,0:1000000,5>>,
+ ?line Y = make_unaligned(X),
+ ?line LX = binary:bin_to_list(X),
+ ?line LX = binary:bin_to_list(X,0,byte_size(X)),
+ ?line LX = binary:bin_to_list(X,byte_size(X),-byte_size(X)),
+ ?line LX = binary:bin_to_list(X,{0,byte_size(X)}),
+ ?line LX = binary:bin_to_list(X,{byte_size(X),-byte_size(X)}),
+ ?line LY = binary:bin_to_list(Y),
+ ?line LY = binary:bin_to_list(Y,0,byte_size(Y)),
+ ?line LY = binary:bin_to_list(Y,byte_size(Y),-byte_size(Y)),
+ ?line LY = binary:bin_to_list(Y,{0,byte_size(Y)}),
+ ?line LY = binary:bin_to_list(Y,{byte_size(Y),-byte_size(Y)}),
+ ?line 1 = hd(LX),
+ ?line 5 = lists:last(LX),
+ ?line 1 = hd(LY),
+ ?line 5 = lists:last(LY),
+ ?line X = list_to_binary(LY),
+ ?line Y = list_to_binary(LY),
+ ?line X = list_to_binary(LY),
+ ?line [5] = lists:nthtail(byte_size(X)-1,LX),
+ ?line [0,5] = lists:nthtail(byte_size(X)-2,LX),
+ ?line [0,5] = lists:nthtail(byte_size(Y)-2,LY),
+ ?line random:seed({1271,769940,559934}),
+ ?line ok = random_bin_to_list(5000),
+ ok.
+
+random_bin_to_list(0) ->
+ ok;
+random_bin_to_list(N) ->
+ Str = random_string({1,N}),
+ Parts0 = random_parts(10,N),
+ Parts1 = Parts0 ++ [ {X+Y,-Y} || {X,Y} <- Parts0 ],
+ [ begin
+ try
+ true = ?MASK_ERROR(binary:bin_to_list(Str,Z)) =:=
+ ?MASK_ERROR(binref:bin_to_list(Str,Z)),
+ true = ?MASK_ERROR(binary:bin_to_list(Str,Z)) =:=
+ ?MASK_ERROR(binary:bin_to_list(make_unaligned(Str),Z))
+ catch
+ _:_ ->
+ io:format("Error, Str = <<\"~s\">>.~nZ = ~p.~n",
+ [Str,Z]),
+ exit(badresult)
+ end
+ end || Z <- Parts1 ],
+ [ begin
+ try
+ true = ?MASK_ERROR(binary:bin_to_list(Str,A,B)) =:=
+ ?MASK_ERROR(binref:bin_to_list(Str,A,B)),
+ true = ?MASK_ERROR(binary:bin_to_list(Str,A,B)) =:=
+ ?MASK_ERROR(binary:bin_to_list(make_unaligned(Str),A,B))
+ catch
+ _:_ ->
+ io:format("Error, Str = <<\"~s\">>.~nA = ~p.~nB = ~p.~n",
+ [Str,A,B]),
+ exit(badresult)
+ end
+ end || {A,B} <- Parts1 ],
+ random_bin_to_list(N-1).
+
+parts(doc) ->
+ ["Test the part/2,3 bif's"];
+parts(Config) when is_list(Config) ->
+ %% Some simple smoke tests to begin with
+ ?line Simple = <<1,2,3,4,5,6,7,8>>,
+ ?line <<1,2>> = binary:part(Simple,0,2),
+ ?line <<1,2>> = binary:part(Simple,{0,2}),
+ ?line Simple = binary:part(Simple,0,8),
+ ?line Simple = binary:part(Simple,{0,8}),
+ ?line badarg = ?MASK_ERROR(binary:part(Simple,0,9)),
+ ?line badarg = ?MASK_ERROR(binary:part(Simple,{0,9})),
+ ?line badarg = ?MASK_ERROR(binary:part(Simple,1,8)),
+ ?line badarg = ?MASK_ERROR(binary:part(Simple,{1,8})),
+ ?line badarg = ?MASK_ERROR(binary:part(Simple,{3,-4})),
+ ?line badarg = ?MASK_ERROR(binary:part(Simple,{3.0,1})),
+ ?line badarg = ?MASK_ERROR(
+ binary:part(Simple,{16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFF
+ ,1})),
+ ?line <<2,3,4,5,6,7,8>> = binary:part(Simple,{1,7}),
+ ?line <<2,3,4,5,6,7,8>> = binary:part(Simple,{8,-7}),
+ ?line Simple = binary:part(Simple,{8,-8}),
+ ?line badarg = ?MASK_ERROR(binary:part(Simple,{1,-8})),
+ ?line badarg = ?MASK_ERROR(binary:part(Simple,{8,-9})),
+ ?line badarg = ?MASK_ERROR(binary:part(Simple,{0,-1})),
+ ?line <<>> = binary:part(Simple,{8,0}),
+ ?line badarg = ?MASK_ERROR(binary:part(Simple,{9,0})),
+ ?line badarg = ?MASK_ERROR(binary:part(Simple,{-1,0})),
+ ?line badarg = ?MASK_ERROR(binary:part(Simple,{7,2})),
+ ?line <<8>> = binary:part(Simple,{7,1}),
+ ?line random:seed({1271,769940,559934}),
+ ?line random_parts(5000),
+ ok.
+
+
+random_parts(0) ->
+ ok;
+random_parts(N) ->
+ Str = random_string({1,N}),
+ Parts0 = random_parts(10,N),
+ Parts1 = Parts0 ++ [ {X+Y,-Y} || {X,Y} <- Parts0 ],
+ [ begin
+ true = ?MASK_ERROR(binary:part(Str,Z)) =:=
+ ?MASK_ERROR(binref:part(Str,Z)),
+ true = ?MASK_ERROR(binary:part(Str,Z)) =:=
+ ?MASK_ERROR(erlang:binary_part(Str,Z)),
+ true = ?MASK_ERROR(binary:part(Str,Z)) =:=
+ ?MASK_ERROR(binary:part(make_unaligned(Str),Z))
+ end || Z <- Parts1 ],
+ random_parts(N-1).
+
+random_parts(0,_) ->
+ [];
+random_parts(X,N) ->
+ Pos = random:uniform(N),
+ Len = random:uniform((Pos * 12) div 10),
+ [{Pos,Len} | random_parts(X-1,N)].
+
+random_ref_comp(doc) ->
+    ["Test pseudorandomly generated cases against reference implementation"];
+random_ref_comp(Config) when is_list(Config) ->
+ ?line put(success_counter,0),
+ ?line random:seed({1271,769940,559934}),
+ ?line do_random_match_comp(5000,{1,40},{30,1000}),
+ io:format("Number of successes: ~p~n",[get(success_counter)]),
+ ?line do_random_match_comp2(5000,{1,40},{30,1000}),
+ io:format("Number of successes: ~p~n",[get(success_counter)]),
+ ?line do_random_match_comp3(5000,{1,40},{30,1000}),
+ io:format("Number of successes: ~p~n",[get(success_counter)]),
+ ?line do_random_match_comp4(5000,{1,40},{30,1000}),
+ io:format("Number of successes: ~p~n",[get(success_counter)]),
+ ?line do_random_matches_comp(5000,{1,40},{30,1000}),
+ io:format("Number of successes: ~p~n",[get(success_counter)]),
+ ?line do_random_matches_comp2(5000,{1,40},{30,1000}),
+ io:format("Number of successes: ~p~n",[get(success_counter)]),
+ ?line do_random_matches_comp3(5,{1,40},{30,1000}),
+ ?line erts_debug:set_internal_state(available_internal_state,true),
+ ?line io:format("oldlimit: ~p~n",[ erts_debug:set_internal_state(binary_loop_limit,100)]),
+ ?line do_random_match_comp(5000,{1,40},{30,1000}),
+ ?line do_random_matches_comp3(5,{1,40},{30,1000}),
+ ?line io:format("limit was: ~p~n",[ erts_debug:set_internal_state(binary_loop_limit,default)]),
+ ?line erts_debug:set_internal_state(available_internal_state,false),
+ ok.
+
+random_ref_sr_comp(doc) ->
+    ["Test pseudorandomly generated cases against reference implementation of split and replace"];
+random_ref_sr_comp(Config) when is_list(Config) ->
+ ?line put(success_counter,0),
+ ?line random:seed({1271,769940,559934}),
+ ?line do_random_split_comp(5000,{1,40},{30,1000}),
+ io:format("Number of successes: ~p~n",[get(success_counter)]),
+ ?line do_random_replace_comp(5000,{1,40},{30,1000}),
+ io:format("Number of successes: ~p~n",[get(success_counter)]),
+ ?line do_random_split_comp2(5000,{1,40},{30,1000}),
+ io:format("Number of successes: ~p~n",[get(success_counter)]),
+ ?line do_random_replace_comp2(5000,{1,40},{30,1000}),
+ io:format("Number of successes: ~p~n",[get(success_counter)]),
+ ok.
+random_ref_fla_comp(doc) ->
+    ["Test pseudorandomly generated cases against reference implementation of first, last and at"];
+random_ref_fla_comp(Config) when is_list(Config) ->
+ ?line put(success_counter,0),
+ ?line random:seed({1271,769940,559934}),
+ ?line do_random_first_comp(5000,{1,1000}),
+ ?line do_random_last_comp(5000,{1,1000}),
+ ?line do_random_at_comp(5000,{1,1000}),
+ io:format("Number of successes: ~p~n",[get(success_counter)]),
+ ok.
+
+do_random_first_comp(0,_) ->
+ ok;
+do_random_first_comp(N,Range) ->
+ S = random_string(Range),
+ A = ?MASK_ERROR(binref:first(S)),
+ B = ?MASK_ERROR(binary:first(S)),
+ C = ?MASK_ERROR(binary:first(make_unaligned(S))),
+ case {(A =:= B), (B =:= C)} of
+ {true,true} ->
+ do_random_first_comp(N-1,Range);
+ _ ->
+ io:format("Failed to pick first of ~s~n",
+ [S]),
+ io:format("A:~p,~nB:~p,~n,C:~p.~n",
+ [A,B,C]),
+ exit(mismatch)
+ end.
+
+do_random_last_comp(0,_) ->
+ ok;
+do_random_last_comp(N,Range) ->
+ S = random_string(Range),
+ A = ?MASK_ERROR(binref:last(S)),
+ B = ?MASK_ERROR(binary:last(S)),
+ C = ?MASK_ERROR(binary:last(make_unaligned(S))),
+ case {(A =:= B), (B =:= C)} of
+ {true,true} ->
+ do_random_last_comp(N-1,Range);
+ _ ->
+ io:format("Failed to pick last of ~s~n",
+ [S]),
+ io:format("A:~p,~nB:~p,~n,C:~p.~n",
+ [A,B,C]),
+ exit(mismatch)
+ end.
+do_random_at_comp(0,_) ->
+ ok;
+do_random_at_comp(N,{Min,Max}=Range) ->
+ S = random_string(Range),
+ XMax = Min + ((Max - Min) * 3) div 4,
+ Pos = random_length({Min,XMax}), %% some out of range
+ A = ?MASK_ERROR(binref:at(S,Pos)),
+ B = ?MASK_ERROR(binary:at(S,Pos)),
+ C = ?MASK_ERROR(binary:at(make_unaligned(S),Pos)),
+ if
+ A =/= badarg ->
+ put(success_counter,get(success_counter)+1);
+ true ->
+ ok
+ end,
+ case {(A =:= B), (B =:= C)} of
+ {true,true} ->
+ do_random_at_comp(N-1,Range);
+ _ ->
+	    io:format("Failed to pick byte at position ~p of ~s~n",
+		      [Pos,S]),
+ io:format("A:~p,~nB:~p,~n,C:~p.~n",
+ [A,B,C]),
+ exit(mismatch)
+ end.
+
+do_random_matches_comp(0,_,_) ->
+ ok;
+do_random_matches_comp(N,NeedleRange,HaystackRange) ->
+ NumNeedles = element(2,HaystackRange) div element(2,NeedleRange),
+ Needles = [random_string(NeedleRange) ||
+ _ <- lists:duplicate(NumNeedles,a)],
+ Haystack = random_string(HaystackRange),
+ true = do_matches_comp(Needles,Haystack),
+ do_random_matches_comp(N-1,NeedleRange,HaystackRange).
+
+do_random_matches_comp2(0,_,_) ->
+ ok;
+do_random_matches_comp2(N,NeedleRange,HaystackRange) ->
+ NumNeedles = element(2,HaystackRange) div element(2,NeedleRange),
+ Haystack = random_string(HaystackRange),
+ Needles = [random_substring(NeedleRange,Haystack) ||
+ _ <- lists:duplicate(NumNeedles,a)],
+ true = do_matches_comp(Needles,Haystack),
+ do_random_matches_comp2(N-1,NeedleRange,HaystackRange).
+
+do_random_matches_comp3(0,_,_) ->
+ ok;
+do_random_matches_comp3(N,NeedleRange,HaystackRange) ->
+ NumNeedles = element(2,HaystackRange) div element(2,NeedleRange),
+ Haystack = random_string(HaystackRange),
+ Needles = [random_substring(NeedleRange,Haystack) ||
+ _ <- lists:duplicate(NumNeedles,a)],
+ RefRes = binref:matches(Haystack,Needles),
+ true = do_matches_comp_loop(10000,Needles,Haystack, RefRes),
+ do_random_matches_comp3(N-1,NeedleRange,HaystackRange).
+
+do_matches_comp_loop(0,_,_,_) ->
+ true;
+do_matches_comp_loop(N, Needles, Haystack0,RR) ->
+ DummySize=N*8,
+ Haystack1 = <<0:DummySize,Haystack0/binary>>,
+ RR1=[{X+N,Y} || {X,Y} <- RR],
+ true = do_matches_comp2(Needles,Haystack1,RR1),
+ Haystack2 = <<Haystack0/binary,Haystack1/binary>>,
+ RR2 = RR ++ [{X2+N+byte_size(Haystack0),Y2} || {X2,Y2} <- RR],
+ true = do_matches_comp2(Needles,Haystack2,RR2),
+ do_matches_comp_loop(N-1, Needles, Haystack0,RR).
+
+
+do_matches_comp2(N,H,A) ->
+ C = ?MASK_ERROR(binary:matches(H,N)),
+ case (A =:= C) of
+ true ->
+ true;
+ _ ->
+ io:format("Failed to match ~p (needle) against ~s (haystack)~n",
+ [N,H]),
+ io:format("A:~p,~n,C:~p.~n",
+ [A,C]),
+ exit(mismatch)
+ end.
+do_matches_comp(N,H) ->
+ A = ?MASK_ERROR(binref:matches(H,N)),
+ B = ?MASK_ERROR(binref:matches(H,binref:compile_pattern(N))),
+ C = ?MASK_ERROR(binary:matches(H,N)),
+ D = ?MASK_ERROR(binary:matches(make_unaligned(H),
+ binary:compile_pattern([make_unaligned2(X) || X <- N]))),
+ if
+ A =/= nomatch ->
+ put(success_counter,get(success_counter)+1);
+ true ->
+ ok
+ end,
+ case {(A =:= B), (B =:= C),(C =:= D)} of
+ {true,true,true} ->
+ true;
+ _ ->
+ io:format("Failed to match ~p (needle) against ~s (haystack)~n",
+ [N,H]),
+ io:format("A:~p,~nB:~p,~n,C:~p,~n,D:~p.~n",
+ [A,B,C,D]),
+ exit(mismatch)
+ end.
+
+do_random_match_comp(0,_,_) ->
+ ok;
+do_random_match_comp(N,NeedleRange,HaystackRange) ->
+ Needle = random_string(NeedleRange),
+ Haystack = random_string(HaystackRange),
+ true = do_match_comp(Needle,Haystack),
+ do_random_match_comp(N-1,NeedleRange,HaystackRange).
+
+do_random_match_comp2(0,_,_) ->
+ ok;
+do_random_match_comp2(N,NeedleRange,HaystackRange) ->
+ Haystack = random_string(HaystackRange),
+ Needle = random_substring(NeedleRange,Haystack),
+ true = do_match_comp(Needle,Haystack),
+ do_random_match_comp2(N-1,NeedleRange,HaystackRange).
+
+do_random_match_comp3(0,_,_) ->
+ ok;
+do_random_match_comp3(N,NeedleRange,HaystackRange) ->
+ NumNeedles = element(2,HaystackRange) div element(2,NeedleRange),
+ Haystack = random_string(HaystackRange),
+ Needles = [random_substring(NeedleRange,Haystack) ||
+ _ <- lists:duplicate(NumNeedles,a)],
+ true = do_match_comp3(Needles,Haystack),
+ do_random_match_comp3(N-1,NeedleRange,HaystackRange).
+
+do_random_match_comp4(0,_,_) ->
+ ok;
+do_random_match_comp4(N,NeedleRange,HaystackRange) ->
+ NumNeedles = element(2,HaystackRange) div element(2,NeedleRange),
+ Haystack = random_string(HaystackRange),
+ Needles = [random_string(NeedleRange) ||
+ _ <- lists:duplicate(NumNeedles,a)],
+ true = do_match_comp3(Needles,Haystack),
+ do_random_match_comp4(N-1,NeedleRange,HaystackRange).
+
+do_match_comp(N,H) ->
+ A = ?MASK_ERROR(binref:match(H,N)),
+ B = ?MASK_ERROR(binref:match(H,binref:compile_pattern([N]))),
+ C = ?MASK_ERROR(binary:match(make_unaligned(H),N)),
+ D = ?MASK_ERROR(binary:match(H,binary:compile_pattern([N]))),
+ E = ?MASK_ERROR(binary:match(H,binary:compile_pattern(make_unaligned(N)))),
+ if
+ A =/= nomatch ->
+ put(success_counter,get(success_counter)+1);
+ true ->
+ ok
+ end,
+ case {(A =:= B), (B =:= C),(C =:= D),(D =:= E)} of
+ {true,true,true,true} ->
+ true;
+ _ ->
+ io:format("Failed to match ~s (needle) against ~s (haystack)~n",
+ [N,H]),
+ io:format("A:~p,~nB:~p,~n,C:~p,~n,D:~p,E:~p.~n",
+ [A,B,C,D,E]),
+ exit(mismatch)
+ end.
+
+do_match_comp3(N,H) ->
+ A = ?MASK_ERROR(binref:match(H,N)),
+ B = ?MASK_ERROR(binref:match(H,binref:compile_pattern(N))),
+ C = ?MASK_ERROR(binary:match(H,N)),
+ D = ?MASK_ERROR(binary:match(H,binary:compile_pattern(N))),
+ if
+ A =/= nomatch ->
+ put(success_counter,get(success_counter)+1);
+ true ->
+ ok
+ end,
+ case {(A =:= B), (B =:= C),(C =:= D)} of
+ {true,true,true} ->
+ true;
+ _ ->
+ io:format("Failed to match ~s (needle) against ~s (haystack)~n",
+ [N,H]),
+ io:format("A:~p,~nB:~p,~n,C:~p,~n,D:~p.~n",
+ [A,B,C,D]),
+ exit(mismatch)
+ end.
+
+do_random_split_comp(0,_,_) ->
+ ok;
+do_random_split_comp(N,NeedleRange,HaystackRange) ->
+ Haystack = random_string(HaystackRange),
+ Needle = random_substring(NeedleRange,Haystack),
+ true = do_split_comp(Needle,Haystack,[]),
+ true = do_split_comp(Needle,Haystack,[global]),
+ true = do_split_comp(Needle,Haystack,[global,trim]),
+ do_random_split_comp(N-1,NeedleRange,HaystackRange).
+do_random_split_comp2(0,_,_) ->
+ ok;
+do_random_split_comp2(N,NeedleRange,HaystackRange) ->
+ NumNeedles = element(2,HaystackRange) div element(2,NeedleRange),
+ Haystack = random_string(HaystackRange),
+ Needles = [random_substring(NeedleRange,Haystack) ||
+ _ <- lists:duplicate(NumNeedles,a)],
+ true = do_split_comp(Needles,Haystack,[]),
+ true = do_split_comp(Needles,Haystack,[global]),
+ do_random_split_comp2(N-1,NeedleRange,HaystackRange).
+
+do_split_comp(N,H,Opts) ->
+ A = ?MASK_ERROR(binref:split(H,N,Opts)),
+ D = ?MASK_ERROR(binary:split(H,binary:compile_pattern(N),Opts)),
+ if
+ (A =/= [N]) and is_list(A) ->
+ put(success_counter,get(success_counter)+1);
+ true ->
+ ok
+ end,
+ case (A =:= D) of
+ true ->
+ true;
+ _ ->
+ io:format("Failed to split ~n~p ~n(haystack) with ~n~p ~n(needle) "
+ "~nand options ~p~n",
+ [H,N,Opts]),
+ io:format("A:~p,D:~p.~n",
+ [A,D]),
+ exit(mismatch)
+ end.
+
+do_random_replace_comp(0,_,_) ->
+ ok;
+do_random_replace_comp(N,NeedleRange,HaystackRange) ->
+ Haystack = random_string(HaystackRange),
+ Needle = random_substring(NeedleRange,Haystack),
+ Repl = random_string(NeedleRange),
+ Insertat = random_length(NeedleRange), %Sometimes larger than Repl
+ true = do_replace_comp(Needle,Haystack,Repl,[]),
+ true = do_replace_comp(Needle,Haystack,Repl,[global]),
+ true = do_replace_comp(Needle,Haystack,Repl,
+ [global,{insert_replaced,Insertat}]),
+ do_random_replace_comp(N-1,NeedleRange,HaystackRange).
+do_random_replace_comp2(0,_,_) ->
+ ok;
+do_random_replace_comp2(N,NeedleRange,HaystackRange) ->
+ NumNeedles = element(2,HaystackRange) div element(2,NeedleRange),
+ Haystack = random_string(HaystackRange),
+ Needles = [random_substring(NeedleRange,Haystack) ||
+ _ <- lists:duplicate(NumNeedles,a)],
+ Repl = random_string(NeedleRange),
+ Insertat = random_length(NeedleRange), %Sometimes larger than Repl
+ true = do_replace_comp(Needles,Haystack,Repl,[]),
+ true = do_replace_comp(Needles,Haystack,Repl,[global]),
+ true = do_replace_comp(Needles,Haystack,Repl,
+ [global,{insert_replaced,Insertat}]),
+ do_random_replace_comp2(N-1,NeedleRange,HaystackRange).
+
+do_replace_comp(N,H,R,Opts) ->
+ A = ?MASK_ERROR(binref:replace(H,N,R,Opts)),
+ D = ?MASK_ERROR(binary:replace(H,binary:compile_pattern(N),R,Opts)),
+ if
+ (A =/= N) and is_binary(A) ->
+ put(success_counter,get(success_counter)+1);
+ true ->
+ ok
+ end,
+ case (A =:= D) of
+ true ->
+ true;
+ _ ->
+ io:format("Failed to replace ~s (haystack) by ~s (needle) "
+ "inserting ~s (replacement) and options ~p~n",
+ [H,N,R,Opts]),
+ io:format("A:~p,D:~p.~n",
+ [A,D]),
+ exit(mismatch)
+ end.
+
+one_random_number(N) ->
+ M = ((N - 1) rem 10) + 1,
+ element(M,{$0,$1,$2,$3,$4,$5,$6,$7,$8,$9}).
+
+one_random(N) ->
+ M = ((N - 1) rem 68) + 1,
+ element(M,{$a,$b,$c,$d,$e,$f,$g,$h,$i,$j,$k,$l,$m,$n,$o,$p,$q,$r,$s,$t,
+	      $u,$v,$w,$x,$y,$z,$å,$ä,$ö,$A,$B,$C,$D,$E,$F,$G,$H,
+	      $I,$J,$K,$L,$M,$N,$O,$P,$Q,$R,$S,$T,$U,$V,$W,$X,$Y,$Z,$Å,
+	      $Ä,$Ö,$0,$1,$2,$3,$4,$5,$6,$7,$8,$9}).
+
+random_number({Min,Max}) -> % Min and Max are the *length* of the number
+			    % in decimal digits
+ X = random:uniform(Max - Min + 1) + Min - 1,
+ list_to_integer([one_random_number(random:uniform(10)) || _ <- lists:seq(1,X)]).
+
+
+random_length({Min,Max}) ->
+ random:uniform(Max - Min + 1) + Min - 1.
+random_string({Min,Max}) ->
+ X = random:uniform(Max - Min + 1) + Min - 1,
+ list_to_binary([one_random(random:uniform(68)) || _ <- lists:seq(1,X)]).
+random_substring({Min,Max},Hay) ->
+ X = random:uniform(Max - Min + 1) + Min - 1,
+ Y = byte_size(Hay),
+ Z = if
+ X > Y -> Y;
+ true -> X
+ end,
+ PMax = Y - Z,
+ Pos = random:uniform(PMax + 1) - 1,
+ <<_:Pos/binary,Res:Z/binary,_/binary>> = Hay,
+ Res.
+
+mask_error({'EXIT',{Err,_}}) ->
+ Err;
+mask_error(Else) ->
+ Else.
+
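+%% make_unaligned/1 and make_unaligned2/1 wrap the input binary in a few
+%% extra bits and then match the original bytes back out, producing a
+%% sub-binary whose data is not byte aligned in memory. This exercises the
+%% BIFs under test on unaligned binaries as well. id/1 is presumably used
+%% so that the match is not optimized away at compile time.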
+make_unaligned(Bin0) when is_binary(Bin0) ->
+ Bin1 = <<0:3,Bin0/binary,31:5>>,
+ Sz = byte_size(Bin0),
+ <<0:3,Bin:Sz/binary,31:5>> = id(Bin1),
+ Bin.
+make_unaligned2(Bin0) when is_binary(Bin0) ->
+ Bin1 = <<31:5,Bin0/binary,0:3>>,
+ Sz = byte_size(Bin0),
+ <<31:5,Bin:Sz/binary,0:3>> = id(Bin1),
+ Bin.
+
+id(I) -> I.
diff --git a/lib/stdlib/test/binref.erl b/lib/stdlib/test/binref.erl
new file mode 100644
index 0000000000..6d96736ef3
--- /dev/null
+++ b/lib/stdlib/test/binref.erl
@@ -0,0 +1,588 @@
+-module(binref).
+
+-export([compile_pattern/1,match/2,match/3,matches/2,matches/3,
+ split/2,split/3,replace/3,replace/4,first/1,last/1,at/2,
+ part/2,part/3,copy/1,copy/2,encode_unsigned/1,encode_unsigned/2,
+ decode_unsigned/1,decode_unsigned/2,referenced_byte_size/1,
+ longest_common_prefix/1,longest_common_suffix/1,bin_to_list/1,
+ bin_to_list/2,bin_to_list/3,list_to_bin/1]).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% compile_pattern, a dummy
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+compile_pattern(Pattern) when is_binary(Pattern) ->
+ {[Pattern]};
+compile_pattern(Pattern) ->
+ try
+ [ true = is_binary(P) || P <- Pattern ],
+ {Pattern}
+ catch
+ _:_ ->
+ erlang:error(badarg)
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% match and matches
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+match(H,N) ->
+ match(H,N,[]).
+match(Haystack,Needle,Options) when is_binary(Needle) ->
+ match(Haystack,[Needle],Options);
+match(Haystack,{Needles},Options) ->
+ match(Haystack,Needles,Options);
+match(Haystack,Needles,Options) ->
+ try
+ true = is_binary(Haystack) and is_list(Needles), % badarg, not function_clause
+ case get_opts_match(Options,nomatch) of
+ nomatch ->
+ mloop(Haystack,Needles);
+ {A,B} when B > 0 ->
+ <<_:A/binary,SubStack:B/binary,_/binary>> = Haystack,
+ mloop(SubStack,Needles,A,B+A);
+ {A,B} when B < 0 ->
+ Start = A + B,
+ Len = -B,
+ <<_:Start/binary,SubStack:Len/binary,_/binary>> = Haystack,
+ mloop(SubStack,Needles,Start,Len+Start);
+ _ ->
+ nomatch
+ end
+ catch
+ _:_ ->
+ erlang:error(badarg)
+ end.
+matches(H,N) ->
+ matches(H,N,[]).
+matches(Haystack,Needle,Options) when is_binary(Needle) ->
+ matches(Haystack,[Needle],Options);
+matches(Haystack,{Needles},Options) ->
+ matches(Haystack,Needles,Options);
+matches(Haystack,Needles,Options) ->
+ try
+ true = is_binary(Haystack) and is_list(Needles), % badarg, not function_clause
+ case get_opts_match(Options,nomatch) of
+ nomatch ->
+ msloop(Haystack,Needles);
+ {A,B} when B > 0 ->
+ <<_:A/binary,SubStack:B/binary,_/binary>> = Haystack,
+ msloop(SubStack,Needles,A,B+A);
+ {A,B} when B < 0 ->
+ Start = A + B,
+ Len = -B,
+ <<_:Start/binary,SubStack:Len/binary,_/binary>> = Haystack,
+ msloop(SubStack,Needles,Start,Len+Start);
+ _ ->
+ []
+ end
+ catch
+ _:_ ->
+ erlang:error(badarg)
+ end.
+
+mloop(Haystack,Needles) ->
+ mloop(Haystack,Needles,0,byte_size(Haystack)).
+
+mloop(_Haystack,_Needles,N,M) when N >= M ->
+ nomatch;
+mloop(Haystack,Needles,N,M) ->
+ case mloop2(Haystack,Needles,N,nomatch) of
+ nomatch ->
+ % Not found
+ <<_:8,NewStack/binary>> = Haystack,
+ mloop(NewStack,Needles,N+1,M);
+ {N,Len} ->
+ {N,Len}
+ end.
+
+msloop(Haystack,Needles) ->
+ msloop(Haystack,Needles,0,byte_size(Haystack)).
+
+msloop(_Haystack,_Needles,N,M) when N >= M ->
+ [];
+msloop(Haystack,Needles,N,M) ->
+ case mloop2(Haystack,Needles,N,nomatch) of
+ nomatch ->
+ % Not found
+ <<_:8,NewStack/binary>> = Haystack,
+ msloop(NewStack,Needles,N+1,M);
+ {N,Len} ->
+ NewN = N+Len,
+ if
+ NewN >= M ->
+ [{N,Len}];
+ true ->
+ <<_:Len/binary,NewStack/binary>> = Haystack,
+ [{N,Len} | msloop(NewStack,Needles,NewN,M)]
+ end
+ end.
+
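+%% mloop2/4 tries every needle at the current haystack position and keeps
+%% the longest one that matches there, so a match at a given offset always
+%% reports the longest matching needle.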
+mloop2(_Haystack,[],_N,Res) ->
+ Res;
+mloop2(Haystack,[Needle|Tail],N,Candidate) ->
+ NS = byte_size(Needle),
+ case Haystack of
+ <<Needle:NS/binary,_/binary>> ->
+ NewCandidate = case Candidate of
+ nomatch ->
+ {N,NS};
+ {N,ONS} when ONS < NS ->
+ {N,NS};
+ Better ->
+ Better
+ end,
+ mloop2(Haystack,Tail,N,NewCandidate);
+ _ ->
+ mloop2(Haystack,Tail,N,Candidate)
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% split
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+split(H,N) ->
+ split(H,N,[]).
+split(Haystack,{Needles},Options) ->
+ split(Haystack, Needles, Options);
+split(Haystack,Needles0,Options) ->
+ try
+ Needles = if
+ is_list(Needles0) ->
+ Needles0;
+ is_binary(Needles0) ->
+ [Needles0];
+ true ->
+ exit(badtype)
+ end,
+ {Part,Global,Trim} = get_opts_split(Options,{nomatch,false,false}),
+ {Start,End,NewStack} =
+ case Part of
+ nomatch ->
+ {0,byte_size(Haystack),Haystack};
+ {A,B} when B >= 0 ->
+ <<_:A/binary,SubStack:B/binary,_/binary>> = Haystack,
+ {A,A+B,SubStack};
+ {A,B} when B < 0 ->
+ S = A + B,
+ L = -B,
+ <<_:S/binary,SubStack:L/binary,_/binary>> = Haystack,
+ {S,S+L,SubStack}
+ end,
+ MList = if
+ Global ->
+ msloop(NewStack,Needles,Start,End);
+ true ->
+ case mloop(NewStack,Needles,Start,End) of
+ nomatch ->
+ [];
+ X ->
+ [X]
+ end
+ end,
+ do_split(Haystack,MList,0,Trim)
+ catch
+ _:_ ->
+ erlang:error(badarg)
+ end.
+
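+%% do_split/4 cuts the haystack at the match positions; with the trim
+%% option, empty binaries at the end of the result are dropped.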
+do_split(H,[],N,true) when N >= byte_size(H) ->
+ [];
+do_split(H,[],N,_) ->
+ [part(H,{N,byte_size(H)-N})];
+do_split(H,[{A,B}|T],N,Trim) ->
+ case part(H,{N,A-N}) of
+ <<>> ->
+ Rest = do_split(H,T,A+B,Trim),
+ case {Trim, Rest} of
+ {true,[]} ->
+ [];
+ _ ->
+ [<<>> | Rest]
+ end;
+ Oth ->
+ [Oth | do_split(H,T,A+B,Trim)]
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% replace
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+replace(H,N,R) ->
+ replace(H,N,R,[]).
+replace(Haystack,{Needles},Replacement,Options) ->
+ replace(Haystack,Needles,Replacement,Options);
+
+replace(Haystack,Needles0,Replacement,Options) ->
+ try
+ Needles = if
+ is_list(Needles0) ->
+ Needles0;
+ is_binary(Needles0) ->
+ [Needles0];
+ true ->
+ exit(badtype)
+ end,
+ true = is_binary(Replacement), % Make badarg instead of function clause
+ {Part,Global,Insert} = get_opts_replace(Options,{nomatch,false,[]}),
+ {Start,End,NewStack} =
+ case Part of
+ nomatch ->
+ {0,byte_size(Haystack),Haystack};
+ {A,B} when B >= 0 ->
+ <<_:A/binary,SubStack:B/binary,_/binary>> = Haystack,
+ {A,A+B,SubStack};
+ {A,B} when B < 0 ->
+ S = A + B,
+ L = -B,
+ <<_:S/binary,SubStack:L/binary,_/binary>> = Haystack,
+ {S,S+L,SubStack}
+ end,
+ MList = if
+ Global ->
+ msloop(NewStack,Needles,Start,End);
+ true ->
+ case mloop(NewStack,Needles,Start,End) of
+ nomatch ->
+ [];
+ X ->
+ [X]
+ end
+ end,
+ ReplList = case Insert of
+ [] ->
+ Replacement;
+ Y when is_integer(Y) ->
+ splitat(Replacement,0,[Y]);
+ Li when is_list(Li) ->
+ splitat(Replacement,0,lists:sort(Li))
+ end,
+ erlang:iolist_to_binary(do_replace(Haystack,MList,ReplList,0))
+ catch
+ _:_ ->
+ erlang:error(badarg)
+ end.
+
+
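+%% do_replace/4 stitches the result together from the unmatched parts of the
+%% haystack and the replacement. When insert_replaced was given, ReplList is
+%% the replacement split at the requested position(s) (see splitat/3), and
+%% do_insert/2 puts the matched part of the haystack into those gaps.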
+do_replace(H,[],_,N) ->
+ [part(H,{N,byte_size(H)-N})];
+do_replace(H,[{A,B}|T],Replacement,N) ->
+ [part(H,{N,A-N}),
+ if
+ is_list(Replacement) ->
+ do_insert(Replacement, part(H,{A,B}));
+ true ->
+ Replacement
+ end
+ | do_replace(H,T,Replacement,A+B)].
+
+do_insert([X],_) ->
+ [X];
+do_insert([H|T],R) ->
+ [H,R|do_insert(T,R)].
+
+splitat(H,N,[]) ->
+ [part(H,{N,byte_size(H)-N})];
+splitat(H,N,[I|T]) ->
+ [part(H,{N,I-N})|splitat(H,I,T)].
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% first, last and at
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+first(Subject) ->
+ try
+ <<A:8,_/binary>> = Subject,
+ A
+ catch
+ _:_ ->
+ erlang:error(badarg)
+ end.
+
+last(Subject) ->
+ try
+ N = byte_size(Subject) - 1,
+ <<_:N/binary,A:8>> = Subject,
+ A
+ catch
+ _:_ ->
+ erlang:error(badarg)
+ end.
+
+at(Subject,X) ->
+ try
+ <<_:X/binary,A:8,_/binary>> = Subject,
+ A
+ catch
+ _:_ ->
+ erlang:error(badarg)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% bin_to_list
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+bin_to_list(Subject) ->
+ try
+ binary_to_list(Subject)
+ catch
+ _:_ ->
+ erlang:error(badarg)
+ end.
+
+bin_to_list(Subject,T) ->
+ try
+ {A0,B0} = T,
+ {A,B} = if
+ B0 < 0 ->
+ {A0+B0,-B0};
+ true ->
+ {A0,B0}
+ end,
+ binary_to_list(Subject,A+1,A+B)
+ catch
+ _:_ ->
+ erlang:error(badarg)
+ end.
+
+bin_to_list(Subject,A,B) ->
+ try
+ bin_to_list(Subject,{A,B})
+ catch
+ _:_ ->
+ erlang:error(badarg)
+ end.
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% list_to_bin
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+list_to_bin(List) ->
+ try
+ erlang:list_to_binary(List)
+ catch
+ _:_ ->
+ erlang:error(badarg)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% longest_common_prefix
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+longest_common_prefix(LB) ->
+ try
+ true = is_list(LB) and (length(LB) > 0), % Make badarg instead of function clause
+ do_longest_common_prefix(LB,0)
+ catch
+ _:_ ->
+ erlang:error(badarg)
+ end.
+
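+%% The common prefix length is found by increasing X until either some
+%% binary is no longer than X bytes or the bytes at position X differ;
+%% do_lcp/3 compares position X across all binaries in the list.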
+do_longest_common_prefix(LB,X) ->
+ case do_lcp(LB,X,no) of
+ true ->
+ do_longest_common_prefix(LB,X+1);
+ false ->
+ X
+ end.
+do_lcp([],_,_) ->
+ true;
+do_lcp([Bin|_],X,_) when byte_size(Bin) =< X ->
+ false;
+do_lcp([Bin|T],X,no) ->
+ Ch = at(Bin,X),
+ do_lcp(T,X,Ch);
+do_lcp([Bin|T],X,Ch) ->
+ Ch2 = at(Bin,X),
+ if
+ Ch =:= Ch2 ->
+ do_lcp(T,X,Ch);
+ true ->
+ false
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% longest_common_suffix
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+longest_common_suffix(LB) ->
+ try
+ true = is_list(LB) and (length(LB) > 0), % Make badarg instead of function clause
+ do_longest_common_suffix(LB,0)
+ catch
+ _:_ ->
+ erlang:error(badarg)
+ end.
+
+do_longest_common_suffix(LB,X) ->
+ case do_lcs(LB,X,no) of
+ true ->
+ do_longest_common_suffix(LB,X+1);
+ false ->
+ X
+ end.
+do_lcs([],_,_) ->
+ true;
+do_lcs([Bin|_],X,_) when byte_size(Bin) =< X ->
+ false;
+do_lcs([Bin|T],X,no) ->
+ Ch = at(Bin,byte_size(Bin) - 1 - X),
+ do_lcs(T,X,Ch);
+do_lcs([Bin|T],X,Ch) ->
+ Ch2 = at(Bin,byte_size(Bin) - 1 - X),
+ if
+ Ch =:= Ch2 ->
+ do_lcs(T,X,Ch);
+ true ->
+ false
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% part
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+part(Subject,Part) ->
+ try
+ do_part(Subject,Part)
+ catch
+ _:_ ->
+ erlang:error(badarg)
+ end.
+
+part(Subject,Pos,Len) ->
+ part(Subject,{Pos,Len}).
+
+do_part(Bin,{A,B}) when B >= 0 ->
+ <<_:A/binary,Sub:B/binary,_/binary>> = Bin,
+ Sub;
+do_part(Bin,{A,B}) when B < 0 ->
+ S = A + B,
+ L = -B,
+ <<_:S/binary,Sub:L/binary,_/binary>> = Bin,
+ Sub.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% copy
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+copy(Subject) ->
+ copy(Subject,1).
+copy(Subject,N) ->
+ try
+ true = is_integer(N) and (N >= 0) and is_binary(Subject), % Badarg, not function clause
+ erlang:list_to_binary(lists:duplicate(N,Subject))
+ catch
+ _:_ ->
+ erlang:error(badarg)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_unsigned
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+encode_unsigned(Unsigned) ->
+ encode_unsigned(Unsigned,big).
+encode_unsigned(Unsigned,Endian) ->
+ try
+ true = is_integer(Unsigned) and (Unsigned >= 0),
+ if
+ Unsigned =:= 0 ->
+ <<0>>;
+ true ->
+ case Endian of
+ big ->
+ list_to_binary(do_encode(Unsigned,[]));
+ little ->
+ list_to_binary(do_encode_r(Unsigned))
+ end
+ end
+ catch
+ _:_ ->
+ erlang:error(badarg)
+ end.
+
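+%% do_encode/2 prepends the least significant byte in each step, so the
+%% accumulated list ends up most-significant-first (big endian), while
+%% do_encode_r/1 emits the least significant byte first (little endian).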
+do_encode(0,L) ->
+ L;
+do_encode(N,L) ->
+ Byte = N band 255,
+ NewN = N bsr 8,
+ do_encode(NewN,[Byte|L]).
+
+do_encode_r(0) ->
+ [];
+do_encode_r(N) ->
+ Byte = N band 255,
+ NewN = N bsr 8,
+ [Byte|do_encode_r(NewN)].
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_unsigned
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+decode_unsigned(Subject) ->
+ decode_unsigned(Subject,big).
+
+decode_unsigned(Subject,Endian) ->
+ try
+ true = is_binary(Subject),
+ case Endian of
+ big ->
+ do_decode(Subject,0);
+ little ->
+ do_decode_r(Subject,0)
+ end
+ catch
+ _:_ ->
+ erlang:error(badarg)
+ end.
+
+do_decode(<<>>,N) ->
+ N;
+do_decode(<<X:8,Bin/binary>>,N) ->
+ do_decode(Bin,(N bsl 8) bor X).
+
+do_decode_r(<<>>,N) ->
+ N;
+do_decode_r(Bin,N) ->
+ Sz = byte_size(Bin) - 1,
+ <<NewBin:Sz/binary,X>> = Bin,
+ do_decode_r(NewBin, (N bsl 8) bor X).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% referenced_byte_size cannot
+%% be implemented in pure
+%% erlang
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+referenced_byte_size(Bin) when is_binary(Bin) ->
+ erlang:error(not_implemented);
+referenced_byte_size(_) ->
+ erlang:error(badarg).
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Simple helper functions
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% Option "parsing"
+get_opts_match([],Part) ->
+ Part;
+get_opts_match([{scope,{A,B}} | T],_Part) ->
+ get_opts_match(T,{A,B});
+get_opts_match(_,_) ->
+ throw(badopt).
+
+get_opts_split([],{Part,Global,Trim}) ->
+ {Part,Global,Trim};
+get_opts_split([{scope,{A,B}} | T],{_Part,Global,Trim}) ->
+ get_opts_split(T,{{A,B},Global,Trim});
+get_opts_split([global | T],{Part,_Global,Trim}) ->
+ get_opts_split(T,{Part,true,Trim});
+get_opts_split([trim | T],{Part,Global,_Trim}) ->
+ get_opts_split(T,{Part,Global,true});
+get_opts_split(_,_) ->
+ throw(badopt).
+
+get_opts_replace([],{Part,Global,Insert}) ->
+ {Part,Global,Insert};
+get_opts_replace([{scope,{A,B}} | T],{_Part,Global,Insert}) ->
+ get_opts_replace(T,{{A,B},Global,Insert});
+get_opts_replace([global | T],{Part,_Global,Insert}) ->
+ get_opts_replace(T,{Part,true,Insert});
+get_opts_replace([{insert_replaced,N} | T],{Part,Global,_Insert}) ->
+ get_opts_replace(T,{Part,Global,N});
+get_opts_replace(_,_) ->
+ throw(badopt).
diff --git a/lib/stdlib/test/dummy1_h.erl b/lib/stdlib/test/dummy1_h.erl
index 4377d774a3..5b503d5984 100644
--- a/lib/stdlib/test/dummy1_h.erl
+++ b/lib/stdlib/test/dummy1_h.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(dummy1_h).
@@ -21,7 +21,7 @@
%% Test event handler for gen_event_SUITE.erl
-export([init/1, handle_event/2, handle_call/2, handle_info/2,
- terminate/2]).
+ terminate/2, format_status/2]).
init(make_error) ->
{error, my_error};
@@ -67,4 +67,5 @@ terminate(remove_handler, Parent) ->
terminate(_Reason, _State) ->
ok.
-
+format_status(_Opt, [_PDict, _State]) ->
+ "dummy1_h handler state".
diff --git a/lib/stdlib/test/epp_SUITE.erl b/lib/stdlib/test/epp_SUITE.erl
index 4806b5d361..e31dfdd764 100644
--- a/lib/stdlib/test/epp_SUITE.erl
+++ b/lib/stdlib/test/epp_SUITE.erl
@@ -19,12 +19,12 @@
-module(epp_SUITE).
-export([all/1]).
--export([rec_1/1, predef_mac/1,
+-export([rec_1/1, predef_mac/1,
upcase_mac/1, upcase_mac_1/1, upcase_mac_2/1,
variable/1, variable_1/1, otp_4870/1, otp_4871/1, otp_5362/1,
pmod/1, not_circular/1, skip_header/1, otp_6277/1, otp_7702/1,
otp_8130/1, overload_mac/1, otp_8388/1, otp_8470/1, otp_8503/1,
- otp_8562/1]).
+ otp_8562/1, otp_8665/1]).
-export([epp_parse_erl_form/2]).
@@ -39,7 +39,7 @@
-define(config(A,B),config(A,B)).
%% -define(t, test_server).
-define(t, io).
-config(priv_dir, _) ->
+config(priv_dir, _) ->
filename:absname("./epp_SUITE_priv");
config(data_dir, _) ->
filename:absname("./epp_SUITE_data").
@@ -64,7 +64,7 @@ all(doc) ->
all(suite) ->
[rec_1, upcase_mac, predef_mac, variable, otp_4870, otp_4871, otp_5362,
pmod, not_circular, skip_header, otp_6277, otp_7702, otp_8130,
- overload_mac, otp_8388, otp_8470, otp_8503, otp_8562].
+ overload_mac, otp_8388, otp_8470, otp_8503, otp_8562, otp_8665].
rec_1(doc) ->
["Recursive macros hang or crash epp (OTP-1398)."];
@@ -192,7 +192,7 @@ variable_1(Config) when is_list(Config) ->
%% variable_1.erl includes variable_1_include.hrl and
%% variable_1_include_dir.hrl.
?line {ok, List} = epp:parse_file(File, [], []),
- ?line {value, {attribute,_,a,{value1,value2}}} =
+ ?line {value, {attribute,_,a,{value1,value2}}} =
lists:keysearch(a,3,List),
ok.
@@ -219,13 +219,13 @@ otp_4871(Config) when is_list(Config) ->
%% Testing crash in erl_scan. Unfortunately there currently is
%% no known way to crash erl_scan so it is emulated by killing the
%% file io server. This assumes lots of things about how
- %% the processes are started and how monitors are set up,
+ %% the processes are started and how monitors are set up,
%% so there are some sanity checks before killing.
?line {ok,Epp} = epp:open(File, []),
timer:sleep(1),
?line {current_function,{epp,_,_}} = process_info(Epp, current_function),
?line {monitored_by,[Io]} = process_info(Epp, monitored_by),
- ?line {current_function,{file_io_server,_,_}} =
+ ?line {current_function,{file_io_server,_,_}} =
process_info(Io, current_function),
?line exit(Io, emulate_crash),
timer:sleep(1),
@@ -302,7 +302,7 @@ otp_5362(Config) when is_list(Config) ->
Back_hrl = [<<"
-file(\"">>,File_Back,<<"\", 2).
">>],
-
+
?line ok = file:write_file(File_Back, Back),
?line ok = file:write_file(File_Back_hrl, list_to_binary(Back_hrl)),
@@ -333,7 +333,7 @@ otp_5362(Config) when is_list(Config) ->
?line ok = file:write_file(File_Change, list_to_binary(Change)),
- ?line {ok, change_5362, ChangeWarnings} =
+ ?line {ok, change_5362, ChangeWarnings} =
compile:file(File_Change, Copts),
?line true = message_compare(
[{File_Change,[{{1002,21},erl_lint,{unused_var,'B'}}]},
@@ -441,9 +441,9 @@ skip_header(Config) when is_list(Config) ->
that should be skipped
-module(epp_test_skip_header).
-export([main/1]).
-
+
main(_) -> ?MODULE.
-
+
">>),
?line {ok, Fd} = file:open(File, [read]),
?line io:get_line(Fd, ''),
@@ -494,9 +494,9 @@ otp_7702(Config) when is_list(Config) ->
t() ->
?RECEIVE(foo, bar).">>,
?line ok = file:write_file(File, Contents),
- ?line {ok, file_7702, []} =
+ ?line {ok, file_7702, []} =
compile:file(File, [debug_info,return,{outdir,Dir}]),
-
+
BeamFile = filename:join(Dir, "file_7702.beam"),
{ok, AC} = beam_lib:chunks(BeamFile, [abstract_code]),
@@ -506,7 +506,7 @@ otp_7702(Config) when is_list(Config) ->
L
end,
Forms2 = [erl_lint:modify_line(Form, Fun) || Form <- Forms],
- ?line
+ ?line
[{attribute,1,file,_},
_,
_,
@@ -637,7 +637,7 @@ otp_8130(Config) when is_list(Config) ->
],
?line [] = run(Config, Ts),
-
+
Cs = [{otp_8130_c1,
<<"-define(M1(A), if\n"
"A =:= 1 -> B;\n"
@@ -681,7 +681,7 @@ otp_8130(Config) when is_list(Config) ->
<<"\n-include_lib(\"$apa/foo.hrl\").\n">>,
{errors,[{{2,2},epp,{include,lib,"$apa/foo.hrl"}}],[]}},
-
+
{otp_8130_c9,
<<"-define(S, ?S).\n"
"t() -> ?S.\n">>,
@@ -775,7 +775,7 @@ otp_8130(Config) when is_list(Config) ->
?line Dir = ?config(priv_dir, Config),
?line File = filename:join(Dir, "otp_8130.erl"),
- ?line ok = file:write_file(File,
+ ?line ok = file:write_file(File,
"-module(otp_8130).\n"
"-define(a, 3.14).\n"
"t() -> ?a.\n"),
@@ -788,7 +788,7 @@ otp_8130(Config) when is_list(Config) ->
?line {eof,_} = epp:scan_erl_form(Epp),
?line ['BASE_MODULE','BASE_MODULE_STRING','BEAM','FILE','LINE',
'MACHINE','MODULE','MODULE_STRING',a] = macs(Epp),
- ?line epp:close(Epp),
+ ?line epp:close(Epp),
%% escript
ModuleStr = "any_name",
@@ -815,7 +815,7 @@ otp_8130(Config) when is_list(Config) ->
PreDefMacros = [{a,1},a],
?line {error,{redefine,a}} = epp:open(File, [], PreDefMacros)
end(),
-
+
?line {error,enoent} = epp:open("no such file", []),
?line {error,enoent} = epp:parse_file("no such file", [], []),
@@ -941,7 +941,7 @@ ifdef(Config) ->
<<"\n-if.\n"
"-endif.\n">>,
{errors,[{{2,2},epp,{'NYI','if'}}],[]}},
-
+
{define_c7,
<<"-ifndef(a).\n"
"-elif.\n"
@@ -1197,6 +1197,18 @@ otp_8562(Config) when is_list(Config) ->
?line [] = compile(Config, Cs),
ok.
+otp_8665(doc) ->
+    ["OTP-8665. Bug fix of premature end."];
+otp_8665(suite) ->
+ [];
+otp_8665(Config) when is_list(Config) ->
+ Cs = [{otp_8562,
+ <<"-define(A, a)\n">>,
+ {errors,[{{1,54},epp,premature_end}],[]}}
+ ],
+ ?line [] = compile(Config, Cs),
+ ok.
+
check(Config, Tests) ->
eval_tests(Config, fun check_test/2, Tests).
@@ -1213,7 +1225,7 @@ eval_tests(Config, Fun, Tests) ->
case message_compare(E, Return) of
true ->
BadL;
- false ->
+ false ->
?t:format("~nTest ~p failed. Expected~n ~p~n"
"but got~n ~p~n", [N, E, Return]),
fail()
@@ -1228,9 +1240,9 @@ check_test(Config, Test) ->
?line File = filename:join(PrivDir, Filename),
?line ok = file:write_file(File, Test),
?line case epp:parse_file(File, [PrivDir], []) of
- {ok,Forms} ->
+ {ok,Forms} ->
[E || E={error,_} <- Forms];
- {error,Error} ->
+ {error,Error} ->
Error
end.
@@ -1245,7 +1257,7 @@ compile_test(Config, Test0) ->
{ok, Ws} -> warnings(File, Ws);
Else -> Else
end.
-
+
warnings(File, Ws) ->
case lists:append([W || {F, W} <- Ws, F =:= File]) of
[] -> [];
@@ -1289,7 +1301,7 @@ message_compare(T, T) ->
message_compare(T1, T2) ->
ln(T1) =:= T2.
-%% Replaces locations like {Line,Column} with Line.
+%% Replaces locations like {Line,Column} with Line.
ln({warnings,L}) ->
{warnings,ln0(L)};
ln({errors,EL,WL}) ->
diff --git a/lib/stdlib/test/erl_lint_SUITE.erl b/lib/stdlib/test/erl_lint_SUITE.erl
index 8581b496aa..d0c0d68b4a 100644
--- a/lib/stdlib/test/erl_lint_SUITE.erl
+++ b/lib/stdlib/test/erl_lint_SUITE.erl
@@ -1784,6 +1784,9 @@ otp_5362(Config) when is_list(Config) ->
{15,erl_lint,{undefined_field,ok,nix}},
{16,erl_lint,{field_name_is_variable,ok,'Var'}}]}},
+ %% Nowarn_bif_clash has changed behaviour as local functions
+	  %% nowadays supersede auto-imported BIFs, which is why nowarn_bif_clash in itself generates an error
+ %% (OTP-8579) /PaN
{otp_5362_4,
<<"-compile(nowarn_deprecated_function).
-compile(nowarn_bif_clash).
@@ -1795,9 +1798,8 @@ otp_5362(Config) when is_list(Config) ->
warn_deprecated_function,
warn_bif_clash]},
{error,
- [{5,erl_lint,{call_to_redefined_bif,{spawn,1}}}],
- [{3,erl_lint,{redefine_bif,{spawn,1}}},
- {4,erl_lint,{deprecated,{erlang,hash,2},{erlang,phash2,2},
+ [{5,erl_lint,{call_to_redefined_old_bif,{spawn,1}}}],
+ [{4,erl_lint,{deprecated,{erlang,hash,2},{erlang,phash2,2},
"in a future release"}}]}},
{otp_5362_5,
@@ -1808,8 +1810,8 @@ otp_5362(Config) when is_list(Config) ->
spawn(A).
">>,
{[nowarn_unused_function]},
- {warnings,
- [{3,erl_lint,{redefine_bif,{spawn,1}}}]}},
+ {errors,
+ [{2,erl_lint,disallowed_nowarn_bif_clash}],[]}},
%% The special nowarn_X are not affected by general warn_X.
{otp_5362_6,
@@ -1822,8 +1824,8 @@ otp_5362(Config) when is_list(Config) ->
{[nowarn_unused_function,
warn_deprecated_function,
warn_bif_clash]},
- {warnings,
- [{3,erl_lint,{redefine_bif,{spawn,1}}}]}},
+ {errors,
+ [{2,erl_lint,disallowed_nowarn_bif_clash}],[]}},
{otp_5362_7,
<<"-export([spawn/1]).
@@ -1838,7 +1840,9 @@ otp_5362(Config) when is_list(Config) ->
spawn(A).
">>,
{[nowarn_unused_function]},
- {error,[{4,erl_lint,{bad_nowarn_bif_clash,{spawn,2}}}],
+ {error,[{3,erl_lint,disallowed_nowarn_bif_clash},
+ {4,erl_lint,disallowed_nowarn_bif_clash},
+ {4,erl_lint,{bad_nowarn_bif_clash,{spawn,2}}}],
[{5,erl_lint,{bad_nowarn_deprecated_function,{3,hash,-1}}},
{5,erl_lint,{bad_nowarn_deprecated_function,{erlang,hash,-1}}},
{5,erl_lint,{bad_nowarn_deprecated_function,{{a,b,c},hash,-1}}}]}
@@ -1865,7 +1869,21 @@ otp_5362(Config) when is_list(Config) ->
t() -> #a{}.
">>,
{[]},
- []}
+ []},
+
+ {otp_5362_10,
+ <<"-compile({nowarn_deprecated_function,{erlang,hash,2}}).
+ -compile({nowarn_bif_clash,{spawn,1}}).
+ -import(x,[spawn/1]).
+ spin(A) ->
+ erlang:hash(A, 3000),
+ spawn(A).
+ ">>,
+ {[nowarn_unused_function,
+ warn_deprecated_function,
+ warn_bif_clash]},
+ {errors,
+ [{2,erl_lint,disallowed_nowarn_bif_clash}],[]}}
],
@@ -2234,7 +2252,7 @@ otp_5878(Config) when is_list(Config) ->
{15,erl_lint,{undefined_field,r3,q}},
{17,erl_lint,{undefined_field,r,q}},
{21,erl_lint,illegal_guard_expr},
- {23,erl_lint,illegal_guard_expr}],
+ {23,erl_lint,{illegal_guard_local_call,{l,0}}}],
[]} =
run_test2(Config, Ill1, [warn_unused_record]),
@@ -2389,9 +2407,9 @@ bif_clash(Config) when is_list(Config) ->
N.
">>,
[],
- {errors,[{2,erl_lint,{call_to_redefined_bif,{size,1}}}],[]}},
+ {errors,[{2,erl_lint,{call_to_redefined_old_bif,{size,1}}}],[]}},
- %% Verify that (some) warnings can be turned off.
+ %% Verify that warnings can not be turned off in the old way.
{clash2,
<<"-export([t/1,size/1]).
t(X) ->
@@ -2400,17 +2418,198 @@ bif_clash(Config) when is_list(Config) ->
size({N,_}) ->
N.
- %% My own abs/1 function works on lists too.
- %% Unfortunately, it is not exported, so there will
- %% be a warning that can't be turned off.
+ %% My own abs/1 function works on lists too. From R14 this really works.
abs([H|T]) when $a =< H, H =< $z -> [H-($a-$A)|abs(T)];
abs([H|T]) -> [H|abs(T)];
abs([]) -> [];
abs(X) -> erlang:abs(X).
">>,
- {[nowarn_bif_clash]},
- {warnings,[{11,erl_lint,{redefine_bif,{abs,1}}},
- {11,erl_lint,{unused_function,{abs,1}}}]}}],
+ {[nowarn_unused_function,nowarn_bif_clash]},
+ {errors,[{erl_lint,disallowed_nowarn_bif_clash}],[]}},
+	  %% As long as no one calls an overridden BIF, it's totally OK
+ {clash3,
+ <<"-export([size/1]).
+ size({N,_}) ->
+ N;
+ size(X) ->
+ erlang:size(X).
+ ">>,
+ [],
+ []},
+ %% But this is totally wrong - meaning of the program changed in R14, so this is an error
+ {clash4,
+ <<"-export([size/1]).
+ size({N,_}) ->
+ N;
+ size(X) ->
+ size(X).
+ ">>,
+ [],
+ {errors,[{5,erl_lint,{call_to_redefined_old_bif,{size,1}}}],[]}},
+	  %% For a post R14 bif, it's only a warning
+ {clash5,
+ <<"-export([binary_part/2]).
+ binary_part({B,_},{X,Y}) ->
+ binary_part(B,{X,Y});
+ binary_part(B,{X,Y}) ->
+ binary:part(B,X,Y).
+ ">>,
+ [],
+ {warnings,[{3,erl_lint,{call_to_redefined_bif,{binary_part,2}}}]}},
+ %% If you really mean to call yourself here, you can "unimport" size/1
+ {clash6,
+ <<"-export([size/1]).
+ -compile({no_auto_import,[size/1]}).
+ size([]) ->
+ 0;
+ size({N,_}) ->
+ N;
+ size([_|T]) ->
+ 1+size(T).
+ ">>,
+ [],
+ []},
+ %% Same for the post R14 autoimport warning
+ {clash7,
+ <<"-export([binary_part/2]).
+ -compile({no_auto_import,[binary_part/2]}).
+ binary_part({B,_},{X,Y}) ->
+ binary_part(B,{X,Y});
+ binary_part(B,{X,Y}) ->
+ binary:part(B,X,Y).
+ ">>,
+ [],
+ []},
+ %% but this doesn't mean the local function is allowed in a guard...
+ {clash8,
+ <<"-export([x/1]).
+ -compile({no_auto_import,[binary_part/2]}).
+ x(X) when binary_part(X,{1,2}) =:= <<1,2>> ->
+ hej.
+ binary_part({B,_},{X,Y}) ->
+ binary_part(B,{X,Y});
+ binary_part(B,{X,Y}) ->
+ binary:part(B,X,Y).
+ ">>,
+ [],
+ {errors,[{3,erl_lint,{illegal_guard_local_call,{binary_part,2}}}],[]}},
+ %% no_auto_import is not like nowarn_bif_clash, it actually removes the autoimport
+ {clash9,
+ <<"-export([x/1]).
+ -compile({no_auto_import,[binary_part/2]}).
+ x(X) ->
+ binary_part(X,{1,2}) =:= <<1,2>>.
+ ">>,
+ [],
+ {errors,[{4,erl_lint,{undefined_function,{binary_part,2}}}],[]}},
+ %% but we could import it again...
+ {clash10,
+ <<"-export([x/1]).
+ -compile({no_auto_import,[binary_part/2]}).
+ -import(erlang,[binary_part/2]).
+ x(X) ->
+ binary_part(X,{1,2}) =:= <<1,2>>.
+ ">>,
+ [],
+ []},
+ %% and actually use it in a guard...
+ {clash11,
+ <<"-export([x/1]).
+ -compile({no_auto_import,[binary_part/2]}).
+ -import(erlang,[binary_part/2]).
+ x(X) when binary_part(X,{0,1}) =:= <<0>> ->
+ binary_part(X,{1,2}) =:= <<1,2>>.
+ ">>,
+ [],
+ []},
+ %% but for non-obvious historical reasons, imported functions cannot be used in
+ %% fun construction without the module name...
+ {clash12,
+ <<"-export([x/1]).
+ -compile({no_auto_import,[binary_part/2]}).
+ -import(erlang,[binary_part/2]).
+ x(X) when binary_part(X,{0,1}) =:= <<0>> ->
+ binary_part(X,{1,2}) =:= fun binary_part/2.
+ ">>,
+ [],
+ {errors,[{5,erl_lint,{undefined_function,{binary_part,2}}}],[]}},
+ %% Not from erlang and not from anywhere else
+ {clash13,
+ <<"-export([x/1]).
+ -compile({no_auto_import,[binary_part/2]}).
+ -import(x,[binary_part/2]).
+ x(X) ->
+ binary_part(X,{1,2}) =:= fun binary_part/2.
+ ">>,
+ [],
+ {errors,[{5,erl_lint,{undefined_function,{binary_part,2}}}],[]}},
+ %% ...while real auto-import is OK.
+ {clash14,
+ <<"-export([x/1]).
+ x(X) when binary_part(X,{0,1}) =:= <<0>> ->
+ binary_part(X,{1,2}) =:= fun binary_part/2.
+ ">>,
+ [],
+ []},
+	  %% An import directive clashing with an old bif is an error, regardless of whether it is called or not
+ {clash15,
+ <<"-export([x/1]).
+ -import(x,[abs/1]).
+ x(X) ->
+ binary_part(X,{1,2}).
+ ">>,
+ [],
+ {errors,[{2,erl_lint,{redefine_old_bif_import,{abs,1}}}],[]}},
+ %% For a new BIF, it's only a warning
+ {clash16,
+ <<"-export([x/1]).
+ -import(x,[binary_part/3]).
+ x(X) ->
+ abs(X).
+ ">>,
+ [],
+ {warnings,[{2,erl_lint,{redefine_bif_import,{binary_part,3}}}]}},
+ %% And, you cannot redefine already imported things that aren't auto-imported
+ {clash17,
+ <<"-export([x/1]).
+ -import(x,[binary_port/3]).
+ -import(y,[binary_port/3]).
+ x(X) ->
+ abs(X).
+ ">>,
+ [],
+ {errors,[{3,erl_lint,{redefine_import,{{binary_port,3},x}}}],[]}},
+ %% Not with local functions either
+ {clash18,
+ <<"-export([x/1]).
+ -import(x,[binary_port/3]).
+ binary_port(A,B,C) ->
+ binary_part(A,B,C).
+ x(X) ->
+ abs(X).
+ ">>,
+ [],
+ {errors,[{3,erl_lint,{define_import,{binary_port,3}}}],[]}},
+	  %% Like clash8: Don't accept a guard if it is explicitly called with a module name either
+ {clash19,
+ <<"-export([binary_port/3]).
+ -compile({no_auto_import,[binary_part/3]}).
+ -import(x,[binary_part/3]).
+ binary_port(A,B,C) when x:binary_part(A,B,C) ->
+ binary_part(A,B,C+1).
+ ">>,
+ [],
+ {errors,[{4,erl_lint,illegal_guard_expr}],[]}},
+ %% Not with local functions either
+ {clash20,
+ <<"-export([binary_port/3]).
+ -import(x,[binary_part/3]).
+ binary_port(A,B,C) ->
+ binary_part(A,B,C).
+ ">>,
+ [warn_unused_import],
+ {warnings,[{2,erl_lint,{redefine_bif_import,{binary_part,3}}}]}}
+ ],
?line [] = run(Config, Ts),
ok.
diff --git a/lib/stdlib/test/erl_pp_SUITE.erl b/lib/stdlib/test/erl_pp_SUITE.erl
index 66730b7b94..c57541fba9 100644
--- a/lib/stdlib/test/erl_pp_SUITE.erl
+++ b/lib/stdlib/test/erl_pp_SUITE.erl
@@ -46,7 +46,7 @@
neg_indent/1,
tickets/1,
otp_6321/1, otp_6911/1, otp_6914/1, otp_8150/1, otp_8238/1,
- otp_8473/1, otp_8522/1, otp_8567/1]).
+ otp_8473/1, otp_8522/1, otp_8567/1, otp_8664/1]).
%% Internal export.
-export([ehook/6]).
@@ -765,7 +765,7 @@ neg_indent(Config) when is_list(Config) ->
tickets(suite) ->
[otp_6321, otp_6911, otp_6914, otp_8150, otp_8238, otp_8473, otp_8522,
- otp_8567].
+ otp_8567, otp_8664].
otp_6321(doc) ->
"OTP_6321. Bug fix of exprs().";
@@ -995,6 +995,38 @@ otp_8567(Config) when is_list(Config) ->
ok.
+otp_8664(doc) ->
+ "OTP_8664. Types with integer expressions.";
+otp_8664(suite) -> [];
+otp_8664(Config) when is_list(Config) ->
+ FileName = filename('otp_8664.erl', Config),
+ C1 = <<"-module(otp_8664).\n"
+ "-export([t/0]).\n"
+ "-define(A, -3).\n"
+ "-define(B, (?A*(-1 band (((2)))))).\n"
+ "-type t1() :: ?B | ?A.\n"
+ "-type t2() :: ?B-1 .. -?B.\n"
+ "-type t3() :: 9 band (8 - 3) | 1+2 | 5 band 3.\n"
+ "-type b1() :: <<_:_*(3-(-1))>>\n"
+ " | <<_:(-(?B))>>\n"
+ " | <<_:4>>.\n"
+ "-type u() :: 1 .. 2 | 3.. 4 | (8-3) ..6 | 5+0..6.\n"
+ "-type t() :: t1() | t2() | t3() | b1() | u().\n"
+ "-spec t() -> t().\n"
+ "t() -> 3.\n">>,
+ ?line ok = file:write_file(FileName, C1),
+ ?line {ok, _, []} = compile:file(FileName, [return]),
+
+ C2 = <<"-module(otp_8664).\n"
+ "-export([t/0]).\n"
+ "-spec t() -> 9 and 4.\n"
+ "t() -> 0.\n">>,
+ ?line ok = file:write_file(FileName, C2),
+ ?line {error,[{_,[{3,erl_lint,{type_syntax,integer}}]}],_} =
+ compile:file(FileName, [return]),
+
+ ok.
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
compile(Config, Tests) ->
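
The otp_8664 case above comes down to one distinction: arithmetic and bitwise operators on integer literals are evaluated in type positions, while boolean operators are rejected by erl_lint. A condensed sketch (hypothetical module, not part of the patch):

    -module(int_type_demo).                      %% hypothetical module
    -export([t/0]).
    %% Accepted: the expressions evaluate to the ranges 5..6 | 3 | 1.
    -type ok_range() :: (8-3)..6 | 1+2 | 5 band 3.
    %% Rejected with {type_syntax,integer}, as the second half of the test checks:
    %% -type bad() :: 9 and 4.
    -spec t() -> ok_range().
    t() -> 3.
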
diff --git a/lib/stdlib/test/erl_scan_SUITE.erl b/lib/stdlib/test/erl_scan_SUITE.erl
index afeb67eeb1..32eb97bc92 100644
--- a/lib/stdlib/test/erl_scan_SUITE.erl
+++ b/lib/stdlib/test/erl_scan_SUITE.erl
@@ -185,7 +185,7 @@ reserved_words() ->
'andalso', 'orelse', 'end', 'fun', 'if', 'let', 'of',
'query', 'receive', 'when', 'bnot', 'not', 'div',
'rem', 'band', 'and', 'bor', 'bxor', 'bsl', 'bsr',
- 'or', 'xor'] ,
+ 'or', 'xor'],
[begin
?line {RW, true} = {RW, erl_scan:reserved_word(RW)},
S = atom_to_list(RW),
@@ -244,6 +244,9 @@ punctuations() ->
{'\\',1},{'^',1},{'`',1},{'~',1}],
?line test_string("#&*+/:<>?@\\^`~", PTs2),
+ ?line test_string(".. ", [{'..',1}]),
+ ?line test("1 .. 2"),
+ ?line test_string("...", [{'...',1}]),
ok.
comments() ->
diff --git a/lib/stdlib/test/gen_event_SUITE.erl b/lib/stdlib/test/gen_event_SUITE.erl
index 8cbffaca56..4f7de451e3 100644
--- a/lib/stdlib/test/gen_event_SUITE.erl
+++ b/lib/stdlib/test/gen_event_SUITE.erl
@@ -23,9 +23,11 @@
-export([all/1]).
-export([start/1, test_all/1, add_handler/1, add_sup_handler/1,
delete_handler/1, swap_handler/1, swap_sup_handler/1,
- notify/1, sync_notify/1, call/1, info/1, hibernate/1]).
+ notify/1, sync_notify/1, call/1, info/1, hibernate/1,
+ call_format_status/1, error_format_status/1]).
-all(suite) -> {req, [stdlib], [start, test_all, hibernate]}.
+all(suite) -> {req, [stdlib], [start, test_all, hibernate,
+ call_format_status, error_format_status]}.
%% --------------------------------------
%% Start an event manager.
@@ -844,3 +846,56 @@ info(Config) when is_list(Config) ->
?line ok = gen_event:stop(my_dummy_handler),
ok.
+
+call_format_status(suite) ->
+ [];
+call_format_status(doc) ->
+ ["Test that sys:get_status/1,2 calls format_status/2"];
+call_format_status(Config) when is_list(Config) ->
+ ?line {ok, Pid} = gen_event:start({local, my_dummy_handler}),
+ %% State here intentionally differs from what we expect from format_status
+ State = self(),
+ FmtState = "dummy1_h handler state",
+ ?line ok = gen_event:add_handler(my_dummy_handler, dummy1_h, [State]),
+ ?line Status1 = sys:get_status(Pid),
+ ?line Status2 = sys:get_status(Pid, 5000),
+ ?line ok = gen_event:stop(Pid),
+ ?line {status, Pid, _, [_, _, Pid, [], Data1]} = Status1,
+ ?line HandlerInfo1 = proplists:get_value(items, Data1),
+ ?line {"Installed handlers", [{_,dummy1_h,_,FmtState,_}]} = HandlerInfo1,
+ ?line {status, Pid, _, [_, _, Pid, [], Data2]} = Status2,
+ ?line HandlerInfo2 = proplists:get_value(items, Data2),
+ ?line {"Installed handlers", [{_,dummy1_h,_,FmtState,_}]} = HandlerInfo2,
+ ok.
+
+error_format_status(suite) ->
+ [];
+error_format_status(doc) ->
+ ["Test that a handler error calls format_status/2"];
+error_format_status(Config) when is_list(Config) ->
+ ?line error_logger_forwarder:register(),
+ OldFl = process_flag(trap_exit, true),
+ State = self(),
+ ?line {ok, Pid} = gen_event:start({local, my_dummy_handler}),
+ ?line ok = gen_event:add_sup_handler(my_dummy_handler, dummy1_h, [State]),
+ ?line ok = gen_event:notify(my_dummy_handler, do_crash),
+ ?line receive
+ {gen_event_EXIT,dummy1_h,{'EXIT',_}} -> ok
+ after 5000 ->
+ ?t:fail(exit_gen_event)
+ end,
+ FmtState = "dummy1_h handler state",
+ receive
+ {error,_GroupLeader, {Pid,
+ "** gen_event handler"++_,
+ [dummy1_h,my_dummy_handler,do_crash,
+ FmtState, _]}} ->
+ ok;
+ Other ->
+ ?line io:format("Unexpected: ~p", [Other]),
+ ?line ?t:fail()
+ end,
+ ?t:messages_get(),
+ ?line ok = gen_event:stop(Pid),
+ process_flag(trap_exit, OldFl),
+ ok.
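
Both new gen_event cases depend on the dummy1_h handler, which is not part of this patch, returning a fixed string from its format_status/2 callback. The callback presumably has roughly this shape (assumed, shown only for context):

    %% Assumed dummy1_h callback: sys:get_status/1,2 and the crash report
    %% pick up the returned term instead of the raw handler state.
    format_status(_Opt, [_PDict, _State]) ->
        "dummy1_h handler state".
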
diff --git a/lib/stdlib/test/gen_fsm_SUITE.erl b/lib/stdlib/test/gen_fsm_SUITE.erl
index 23c1d9a193..dd120f8c05 100644
--- a/lib/stdlib/test/gen_fsm_SUITE.erl
+++ b/lib/stdlib/test/gen_fsm_SUITE.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(gen_fsm_SUITE).
@@ -30,7 +30,7 @@
-export([shutdown/1]).
--export([sys/1, sys1/1, call_format_status/1]).
+-export([sys/1, sys1/1, call_format_status/1, error_format_status/1]).
-export([hibernate/1,hiber_idle/3,hiber_wakeup/3,hiber_idle/2,hiber_wakeup/2]).
@@ -305,7 +305,7 @@ shutdown(Config) when is_list(Config) ->
ok.
-sys(suite) -> [sys1, call_format_status].
+sys(suite) -> [sys1, call_format_status, error_format_status].
sys1(Config) when is_list(Config) ->
?line {ok, Pid} =
@@ -320,10 +320,53 @@ sys1(Config) when is_list(Config) ->
call_format_status(Config) when is_list(Config) ->
?line {ok, Pid} = gen_fsm:start(gen_fsm_SUITE, [], []),
?line Status = sys:get_status(Pid),
- ?line {status, Pid, _Mod, [_PDict, running, _Parent, _, Data]} = Status,
+ ?line {status, Pid, _Mod, [_PDict, running, _, _, Data]} = Status,
?line [format_status_called | _] = lists:reverse(Data),
- ?line stop_it(Pid).
+ ?line stop_it(Pid),
+
+ %% check that format_status can handle a name being an atom (pid is
+ %% already checked by the previous test)
+ ?line {ok, Pid2} = gen_fsm:start({local, gfsm}, gen_fsm_SUITE, [], []),
+ ?line Status2 = sys:get_status(gfsm),
+ ?line {status, Pid2, _Mod, [_PDict2, running, _, _, Data2]} = Status2,
+ ?line [format_status_called | _] = lists:reverse(Data2),
+ ?line stop_it(Pid2),
+ %% check that format_status can handle a name being a term other than a
+ %% pid or atom
+ GlobalName1 = {global, "CallFormatStatus"},
+ ?line {ok, Pid3} = gen_fsm:start(GlobalName1, gen_fsm_SUITE, [], []),
+ ?line Status3 = sys:get_status(GlobalName1),
+ ?line {status, Pid3, _Mod, [_PDict3, running, _, _, Data3]} = Status3,
+ ?line [format_status_called | _] = lists:reverse(Data3),
+ ?line stop_it(Pid3),
+ GlobalName2 = {global, {name, "term"}},
+ ?line {ok, Pid4} = gen_fsm:start(GlobalName2, gen_fsm_SUITE, [], []),
+ ?line Status4 = sys:get_status(GlobalName2),
+ ?line {status, Pid4, _Mod, [_PDict4, running, _, _, Data4]} = Status4,
+ ?line [format_status_called | _] = lists:reverse(Data4),
+ ?line stop_it(Pid4).
+
+error_format_status(Config) when is_list(Config) ->
+ ?line error_logger_forwarder:register(),
+ OldFl = process_flag(trap_exit, true),
+ StateData = "called format_status",
+ ?line {ok, Pid} = gen_fsm:start(gen_fsm_SUITE, {state_data, StateData}, []),
+ %% bad return value in the gen_fsm loop
+ ?line {'EXIT',{{bad_return_value, badreturn},_}} =
+ (catch gen_fsm:sync_send_event(Pid, badreturn)),
+ receive
+ {error,_GroupLeader,{Pid,
+ "** State machine"++_,
+ [Pid,{_,_,badreturn},idle,StateData,_]}} ->
+ ok;
+ Other ->
+ ?line io:format("Unexpected: ~p", [Other]),
+ ?line ?t:fail()
+ end,
+ ?t:messages_get(),
+ process_flag(trap_exit, OldFl),
+ ok.
%% Hibernation
hibernate(suite) -> [];
@@ -704,6 +747,8 @@ init(hiber) ->
{ok, hiber_idle, []};
init(hiber_now) ->
{ok, hiber_idle, [], hibernate};
+init({state_data, StateData}) ->
+ {ok, idle, StateData};
init(_) ->
{ok, idle, state_data}.
@@ -844,5 +889,7 @@ handle_sync_event(stop_shutdown_reason, _From, _State, Data) ->
handle_sync_event({get, _Pid}, _From, State, Data) ->
{reply, {state, State, Data}, State, Data}.
-format_status(_Opt, [_Pdict, _StateData]) ->
+format_status(terminate, [_Pdict, StateData]) ->
+ StateData;
+format_status(normal, [_Pdict, _StateData]) ->
[format_status_called].
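
The gen_fsm error_format_status case relies on a state callback returning an invalid value. A clause along these lines in the suite's idle/3 callback (assumed; the existing clauses are outside this hunk) is enough to trigger the exit, after which format_status(terminate, ...) supplies the StateData string seen in the crash report:

    %% Assumed clause: a bare atom is not a valid gen_fsm return value, so
    %% the process terminates with {bad_return_value,badreturn}.
    idle(badreturn, _From, _StateData) ->
        badreturn.
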
diff --git a/lib/stdlib/test/gen_server_SUITE.erl b/lib/stdlib/test/gen_server_SUITE.erl
index 0f60c2c4ee..99388ba2e3 100644
--- a/lib/stdlib/test/gen_server_SUITE.erl
+++ b/lib/stdlib/test/gen_server_SUITE.erl
@@ -31,7 +31,7 @@
spec_init_local_registered_parent/1,
spec_init_global_registered_parent/1,
otp_5854/1, hibernate/1, otp_7669/1, call_format_status/1,
- call_with_huge_message_queue/1
+ error_format_status/1, call_with_huge_message_queue/1
]).
% spawn export
@@ -52,7 +52,8 @@ all(suite) ->
call_remote_n2, call_remote_n3, spec_init,
spec_init_local_registered_parent,
spec_init_global_registered_parent,
- otp_5854, hibernate, otp_7669, call_format_status,
+ otp_5854, hibernate, otp_7669,
+ call_format_status, error_format_status,
call_with_huge_message_queue].
-define(default_timeout, ?t:minutes(1)).
@@ -897,13 +898,64 @@ call_format_status(doc) ->
["Test that sys:get_status/1,2 calls format_status/2"];
call_format_status(Config) when is_list(Config) ->
?line {ok, Pid} = gen_server:start_link({local, call_format_status},
- gen_server_SUITE, [], []),
+ ?MODULE, [], []),
?line Status1 = sys:get_status(call_format_status),
?line {status, Pid, _Mod, [_PDict, running, _Parent, _, Data1]} = Status1,
?line [format_status_called | _] = lists:reverse(Data1),
?line Status2 = sys:get_status(call_format_status, 5000),
?line {status, Pid, _Mod, [_PDict, running, _Parent, _, Data2]} = Status2,
?line [format_status_called | _] = lists:reverse(Data2),
+
+ %% check that format_status can handle a name being a pid (atom is
+ %% already checked by the previous test)
+ ?line {ok, Pid3} = gen_server:start_link(gen_server_SUITE, [], []),
+ ?line Status3 = sys:get_status(Pid3),
+ ?line {status, Pid3, _Mod, [_PDict3, running, _Parent, _, Data3]} = Status3,
+ ?line [format_status_called | _] = lists:reverse(Data3),
+
+ %% check that format_status can handle a name being a term other than a
+ %% pid or atom
+ GlobalName1 = {global, "CallFormatStatus"},
+ ?line {ok, Pid4} = gen_server:start_link(GlobalName1,
+ gen_server_SUITE, [], []),
+ ?line Status4 = sys:get_status(Pid4),
+ ?line {status, Pid4, _Mod, [_PDict4, running, _Parent, _, Data4]} = Status4,
+ ?line [format_status_called | _] = lists:reverse(Data4),
+ GlobalName2 = {global, {name, "term"}},
+ ?line {ok, Pid5} = gen_server:start_link(GlobalName2,
+ gen_server_SUITE, [], []),
+ ?line Status5 = sys:get_status(GlobalName2),
+ ?line {status, Pid5, _Mod, [_PDict5, running, _Parent, _, Data5]} = Status5,
+ ?line [format_status_called | _] = lists:reverse(Data5),
+ ok.
+
+%% Verify that error termination correctly calls our format_status/2 fun
+%%
+error_format_status(suite) ->
+ [];
+error_format_status(doc) ->
+ ["Test that an error termination calls format_status/2"];
+error_format_status(Config) when is_list(Config) ->
+ ?line error_logger_forwarder:register(),
+ OldFl = process_flag(trap_exit, true),
+ State = "called format_status",
+ ?line {ok, Pid} = gen_server:start_link(?MODULE, {state, State}, []),
+ ?line {'EXIT',{crashed,_}} = (catch gen_server:call(Pid, crash)),
+ receive
+ {'EXIT', Pid, crashed} ->
+ ok
+ end,
+ receive
+ {error,_GroupLeader,{Pid,
+ "** Generic server"++_,
+ [Pid,crash,State,crashed]}} ->
+ ok;
+ Other ->
+ ?line io:format("Unexpected: ~p", [Other]),
+ ?line ?t:fail()
+ end,
+ ?t:messages_get(),
+ process_flag(trap_exit, OldFl),
ok.
%% Test that the time for a huge message queue is not
@@ -1105,5 +1157,7 @@ terminate({From, stopped_info}, _State) ->
terminate(_Reason, _State) ->
ok.
-format_status(_Opt, [_PDict, _State]) ->
- [format_status_called].
+format_status(terminate, [_PDict, State]) ->
+ State;
+format_status(normal, [_PDict, _State]) ->
+ format_status_called.
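
The gen_server error_format_status case similarly assumes a crash-inducing clause in the suite's handle_call/3 (hypothetical sketch; the real callback is not shown in this hunk):

    %% Hypothetical clause: exit/1 keeps the termination reason as the bare
    %% atom 'crashed', matching both the caller's {'EXIT',{crashed,_}} result
    %% and the {'EXIT',Pid,crashed} message received under trap_exit.
    handle_call(crash, _From, _State) ->
        exit(crashed).
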
diff --git a/lib/stdlib/test/qlc_SUITE.erl b/lib/stdlib/test/qlc_SUITE.erl
index aa12ed57da..e21de8770a 100644
--- a/lib/stdlib/test/qlc_SUITE.erl
+++ b/lib/stdlib/test/qlc_SUITE.erl
@@ -3184,7 +3184,9 @@ lookup2(Config) when is_list(Config) ->
[] = qlc:e(Q),
false = lookup_keys(Q)
end, [{1,b},{2,3}])">>,
- {warnings,[{{3,48},qlc,nomatch_filter}]}},
+ {warnings,[{2,sys_core_fold,nomatch_guard},
+ {3,qlc,nomatch_filter},
+ {3,sys_core_fold,{eval_failure,badarg}}]}},
<<"etsc(fun(E) ->
Q = qlc:q([X || {X} <- ets:table(E), element(1,{X}) =:= 1]),
diff --git a/lib/syntax_tools/src/erl_comment_scan.erl b/lib/syntax_tools/src/erl_comment_scan.erl
index e2c6976a2b..108ab3bffd 100644
--- a/lib/syntax_tools/src/erl_comment_scan.erl
+++ b/lib/syntax_tools/src/erl_comment_scan.erl
@@ -26,6 +26,7 @@
-export([file/1, join_lines/1, scan_lines/1, string/1]).
+-export_type([comment/0]).
%% =====================================================================
diff --git a/lib/syntax_tools/src/erl_recomment.erl b/lib/syntax_tools/src/erl_recomment.erl
index 145bbc6f37..94e760dad7 100644
--- a/lib/syntax_tools/src/erl_recomment.erl
+++ b/lib/syntax_tools/src/erl_recomment.erl
@@ -486,7 +486,7 @@ build_tree(Node) ->
%% Include L, while preserving Min =< Max.
tree_node(minpos(L, Min),
- max(L, Max),
+ erlang:max(L, Max),
erl_syntax:type(Node),
erl_syntax:get_attrs(Node),
Subtrees)
@@ -507,7 +507,7 @@ build_list(Ts) ->
build_list([T | Ts], Min, Max, Ack) ->
Node = build_tree(T),
Min1 = minpos(node_min(Node), Min),
- Max1 = max(node_max(Node), Max),
+ Max1 = erlang:max(node_max(Node), Max),
build_list(Ts, Min1, Max1, [Node | Ack]);
build_list([], Min, Max, Ack) ->
list_node(Min, Max, lists:reverse(Ack)).
@@ -518,7 +518,7 @@ build_list_list(Ls) ->
build_list_list([L | Ls], Min, Max, Ack) ->
Node = build_list(L),
Min1 = minpos(node_min(Node), Min),
- Max1 = max(node_max(Node), Max),
+ Max1 = erlang:max(node_max(Node), Max),
build_list_list(Ls, Min1, Max1, [Node | Ack]);
build_list_list([], Min, Max, Ack) ->
{lists:reverse(Ack), Min, Max}.
@@ -723,9 +723,6 @@ tree_node_attrs(#tree{attrs = Attrs}) ->
%% Just the generic "maximum" function
-max(X, Y) when X > Y -> X;
-max(_, Y) -> Y.
-
%% Return the least positive integer of X and Y, or zero if none of them
%% are positive. (This is necessary for computing minimum source line
%% numbers, since zero (or negative) numbers may occur, but they
diff --git a/lib/syntax_tools/src/erl_syntax.erl b/lib/syntax_tools/src/erl_syntax.erl
index 9a2967d550..a40bf83c5a 100644
--- a/lib/syntax_tools/src/erl_syntax.erl
+++ b/lib/syntax_tools/src/erl_syntax.erl
@@ -309,6 +309,7 @@
data/1,
is_tree/1]).
+-export_type([forms/0, syntaxTree/0, syntaxTreeAttributes/0]).
%% =====================================================================
%% IMPLEMENTATION NOTES:
diff --git a/lib/syntax_tools/src/erl_syntax_lib.erl b/lib/syntax_tools/src/erl_syntax_lib.erl
index 5c4e074488..4808971a59 100644
--- a/lib/syntax_tools/src/erl_syntax_lib.erl
+++ b/lib/syntax_tools/src/erl_syntax_lib.erl
@@ -46,6 +46,8 @@
new_variable_names/2, new_variable_names/3, strip_comments/1,
to_comment/1, to_comment/2, to_comment/3, variables/1]).
+-export_type([info_pair/0]).
+
%% =====================================================================
-type ordset(X) :: [X]. % XXX: TAKE ME OUT
@@ -400,10 +402,7 @@ new_variable_name(N, R, _T, F, S) ->
%% implementation of `sets'.
start_range(S) ->
- max(sets:size(S) * ?START_RANGE_FACTOR, ?MINIMUM_RANGE).
-
-max(X, Y) when X > Y -> X;
-max(_, Y) -> Y.
+ erlang:max(sets:size(S) * ?START_RANGE_FACTOR, ?MINIMUM_RANGE).
%% The previous number might or might not be used to compute the
%% next number to be tried. It is currently not used.
diff --git a/lib/syntax_tools/src/prettypr.erl b/lib/syntax_tools/src/prettypr.erl
index 1868f63e54..c13fa30998 100644
--- a/lib/syntax_tools/src/prettypr.erl
+++ b/lib/syntax_tools/src/prettypr.erl
@@ -48,6 +48,8 @@
nest/2, par/1, par/2, sep/1, text/1, null_text/1, text_par/1,
text_par/2]).
+-export_type([document/0]).
+
%% ---------------------------------------------------------------------
-type deep_string() :: [char() | deep_string()].
diff --git a/lib/syntax_tools/vsn.mk b/lib/syntax_tools/vsn.mk
index 2ba5eac582..6051fb8e39 100644
--- a/lib/syntax_tools/vsn.mk
+++ b/lib/syntax_tools/vsn.mk
@@ -1 +1 @@
-SYNTAX_TOOLS_VSN = 1.6.5
+SYNTAX_TOOLS_VSN = 1.6.6
diff --git a/lib/tools/emacs/Makefile b/lib/tools/emacs/Makefile
index 0028df247c..8533488463 100644
--- a/lib/tools/emacs/Makefile
+++ b/lib/tools/emacs/Makefile
@@ -42,6 +42,7 @@ EMACS_FILES= \
erlang_appwiz \
erlang-start \
erlang-eunit \
+ erlang-flymake \
erlang
README_FILES= README
diff --git a/lib/tools/emacs/README b/lib/tools/emacs/README
index ca068d04c4..cc107dcd41 100644
--- a/lib/tools/emacs/README
+++ b/lib/tools/emacs/README
@@ -42,7 +42,14 @@ Files\erl-<Ver>:
(setq erlang-root-dir "C:/Program Files/erl<Ver>")
(setq exec-path (cons "C:/Program Files/erl<Ver>/bin" exec-path))
(require 'erlang-start)
-
+Miscellaneous addons
+--------------------
+
+In order to check erlang source code on the fly, add the following
+line to your .emacs file (after erlang-start, see above). See
+erlang-flymake.el for more information on how to customize the syntax
+check.
+ (require 'erlang-flymake)
diff --git a/lib/tools/emacs/erlang-eunit.el b/lib/tools/emacs/erlang-eunit.el
index b2598f93e6..f2c0db67dd 100644
--- a/lib/tools/emacs/erlang-eunit.el
+++ b/lib/tools/emacs/erlang-eunit.el
@@ -23,8 +23,22 @@
(eval-when-compile
(require 'cl))
-(defvar erlang-eunit-separate-src-and-test-directories t
- "*Whether or not to keep source and EUnit test files in separate directories")
+(defvar erlang-eunit-src-candidate-dirs '("../src" ".")
+ "*Names of directories in which to search for a source file matching
+an EUnit test file. The first directory in the list is used
+if there is no match.")
+
+(defvar erlang-eunit-test-candidate-dirs '("../test" ".")
+ "*Names of directories in which to search for an EUnit test file matching
+a source file. The first directory in the list is used
+if there is no match.")
+
+(defvar erlang-eunit-autosave nil
+ "*Set to non-nil to automatically save unsaved buffers before running tests.
+This is useful because it reduces the save-compile-load-test cycle to a single keychord.")
+
+(defvar erlang-eunit-recent-info '((mode . nil) (module . nil) (test . nil) (cover . nil))
+ "Info about the most recent EUnit test run (a single test or a whole module).")
;;;
;;; Switch between src/EUnit test buffers
@@ -44,7 +58,6 @@ buffer and vice versa"
"Open the EUnit test file which corresponds to a src file"
(find-file-other-window (erlang-eunit-test-filename src-file-path)))
-
;;;
;;; Open the src file which corresponds to the an EUnit test file
;;;
@@ -55,37 +68,55 @@ buffer and vice versa"
;;; Return the name and path of the EUnit test file
;;, (input may be either the source filename itself or the EUnit test filename)
(defun erlang-eunit-test-filename (file-path)
- (erlang-eunit-rewrite-filename file-path "test" "_tests"))
+ (if (erlang-eunit-test-file-p file-path)
+ file-path
+ (erlang-eunit-rewrite-filename file-path erlang-eunit-test-candidate-dirs)))
;;; Return the name and path of the source file
;;, (input may be either the source filename itself or the EUnit test filename)
(defun erlang-eunit-src-filename (file-path)
- (erlang-eunit-rewrite-filename file-path "src" ""))
+ (if (erlang-eunit-src-file-p file-path)
+ file-path
+ (erlang-eunit-rewrite-filename file-path erlang-eunit-src-candidate-dirs)))
;;; Rewrite a filename from the src or test filename to the other
-(defun erlang-eunit-rewrite-filename (orig-file-path dest-dirname dest-suffix)
- (let* ((root-dir-name (erlang-eunit-file-root-dir-name orig-file-path))
- (src-module-name (erlang-eunit-source-module-name orig-file-path))
- (dest-base-name (concat src-module-name dest-suffix ".erl"))
- (dest-dir-name-1 (file-name-directory orig-file-path))
- (dest-dir-name-2 (filename-join root-dir-name dest-dirname))
- (dest-file-name-1 (filename-join dest-dir-name-1 dest-base-name))
- (dest-file-name-2 (filename-join dest-dir-name-2 dest-base-name)))
- ;; This function tries to be a bit intelligent:
- ;; * if there already is a test (or source) file in the same
- ;; directory as a source (or test) file, it'll be picked
- ;; * if there already is a test (or source) file in a separate
- ;; test (or src) directory, it'll be picked
- ;; * otherwise it'll resort to whatever alternative (same or
- ;; separate directories) that the user has chosen
- (cond ((file-readable-p dest-file-name-1)
- dest-file-name-1)
- ((file-readable-p dest-file-name-2)
- dest-file-name-2)
- (erlang-eunit-separate-src-and-test-directories
- dest-file-name-2)
- (t
- dest-file-name-1))))
+(defun erlang-eunit-rewrite-filename (orig-file-path candidate-dirs)
+ (or (erlang-eunit-locate-buddy orig-file-path candidate-dirs)
+ (erlang-eunit-buddy-file-path orig-file-path (car candidate-dirs))))
+
+;;; Search for a file's buddy file (a source file's EUnit test file,
+;;; or an EUnit test file's source file) in a list of candidate
+;;; directories.
+(defun erlang-eunit-locate-buddy (orig-file-path candidate-dirs)
+ (when candidate-dirs
+ (let ((buddy-file-path (erlang-eunit-buddy-file-path
+ orig-file-path
+ (car candidate-dirs))))
+ (if (file-readable-p buddy-file-path)
+ buddy-file-path
+ (erlang-eunit-locate-buddy orig-file-path (cdr candidate-dirs))))))
+
+(defun erlang-eunit-buddy-file-path (orig-file-path buddy-dir-name)
+ (let* ((orig-dir-name (file-name-directory orig-file-path))
+ (buddy-dir-name (file-truename
+ (filename-join orig-dir-name buddy-dir-name)))
+ (buddy-base-name (erlang-eunit-buddy-basename orig-file-path)))
+ (filename-join buddy-dir-name buddy-base-name)))
+
+;;; Return the basename of the buddy file:
+;;; /tmp/foo/src/x.erl --> x_tests.erl
+;;; /tmp/foo/test/x_tests.erl --> x.erl
+(defun erlang-eunit-buddy-basename (file-path)
+ (let ((src-module-name (erlang-eunit-source-module-name file-path)))
+ (cond
+ ((erlang-eunit-src-file-p file-path)
+ (concat src-module-name "_tests.erl"))
+ ((erlang-eunit-test-file-p file-path)
+ (concat src-module-name ".erl")))))
+
+;;; Checks whether a file is a source file or not
+(defun erlang-eunit-src-file-p (file-path)
+ (not (erlang-eunit-test-file-p file-path)))
;;; Checks whether a file is a EUnit test file or not
(defun erlang-eunit-test-file-p (file-path)
@@ -96,23 +127,17 @@ buffer and vice versa"
;;; /tmp/foo/test/x_tests.erl --> x
(defun erlang-eunit-source-module-name (file-path)
(interactive)
- (let* ((file-name (file-name-nondirectory file-path))
- (base-name (file-name-sans-extension file-name)))
- (if (string-match "^\\(.+\\)_tests$" base-name)
- (substring base-name (match-beginning 1) (match-end 1))
- base-name)))
-
-;;; Return the directory name which is common to both src and test
-;;; /tmp/foo/src/x.erl --> /tmp/foo
-;;; /tmp/foo/test/x_tests.erl --> /tmp/foo
-(defun erlang-eunit-file-root-dir-name (file-path)
- (erlang-eunit-dir-parent-dirname (file-name-directory file-path)))
-
-;;; Return the parent directory name of a directory
-;;; /tmp/foo/ --> /tmp
-;;; /tmp/foo --> /tmp
-(defun erlang-eunit-dir-parent-dirname (dir-name)
- (file-name-directory (directory-file-name dir-name)))
+ (let ((module-name (erlang-eunit-module-name file-path)))
+ (if (string-match "^\\(.+\\)_tests$" module-name)
+ (substring module-name (match-beginning 1) (match-end 1))
+ module-name)))
+
+;;; Return the module name of the file
+;;; /tmp/foo/src/x.erl --> x
+;;; /tmp/foo/test/x_tests.erl --> x_tests
+(defun erlang-eunit-module-name (file-path)
+ (interactive)
+ (file-name-sans-extension (file-name-nondirectory file-path)))
;;; Older emacsen don't have string-match-p.
(defun erlang-eunit-string-match-p (regexp string &optional start)
@@ -128,25 +153,158 @@ buffer and vice versa"
(concat dir file)
(concat dir "/" file)))
+;;; Get info about the most recent running of EUnit
+(defun erlang-eunit-recent (key)
+ (cdr (assq key erlang-eunit-recent-info)))
+
+;;; Record info about the most recent running of EUnit
+;;; Known modes are 'module-mode and 'test-mode
+(defun erlang-eunit-record-recent (mode module test)
+ (setcdr (assq 'mode erlang-eunit-recent-info) mode)
+ (setcdr (assq 'module erlang-eunit-recent-info) module)
+ (setcdr (assq 'test erlang-eunit-recent-info) test))
+
+;;; Record whether the most recent running of EUnit included cover
+;;; compilation
+(defun erlang-eunit-record-recent-compile (under-cover)
+ (setcdr (assq 'cover erlang-eunit-recent-info) under-cover))
+
+;;; Determine options for EUnit.
+(defun erlang-eunit-opts ()
+ (if current-prefix-arg ", [verbose]" ""))
+
+;;; Determine current test function
+(defun erlang-eunit-current-test ()
+ (save-excursion
+ (erlang-end-of-function 1)
+ (erlang-beginning-of-function 1)
+ (erlang-name-of-function)))
+
+(defun erlang-eunit-simple-test-p (test-name)
+ (if (erlang-eunit-string-match-p "^\\(.+\\)_test$" test-name) t nil))
+
+(defun erlang-eunit-test-generator-p (test-name)
+ (if (erlang-eunit-string-match-p "^\\(.+\\)_test_$" test-name) t nil))
+
+;;; Run one EUnit test
+(defun erlang-eunit-run-test (module-name test-name)
+ (let ((command
+ (cond ((erlang-eunit-simple-test-p test-name)
+ (format "eunit:test({%s, %s}%s)."
+ module-name test-name (erlang-eunit-opts)))
+ ((erlang-eunit-test-generator-p test-name)
+ (format "eunit:test({generator, %s, %s}%s)."
+ module-name test-name (erlang-eunit-opts)))
+ (t (format "%% WARNING: '%s' is not a test function" test-name)))))
+ (erlang-eunit-record-recent 'test-mode module-name test-name)
+ (erlang-eunit-inferior-erlang-send-command command)))
+
;;; Run EUnit tests for the current module
-(defun erlang-eunit-run-tests ()
- "Run the EUnit test suite for the current module.
+(defun erlang-eunit-run-module-tests (module-name)
+ (let ((command (format "eunit:test(%s%s)." module-name (erlang-eunit-opts))))
+ (erlang-eunit-record-recent 'module-mode module-name nil)
+ (erlang-eunit-inferior-erlang-send-command command)))
+
+(defun erlang-eunit-compile-and-run-recent ()
+ "Compile the source and test files and repeat the most recent EUnit test run.
-With prefix arg, runs tests with the verbose flag set."
+With prefix arg, compiles for debug and runs tests with the verbose flag set."
(interactive)
- (let* ((module-name (erlang-add-quotes-if-needed
- (erlang-eunit-source-module-name buffer-file-name)))
- (opts (if current-prefix-arg ", [verbose]" ""))
- (command (format "eunit:test(%s%s)." module-name opts)))
- (erlang-eunit-inferior-erlang-send-command command)))
+ (case (erlang-eunit-recent 'mode)
+ ('test-mode
+ (erlang-eunit-compile-and-test
+ 'erlang-eunit-run-test (list (erlang-eunit-recent 'module)
+ (erlang-eunit-recent 'test))))
+ ('module-mode
+ (erlang-eunit-compile-and-test
+ 'erlang-eunit-run-module-tests (list (erlang-eunit-recent 'module))
+ (erlang-eunit-recent 'cover)))
+ (t (error "EUnit has not yet been run. Please run a test first."))))
+
+(defun erlang-eunit-cover-compile ()
+ "Cover compile current module."
+ (interactive)
+ (let* ((erlang-compile-extra-opts
+ (append (list 'debug_info) erlang-compile-extra-opts))
+ (module-name
+ (erlang-add-quotes-if-needed
+ (erlang-eunit-module-name buffer-file-name)))
+ (compile-command
+ (format "cover:compile_beam(%s)." module-name)))
+ (erlang-compile)
+ (if (erlang-eunit-last-compilation-successful-p)
+ (erlang-eunit-inferior-erlang-send-command compile-command))))
+
+(defun erlang-eunit-analyze-coverage ()
+ "Analyze the data collected by the cover tool for the module in the
+current buffer.
+
+Assumes that the module has been cover compiled prior to this
+call. This function will do two things: print the number of
+covered and uncovered functions in the erlang shell and display a
+new buffer called *<module name> coverage* which shows the source
+code along with the coverage analysis results."
+ (interactive)
+ (let* ((module-name (erlang-add-quotes-if-needed
+ (erlang-eunit-module-name buffer-file-name)))
+ (tmp-filename (make-temp-file "cover"))
+ (analyze-command (format "cover:analyze_to_file(%s, \"%s\"). "
+ module-name tmp-filename))
+ (buf-name (format "*%s coverage*" module-name)))
+ (erlang-eunit-inferior-erlang-send-command analyze-command)
+ ;; The purpose of the following snippet is to get the result of the
+ ;; analysis from a file into a new buffer (or an old one, if a buffer with
+ ;; the specified name already exists). Also we want the erlang-mode
+ ;; *and* view-mode to be enabled.
+ (save-excursion
+ (let ((buf (get-buffer-create (format "*%s coverage*" module-name))))
+ (set-buffer buf)
+ (setq buffer-read-only nil)
+ (insert-file-contents tmp-filename nil nil nil t)
+ (if (= (buffer-size) 0)
+ (kill-buffer buf)
+ ;; FIXME: this would be a good place to enable (erlang-mode)
+ ;; to get some nice syntax highlighting in the
+ ;; coverage report, but it doesn't play well with
+ ;; flymake. Leave it off for now.
+ (view-buffer buf))))
+ (delete-file tmp-filename)))
+
+(defun erlang-eunit-compile-and-run-current-test ()
+ "Compile the source and test files and run the current EUnit test.
-;;; Compile source and EUnit test file and finally run EUnit tests for
-;;; the current module
-(defun erlang-eunit-compile-and-run-tests ()
- "Compile the source and test files and run the EUnit test suite.
+With prefix arg, compiles for debug and runs tests with the verbose flag set."
+ (interactive)
+ (let ((module-name (erlang-add-quotes-if-needed
+ (erlang-eunit-module-name buffer-file-name)))
+ (test-name (erlang-eunit-current-test)))
+ (erlang-eunit-compile-and-test
+ 'erlang-eunit-run-test (list module-name test-name))))
+
+(defun erlang-eunit-compile-and-run-module-tests ()
+ "Compile the source and test files and run all EUnit tests in the module.
With prefix arg, compiles for debug and runs tests with the verbose flag set."
(interactive)
+ (let ((module-name (erlang-add-quotes-if-needed
+ (erlang-eunit-source-module-name buffer-file-name))))
+ (erlang-eunit-compile-and-test
+ 'erlang-eunit-run-module-tests (list module-name))))
+
+;;; Compile source and EUnit test file and finally run EUnit tests for
+;;; the current module
+(defun erlang-eunit-compile-and-test (test-fun test-args &optional under-cover)
+ "Compile the source and test files and run the EUnit test suite.
+
+If under-cover is set to t, the module under test is compiled for
+code coverage analysis. If under-cover is left out or not set,
+coverage analysis is disabled. The result of the code coverage
+is both printed to the erlang shell (the number of covered vs
+uncovered functions in a module) and written to a buffer called
+*<module> coverage* (which shows the source code for the module
+and the number of times each line is covered).
+With prefix arg, compiles for debug and runs tests with the verbose flag set."
+ (erlang-eunit-record-recent-compile under-cover)
(let ((src-filename (erlang-eunit-src-filename buffer-file-name))
(test-filename (erlang-eunit-test-filename buffer-file-name)))
@@ -154,7 +312,7 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set."
;; below, is to ask the question about saving buffers only once,
;; instead of possibly several: one for each file to compile,
;; for instance for both x.erl and x_tests.erl.
- (save-some-buffers)
+ (save-some-buffers erlang-eunit-autosave)
(flet ((save-some-buffers (&optional any) nil))
;; Compilation of the source file is mandatory (the file must
@@ -162,23 +320,56 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set."
;; test file on the other hand, is optional, since eunit tests may
;; be placed in the source file instead. Any compilation error
;; will prevent the subsequent steps to be run (hence the `and')
- (and (erlang-eunit-compile-file src-filename)
+ (and (erlang-eunit-compile-file src-filename under-cover)
(if (file-readable-p test-filename)
(erlang-eunit-compile-file test-filename)
t)
- (erlang-eunit-run-tests)))))
+ (apply test-fun test-args)
+ (if under-cover
+ (save-excursion
+ (set-buffer (find-file-noselect src-filename))
+ (erlang-eunit-analyze-coverage)))))))
+
+(defun erlang-eunit-compile-and-run-module-tests-under-cover ()
+ "Compile the source and test files and run the EUnit test suite and measure
+code coverage.
+
+With prefix arg, compiles for debug and runs tests with the verbose flag set."
+ (interactive)
+ (let ((module-name (erlang-add-quotes-if-needed
+ (erlang-eunit-source-module-name buffer-file-name))))
+ (erlang-eunit-compile-and-test
+ 'erlang-eunit-run-module-tests (list module-name) t)))
-(defun erlang-eunit-compile-file (file-path)
+(defun erlang-eunit-compile-file (file-path &optional under-cover)
(if (file-readable-p file-path)
(save-excursion
- (set-buffer (find-file-noselect file-path))
- (erlang-compile)
- (erlang-eunit-last-compilation-successful-p))
+ (set-buffer (find-file-noselect file-path))
+ ;; In order to run a code coverage analysis on a
+ ;; module, we have two options:
+ ;;
+ ;; * either compile the module with cover:compile instead of the
+ ;; regular compiler
+ ;;
+ ;; * or first compile the module with the regular compiler (but
+ ;; *with* debug_info) and then compile it for coverage
+ ;; analysis using cover:compile_beam.
+ ;;
+ ;; We could accomplish the first by changing the
+ ;; erlang-compile-erlang-function to cover:compile, but there's
+ ;; a risk that that's used for other purposes. Therefore, a
+ ;; safer alternative (although with more steps) is to add
+ ;; debug_info to the list of compiler options and go for the
+ ;; second alternative.
+ (if under-cover
+ (erlang-eunit-cover-compile)
+ (erlang-compile))
+ (erlang-eunit-last-compilation-successful-p))
(let ((msg (format "Could not read %s" file-path)))
- (erlang-eunit-inferior-erlang-send-command
+ (erlang-eunit-inferior-erlang-send-command
(format "%% WARNING: %s" msg))
(error msg))))
-
+
(defun erlang-eunit-last-compilation-successful-p ()
(save-excursion
(set-buffer inferior-erlang-buffer)
@@ -187,7 +378,7 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set."
(lambda (re) (let ((continue t)
(result t))
(while continue ; ignore warnings, stop at errors
- (if (re-search-forward re (point-max) t)
+ (if (re-search-forward re (point-max) t)
(if (erlang-eunit-is-compilation-warning)
t
(setq result nil)
@@ -198,7 +389,7 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set."
(mapcar (lambda (e) (car e)) erlang-error-regexp-alist))))
(defun erlang-eunit-is-compilation-warning ()
- (erlang-eunit-string-match-p
+ (erlang-eunit-string-match-p
"[0-9]+: Warning:"
(buffer-substring (line-beginning-position) (line-end-position))))
@@ -224,22 +415,22 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set."
;;; Key bindings
;;;====================================================================
-(defvar erlang-eunit-toggle-src-and-test-file-other-window-key "\C-c\C-et"
- "*Key to which the `erlang-eunit-toggle-src-and-test-file-other-window'
-function will be bound.")
-(defvar erlang-eunit-compile-and-run-tests-key "\C-c\C-ek"
- "*Key to which the `erlang-eunit-compile-and-run-tests'
-function will be bound.")
+(defconst erlang-eunit-key-bindings
+ '(("\C-c\C-et" erlang-eunit-toggle-src-and-test-file-other-window)
+ ("\C-c\C-ek" erlang-eunit-compile-and-run-module-tests)
+ ("\C-c\C-ej" erlang-eunit-compile-and-run-current-test)
+ ("\C-c\C-el" erlang-eunit-compile-and-run-recent)
+ ("\C-c\C-ec" erlang-eunit-compile-and-run-module-tests-under-cover)
+ ("\C-c\C-ev" erlang-eunit-cover-compile)
+ ("\C-c\C-ea" erlang-eunit-analyze-coverage)))
(defun erlang-eunit-add-key-bindings ()
- (erlang-eunit-ensure-keymap-for-key
- erlang-eunit-toggle-src-and-test-file-other-window-key)
- (local-set-key erlang-eunit-toggle-src-and-test-file-other-window-key
- 'erlang-eunit-toggle-src-and-test-file-other-window)
- (erlang-eunit-ensure-keymap-for-key
- erlang-eunit-compile-and-run-tests-key)
- (local-set-key erlang-eunit-compile-and-run-tests-key
- 'erlang-eunit-compile-and-run-tests))
+ (dolist (binding erlang-eunit-key-bindings)
+ (erlang-eunit-bind-key (car binding) (cadr binding))))
+
+(defun erlang-eunit-bind-key (key function)
+ (erlang-eunit-ensure-keymap-for-key key)
+ (local-set-key key function))
(defun erlang-eunit-ensure-keymap-for-key (key-seq)
(let ((prefix-keys (butlast (append key-seq nil)))
diff --git a/lib/tools/emacs/erlang-flymake.el b/lib/tools/emacs/erlang-flymake.el
new file mode 100644
index 0000000000..bc368e9454
--- /dev/null
+++ b/lib/tools/emacs/erlang-flymake.el
@@ -0,0 +1,102 @@
+;; erlang-flymake.el
+;;
+;; Syntax check erlang source code on the fly (integrates with flymake).
+;;
+;; Start using flymake with erlang by putting the following somewhere
+;; in your .emacs file:
+;;
+;; (require 'erlang-flymake)
+;;
+;; Flymake is rather eager and does its syntax checks frequently by
+;; default and if you are bothered by this, you might want to put the
+;; following in your .emacs as well:
+;;
+;; (erlang-flymake-only-on-save)
+;;
+;; There are a couple of variables which control the compilation options:
+;; * erlang-flymake-get-code-path-dirs-function
+;; * erlang-flymake-get-include-dirs-function
+;; * erlang-flymake-extra-opts
+;;
+;; This code is inspired by http://www.emacswiki.org/emacs/FlymakeErlang.
+
+(require 'flymake)
+(eval-when-compile
+ (require 'cl))
+
+(defvar erlang-flymake-command
+ "erlc"
+ "The command that will be used to perform the syntax check")
+
+(defvar erlang-flymake-get-code-path-dirs-function
+ 'erlang-flymake-get-code-path-dirs
+ "Return a list of ebin directories to add to the code path.")
+
+(defvar erlang-flymake-get-include-dirs-function
+ 'erlang-flymake-get-include-dirs
+ "Return a list of include directories to add to the compiler options.")
+
+(defvar erlang-flymake-extra-opts
+ (list "+warn_obsolete_guard"
+ "+warn_unused_import"
+ "+warn_shadow_vars"
+ "+warn_export_vars"
+ "+strong_validation"
+ "+report")
+ "A list of options that will be passed to the compiler")
+
+(defun erlang-flymake-only-on-save ()
+ "Trigger flymake only when the buffer is saved (disables syntax
+check on newline and when there are no changes)."
+ (interactive)
+ ;; There doesn't seem to be a way of disabling this; set to the
+ ;; largest int available as a workaround (most-positive-fixnum
+ ;; equates to 8.5 years on my machine, so it ought to be enough ;-) )
+ (setq flymake-no-changes-timeout most-positive-fixnum)
+ (setq flymake-start-syntax-check-on-newline nil))
+
+
+(defun erlang-flymake-get-code-path-dirs ()
+ (list (concat (erlang-flymake-get-app-dir) "ebin")))
+
+(defun erlang-flymake-get-include-dirs ()
+ (list (concat (erlang-flymake-get-app-dir) "include")))
+
+(defun erlang-flymake-get-app-dir ()
+ (let ((src-path (file-name-directory (buffer-file-name))))
+ (file-name-directory (directory-file-name src-path))))
+
+(defun erlang-flymake-init ()
+ (let* ((temp-file
+ (flet ((flymake-get-temp-dir () (erlang-flymake-temp-dir)))
+ (flymake-init-create-temp-buffer-copy
+ 'flymake-create-temp-with-folder-structure)))
+ (code-dir-opts
+ (erlang-flymake-flatten
+ (mapcar (lambda (dir) (list "-pa" dir))
+ (funcall erlang-flymake-get-code-path-dirs-function))))
+ (inc-dir-opts
+ (erlang-flymake-flatten
+ (mapcar (lambda (dir) (list "-I" dir))
+ (funcall erlang-flymake-get-include-dirs-function))))
+ (compile-opts
+ (append inc-dir-opts
+ code-dir-opts
+ erlang-flymake-extra-opts)))
+ (list erlang-flymake-command (append compile-opts (list temp-file)))))
+
+(defun erlang-flymake-temp-dir ()
+ ;; Squeeze the user's name in there in order to make sure that files
+ ;; for two users who are working on the same computer (like a linux
+ ;; box) don't collide
+ (format "%s/flymake-%s" temporary-file-directory user-login-name))
+
+(defun erlang-flymake-flatten (list)
+ (apply #'append list))
+
+(add-to-list 'flymake-allowed-file-name-masks
+ '("\\.erl\\'" erlang-flymake-init))
+(add-hook 'erlang-mode-hook 'flymake-mode)
+
+(provide 'erlang-flymake)
+;; erlang-flymake ends here
diff --git a/lib/tools/emacs/erlang-start.el b/lib/tools/emacs/erlang-start.el
index 542e81f24c..bbcea3e46a 100644
--- a/lib/tools/emacs/erlang-start.el
+++ b/lib/tools/emacs/erlang-start.el
@@ -90,6 +90,11 @@
(or (assoc (car b) auto-mode-alist)
(setq auto-mode-alist (cons b auto-mode-alist))))
+;;
+;; Associate files using interpreter "escript" with Erlang mode.
+;;
+
+(add-to-list 'interpreter-mode-alist (cons "escript" 'erlang-mode))
;;
;; Ignore files ending in ".jam", ".vee", and ".beam" when performing
diff --git a/lib/tools/emacs/erlang.el b/lib/tools/emacs/erlang.el
index 0132587d28..91acfdf2b6 100644
--- a/lib/tools/emacs/erlang.el
+++ b/lib/tools/emacs/erlang.el
@@ -659,24 +659,30 @@ resulting regexp is surrounded by \\_< and \\_>."
(eval-and-compile
(defconst erlang-guards-regexp (erlang-regexp-opt erlang-guards 'symbols)))
-
(eval-and-compile
(defvar erlang-predefined-types
'("any"
"arity"
+ "boolean"
"byte"
"char"
"cons"
"deep_string"
+ "iolist"
"maybe_improper_list"
+ "module"
"mfa"
"nil"
+ "neg_integer"
"none"
"non_neg_integer"
"nonempty_list"
"nonempty_improper_list"
"nonempty_maybe_improper_list"
+ "no_return"
+ "pos_integer"
"string"
+ "term"
"timeout")
"Erlang type specs types"))
@@ -885,15 +891,54 @@ files written in other languages than Erlang.")
If nil, the inferior shell replaces the window. This is the traditional
behaviour.")
-(defvar erlang-mode-map nil
+(defconst inferior-erlang-use-cmm (boundp 'minor-mode-overriding-map-alist)
+ "Non-nil means use `compilation-minor-mode' in Erlang shell.")
+
+(defvar erlang-mode-map
+ (let ((map (make-sparse-keymap)))
+ (unless (boundp 'indent-line-function)
+ (define-key map "\t" 'erlang-indent-command))
+ (define-key map ";" 'erlang-electric-semicolon)
+ (define-key map "," 'erlang-electric-comma)
+ (define-key map "<" 'erlang-electric-lt)
+ (define-key map ">" 'erlang-electric-gt)
+ (define-key map "\C-m" 'erlang-electric-newline)
+ (if (not (boundp 'delete-key-deletes-forward))
+ (define-key map "\177" 'backward-delete-char-untabify)
+ (define-key map [(backspace)] 'backward-delete-char-untabify))
+ ;;(unless (boundp 'fill-paragraph-function)
+ (define-key map "\M-q" 'erlang-fill-paragraph)
+ (unless (boundp 'beginning-of-defun-function)
+ (define-key map "\M-\C-a" 'erlang-beginning-of-function)
+ (define-key map "\M-\C-e" 'erlang-end-of-function)
+ (define-key map '(meta control h) 'erlang-mark-function)) ; Xemacs
+ (define-key map "\M-\t" 'erlang-complete-tag)
+ (define-key map "\C-c\M-\t" 'tempo-complete-tag)
+ (define-key map "\M-+" 'erlang-find-next-tag)
+ (define-key map "\C-c\M-a" 'erlang-beginning-of-clause)
+ (define-key map "\C-c\M-b" 'tempo-backward-mark)
+ (define-key map "\C-c\M-e" 'erlang-end-of-clause)
+ (define-key map "\C-c\M-f" 'tempo-forward-mark)
+ (define-key map "\C-c\M-h" 'erlang-mark-clause)
+ (define-key map "\C-c\C-c" 'comment-region)
+ (define-key map "\C-c\C-j" 'erlang-generate-new-clause)
+ (define-key map "\C-c\C-k" 'erlang-compile)
+ (define-key map "\C-c\C-l" 'erlang-compile-display)
+ (define-key map "\C-c\C-s" 'erlang-show-syntactic-information)
+ (define-key map "\C-c\C-q" 'erlang-indent-function)
+ (define-key map "\C-c\C-u" 'erlang-uncomment-region)
+ (define-key map "\C-c\C-y" 'erlang-clone-arguments)
+ (define-key map "\C-c\C-a" 'erlang-align-arrows)
+ (define-key map "\C-c\C-z" 'erlang-shell-display)
+ (unless inferior-erlang-use-cmm
+ (define-key map "\C-x`" 'erlang-next-error))
+ map)
"*Keymap used in Erlang mode.")
(defvar erlang-mode-abbrev-table nil
"Abbrev table in use in Erlang-mode buffers.")
(defvar erlang-mode-syntax-table nil
"Syntax table in use in Erlang-mode buffers.")
-(defconst inferior-erlang-use-cmm (boundp 'minor-mode-overriding-map-alist)
- "Non-nil means use `compilation-minor-mode' in Erlang shell.")
(defvar erlang-skel-file "erlang-skels"
@@ -1247,7 +1292,7 @@ Other commands:
(setq major-mode 'erlang-mode)
(setq mode-name "Erlang")
(erlang-syntax-table-init)
- (erlang-keymap-init)
+ (use-local-map erlang-mode-map)
(erlang-electric-init)
(erlang-menu-init)
(erlang-mode-variables)
@@ -1302,53 +1347,6 @@ Other commands:
(set-syntax-table erlang-mode-syntax-table))
-(defun erlang-keymap-init ()
- (if erlang-mode-map
- nil
- (setq erlang-mode-map (make-sparse-keymap))
- (erlang-mode-commands erlang-mode-map))
- (use-local-map erlang-mode-map))
-
-
-(defun erlang-mode-commands (map)
- (unless (boundp 'indent-line-function)
- (define-key map "\t" 'erlang-indent-command))
- (define-key map ";" 'erlang-electric-semicolon)
- (define-key map "," 'erlang-electric-comma)
- (define-key map "<" 'erlang-electric-lt)
- (define-key map ">" 'erlang-electric-gt)
- (define-key map "\C-m" 'erlang-electric-newline)
- (if (not (boundp 'delete-key-deletes-forward))
- (define-key map "\177" 'backward-delete-char-untabify)
- (define-key map [(backspace)] 'backward-delete-char-untabify))
- ;;(unless (boundp 'fill-paragraph-function)
- (define-key map "\M-q" 'erlang-fill-paragraph)
- (unless (boundp 'beginning-of-defun-function)
- (define-key map "\M-\C-a" 'erlang-beginning-of-function)
- (define-key map "\M-\C-e" 'erlang-end-of-function)
- (define-key map '(meta control h) 'erlang-mark-function)) ; Xemacs
- (define-key map "\M-\t" 'erlang-complete-tag)
- (define-key map "\C-c\M-\t" 'tempo-complete-tag)
- (define-key map "\M-+" 'erlang-find-next-tag)
- (define-key map "\C-c\M-a" 'erlang-beginning-of-clause)
- (define-key map "\C-c\M-b" 'tempo-backward-mark)
- (define-key map "\C-c\M-e" 'erlang-end-of-clause)
- (define-key map "\C-c\M-f" 'tempo-forward-mark)
- (define-key map "\C-c\M-h" 'erlang-mark-clause)
- (define-key map "\C-c\C-c" 'comment-region)
- (define-key map "\C-c\C-j" 'erlang-generate-new-clause)
- (define-key map "\C-c\C-k" 'erlang-compile)
- (define-key map "\C-c\C-l" 'erlang-compile-display)
- (define-key map "\C-c\C-s" 'erlang-show-syntactic-information)
- (define-key map "\C-c\C-q" 'erlang-indent-function)
- (define-key map "\C-c\C-u" 'erlang-uncomment-region)
- (define-key map "\C-c\C-y" 'erlang-clone-arguments)
- (define-key map "\C-c\C-a" 'erlang-align-arrows)
- (define-key map "\C-c\C-z" 'erlang-shell-display)
- (unless inferior-erlang-use-cmm
- (define-key map "\C-x`" 'erlang-next-error)))
-
-
(defun erlang-electric-init ()
;; Set up electric character functions to work with
;; delsel/pending-del mode. Also, set up text properties for bit
@@ -1402,7 +1400,7 @@ Other commands:
(set (make-local-variable 'imenu-prev-index-position-function)
'erlang-beginning-of-function)
(set (make-local-variable 'imenu-extract-index-name-function)
- 'erlang-get-function-name)
+ 'erlang-get-function-name-and-arity)
(set (make-local-variable 'tempo-match-finder)
"[^-a-zA-Z0-9_]\\([-a-zA-Z0-9_]*\\)\\=")
(set (make-local-variable 'beginning-of-defun-function)
@@ -2492,9 +2490,10 @@ Value is list (stack token-start token-type in-what)."
((looking-at "\\(of\\)[^_a-zA-Z0-9]")
;; Must handle separately, try X of -> catch
(if (and stack (eq (car (car stack)) 'try))
- (let ((try-column (nth 2 (car stack))))
+ (let ((try-column (nth 2 (car stack)))
+ (try-pos (nth 1 (car stack))))
(erlang-pop stack)
- (erlang-push (list 'icr token try-column) stack))))
+ (erlang-push (list 'icr try-pos try-column) stack))))
((looking-at "\\(fun\\)[^_a-zA-Z0-9]")
;; Push a new layer if we are defining a `fun'
@@ -2755,7 +2754,7 @@ Return nil if inside string, t if in a comment."
;;
;; `after' should be indented to the same level as the
;; corresponding receive.
- (cond ((looking-at "\\(after\\|catch\\|of\\)\\($\\|[^_a-zA-Z0-9]\\)")
+ (cond ((looking-at "\\(after\\|of\\)\\($\\|[^_a-zA-Z0-9]\\)")
(nth 2 stack-top))
((looking-at "when[^_a-zA-Z0-9]")
;; Handling one when part
@@ -2774,7 +2773,7 @@ Return nil if inside string, t if in a comment."
((and (eq (car stack-top) '||) (looking-at "\\(]\\|>>\\)[^_a-zA-Z0-9]"))
(nth 2 (car (cdr stack))))
;; Real indentation, where operators create extra indentation etc.
- ((memq (car stack-top) '(-> || begin try))
+ ((memq (car stack-top) '(-> || try begin))
(if (looking-at "\\(of\\)[^_a-zA-Z0-9]")
(nth 2 stack-top)
(goto-char (nth 1 stack-top))
@@ -2803,19 +2802,24 @@ Return nil if inside string, t if in a comment."
(erlang-caddr (car stack))
0))
((looking-at "catch\\($\\|[^_a-zA-Z0-9]\\)")
- (if (or (eq (car stack-top) 'try)
- (eq (car (car (cdr stack))) 'icr))
- (progn
- (if (eq (car stack-top) '->)
- (erlang-pop stack))
- (if stack
- (erlang-caddr (car stack))
- 0))
- base)) ;; old catch
+ ;; Are we in a try
+ (let ((start (if (eq (car stack-top) '->)
+ (car (cdr stack))
+ stack-top)))
+ (if (null start) nil
+ (goto-char (nth 1 start)))
+ (cond ((looking-at "try\\($\\|[^_a-zA-Z0-9]\\)")
+ (progn
+ (if (eq (car stack-top) '->)
+ (erlang-pop stack))
+ (if stack
+ (erlang-caddr (car stack))
+ 0)))
+ (t (erlang-indent-standard indent-point token base 'nil))))) ;; old catch
(t
(erlang-indent-standard indent-point token base 'nil)
))))
- ))
+ ))
((eq (car stack-top) 'when)
(goto-char (nth 1 stack-top))
(if (looking-at "when\\s *\\($\\|%\\)")
@@ -2841,27 +2845,32 @@ Return nil if inside string, t if in a comment."
(current-column)))
;; Type and Spec indentation
((eq (car stack-top) '::)
- (cond ((null erlang-argument-indent)
- ;; indent to next column.
- (+ 2 (nth 2 stack-top)))
- ((looking-at "::[^_a-zA-Z0-9]")
- (nth 2 stack-top))
- (t
- (let ((start-alternativ (if (looking-at "|") 2 0)))
- (goto-char (nth 1 stack-top))
- (- (cond ((looking-at "::\\s *\\($\\|%\\)")
- ;; Line ends with ::
- (if (eq (car (car (last stack))) 'spec)
+ (if (looking-at "}")
+ ;; Closing record definition with types
+ ;; pop stack and recurse
+ (erlang-calculate-stack-indent indent-point
+ (cons (erlang-pop stack) (cdr state)))
+ (cond ((null erlang-argument-indent)
+ ;; indent to next column.
+ (+ 2 (nth 2 stack-top)))
+ ((looking-at "::[^_a-zA-Z0-9]")
+ (nth 2 stack-top))
+ (t
+ (let ((start-alternativ (if (looking-at "|") 2 0)))
+ (goto-char (nth 1 stack-top))
+ (- (cond ((looking-at "::\\s *\\($\\|%\\)")
+ ;; Line ends with ::
+ (if (eq (car (car (last stack))) 'spec)
(+ (erlang-indent-find-preceding-expr 1)
erlang-argument-indent)
- (+ (erlang-indent-find-preceding-expr 2)
- erlang-argument-indent)))
- (t
- ;; Indent to the same column as the first
- ;; argument.
- (goto-char (+ 2 (nth 1 stack-top)))
- (skip-chars-forward " \t")
- (current-column))) start-alternativ)))))
+ (+ (erlang-indent-find-preceding-expr 2)
+ erlang-argument-indent)))
+ (t
+ ;; Indent to the same column as the first
+ ;; argument.
+ (goto-char (+ 2 (nth 1 stack-top)))
+ (skip-chars-forward " \t")
+ (current-column))) start-alternativ))))))
)))
(defun erlang-indent-standard (indent-point token base inside-parenthesis)
@@ -2933,10 +2942,16 @@ This assumes that the preceding expression is either simple
(skip-chars-backward " \t")
;; Needed to match the colon in "'foo':'bar'".
(if (not (memq (preceding-char) '(?# ?:)))
- col
- (backward-char 1)
- (forward-sexp -1)
- (current-column)))))
+ col
+ ;; Special hack to handle: (note line break)
+ ;; [#myrecord{
+ ;; foo = foo}]
+ (or
+ (ignore-errors
+ (backward-char 1)
+ (forward-sexp -1)
+ (current-column))
+ col)))))
(defun erlang-indent-parenthesis (stack-position)
(let ((previous (erlang-indent-find-preceding-expr)))
@@ -3505,6 +3520,13 @@ Normally used in conjunction with `erlang-beginning-of-clause', e.g.:
res)
(error nil)))))
+(defun erlang-get-function-name-and-arity ()
+ "Return the name and arity of the function at point, or nil.
+The return value is a string of the form \"foo/1\"."
+ (let ((name (erlang-get-function-name))
+ (arity (erlang-get-function-arity)))
+ (and name arity (format "%s/%d" name arity))))
+
(defun erlang-get-function-arguments ()
"Return arguments of current function, or nil."
(if (not (looking-at (eval-when-compile
@@ -4901,9 +4923,14 @@ a prompt. When nil, we will wait forever, or until \\[keyboard-quit].")
(defvar inferior-erlang-buffer nil
"Buffer of last invoked inferior Erlang, or nil.")
+;; Enable uniquifying Erlang shell buffers based on directory name.
+(eval-after-load "uniquify"
+ '(add-to-list 'uniquify-list-buffers-directory-modes 'erlang-shell-mode))
+
;;;###autoload
-(defun inferior-erlang ()
+(defun inferior-erlang (&optional command)
"Run an inferior Erlang.
+With a prefix argument, prompt for the command to start Erlang with.
This is just like running Erlang in a normal shell, except that
an Emacs buffer is used for input and output.
@@ -4917,17 +4944,37 @@ Entry to this mode calls the functions in the variables
The following commands imitate the usual Unix interrupt and
editing control characters:
\\{erlang-shell-mode-map}"
- (interactive)
+ (interactive
+ (when current-prefix-arg
+ (list (if (fboundp 'read-shell-command)
+ ;; `read-shell-command' is a new function in Emacs 23.
+ (read-shell-command "Erlang command: ")
+ (read-string "Erlang command: ")))))
(require 'comint)
- (let ((opts inferior-erlang-machine-options))
- (cond ((eq inferior-erlang-shell-type 'oldshell)
- (setq opts (cons "-oldshell" opts)))
- ((eq inferior-erlang-shell-type 'newshell)
- (setq opts (append '("-newshell" "-env" "TERM" "vt100") opts))))
- (setq inferior-erlang-buffer
- (apply 'make-comint
- inferior-erlang-process-name inferior-erlang-machine
- nil opts)))
+ (let (cmd opts)
+ (if command
+ (setq cmd "sh"
+ opts (list "-c" command))
+ (setq cmd inferior-erlang-machine
+ opts inferior-erlang-machine-options)
+ (cond ((eq inferior-erlang-shell-type 'oldshell)
+ (setq opts (cons "-oldshell" opts)))
+ ((eq inferior-erlang-shell-type 'newshell)
+ (setq opts (append '("-newshell" "-env" "TERM" "vt100") opts)))))
+
+ ;; Using create-file-buffer and list-buffers-directory in this way
+ ;; makes uniquify give each buffer a unique name based on the
+ ;; directory.
+ (let ((fake-file-name (expand-file-name inferior-erlang-buffer-name default-directory)))
+ (setq inferior-erlang-buffer (create-file-buffer fake-file-name))
+ (apply 'make-comint-in-buffer
+ inferior-erlang-process-name
+ inferior-erlang-buffer
+ cmd
+ nil opts)
+ (with-current-buffer inferior-erlang-buffer
+ (setq list-buffers-directory fake-file-name))))
+
(setq inferior-erlang-process
(get-buffer-process inferior-erlang-buffer))
(if (> 21 erlang-emacs-major-version) ; funcalls to avoid compiler warnings
@@ -4940,10 +4987,6 @@ editing control characters:
(if (and (not (eq system-type 'windows-nt))
(eq inferior-erlang-shell-type 'newshell))
(setq comint-process-echoes t))
- ;; `rename-buffer' takes only one argument in Emacs 18.
- (condition-case nil
- (rename-buffer inferior-erlang-buffer-name t)
- (error (rename-buffer inferior-erlang-buffer-name)))
(erlang-shell-mode))
diff --git a/lib/tools/emacs/test.erl.indented b/lib/tools/emacs/test.erl.indented
index d0ea4c29cf..2948ccf1b5 100644
--- a/lib/tools/emacs/test.erl.indented
+++ b/lib/tools/emacs/test.erl.indented
@@ -93,11 +93,27 @@
-type t13() :: maybe_improper_list(integer(), t11()).
-type t14() :: [erl_scan:foo() |
%% Should be highlighted
- non_neg_integer() | nonempty_list() |
+ term() |
+ bool() |
+ byte() |
+ char() |
+ non_neg_integer() | nonempty_list() |
+ pos_integer() |
+ neg_integer() |
+ number() |
+ list() |
nonempty_improper_list() | nonempty_maybe_improper_list() |
+ maybe_improper_list() | string() | iolist() | byte() |
+ module() |
+ mfa() |
+ node() |
+ timeout() |
+ no_return() |
%% Should not be highlighted
nonempty_() | nonlist() |
- erl_scan:bar(34, 92) | t13() | m:f(integer() | <<_:_*16>>)].
+ erl_scan:bar(34, 92) | t13() | m:f(integer() | <<_:_*16>>)].
+
+
-type t15() :: {binary(),<<>>,<<_:34>>,<<_:_*42>>,
<<_:3,_:_*14>>,<<>>} | [<<>>|<<_:34>>|<<_:16>>|
<<_:3,_:_*1472>>|<<_:19,_:_*14>>| <<_:34>>|
@@ -172,6 +188,9 @@
f19 = 3 :: integer()|undefined,
f5 = 3 :: undefined|integer()}).
+-record(state, {
+ sequence_number = 1 :: integer()
+ }).
highlighting(X) % Function definitions should be highlighted
@@ -493,7 +512,9 @@ indent_try_catch() ->
file:close(Xfile)
end;
indent_try_catch() ->
- try foo(bar) of
+ try
+ foo(bar)
+ of
X when true andalso
kalle ->
io:format(stdout, "Parsing file ~s, ",
@@ -551,14 +572,57 @@ indent_catch() ->
C = catch B +
float(43.1),
- case catch (X) of
+ case catch foo(X) of
+ A ->
+ B
+ end,
+
+ case
+ catch foo(X)
+ of
A ->
B
end,
+
+ case
+ foo(X)
+ of
+ A ->
+ catch B,
+ X
+ end,
+
try sune of
_ -> foo
catch _:_ -> baf
- end.
+ end,
+
+ try
+ sune
+ of
+ _ ->
+ X = 5,
+ (catch foo(X)),
+ X + 10
+ catch _:_ -> baf
+ end,
+
+ try
+ (catch sune)
+ of
+ _ ->
+ catch foo() %% BUGBUG can't handle catch inside try without parentheses
+ catch _:_ ->
+ baf
+ end,
+
+ try
+ (catch exit())
+ catch
+ _ ->
+ catch baf()
+ end,
+ ok.
indent_binary() ->
X = lists:foldr(fun(M) ->
@@ -588,3 +652,8 @@ indent_comprehensions() ->
true = (X rem 2)
>>,
ok.
+
+%% This causes an error in earlier erlang-mode versions.
+foo() ->
+ [#foo{
+ foo = foo}].
diff --git a/lib/tools/emacs/test.erl.orig b/lib/tools/emacs/test.erl.orig
index 70e97a2e91..1221c5655e 100644
--- a/lib/tools/emacs/test.erl.orig
+++ b/lib/tools/emacs/test.erl.orig
@@ -93,11 +93,27 @@
-type t13() :: maybe_improper_list(integer(), t11()).
-type t14() :: [erl_scan:foo() |
%% Should be highlighted
- non_neg_integer() | nonempty_list() |
+ term() |
+ bool() |
+ byte() |
+ char() |
+ non_neg_integer() | nonempty_list() |
+ pos_integer() |
+ neg_integer() |
+ number() |
+ list() |
nonempty_improper_list() | nonempty_maybe_improper_list() |
+ maybe_improper_list() | string() | iolist() | byte() |
+ module() |
+ mfa() |
+ node() |
+ timeout() |
+ no_return() |
%% Should not be highlighted
nonempty_() | nonlist() |
-erl_scan:bar(34, 92) | t13() | m:f(integer() | <<_:_*16>>)].
+ erl_scan:bar(34, 92) | t13() | m:f(integer() | <<_:_*16>>)].
+
+
-type t15() :: {binary(),<<>>,<<_:34>>,<<_:_*42>>,
<<_:3,_:_*14>>,<<>>} | [<<>>|<<_:34>>|<<_:16>>|
<<_:3,_:_*1472>>|<<_:19,_:_*14>>| <<_:34>>|
@@ -172,6 +188,9 @@ f18 :: 1 | 2 | 'undefined',
f19 = 3 :: integer()|undefined,
f5 = 3 :: undefined|integer()}).
+-record(state, {
+ sequence_number = 1 :: integer()
+ }).
highlighting(X) % Function definitions should be highlighted
@@ -493,7 +512,9 @@ indent_try_catch() ->
file:close(Xfile)
end;
indent_try_catch() ->
- try foo(bar) of
+ try
+ foo(bar)
+ of
X when true andalso
kalle ->
io:format(stdout, "Parsing file ~s, ",
@@ -551,14 +572,57 @@ indent_catch() ->
C = catch B +
float(43.1),
- case catch (X) of
+ case catch foo(X) of
A ->
B
end,
+
+ case
+ catch foo(X)
+ of
+ A ->
+ B
+ end,
+
+ case
+ foo(X)
+ of
+ A ->
+ catch B,
+ X
+ end,
+
try sune of
- _ -> foo
- catch _:_ -> baf
- end.
+ _ -> foo
+ catch _:_ -> baf
+ end,
+
+ try
+sune
+ of
+ _ ->
+ X = 5,
+ (catch foo(X)),
+ X + 10
+ catch _:_ -> baf
+ end,
+
+ try
+ (catch sune)
+ of
+ _ ->
+ catch foo() %% BUGBUG can't handle catch inside try without parentheses
+ catch _:_ ->
+ baf
+ end,
+
+ try
+(catch exit())
+ catch
+_ ->
+ catch baf()
+ end,
+ ok.
indent_binary() ->
X = lists:foldr(fun(M) ->
@@ -588,3 +652,8 @@ Binary2 = << <<X:8>> || <<X:32,_:32>> <= <<0:512>>,
true = (X rem 2)
>>,
ok.
+
+%% This causes an error in earlier erlang-mode versions.
+foo() ->
+[#foo{
+foo = foo}].
diff --git a/lib/tools/src/xref_base.erl b/lib/tools/src/xref_base.erl
index d0dbf4a2b4..1656899e8f 100644
--- a/lib/tools/src/xref_base.erl
+++ b/lib/tools/src/xref_base.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2000-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2000-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -29,7 +29,7 @@
add_release/2, add_release/3,
get_library_path/1, set_library_path/2, set_library_path/3,
set_up/1, set_up/2,
- q/2, q/3, info/1, info/2, info/3, update/1, update/2,
+ q/2, q/3, info/1, info/2, info/3, update/1, update/2,
forget/1, forget/2, variables/1, variables/2,
analyze/2, analyze/3, analysis/1,
get_default/2, set_default/3,
@@ -38,14 +38,14 @@
-export([format_error/1]).
%% The following functions are exported for testing purposes only:
--export([do_add_module/4, do_add_application/2, do_add_release/2,
+-export([do_add_module/4, do_add_application/2, do_add_release/2,
do_remove_module/2]).
--import(lists,
- [filter/2, flatten/1, foldl/3, keysearch/3, map/2, mapfoldl/3,
- member/2, reverse/1, sort/1, usort/1]).
+-import(lists,
+ [filter/2, flatten/1, foldl/3, foreach/2, keysearch/3, map/2,
+ mapfoldl/3, member/2, reverse/1, sort/1, usort/1]).
--import(sofs,
+-import(sofs,
[constant_function/2, converse/1, difference/2, domain/1,
empty_set/0, family/1, family_difference/2, intersection/2,
family_projection/2, family_to_relation/1, family_union/1,
@@ -103,12 +103,12 @@ delete(State) ->
ok
end
end,
- map(Fun, dict:to_list(State#xref.variables)),
+ foreach(Fun, dict:to_list(State#xref.variables)),
ok.
add_directory(State, Dir) ->
add_directory(State, Dir, []).
-
+
%% -> {ok, Modules, NewState} | Error
add_directory(State, Dir, Options) ->
ValOptions = option_values([builtins, recurse, verbose, warnings], State),
@@ -277,7 +277,7 @@ q(S, Q, Options) when is_atom(Q) ->
q(S, atom_to_list(Q), Options);
q(S, Q, Options) ->
case xref_utils:is_string(Q, 1) of
- true ->
+ true ->
case set_up(S, Options) of
{ok, S1} ->
case xref_compiler:compile(Q, S1#xref.variables) of
@@ -336,7 +336,7 @@ forget(State, Variable) when is_atom(Variable) ->
forget(State, Variables) ->
Vars = State#xref.variables,
do_forget(Variables, Vars, Variables, State).
-
+
variables(State) ->
variables(State, [user]).
@@ -350,9 +350,9 @@ variables(State, Options) ->
{ok, NewState} ->
{U, P} = do_variables(NewState),
R1 = if User -> [{user, U}]; true -> [] end,
- R = if
- Predef -> [{predefined,P} | R1];
- true -> R1
+ R = if
+ Predef -> [{predefined,P} | R1];
+ true -> R1
end,
{{ok, R}, NewState};
Error ->
@@ -368,7 +368,7 @@ analyze(State, Analysis) ->
%% -> {{ok, Answer}, NewState} | {Error, NewState}
analyze(State, Analysis, Options) ->
case analysis(Analysis, State#xref.mode) of
- P when is_list(P) ->
+ P when is_list(P) ->
q(State, P, Options);
error ->
R = case analysis(Analysis, functions) of
@@ -461,7 +461,7 @@ get_default(State, Option) ->
%% -> [{Option, Value}]
get_default(State) ->
- Fun = fun(O) -> V = current_default(State, O), {O, V} end,
+ Fun = fun(O) -> V = current_default(State, O), {O, V} end,
map(Fun, [builtins, recurse, verbose, warnings]).
%% -> {ok, NewState} -> Error
@@ -478,7 +478,7 @@ set_default(State, Options) ->
format_error({error, Module, Error}) ->
Module:format_error(Error);
format_error({invalid_options, Options}) ->
- io_lib:format("Unknown option(s) or invalid option value(s): ~p~n",
+ io_lib:format("Unknown option(s) or invalid option value(s): ~p~n",
[Options]);
format_error({invalid_filename, Term}) ->
io_lib:format("A file name (a string) was expected: ~p~n", [Term]);
@@ -540,7 +540,7 @@ updated_modules(State) ->
case xref_utils:file_info(File) of
{ok, {_, file, readable, MTime}} when MTime =/= RTime ->
[{M,File} | L];
- _Else ->
+ _Else ->
L
end
end,
@@ -591,7 +591,7 @@ do_add_release(Dir, RelName, OB, OV, OW, State) ->
case xref_utils:release_directory(Dir, true, "ebin") of
{ok, ReleaseDirName, ApplDir, Dirs} ->
ApplDirs = xref_utils:select_last_application_version(Dirs),
- Release = case RelName of
+ Release = case RelName of
[[]] -> ReleaseDirName;
[Name] -> Name
end,
@@ -615,7 +615,7 @@ do_add_release(S, XRel) ->
end.
add_rel_appls([ApplDir | ApplDirs], Release, OB, OV, OW, State) ->
- {ok, _AppName, NewState} =
+ {ok, _AppName, NewState} =
add_appldir(ApplDir, Release, [[]], OB, OV, OW, State),
add_rel_appls(ApplDirs, Release, OB, OV, OW, NewState);
add_rel_appls([], [Release], _OB, _OV, _OW, NewState) ->
@@ -637,10 +637,10 @@ add_appldir(ApplDir, Release, Name, OB, OV, OW, OldState) ->
[[]] -> AppName0;
[N] -> N
end,
- AppInfo = #xref_app{name = AppName, rel_name = Release,
+ AppInfo = #xref_app{name = AppName, rel_name = Release,
vsn = Vsn, dir = Dir},
State1 = do_add_application(OldState, AppInfo),
- {ok, _Modules, NewState} =
+ {ok, _Modules, NewState} =
do_add_directory(Dir, [AppName], OB, false, OV, OW, State1),
{ok, AppName, NewState}.
@@ -662,7 +662,7 @@ do_add_directory(Dir, AppName, Bui, Rec, Ver, War, State) ->
ok = is_filename(Dir),
{FileNames, Errors, Jams, Unreadable} =
xref_utils:scan_directory(Dir, Rec, [?Suffix], [".jam"]),
- warnings(War, jam, Jams),
+ warnings(War, jam, Jams),
warnings(War, unreadable, Unreadable),
case Errors of
[] ->
@@ -683,7 +683,7 @@ do_add_a_module(File, AppName, Builtins, Verbose, Warnings, State) ->
false ->
throw_error({invalid_filename, File});
Splitname ->
- do_add_module(Splitname, AppName, Builtins, Verbose,
+ do_add_module(Splitname, AppName, Builtins, Verbose,
Warnings, State)
end.
@@ -691,7 +691,7 @@ do_add_a_module(File, AppName, Builtins, Verbose, Warnings, State) ->
%% Options: verbose, warnings, builtins
do_add_module({Dir, Basename}, AppName, Builtins, Verbose, Warnings, State) ->
File = filename:join(Dir, Basename),
- {ok, M, Bad, NewState} =
+ {ok, M, Bad, NewState} =
do_add_module1(Dir, File, AppName, Builtins, Verbose, Warnings, State),
filter(fun({Tag,B}) -> warnings(Warnings, Tag, [[File,B]]) end, Bad),
{ok, M, NewState}.
@@ -723,7 +723,7 @@ do_add_module1(Dir, File, AppName, Builtins, Verbose, Warnings, State) ->
{ok, {_, _, _, Time}} -> Time;
Error -> throw(Error)
end,
- XMod = #xref_mod{name = M, app_name = AppName, dir = Dir,
+ XMod = #xref_mod{name = M, app_name = AppName, dir = Dir,
mtime = T, builtins = Builtins,
no_unresolved = NoUnresCalls},
do_add_module(State, XMod, UnresCalls, Data);
@@ -736,13 +736,13 @@ abst(File, Builtins, Mode) when Mode =:= functions ->
case beam_lib:chunks(File, [abstract_code, exports, attributes]) of
{ok, {M,[{abstract_code,NoA},_X,_A]}} when NoA =:= no_abstract_code ->
{ok, M, NoA};
- {ok, {M, [{abstract_code, {abstract_v1, Forms}},
+ {ok, {M, [{abstract_code, {abstract_v1, Forms}},
{exports,X0}, {attributes,A}]}} ->
%% R7.
X = xref_utils:fa_to_mfa(X0, M),
D = deprecated(A, X, M),
xref_reader:module(M, Forms, Builtins, X, D);
- {ok, {M, [{abstract_code, {abstract_v2, Forms}},
+ {ok, {M, [{abstract_code, {abstract_v2, Forms}},
{exports,X0}, {attributes,A}]}} ->
%% R8-R9B.
X = xref_utils:fa_to_mfa(X0, M),
@@ -769,8 +769,8 @@ abst(File, Builtins, Mode) when Mode =:= modules ->
true ->
I0;
false ->
- Fun = fun({M,F,A}) ->
- not xref_utils:is_builtin(M, F, A)
+ Fun = fun({M,F,A}) ->
+ not xref_utils:is_builtin(M, F, A)
end,
filter(Fun, I0)
end,
@@ -790,7 +790,7 @@ mfa_exports(X0, Attributes, M) ->
xref_utils:fa_to_mfa(X1, M).
adjust_arity(F, A) ->
- case xref_utils:is_static_function(F, A) of
+ case xref_utils:is_static_function(F, A) of
true -> A;
false -> A - 1
end.
@@ -885,7 +885,7 @@ do_add_module(S, M, XMod, Unres0, Data) when S#xref.mode =:= functions ->
Unres = domain(UnresCalls),
DefinedFuns = domain(DefAt),
- {AXC, ALC, Bad1, LPreCAt2, XPreCAt2} =
+ {AXC, ALC, Bad1, LPreCAt2, XPreCAt2} =
extra_edges(AXC1, ALC1, Bad0, DefinedFuns),
Bad = map(fun(B) -> {xref_attr, B} end, Bad1),
LPreCAt = union(LPreCAt1, LPreCAt2),
@@ -904,8 +904,8 @@ do_add_module(S, M, XMod, Unres0, Data) when S#xref.mode =:= functions ->
%% {EE, ECallAt} = inter_graph(X, L, LC, XC, LCallAt, XCallAt),
Self = self(),
- Fun = fun() -> inter_graph(Self, X, L, LC, XC, CallAt) end,
- {EE, ECallAt} =
+ Fun = fun() -> inter_graph(Self, X, L, LC, XC, CallAt) end,
+ {EE, ECallAt} =
xref_utils:subprocess(Fun, [link, {min_heap_size,100000}]),
[DefAt2,L2,X2,LCallAt2,XCallAt2,CallAt2,LC2,XC2,EE2,ECallAt2,
@@ -977,13 +977,13 @@ extra_edges(CAX, CAL, Bad0, F) ->
ALC = restriction(2, restriction(ALC0, F), F),
LPreCAt2 = restriction(CAL, ALC),
XPreCAt2 = restriction(CAX, AXC),
- Bad = Bad0 ++ to_external(difference(AXC0, AXC))
+ Bad = Bad0 ++ to_external(difference(AXC0, AXC))
++ to_external(difference(ALC0, ALC)),
{AXC, ALC, Bad, LPreCAt2, XPreCAt2}.
no_info(X, L, LC, XC, EE, Unres, NoCalls, NoUnresCalls) ->
NoUnres = no_elements(Unres),
- [{no_calls, {NoCalls-NoUnresCalls, NoUnresCalls}},
+ [{no_calls, {NoCalls-NoUnresCalls, NoUnresCalls}},
{no_function_calls, {no_elements(LC), no_elements(XC)-NoUnres, NoUnres}},
{no_functions, {no_elements(L), no_elements(X)}},
%% Note: this is overwritten in do_set_up():
@@ -1011,10 +1011,10 @@ inter_graph(X, L, LC, XC, CallAt) ->
Es = union(LEs, XEs),
E1 = to_external(restriction(difference(LC, LEs), XL)),
- R0 = xref_utils:xset(reachable(E1, G, []),
+ R0 = xref_utils:xset(reachable(E1, G, []),
[{tspec(func), tspec(fun_edge)}]),
true = digraph:delete(G),
-
+
% RL is a set of indirect local calls to exports.
RL = restriction(R0, XL),
% RX is a set of indirect external calls to exports.
@@ -1033,7 +1033,7 @@ inter_graph(X, L, LC, XC, CallAt) ->
?FORMAT("XL=~p~nXEs=~p~nLEs=~p~nE1=~p~nR0=~p~nRL=~p~nRX=~p~nR=~p~n"
"EE=~p~nECallAt1=~p~nECallAt2=~p~nECallAt=~p~n~n",
- [XL, XEs, LEs, E1, R0, RL, RX, R, EE,
+ [XL, XEs, LEs, E1, R0, RL, RX, R, EE,
ECallAt1, ECallAt2, ECallAt]),
{EE, ECallAt}.
@@ -1121,7 +1121,7 @@ remove_erase([], D) ->
do_add_libraries(Path, Verbose, State) ->
message(Verbose, lib_search, []),
- {C, E} = xref_utils:list_path(Path, [?Suffix]),
+ {C, E} = xref_utils:list_path(Path, [?Suffix]),
message(Verbose, done, []),
MDs = to_external(relation_to_family(relation(C))),
%% message(Verbose, lib_check, []),
@@ -1160,23 +1160,23 @@ do_set_up(S, VerboseOpt) ->
Reply.
%% If data has been supplied using add_module/9 (and that is the only
-%% sanctioned way), then DefAt, L, X, LCallAt, XCallAt, CallAt, XC, LC,
-%% and LU are guaranteed to be functions (with all supplied
-%% modules as domain (disregarding unknown modules, that is, modules
+%% sanctioned way), then DefAt, L, X, LCallAt, XCallAt, CallAt, XC, LC,
+%% and LU are guaranteed to be functions (with all supplied
+%% modules as domain (disregarding unknown modules, that is, modules
%% not supplied but hosting unknown functions)).
%% As a consequence, V and E are also functions. V is defined for unknown
%% modules also.
%% UU is also a function (thanks to sofs:family_difference/2...).
-%% XU on the other hand can be a partial function (that is, not defined
+%% XU on the other hand can be a partial function (that is, not defined
%% for all modules). U is derived from XU, so U is also partial.
%% The inverse variables - LC_1, XC_1, E_1 and EE_1 - are all partial.
%% B is also partial.
do_set_up(S) when S#xref.mode =:= functions ->
ModDictList = dict:to_list(S#xref.modules),
- [DefAt0, L, X0, LCallAt, XCallAt, CallAt, LC, XC, LU,
+ [DefAt0, L, X0, LCallAt, XCallAt, CallAt, LC, XC, LU,
EE0, ECallAt, UC, LPredefined,
Mod_DF,Mod_DF_1,Mod_DF_2,Mod_DF_3] = make_families(ModDictList, 18),
-
+
{XC_1, XU, XPredefined} = do_set_up_1(XC),
LC_1 = user_family(union_of_family(LC)),
E_1 = family_union(XC_1, LC_1),
@@ -1206,7 +1206,7 @@ do_set_up(S) when S#xref.mode =:= functions ->
AM = domain(F1),
%% Undef is the union of U0 and Lib:
- {Undef, U0, Lib, Lib_DF, Lib_DF_1, Lib_DF_2, Lib_DF_3} =
+ {Undef, U0, Lib, Lib_DF, Lib_DF_1, Lib_DF_2, Lib_DF_3} =
make_libs(XU, F1, AM, S#xref.library_path, S#xref.libraries),
{B, U} = make_builtins(U0),
X1_B = family_union(X1, B),
@@ -1228,22 +1228,22 @@ do_set_up(S) when S#xref.mode =:= functions ->
%% way to discard calls to local functions in other modules.
EE_conv = converse(union_of_family(EE0)),
EE_exported = restriction(EE_conv, union_of_family(X)),
- EE_local =
+ EE_local =
specification({external, fun({{M1,_,_},{M2,_,_}}) -> M1 =:= M2 end},
EE_conv),
EE_0 = converse(union(EE_local, EE_exported)),
EE_1 = user_family(EE_0),
- EE1 = partition_family({external, fun({{M1,_,_}, _MFA2}) -> M1 end},
+ EE1 = partition_family({external, fun({{M1,_,_}, _MFA2}) -> M1 end},
EE_0),
%% Make sure EE is defined for all modules:
EE = family_union(family_difference(EE0, EE0), EE1),
- IFun =
- fun({Mod,EE_M}, XMods) ->
- IMFun =
+ IFun =
+ fun({Mod,EE_M}, XMods) ->
+ IMFun =
fun(XrefMod) ->
- [NoCalls, NoFunctionCalls,
+ [NoCalls, NoFunctionCalls,
NoFunctions, _NoInter] = XrefMod#xref_mod.info,
- NewInfo = [NoCalls, NoFunctionCalls, NoFunctions,
+ NewInfo = [NoCalls, NoFunctionCalls, NoFunctions,
{no_inter_function_calls,length(EE_M)}],
XrefMod#xref_mod{info = NewInfo}
end,
@@ -1274,11 +1274,11 @@ do_set_up(S) when S#xref.mode =:= functions ->
finish_set_up(S1, Vs);
do_set_up(S) when S#xref.mode =:= modules ->
ModDictList = dict:to_list(S#xref.modules),
- [X0, I0, Mod_DF, Mod_DF_1, Mod_DF_2, Mod_DF_3] =
+ [X0, I0, Mod_DF, Mod_DF_1, Mod_DF_2, Mod_DF_3] =
make_families(ModDictList, 7),
I = union_of_family(I0),
AM = domain(X0),
-
+
{XU, Predefined} = make_predefined(I, AM),
%% Add "hidden" functions to the exports.
X1 = family_union(X0, Predefined),
@@ -1288,8 +1288,8 @@ do_set_up(S) when S#xref.mode =:= modules ->
M2A = make_M2A(ModDictList),
{A2R,A} = make_A2R(S#xref.applications),
R = set(dict:fetch_keys(S#xref.releases)),
-
- ME = projection({external, fun({M1,{M2,_F2,_A2}}) -> {M1,M2} end},
+
+ ME = projection({external, fun({M1,{M2,_F2,_A2}}) -> {M1,M2} end},
family_to_relation(I0)),
ME2AE = multiple_relative_product({M2A, M2A}, ME),
@@ -1298,7 +1298,7 @@ do_set_up(S) when S#xref.mode =:= modules ->
RE = range(AE2RE),
%% Undef is the union of U0 and Lib:
- {_Undef, U0, Lib, Lib_DF, Lib_DF_1, Lib_DF_2, Lib_DF_3} =
+ {_Undef, U0, Lib, Lib_DF, Lib_DF_1, Lib_DF_2, Lib_DF_3} =
make_libs(XU, X1, AM, S#xref.library_path, S#xref.libraries),
{B, U} = make_builtins(U0),
X1_B = family_union(X1, B),
@@ -1312,7 +1312,7 @@ do_set_up(S) when S#xref.mode =:= modules ->
X = family_union(X1, Lib),
Empty = empty_set(),
- Vs = [{'X',X},{'U',U},{'B',B},{'XU',XU},{v,V},
+ Vs = [{'X',X},{'U',U},{'B',B},{'XU',XU},{v,V},
{e,{Empty,Empty}},
{'M',M},{'A',A},{'R',R},
{'AM',AM},{'UM',UM},{'LM',LM},
@@ -1328,10 +1328,10 @@ finish_set_up(S, Vs) ->
S1 = S#xref{variables = T},
%% io:format("~p <= state <= ~p~n", [pack:lsize(S), pack:usize(S)]),
{ok, S1}.
-
+
do_finish_set_up([{Key, Value} | Vs], T) ->
{Type, OType} = var_type(Key),
- Val = #xref_var{name = Key, value = Value, vtype = predef,
+ Val = #xref_var{name = Key, value = Value, vtype = predef,
otype = OType, type = Type},
T1 = dict:store(Key, Val, T),
do_finish_set_up(Vs, T1);
@@ -1362,15 +1362,15 @@ var_type('EE') -> {function, edge};
var_type('LC') -> {function, edge};
var_type('UC') -> {function, edge};
var_type('XC') -> {function, edge};
-var_type('AE') -> {application, edge};
-var_type('ME') -> {module, edge};
+var_type('AE') -> {application, edge};
+var_type('ME') -> {module, edge};
var_type('RE') -> {release, edge};
var_type(_) -> {foo, bar}.
make_families(ModDictList, N) ->
Fun1 = fun({_,XMod}) -> XMod#xref_mod.data end,
Ss = from_sets(map(Fun1, ModDictList)),
- %% io:format("~n~p <= module data <= ~p~n",
+ %% io:format("~n~p <= module data <= ~p~n",
%% [pack:lsize(Ss), pack:usize(Ss)]),
make_fams(N, Ss, []).
@@ -1389,7 +1389,7 @@ make_M2A(ModDictList) ->
make_A2R(ApplDict) ->
AppDict = dict:to_list(ApplDict),
Fun = fun({A,XApp}) -> {A, XApp#xref_app.rel_name} end,
- Appl0 = family(map(Fun, AppDict)),
+ Appl0 = family(map(Fun, AppDict)),
AllApps = domain(Appl0),
Appl = family_to_relation(Appl0),
{Appl, AllApps}.
@@ -1445,13 +1445,13 @@ make_libs(XU, F, AM, LibPath, LibDict) ->
false ->
Libraries = dict:to_list(LibDict),
Lb = restriction(a_function(Libraries), UM),
- MFun = fun({M,XLib}) ->
+ MFun = fun({M,XLib}) ->
#xref_lib{dir = Dir} = XLib,
xref_utils:module_filename(Dir, M)
end,
map(MFun, to_external(Lb))
end,
- Fun = fun(FileName, Deprs) ->
+ Fun = fun(FileName, Deprs) ->
case beam_lib:chunks(FileName, [exports, attributes]) of
{ok, {M, [{exports,X}, {attributes,A}]}} ->
Exports = mfa_exports(X, A, M),
@@ -1496,14 +1496,14 @@ user_family(R) ->
partition_family({external, fun({_MFA1, {M2,_,_}}) -> M2 end}, R).
do_variables(State) ->
- Fun = fun({Name, #xref_var{vtype = user}}, {P,U}) ->
+ Fun = fun({Name, #xref_var{vtype = user}}, {P,U}) ->
{P,[Name | U]};
- ({Name, #xref_var{vtype = predef}}, A={P,U}) ->
+ ({Name, #xref_var{vtype = predef}}, A={P,U}) ->
case atom_to_list(Name) of
[H|_] when H>= $a, H=<$z -> A;
_Else -> {[Name | P], U}
end;
- ({{tmp, V}, _}, A) ->
+ ({{tmp, V}, _}, A) ->
io:format("Bug in ~p: temporary ~p~n", [?MODULE, V]), A;
(_V, A) -> A
end,
@@ -1565,7 +1565,7 @@ do_info(S, libraries) ->
map(fun({_L,XLib}) -> lib_info(XLib) end, D);
do_info(_S, I) ->
error({no_such_info, I}).
-
+
do_info(S, Type, E) when is_atom(E) ->
do_info(S, Type, [E]);
do_info(S, modules, Modules0) when is_list(Modules0) ->
@@ -1598,7 +1598,7 @@ find_info([E | Es], Dict, Error) ->
{ok, X} ->
[X | find_info(Es, Dict, Error)]
end;
-find_info([], _Dict, _Error) ->
+find_info([], _Dict, _Error) ->
[].
%% -> {[{AppName, RelName}], [{RelName, XApp}]}
@@ -1618,7 +1618,7 @@ rel_apps(S) ->
rel_apps_sums(AR, RRA0, S) ->
AppMods = app_mods(S), % [{AppName, XMod}]
RRA1 = relation_to_family(relation(RRA0)),
- RRA = inverse(substitution(1, RRA1)),
+ RRA = inverse(substitution(1, RRA1)),
%% RRA is [{RelName,{RelName,[XApp]}}]
RelMods = relative_product1(relation(AR), relation(AppMods)),
RelAppsMods = relative_product1(RRA, RelMods),
@@ -1630,7 +1630,7 @@ rel_apps_sums(AR, RRA0, S) ->
%% -> [{AppName, XMod}]
app_mods(S) ->
D = sort(dict:to_list(S#xref.modules)),
- Fun = fun({_M,XMod}, Acc) ->
+ Fun = fun({_M,XMod}, Acc) ->
case XMod#xref_mod.app_name of
[] -> Acc;
[AppName] -> [{AppName, XMod} | Acc]
@@ -1639,7 +1639,7 @@ app_mods(S) ->
foldl(Fun, [], D).
mod_info(XMod) ->
- #xref_mod{name = M, app_name = AppName, builtins = BuiltIns,
+ #xref_mod{name = M, app_name = AppName, builtins = BuiltIns,
dir = Dir, info = Info} = XMod,
App = sup_info(AppName),
{M, [{application, App}, {builtins, BuiltIns}, {directory, Dir} | Info]}.
@@ -1649,7 +1649,7 @@ app_info({AppName, ModSums}, S) ->
#xref_app{rel_name = RelName, vsn = Vsn, dir = Dir} = XApp,
Release = sup_info(RelName),
{AppName, [{directory,Dir}, {release, Release}, {version,Vsn} | ModSums]}.
-
+
rel_info({{RelName, XApps}, ModSums}, S) ->
NoApps = length(XApps),
XRel = dict:fetch(RelName, S#xref.releases),
@@ -1678,16 +1678,16 @@ no_sum(S, L) when S#xref.mode =:= modules ->
[{no_analyzed_modules, length(L)}].
no_sum([XMod | D], C0, UC0, LC0, XC0, UFC0, L0, X0, EV0, NoM) ->
- [{no_calls, {C,UC}},
+ [{no_calls, {C,UC}},
{no_function_calls, {LC,XC,UFC}},
{no_functions, {L,X}},
{no_inter_function_calls, EV}] = XMod#xref_mod.info,
no_sum(D, C0+C, UC0+UC, LC0+LC, XC0+XC, UFC0+UFC, L0+L, X0+X, EV0+EV, NoM);
no_sum([], C, UC, LC, XC, UFC, L, X, EV, NoM) ->
[{no_analyzed_modules, NoM},
- {no_calls, {C,UC}},
+ {no_calls, {C,UC}},
{no_function_calls, {LC,XC,UFC}},
- {no_functions, {L,X}},
+ {no_functions, {L,X}},
{no_inter_function_calls, EV}].
%% -> ok | throw(Error)
@@ -1712,20 +1712,20 @@ warnings(Flag, Message, [F | Fs]) ->
%% pack(term()) -> term()
%%
%% The identity function. The returned term does not use more heap
-%% than the given term. Tuples that are equal (=:=/2) are made
+%% than the given term. Tuples that are equal (=:=/2) are made
%% "the same".
%%
%% The process dictionary is used because it seems to be faster than
%% anything else right now...
%%
%pack(T) -> T;
-pack(T) ->
+pack(T) ->
PD = erase(),
NT = pack1(T),
%% true = T =:= NT,
%% io:format("erasing ~p elements...~n", [length(erase())]),
erase(), % wasting heap (and time)...
- map(fun({K,V}) -> put(K, V) end, PD),
+ foreach(fun({K,V}) -> put(K, V) end, PD),
NT.
pack1(C) when not is_tuple(C), not is_list(C) ->
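
The xref_base.erl hunks above make two small, related changes worth a closer look: lists:map/2 is replaced by lists:foreach/2 wherever the mapped result was discarded (in delete/1 and pack/1, with foreach/2 added to the lists import), and the comment on pack/1 describes how equal tuples are shared through the process dictionary. The following is a minimal, self-contained sketch of both ideas under illustrative names; it is not the code from xref_base.erl.

-module(pack_sketch).
-export([pack/1]).

%% Return a term equal to T in which equal (=:=) tuples share one copy.
pack(T) ->
    Saved = erase(),                     %% start from an empty process dictionary
    Packed = share(T),
    erase(),                             %% drop the temporary sharing table
    %% foreach/2, not map/2: only the side effect of restoring the
    %% dictionary is wanted, mirroring the map -> foreach change above.
    lists:foreach(fun({K, V}) -> put(K, V) end, Saved),
    Packed.

share(T) when is_tuple(T) ->
    NT = list_to_tuple([share(E) || E <- tuple_to_list(T)]),
    case get(NT) of
        undefined -> put(NT, NT), NT;    %% first occurrence: remember it
        Shared -> Shared                 %% later occurrences reuse the first copy
    end;
share([H | Tl]) ->
    [share(H) | share(Tl)];
share(Other) ->
    Other.
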
diff --git a/lib/tools/src/xref_compiler.erl b/lib/tools/src/xref_compiler.erl
index 67ac8c617d..c80eb0e669 100644
--- a/lib/tools/src/xref_compiler.erl
+++ b/lib/tools/src/xref_compiler.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2000-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2000-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -37,15 +37,15 @@
-export([format_error/1]).
--import(lists,
+-import(lists,
[concat/1, foldl/3, nthtail/2, reverse/1, sort/1, sublist/2]).
-import(sofs,
[composite/2, difference/2, empty_set/0, from_term/1,
intersection/2, is_empty_set/1, multiple_relative_product/2,
projection/2, relation/1, relation_to_family/1,
- restriction/2, substitution/2, to_external/1, union/2,
- union_of_family/1]).
+ restriction/2, specification/2, substitution/2,
+ to_external/1, union/2, union_of_family/1]).
%%
%% Exported functions
@@ -75,7 +75,7 @@ compile(Chars, Table) ->
{error, Info, Line} ->
error({parse_error, Line, Info})
end.
-
+
format_error({error, Module, Error}) ->
Module:format_error(Error);
format_error({parse_error, Line, Error}) ->
@@ -115,7 +115,7 @@ statements([Stmt={assign, VarType, Name, E} | Stmts0], Table, L, UV) ->
throw_error({variable_reassigned, xref_parser:t2s(Stmt)});
error ->
{Type, OType, NewE} = t_expr(E, Table),
- Val = #xref_var{name = Name, vtype = VarType,
+ Val = #xref_var{name = Name, vtype = VarType,
otype = OType, type = Type},
NewTable = dict:store(Name, Val, Table),
Stmts = if Stmts0 =:= [] -> [{variable, Name}]; true -> Stmts0 end,
@@ -128,9 +128,9 @@ statements([Expr], Table, L, UV) ->
E1 = un_familiarize(Type, OType, NewE),
NE = case {Type, OType} of
%% Edges with empty sets of line numbers are removed.
- {{line, _}, edge} ->
+ {{line, _}, edge} ->
{relation_to_family, E1};
- {_Type, edge_closure} ->
+ {_Type, edge_closure} ->
%% Fake a closure usage, just to make sure it is destroyed.
E2 = {fun graph_access/2, E1, E1},
{fun(_E) -> 'closure()' end, E2};
@@ -163,7 +163,7 @@ t_expr(E, Table) ->
%%% Constant = atom() | {atom(), atom()} | MFA | {MFA, MFA}
%%% Call = atom() % function in the sofs module
%%% | fun()
-%%% Type = {line, LineType} | function | module | application | release
+%%% Type = {line, LineType} | function | module | application | release
%%% | number
%%% LineType = line | local_call | external_call | export_call | all_line_call
%%% VarType = predef | user | tmp
@@ -182,7 +182,7 @@ check_expr({variable, Name}, Table) ->
case dict:find(Name, Table) of
{ok, #xref_var{vtype = VarType, otype = OType, type = Type}} ->
V0 = {variable, {VarType, Name}},
- V = case {VarType, Type, OType} of
+ V = case {VarType, Type, OType} of
{predef, release, _} -> V0;
{predef, application, _} -> V0;
{predef, module, _} -> V0;
@@ -212,7 +212,7 @@ check_expr(Expr={set, SOp, E}, Table) ->
{edge_set, domain} -> vertex_set;
{edge_set, weak} -> edge_set;
{edge_set, strict} -> edge_set;
- _ ->
+ _ ->
throw_error({type_error, xref_parser:t2s(Expr)})
end,
Op = set_op(SOp),
@@ -223,10 +223,10 @@ check_expr(Expr={graph, Op, E}, Table) ->
case Type of
{line, _LineType} ->
throw_error({type_error, xref_parser:t2s(Expr)});
- _Else ->
+ _Else ->
ok
end,
- OType =
+ OType =
case {NOType, Op} of
{edge, components} -> vertex_set;
{edge, condensation} -> edge_set;
@@ -237,7 +237,7 @@ check_expr(Expr={graph, Op, E}, Table) ->
%% Neither need nor want these ones:
%% {edge_set, closure} -> edge_set_closure;
%% {edge_set, components} -> vertex_set_set;
- _ ->
+ _ ->
throw_error({type_error, xref_parser:t2s(Expr)})
end,
E2 = {convert, NOType, edge_closure, E1},
@@ -271,10 +271,10 @@ check_expr(Expr={set, SOp, E1, E2}, Table) ->
number ->
{expr, number, number, {call, ari_op(SOp), NE1, NE2}};
_Else -> % set
- {Type, NewE1, NewE2} =
+ {Type, NewE1, NewE2} =
case {type_ord(Type1), type_ord(Type2)} of
{T1, T2} when T1 =:= T2 ->
- %% Example: if Type1 = {line, line} and
+ %% Example: if Type1 = {line, line} and
%% Type2 = {line, export_line}, then this is not
%% correct, but works:
{Type1, NE1, NE2};
@@ -296,7 +296,7 @@ check_expr(Expr={restr, ROp, E1, E2}, Table) ->
throw_error({type_error, xref_parser:t2s(Expr)});
{_Type1, {line, _LineType2}} ->
throw_error({type_error, xref_parser:t2s(Expr)});
- _ ->
+ _ ->
ok
end,
case {OType1, OType2} of
@@ -307,14 +307,14 @@ check_expr(Expr={restr, ROp, E1, E2}, Table) ->
{edge, vertex} ->
restriction(ROp, E1, Type1, NE1, Type2, NE2);
{edge_closure, vertex} when ROp =:= '|||' ->
- {expr, _, _, R1} =
+ {expr, _, _, R1} =
closure_restriction('|', Type1, Type2, OType2, NE1, NE2),
- {expr, _, _, R2} =
+ {expr, _, _, R2} =
closure_restriction('||', Type1, Type2, OType2, NE1, NE2),
{expr, Type1, edge, {call, intersection, R1, R2}};
- {edge_closure, vertex} ->
+ {edge_closure, vertex} ->
closure_restriction(ROp, Type1, Type2, OType2, NE1, NE2);
- _ ->
+ _ ->
throw_error({type_error, xref_parser:t2s(Expr)})
end;
check_expr(Expr={path, E1, E2}, Table) ->
@@ -330,7 +330,7 @@ check_expr(Expr={path, E1, E2}, Table) ->
end,
E2b = {convert, OType2, Type2, Type1, E2a},
{OType1, NE1} = path_arg(OType1a, E1a),
- NE2 = case {OType1, OType2} of
+ NE2 = case {OType1, OType2} of
{path, edge} -> {convert, OType2, edge_closure, E2b};
{path, edge_closure} when Type1 =:= Type2 -> E2b;
_ -> throw_error({type_error, xref_parser:t2s(Expr)})
@@ -347,7 +347,7 @@ check_expr({regexpr, RExpr, Type0}, _Table) ->
release -> 'R'
end,
Var = {variable, {predef, V}},
- Call = {call, fun(E, V2) -> xref_utils:regexpr(E, V2) end,
+ Call = {call, fun(E, V2) -> xref_utils:regexpr(E, V2) end,
{constants, RExpr}, Var},
{expr, Type, vertex, Call};
check_expr(C={constant, _Type, _OType, _C}, Table) ->
@@ -368,15 +368,15 @@ check_conversion(OType, Type1, Type2, Expr) ->
end.
%% Allowed conversions.
-conversions(_OType, {line, LineType}, {line, LineType}) -> ok;
+conversions(_OType, {line, LineType}, {line, LineType}) -> ok;
conversions(edge, {line, _}, {line, all_line_call}) -> ok;
-conversions(edge, From, {line, Line})
+conversions(edge, From, {line, Line})
when is_atom(From), Line =/= all_line_call -> ok;
conversions(vertex, From, {line, line}) when is_atom(From) -> ok;
conversions(vertex, From, To) when is_atom(From), is_atom(To) -> ok;
conversions(edge, From, To) when is_atom(From), is_atom(To) -> ok;
%% "Extra":
-conversions(edge, {line, Line}, To)
+conversions(edge, {line, Line}, To)
when is_atom(To), Line =/= all_line_call -> ok;
conversions(vertex, {line, line}, To) when is_atom(To) -> ok;
conversions(_OType, _From, _To) -> not_ok.
@@ -399,7 +399,7 @@ ari_op(difference) -> fun(X, Y) -> X - Y end.
restriction(ROp, E1, Type1, NE1, Type2, NE2) ->
{Column, _} = restr_op(ROp),
- case NE1 of
+ case NE1 of
{call, union_of_family, _E} when ROp =:= '|' ->
restriction(Column, Type1, E1, Type2, NE2);
{call, union_of_family, _E} when ROp =:= '||' ->
@@ -455,8 +455,8 @@ check_constants(Cs=[C={constant, Type0, OType, _Con} | Cs1], Table) ->
E = function_vertices_to_family(Type, OType, {constants, S}),
{expr, Type, OType, E};
[{Type1, [C1|_]}, {Type2, [C2|_]} | _] ->
- throw_error({type_mismatch,
- make_vertex(Type1, C1),
+ throw_error({type_mismatch,
+ make_vertex(Type1, C1),
make_vertex(Type2, C2)})
end.
@@ -467,7 +467,7 @@ check_mix([C={constant, Type, OType, _Con} | Cs], Type0, OType, _C0)
check_mix(Cs, Type, OType, C);
check_mix([C | _], _Type0, _OType0, C0) ->
throw_error({type_mismatch, xref_parser:t2s(C0), xref_parser:t2s(C)});
-check_mix([], _Type0, _OType0, _C0) ->
+check_mix([], _Type0, _OType0, _C0) ->
ok.
split(Types, Cs, Table) ->
@@ -478,11 +478,11 @@ split([Type | Types], Vs, AllSoFar, _Type, Table, L) ->
S0 = known_vertices(Type, Vs, Table),
S = difference(S0, AllSoFar),
case is_empty_set(S) of
- true ->
+ true ->
split(Types, Vs, AllSoFar, Type, Table, L);
- false ->
+ false ->
All = union(AllSoFar, S0),
- split(Types, Vs, All, Type, Table,
+ split(Types, Vs, All, Type, Table,
[{Type, to_external(S)} | L])
end;
split([], Vs, All, Type, _Table, L) ->
@@ -491,7 +491,7 @@ split([], Vs, All, Type, _Table, L) ->
[C|_] -> throw_error({unknown_constant, make_vertex(Type, C)})
end.
-make_vertex(Type, C) ->
+make_vertex(Type, C) ->
xref_parser:t2s({constant, Type, vertex, C}).
constant_vertices([{constant, _Type, edge, {A,B}} | Cs], L) ->
@@ -504,7 +504,7 @@ constant_vertices([], L) ->
known_vertices('Fun', Cs, T) ->
M = projection(1, Cs),
F = union_of_family(restriction(fetch_value(v, T), M)),
- intersection(Cs, F);
+ union(bifs(Cs), intersection(Cs, F));
known_vertices('Mod', Cs, T) ->
intersection(Cs, fetch_value('M', T));
known_vertices('App', Cs, T) ->
@@ -512,6 +512,11 @@ known_vertices('App', Cs, T) ->
known_vertices('Rel', Cs, T) ->
intersection(Cs, fetch_value('R', T)).
+bifs(Cs) ->
+ specification({external,
+ fun({M,F,A}) -> xref_utils:is_builtin(M, F, A) end},
+ Cs).
+
function_vertices_to_family(function, vertex, E) ->
{call, partition_family, 1, E};
function_vertices_to_family(_Type, _OType, E) ->
@@ -567,11 +572,11 @@ convert(E, OType, FromType, ToType) ->
general(_ObjectType, FromType, ToType, X) when FromType =:= ToType ->
X;
-general(edge, {line, _LineType}, ToType, LEs) ->
+general(edge, {line, _LineType}, ToType, LEs) ->
VEs = {projection, ?Q({external, fun({V1V2,_Ls}) -> V1V2 end}), LEs},
general(edge, function, ToType, VEs);
general(edge, function, ToType, VEs) ->
- MEs = {projection,
+ MEs = {projection,
?Q({external, fun({{M1,_,_},{M2,_,_}}) -> {M1,M2} end}),
VEs},
general(edge, module, ToType, MEs);
@@ -580,7 +585,7 @@ general(edge, module, ToType, MEs) ->
general(edge, application, ToType, AEs);
general(edge, application, release, AEs) ->
{image, {get, ae}, AEs};
-general(vertex, {line, _LineType}, ToType, L) ->
+general(vertex, {line, _LineType}, ToType, L) ->
V = {partition_family, ?Q(1), {domain, L}},
general(vertex, function, ToType, V);
general(vertex, function, ToType, V) ->
@@ -595,18 +600,18 @@ general(vertex, application, release, A) ->
special(_ObjectType, FromType, ToType, X) when FromType =:= ToType ->
X;
special(edge, {line, _LineType}, {line, all_line_call}, Calls) ->
- {put, ?T(mods),
- {projection,
- ?Q({external, fun({{{M1,_,_},{M2,_,_}},_}) -> {M1,M2} end}),
+ {put, ?T(mods),
+ {projection,
+ ?Q({external, fun({{{M1,_,_},{M2,_,_}},_}) -> {M1,M2} end}),
Calls},
- {put, ?T(def_at),
+ {put, ?T(def_at),
{union, {image, {get, def_at},
- {union, {domain, {get, ?T(mods)}},
+ {union, {domain, {get, ?T(mods)}},
{range, {get, ?T(mods)}}}}},
{fun funs_to_lines/2,
{get, ?T(def_at)}, Calls}}};
special(edge, function, {line, LineType}, VEs) ->
- Var = if
+ Var = if
LineType =:= line -> call_at;
LineType =:= export_call -> e_call_at;
LineType =:= local_call -> l_call_at;
@@ -615,9 +620,9 @@ special(edge, function, {line, LineType}, VEs) ->
line_edges(VEs, Var);
special(edge, module, ToType, MEs) ->
VEs = {image,
- {projection,
+ {projection,
?Q({external, fun(FE={{M1,_,_},{M2,_,_}}) -> {{M1,M2},FE} end}),
- {union,
+ {union,
{image, {get, e},
{projection, ?Q({external, fun({M1,_M2}) -> M1 end}), MEs}}}},
MEs},
@@ -629,7 +634,7 @@ special(edge, release, ToType, REs) ->
AEs = {inverse_image, {get, ae}, REs},
special(edge, application, ToType, AEs);
special(vertex, function, {line, _LineType}, V) ->
- {restriction,
+ {restriction,
{union_of_family, {restriction, {get, def_at}, {domain, V}}},
{union_of_family, V}};
special(vertex, module, ToType, M) ->
@@ -643,15 +648,15 @@ special(vertex, release, ToType, R) ->
special(vertex, application, ToType, A).
line_edges(VEs, CallAt) ->
- {put, ?T(ves), VEs,
- {put, ?T(m1),
- {projection, ?Q({external, fun({{M1,_,_},_}) -> M1 end}),
+ {put, ?T(ves), VEs,
+ {put, ?T(m1),
+ {projection, ?Q({external, fun({{M1,_,_},_}) -> M1 end}),
{get, ?T(ves)}},
{image, {projection, ?Q({external, fun(C={VV,_L}) -> {VV,C} end}),
{union, {image, {get, CallAt}, {get, ?T(m1)}}}},
{get, ?T(ves)}}}}.
-%% {(((v1,l1),(v2,l2)),l) :
+%% {(((v1,l1),(v2,l2)),l) :
%% (v1,l1) in DefAt and (v2,l2) in DefAt and ((v1,v2),L) in CallAt}
funs_to_lines(DefAt, CallAt) ->
T1 = multiple_relative_product({DefAt, DefAt}, projection(1, CallAt)),
@@ -765,7 +770,7 @@ save_vars([], _D, Vs, UVs, L) ->
%% Traverses the expression again, this time using more or less the
%% inverse of the table created by find_nodes. The first time a node
-%% is visited, its children are traversed, the following times a
+%% is visited, its children are traversed, the following times a
%% get instruction is inserted (using the saved value).
make_instructions(N, UserVars, D) ->
{D1, Is0} = make_instrs(N, D, []),
@@ -777,9 +782,9 @@ make_instructions(N, UserVars, D) ->
make_more_instrs([UV | UVs], D, Is) ->
case dict:find(UV, D) of
- error ->
+ error ->
make_more_instrs(UVs, D, Is);
- _Else ->
+ _Else ->
{ND, NIs} = make_instrs(UV, D, Is),
make_more_instrs(UVs, ND, [pop | NIs])
end;
@@ -844,17 +849,17 @@ evaluate([{quote, Val} | P], T, S) ->
evaluate(P, T, [Val | S]);
evaluate([{get, Var} | P], T, S) when is_atom(Var) -> % predefined
Value = fetch_value(Var, T),
- Val = case Value of
+ Val = case Value of
{R, _} -> R; % relation
_ -> Value % simple set
end,
- evaluate(P, T, [Val | S]);
+ evaluate(P, T, [Val | S]);
evaluate([{get, {inverse, Var}} | P], T, S) -> % predefined, inverse
{_, R} = fetch_value(Var, T),
- evaluate(P, T, [R | S]);
+ evaluate(P, T, [R | S]);
evaluate([{get, {user, Var}} | P], T, S) ->
Val = fetch_value(Var, T),
- evaluate(P, T, [Val | S]);
+ evaluate(P, T, [Val | S]);
evaluate([{get, Var} | P], T, S) -> % tmp
evaluate(P, T, [dict:fetch(Var, T) | S]);
evaluate([{save, Var={tmp, _}} | P], T, S=[Val | _]) ->
@@ -862,7 +867,7 @@ evaluate([{save, Var={tmp, _}} | P], T, S=[Val | _]) ->
evaluate(P, dict:store(Var, Val, T1), S);
evaluate([{save, {user, Name}} | P], T, S=[Val | _]) ->
#xref_var{vtype = user, otype = OType, type = Type} = dict:fetch(Name, T),
- NewVar = #xref_var{name = Name, value = Val,
+ NewVar = #xref_var{name = Name, value = Val,
vtype = user, otype = OType, type = Type},
T1 = update_graph_counter(Val, +1, T),
NT = dict:store(Name, NewVar, T1),
@@ -889,7 +894,7 @@ update_graph_counter(Value, Inc, T) ->
error when Inc =:= 1 ->
dict:store(Value, 1, T)
end;
- _EXIT ->
+ _EXIT ->
T
end.
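
The known_vertices('Fun', Cs, T) change above unions in bifs(Cs), so constants that name BIFs count as known function vertices even when they do not occur in the analyzed modules. The sketch below shows the sofs:specification/2 call in isolation; erlang:is_builtin/3 stands in for xref_utils:is_builtin/3, and the module and function names are illustrative, not part of the patch.

-module(bifs_sketch).
-export([bifs/1, demo/0]).

%% Keep only the {M,F,A} vertices that are BIFs, as bifs/1 above does.
bifs(Vertices) ->
    sofs:specification(
      {external, fun({M, F, A}) -> erlang:is_builtin(M, F, A) end},
      Vertices).

demo() ->
    Cs = sofs:relation([{erlang, length, 1}, {mymod, f, 0}]),
    %% Expected to keep only {erlang,length,1}, since length/1 is a BIF.
    sofs:to_external(bifs(Cs)).
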
diff --git a/lib/tools/src/xref_reader.erl b/lib/tools/src/xref_reader.erl
index db755c31d8..d22f0df164 100644
--- a/lib/tools/src/xref_reader.erl
+++ b/lib/tools/src/xref_reader.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2000-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2000-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(xref_reader).
@@ -22,7 +22,7 @@
-import(lists, [keysearch/3, member/2, reverse/1]).
--record(xrefr,
+-record(xrefr,
{module=[],
function=[],
def_at=[],
@@ -59,15 +59,15 @@
module(Module, Forms, CollectBuiltins, X, DF) ->
Attrs = [{Attr,V} || {attribute,_Line,Attr,V} <- Forms],
IsAbstract = xref_utils:is_abstract_module(Attrs),
- S = #xrefr{module = Module, builtins_too = CollectBuiltins,
+ S = #xrefr{module = Module, builtins_too = CollectBuiltins,
is_abstr = IsAbstract, x = X, df = DF},
forms(Forms, S).
forms([F | Fs], S) ->
S1 = form(F, S),
forms(Fs, S1);
-forms([], S) ->
- #xrefr{module = M, def_at = DefAt,
+forms([], S) ->
+ #xrefr{module = M, def_at = DefAt,
l_call_at = LCallAt, x_call_at = XCallAt,
el = LC, ex = XC, x = X, df = Depr,
lattrs = AL, xattrs = AX, battrs = B, unresolved = U} = S,
@@ -75,7 +75,7 @@ forms([], S) ->
{ok, M, {DefAt, LCallAt, XCallAt, LC, XC, X, Attrs, Depr}, U}.
form({attribute, Line, xref, Calls}, S) -> % experimental
- #xrefr{module = M, function = Fun,
+ #xrefr{module = M, function = Fun,
lattrs = L, xattrs = X, battrs = B} = S,
attr(Calls, Line, M, Fun, L, X, B, S);
form({attribute, _Line, _Attr, _Val}, S) ->
@@ -110,12 +110,12 @@ clauses([{clause, _Line, _H, G, B} | Cs], FunVars, Matches, S) ->
S2 = expr(B, S1),
S3 = S2#xrefr{funvars = FunVars, matches = Matches},
clauses(Cs, S3);
-clauses([], _FunVars, _Matches, S) ->
+clauses([], _FunVars, _Matches, S) ->
S.
attr([E={From, To} | As], Ln, M, Fun, AL, AX, B, S) ->
case mfa(From, M) of
- {_, _, MFA} when MFA =:= Fun; [] =:= Fun ->
+ {_, _, MFA} when MFA =:= Fun; [] =:= Fun ->
attr(From, To, Ln, M, Fun, AL, AX, B, S, As, E);
{_, _, _} ->
attr(As, Ln, M, Fun, AL, AX, [E | B], S);
@@ -164,7 +164,7 @@ expr({call, Line,
%% Added in R10B-6. M:F/A.
expr({'fun', Line, {function, Mod, Fun, Arity}}, S);
expr({'fun', Line, {function, Mod, Name, Arity}}, S) ->
- %% Added in R10B-6. M:F/A.
+ %% Added in R10B-6. M:F/A.
As = lists:duplicate(Arity, {atom, Line, foo}),
external_call(Mod, Name, As, Line, false, S);
expr({'fun', Line, {function, Name, Arity}, _Extra}, S) ->
@@ -183,7 +183,7 @@ expr({call, Line, {remote, _Line, Mod, Name}, As}, S) ->
expr({call, Line, F, As}, S) ->
external_call(erlang, apply, [F, list2term(As)], Line, true, S);
expr({match, _Line, {var,_,Var}, {'fun', _, {clauses, Cs}, _Extra}}, S) ->
- %% This is what is needed in R7 to avoid warnings for the functions
+ %% This is what is needed in R7 to avoid warnings for the functions
%% that are passed around by the "expansion" of list comprehension.
S1 = S#xrefr{funvars = [Var | S#xrefr.funvars]},
clauses(Cs, S1);
@@ -192,6 +192,14 @@ expr({match, _Line, {var,_,Var}, E}, S) ->
%% Args = [A,B], apply(m, f, Args)
S1 = S#xrefr{matches = [{Var, E} | S#xrefr.matches]},
expr(E, S1);
+expr({op, _Line, 'orelse', Op1, Op2}, S) ->
+ expr([Op1, Op2], S);
+expr({op, _Line, 'andalso', Op1, Op2}, S) ->
+ expr([Op1, Op2], S);
+expr({op, Line, Op, Operand1, Operand2}, S) ->
+ external_call(erlang, Op, [Operand1, Operand2], Line, false, S);
+expr({op, Line, Op, Operand}, S) ->
+ external_call(erlang, Op, [Operand], Line, false, S);
expr(T, S) when is_tuple(T) ->
expr(tuple_to_list(T), S);
expr([E | Es], S) ->
@@ -241,13 +249,13 @@ external_call(Mod, Fun, ArgsList, Line, X, S) ->
_Else -> % apply2, 1 or 2
check_funarg(W, ArgsList, Line, S1)
end.
-
+
eval_args(Mod, Fun, ArgsTerm, Line, S, ArgsList, Extra) ->
{IsSimpleCall, M, F} = mod_fun(Mod, Fun),
case term2list(ArgsTerm, [], S) of
undefined ->
S1 = unresolved(M, F, -1, Line, S),
- expr(ArgsList, S1);
+ expr(ArgsList, S1);
ArgsList2 when not IsSimpleCall ->
S1 = unresolved(M, F, length(ArgsList2), Line, S),
expr(ArgsList, S1);
@@ -288,14 +296,14 @@ fun_args(apply2, [FunArg, Args]) -> {FunArg, Args};
fun_args(1, [FunArg | Args]) -> {FunArg, Args};
fun_args(2, [_Node, FunArg | Args]) -> {FunArg, Args}.
-list2term([A | As]) ->
+list2term([A | As]) ->
{cons, 0, A, list2term(As)};
-list2term([]) ->
+list2term([]) ->
{nil, 0}.
term2list({cons, _Line, H, T}, L, S) ->
term2list(T, [H | L], S);
-term2list({nil, _Line}, L, _S) ->
+term2list({nil, _Line}, L, _S) ->
reverse(L);
term2list({var, _, Var}, L, S) ->
case keysearch(Var, 1, S#xrefr.matches) of
@@ -332,11 +340,11 @@ handle_call(Locality, To0, Line, S, IsUnres) ->
true ->
S
end,
- case Locality of
- local ->
+ case Locality of
+ local ->
S1#xrefr{el = [Call | S1#xrefr.el],
l_call_at = [CallAt | S1#xrefr.l_call_at]};
- external ->
+ external ->
S1#xrefr{ex = [Call | S1#xrefr.ex],
x_call_at = [CallAt | S1#xrefr.x_call_at]}
end.
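
The new expr/2 clauses above (OTP-8647) make xref collect uses of operators as calls to the erlang module, while andalso and orelse are only descended into rather than recorded as calls themselves. A small illustrative module, not part of the patch, showing the kind of code affected:

-module(op_sketch).
-export([small/2]).

%% With the clauses above, the =< comparison should now be recorded as a
%% call to erlang:'=<'/2 when this module is analyzed with xref's
%% builtins option enabled; before this change, operator uses were not
%% collected at all. The andalso itself is not recorded as a call, only
%% its two operands are traversed.
small(A, B) ->
    is_integer(A) andalso A =< B.
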
diff --git a/lib/tools/test/xref_SUITE.erl b/lib/tools/test/xref_SUITE.erl
index a7855b0bb9..f9d062ef85 100644
--- a/lib/tools/test/xref_SUITE.erl
+++ b/lib/tools/test/xref_SUITE.erl
@@ -39,11 +39,11 @@
-export([all/1, init/1, fini/1]).
-export([xref/1,
- addrem/1, convert/1, intergraph/1, lines/1, loops/1,
+ addrem/1, convert/1, intergraph/1, lines/1, loops/1,
no_data/1, modules/1]).
-export([files/1,
- add/1, default/1, info/1, lib/1, read/1, read2/1, remove/1,
+ add/1, default/1, info/1, lib/1, read/1, read2/1, remove/1,
replace/1, update/1, deprecated/1, trycatch/1,
abstract_modules/1, fun_mfa/1, qlc/1]).
@@ -82,7 +82,7 @@ init(Conf) when is_list(Conf) ->
?line ok = erl_tar:extract(TarFile, [compressed]),
?line ok = file:delete(TarFile),
[{copy_dir, CopyDir} | Conf].
-
+
fini(Conf) when is_list(Conf) ->
%% Nothing.
Conf.
@@ -120,7 +120,7 @@ addrem(Conf) when is_list(Conf) ->
LCallAt_m1 = [],
XCallAt_m1 = [{E1,13}],
Info1 = #xref_mod{name = m1, app_name = [a1]},
- ?line S1 = add_module(S0, Info1, DefAt_m1, X_m1, LCallAt_m1, XCallAt_m1,
+ ?line S1 = add_module(S0, Info1, DefAt_m1, X_m1, LCallAt_m1, XCallAt_m1,
XC_m1, LC_m1),
D2 = {F2,7},
@@ -132,7 +132,7 @@ addrem(Conf) when is_list(Conf) ->
LCallAt_m2 = [],
XCallAt_m2 = [{E2,96}],
Info2 = #xref_mod{name = m2, app_name = [a2]},
- ?line S2 = add_module(S1, Info2, DefAt_m2, X_m2, LCallAt_m2, XCallAt_m2,
+ ?line S2 = add_module(S1, Info2, DefAt_m2, X_m2, LCallAt_m2, XCallAt_m2,
XC_m2, LC_m2),
?line S5 = set_up(S2),
@@ -142,7 +142,7 @@ addrem(Conf) when is_list(Conf) ->
?line {ok, XMod2, S6a} = remove_module(S6, m2),
?line [a2] = XMod2#xref_mod.app_name,
?line S7 = set_up(S6a),
-
+
?line AppInfo1 = #xref_app{name = a1, rel_name = [r1]},
?line S9 = add_application(S7, AppInfo1),
?line S10 = set_up(S9),
@@ -186,7 +186,7 @@ convert(Conf) when is_list(Conf) ->
LCallAt_m1 = [],
XCallAt_m1 = [{E1,13},{E2,17},{E4,7}],
Info1 = #xref_mod{name = m1, app_name = [a1]},
- ?line S1 = add_module(S0, Info1, DefAt_m1, X_m1, LCallAt_m1, XCallAt_m1,
+ ?line S1 = add_module(S0, Info1, DefAt_m1, X_m1, LCallAt_m1, XCallAt_m1,
XC_m1, LC_m1),
D2 = {F2,7},
@@ -200,7 +200,7 @@ convert(Conf) when is_list(Conf) ->
LCallAt_m2 = [],
XCallAt_m2 = [{E3,96},{E6,12},{UE1,77}],
Info2 = #xref_mod{name = m2, app_name = [a2]},
- ?line S2 = add_module(S1, Info2, DefAt_m2, X_m2, LCallAt_m2, XCallAt_m2,
+ ?line S2 = add_module(S1, Info2, DefAt_m2, X_m2, LCallAt_m2, XCallAt_m2,
XC_m2, LC_m2),
D4 = {F4,6},
@@ -213,7 +213,7 @@ convert(Conf) when is_list(Conf) ->
LCallAt_m3 = [{E5,19}],
XCallAt_m3 = [{UE2,22}],
Info3 = #xref_mod{name = m3, app_name = [a3]},
- ?line S3 = add_module(S2, Info3, DefAt_m3, X_m3, LCallAt_m3, XCallAt_m3,
+ ?line S3 = add_module(S2, Info3, DefAt_m3, X_m3, LCallAt_m3, XCallAt_m3,
XC_m3, LC_m3),
Info4 = #xref_mod{name = m4, app_name = [a2]},
@@ -303,7 +303,7 @@ convert(Conf) when is_list(Conf) ->
?line {ok, _} = eval("(XXL) (Lin) (Fun) E", AllCallAt, S),
?line {ok, _} = eval("(XXL) (XXL) (Lin) (Fun) E", AllCallAt, S),
- ?line {ok, _} = eval(f("(XXL) (Lin) ~p", [[E1, E6]]),
+ ?line {ok, _} = eval(f("(XXL) (Lin) ~p", [[E1, E6]]),
[{{D1,D3},[13]}, {{D7,D4},[12]}], S),
?line {ok, _} = eval(f("(Fun) ~p", [AllMs]), AllE, S),
?line {ok, _} = eval("(Fun) [m1->m2,m2->m3]", [E1,E2,E6], S),
@@ -323,7 +323,7 @@ intergraph(Conf) when is_list(Conf) ->
F3 = {m1,f3,3},
F4 = {m1,f4,4},
F5 = {m1,f5,5},
-
+
F6 = {m2,f1,6}, % X
F7 = {m2,f1,7},
F8 = {m2,f1,8},
@@ -339,7 +339,7 @@ intergraph(Conf) when is_list(Conf) ->
E5 = {F4,F2},
E6 = {F5,F4},
E7 = {F4,F5},
-
+
E8 = {F6,F7},
E9 = {F7,F8},
E10 = {F8,F1}, % X
@@ -363,9 +363,9 @@ intergraph(Conf) when is_list(Conf) ->
LCallAt_m1 = [{E1,1},{E2,2},{E3,3},{E5,5},{E6,6},{E7,7}],
XCallAt_m1 = [{E1,4}],
Info1 = #xref_mod{name = m1, app_name = [a1]},
- ?line S1 = add_module(S0, Info1, DefAt_m1, X_m1, LCallAt_m1, XCallAt_m1,
+ ?line S1 = add_module(S0, Info1, DefAt_m1, X_m1, LCallAt_m1, XCallAt_m1,
XC_m1, LC_m1),
-
+
D6 = {F6,6},
D7 = {F7,7},
D8 = {F8,8},
@@ -380,7 +380,7 @@ intergraph(Conf) when is_list(Conf) ->
LCallAt_m2 = [{E8,8},{E9,9},{E11,11},{E12,12},{E13,13},{E14,14}],
XCallAt_m2 = [{E10,10},{E15,15}],
Info2 = #xref_mod{name = m2, app_name = [a2]},
- ?line S2 = add_module(S1, Info2, DefAt_m2, X_m2, LCallAt_m2, XCallAt_m2,
+ ?line S2 = add_module(S1, Info2, DefAt_m2, X_m2, LCallAt_m2, XCallAt_m2,
XC_m2, LC_m2),
AppInfo1 = #xref_app{name = a1, rel_name = [r1]},
@@ -397,13 +397,13 @@ intergraph(Conf) when is_list(Conf) ->
?line {ok, _} = eval("EE | m2", [{F6,F1}], S),
?line {ok, _} = eval("EE | m2 + EE | m2", [{F6,F1}], S),
- ?line {ok, _} = eval("(Fun)(Lin)(E | m1)",
+ ?line {ok, _} = eval("(Fun)(Lin)(E | m1)",
to_external(union(set(XC_m1), set(LC_m1))), S),
- ?line {ok, _} = eval("(XXL)(ELin) (EE | m1)",
- [{{D2,D1},[1,2,4]},{{D4,D2},[5]},{{D5,D4},[6]},{{D4,D5},[7]}],
+ ?line {ok, _} = eval("(XXL)(ELin) (EE | m1)",
+ [{{D2,D1},[1,2,4]},{{D4,D2},[5]},{{D5,D4},[6]},{{D4,D5},[7]}],
S),
?line {ok, _} = eval("(XXL)(ELin)(EE | m2)", [{{D6,D1},[8,11,12]}], S),
- ?line {ok, _} = eval("(XXL)(ELin)(ELin)(EE | m2)",
+ ?line {ok, _} = eval("(XXL)(ELin)(ELin)(EE | m2)",
[{{D6,D1},[8,11,12]}], S),
%% Combining graphs (equal or different):
@@ -420,15 +420,15 @@ intergraph(Conf) when is_list(Conf) ->
?line {ok, _} = eval("EE | m1 + E | m1", LC_m1, S),
?line {ok, _} = eval(f("EE | ~p + E | ~p", [F2, F2]), [E1,E2], S),
%% [1,4] from 'calls' is a subset of [1,2,4] from Inter Call Graph:
- ?line {ok, _} = eval(f("(XXL)(Lin) (E | ~p)", [F2]),
+ ?line {ok, _} = eval(f("(XXL)(Lin) (E | ~p)", [F2]),
[{{D2,D1},[1,4]},{{D2,D3},[2]}], S),
- ?line {ok, _} = eval(f("(XXL)(ELin) (EE | ~p)", [F2]),
+ ?line {ok, _} = eval(f("(XXL)(ELin) (EE | ~p)", [F2]),
[{{D2,D1},[1,2,4]}], S),
?line {ok, _} = eval(f("(XXL)((ELin)(EE | ~p) + (Lin)(E | ~p))", [F2, F2]),
[{{D2,D1},[1,2,4]},{{D2,D3},[2]}], S),
- ?line {ok, _} =
- eval(f("(XXL)((ELin) ~p + (Lin) ~p)", [{F2, F1}, {F2, F1}]),
+ ?line {ok, _} =
+ eval(f("(XXL)((ELin) ~p + (Lin) ~p)", [{F2, F1}, {F2, F1}]),
[{{D2,D1},[1,2,4]}], S),
?line {ok, _} = eval(f("(Fun)(Lin) ~p", [{F2, F1}]), [E1], S),
%% The external call E4 is included in the reply:
@@ -438,7 +438,7 @@ intergraph(Conf) when is_list(Conf) ->
%% The local call E1 is included in the reply:
?line {ok, _} = eval("(XXL)(Lin)(XC | m1)", [{{D2,D1},[1,4]}], S),
- ?line {ok, _} = eval(f("(LLin) (E | ~p || ~p) + (XLin) (E | ~p || ~p)",
+ ?line {ok, _} = eval(f("(LLin) (E | ~p || ~p) + (XLin) (E | ~p || ~p)",
[F2, F1, F2, F1]), [{E4,[1,4]}], S),
?line {ok, _} = eval("# (ELin) E", 6, S),
@@ -449,7 +449,7 @@ lines(suite) -> [];
lines(doc) -> ["More test of Inter Call Graph, and regular expressions"];
lines(Conf) when is_list(Conf) ->
S0 = new(),
-
+
F1 = {m1,f1,1}, % X
F2 = {m1,f2,2},
F3 = {m1,f3,3},
@@ -464,14 +464,14 @@ lines(Conf) when is_list(Conf) ->
E5 = {F2,F4}, % X
E6 = {F5,F6},
E7 = {F6,F4}, % X
-
+
D1 = {F1,1},
D2 = {F2,2},
D3 = {F3,3},
D4 = {F4,4},
D5 = {F5,5},
D6 = {F6,6},
-
+
DefAt_m1 = [D1,D2,D3,D5,D6],
X_m1 = [F1,F5],
% L_m1 = [F2,F3,F6],
@@ -480,7 +480,7 @@ lines(Conf) when is_list(Conf) ->
LCallAt_m1 = [{E1,1},{E3,3},{E6,6}],
XCallAt_m1 = [{E2,2},{E4,4},{E5,5},{E7,7}],
Info1 = #xref_mod{name = m1, app_name = [a1]},
- ?line S1 = add_module(S0, Info1, DefAt_m1, X_m1, LCallAt_m1, XCallAt_m1,
+ ?line S1 = add_module(S0, Info1, DefAt_m1, X_m1, LCallAt_m1, XCallAt_m1,
XC_m1, LC_m1),
DefAt_m2 = [D4],
@@ -491,9 +491,9 @@ lines(Conf) when is_list(Conf) ->
LCallAt_m2 = [],
XCallAt_m2 = [],
Info2 = #xref_mod{name = m2, app_name = [a2]},
- ?line S2 = add_module(S1, Info2, DefAt_m2, X_m2, LCallAt_m2, XCallAt_m2,
+ ?line S2 = add_module(S1, Info2, DefAt_m2, X_m2, LCallAt_m2, XCallAt_m2,
XC_m2, LC_m2),
-
+
AppInfo1 = #xref_app{name = a1, rel_name = [r1]},
?line S5 = add_application(S2, AppInfo1),
AppInfo2 = #xref_app{name = a2, rel_name = [r1]},
@@ -509,10 +509,10 @@ lines(Conf) when is_list(Conf) ->
{{D5,D4},[6]}], S),
?line {ok, _} = eval("(XXL)(Lin) (E | m1)",
[{{D1,D2},[1]},{{D1,D4},[4]},{{D2,D1},[2]},
- {{D2,D4},[5]},{{D3,D2},[3]},{{D5,D6},[6]},{{D6,D4},[7]}],
+ {{D2,D4},[5]},{{D3,D2},[3]},{{D5,D6},[6]},{{D6,D4},[7]}],
S),
?line {ok, _} = eval("(E | m1) + (EE | m1)",
- [E1,E2,E3,E4,E5,E6,E7,{F1,F1},{F3,F1},{F3,F4},{F5,F4}],
+ [E1,E2,E3,E4,E5,E6,E7,{F1,F1},{F3,F1},{F3,F4},{F5,F4}],
S),
?line {ok, _} = eval("(Lin)(E | m1)",
[{E4,[4]},{E1,[1]},{E2,[2]},{E5,[5]},
@@ -567,7 +567,7 @@ lines(Conf) when is_list(Conf) ->
loops(suite) -> [];
loops(doc) -> ["More Inter Call Graph, loops and \"unusual\" cases"];
loops(Conf) when is_list(Conf) ->
- S0 = new(),
+ S0 = new(),
F1 = {m1,f1,1}, % X
F2 = {m1,f2,2},
@@ -582,7 +582,7 @@ loops(Conf) when is_list(Conf) ->
E3 = {F3,F4},
E4 = {F4,F5},
E5 = {F5,F3}, % X
-
+
D1 = {F1,1},
D2 = {F2,2},
D3 = {F3,3},
@@ -598,7 +598,7 @@ loops(Conf) when is_list(Conf) ->
LCallAt_m1 = [{E2,2},{E3,3},{E4,4}],
XCallAt_m1 = [{E1,1},{E5,5}],
Info1 = #xref_mod{name = m1, app_name = [a1]},
- ?line S1 = add_module(S0, Info1, DefAt_m1, X_m1, LCallAt_m1, XCallAt_m1,
+ ?line S1 = add_module(S0, Info1, DefAt_m1, X_m1, LCallAt_m1, XCallAt_m1,
XC_m1, LC_m1),
?line S = set_up(S1),
@@ -659,16 +659,16 @@ modules(Conf) when is_list(Conf) ->
?line {ok, y} = compile:file(Y, [debug_info, {outdir,EB1_1}]),
?line {ok, S0} = xref_base:new([{xref_mode, modules}]),
- ?line {ok, release2, S1} =
+ ?line {ok, release2, S1} =
xref_base:add_release(S0, Dir, [{name,release2}]),
?line S = set_up(S1),
?line {{error, _, {unavailable_analysis, undefined_function_calls}}, _} =
xref_base:analyze(S, undefined_function_calls),
- ?line {{error, _, {unavailable_analysis, locals_not_used}}, _} =
+ ?line {{error, _, {unavailable_analysis, locals_not_used}}, _} =
xref_base:analyze(S, locals_not_used),
- ?line {{error, _, {unavailable_analysis, {call, foo}}}, _} =
+ ?line {{error, _, {unavailable_analysis, {call, foo}}}, _} =
xref_base:analyze(S, {call, foo}),
- ?line {{error, _, {unavailable_analysis, {use, foo}}}, _} =
+ ?line {{error, _, {unavailable_analysis, {use, foo}}}, _} =
xref_base:analyze(S, {use, foo}),
?line analyze(undefined_functions, [{x,undef,0}], S),
?line 5 = length(xref_base:info(S)),
@@ -681,7 +681,7 @@ modules(Conf) when is_list(Conf) ->
ok.
files(suite) ->
- [add, default, info, lib, read, read2, remove, replace, update,
+ [add, default, info, lib, read, read2, remove, replace, update,
deprecated, trycatch, abstract_modules, fun_mfa, qlc].
add(suite) -> [];
@@ -708,7 +708,7 @@ add(Conf) when is_list(Conf) ->
{unix, _} ->
?line make_udir(UDir),
?line make_ufile(UFile);
- _ ->
+ _ ->
true
end,
@@ -743,20 +743,20 @@ add(Conf) when is_list(Conf) ->
xref_base:add_release(S, foo, [{builtins,not_a_value}]),
?line {error, _, {invalid_filename,{foo,bar}}} =
xref_base:add_release(S, {foo,bar}, []),
- ?line {ok, S1} =
+ ?line {ok, S1} =
xref_base:set_default(S, [{verbose,false}, {warnings, false}]),
?line case os:type() of
{unix, _} ->
- ?line {error, _, {file_error, _, _}} =
+ ?line {error, _, {file_error, _, _}} =
xref_base:add_release(S, UDir);
_ ->
true
end,
- ?line {error, _, {file_error, _, _}} =
+ ?line {error, _, {file_error, _, _}} =
xref_base:add_release(S, fname(["/a/b/c/d/e/f","__foo"])),
- ?line {ok, release2, S2} =
+ ?line {ok, release2, S2} =
xref_base:add_release(S1, Dir, [{name,release2}]),
- ?line {error, _, {module_clash, {x, _, _}}} =
+ ?line {error, _, {module_clash, {x, _, _}}} =
xref_base:add_module(S2, Xbeam),
?line {ok, S3} = xref_base:remove_release(S2, release2),
?line {ok, rel2, S4} = xref_base:add_release(S3, Dir),
@@ -764,11 +764,11 @@ add(Conf) when is_list(Conf) ->
xref_base:add_release(S4, Dir),
?line {ok, S5} = xref_base:remove_release(S4, rel2),
%% One unreadable file and one JAM file found (no verification here):
- ?line {ok, [], S6} = xref_base:add_directory(S5, fname(CopyDir,"dir"),
+ ?line {ok, [], S6} = xref_base:add_directory(S5, fname(CopyDir,"dir"),
[{recurse,true}, {warnings,true}]),
?line case os:type() of
{unix, _} ->
- ?line {error, _, {file_error, _, _}} =
+ ?line {error, _, {file_error, _, _}} =
xref_base:add_directory(S6, UDir);
_ ->
true
@@ -803,7 +803,7 @@ default(Conf) when is_list(Conf) ->
xref_base:set_default(S, [not_an_option]),
?line D = xref_base:get_default(S),
- ?line [{builtins,false},{recurse,false},{verbose,false},{warnings,true}] =
+ ?line [{builtins,false},{recurse,false},{verbose,false},{warnings,true}] =
D,
?line ok = xref_base:delete(S),
@@ -831,7 +831,7 @@ info(Conf) when is_list(Conf) ->
?line {error, _, {no_such_info, release}} = xref:info(s, release),
?line {error, _, {no_such_info, release}} = xref:info(s, release, rel),
?line {error, _, {no_such_module, mod}} = xref:info(s, modules, mod),
- ?line {error, _, {no_such_application, app}} =
+ ?line {error, _, {no_such_application, app}} =
xref:info(s, applications, app),
?line {error, _, {no_such_release, rel}} = xref:info(s, releases, rel),
?line ok = xref:set_default(s, [{verbose,false}, {warnings, false}]),
@@ -845,9 +845,9 @@ info(Conf) when is_list(Conf) ->
?line [{rel2,_}] = xref:info(s, releases, rel2),
?line {error, _, {no_such_library, foo}} = xref:info(s, libraries, [foo]),
- ?line {ok, lib1} =
+ ?line {ok, lib1} =
compile:file(fname(LDir,lib1),[debug_info,{outdir,LDir}]),
- ?line {ok, lib2} =
+ ?line {ok, lib2} =
compile:file(fname(LDir,lib2),[debug_info,{outdir,LDir}]),
?line ok = xref:set_library_path(s, [LDir], [{verbose,false}]),
?line [{lib1,_}, {lib2, _}] = xref:info(s, libraries),
@@ -883,13 +883,13 @@ lib(Conf) when is_list(Conf) ->
xref:set_library_path(s, ["foo"], [not_an_option]),
?line {error, _, {invalid_path,otp}} = xref:set_library_path(s,otp),
?line {error, _, {invalid_path,[""]}} = xref:set_library_path(s,[""]),
- ?line {error, _, {invalid_path,[[$a | $b]]}} =
+ ?line {error, _, {invalid_path,[[$a | $b]]}} =
xref:set_library_path(s,[[$a | $b]]),
?line {error, _, {invalid_path,[otp]}} = xref:set_library_path(s,[otp]),
?line {ok, []} = xref:get_library_path(s),
?line ok = xref:set_library_path(s, [Dir], [{verbose,false}]),
?line {ok, UnknownFunctions} = xref:q(s, "U"),
- ?line [{lib1,unknown,0}, {lib2,local,0},
+ ?line [{lib1,unknown,0}, {lib2,local,0},
{lib2,unknown,0}, {unknown,unknown,0}]
= UnknownFunctions,
?line {ok, [{lib2,f,0},{lib3,f,0}]} = xref:q(s, "DF"),
@@ -934,7 +934,7 @@ lib(Conf) when is_list(Conf) ->
?line ok = xref:set_default(s, [{verbose,false}, {warnings, false}]),
?line {ok, cp} = xref:add_module(s, fname(Dir,"cp.beam")),
?line {ok, [{lists, sort, 1}]} = xref:q(s, "U"),
- ?line ok = xref:set_library_path(s, code_path),
+ ?line ok = xref:set_library_path(s, code_path),
?line {ok, []} = xref:q(s, "U"),
?line check_state(s),
?line xref:stop(s),
@@ -1010,18 +1010,18 @@ do_read(File, Version) ->
?line {ok, CallsB} = xref:q(s, "(Lin) (E - UC) "),
?line ok = check_state(s),
?line {ok, XU} = xref:q(s, "XU"),
- ?line Erl = set([{erlang,length,1},{erlang,integer,1},
+ ?line Erl = set([{erlang,length,1},{erlang,integer,1},
{erlang,binary_to_term,1}]),
- ?line [{erlang,binary_to_term,1},{erlang,length,1}] =
+ ?line [{erlang,binary_to_term,1},{erlang,length,1}] =
to_external(intersection(set(XU), Erl)),
- ?line xref:stop(s).
+ ?line xref:stop(s).
%% What is expected when xref_SUITE_data/read/read.erl is added:
read_expected(Version) ->
%% Line positions in xref_SUITE_data/read/read.erl:
- POS1 = 28, POS2 = POS1+10, POS3 = POS2+6, POS4 = POS3+6, POS5 = POS4+10,
- POS6 = POS5+5, POS7 = POS6+6, POS8 = POS7+6, POS9 = POS8+8,
- POS10 = POS9+10, POS11 = POS10+7, POS12 = POS11+8, POS13 = POS12+10,
+ POS1 = 28, POS2 = POS1+10, POS3 = POS2+6, POS4 = POS3+6, POS5 = POS4+10,
+ POS6 = POS5+5, POS7 = POS6+6, POS8 = POS7+6, POS9 = POS8+8,
+ POS10 = POS9+10, POS11 = POS10+7, POS12 = POS11+8, POS13 = POS12+10,
POS14 = POS13+18, % POS15 = POS14+23,
FF = {read,funfuns,0},
@@ -1162,7 +1162,7 @@ read_expected(Version) ->
{POS14+17,{{read,bi,0},{read,bi,0}}}],
OK = case Version of
- abstract_v1 ->
+ abstract_v1 ->
[{POS8+3, {FF,{erlang,apply,3}}},
{POS10+1, {FF,{erlang,apply,3}}},
{POS10+6, {FF,{erlang,apply,3}}}]
@@ -1170,7 +1170,7 @@ read_expected(Version) ->
[{0,{FF,{read,'$F_EXPR',178}}},
{0,{FF,{modul,'$F_EXPR',179}}}]
++ O1;
- _ ->
+ _ ->
% [{POS15+2,{{read,bi,0},{foo,t,0}}},
% {POS15+3,{{read,bi,0},{bar,t,0}}},
% {POS15+6,{{read,bi,0},{read,local,0}}},
@@ -1183,18 +1183,34 @@ read_expected(Version) ->
end,
%% When builtins =:= true:
- OKB = [{POS13+1,{FF,{erts_debug,apply,4}}},
- {POS13+2,{FF,{erts_debug,apply,4}}},
- {POS13+3,{FF,{erts_debug,apply,4}}},
- {POS1+3, {FF,{erlang,binary_to_term,1}}},
- {POS3+1, {FF,{erlang,spawn,3}}},
- {POS3+2, {FF,{erlang,spawn,3}}},
- {POS3+3, {FF,{erlang,spawn_link,3}}},
- {POS3+4, {FF,{erlang,spawn_link,3}}},
- {POS6+4, {FF,{erlang,spawn,3}}},
- {POS13+5, {{read,bi,0},{erlang,length,1}}},
- {POS14+3, {{read,bi,0},{erlang,length,1}}}]
- ++ OK,
+ OKB1 = [{POS13+1,{FF,{erts_debug,apply,4}}},
+ {POS13+2,{FF,{erts_debug,apply,4}}},
+ {POS13+3,{FF,{erts_debug,apply,4}}},
+ {POS1+3, {FF,{erlang,binary_to_term,1}}},
+ {POS3+1, {FF,{erlang,spawn,3}}},
+ {POS3+2, {FF,{erlang,spawn,3}}},
+ {POS3+3, {FF,{erlang,spawn_link,3}}},
+ {POS3+4, {FF,{erlang,spawn_link,3}}},
+ {POS6+4, {FF,{erlang,spawn,3}}},
+ {POS13+5, {{read,bi,0},{erlang,length,1}}},
+ {POS14+3, {{read,bi,0},{erlang,length,1}}}],
+
+ %% Operators (OTP-8647):
+ OKB = case Version of
+ abstract_v1 ->
+ [];
+ _ ->
+ [{POS13+16, {{read,bi,0},{erlang,'!',2}}},
+ {POS13+16, {{read,bi,0},{erlang,'-',1}}},
+ {POS13+16, {{read,bi,0},{erlang,self,0}}}]
+ end
+ ++ [{POS14+19, {{read,bi,0},{erlang,'+',2}}},
+ {POS14+21, {{read,bi,0},{erlang,'+',2}}},
+ {POS13+16, {{read,bi,0},{erlang,'==',2}}},
+ {POS14+15, {{read,bi,0},{erlang,'==',2}}},
+ {POS13+5, {{read,bi,0},{erlang,'>',2}}},
+ {POS14+3, {{read,bi,0},{erlang,'>',2}}}]
+ ++ OKB1 ++ OK,
{U, OK, OKB}.
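
The new OKB entries correspond to OTP-8647: when a module is added with {builtins,true}, xref now records operator uses such as '!'/2, '=='/2, '+'/2 and unary '-'/1 as calls to the matching erlang BIFs (except for old abstract_v1 debug info, where that information is not available). A minimal sketch of how this surfaces in a query; the module name sample and the beam path are assumptions, not part of the suite:

    {ok, _Pid} = xref:start(s),
    {ok, sample} = xref:add_module(s, "sample.beam", [{builtins, true}]),
    {ok, Calls} = xref:q(s, "E | sample : Mod"),
    %% Calls now includes entries such as {{sample,f,1},{erlang,'!',2}} for a
    %% send expression and {{sample,f,1},{erlang,'==',2}} for a comparison.
    xref:stop(s).
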
@@ -1217,9 +1233,9 @@ read2(Conf) when is_list(Conf) ->
spawn_opt(fun() -> foo end, [link]),
spawn_opt(f(),
{read2,f}, [{min_heap_size,1000}]),
- spawn_opt(f(),
+ spawn_opt(f(),
fun() -> f() end, [flopp]),
- spawn_opt(f(),
+ spawn_opt(f(),
read2, f, [], []);
f() ->
%% Duplicated unresolved calls are ignored:
@@ -1237,7 +1253,7 @@ read2(Conf) when is_list(Conf) ->
?line {ok, U2} = xref:q(s, "(Lin) UC"),
?line {ok, OK2} = xref:q(s, "(Lin) (E - UC)"),
?line true = U =:= U2,
- ?line true = OK =:= OK2,
+ ?line true = OK =:= OK2,
?line ok = check_state(s),
?line xref:stop(s),
@@ -1304,7 +1320,7 @@ replace(Conf) when is_list(Conf) ->
?line {ok, true} = xref:set_default(s, warnings, false),
?line {ok, rel2} = xref:add_release(s, Dir, []),
?line {error, _, _} = xref:replace_application(s, app1, "no_data"),
- ?line {error, _, {no_such_application, app12}} =
+ ?line {error, _, {no_such_application, app12}} =
xref:replace_application(s, app12, A1_0, []),
?line {error, _, {invalid_filename,{foo,bar}}} =
xref:replace_application(s, app1, {foo,bar}, []),
@@ -1312,7 +1328,7 @@ replace(Conf) when is_list(Conf) ->
xref:replace_application(s, foo, bar, [not_an_option]),
?line {error, _, {invalid_options,[{builtins,not_a_value}]}} =
xref:replace_application(s, foo, bar, [{builtins,not_a_value}]),
- ?line {ok, app1} =
+ ?line {ok, app1} =
xref:replace_application(s, app1, A1_0),
?line [{_, AppInfo}] = xref:info(s, applications, app1),
?line {value, {release, [rel2]}} = keysearch(release, 1, AppInfo),
@@ -1332,14 +1348,14 @@ replace(Conf) when is_list(Conf) ->
?line {ok, x} = compile:file(X, [no_debug_info, {outdir,EB1_1}]),
?line {error, _, {no_debug_info, _}} = xref:replace_module(s, x, Xbeam),
- ?line {error, _, {module_mismatch, x,y}} =
+ ?line {error, _, {module_mismatch, x,y}} =
xref:replace_module(s, x, Ybeam),
?line case os:type() of
{unix, _} ->
?line hide_file(Ybeam),
- ?line {error, _, {file_error, _, _}} =
+ ?line {error, _, {file_error, _, _}} =
xref:replace_module(s, x, Ybeam);
- _ ->
+ _ ->
true
end,
?line ok = xref:remove_module(s, x),
@@ -1362,16 +1378,16 @@ update(Conf) when is_list(Conf) ->
Source = fname(Dir, "x.erl"),
Beam = fname(Dir, "x.beam"),
?line copy_file(fname(Dir, "x.erl.1"), Source),
- ?line {ok, x} = compile:file(Source, [debug_info, {outdir,Dir}]),
-
+ ?line {ok, x} = compile:file(Source, [debug_info, {outdir,Dir}]),
+
?line {ok, _} = start(s),
- ?line ok = xref:set_default(s, [{verbose,false}, {warnings, false}]),
+ ?line ok = xref:set_default(s, [{verbose,false}, {warnings, false}]),
?line {ok, [x]} = xref:add_directory(s, Dir, [{builtins,true}]),
?line {error, _, {invalid_options,[not_an_option]}} =
xref:update(s, [not_an_option]),
?line {ok, []} = xref:update(s),
?line {ok, [{erlang,atom_to_list,1}]} = xref:q(s, "XU"),
-
+
?line [{x, ModInfo}] = xref:info(s, modules, x),
?line case keysearch(directory, 1, ModInfo) of
{value, {directory, Dir}} -> ok
@@ -1379,7 +1395,7 @@ update(Conf) when is_list(Conf) ->
timer:sleep(2000), % make sure modification time has changed
?line copy_file(fname(Dir, "x.erl.2"), Source),
- ?line {ok, x} = compile:file(Source, [debug_info, {outdir,Dir}]),
+ ?line {ok, x} = compile:file(Source, [debug_info, {outdir,Dir}]),
?line {ok, [x]} = xref:update(s, []),
?line {ok, [{erlang,list_to_atom,1}]} = xref:q(s, "XU"),
@@ -1454,11 +1470,11 @@ deprecated(Conf) when is_list(Conf) ->
DF = usort(DF_3++[{{M9,t,0},{M9,f,1}}]),
?line {ok,DF} = xref:analyze(s, deprecated_function_calls),
- ?line {ok,DF_1} =
+ ?line {ok,DF_1} =
xref:analyze(s, {deprecated_function_calls,next_version}),
- ?line {ok,DF_2} =
+ ?line {ok,DF_2} =
xref:analyze(s, {deprecated_function_calls,next_major_release}),
- ?line {ok,DF_3} =
+ ?line {ok,DF_3} =
xref:analyze(s, {deprecated_function_calls,eventually}),
D = to_external(range(from_term(DF))),
@@ -1467,11 +1483,11 @@ deprecated(Conf) when is_list(Conf) ->
D_3 = to_external(range(from_term(DF_3))),
?line {ok,D} = xref:analyze(s, deprecated_functions),
- ?line {ok,D_1} =
+ ?line {ok,D_1} =
xref:analyze(s, {deprecated_functions,next_version}),
- ?line {ok,D_2} =
+ ?line {ok,D_2} =
xref:analyze(s, {deprecated_functions,next_major_release}),
- ?line {ok,D_3} =
+ ?line {ok,D_3} =
xref:analyze(s, {deprecated_functions,eventually}),
?line ok = check_state(s),
@@ -1516,11 +1532,11 @@ deprecated(Conf) when is_list(Conf) ->
DFa = DFa_3,
?line {ok,DFa} = xref:analyze(s, deprecated_function_calls),
- ?line {ok,DFa_1} =
+ ?line {ok,DFa_1} =
xref:analyze(s, {deprecated_function_calls,next_version}),
- ?line {ok,DFa_2} =
+ ?line {ok,DFa_2} =
xref:analyze(s, {deprecated_function_calls,next_major_release}),
- ?line {ok,DFa_3} =
+ ?line {ok,DFa_3} =
xref:analyze(s, {deprecated_function_calls,eventually}),
?line ok = check_state(s),
@@ -1564,11 +1580,11 @@ deprecated(Conf) when is_list(Conf) ->
DFb = usort(DFb_2++[{{M,bar,2},{M,t,0}},{{M,g,3},{M,bar,2}}]),
?line {ok,DFb} = xref:analyze(s, deprecated_function_calls),
- ?line {ok,DFb_1} =
+ ?line {ok,DFb_1} =
xref:analyze(s, {deprecated_function_calls,next_version}),
- ?line {ok,DFb_2} =
+ ?line {ok,DFb_2} =
xref:analyze(s, {deprecated_function_calls,next_major_release}),
- ?line {ok,DFb_3} =
+ ?line {ok,DFb_3} =
xref:analyze(s, {deprecated_function_calls,eventually}),
?line ok = check_state(s),
@@ -1599,7 +1615,7 @@ trycatch(Conf) when is_list(Conf) ->
catch
error:a -> err:e1();
error:b -> err:e2()
- after
+ after
fini:shed()
end.
">>,
@@ -1616,7 +1632,7 @@ trycatch(Conf) when is_list(Conf) ->
{{{A,A,0},{err,e2,0}},[13]},
{{{A,A,0},{fini,shed,0}},[15]},
{{{A,A,0},{foo,bar,0}},[7]},
- {{{A,A,0},{foo,foo,0}},[9]}]} =
+ {{{A,A,0},{foo,foo,0}},[9]}]} =
xref:q(s, "(Lin) (E | trycatch:trycatch/0)"),
?line ok = check_state(s),
@@ -1662,7 +1678,7 @@ abstract_modules(Conf) when is_list(Conf) ->
{{{A,args,1},{A,local,1}},[6]},
{{{A,args,1},{A,new,2}},[8]},
{{{A,local,1},{A,module_info,1}},[12]},
- {{{param,new,2},{param,instance,2}},[0]}]} =
+ {{{param,new,2},{param,instance,2}},[0]}]} =
xref:q(s, "(Lin) E"),
?line {ok,[{param,args,1},
{param,instance,2},
@@ -1747,10 +1763,10 @@ qlc(Conf) when is_list(Conf) ->
t() ->
dets:open_file(t, []),
dets:insert(t, [{1,a},{2,b},{3,c},{4,d}]),
- MS = ets:fun2ms(fun({X,Y}) when (X > 1) or (X < 5) -> {Y}
+ MS = ets:fun2ms(fun({X,Y}) when (X > 1) or (X < 5) -> {Y}
end),
QH1 = dets:table(t, [{traverse, {select, MS}}]),
- QH2 = qlc:q([{Y} || {X,Y} <- dets:table(t),
+ QH2 = qlc:q([{Y} || {X,Y} <- dets:table(t),
(X > 1) or (X < 5)]),
true = qlc:info(QH1) =:= qlc:info(QH2),
dets:close(t),
@@ -1783,7 +1799,7 @@ analyze(Conf) when is_list(Conf) ->
xref_base:analyze(S0, undefined_function_calls, [not_an_option]),
?line {{error, _, {invalid_query,{q}}}, _} = xref_base:q(S0,{q}),
?line {{error, _, {unknown_analysis,foo}}, _} = xref_base:analyze(S0, foo),
- ?line {{error, _, {unknown_constant,"foo:bar/-1"}}, _} =
+ ?line {{error, _, {unknown_constant,"foo:bar/-1"}}, _} =
xref_base:analyze(S0, {use,{foo,bar,-1}}),
CopyDir = ?copydir,
@@ -1803,30 +1819,30 @@ analyze(Conf) when is_list(Conf) ->
?line {ok, rel2, S1} = xref_base:add_release(S0, Dir, [{verbose,false}]),
?line S = set_up(S1),
- ?line {ok, _} =
+ ?line {ok, _} =
analyze(undefined_function_calls, [{{x,xx,0},{x,undef,0}}], S),
?line {ok, _} = analyze(undefined_functions, [{x,undef,0}], S),
?line {ok, _} = analyze(locals_not_used, [{x,l,0},{x,l1,0}], S),
?line {ok, _} = analyze(exports_not_used, [{x,xx,0},{y,t,0}], S),
- ?line {ok, _} =
+ ?line {ok, _} =
analyze(deprecated_function_calls, [{{y,t,0},{x,t,0}}], S),
?line {ok, _} = analyze({deprecated_function_calls,next_version}, [], S),
- ?line {ok, _} =
+ ?line {ok, _} =
analyze({deprecated_function_calls,next_major_release}, [], S),
- ?line {ok, _} = analyze({deprecated_function_calls,eventually},
+ ?line {ok, _} = analyze({deprecated_function_calls,eventually},
[{{y,t,0},{x,t,0}}], S),
?line {ok, _} = analyze(deprecated_functions, [{x,t,0}], S),
?line {ok, _} = analyze({deprecated_functions,next_version}, [], S),
- ?line {ok, _} =
+ ?line {ok, _} =
analyze({deprecated_functions,next_major_release}, [], S),
?line {ok, _} = analyze({deprecated_functions,eventually}, [{x,t,0}], S),
?line {ok, _} = analyze({call, {x,xx,0}}, [{x,undef,0}], S),
- ?line {ok, _} =
+ ?line {ok, _} =
analyze({call, [{x,xx,0},{x,l,0}]}, [{x,l1,0},{x,undef,0}], S),
?line {ok, _} = analyze({use, {x,l,0}}, [{x,l1,0}], S),
- ?line {ok, _} =
+ ?line {ok, _} =
analyze({use, [{x,l,0},{x,l1,0}]}, [{x,l,0},{x,l1,0}], S),
?line {ok, _} = analyze({module_call, x}, [x], S),
@@ -1881,7 +1897,7 @@ basic(Conf) when is_list(Conf) ->
LCallAt_m1 = [{E7,12}],
XCallAt_m1 = [{E1,13},{E2,17},{E4,7}],
Info1 = #xref_mod{name = m1, app_name = [a1]},
- ?line S1 = add_module(S0, Info1, DefAt_m1, X_m1, LCallAt_m1, XCallAt_m1,
+ ?line S1 = add_module(S0, Info1, DefAt_m1, X_m1, LCallAt_m1, XCallAt_m1,
XC_m1, LC_m1),
D2 = {F2,7},
@@ -1895,7 +1911,7 @@ basic(Conf) when is_list(Conf) ->
LCallAt_m2 = [],
XCallAt_m2 = [{E3,96},{E6,12},{UE1,77}],
Info2 = #xref_mod{name = m2, app_name = [a2]},
- ?line S2 = add_module(S1, Info2, DefAt_m2, X_m2, LCallAt_m2, XCallAt_m2,
+ ?line S2 = add_module(S1, Info2, DefAt_m2, X_m2, LCallAt_m2, XCallAt_m2,
XC_m2, LC_m2),
D4 = {F4,6},
@@ -1908,7 +1924,7 @@ basic(Conf) when is_list(Conf) ->
LCallAt_m3 = [{E5,19}],
XCallAt_m3 = [{UE2,22}],
Info3 = #xref_mod{name = m3, app_name = [a3]},
- ?line S3 = add_module(S2, Info3, DefAt_m3, X_m3, LCallAt_m3, XCallAt_m3,
+ ?line S3 = add_module(S2, Info3, DefAt_m3, X_m3, LCallAt_m3, XCallAt_m3,
XC_m3, LC_m3),
Info4 = #xref_mod{name = m4, app_name = [a2]},
@@ -1955,7 +1971,7 @@ basic(Conf) when is_list(Conf) ->
?line {ok, _} = eval(f("(Mod) ~p", [[F1,F6,F5]]), [m1,m3], S),
?line {ok, _} = eval("(Lin) M - (Lin) m1",
[{F2,7},{F3,9},{F7,19},{F4,6},{F5,97},{UF2,0}], S),
- ?line {ok, _} = eval(f("(Lin) M * (Lin) ~p", [[F1,F6]]),
+ ?line {ok, _} = eval(f("(Lin) M * (Lin) ~p", [[F1,F6]]),
[{F1,12},{F6,3}], S),
?line {ok, _} = eval(f("X * ~p", [[F1, F2, F3, F4, F5]]), [F3, F4], S),
@@ -1976,7 +1992,7 @@ basic(Conf) when is_list(Conf) ->
?line {ok, _} = eval(f("(XXL) (Lin) (XC | ~p)", [F1]),
[{{D1,D3},[13]},{{D1,D4},[7]}],S),
?line {ok, _} = eval(f("XC | (~p + ~p)", [F1, F2]), [E1,E4,E3,UE1], S),
- ?line {ok, _} = eval(f("(XXL) (Lin) (XC | ~p)", [F1]),
+ ?line {ok, _} = eval(f("(XXL) (Lin) (XC | ~p)", [F1]),
[{{D1,D3},[13]},{{D1,D4},[7]}], S),
?line {ok, _} = eval("LC | m3", [E5], S),
?line {ok, _} = eval(f("LC | ~p", [F1]), [E7], S),
@@ -1984,7 +2000,7 @@ basic(Conf) when is_list(Conf) ->
?line {ok, _} = eval("E | m1", [E1,E2,E4,E7], S),
?line {ok, _} = eval(f("E | ~p", [F1]), [E1,E7,E4], S),
?line {ok, _} = eval(f("E | (~p + ~p)", [F1, F2]), [E1,E7,E4,E3,UE1], S),
-
+
?line {ok, _} = eval("XC || m1", [E3,UE2], S),
?line {ok, _} = eval(f("XC || ~p", [F6]), [E3], S),
?line {ok, _} = eval(f("XC || (~p + ~p)", [F4, UF2]), [UE1,E4,E6], S),
@@ -2012,18 +2028,18 @@ basic(Conf) when is_list(Conf) ->
?line {ok, _} = eval("components V", type_error, S),
?line {ok, _} = eval("components E + components E", type_error, S),
- ?line {ok, _} = eval(f("range (closure E | ~p)", [[F1,F2]]),
+ ?line {ok, _} = eval(f("range (closure E | ~p)", [[F1,F2]]),
[F6,F3,F7,F4,F5,UF1,UF2], S),
- ?line {ok, _} =
+ ?line {ok, _} =
eval(f("domain (closure E || ~p)", [[UF2,F7]]), [F1,F2,F6], S),
?line {ok, _} = eval("components E", [], S),
?line {ok, _} = eval("components (Mod) E", [[m1,m2,m3]], S),
?line {ok, _} = eval("components closure (Mod) E", [[m1,m2,m3]], S),
- ?line {ok, _} = eval("condensation (Mod) E",
+ ?line {ok, _} = eval("condensation (Mod) E",
[{[m1,m2,m3],[m1,m2,m3]},{[m1,m2,m3],[m17]}], S),
- ?line {ok, _} = eval("condensation closure (Mod) E",
+ ?line {ok, _} = eval("condensation closure (Mod) E",
[{[m1,m2,m3],[m1,m2,m3]},{[m1,m2,m3],[m17]}], S),
- ?line {ok, _} = eval("condensation closure closure closure (Mod) E",
+ ?line {ok, _} = eval("condensation closure closure closure (Mod) E",
[{[m1,m2,m3],[m1,m2,m3]},{[m1,m2,m3],[m17]}], S),
?line {ok, _} = eval("weak condensation (Mod) E",
[{[m1,m2,m3],[m1,m2,m3]},{[m1,m2,m3],[m17]},{[m17],[m17]}], S),
@@ -2035,11 +2051,11 @@ basic(Conf) when is_list(Conf) ->
[[m1,m2,m3]], S),
%% |, ||, |||
- ?line {ok, _} = eval("(Lin) E || V", type_error, S),
- ?line {ok, _} = eval("E ||| (Lin) V", type_error, S),
+ ?line {ok, _} = eval("(Lin) E || V", type_error, S),
+ ?line {ok, _} = eval("E ||| (Lin) V", type_error, S),
?line {ok, _} = eval("E ||| m1", [E7], S),
?line {ok, _} = eval("closure E ||| m1", [E7,{F1,UF1},{F6,UF1}], S),
- ?line {ok, _} = eval("closure E ||| [m1,m2]",
+ ?line {ok, _} = eval("closure E ||| [m1,m2]",
[{F1,UF1},{F2,F7},{F1,F7},{F6,UF1},{F2,UF1},{F7,UF1},E7,E1,E2,E3], S),
?line {ok, _} = eval("AE | a1", [{a1,a1},{a1,a2},{a1,a3}], S),
@@ -2095,7 +2111,7 @@ md(Conf) when is_list(Conf) ->
Y = fname(Dir, "y__y.erl"),
Xbeam = fname(Dir, "x__x.beam"),
Ybeam = fname(Dir, "y__y.beam"),
-
+
?line {error, _, {invalid_filename,{foo,bar}}} = xref:m({foo,bar}),
?line {error, _, {invalid_filename,{foo,bar}}} = xref:d({foo,bar}),
@@ -2171,7 +2187,7 @@ variables(Conf) when is_list(Conf) ->
LCallAt_m1 = [],
XCallAt_m1 = [{E1,13},{E3,17}],
Info1 = #xref_mod{name = m1, app_name = [a1]},
- ?line S1 = add_module(S0, Info1, DefAt_m1, X_m1, LCallAt_m1, XCallAt_m1,
+ ?line S1 = add_module(S0, Info1, DefAt_m1, X_m1, LCallAt_m1, XCallAt_m1,
XC_m1, LC_m1),
D2 = {F2,7},
@@ -2183,11 +2199,11 @@ variables(Conf) when is_list(Conf) ->
LCallAt_m2 = [],
XCallAt_m2 = [{E2,96}],
Info2 = #xref_mod{name = m2, app_name = [a2]},
- ?line S2 = add_module(S1, Info2, DefAt_m2, X_m2, LCallAt_m2, XCallAt_m2,
+ ?line S2 = add_module(S1, Info2, DefAt_m2, X_m2, LCallAt_m2, XCallAt_m2,
XC_m2, LC_m2),
?line S = set_up(S2),
-
+
?line eval("T1=E, T2=E*T1, T3 = T2*T2, T4=range T3, T5=T3|T4, T5",
[E1,E2,E3], S),
?line eval("((E*E)*(E*E)) | (range ((E*E)*(E*E)))",
@@ -2202,16 +2218,16 @@ variables(Conf) when is_list(Conf) ->
?line {ok, S102} = eval("T2 := E | m2", [E2], S101),
?line {{ok, [{user, ['T0', 'T1', 'T2']}]}, _} = xref_base:variables(S102),
?line {ok, S103} = xref_base:forget(S102, 'T0'),
- ?line {{ok, [{user, ['T1', 'T2']}]}, S104} =
+ ?line {{ok, [{user, ['T1', 'T2']}]}, S104} =
xref_base:variables(S103, [user]),
?line {ok, S105} = xref_base:forget(S104),
?line {{ok, [{user, []}]}, S106} = xref_base:variables(S105),
- ?line {{ok, [{predefined,_}]}, S107_0} =
+ ?line {{ok, [{predefined,_}]}, S107_0} =
xref_base:variables(S106, [predefined]),
- ?line {ok, S107_1} =
+ ?line {ok, S107_1} =
eval("TT := E, TT2 := V, TT1 := TT * TT", [E1,E2,E3], S107_0),
- ?line {{ok, [{user, ['TT', 'TT1', 'TT2']}]}, _} =
+ ?line {{ok, [{user, ['TT', 'TT1', 'TT2']}]}, _} =
xref_base:variables(S107_1),
?line {ok, S107} = xref_base:forget(S107_1),
@@ -2220,14 +2236,14 @@ variables(Conf) when is_list(Conf) ->
Beam = fname(Dir, "lib1.beam"),
?line copy_file(fname(Dir, "lib1.erl"), Beam),
- ?line {ok, S108} =
+ ?line {ok, S108} =
xref_base:set_library_path(S107, [Dir], [{verbose,false}]),
?line {{error, _, _}, _} = xref_base:variables(S108, [{verbose,false}]),
?line {ok, S109} = xref_base:set_library_path(S108, [], [{verbose,false}]),
?line Tabs = length(ets:all()),
- ?line {ok, S110} = eval("Eplus := closure E, TT := Eplus",
+ ?line {ok, S110} = eval("Eplus := closure E, TT := Eplus",
'closure()', S109),
?line {{ok, [{user, ['Eplus','TT']}]}, S111} = xref_base:variables(S110),
?line {ok, S112} = xref_base:forget(S111, ['TT','Eplus']),
@@ -2289,7 +2305,7 @@ unused_locals(Conf) when is_list(Conf) ->
?line {ok, []} = xref:analyse(s, locals_not_used),
?line ok = check_state(s),
?line xref:stop(s),
-
+
?line ok = file:delete(File1),
?line ok = file:delete(Beam1),
?line ok = file:delete(File2),
@@ -2303,7 +2319,7 @@ format_error(suite) -> [];
format_error(doc) -> ["Format error messages"];
format_error(Conf) when is_list(Conf) ->
?line {ok, _Pid} = start(s),
- ?line ok = xref:set_default(s, [{verbose,false}, {warnings, false}]),
+ ?line ok = xref:set_default(s, [{verbose,false}, {warnings, false}]),
%% Parse error messages.
?line "Invalid regular expression \"add(\"" ++ _ =
@@ -2332,7 +2348,7 @@ format_error(Conf) when is_list(Conf) ->
%% Other messages
?line 'Variable \'QQ\' used before set\n' =
fatom(xref:q(s,"QQ")),
- ?line 'Unknown constant a\n' =
+ ?line 'Unknown constant a\n' =
fatom(xref:q(s,"{a} of E")),
%% Testing xref_parser:t2s/1.
@@ -2341,12 +2357,12 @@ format_error(Conf) when is_list(Conf) ->
?line 'Variable assigned more than once: E = E + E\n' =
fatom(xref:q(s,"E=E + E")),
?line "Operator applied to argument(s) of different or invalid type(s): "
- "E + V * V\n" =
+ "E + V * V\n" =
flatten(xref:format_error(xref:q(s,"E + (V * V)"))),
?line {error,xref_compiler,{type_error,"(V + V) * E"}} =
xref:q(s,"(V + V) * E"),
?line "Type does not match structure of constant: [m:f/3 -> g:h/17] : "
- "App\n" =
+ "App\n" =
flatten(xref:format_error(xref:q(s,"[{{m,f,3},{g,h,17}}] : App"))),
?line 'Type does not match structure of constant: [m -> f, g -> h] : Fun\n'
= fatom(xref:q(s,"[{m,f},g->h] : Fun")),
@@ -2360,11 +2376,11 @@ format_error(Conf) when is_list(Conf) ->
xref:q(s,"condensation (# E + # V)"),
?line {error,xref_compiler,{type_error,"range (# E + # E)"}} =
xref:q(s,"range (#E + #E)"),
- ?line {error,xref_compiler,{type_error,"range (# E)"}} =
+ ?line {error,xref_compiler,{type_error,"range (# E)"}} =
xref:q(s,"range #E"), % Hm...
?line {error,xref_compiler,{type_error,"E + # E"}} =
xref:q(s,"E + #E + #E"), % Hm...
- ?line {error,xref_compiler,{type_error,"V * E || V | V"}} =
+ ?line {error,xref_compiler,{type_error,"V * E || V | V"}} =
xref:q(s,"V * (E || V) | V"),
?line {error,xref_compiler,{type_error,"E || (E | V)"}} =
xref:q(s,"V * E || (E | V)"),
@@ -2421,7 +2437,7 @@ eval(Query, E, S) ->
?format("------------------------------~n", []),
?format("Evaluating ~p~n", [Query]),
?line {Answer, NewState} = xref_base:q(S, Query, [{verbose, false}]),
- {Reply, Expected} =
+ {Reply, Expected} =
case Answer of
{ok, R} when is_list(E) ->
{unsetify(R), sort(E)};
@@ -2430,7 +2446,7 @@ eval(Query, E, S) ->
{error, _Module, Reason} ->
{element(1, Reason), E}
end,
- if
+ if
Reply =:= Expected ->
?format("As expected, got ~n~p~n", [Expected]),
{ok, NewState};
@@ -2442,7 +2458,7 @@ eval(Query, E, S) ->
analyze(Query, E, S) ->
?format("------------------------------~n", []),
?format("Evaluating ~p~n", [Query]),
- ?line {{ok, L}, NewState} =
+ ?line {{ok, L}, NewState} =
xref_base:analyze(S, Query, [{verbose, false}]),
case {unsetify(L), sort(E)} of
{X,X} ->
@@ -2461,7 +2477,7 @@ unsetify(S) ->
%% Note: assumes S has been set up; the new state is not returned
eval(Query, S) ->
- ?line {{ok, Answer}, _NewState} =
+ ?line {{ok, Answer}, _NewState} =
xref_base:q(S, Query, [{verbose, false}]),
unsetify(Answer).
@@ -2514,7 +2530,7 @@ check_state(S) ->
functions_mode_check(S, Info)
end.
-%% The manual mentions some facts that should always hold.
+%% The manual mentions some facts that should always hold.
%% Here they are again.
functions_mode_check(S, Info) ->
%% F = L + X,
@@ -2526,7 +2542,7 @@ functions_mode_check(S, Info) ->
?line {ok, V} = xref:q(S, "X + L + B + U"),
%% X, L, B and U are disjoint.
- ?line {ok, []} =
+ ?line {ok, []} =
xref:q(S, "X * L + X * B + X * U + L * B + L * U + B * U"),
%% V = UU + XU + LU,
@@ -2577,11 +2593,11 @@ functions_mode_check(S, Info) ->
?line {Local, Exported} = info(Info, no_functions),
?line LX = Local+Exported,
- ?line {ok, LXs} = xref:q(S, 'Extra = _:module_info/"(0|1)" + LM,
+ ?line {ok, LXs} = xref:q(S, 'Extra = _:module_info/"(0|1)" + LM,
# (F - Extra)'),
?line true = LX =:= LXs,
- ?line {LocalCalls, ExternalCalls, UnresCalls} =
+ ?line {LocalCalls, ExternalCalls, UnresCalls} =
info(Info, no_function_calls),
?line LEU = LocalCalls + ExternalCalls + UnresCalls,
?line {ok, LEU} = xref:q(S, "# LC + # XC"),
@@ -2635,7 +2651,7 @@ check_count(S) ->
%% {ok, A} = xref:q(S, 'A'),
{ok, M} = xref:q(S, 'AM'),
- {ok, _} = xref:q(S,
+ {ok, _} = xref:q(S,
"Extra := _:module_info/\"(0|1)\" + LM"),
%% info/1:
@@ -2670,7 +2686,7 @@ check_count(S) ->
ok.
info_module([M | Ms], S) ->
- {ok, NoCalls} = per_module("T = (E | ~p : Mod), # (XLin) T + # (LLin) T",
+ {ok, NoCalls} = per_module("T = (E | ~p : Mod), # (XLin) T + # (LLin) T",
M, S),
{ok, NoFunCalls} = per_module("# (E | ~p : Mod)", M, S),
{ok, NoXCalls} = per_module("# (XC | ~p : Mod)", M, S),
@@ -2719,14 +2735,14 @@ start(Server) ->
end.
add_erts_code_path(KernelPath) ->
- VersionDirs =
+ VersionDirs =
filelib:is_dir(
filename:join(
[code:lib_dir(),
lists:flatten(
["kernel-",
- [X ||
- {kernel,_,X} <-
+ [X ||
+ {kernel,_,X} <-
application_controller:which_applications()]])])),
case VersionDirs of
true ->
@@ -2746,5 +2762,5 @@ add_erts_code_path(KernelPath) ->
[KernelPath]
end
end.
-
-
+
+
diff --git a/lib/tools/test/xref_SUITE_data/read/read.erl b/lib/tools/test/xref_SUITE_data/read/read.erl
index 4a0cc280c3..19694c9e25 100644
--- a/lib/tools/test/xref_SUITE_data/read/read.erl
+++ b/lib/tools/test/xref_SUITE_data/read/read.erl
@@ -106,13 +106,13 @@ funfuns() ->
apply(m,f,a), % {m,f,-1}
3(a), % {'$M_EXPR','$F_EXPR',1}
apply(3,[a]), % {'$M_EXPR','$F_EXPR',1}
-
+
%% POS12=POS11+8
apply(A, A), % number of arguments is not known, {'$M_EXPR','$F_EXPR',-1}
Args0 = [list],
Args = [a | Args0], % number of arguments is known
apply(A, Args), % {'$M_EXPR','$F_EXPR',2}
- apply(m3, f3, Args), %
+ apply(m3, f3, Args), %
NotArgs = [is_not, a | list], % number of arguments is not known
apply(A, NotArgs), % {'$M_EXPR','$F_EXPR',-1}
apply(m4, f4, NotArgs), % {m4,f4,-1}
@@ -125,7 +125,7 @@ funfuns() ->
bi() when length([]) > 17 ->
foo:module_info(),
module_info(),
- A = tjo,
+ A = true andalso tjo ,
t:foo(A),
case true of
true when integer(1) ->
@@ -133,7 +133,7 @@ bi() when length([]) > 17 ->
false ->
X = flopp
end,
- X == A;
+ self() ! X == -A orelse false;
bi() ->
%% POS14=POS13+18
Z = fun(Y) -> Y end,
@@ -159,7 +159,7 @@ bi() ->
D + E + F.
%bi() ->
% %% POS15=POS14+13
-% try
+% try
% foo:t(),
% bar:t()
% of
@@ -169,7 +169,7 @@ bi() ->
% foo:t()
% catch
% {'EXIT',_} -> bar:t()
-% end.
+% end.
local() ->
true.
diff --git a/lib/tools/vsn.mk b/lib/tools/vsn.mk
index 13cf5af9f5..abe9a804f0 100644
--- a/lib/tools/vsn.mk
+++ b/lib/tools/vsn.mk
@@ -1,19 +1,19 @@
# This is an -*-makefile-*- file.
# %CopyrightBegin%
-#
-# Copyright Ericsson AB 1997-2009. All Rights Reserved.
-#
+#
+# Copyright Ericsson AB 1997-2010. All Rights Reserved.
+#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
# compliance with the License. You should have received a copy of the
# Erlang Public License along with this software. If not, it can be
# retrieved online at http://www.erlang.org/.
-#
+#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
# the License for the specific language governing rights and limitations
# under the License.
-#
+#
# %CopyrightEnd%
-TOOLS_VSN = 2.6.5.1
+TOOLS_VSN = 2.6.6
diff --git a/lib/tv/src/tv_io_lib_format.erl b/lib/tv/src/tv_io_lib_format.erl
index 5042fd3f9d..e043d9296e 100644
--- a/lib/tv/src/tv_io_lib_format.erl
+++ b/lib/tv/src/tv_io_lib_format.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1998-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1998-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
-module(tv_io_lib_format).
@@ -188,7 +188,7 @@ indentation([], I) -> I.
term(T, none, _Adj, none, _Pad) -> T;
term(T, none, Adj, P, Pad) -> term(T, P, Adj, P, Pad);
-term(T, F, Adj, none, Pad) -> term(T, F, Adj, min(flat_length(T), F), Pad);
+term(T, F, Adj, none, Pad) -> term(T, F, Adj, erlang:min(flat_length(T), F), Pad);
term(T, F, Adj, P, Pad) when F >= P ->
adjust_error(T, F, Adj, P, Pad).
@@ -316,7 +316,7 @@ fwrite_g(Fl, F, Adj, P, Pad) ->
string(S, none, _Adj, none, _Pad) -> S;
string(S, F, Adj, none, Pad) ->
- string(S, F, Adj, min(flat_length(S), F), Pad);
+ string(S, F, Adj, erlang:min(flat_length(S), F), Pad);
string(S, none, _Adj, P, Pad) ->
string:left(flatten(S), P, Pad);
string(S, F, Adj, P, Pad) when F >= P ->
@@ -362,9 +362,6 @@ reverse([H|T], Stack) ->
reverse(T, [H|Stack]);
reverse([], Stack) -> Stack.
-min(L, R) when L < R -> L;
-min(_, R) -> R.
-
%% flatten(List)
%% Flatten a list.
diff --git a/lib/tv/src/tv_pb.erl b/lib/tv/src/tv_pb.erl
index 34db8d0772..78a27185dc 100644
--- a/lib/tv/src/tv_pb.erl
+++ b/lib/tv/src/tv_pb.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
-module(tv_pb).
@@ -522,7 +522,7 @@ handle_col_resizing(RbtnId, RealCol, VirtualCol, Xpos, ProcVars) ->
get_xdiff(Id, Btn, LastXdiff, LineId, LineXpos, MinAllowedXdiff) ->
receive
{gs, Id, motion, {resbtn, _RealCol, _VirtCol, _OldXpos}, [NewXdiff | _T]} ->
- UsedXdiff = max(MinAllowedXdiff, NewXdiff),
+ UsedXdiff = erlang:max(MinAllowedXdiff, NewXdiff),
gs:config(LineId, [{x, LineXpos + UsedXdiff}]),
get_xdiff(Id, Btn, UsedXdiff, LineId, LineXpos, MinAllowedXdiff);
{gs, Id, buttonrelease, _Data, [Btn | _T]} ->
@@ -658,28 +658,3 @@ update_vbtns(Msg, ProcVars) ->
update_keys(Msg, ProcVars) ->
#pb_key_info{list_of_keys = KeyList} = Msg,
tv_pb_funcs:update_keys(KeyList, ProcVars).
-
-
-
-
-
-
-
-
-%%======================================================================
-%% Function:
-%%
-%% Return Value:
-%%
-%% Description:
-%%
-%% Parameters:
-%%======================================================================
-
-
-max(A, B) when A >= B ->
- A;
-max(_, B) ->
- B.
-
-
diff --git a/lib/tv/src/tv_pg_gridfcns.erl b/lib/tv/src/tv_pg_gridfcns.erl
index 809403fd96..ab88e2864f 100644
--- a/lib/tv/src/tv_pg_gridfcns.erl
+++ b/lib/tv/src/tv_pg_gridfcns.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
-module(tv_pg_gridfcns).
@@ -98,7 +98,7 @@ init_grid(GridParentId, GridWidth,
nof_rows_shown = NofRowsShown
},
- NewNofCols = max(length(ColsShown), NofCols),
+ NewNofCols = erlang:max(length(ColsShown), NofCols),
% The GridColWidths list shall contain the current width of each frame.
NewColWidths = update_col_widths(ColsShown, ColWidths, FirstColShown,
@@ -270,7 +270,7 @@ resize_grid_column(RealCol, VirtualCol, Xdiff, ProcVars) ->
lists_as_strings = ListAsStr} = GridP,
% Get new width!
- Width = min(MaxColWidth, max((lists:nth(VirtualCol, ColWidths) + Xdiff),
+ Width = erlang:min(MaxColWidth, erlang:max((lists:nth(VirtualCol, ColWidths) + Xdiff),
MinColWidth)),
% Resize the column.
@@ -1336,7 +1336,7 @@ resize_all_grid_columns(RealCol, [ColWidth | Tail], ColFrameIds, MaxColWidth, Mi
resize_one_column(RealCol, Width, ColFrameIds, MaxW, MinW) ->
- NewWidthOfCol = min(MaxW, max(Width, MinW)),
+ NewWidthOfCol = erlang:min(MaxW, erlang:max(Width, MinW)),
case length(ColFrameIds) of
RealCol ->
done;
@@ -1894,46 +1894,3 @@ extract_ids_for_one_row(N, [ColIds | Tail]) ->
%%%---------------------------------------------------------------------
%%% END of functions used to create the grid.
%%%---------------------------------------------------------------------
-
-
-
-
-
-%%======================================================================
-%% Function:
-%%
-%% Return Value:
-%%
-%% Description:
-%%
-%% Parameters:
-%%======================================================================
-
-
-max(A, B) when A > B ->
- A;
-max(_, B) ->
- B.
-
-
-
-
-
-
-
-%%======================================================================
-%% Function:
-%%
-%% Return Value:
-%%
-%% Description:
-%%
-%% Parameters:
-%%======================================================================
-
-
-min(A, B) when A < B ->
- A;
-min(_, B) ->
- B.
-
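
Both tv modules above drop their hand-rolled min/2 and max/2 helpers in favour of the erlang:min/2 and erlang:max/2 BIFs, presumably to avoid clashes with the auto-imported functions. A quick sanity sketch of the calls as they are used in the width calculations, with integer arguments:

    3 = erlang:min(3, 7),
    7 = erlang:max(3, 7),
    %% clamp pattern as in resize_one_column/5: bound a width to [MinW, MaxW]
    5 = erlang:min(10, erlang:max(5, 2)).
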
diff --git a/lib/tv/vsn.mk b/lib/tv/vsn.mk
index be1981b755..93973489bc 100644
--- a/lib/tv/vsn.mk
+++ b/lib/tv/vsn.mk
@@ -1,20 +1,20 @@
#
# %CopyrightBegin%
-#
-# Copyright Ericsson AB 1997-2009. All Rights Reserved.
-#
+#
+# Copyright Ericsson AB 1997-2010. All Rights Reserved.
+#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
# compliance with the License. You should have received a copy of the
# Erlang Public License along with this software. If not, it can be
# retrieved online at http://www.erlang.org/.
-#
+#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
# the License for the specific language governing rights and limitations
# under the License.
-#
+#
# %CopyrightEnd%
#
-TV_VSN = 2.1.4.4
+TV_VSN = 2.1.4.5
diff --git a/lib/wx/doc/src/notes.xml b/lib/wx/doc/src/notes.xml
index 92933c348b..34c56091aa 100644
--- a/lib/wx/doc/src/notes.xml
+++ b/lib/wx/doc/src/notes.xml
@@ -31,6 +31,23 @@
<p>This document describes the changes made to the wxErlang
application.</p>
+<section><title>Wx 0.98.6</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Calling <c>sys:get_status()</c> for processes that have
+ globally registered names that were not atoms would cause
+ a crash. Corrected. (Thanks to Steve Vinoski.)</p>
+ <p>
+ Own Id: OTP-8656</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Wx 0.98.5</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/wx/src/wx_object.erl b/lib/wx/src/wx_object.erl
index 1f0b7922a0..bfd38960dd 100644
--- a/lib/wx/src/wx_object.erl
+++ b/lib/wx/src/wx_object.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2008-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%%-------------------------------------------------------------------
%%% File : wx_object.erl
@@ -321,7 +321,8 @@ loop(Parent, Name, State, Mod, Time, Debug) ->
_Msg when Debug =:= [] ->
handle_msg(Msg, Parent, Name, State, Mod);
_Msg ->
- Debug1 = sys:handle_debug(Debug, {gen_server, print_event}, Name, {in, Msg}),
+ Debug1 = sys:handle_debug(Debug, fun print_event/3,
+ Name, {in, Msg}),
handle_msg(Msg, Parent, Name, State, Mod, Debug1)
end.
@@ -410,12 +411,12 @@ handle_msg({'$gen_call', From, Msg}, Parent, Name, State, Mod, Debug) ->
Debug1 = reply(Name, From, Reply, NState, Debug),
loop(Parent, Name, NState, Mod, Time1, Debug1);
{noreply, NState} ->
- Debug1 = sys:handle_debug(Debug, {gen_server, print_event}, Name,
- {noreply, NState}),
+ Debug1 = sys:handle_debug(Debug, fun print_event/3,
+ Name, {noreply, NState}),
loop(Parent, Name, NState, Mod, infinity, Debug1);
{noreply, NState, Time1} ->
- Debug1 = sys:handle_debug(Debug, {gen_server, print_event}, Name,
- {noreply, NState}),
+ Debug1 = sys:handle_debug(Debug, fun print_event/3,
+ Name, {noreply, NState}),
loop(Parent, Name, NState, Mod, Time1, Debug1);
{stop, Reason, Reply, NState} ->
{'EXIT', R} =
@@ -437,12 +438,12 @@ handle_no_reply({noreply, NState}, Parent, Name, _Msg, Mod, _State, []) ->
handle_no_reply({noreply, NState, Time1}, Parent, Name, _Msg, Mod, _State, []) ->
loop(Parent, Name, NState, Mod, Time1, []);
handle_no_reply({noreply, NState}, Parent, Name, _Msg, Mod, _State, Debug) ->
- Debug1 = sys:handle_debug(Debug, {gen_server, print_event}, Name,
- {noreply, NState}),
+ Debug1 = sys:handle_debug(Debug, fun print_event/3,
+ Name, {noreply, NState}),
loop(Parent, Name, NState, Mod, infinity, Debug1);
handle_no_reply({noreply, NState, Time1}, Parent, Name, _Msg, Mod, _State, Debug) ->
- Debug1 = sys:handle_debug(Debug, {gen_server, print_event}, Name,
- {noreply, NState}),
+ Debug1 = sys:handle_debug(Debug, fun print_event/3,
+ Name, {noreply, NState}),
loop(Parent, Name, NState, Mod, Time1, Debug1);
handle_no_reply(Reply, _Parent, Name, Msg, Mod, State, Debug) ->
handle_common_reply(Reply, Name, Msg, Mod, State,Debug).
@@ -462,8 +463,8 @@ handle_common_reply(Reply, Name, Msg, Mod, State, Debug) ->
%% @hidden
reply(Name, {To, Tag}, Reply, State, Debug) ->
reply({To, Tag}, Reply),
- sys:handle_debug(Debug, {gen_server, print_event}, Name,
- {out, Reply, To, State} ).
+ sys:handle_debug(Debug, fun print_event/3,
+ Name, {out, Reply, To, State}).
%%-----------------------------------------------------------------
@@ -485,6 +486,29 @@ system_code_change([Name, State, Mod, Time], _Module, OldVsn, Extra) ->
Else -> Else
end.
+%%-----------------------------------------------------------------
+%% Format debug messages. Print them as the call-back module sees
+%% them, not as the real erlang messages. Use trace for that.
+%%-----------------------------------------------------------------
+print_event(Dev, {in, Msg}, Name) ->
+ case Msg of
+ {'$gen_call', {From, _Tag}, Call} ->
+ io:format(Dev, "*DBG* ~p got call ~p from ~w~n",
+ [Name, Call, From]);
+ {'$gen_cast', Cast} ->
+ io:format(Dev, "*DBG* ~p got cast ~p~n",
+ [Name, Cast]);
+ _ ->
+ io:format(Dev, "*DBG* ~p got ~p~n", [Name, Msg])
+ end;
+print_event(Dev, {out, Msg, To, State}, Name) ->
+ io:format(Dev, "*DBG* ~p sent ~p to ~w, new state ~w~n",
+ [Name, Msg, To, State]);
+print_event(Dev, {noreply, State}, Name) ->
+ io:format(Dev, "*DBG* ~p new state ~w~n", [Name, State]);
+print_event(Dev, Event, Name) ->
+ io:format(Dev, "*DBG* ~p dbg ~p~n", [Name, Event]).
+
%%% ---------------------------------------------------
%%% Terminate the server.
%%% ---------------------------------------------------
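
With fun print_event/3 installed as the debug formatter, sys tracing on a wx_object now prints events as the callback module sees them rather than as raw Erlang messages. A usage sketch, assuming a hypothetical wx_object callback module my_object started under a local name (module name and request are assumptions):

    _Wx = wx:new(),
    wx_object:start({local, my_object}, my_object, [], []),
    sys:trace(my_object, true),
    wx_object:call(my_object, ping),   %% printed as "*DBG* my_object got call ping from <pid>"
    sys:trace(my_object, false).
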
@@ -581,12 +605,15 @@ dbg_opts(Name, Opts) ->
%%-----------------------------------------------------------------
format_status(Opt, StatusData) ->
[PDict, SysState, Parent, Debug, [Name, State, Mod, _Time]] = StatusData,
- NameTag = if is_pid(Name) ->
- pid_to_list(Name);
- is_atom(Name) ->
- Name
- end,
- Header = lists:concat(["Status for generic server ", NameTag]),
+ StatusHdr = "Status for wx object ",
+ Header = if
+ is_pid(Name) ->
+ lists:concat([StatusHdr, pid_to_list(Name)]);
+ is_atom(Name); is_list(Name) ->
+ lists:concat([StatusHdr, Name]);
+ true ->
+ {StatusHdr, Name}
+ end,
Log = sys:get_debug(log, Debug, []),
Specfic =
case erlang:function_exported(Mod, format_status, 2) of
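
The reworked header construction also accepts names that are neither pids nor atoms, which is the crash described by OTP-8656 for globally registered objects. A sketch of the call that used to fail, assuming wx_object accepts the same {global, Name} registration as gen_server and a hypothetical callback module my_object:

    wx_object:start({global, {my_object, 1}}, my_object, [], []),
    sys:get_status({global, {my_object, 1}}),
    %% the status header is now built by the true-clause above,
    %% i.e. {"Status for wx object ", {my_object,1}}, instead of crashing.
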
diff --git a/lib/wx/vsn.mk b/lib/wx/vsn.mk
index 54ab92cad2..4ed22d2256 100644
--- a/lib/wx/vsn.mk
+++ b/lib/wx/vsn.mk
@@ -1,6 +1,8 @@
-WX_VSN = 0.98.5
+WX_VSN = 0.98.6
-TICKETS = OTP-8330 OTP-8461 OTP-8408 OTP-8455 OTP-8462
+TICKETS = OTP-8656
+
+TICKETS_0.98.5 = OTP-8330 OTP-8461 OTP-8408 OTP-8455 OTP-8462
TICKETS_0.98.4 = OTP-8243 OTP-8250 OTP-8292
TICKETS_0.98.3 = OTP-8138 OTP-8126 OTP-8083
TICKETS_0.98.2 = OTP-7943
diff --git a/lib/xmerl/doc/src/notes.xml b/lib/xmerl/doc/src/notes.xml
index 207f6fdf16..0403fbca27 100644
--- a/lib/xmerl/doc/src/notes.xml
+++ b/lib/xmerl/doc/src/notes.xml
@@ -50,6 +50,14 @@
Own Id: OTP-8537
</p>
</item>
+ <item>
+ <p>
+ An empty element declared as a simpleContent was not properly validated.
+ </p>
+ <p>
+ Own Id: OTP-8599
+ </p>
+ </item>
</list>
</section>
diff --git a/lib/xmerl/src/xmerl_xsd.erl b/lib/xmerl/src/xmerl_xsd.erl
index c7bca86205..1aedc9e270 100644
--- a/lib/xmerl/src/xmerl_xsd.erl
+++ b/lib/xmerl/src/xmerl_xsd.erl
@@ -1,19 +1,19 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2006-2010. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
@@ -2687,13 +2687,16 @@ check_element_type(XML=[E=#xmlElement{name=Name}|Rest],
_ ->
{error,{error_path(E,Name),?MODULE,{element_bad_match,E,Any,Env}}}
end;
-check_element_type([],CM,_Env,_Block,_S,Checked) ->
+check_element_type([],CM,_Env,_Block,S,Checked) ->
%% #schema_complex_type, any, #schema_group, anyType and lists are
%% catched above.
case CM of
+ #schema_simple_type{} ->
+ {NewVal,S2} = check_type(CM,[],unapplied,S),
+ {NewVal,[],S2};
{simpleType,_} ->
- {error,{error_path(Checked,undefined),?MODULE,
- {empty_content_not_allowed,CM}}};
+ {NewVal,S2} = check_type(CM,[],unapplied,S),
+ {NewVal,[],S2};
_ ->
{error,{error_path(Checked,undefined),?MODULE,
{empty_content_not_allowed,CM}}}
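
The added clauses route empty element content through check_type/4 when the content model is a simple type, so an empty instance element is now checked against its declared type instead of being rejected outright with empty_content_not_allowed (OTP-8599). A sketch, assuming a schema file price.xsd that declares a <price> element with simpleContent; the file name, element and attribute are assumptions:

    {E, _Rest} = xmerl_scan:string("<price currency=\"SEK\"></price>"),
    {ok, State} = xmerl_xsd:process_schema("price.xsd"),
    {ValidE, _} = xmerl_xsd:validate(E, State),
    ValidE.   %% validated #xmlElement{} rather than an empty_content_not_allowed error
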
diff --git a/lib/xmerl/vsn.mk b/lib/xmerl/vsn.mk
index 83b9d4826f..aee7546c3c 100644
--- a/lib/xmerl/vsn.mk
+++ b/lib/xmerl/vsn.mk
@@ -21,7 +21,8 @@ XMERL_VSN = 1.2.5
TICKETS = \
- OTP-8537
+ OTP-8537 \
+ OTP-8599
TICKETS_1.2.5 = \
OTP-8537