Diffstat (limited to 'lib')
-rw-r--r--  lib/.gitignore | 4
-rw-r--r--  lib/asn1/src/asn1ct_check.erl | 8
-rw-r--r--  lib/asn1/test/asn1_SUITE.erl.src | 1
-rw-r--r--  lib/asn1/test/asn1_SUITE_data/CAP.asn1 | 41
-rw-r--r--  lib/common_test/doc/src/run_test_chapter.xml | 10
-rw-r--r--  lib/common_test/src/ct_testspec.erl | 133
-rw-r--r--  lib/common_test/src/ct_util.hrl | 3
-rw-r--r--  lib/common_test/test/ct_testspec_1_SUITE.erl | 947
-rw-r--r--  lib/compiler/doc/src/compile.xml | 66
-rw-r--r--  lib/compiler/src/compile.erl | 297
-rw-r--r--  lib/compiler/src/v3_kernel_pp.erl | 37
-rw-r--r--  lib/compiler/test/bs_construct_SUITE.erl | 4
-rw-r--r--  lib/compiler/test/compilation_SUITE.erl | 10
-rw-r--r--  lib/compiler/test/compile_SUITE.erl | 77
-rw-r--r--  lib/compiler/test/compile_SUITE_data/simple-basic1.mk | 1
-rw-r--r--  lib/compiler/test/compile_SUITE_data/simple-basic2.mk | 1
-rw-r--r--  lib/compiler/test/compile_SUITE_data/simple-missing.mk | 1
-rw-r--r--  lib/compiler/test/compile_SUITE_data/simple-target1.mk | 1
-rw-r--r--  lib/compiler/test/compile_SUITE_data/simple-target2.mk | 1
-rw-r--r--  lib/compiler/test/compile_SUITE_data/simple.erl | 6
-rw-r--r--  lib/compiler/test/guard_SUITE.erl | 6
-rw-r--r--  lib/compiler/test/inline_SUITE.erl | 4
-rw-r--r--  lib/compiler/test/lc_SUITE.erl | 4
-rw-r--r--  lib/compiler/test/test_lib.erl | 18
-rw-r--r--  lib/cosEvent/doc/src/notes.xml | 26
-rw-r--r--  lib/cosEvent/src/cosEventApp.erl | 44
-rw-r--r--  lib/cosEvent/vsn.mk | 4
-rw-r--r--  lib/cosEventDomain/doc/src/notes.xml | 20
-rw-r--r--  lib/cosEventDomain/src/CosEventDomainAdmin_EventDomain_impl.erl | 24
-rw-r--r--  lib/cosEventDomain/src/cosEventDomainApp.erl | 13
-rw-r--r--  lib/cosEventDomain/vsn.mk | 4
-rw-r--r--  lib/cosNotification/doc/src/notes.xml | 16
-rw-r--r--  lib/cosNotification/src/CosNotification_Common.erl | 46
-rw-r--r--  lib/cosNotification/vsn.mk | 3
-rw-r--r--  lib/cosProperty/doc/src/notes.xml | 30
-rw-r--r--  lib/cosProperty/src/CosPropertyService_PropertySetDefFactory_impl.erl | 14
-rw-r--r--  lib/cosProperty/src/CosPropertyService_PropertySetFactory_impl.erl | 14
-rw-r--r--  lib/cosProperty/vsn.mk | 3
-rw-r--r--  lib/cosTime/doc/src/notes.xml | 18
-rw-r--r--  lib/cosTime/src/cosTime.erl | 60
-rw-r--r--  lib/cosTime/vsn.mk | 3
-rw-r--r--  lib/crypto/c_src/crypto.c | 14
-rw-r--r--  lib/debugger/src/dbg_ui_view.erl | 13
-rw-r--r--  lib/debugger/src/dbg_wx_view.erl | 11
-rw-r--r--  lib/dialyzer/doc/manual.txt | 4
-rw-r--r--  lib/dialyzer/doc/src/dialyzer.xml | 6
-rw-r--r--  lib/dialyzer/src/dialyzer.erl | 25
-rw-r--r--  lib/dialyzer/src/dialyzer.hrl | 14
-rw-r--r--  lib/dialyzer/src/dialyzer_cl.erl | 14
-rw-r--r--  lib/dialyzer/src/dialyzer_cl_parse.erl | 11
-rw-r--r--  lib/dialyzer/src/dialyzer_dataflow.erl | 120
-rw-r--r--  lib/dialyzer/src/dialyzer_options.erl | 12
-rw-r--r--  lib/edoc/src/edoc_lib.erl | 43
-rw-r--r--  lib/edoc/src/edoc_wiki.erl | 3
-rw-r--r--  lib/erl_docgen/doc/src/notes.xml | 27
-rwxr-xr-x  lib/erl_docgen/priv/bin/xref_mod_app.escript | 4
-rw-r--r--  lib/erl_docgen/priv/xsl/db_html.xsl | 8
-rw-r--r--  lib/erl_docgen/priv/xsl/db_pdf.xsl | 20
-rw-r--r--  lib/erl_docgen/vsn.mk | 3
-rw-r--r--  lib/erl_interface/src/decode/decode_atom.c | 2
-rw-r--r--  lib/erl_interface/src/decode/decode_pid.c | 2
-rw-r--r--  lib/erl_interface/src/decode/decode_port.c | 2
-rw-r--r--  lib/erl_interface/src/decode/decode_ref.c | 3
-rw-r--r--  lib/erl_interface/src/legacy/erl_connect.c | 7
-rw-r--r--  lib/erl_interface/src/legacy/erl_format.c | 2
-rw-r--r--  lib/erl_interface/src/legacy/erl_marshal.c | 4
-rw-r--r--  lib/erl_interface/src/legacy/erl_timeout.c | 2
-rw-r--r--  lib/erl_interface/src/misc/ei_decode_term.c | 5
-rw-r--r--  lib/hipe/cerl/erl_bif_types.erl | 200
-rw-r--r--  lib/hipe/icode/hipe_icode_range.erl | 364
-rw-r--r--  lib/inets/doc/src/http_client.xml | 8
-rw-r--r--  lib/inets/doc/src/httpc.xml | 104
-rw-r--r--  lib/inets/doc/src/notes.xml | 83
-rw-r--r--  lib/inets/src/http_client/httpc.erl | 2
-rw-r--r--  lib/inets/src/http_lib/http_chunk.erl | 10
-rw-r--r--  lib/inets/src/inets_app/inets.appup.src | 14
-rw-r--r--  lib/inets/vsn.mk | 2
-rw-r--r--  lib/inviso/src/inviso_tool.erl | 6511
-rw-r--r--  lib/inviso/src/inviso_tool_sh.erl | 3480
-rw-r--r--  lib/inviso/test/inviso_tool_SUITE.erl | 36
-rw-r--r--  lib/kernel/doc/src/gen_tcp.xml | 1
-rw-r--r--  lib/kernel/src/code.erl | 129
-rw-r--r--  lib/kernel/src/file.erl | 2
-rw-r--r--  lib/kernel/src/inet6_tcp_dist.erl | 6
-rw-r--r--  lib/kernel/src/net_kernel.erl | 31
-rw-r--r--  lib/kernel/src/os.erl | 31
-rw-r--r--  lib/kernel/test/erl_distribution_SUITE.erl | 4
-rw-r--r--  lib/kernel/test/gen_tcp_api_SUITE.erl | 54
-rw-r--r--  lib/kernel/test/gen_udp_SUITE.erl | 55
-rw-r--r--  lib/kernel/test/os_SUITE.erl | 17
-rw-r--r--  lib/megaco/Makefile | 34
-rw-r--r--  lib/megaco/doc/src/notes.xml | 86
-rw-r--r--  lib/megaco/src/app/megaco.app.src | 3
-rw-r--r--  lib/megaco/src/app/megaco.appup.src | 50
-rw-r--r--  lib/megaco/src/binary/megaco_binary_encoder_lib.erl | 31
-rw-r--r--  lib/megaco/src/engine/depend.mk | 4
-rw-r--r--  lib/megaco/src/engine/megaco_config.erl | 156
-rw-r--r--  lib/megaco/src/engine/megaco_config_misc.erl | 113
-rw-r--r--  lib/megaco/src/engine/megaco_filter.erl | 33
-rw-r--r--  lib/megaco/src/engine/megaco_sdp.erl | 6
-rw-r--r--  lib/megaco/src/engine/megaco_timer.erl | 14
-rw-r--r--  lib/megaco/src/engine/modules.mk | 3
-rw-r--r--  lib/megaco/src/flex/megaco_flex_scanner.erl | 18
-rw-r--r--  lib/megaco/test/megaco_SUITE.erl | 77
-rw-r--r--  lib/megaco/test/megaco_app_test.erl | 14
-rw-r--r--  lib/megaco/test/megaco_appup_test.erl | 3
-rw-r--r--  lib/megaco/test/megaco_codec_v1_test.erl | 188
-rw-r--r--  lib/megaco/test/megaco_test_generator.erl | 6
-rw-r--r--  lib/megaco/test/megaco_test_lib.erl | 238
-rw-r--r--  lib/megaco/test/megaco_test_megaco_generator.erl | 4
-rw-r--r--  lib/megaco/test/megaco_test_msg_prev3a_lib.erl | 6
-rw-r--r--  lib/megaco/test/megaco_test_msg_prev3b_lib.erl | 6
-rw-r--r--  lib/megaco/test/megaco_test_msg_prev3c_lib.erl | 6
-rw-r--r--  lib/megaco/test/megaco_test_msg_v1_lib.erl | 6
-rw-r--r--  lib/megaco/test/megaco_test_msg_v2_lib.erl | 6
-rw-r--r--  lib/megaco/test/megaco_test_msg_v3_lib.erl | 6
-rw-r--r--  lib/megaco/test/megaco_test_tcp_generator.erl | 4
-rw-r--r--  lib/megaco/test/megaco_timer_test.erl | 4
-rw-r--r--  lib/megaco/vsn.mk | 25
-rw-r--r--  lib/mnesia/src/mnesia.appup.src | 20
-rw-r--r--  lib/mnesia/src/mnesia.erl | 9
-rw-r--r--  lib/mnesia/src/mnesia_bup.erl | 7
-rw-r--r--  lib/mnesia/src/mnesia_frag.erl | 28
-rw-r--r--  lib/mnesia/src/mnesia_index.erl | 2
-rw-r--r--  lib/mnesia/src/mnesia_lib.erl | 4
-rw-r--r--  lib/mnesia/src/mnesia_locker.erl | 3
-rw-r--r--  lib/mnesia/src/mnesia_recover.erl | 2
-rw-r--r--  lib/mnesia/src/mnesia_schema.erl | 9
-rw-r--r--  lib/mnesia/src/mnesia_snmp_hook.erl | 2
-rw-r--r--  lib/mnesia/src/mnesia_tm.erl | 4
-rw-r--r--  lib/mnesia/test/mnesia_SUITE.erl | 4
-rw-r--r--  lib/mnesia/vsn.mk | 2
-rw-r--r--  lib/observer/doc/src/crashdump.xml | 6
-rw-r--r--  lib/observer/doc/src/crashdump_help.html | 4
-rw-r--r--  lib/observer/doc/src/crashdump_ug.xml | 44
-rw-r--r--  lib/observer/doc/src/etop.xml | 43
-rwxr-xr-x  lib/observer/priv/bin/cdv | 4
-rw-r--r--  lib/observer/priv/bin/cdv.bat | 2
-rw-r--r--  lib/observer/src/Makefile | 6
-rw-r--r--  lib/observer/src/crashdump_viewer.erl | 1111
-rw-r--r--  lib/observer/src/crashdump_viewer.hrl | 185
-rw-r--r--  lib/observer/src/crashdump_viewer_html.erl | 601
-rw-r--r--  lib/observer/test/Makefile | 2
-rw-r--r--  lib/observer/test/crashdump_viewer_SUITE.erl | 63
-rw-r--r--  lib/odbc/c_src/odbcserver.c | 4
-rw-r--r--  lib/odbc/src/odbc.appup.src | 4
-rw-r--r--  lib/odbc/vsn.mk | 2
-rw-r--r--  lib/orber/doc/src/notes.xml | 18
-rw-r--r--  lib/orber/include/ifr_types.hrl | 6
-rw-r--r--  lib/orber/src/corba.erl | 6
-rw-r--r--  lib/orber/src/orber.app.src | 2
-rw-r--r--  lib/orber/src/orber.erl | 18
-rw-r--r--  lib/orber/src/orber_socket.erl | 12
-rw-r--r--  lib/orber/test/csiv2_SUITE.erl | 10
-rw-r--r--  lib/orber/test/multi_ORB_SUITE.erl | 4
-rw-r--r--  lib/orber/test/orber_test_lib.erl | 4
-rw-r--r--  lib/orber/vsn.mk | 4
-rw-r--r--  lib/os_mon/test/disksup_SUITE.erl | 6
-rw-r--r--  lib/percept/src/egd.erl | 1
-rw-r--r--  lib/public_key/doc/src/public_key.xml | 4
-rw-r--r--  lib/public_key/src/public_key.appup.src | 26
-rw-r--r--  lib/public_key/test/pkits_SUITE.erl | 6
-rw-r--r--  lib/public_key/test/public_key_SUITE.erl | 6
-rw-r--r--  lib/public_key/vsn.mk | 2
-rw-r--r--  lib/runtime_tools/src/inviso_rt.erl | 70
-rw-r--r--  lib/sasl/doc/src/appup.xml | 10
-rw-r--r--  lib/sasl/src/systools_rc.erl | 28
-rw-r--r--  lib/sasl/src/systools_relup.erl | 4
-rw-r--r--  lib/snmp/doc/man1/.gitignore | 0
-rw-r--r--  lib/snmp/doc/src/Makefile | 32
-rw-r--r--  lib/snmp/doc/src/depend.mk | 1
-rw-r--r--  lib/snmp/doc/src/files.mk | 19
-rw-r--r--  lib/snmp/doc/src/make.dep | 2
-rw-r--r--  lib/snmp/doc/src/notes.xml | 137
-rw-r--r--  lib/snmp/doc/src/ref_man.xml | 3
-rw-r--r--  lib/snmp/doc/src/snmp_agent_config_files.xml | 11
-rw-r--r--  lib/snmp/doc/src/snmp_config.xml | 42
-rw-r--r--  lib/snmp/doc/src/snmp_view_based_acm_mib.xml | 74
-rw-r--r--  lib/snmp/doc/src/snmpa.xml | 37
-rw-r--r--  lib/snmp/doc/src/snmpa_error.xml | 7
-rw-r--r--  lib/snmp/doc/src/snmpc.xml | 45
-rw-r--r--  lib/snmp/doc/src/snmpc_cmd.xml | 191
-rw-r--r--  lib/snmp/include/snmp_types.hrl | 4
-rw-r--r--  lib/snmp/mibs/Makefile.in | 26
-rw-r--r--  lib/snmp/src/agent/snmp_community_mib.erl | 4
-rw-r--r--  lib/snmp/src/agent/snmp_framework_mib.erl | 12
-rw-r--r--  lib/snmp/src/agent/snmp_standard_mib.erl | 275
-rw-r--r--  lib/snmp/src/agent/snmp_target_mib.erl | 13
-rw-r--r--  lib/snmp/src/agent/snmp_user_based_sm_mib.erl | 60
-rw-r--r--  lib/snmp/src/agent/snmp_view_based_acm_mib.erl | 234
-rw-r--r--  lib/snmp/src/agent/snmpa.erl | 182
-rw-r--r--  lib/snmp/src/agent/snmpa_mib_lib.erl | 57
-rw-r--r--  lib/snmp/src/agent/snmpa_vacm.erl | 9
-rw-r--r--  lib/snmp/src/app/snmp.appup.src | 120
-rw-r--r--  lib/snmp/src/compile/Makefile | 14
-rw-r--r--  lib/snmp/src/compile/depend.mk | 3
-rw-r--r--  lib/snmp/src/compile/modules.mk | 3
-rw-r--r--  lib/snmp/src/compile/snmpc.erl | 138
-rw-r--r--  lib/snmp/src/compile/snmpc.hrl | 63
-rw-r--r--  lib/snmp/src/compile/snmpc.src | 381
-rw-r--r--  lib/snmp/src/compile/snmpc_lib.erl | 5
-rw-r--r--  lib/snmp/src/compile/snmpc_mib_gram.yrl | 330
-rw-r--r--  lib/snmp/src/compile/snmpc_mib_to_hrl.erl | 3
-rw-r--r--  lib/snmp/src/compile/snmpc_tok.erl | 9
-rw-r--r--  lib/snmp/test/modules.mk | 4
-rw-r--r--  lib/snmp/test/snmp_SUITE.erl | 123
-rw-r--r--  lib/snmp/test/snmp_app_test.erl | 44
-rw-r--r--  lib/snmp/test/snmp_appup_test.erl | 20
-rw-r--r--  lib/snmp/test/snmp_compiler_test.erl | 109
-rw-r--r--  lib/snmp/test/snmp_manager_config_test.erl | 105
-rw-r--r--  lib/snmp/test/snmp_test_data/AC-TEST-MIB.mib | 131
-rw-r--r--  lib/snmp/test/snmp_test_data/MC-TEST-MIB.mib | 173
-rw-r--r--  lib/snmp/test/snmp_test_mgr_misc.erl | 4
-rw-r--r--  lib/snmp/test/test_config/.gitignore | 19
-rw-r--r--  lib/snmp/test/test_config/Makefile | 199
-rw-r--r--  lib/snmp/test/test_config/agent/agent.conf.src | 19
-rw-r--r--  lib/snmp/test/test_config/agent/community.conf.src | 15
-rw-r--r--  lib/snmp/test/test_config/agent/context.conf.src | 14
-rw-r--r--  lib/snmp/test/test_config/agent/notify.conf.src | 13
-rw-r--r--  lib/snmp/test/test_config/agent/standard.conf.src | 21
-rw-r--r--  lib/snmp/test/test_config/agent/target_addr.conf.src | 21
-rw-r--r--  lib/snmp/test/test_config/agent/target_params.conf.src | 11
-rw-r--r--  lib/snmp/test/test_config/agent/usm.conf.src | 17
-rw-r--r--  lib/snmp/test/test_config/agent/vacm.conf.src | 27
-rw-r--r--  lib/snmp/test/test_config/manager/manager.conf.src | 16
-rw-r--r--  lib/snmp/test/test_config/manager/usm.conf.src | 9
-rw-r--r--  lib/snmp/test/test_config/modules.mk | 41
-rw-r--r--  lib/snmp/test/test_config/snmp_test_config.erl | 32
-rw-r--r--  lib/snmp/test/test_config/sys-agent.config.src | 43
-rw-r--r--  lib/snmp/test/test_config/sys-manager.config.src | 35
-rw-r--r--  lib/snmp/test/test_config/sys.config.src | 68
-rw-r--r--  lib/snmp/vsn.mk | 21
-rw-r--r--  lib/ssl/doc/src/ssl.xml | 7
-rw-r--r--  lib/ssl/src/inet_ssl_dist.erl | 27
-rw-r--r--  lib/ssl/src/ssl.appup.src | 2
-rw-r--r--  lib/ssl/src/ssl.erl | 15
-rw-r--r--  lib/ssl/src/ssl_connection.erl | 94
-rw-r--r--  lib/ssl/src/ssl_internal.hrl | 6
-rw-r--r--  lib/ssl/test/Makefile | 3
-rw-r--r--  lib/ssl/test/old_ssl_dist_SUITE.erl | 10
-rw-r--r--  lib/ssl/test/old_transport_accept_SUITE.erl | 10
-rw-r--r--  lib/ssl/test/ssl_basic_SUITE.erl | 50
-rw-r--r--  lib/ssl/test/ssl_packet_SUITE.erl | 6
-rw-r--r--  lib/ssl/test/ssl_payload_SUITE.erl | 6
-rw-r--r--  lib/ssl/test/ssl_session_cache_SUITE.erl | 6
-rw-r--r--  lib/ssl/test/ssl_test_lib.erl | 12
-rw-r--r--  lib/ssl/test/ssl_to_openssl_SUITE.erl | 6
-rw-r--r--  lib/ssl/vsn.mk | 3
-rw-r--r--  lib/stdlib/doc/src/calendar.xml | 32
-rw-r--r--  lib/stdlib/doc/src/dict.xml | 8
-rw-r--r--  lib/stdlib/doc/src/orddict.xml | 8
-rw-r--r--  lib/stdlib/src/calendar.erl | 53
-rw-r--r--  lib/stdlib/src/escript.erl | 18
-rw-r--r--  lib/stdlib/src/supervisor.erl | 115
-rw-r--r--  lib/stdlib/test/calendar_SUITE.erl | 22
-rw-r--r--  lib/stdlib/test/ets_SUITE.erl | 280
-rw-r--r--  lib/stdlib/test/stdlib_SUITE.erl | 4
-rw-r--r--  lib/stdlib/test/supervisor_SUITE.erl | 131
-rw-r--r--  lib/test_server/src/test_server.erl | 15
-rw-r--r--  lib/test_server/src/ts_install_cth.erl | 9
-rw-r--r--  lib/test_server/src/ts_run.erl | 392
-rw-r--r--  lib/test_server/test/Makefile | 7
-rw-r--r--  lib/test_server/test/test_server.spec | 3
-rw-r--r--  lib/test_server/test/test_server_SUITE.erl | 656
-rw-r--r--  lib/test_server/test/test_server_SUITE_data/Makefile.src | 2
-rw-r--r--  lib/test_server/test/test_server_SUITE_data/test_server_SUITE.erl | 554
-rw-r--r--  lib/test_server/test/test_server_SUITE_data/test_server_SUITE_data/dummy_file (renamed from lib/test_server/test/test_server_SUITE_data/dummy_file) | 0
-rw-r--r--  lib/test_server/test/test_server_SUITE_data/test_server_conf01_SUITE.erl (renamed from lib/test_server/test/test_server_conf01_SUITE.erl) | 0
-rw-r--r--  lib/test_server/test/test_server_SUITE_data/test_server_conf02_SUITE.erl (renamed from lib/test_server/test/test_server_conf02_SUITE.erl) | 0
-rw-r--r--  lib/test_server/test/test_server_SUITE_data/test_server_parallel01_SUITE.erl (renamed from lib/test_server/test/test_server_parallel01_SUITE.erl) | 0
-rw-r--r--  lib/test_server/test/test_server_SUITE_data/test_server_shuffle01_SUITE.erl (renamed from lib/test_server/test/test_server_shuffle01_SUITE.erl) | 0
-rw-r--r--  lib/test_server/test/test_server_SUITE_data/test_server_skip_SUITE.erl (renamed from lib/test_server/test/test_server_skip_SUITE.erl) | 0
-rw-r--r--  lib/test_server/test/test_server_line_SUITE.erl | 19
-rw-r--r--  lib/test_server/test/test_server_test_lib.erl | 191
-rw-r--r--  lib/test_server/test/test_server_test_lib.hrl | 23
-rw-r--r--  lib/tools/doc/src/cover.xml | 29
-rw-r--r--  lib/tools/doc/src/cover_chapter.xml | 7
-rw-r--r--  lib/tools/src/cover.erl | 583
-rw-r--r--  lib/tools/test/cover_SUITE.erl | 27
-rw-r--r--  lib/typer/RELEASE_NOTES | 22
-rw-r--r--  lib/typer/src/Makefile | 23
-rw-r--r--  lib/typer/src/typer.app.src | 7
-rw-r--r--  lib/typer/src/typer.erl | 995
-rw-r--r--  lib/typer/src/typer.hrl | 64
-rw-r--r--  lib/typer/src/typer_annotator.erl | 384
-rw-r--r--  lib/typer/src/typer_info.erl | 162
-rw-r--r--  lib/typer/src/typer_map.erl | 47
-rw-r--r--  lib/typer/src/typer_options.erl | 191
-rw-r--r--  lib/typer/src/typer_preprocess.erl | 154
-rw-r--r--  lib/typer/vsn.mk | 2
-rw-r--r--  lib/wx/c_src/wxe_driver.c | 6
-rw-r--r--  lib/wx/c_src/wxe_impl.cpp | 40
-rw-r--r--  lib/wx/c_src/wxe_ps_init.c | 2
-rw-r--r--  lib/wx/src/wxe_master.erl | 28
-rw-r--r--  lib/xmerl/src/xmerl_sax_parser_utf16be.erlsrc | 4
-rw-r--r--  lib/xmerl/src/xmerl_sax_parser_utf16le.erlsrc | 4
-rw-r--r--  lib/xmerl/src/xmerl_uri.erl | 8
297 files changed, 16232 insertions, 10384 deletions
diff --git a/lib/.gitignore b/lib/.gitignore
index 340baf5269..56b1ed2b84 100644
--- a/lib/.gitignore
+++ b/lib/.gitignore
@@ -531,6 +531,10 @@
/percept/doc/src/percept_profile.xml
/percept/doc/src/percept_ug.xml
+# snmp
+
+snmp/doc/intex.html
+
# syntax_tools
/syntax_tools/doc/src/chapter.xml
diff --git a/lib/asn1/src/asn1ct_check.erl b/lib/asn1/src/asn1ct_check.erl
index 8b1ee6e601..efd731f052 100644
--- a/lib/asn1/src/asn1ct_check.erl
+++ b/lib/asn1/src/asn1ct_check.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -6176,7 +6176,6 @@ componentrelation_leadingattr(_,[],_CompList,[],NewCompList) ->
{false,lists:reverse(NewCompList)};
componentrelation_leadingattr(_,[],_CompList,LeadingAttr,NewCompList) ->
{lists:last(LeadingAttr),lists:reverse(NewCompList)}; %send all info in Ts later
-
componentrelation_leadingattr(S,[C= #'ComponentType'{}|Cs],CompList,Acc,CompAcc) ->
{LAAcc,NewC} =
case catch componentrelation1(S,C#'ComponentType'.typespec,
@@ -6229,7 +6228,10 @@ componentrelation_leadingattr(S,[C= #'ComponentType'{}|Cs],CompList,Acc,CompAcc)
{[],C}
end,
componentrelation_leadingattr(S,Cs,CompList,LAAcc++Acc,
- [NewC|CompAcc]).
+ [NewC|CompAcc]);
+componentrelation_leadingattr(S,[NotComponentType|Cs],CompList,LeadingAttr,NewCompList) ->
+ componentrelation_leadingattr(S,Cs,CompList,LeadingAttr,[NotComponentType|NewCompList]).
+
object_set_mod_name(_S,ObjSet) when is_atom(ObjSet) ->
ObjSet;
diff --git a/lib/asn1/test/asn1_SUITE.erl.src b/lib/asn1/test/asn1_SUITE.erl.src
index e1a09adc82..fd0bae34c8 100644
--- a/lib/asn1/test/asn1_SUITE.erl.src
+++ b/lib/asn1/test/asn1_SUITE.erl.src
@@ -2327,6 +2327,7 @@ ber_modules() ->
test_modules() ->
_Modules = [
"BitStr",
+ "CAP",
"CommonDataTypes",
"Constraints",
"ContextSwitchingTypes",
diff --git a/lib/asn1/test/asn1_SUITE_data/CAP.asn1 b/lib/asn1/test/asn1_SUITE_data/CAP.asn1
new file mode 100644
index 0000000000..69d8486d3b
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/CAP.asn1
@@ -0,0 +1,41 @@
+CAP {ccitt(0) identified-organization(4) etsi(0) mobileDomain(0) umts-network(1) modules(3) cap-datatypes(52) version3(2)}
+
+DEFINITIONS IMPLICIT TAGS ::=
+
+BEGIN
+
+EXTENSION ::= CLASS {
+ &ExtensionType,
+ &criticality CriticalityType DEFAULT ignore,
+ &id Code
+ }
+WITH SYNTAX {
+ EXTENSION-SYNTAX &ExtensionType
+ CRITICALITY &criticality
+ IDENTIFIED BY &id
+ }
+
+ExtensionField ::= SEQUENCE {
+ type EXTENSION.&id ({SupportedExtensions }),
+ criticality CriticalityType DEFAULT ignore,
+ value [1] EXTENSION.&ExtensionType ({SupportedExtensions }{@type}),
+ ...}
+
+SupportedExtensions EXTENSION ::= {firstExtension, ...}
+
+firstExtension EXTENSION ::= {
+ EXTENSION-SYNTAX NULL
+ CRITICALITY ignore
+ IDENTIFIED BY global : {itu-t(0) identified-organization(4) organisation(0) gsm(1)
+ capextension(2)}}
+
+CriticalityType ::= ENUMERATED {
+ ignore (0),
+ abort (1)
+ }
+
+Code ::= CHOICE {local INTEGER,
+ global OBJECT IDENTIFIER}
+
+
+END
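
The new CAP module exercises an ASN.1 information object class (EXTENSION) together with a component-relation constraint, which is the construct the asn1ct_check change above has to cope with. As a rough sketch only (the chosen back-end option and the exact return values of the generated encode/decode functions vary between OTP releases), compiling and round-tripping a value from the Erlang shell might look like:

    %% Sketch: assumes CAP.asn1 is in the current working directory.
    ok = asn1ct:compile("CAP", [ber]),
    {ok, Bytes} = 'CAP':encode('Code', {local, 1}),
    {ok, {local, 1}} = 'CAP':decode('Code', Bytes).
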
diff --git a/lib/common_test/doc/src/run_test_chapter.xml b/lib/common_test/doc/src/run_test_chapter.xml
index 7b485bdc35..f052adf25e 100644
--- a/lib/common_test/doc/src/run_test_chapter.xml
+++ b/lib/common_test/doc/src/run_test_chapter.xml
@@ -368,15 +368,17 @@
either one or more suites, one or more test case groups, or one
or more test cases in a group or suite.</p>
<p>An arbitrary number of test terms may be declared in sequence.
- Common Test will compile the terms into one or more tests to be
- performed in one resulting test run. Note that a term that
+ Common Test will by default compile the terms into one or more tests
+ to be performed in one resulting test run. Note that a term that
specifies a set of test cases will "swallow" one that only
specifies a subset of these cases. E.g. the result of merging
one term that specifies that all cases in suite S should be
executed, with another term specifying only test case X and Y in
S, is a test of all cases in S. However, if a term specifying
test case X and Y in S is merged with a term specifying case Z
- in S, the result is a test of X, Y and Z in S.</p>
+ in S, the result is a test of X, Y and Z in S. To disable this
+ behaviour, it is possible in the test specification to set the
+ <c>merge_tests</c> term to <c>false</c>.</p>
<p>A test term can also specify one or more test suites, groups,
or test cases to be skipped. Skipped suites, groups and cases
are not executed and show up in the HTML test log files as
@@ -435,6 +437,8 @@
{userconfig, NodeRefs, {CallbackModule, ConfigStrings}}.
{alias, DirAlias, Dir}.
+
+ {merge_tests, Bool}.
{logdir, LogDir}.
{logdir, NodeRefs, LogDir}.
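
For reference, a minimal test specification using the new term could look like the sketch below (the directories, suite and case names are invented for illustration and are not taken from this commit). With merge_tests set to false, the two cases terms for my_SUITE are kept as two separate tests and run in the order written, instead of being merged into one:

    %% my.spec -- hypothetical example
    {merge_tests, false}.
    {cases, "tests/dir_1", my_SUITE, [tc_1]}.
    {suites, "tests/dir_2", other_SUITE}.
    {cases, "tests/dir_1", my_SUITE, [tc_2]}.

Such a specification can then be run with, for example, ct:run_test([{spec, "my.spec"}]).
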
diff --git a/lib/common_test/src/ct_testspec.erl b/lib/common_test/src/ct_testspec.erl
index 2b6abefb72..2c7cd3577c 100644
--- a/lib/common_test/src/ct_testspec.erl
+++ b/lib/common_test/src/ct_testspec.erl
@@ -68,7 +68,8 @@ prepare_tests(TestSpec) when is_record(TestSpec,testspec) ->
%% Create initial list of {Node,{Run,Skip}} tuples
NodeList = lists:map(fun(N) -> {N,{[],[]}} end, list_nodes(TestSpec)),
%% Get all Run tests sorted per node basis.
- NodeList1 = run_per_node(Run,NodeList),
+ NodeList1 = run_per_node(Run,NodeList,
+ TestSpec#testspec.merge_tests),
%% Get all Skip entries sorted per node basis.
NodeList2 = skip_per_node(Skip,NodeList1),
%% Change representation.
@@ -89,11 +90,17 @@ prepare_tests(TestSpec) when is_record(TestSpec,testspec) ->
%% run_per_node/2 takes the Run list as input and returns a list
%% of {Node,RunPerNode,[]} tuples where the tests have been sorted
%% on a per node basis.
-run_per_node([{{Node,Dir},Test}|Ts],Result) ->
+run_per_node([{{Node,Dir},Test}|Ts],Result, MergeTests) ->
{value,{Node,{Run,Skip}}} = lists:keysearch(Node,1,Result),
- Run1 = merge_tests(Dir,Test,Run),
- run_per_node(Ts,insert_in_order({Node,{Run1,Skip}},Result));
-run_per_node([],Result) ->
+ Run1 = case MergeTests of
+ false ->
+ append({Dir, Test}, Run);
+ true ->
+ merge_tests(Dir,Test,Run)
+ end,
+ run_per_node(Ts,insert_in_order({Node,{Run1,Skip}},Result),
+ MergeTests);
+run_per_node([],Result,_) ->
Result.
merge_tests(Dir,Test={all,_},TestDirs) ->
@@ -281,6 +288,8 @@ collect_tests(Terms,TestSpec,Relaxed) ->
{Terms2, TestSpec3} = filter_init_terms(Terms, [], TestSpec2),
add_tests(Terms2,TestSpec3).
+get_global([{merge_tests, Bool} | Ts], Spec) ->
+ get_global(Ts,Spec#testspec{ merge_tests = Bool });
get_global([{alias,Ref,Dir}|Ts],Spec=#testspec{alias=Refs}) ->
get_global(Ts,Spec#testspec{alias=[{Ref,get_absdir(Dir,Spec)}|Refs]});
get_global([{node,Ref,Node}|Ts],Spec=#testspec{nodes=Refs}) ->
@@ -668,7 +677,7 @@ add_tests([{suites,Node,Dir,Ss}|Ts],Spec) ->
Tests = Spec#testspec.tests,
Tests1 = insert_suites(ref2node(Node,Spec#testspec.nodes),
ref2dir(Dir,Spec#testspec.alias),
- Ss,Tests),
+ Ss,Tests, Spec#testspec.merge_tests),
add_tests(Ts,Spec#testspec{tests=Tests1});
%% --- groups ---
@@ -694,13 +703,15 @@ add_tests([{groups,Node,Dir,Suite,Gs}|Ts],Spec) ->
Tests = Spec#testspec.tests,
Tests1 = insert_groups(ref2node(Node,Spec#testspec.nodes),
ref2dir(Dir,Spec#testspec.alias),
- Suite,Gs,all,Tests),
+ Suite,Gs,all,Tests,
+ Spec#testspec.merge_tests),
add_tests(Ts,Spec#testspec{tests=Tests1});
add_tests([{groups,Node,Dir,Suite,Gs,{cases,TCs}}|Ts],Spec) ->
Tests = Spec#testspec.tests,
Tests1 = insert_groups(ref2node(Node,Spec#testspec.nodes),
ref2dir(Dir,Spec#testspec.alias),
- Suite,Gs,TCs,Tests),
+ Suite,Gs,TCs,Tests,
+ Spec#testspec.merge_tests),
add_tests(Ts,Spec#testspec{tests=Tests1});
%% --- cases ---
@@ -715,7 +726,7 @@ add_tests([{cases,Node,Dir,Suite,Cs}|Ts],Spec) ->
Tests = Spec#testspec.tests,
Tests1 = insert_cases(ref2node(Node,Spec#testspec.nodes),
ref2dir(Dir,Spec#testspec.alias),
- Suite,Cs,Tests),
+ Suite,Cs,Tests, Spec#testspec.merge_tests),
add_tests(Ts,Spec#testspec{tests=Tests1});
%% --- skip_suites ---
@@ -730,7 +741,8 @@ add_tests([{skip_suites,Node,Dir,Ss,Cmt}|Ts],Spec) ->
Tests = Spec#testspec.tests,
Tests1 = skip_suites(ref2node(Node,Spec#testspec.nodes),
ref2dir(Dir,Spec#testspec.alias),
- Ss,Cmt,Tests),
+ Ss,Cmt,Tests,
+ Spec#testspec.merge_tests),
add_tests(Ts,Spec#testspec{tests=Tests1});
%% --- skip_groups ---
@@ -752,13 +764,15 @@ add_tests([{skip_groups,Node,Dir,Suite,Gs,Cmt}|Ts],Spec) ->
Tests = Spec#testspec.tests,
Tests1 = skip_groups(ref2node(Node,Spec#testspec.nodes),
ref2dir(Dir,Spec#testspec.alias),
- Suite,Gs,all,Cmt,Tests),
+ Suite,Gs,all,Cmt,Tests,
+ Spec#testspec.merge_tests),
add_tests(Ts,Spec#testspec{tests=Tests1});
add_tests([{skip_groups,Node,Dir,Suite,Gs,{cases,TCs},Cmt}|Ts],Spec) ->
Tests = Spec#testspec.tests,
Tests1 = skip_groups(ref2node(Node,Spec#testspec.nodes),
ref2dir(Dir,Spec#testspec.alias),
- Suite,Gs,TCs,Cmt,Tests),
+ Suite,Gs,TCs,Cmt,Tests,
+ Spec#testspec.merge_tests),
add_tests(Ts,Spec#testspec{tests=Tests1});
%% --- skip_cases ---
@@ -773,7 +787,7 @@ add_tests([{skip_cases,Node,Dir,Suite,Cs,Cmt}|Ts],Spec) ->
Tests = Spec#testspec.tests,
Tests1 = skip_cases(ref2node(Node,Spec#testspec.nodes),
ref2dir(Dir,Spec#testspec.alias),
- Suite,Cs,Cmt,Tests),
+ Suite,Cs,Cmt,Tests,Spec#testspec.merge_tests),
add_tests(Ts,Spec#testspec{tests=Tests1});
%% --- handled/errors ---
@@ -783,6 +797,9 @@ add_tests([{alias,_,_}|Ts],Spec) -> % handled
add_tests([{node,_,_}|Ts],Spec) -> % handled
add_tests(Ts,Spec);
+add_tests([{merge_tests, _} | Ts], Spec) -> % handled
+ add_tests(Ts,Spec);
+
%% check if it's a CT term that has bad format or if the user seems to
%% have added something of his/her own, which we'll let pass if relaxed
%% mode is enabled.
@@ -835,17 +852,22 @@ separate([],_,_,_) ->
%% {Suite2,[{GrOrCase21,{skip,Cmt}},GrOrCase22,...]},...]}
%% GrOrCase = {GroupName,[Case1,Case2,...]} | Case
-insert_suites(Node,Dir,[S|Ss],Tests) ->
- Tests1 = insert_cases(Node,Dir,S,all,Tests),
- insert_suites(Node,Dir,Ss,Tests1);
-insert_suites(_Node,_Dir,[],Tests) ->
+insert_suites(Node,Dir,[S|Ss],Tests, MergeTests) ->
+ Tests1 = insert_cases(Node,Dir,S,all,Tests,MergeTests),
+ insert_suites(Node,Dir,Ss,Tests1,MergeTests);
+insert_suites(_Node,_Dir,[],Tests,_MergeTests) ->
Tests;
-insert_suites(Node,Dir,S,Tests) ->
- insert_suites(Node,Dir,[S],Tests).
+insert_suites(Node,Dir,S,Tests,MergeTests) ->
+ insert_suites(Node,Dir,[S],Tests,MergeTests).
-insert_groups(Node,Dir,Suite,Group,Cases,Tests) when is_atom(Group) ->
- insert_groups(Node,Dir,Suite,[Group],Cases,Tests);
-insert_groups(Node,Dir,Suite,Groups,Cases,Tests) when
+insert_groups(Node,Dir,Suite,Group,Cases,Tests,MergeTests)
+ when is_atom(Group) ->
+ insert_groups(Node,Dir,Suite,[Group],Cases,Tests,MergeTests);
+insert_groups(Node,Dir,Suite,Groups,Cases,Tests,false) when
+ ((Cases == all) or is_list(Cases)) and is_list(Groups) ->
+ Groups1 = [{Gr,Cases} || Gr <- Groups],
+ append({{Node,Dir},[{Suite,Groups1}]},Tests);
+insert_groups(Node,Dir,Suite,Groups,Cases,Tests,true) when
((Cases == all) or is_list(Cases)) and is_list(Groups) ->
case lists:keysearch({Node,Dir},1,Tests) of
{value,{{Node,Dir},[{all,_}]}} ->
@@ -859,9 +881,10 @@ insert_groups(Node,Dir,Suite,Groups,Cases,Tests) when
Groups1 = [{Gr,Cases} || Gr <- Groups],
insert_in_order({{Node,Dir},[{Suite,Groups1}]},Tests)
end;
-insert_groups(Node,Dir,Suite,Groups,Case,Tests) when is_atom(Case) ->
+insert_groups(Node,Dir,Suite,Groups,Case,Tests, MergeTests)
+ when is_atom(Case) ->
Cases = if Case == all -> all; true -> [Case] end,
- insert_groups(Node,Dir,Suite,Groups,Cases,Tests).
+ insert_groups(Node,Dir,Suite,Groups,Cases,Tests, MergeTests).
insert_groups1(_Suite,_Groups,all) ->
all;
@@ -891,7 +914,9 @@ insert_groups2([Group={GrName,Cases}|Groups],GrAndCases) ->
insert_groups2([],GrAndCases) ->
GrAndCases.
-insert_cases(Node,Dir,Suite,Cases,Tests) when is_list(Cases) ->
+insert_cases(Node,Dir,Suite,Cases,Tests,false) when is_list(Cases) ->
+ append({{Node,Dir},[{Suite,Cases}]},Tests);
+insert_cases(Node,Dir,Suite,Cases,Tests,true) when is_list(Cases) ->
case lists:keysearch({Node,Dir},1,Tests) of
{value,{{Node,Dir},[{all,_}]}} ->
Tests;
@@ -901,8 +926,8 @@ insert_cases(Node,Dir,Suite,Cases,Tests) when is_list(Cases) ->
false ->
insert_in_order({{Node,Dir},[{Suite,Cases}]},Tests)
end;
-insert_cases(Node,Dir,Suite,Case,Tests) when is_atom(Case) ->
- insert_cases(Node,Dir,Suite,[Case],Tests).
+insert_cases(Node,Dir,Suite,Case,Tests,MergeTests) when is_atom(Case) ->
+ insert_cases(Node,Dir,Suite,[Case],Tests,MergeTests).
insert_cases1(_Suite,_Cases,all) ->
all;
@@ -917,22 +942,28 @@ insert_cases1(Suite,Cases,Suites0) ->
insert_in_order({Suite,Cases},Suites0)
end.
-skip_suites(Node,Dir,[S|Ss],Cmt,Tests) ->
- Tests1 = skip_cases(Node,Dir,S,all,Cmt,Tests),
- skip_suites(Node,Dir,Ss,Cmt,Tests1);
-skip_suites(_Node,_Dir,[],_Cmt,Tests) ->
+skip_suites(Node,Dir,[S|Ss],Cmt,Tests,MergeTests) ->
+ Tests1 = skip_cases(Node,Dir,S,all,Cmt,Tests,MergeTests),
+ skip_suites(Node,Dir,Ss,Cmt,Tests1,MergeTests);
+skip_suites(_Node,_Dir,[],_Cmt,Tests,_MergeTests) ->
Tests;
-skip_suites(Node,Dir,S,Cmt,Tests) ->
- skip_suites(Node,Dir,[S],Cmt,Tests).
-
-skip_groups(Node,Dir,Suite,Group,all,Cmt,Tests) when is_atom(Group) ->
- skip_groups(Node,Dir,Suite,[Group],all,Cmt,Tests);
-skip_groups(Node,Dir,Suite,Group,Cases,Cmt,Tests) when is_atom(Group) ->
- skip_groups(Node,Dir,Suite,[Group],Cases,Cmt,Tests);
-skip_groups(Node,Dir,Suite,Groups,Case,Cmt,Tests) when is_atom(Case),
- Case =/= all ->
- skip_groups(Node,Dir,Suite,Groups,[Case],Cmt,Tests);
-skip_groups(Node,Dir,Suite,Groups,Cases,Cmt,Tests) when
+skip_suites(Node,Dir,S,Cmt,Tests,MergeTests) ->
+ skip_suites(Node,Dir,[S],Cmt,Tests,MergeTests).
+
+skip_groups(Node,Dir,Suite,Group,all,Cmt,Tests,MergeTests)
+ when is_atom(Group) ->
+ skip_groups(Node,Dir,Suite,[Group],all,Cmt,Tests,MergeTests);
+skip_groups(Node,Dir,Suite,Group,Cases,Cmt,Tests,MergeTests)
+ when is_atom(Group) ->
+ skip_groups(Node,Dir,Suite,[Group],Cases,Cmt,Tests,MergeTests);
+skip_groups(Node,Dir,Suite,Groups,Case,Cmt,Tests,MergeTests)
+ when is_atom(Case),Case =/= all ->
+ skip_groups(Node,Dir,Suite,Groups,[Case],Cmt,Tests,MergeTests);
+skip_groups(Node,Dir,Suite,Groups,Cases,Cmt,Tests,false) when
+ ((Cases == all) or is_list(Cases)) and is_list(Groups) ->
+ Suites1 = skip_groups1(Suite,[{Gr,Cases} || Gr <- Groups],Cmt,[]),
+ append({{Node,Dir},Suites1},Tests);
+skip_groups(Node,Dir,Suite,Groups,Cases,Cmt,Tests,true) when
((Cases == all) or is_list(Cases)) and is_list(Groups) ->
Suites =
case lists:keysearch({Node,Dir},1,Tests) of
@@ -943,9 +974,10 @@ skip_groups(Node,Dir,Suite,Groups,Cases,Cmt,Tests) when
end,
Suites1 = skip_groups1(Suite,[{Gr,Cases} || Gr <- Groups],Cmt,Suites),
insert_in_order({{Node,Dir},Suites1},Tests);
-skip_groups(Node,Dir,Suite,Groups,Case,Cmt,Tests) when is_atom(Case) ->
+skip_groups(Node,Dir,Suite,Groups,Case,Cmt,Tests,MergeTests)
+ when is_atom(Case) ->
Cases = if Case == all -> all; true -> [Case] end,
- skip_groups(Node,Dir,Suite,Groups,Cases,Cmt,Tests).
+ skip_groups(Node,Dir,Suite,Groups,Cases,Cmt,Tests,MergeTests).
skip_groups1(Suite,Groups,Cmt,Suites0) ->
SkipGroups = lists:map(fun(Group) ->
@@ -959,7 +991,10 @@ skip_groups1(Suite,Groups,Cmt,Suites0) ->
insert_in_order({Suite,SkipGroups},Suites0)
end.
-skip_cases(Node,Dir,Suite,Cases,Cmt,Tests) when is_list(Cases) ->
+skip_cases(Node,Dir,Suite,Cases,Cmt,Tests,false) when is_list(Cases) ->
+ Suites1 = skip_cases1(Suite,Cases,Cmt,[]),
+ append({{Node,Dir},Suites1},Tests);
+skip_cases(Node,Dir,Suite,Cases,Cmt,Tests,true) when is_list(Cases) ->
Suites =
case lists:keysearch({Node,Dir},1,Tests) of
{value,{{Node,Dir},Suites0}} ->
@@ -969,8 +1004,8 @@ skip_cases(Node,Dir,Suite,Cases,Cmt,Tests) when is_list(Cases) ->
end,
Suites1 = skip_cases1(Suite,Cases,Cmt,Suites),
insert_in_order({{Node,Dir},Suites1},Tests);
-skip_cases(Node,Dir,Suite,Case,Cmt,Tests) when is_atom(Case) ->
- skip_cases(Node,Dir,Suite,[Case],Cmt,Tests).
+skip_cases(Node,Dir,Suite,Case,Cmt,Tests,MergeTests) when is_atom(Case) ->
+ skip_cases(Node,Dir,Suite,[Case],Cmt,Tests,MergeTests).
skip_cases1(Suite,Cases,Cmt,Suites0) ->
SkipCases = lists:map(fun(C) ->
@@ -984,6 +1019,9 @@ skip_cases1(Suite,Cases,Cmt,Suites0) ->
insert_in_order({Suite,SkipCases},Suites0)
end.
+append(Elem, List) ->
+ List ++ [Elem].
+
insert_in_order([E|Es],List) ->
List1 = insert_elem(E,List,[]),
insert_in_order(Es,List1);
@@ -1056,6 +1094,7 @@ valid_terms() ->
{userconfig,2},
{userconfig,3},
{alias,3},
+ {merge_tests,1},
{logdir,2},
{logdir,3},
{label,2},
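
In short, the new merge_tests handling in ct_testspec works as follows: with merging enabled (the default), a later term for the same node and directory is folded into the already collected entry, while with merging disabled the new append/2 helper adds it as a separate entry at the end, so the declared order is preserved. A rough sketch of the resulting internal tests list (node, directory, suite and case names are invented; the exact layout is an internal detail of ct_testspec):

    %% With merge_tests = true (default), a second cases term for the same
    %% {Node,Dir} is merged into the existing entry:
    %%
    %%   [{{ct@host,"dir_1"},[{my_SUITE,[tc_1,tc_2]}]}]
    %%
    %% With merge_tests = false, append/2 keeps it as a separate entry,
    %% preserving the order in which the terms were written:
    %%
    %%   [{{ct@host,"dir_1"},[{my_SUITE,[tc_1]}]},
    %%    {{ct@host,"dir_1"},[{my_SUITE,[tc_2]}]}]
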
diff --git a/lib/common_test/src/ct_util.hrl b/lib/common_test/src/ct_util.hrl
index 4ed29bdcd1..f0255c533c 100644
--- a/lib/common_test/src/ct_util.hrl
+++ b/lib/common_test/src/ct_util.hrl
@@ -41,7 +41,8 @@
multiply_timetraps=[],
scale_timetraps=[],
alias=[],
- tests=[]}).
+ tests=[],
+ merge_tests = true }).
-record(cover, {app=none,
level=details,
diff --git a/lib/common_test/test/ct_testspec_1_SUITE.erl b/lib/common_test/test/ct_testspec_1_SUITE.erl
index c2a7bd8fd7..e080e074bf 100644
--- a/lib/common_test/test/ct_testspec_1_SUITE.erl
+++ b/lib/common_test/test/ct_testspec_1_SUITE.erl
@@ -67,7 +67,21 @@ all() ->
skip_group_testcase, topgroup, subgroup, skip_subgroup,
subgroup_all_testcases, skip_subgroup_all_testcases,
subgroup_testcase, skip_subgroup_testcase,
- sub_skipped_by_top, testcase_in_multiple_groups].
+ sub_skipped_by_top, testcase_in_multiple_groups,
+ order_of_tests_in_multiple_dirs_no_merge_tests,
+ order_of_tests_in_multiple_suites_no_merge_tests,
+ order_of_suites_in_multiple_dirs_no_merge_tests,
+ order_of_groups_in_multiple_dirs_no_merge_tests,
+ order_of_groups_in_multiple_suites_no_merge_tests,
+ order_of_tests_in_multiple_dirs,
+ order_of_tests_in_multiple_suites,
+ order_of_suites_in_multiple_dirs,
+ order_of_groups_in_multiple_dirs,
+ order_of_groups_in_multiple_suites,
+ order_of_tests_in_multiple_suites_with_skip_no_merge_tests,
+ order_of_tests_in_multiple_suites_with_skip,
+ all_plus_one_tc_no_merge_tests,
+ all_plus_one_tc].
groups() ->
[].
@@ -78,7 +92,6 @@ init_per_group(_GroupName, Config) ->
end_per_group(_GroupName, Config) ->
Config.
-
%%--------------------------------------------------------------------
%% TEST CASES
%%--------------------------------------------------------------------
@@ -370,6 +383,223 @@ testcase_in_multiple_groups(Config) when is_list(Config) ->
setup_and_execute(testcase_in_multiple_groups, TestSpec, Config).
%%%-----------------------------------------------------------------
+%%%
+
+order_of_tests_in_multiple_dirs_no_merge_tests(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir1 = filename:join(DataDir, "groups_1"),
+ TestDir2 = filename:join(DataDir, "groups_2"),
+ TestSpec = [{merge_tests, false},
+ {cases,TestDir1,groups_12_SUITE,[testcase_1a]},
+ {cases,TestDir2,groups_22_SUITE,[testcase_1]},
+ {cases,TestDir1,groups_12_SUITE,[testcase_1b]}],
+
+ setup_and_execute(order_of_tests_in_multiple_dirs_no_merge_tests,
+ TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+order_of_tests_in_multiple_suites_no_merge_tests(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir1 = filename:join(DataDir, "groups_1"),
+ TestSpec = [{merge_tests, false},
+ {cases,TestDir1,groups_12_SUITE,[testcase_1a]},
+ {cases,TestDir1,groups_11_SUITE,[testcase_1]},
+ {cases,TestDir1,groups_12_SUITE,[testcase_1b]}],
+
+ setup_and_execute(order_of_tests_in_multiple_suites_no_merge_tests,
+ TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+order_of_suites_in_multiple_dirs_no_merge_tests(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir1 = filename:join(DataDir, "groups_1"),
+ TestDir2 = filename:join(DataDir, "groups_2"),
+ TestSpec = [{merge_tests, false},
+ {suites,TestDir1,groups_12_SUITE},
+ {suites,TestDir2,groups_22_SUITE},
+ {suites,TestDir1,groups_11_SUITE}],
+
+ setup_and_execute(order_of_suites_in_multiple_dirs_no_merge_tests,
+ TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+order_of_groups_in_multiple_dirs_no_merge_tests(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir1 = filename:join(DataDir, "groups_1"),
+ TestDir2 = filename:join(DataDir, "groups_2"),
+ TestSpec = [{merge_tests, false},
+ {groups,TestDir1,groups_12_SUITE,test_group_1a},
+ {groups,TestDir2,groups_22_SUITE,test_group_1a},
+ {groups,TestDir1,groups_12_SUITE,test_group_1b}],
+
+ setup_and_execute(order_of_groups_in_multiple_dirs_no_merge_tests,
+ TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+order_of_groups_in_multiple_suites_no_merge_tests(Config)
+ when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir1 = filename:join(DataDir, "groups_1"),
+ TestSpec = [{merge_tests, false},
+ {groups,TestDir1,groups_12_SUITE,test_group_1a},
+ {groups,TestDir1,groups_11_SUITE,test_group_1a},
+ {groups,TestDir1,groups_12_SUITE,test_group_1b}],
+
+ setup_and_execute(order_of_groups_in_multiple_suites_no_merge_tests,
+ TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+order_of_tests_in_multiple_suites_with_skip_no_merge_tests(Config)
+ when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir1 = filename:join(DataDir, "groups_1"),
+ TestSpec = [{merge_tests, false},
+ {cases,TestDir1,groups_12_SUITE,[testcase_1a]},
+ {cases,TestDir1,groups_11_SUITE,[testcase_1]},
+ {cases,TestDir1,groups_12_SUITE,[testcase_1b]},
+ {cases,TestDir1,groups_11_SUITE,[testcase_2]},
+ {skip_cases,TestDir1,groups_12_SUITE,[testcase_1b],"Skip it"}],
+
+ setup_and_execute(
+ order_of_tests_in_multiple_suites_with_skip_no_merge_tests,
+ TestSpec, Config).
+
+
+%%%-----------------------------------------------------------------
+%%%
+
+order_of_tests_in_multiple_dirs(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir1 = filename:join(DataDir, "groups_1"),
+ TestDir2 = filename:join(DataDir, "groups_2"),
+ TestSpec = [{cases,TestDir1,groups_12_SUITE,[testcase_1a]},
+ {cases,TestDir2,groups_22_SUITE,[testcase_1]},
+ {cases,TestDir1,groups_12_SUITE,[testcase_1b]}],
+
+ setup_and_execute(order_of_tests_in_multiple_dirs,
+ TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+order_of_tests_in_multiple_suites(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir1 = filename:join(DataDir, "groups_1"),
+ TestSpec = [{cases,TestDir1,groups_12_SUITE,[testcase_1a]},
+ {cases,TestDir1,groups_11_SUITE,[testcase_1]},
+ {cases,TestDir1,groups_12_SUITE,[testcase_1b]}],
+
+ setup_and_execute(order_of_tests_in_multiple_suites,
+ TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+order_of_suites_in_multiple_dirs(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir1 = filename:join(DataDir, "groups_1"),
+ TestDir2 = filename:join(DataDir, "groups_2"),
+ TestSpec = [{suites,TestDir1,groups_12_SUITE},
+ {suites,TestDir2,groups_22_SUITE},
+ {suites,TestDir1,groups_11_SUITE}],
+
+ setup_and_execute(order_of_suites_in_multiple_dirs,
+ TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+order_of_groups_in_multiple_dirs(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir1 = filename:join(DataDir, "groups_1"),
+ TestDir2 = filename:join(DataDir, "groups_2"),
+ TestSpec = [{groups,TestDir1,groups_12_SUITE,test_group_1a},
+ {groups,TestDir2,groups_22_SUITE,test_group_1a},
+ {groups,TestDir1,groups_12_SUITE,test_group_1b}],
+
+ setup_and_execute(order_of_groups_in_multiple_dirs,
+ TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+order_of_groups_in_multiple_suites(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir1 = filename:join(DataDir, "groups_1"),
+ TestSpec = [{groups,TestDir1,groups_12_SUITE,test_group_1a},
+ {groups,TestDir1,groups_11_SUITE,test_group_1a},
+ {groups,TestDir1,groups_12_SUITE,test_group_1b}],
+
+ setup_and_execute(order_of_groups_in_multiple_suites,
+ TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+order_of_tests_in_multiple_suites_with_skip(Config) when is_list(Config) ->
+ DataDir = ?config(data_dir, Config),
+
+ TestDir1 = filename:join(DataDir, "groups_1"),
+ TestSpec = [{cases,TestDir1,groups_12_SUITE,[testcase_1a]},
+ {cases,TestDir1,groups_11_SUITE,[testcase_1]},
+ {cases,TestDir1,groups_12_SUITE,[testcase_1b]},
+ {cases,TestDir1,groups_11_SUITE,[testcase_2]},
+ {skip_cases,TestDir1,groups_12_SUITE,[testcase_1b],"Skip it!"}],
+
+ setup_and_execute(order_of_tests_in_multiple_suites_with_skip,
+ TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+all_plus_one_tc_no_merge_tests(Config) when is_list(Config) ->
+
+ DataDir = ?config(data_dir, Config),
+
+ TestDir1 = filename:join(DataDir, "groups_1"),
+ TestSpec = [{merge_tests,false},
+ {suites,TestDir1,groups_12_SUITE},
+ {cases,TestDir1,groups_12_SUITE,[testcase_1a]}],
+
+ setup_and_execute(all_plus_one_tc_no_merge_tests,
+ TestSpec, Config).
+
+%%%-----------------------------------------------------------------
+%%%
+
+all_plus_one_tc(Config) when is_list(Config) ->
+
+ DataDir = ?config(data_dir, Config),
+
+ TestDir1 = filename:join(DataDir, "groups_1"),
+ TestSpec = [{suites,TestDir1,groups_12_SUITE},
+ {cases,TestDir1,groups_12_SUITE,[testcase_1a]}],
+
+ setup_and_execute(all_plus_one_tc,
+ TestSpec, Config).
+
+%%%-----------------------------------------------------------------
%%% HELP FUNCTIONS
%%%-----------------------------------------------------------------
@@ -432,6 +662,719 @@ events_to_check(_, 0) ->
events_to_check(Test, N) ->
test_events(Test) ++ events_to_check(Test, N-1).
+test_events(all_suites) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_start,{simple_1_SUITE,init_per_suite}},
+ {?eh,tc_done,{simple_1_SUITE,end_per_suite,'_'}},
+ {?eh,tc_start,{simple_2_SUITE,init_per_suite}},
+ {?eh,test_stats,{4,0,{0,0}}},
+ {?eh,tc_done,{simple_2_SUITE,end_per_suite,'_'}},
+ {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(skip_all_suites) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_user_skip,{simple_1_SUITE,all,"SKIPPED!"}},
+ {?eh,tc_user_skip,{simple_2_SUITE,all,"SKIPPED!"}},
+ {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(suite) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_start,{simple_1_SUITE,init_per_suite}},
+ {?eh,test_stats,{2,0,{0,0}}},
+ {?eh,tc_done,{simple_1_SUITE,end_per_suite,'_'}},
+ {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(skip_suite) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_user_skip,{simple_1_SUITE,all,"SKIPPED!"}},
+ {?eh,tc_done,{simple_2_SUITE,end_per_suite,'_'}},
+ {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(all_testcases) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_start,{simple_1_SUITE,init_per_suite}},
+ {?eh,test_stats,{2,0,{0,0}}},
+ {?eh,tc_done,{simple_1_SUITE,end_per_suite,'_'}},
+ {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(skip_all_testcases) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_user_skip,{simple_1_SUITE,all,"SKIPPED!"}},
+ {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(testcase) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_start,{simple_1_SUITE,init_per_suite}},
+ {?eh,test_stats,{1,0,{0,0}}},
+ {negative,{?eh,test_stats,{2,0,{0,0}}},
+ {?eh,tc_done,{simple_1_SUITE,end_per_suite,'_'}}},
+ {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(skip_testcase) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_start,{simple_1_SUITE,init_per_suite}},
+ {?eh,tc_user_skip,{simple_1_SUITE,tc1,"SKIPPED!"}},
+ {?eh,tc_start,{simple_1_SUITE,tc2}},
+ {?eh,tc_start,{simple_1_SUITE,end_per_suite}},
+
+ {?eh,tc_start,{simple_2_SUITE,init_per_suite}},
+ {?eh,tc_user_skip,{simple_2_SUITE,tc2,"SKIPPED!"}},
+ {?eh,tc_start,{simple_2_SUITE,tc1}},
+ {?eh,test_stats,{2,0,{2,0}}},
+ {?eh,tc_start,{simple_2_SUITE,end_per_suite}},
+
+ {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(all_groups) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_start,{groups_11_SUITE,init_per_suite}},
+ {?eh,test_stats,{12,0,{0,0}}},
+ {?eh,tc_done,{groups_11_SUITE,end_per_suite,'_'}},
+ {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(skip_all_groups) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_start,{groups_11_SUITE,init_per_suite}},
+ {?eh,tc_user_skip, {groups_11_SUITE,{group,test_group_1a},"SKIPPED!"}},
+ {?eh,test_stats,{0,0,{1,0}}},
+ {?eh,tc_user_skip, {groups_11_SUITE,{group,test_group_1b},"SKIPPED!"}},
+ {?eh,test_stats,{0,0,{2,0}}},
+ {?eh,tc_user_skip, {groups_11_SUITE,{group,test_group_2},"SKIPPED!"}},
+ {?eh,test_stats,{0,0,{3,0}}},
+ {?eh,tc_user_skip, {groups_11_SUITE,{group,test_group_4},"SKIPPED!"}},
+ {?eh,test_stats,{0,0,{4,0}}},
+ {?eh,tc_done,{groups_11_SUITE,end_per_suite,'_'}},
+ {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(group) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_start,{groups_11_SUITE,init_per_suite}},
+ {?eh,tc_start,{groups_11_SUITE,{init_per_group,test_group_1a,[]}}},
+ {?eh,tc_start,{groups_11_SUITE,testcase_1a}},
+ {?eh,tc_start,{groups_11_SUITE,testcase_1b}},
+ {?eh,test_stats,{2,0,{0,0}}},
+ {?eh,tc_done,{groups_11_SUITE,{end_per_group,test_group_1a,[]},'_'}},
+ {?eh,tc_done,{groups_11_SUITE,end_per_suite,'_'}},
+ {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(skip_group) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_start,{groups_11_SUITE,init_per_suite}},
+
+ {?eh,tc_start,{groups_11_SUITE,{init_per_group,test_group_1a,[]}}},
+ {?eh,tc_start,{groups_11_SUITE,testcase_1a}},
+ {?eh,tc_start,{groups_11_SUITE,testcase_1b}},
+ {?eh,test_stats,{2,0,{0,0}}},
+ {?eh,tc_done,{groups_11_SUITE,{end_per_group,test_group_1a,[]},'_'}},
+
+ {?eh,tc_user_skip, {groups_11_SUITE,{group,test_group_1b},"SKIPPED!"}},
+ {?eh,test_stats,{2,0,{1,0}}},
+ {negative,{?eh,tc_user_skip,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(group_all_testcases) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_start,{groups_11_SUITE,init_per_suite}},
+ {?eh,tc_start,{groups_11_SUITE,{init_per_group,test_group_1a,[]}}},
+ {?eh,tc_start,{groups_11_SUITE,testcase_1a}},
+ {?eh,tc_start,{groups_11_SUITE,testcase_1b}},
+ {?eh,test_stats,{2,0,{0,0}}},
+ {?eh,tc_done,{groups_11_SUITE,{end_per_group,test_group_1a,[]},'_'}},
+ {?eh,tc_done,{groups_11_SUITE,end_per_suite,'_'}},
+ {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(skip_group_all_testcases) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_start,{groups_11_SUITE,init_per_suite}},
+ {?eh,tc_user_skip, {groups_11_SUITE,{group,test_group_1a},"SKIPPED!"}},
+ {?eh,tc_user_skip, {groups_11_SUITE,{group,test_group_1b},"SKIPPED!"}},
+ {?eh,test_stats,{0,0,{2,0}}},
+ {?eh,tc_start,{groups_11_SUITE,end_per_suite}},
+ {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(group_testcase) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_start,{groups_11_SUITE,init_per_suite}},
+ {?eh,tc_start,{groups_11_SUITE,{init_per_group,test_group_1a,[]}}},
+ {?eh,tc_start,{groups_11_SUITE,testcase_1a}},
+ {?eh,test_stats,{1,0,{0,0}}},
+ {negative,{?eh,test_stats,{2,0,{0,0}}},
+ {?eh,tc_done,{groups_11_SUITE,{end_per_group,test_group_1a,[]},'_'}}},
+
+ {?eh,tc_done,{groups_11_SUITE,end_per_suite,'_'}},
+ {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(skip_group_testcase) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_start,{groups_11_SUITE,init_per_suite}},
+
+ {?eh,tc_start,{groups_11_SUITE,{init_per_group,test_group_1a,[]}}},
+ {?eh,tc_start,{groups_11_SUITE,testcase_1a}},
+ {?eh,tc_user_skip,{groups_11_SUITE,testcase_1b,"SKIPPED!"}},
+ {?eh,test_stats,{1,0,{1,0}}},
+ {?eh,tc_done,{groups_11_SUITE,{end_per_group,test_group_1a,[]},'_'}},
+
+ {?eh,tc_start,{groups_11_SUITE,{init_per_group,test_group_1b,[]}}},
+ {?eh,tc_start,{groups_11_SUITE,testcase_1b}},
+ {?eh,tc_user_skip,{groups_11_SUITE,testcase_1a,"SKIPPED!"}},
+ {?eh,test_stats,{2,0,{2,0}}},
+ {?eh,tc_done,{groups_11_SUITE,{end_per_group,test_group_1b,[]},'_'}},
+
+ {negative,{?eh,tc_user_skip,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(topgroup) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_start,{groups_12_SUITE,init_per_suite}},
+
+ {parallel,
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_2,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{init_per_group,test_group_2,[parallel]},ok}},
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_3,[{repeat,2}]}}},
+ {?eh,tc_start,
+ {groups_12_SUITE,{end_per_group,test_group_3,[{repeat,2}]}}}
+ ],
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_3,[]}}},
+ {?eh,tc_start,
+ {groups_12_SUITE,{end_per_group,test_group_3,[]}}}
+ ],
+ {?eh,test_stats,{6,0,{0,0}}},
+ {?eh,tc_start,
+ {groups_12_SUITE,{end_per_group,test_group_2,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{end_per_group,test_group_2,[parallel]},ok}}]},
+
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_4,[]}}},
+ {parallel,
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_5,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{init_per_group,test_group_5,[parallel]},ok}},
+ {parallel,
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_6,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{init_per_group,test_group_6,[parallel]},ok}},
+ [{?eh,tc_start,{groups_12_SUITE,{init_per_group,test_group_7,'_'}}},
+ {?eh,tc_start,{groups_12_SUITE,{end_per_group,test_group_7,'_'}}}],
+ {shuffle,
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_8,
+ [{shuffle,'_'},sequence]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{init_per_group,test_group_8,
+ [{shuffle,'_'},sequence]},ok}},
+ {?eh,tc_start,{groups_12_SUITE,{end_per_group,test_group_8,
+ [shuffle,sequence]}}},
+ {?eh,tc_done,{groups_12_SUITE,{end_per_group,test_group_8,
+ [shuffle,sequence]},ok}}
+ ]},
+ {?eh,tc_start,
+ {groups_12_SUITE,{end_per_group,test_group_6,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{end_per_group,test_group_6,[parallel]},ok}}
+ ]},
+ {?eh,test_stats,{12,0,{0,0}}},
+ {?eh,tc_start,
+ {groups_12_SUITE,{end_per_group,test_group_5,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{end_per_group,test_group_5,[parallel]},ok}}]},
+ {?eh,tc_start,{groups_12_SUITE,{end_per_group,test_group_4,[]}}}],
+
+ {?eh,tc_done,{groups_12_SUITE,end_per_suite,'_'}},
+ {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(subgroup) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_start,{groups_12_SUITE,init_per_suite}},
+
+ {parallel,
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_2,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{init_per_group,test_group_2,[parallel]},ok}},
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_3,[{repeat,2}]}}},
+ {?eh,tc_start,
+ {groups_12_SUITE,{end_per_group,test_group_3,[{repeat,2}]}}}
+ ],
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_3,[]}}},
+ {?eh,tc_start,
+ {groups_12_SUITE,{end_per_group,test_group_3,[]}}}
+ ],
+ {?eh,test_stats,{4,0,{0,0}}},
+ {?eh,tc_start,
+ {groups_12_SUITE,{end_per_group,test_group_2,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{end_per_group,test_group_2,[parallel]},ok}}]},
+ {?eh,tc_done,{groups_12_SUITE,end_per_suite,'_'}},
+ {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(skip_subgroup) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_start,{groups_12_SUITE,init_per_suite}},
+
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_4,[]}}},
+ {parallel,
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_5,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{init_per_group,test_group_5,[parallel]},ok}},
+ {parallel,
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_6,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{init_per_group,test_group_6,[parallel]},ok}},
+ [{?eh,tc_start,{groups_12_SUITE,{init_per_group,test_group_7,'_'}}},
+ {?eh,tc_start,{groups_12_SUITE,{end_per_group,test_group_7,'_'}}}],
+ {?eh,tc_user_skip,
+ {groups_12_SUITE,{group,test_group_8},"SKIPPED!"}},
+ {?eh,tc_start,
+ {groups_12_SUITE,{end_per_group,test_group_6,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{end_per_group,test_group_6,[parallel]},ok}}
+ ]},
+ {?eh,tc_start,
+ {groups_12_SUITE,{end_per_group,test_group_5,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{end_per_group,test_group_5,[parallel]},ok}}]},
+ {?eh,tc_start,{groups_12_SUITE,{end_per_group,test_group_4,[]}}}],
+
+ {?eh,tc_done,{groups_12_SUITE,end_per_suite,'_'}},
+ {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(subgroup_all_testcases) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_start,{groups_12_SUITE,init_per_suite}},
+
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_4,[]}}},
+ {parallel,
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_5,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{init_per_group,test_group_5,[parallel]},ok}},
+ {parallel,
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_6,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{init_per_group,test_group_6,[parallel]},ok}},
+ [{?eh,tc_start,{groups_12_SUITE,{init_per_group,test_group_7,'_'}}},
+ {?eh,tc_start,{groups_12_SUITE,{end_per_group,test_group_7,'_'}}}],
+ {shuffle,
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_8,
+ [{shuffle,'_'},sequence]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{init_per_group,test_group_8,
+ [{shuffle,'_'},sequence]},ok}},
+ {?eh,tc_start,{groups_12_SUITE,{end_per_group,test_group_8,
+ [shuffle,sequence]}}},
+ {?eh,tc_done,{groups_12_SUITE,{end_per_group,test_group_8,
+ [shuffle,sequence]},ok}}
+ ]},
+ {?eh,tc_start,
+ {groups_12_SUITE,{end_per_group,test_group_6,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{end_per_group,test_group_6,[parallel]},ok}}
+ ]},
+ {?eh,test_stats,{6,0,{0,0}}},
+ {?eh,tc_start,
+ {groups_12_SUITE,{end_per_group,test_group_5,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{end_per_group,test_group_5,[parallel]},ok}}]},
+ {?eh,tc_start,{groups_12_SUITE,{end_per_group,test_group_4,[]}}}],
+
+ {parallel,
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_2,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{init_per_group,test_group_2,[parallel]},ok}},
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_3,[{repeat,2}]}}},
+ {?eh,tc_start,
+ {groups_12_SUITE,{end_per_group,test_group_3,[{repeat,2}]}}}
+ ],
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_3,[]}}},
+ {?eh,test_stats,{10,0,{0,0}}},
+ {?eh,tc_start,
+ {groups_12_SUITE,{end_per_group,test_group_3,[]}}}
+ ],
+ {?eh,tc_start,
+ {groups_12_SUITE,{end_per_group,test_group_2,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{end_per_group,test_group_2,[parallel]},ok}}]},
+
+ {?eh,tc_done,{groups_12_SUITE,end_per_suite,'_'}},
+ {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(skip_subgroup_all_testcases) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_start,{groups_12_SUITE,init_per_suite}},
+
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_4,[]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{init_per_group,test_group_4,[]},ok}},
+ {?eh,tc_user_skip,{groups_12_SUITE,{group,test_group_5},"SKIPPED!"}},
+ {?eh,tc_start,{groups_12_SUITE,{end_per_group,test_group_4,[]}}},
+ {?eh,tc_done,{groups_12_SUITE,{end_per_group,test_group_4,[]},ok}}
+ ],
+
+ {?eh,tc_done,{groups_12_SUITE,end_per_suite,'_'}},
+ {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(subgroup_testcase) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_start,{groups_12_SUITE,init_per_suite}},
+
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_4,[]}}},
+ {parallel,
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_5,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{init_per_group,test_group_5,[parallel]},ok}},
+ {parallel,
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_6,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{init_per_group,test_group_6,[parallel]},ok}},
+ [{?eh,tc_start,{groups_12_SUITE,{init_per_group,test_group_7,'_'}}},
+ {?eh,test_stats,{1,0,{0,0}}},
+ {?eh,tc_start,{groups_12_SUITE,{end_per_group,test_group_7,'_'}}}],
+ {?eh,tc_start,
+ {groups_12_SUITE,{end_per_group,test_group_6,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{end_per_group,test_group_6,[parallel]},ok}}
+ ]},
+ {?eh,tc_start,
+ {groups_12_SUITE,{end_per_group,test_group_5,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{end_per_group,test_group_5,[parallel]},ok}}]},
+ {?eh,tc_start,{groups_12_SUITE,{end_per_group,test_group_4,[]}}}],
+
+ {parallel,
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_2,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{init_per_group,test_group_2,[parallel]},ok}},
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_3,[{repeat,2}]}}},
+ {?eh,test_stats,{2,0,{0,0}}},
+ {?eh,tc_start,
+ {groups_12_SUITE,{end_per_group,test_group_3,[{repeat,2}]}}}
+ ],
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_3,[]}}},
+ {?eh,test_stats,{3,0,{0,0}}},
+ {?eh,tc_start,
+ {groups_12_SUITE,{end_per_group,test_group_3,[]}}}
+ ],
+ {?eh,tc_start,
+ {groups_12_SUITE,{end_per_group,test_group_2,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{end_per_group,test_group_2,[parallel]},ok}}]},
+
+ {?eh,tc_done,{groups_12_SUITE,end_per_suite,'_'}},
+ {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(skip_subgroup_testcase) ->
+ [
+
+ {?eh,start_logging,'_'},
+ {?eh,tc_start,{groups_12_SUITE,init_per_suite}},
+
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_4,[]}}},
+ {parallel,
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_5,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{init_per_group,test_group_5,[parallel]},ok}},
+ {parallel,
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_6,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{init_per_group,test_group_6,[parallel]},ok}},
+ [{?eh,tc_start,{groups_12_SUITE,{init_per_group,test_group_7,'_'}}},
+ {?eh,tc_user_skip, {groups_12_SUITE,testcase_7a,"SKIPPED!"}},
+ {?eh,test_stats,{1,0,{1,0}}},
+ {?eh,tc_user_skip, {groups_12_SUITE,testcase_7b,"SKIPPED!"}},
+ {?eh,test_stats,{1,0,{2,0}}},
+ {?eh,tc_start,{groups_12_SUITE,{end_per_group,test_group_7,'_'}}}],
+ {shuffle,
+ [{?eh,tc_start,
+ {groups_12_SUITE,{init_per_group,test_group_8,
+ [{shuffle,'_'},sequence]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{init_per_group,test_group_8,
+ [{shuffle,'_'},sequence]},ok}},
+ {?eh,tc_start,{groups_12_SUITE,{end_per_group,test_group_8,
+ [shuffle,sequence]}}},
+ {?eh,tc_done,{groups_12_SUITE,{end_per_group,test_group_8,
+ [shuffle,sequence]},ok}}
+ ]},
+ {?eh,tc_start,
+ {groups_12_SUITE,{end_per_group,test_group_6,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{end_per_group,test_group_6,[parallel]},ok}}
+ ]},
+ {?eh,test_stats,{4,0,{2,0}}},
+ {?eh,tc_start,
+ {groups_12_SUITE,{end_per_group,test_group_5,[parallel]}}},
+ {?eh,tc_done,
+ {groups_12_SUITE,{end_per_group,test_group_5,[parallel]},ok}}]},
+ {?eh,tc_start,{groups_12_SUITE,{end_per_group,test_group_4,[]}}}],
+
+ {?eh,tc_done,{groups_12_SUITE,end_per_suite,'_'}},
+ {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
+
+ ];
+
+test_events(sub_skipped_by_top) ->
+ [
+ {?eh,start_logging,'_'},
+ {?eh,tc_start,{groups_12_SUITE,init_per_suite}},
+
+ {?eh,tc_user_skip,{groups_12_SUITE,{group,test_group_4},"SKIPPED!"}},
+
+ {negative,
+ {?eh,tc_user_skip,{groups_12_SUITE,{group,test_group_4},"SKIPPED!"}},
+ {?eh,tc_done,{groups_12_SUITE,end_per_suite,'_'}}},
+
+ {negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
+ ];
+
+test_events(testcase_in_multiple_groups) ->
+ [];
+
+test_events(order_of_tests_in_multiple_dirs_no_merge_tests) ->
+ [{?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,tc_start,{groups_12_SUITE,testcase_1a}},
+ {?eh,tc_done, {groups_12_SUITE,testcase_1a,
+ {failed,{error,{test_case_failed,no_group_data}}}}},
+ {?eh,tc_start,{groups_22_SUITE,testcase_1}},
+ {?eh,tc_done,{groups_22_SUITE,testcase_1,ok}},
+ {?eh,tc_start,{groups_12_SUITE,testcase_1b}},
+ {?eh,tc_done, {groups_12_SUITE,testcase_1b,
+ {failed,{error,{test_case_failed,no_group_data}}}}},
+ {?eh,stop_logging,[]}
+ ];
+test_events(order_of_tests_in_multiple_suites_no_merge_tests) ->
+ [{?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,tc_start,{groups_12_SUITE,testcase_1a}},
+ {?eh,tc_done,{groups_12_SUITE,testcase_1a,'_'}},
+ {?eh,tc_start,{groups_11_SUITE,testcase_1}},
+ {?eh,tc_done,{groups_11_SUITE,testcase_1,ok}},
+ {?eh,tc_start,{groups_12_SUITE,testcase_1b}},
+ {?eh,tc_done,{groups_12_SUITE,testcase_1b,'_'}},
+ {?eh,stop_logging,[]}
+ ];
+test_events(order_of_suites_in_multiple_dirs_no_merge_tests) ->
+ [{?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,tc_start,{groups_12_SUITE,init_per_suite}},
+ {?eh,tc_done,{groups_12_SUITE,init_per_suite,'_'}},
+ {?eh,tc_start,{groups_12_SUITE,end_per_suite}},
+ {?eh,tc_done,{groups_12_SUITE,end_per_suite,'_'}},
+ {?eh,tc_start,{groups_22_SUITE,init_per_suite}},
+ {?eh,tc_done,{groups_22_SUITE,init_per_suite,'_'}},
+ {?eh,tc_start,{groups_22_SUITE,end_per_suite}},
+ {?eh,tc_done,{groups_22_SUITE,end_per_suite,'_'}},
+ {?eh,tc_start,{groups_11_SUITE,init_per_suite}},
+ {?eh,tc_done,{groups_11_SUITE,init_per_suite,'_'}},
+ {?eh,tc_start,{groups_11_SUITE,end_per_suite}},
+ {?eh,tc_done,{groups_11_SUITE,end_per_suite,'_'}},
+ {?eh,stop_logging,[]}];
+test_events(order_of_groups_in_multiple_dirs_no_merge_tests) ->
+ [{?eh,start_logging,{'DEF','RUNDIR'}},
+
+ {?eh,tc_start, {groups_12_SUITE,{init_per_group,test_group_1a,'_'}}},
+ {?eh,tc_done, {groups_12_SUITE,{end_per_group,test_group_1a,'_'},'_'}},
+
+ {?eh,tc_start, {groups_22_SUITE,{init_per_group,test_group_1a,'_'}}},
+ {?eh,tc_done, {groups_22_SUITE,{end_per_group,test_group_1a,'_'},'_'}},
+
+ {?eh,tc_start, {groups_12_SUITE,{init_per_group,test_group_1b,'_'}}},
+ {?eh,tc_done, {groups_12_SUITE,{end_per_group,test_group_1b,'_'},'_'}},
+
+ {?eh,stop_logging,[]}];
+test_events(order_of_groups_in_multiple_suites_no_merge_tests) ->
+ [{?eh,start_logging,{'DEF','RUNDIR'}},
+
+ {?eh,tc_start, {groups_12_SUITE,{init_per_group,test_group_1a,'_'}}},
+ {?eh,tc_done, {groups_12_SUITE,{end_per_group,test_group_1a,'_'},'_'}},
+
+ {?eh,tc_start, {groups_11_SUITE,{init_per_group,test_group_1a,'_'}}},
+ {?eh,tc_done, {groups_11_SUITE,{end_per_group,test_group_1a,'_'},'_'}},
+
+ {?eh,tc_start, {groups_12_SUITE,{init_per_group,test_group_1b,'_'}}},
+ {?eh,tc_done, {groups_12_SUITE,{end_per_group,test_group_1b,'_'},'_'}},
+
+ {?eh,stop_logging,[]}];
+test_events(order_of_tests_in_multiple_suites_with_skip_no_merge_tests) ->
+ [{?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,tc_start,{groups_12_SUITE,testcase_1a}},
+ {?eh,tc_done,{groups_12_SUITE,testcase_1a,'_'}},
+ {?eh,tc_start,{groups_11_SUITE,testcase_1}},
+ {?eh,tc_done,{groups_11_SUITE,testcase_1,ok}},
+ {?eh,tc_user_skip,{groups_12_SUITE,testcase_1b,'_'}},
+ {?eh,tc_start,{groups_11_SUITE,testcase_2}},
+ {?eh,tc_done,{groups_11_SUITE,testcase_2,ok}},
+ {?eh,stop_logging,[]}
+ ];
+
+test_events(order_of_tests_in_multiple_dirs) ->
+ [{?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,tc_start,{groups_12_SUITE,testcase_1a}},
+ {?eh,tc_done,
+ {groups_12_SUITE,testcase_1a,
+ {failed,{error,{test_case_failed,no_group_data}}}}},
+ {?eh,tc_start,{groups_12_SUITE,testcase_1b}},
+ {?eh,tc_done,
+ {groups_12_SUITE,testcase_1b,
+ {failed,{error,{test_case_failed,no_group_data}}}}},
+ {?eh,tc_start,{groups_22_SUITE,testcase_1}},
+ {?eh,tc_done,{groups_22_SUITE,testcase_1,ok}},
+ {?eh,stop_logging,[]}
+ ];
+test_events(order_of_tests_in_multiple_suites) ->
+ [{?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,tc_start,{groups_12_SUITE,testcase_1a}},
+ {?eh,tc_done,{groups_12_SUITE,testcase_1a,'_'}},
+
+ {?eh,tc_start,{groups_12_SUITE,testcase_1b}},
+ {?eh,tc_done,{groups_12_SUITE,testcase_1b,'_'}},
+
+ {?eh,tc_start,{groups_11_SUITE,testcase_1}},
+ {?eh,tc_done,{groups_11_SUITE,testcase_1,ok}},
+ {?eh,stop_logging,[]}
+ ];
+test_events(order_of_suites_in_multiple_dirs) ->
+ [{?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,tc_start,{groups_12_SUITE,init_per_suite}},
+ {?eh,tc_done,{groups_12_SUITE,init_per_suite,'_'}},
+ {?eh,tc_start,{groups_12_SUITE,end_per_suite}},
+ {?eh,tc_done,{groups_12_SUITE,end_per_suite,'_'}},
+
+ {?eh,tc_start,{groups_11_SUITE,init_per_suite}},
+ {?eh,tc_done,{groups_11_SUITE,init_per_suite,'_'}},
+ {?eh,tc_start,{groups_11_SUITE,end_per_suite}},
+ {?eh,tc_done,{groups_11_SUITE,end_per_suite,'_'}},
+
+ {?eh,tc_start,{groups_22_SUITE,init_per_suite}},
+ {?eh,tc_done,{groups_22_SUITE,init_per_suite,'_'}},
+ {?eh,tc_start,{groups_22_SUITE,end_per_suite}},
+ {?eh,tc_done,{groups_22_SUITE,end_per_suite,'_'}},
+ {?eh,stop_logging,[]}];
+test_events(order_of_groups_in_multiple_dirs) ->
+ [{?eh,start_logging,{'DEF','RUNDIR'}},
+
+ {?eh,tc_start, {groups_12_SUITE,{init_per_group,test_group_1a,'_'}}},
+ {?eh,tc_done, {groups_12_SUITE,{end_per_group,test_group_1a,'_'},'_'}},
+
+ {?eh,tc_start, {groups_12_SUITE,{init_per_group,test_group_1b,'_'}}},
+ {?eh,tc_done, {groups_12_SUITE,{end_per_group,test_group_1b,'_'},'_'}},
+
+ {?eh,tc_start, {groups_22_SUITE,{init_per_group,test_group_1a,'_'}}},
+ {?eh,tc_done, {groups_22_SUITE,{end_per_group,test_group_1a,'_'},'_'}},
+
+ {?eh,stop_logging,[]}];
+test_events(order_of_groups_in_multiple_suites) ->
+ [{?eh,start_logging,{'DEF','RUNDIR'}},
+
+ {?eh,tc_start, {groups_12_SUITE,{init_per_group,test_group_1a,'_'}}},
+ {?eh,tc_done, {groups_12_SUITE,{end_per_group,test_group_1a,'_'},'_'}},
+
+ {?eh,tc_start, {groups_12_SUITE,{init_per_group,test_group_1b,'_'}}},
+ {?eh,tc_done, {groups_12_SUITE,{end_per_group,test_group_1b,'_'},'_'}},
+
+ {?eh,tc_start, {groups_11_SUITE,{init_per_group,test_group_1a,'_'}}},
+ {?eh,tc_done, {groups_11_SUITE,{end_per_group,test_group_1a,'_'},'_'}},
+
+ {?eh,stop_logging,[]}];
+
+test_events(order_of_tests_in_multiple_suites_with_skip) ->
+ [{?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,tc_start,{groups_12_SUITE,testcase_1a}},
+ {?eh,tc_done,{groups_12_SUITE,testcase_1a,'_'}},
+ {?eh,tc_user_skip,{groups_12_SUITE,testcase_1b,'_'}},
+ {?eh,tc_start,{groups_11_SUITE,testcase_1}},
+ {?eh,tc_done,{groups_11_SUITE,testcase_1,ok}},
+ {?eh,tc_start,{groups_11_SUITE,testcase_2}},
+ {?eh,tc_done,{groups_11_SUITE,testcase_2,ok}},
+ {?eh,stop_logging,[]}
+ ];
+
+test_events(all_plus_one_tc_no_merge_tests) ->
+
+ [{?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,tc_start,{groups_12_SUITE,init_per_suite}},
+ {?eh,tc_done,{groups_12_SUITE,end_per_suite,'_'}},
+ {?eh,tc_start,{groups_12_SUITE,init_per_suite}},
+ {?eh,tc_done,{groups_12_SUITE,end_per_suite,'_'}},
+ {?eh,stop_logging,[]}
+ ];
+
+test_events(all_plus_one_tc) ->
+
+ [{?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,tc_start,{groups_12_SUITE,init_per_suite}},
+ {?eh,tc_done,{groups_12_SUITE,end_per_suite,'_'}},
+ {negative,{?eh,tc_start,{groups_12_SUITE,init_per_suite}},
+ {?eh,stop_logging,[]}}
+ ];
+
test_events(_) ->
[
].
diff --git a/lib/compiler/doc/src/compile.xml b/lib/compiler/doc/src/compile.xml
index c3d65b4cb5..f2af932aef 100644
--- a/lib/compiler/doc/src/compile.xml
+++ b/lib/compiler/doc/src/compile.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>1996</year><year>2010</year>
+ <year>1996</year><year>2011</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -164,6 +164,70 @@
for details.</p>
</item>
+ <tag><c>makedep</c></tag>
+ <item>
+	<p>Produce a Makefile rule to track header dependencies.
+ No object file is produced.
+ </p>
+ <p>By default, this rule is written to
+ <c><![CDATA[<File>.Pbeam]]></c>. However, if the option
+ <c>binary</c> is set, nothing is written and the rule is
+ returned in <c>Binary</c>.
+ </p>
+ <p>For instance, if one has the following module:
+ </p>
+ <code>
+-module(module).
+
+-include_lib("eunit/include/eunit.hrl").
+-include("header.hrl").
+ </code>
+ <p>Here is the Makefile rule generated by this option:
+ </p>
+ <code>
+module.beam: module.erl \
+ /usr/local/lib/erlang/lib/eunit/include/eunit.hrl \
+ header.hrl
+ </code>
+ </item>
+
+ <tag><c>{makedep_output, Output}</c></tag>
+ <item>
+ <p>Write generated rule(s) to <c>Output</c> instead of the
+ default <c><![CDATA[<File>.Pbeam]]></c>. <c>Output</c>
+ can be a filename or an <c>io_device()</c>. To write to
+	  stdout, use <c>standard_io</c>. However, if <c>binary</c>
+ is set, nothing is written to <c>Output</c> and the
+ result is returned to the caller with
+ <c>{ok, ModuleName, Binary}</c>.
+ </p>
+ </item>
+
+ <tag><c>{makedep_target, Target}</c></tag>
+ <item>
+	<p>Change the target of the emitted rule to <c>Target</c>.
+ </p>
+ </item>
+
+ <tag><c>makedep_quote_target</c></tag>
+ <item>
+	<p>Characters in <c>Target</c> that are special to make(1) are quoted.
+ </p>
+ </item>
+
+ <tag><c>makedep_add_missing</c></tag>
+ <item>
+ <p>Consider missing headers as generated files and add them to the
+ dependencies.
+ </p>
+ </item>
+
+ <tag><c>makedep_phony</c></tag>
+ <item>
+ <p>Add a phony target for each dependency.
+ </p>
+ </item>
+
<tag><c>'P'</c></tag>
<item>
<p>Produces a listing of the parsed code after preprocessing
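(Editorial note: a quick, hypothetical usage sketch of the options documented above. The module and header names are the ones used in the documentation example and are illustrative only; the dependency rule can either be written to a file or returned in a binary.)

    %% Write the dependency rule to "module.Pbeam" (the default output file):
    compile:file("module.erl", [makedep]).

    %% Or ask for the rule in a binary, as described for 'binary' above,
    %% and print it:
    {ok, _ModuleName, Rule} = compile:file("module.erl", [binary, makedep]).
    io:put_chars(Rule).
    %% module.beam: module.erl \
    %%   /usr/local/lib/erlang/lib/eunit/include/eunit.hrl \
    %%   header.hrl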
diff --git a/lib/compiler/src/compile.erl b/lib/compiler/src/compile.erl
index 26da3ecad2..ce8a5bf864 100644
--- a/lib/compiler/src/compile.erl
+++ b/lib/compiler/src/compile.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -112,9 +112,10 @@ noenv_forms(Forms, Opt) when is_atom(Opt) ->
-spec noenv_output_generated([option()]) -> boolean().
noenv_output_generated(Opts) ->
+ {_,Passes} = passes(file, expand_opts(Opts)),
any(fun ({save_binary,_F}) -> true;
(_Other) -> false
- end, passes(file, expand_opts(Opts))).
+ end, Passes).
%%
%% Local functions
@@ -205,6 +206,9 @@ format_error(write_error) ->
format_error({rename,From,To,Error}) ->
io_lib:format("failed to rename ~s to ~s: ~s",
[From,To,file:format_error(Error)]);
+format_error({delete,File,Error}) ->
+ io_lib:format("failed to delete file ~s: ~s",
+ [File,file:format_error(Error)]);
format_error({delete_temp,File,Error}) ->
io_lib:format("failed to delete temporary file ~s: ~s",
[File,file:format_error(Error)]);
@@ -240,26 +244,12 @@ internal(Master, Input, Opts) ->
end}.
internal({forms,Forms}, Opts) ->
- Ps = passes(forms, Opts),
+ {_,Ps} = passes(forms, Opts),
internal_comp(Ps, "", "", #compile{code=Forms,options=Opts});
internal({file,File}, Opts) ->
- Ps = passes(file, Opts),
+ {Ext,Ps} = passes(file, Opts),
Compile = #compile{options=Opts},
- case member(from_core, Opts) of
- true -> internal_comp(Ps, File, ".core", Compile);
- false ->
- case member(from_beam, Opts) of
- true ->
- internal_comp(Ps, File, ".beam", Compile);
- false ->
- case member(from_asm, Opts) orelse member(asm, Opts) of
- true ->
- internal_comp(Ps, File, ".S", Compile);
- false ->
- internal_comp(Ps, File, ".erl", Compile)
- end
- end
- end.
+ internal_comp(Ps, File, Ext, Compile).
internal_comp(Passes, File, Suffix, St0) ->
Dir = filename:dirname(File),
@@ -367,42 +357,52 @@ mpf(Ms) ->
[{File,[M || {F,M} <- Ms, F =:= File]} ||
File <- lists:usort([F || {F,_} <- Ms])].
-%% passes(forms|file, [Option]) -> [{Name,PassFun}]
-%% Figure out which passes that need to be run.
-
-passes(forms, Opts) ->
- case member(from_core, Opts) of
- true ->
- select_passes(core_passes(), Opts);
- false ->
- select_passes(standard_passes(), Opts)
+%% passes(forms|file, [Option]) -> {Extension,[{Name,PassFun}]}
+%%  Figure out the extension of the input file and which passes
+%%  need to be run.
+
+passes(Type, Opts) ->
+ {Ext,Passes0} = passes_1(Opts),
+ Passes1 = case Type of
+ file -> Passes0;
+ forms -> tl(Passes0)
+ end,
+ Passes = select_passes(Passes1, Opts),
+
+ %% If the last pass saves the resulting binary to a file,
+ %% insert a first pass to remove the file (unless the
+ %% source file is a BEAM file).
+ {Ext,case last(Passes) of
+ {save_binary,_Fun} ->
+ case Passes of
+ [{read_beam_file,_}|_] ->
+ %% The BEAM is both input and output.
+ %% Don't remove it.
+ Passes;
+ _ ->
+ [?pass(remove_file)|Passes]
+ end;
+ _ ->
+ Passes
+ end}.
+
+passes_1([Opt|Opts]) ->
+ case pass(Opt) of
+ {_,_}=Res -> Res;
+ none -> passes_1(Opts)
end;
-passes(file, Opts) ->
- case member(from_beam, Opts) of
- true ->
- Ps = [?pass(read_beam_file)|binary_passes()],
- select_passes(Ps, Opts);
- false ->
- Ps = case member(from_asm, Opts) orelse member(asm, Opts) of
- true ->
- [?pass(beam_consult_asm)|asm_passes()];
- false ->
- case member(from_core, Opts) of
- true ->
- [?pass(parse_core)|core_passes()];
- false ->
- [?pass(parse_module)|standard_passes()]
- end
- end,
- Fs = select_passes(Ps, Opts),
-
- %% If the last pass saves the resulting binary to a file,
- %% insert a first pass to remove the file.
- case last(Fs) of
- {save_binary,_Fun} -> [?pass(remove_file)|Fs];
- _Other -> Fs
- end
- end.
+passes_1([]) ->
+ {".erl",[?pass(parse_module)|standard_passes()]}.
+
+pass(from_core) ->
+ {".core",[?pass(parse_core)|core_passes()]};
+pass(from_asm) ->
+ {".S",[?pass(beam_consult_asm)|asm_passes()]};
+pass(asm) ->
+ pass(from_asm);
+pass(from_beam) ->
+ {".beam",[?pass(read_beam_file)|binary_passes()]};
+pass(_) -> none.
%% select_passes([Command], Opts) -> [{Name,Function}]
%% Interpret the lists of commands to return a pure list of passes.
@@ -435,6 +435,8 @@ passes(file, Opts) ->
%% file will be Ext. (Ext should not contain
%% a period.) No more passes will be run.
%%
+%% done End compilation at this point.
+%%
%% {done,Ext} End compilation at this point. Produce a listing
%% as with {listing,Ext}, unless 'binary' is
%% specified, in which case the current
@@ -468,6 +470,8 @@ select_passes([{src_listing,Ext}|_], _Opts) ->
[{listing,fun (St) -> src_listing(Ext, St) end}];
select_passes([{listing,Ext}|_], _Opts) ->
[{listing,fun (St) -> listing(Ext, St) end}];
+select_passes([done|_], _Opts) ->
+ [];
select_passes([{done,Ext}|_], Opts) ->
select_passes([{unless,binary,{listing,Ext}}], Opts);
select_passes([{iff,Flag,Pass}|Ps], Opts) ->
@@ -550,6 +554,13 @@ select_list_passes_1([], _, Acc) ->
standard_passes() ->
[?pass(transform_module),
+
+ {iff,makedep,[
+ ?pass(makedep),
+ {unless,binary,?pass(makedep_output)}
+ ]},
+ {iff,makedep,done},
+
{iff,'dpp',{listing,"pp"}},
?pass(lint_module),
{iff,'P',{src_listing,"P"}},
@@ -901,6 +912,184 @@ core_lint_module(St) ->
errors=St#compile.errors ++ Es}}
end.
+makedep(#compile{code=Code,options=Opts}=St) ->
+ Ifile = St#compile.ifile,
+ Ofile = St#compile.ofile,
+
+ %% Get the target of the Makefile rule.
+ Target0 =
+ case proplists:get_value(makedep_target, Opts) of
+ undefined ->
+ %% The target is derived from the output filename: possibly
+ %% remove the current working directory to obtain a relative
+ %% path.
+ shorten_filename(Ofile);
+ T ->
+ %% The caller specified one.
+ T
+ end,
+
+    %% Quote the target if the caller asked for this.
+ Target1 = case proplists:get_value(makedep_quote_target, Opts) of
+ true ->
+ %% For now, only "$" is replaced by "$$".
+ Fun = fun
+ ($$) -> "$$";
+ (C) -> C
+ end,
+ map(Fun, Target0);
+ _ ->
+ Target0
+ end,
+ Target = Target1 ++ ":",
+
+ %% List the dependencies (includes) for this target.
+ {MainRule,PhonyRules} = makedep_add_headers(
+ Ifile, % The input file name.
+ Code, % The parsed source.
+ [], % The list of dependencies already added.
+ length(Target), % The current line length.
+ Target, % The target.
+ "", % Phony targets.
+ Opts),
+
+ %% Prepare the content of the Makefile. For instance:
+    %%   hello.beam: hello.erl hello.hrl common.hrl
+ %%
+ %% Or if phony targets are enabled:
+    %%   hello.beam: hello.erl hello.hrl common.hrl
+ %%
+ %% hello.hrl:
+ %%
+ %% common.hrl:
+ Makefile = case proplists:get_value(makedep_phony, Opts) of
+ true -> MainRule ++ PhonyRules;
+ _ -> MainRule
+ end,
+ {ok,St#compile{code=iolist_to_binary([Makefile,"\n"])}}.
+
+makedep_add_headers(Ifile, [{attribute,_,file,{File,_}}|Rest],
+ Included, LineLen, MainTarget, Phony, Opts) ->
+ %% The header "File" exists, add it to the dependencies.
+ {Included1,LineLen1,MainTarget1,Phony1} =
+ makedep_add_header(Ifile, Included, LineLen, MainTarget, Phony, File),
+ makedep_add_headers(Ifile, Rest, Included1, LineLen1,
+ MainTarget1, Phony1, Opts);
+makedep_add_headers(Ifile, [{error,{_,epp,{include,file,File}}}|Rest],
+ Included, LineLen, MainTarget, Phony, Opts) ->
+    %% The header "File" doesn't exist; should it be added to the dependencies?
+ case proplists:get_value(makedep_add_missing, Opts) of
+ true ->
+ {Included1,LineLen1,MainTarget1,Phony1} =
+ makedep_add_header(Ifile, Included, LineLen, MainTarget,
+ Phony, File),
+ makedep_add_headers(Ifile, Rest, Included1, LineLen1,
+ MainTarget1, Phony1, Opts);
+ _ ->
+ makedep_add_headers(Ifile, Rest, Included, LineLen,
+ MainTarget, Phony, Opts)
+ end;
+makedep_add_headers(Ifile, [_|Rest], Included, LineLen,
+ MainTarget, Phony, Opts) ->
+ makedep_add_headers(Ifile, Rest, Included,
+ LineLen, MainTarget, Phony, Opts);
+makedep_add_headers(_Ifile, [], _Included, _LineLen,
+ MainTarget, Phony, _Opts) ->
+ {MainTarget,Phony}.
+
+makedep_add_header(Ifile, Included, LineLen, MainTarget, Phony, File) ->
+ case member(File, Included) of
+ true ->
+ %% This file was already listed in the dependencies, skip it.
+ {Included,LineLen,MainTarget,Phony};
+ false ->
+ Included1 = [File|Included],
+
+ %% Remove "./" in front of the dependency filename.
+ File1 = case File of
+ "./" ++ File0 -> File0;
+ _ -> File
+ end,
+
+ %% Prepare the phony target name.
+ Phony1 = case File of
+ Ifile -> Phony;
+ _ -> Phony ++ "\n\n" ++ File1 ++ ":"
+ end,
+
+ %% Add the file to the dependencies. Lines longer than 76 columns
+	    %% are split.
+ if
+ LineLen + 1 + length(File1) > 76 ->
+ LineLen1 = 2 + length(File1),
+ MainTarget1 = MainTarget ++ " \\\n " ++ File1,
+ {Included1,LineLen1,MainTarget1,Phony1};
+ true ->
+ LineLen1 = LineLen + 1 + length(File1),
+ MainTarget1 = MainTarget ++ " " ++ File1,
+ {Included1,LineLen1,MainTarget1,Phony1}
+ end
+ end.
+
+makedep_output(#compile{code=Code,options=Opts,ofile=Ofile}=St) ->
+ %% Write this Makefile (Code) to the selected output.
+ %% If no output is specified, the default is to write to a file named after
+ %% the output file.
+ Output0 = case proplists:get_value(makedep_output, Opts) of
+ undefined ->
+ %% Prepare the default filename.
+ outfile(filename:basename(Ofile, ".beam"), "Pbeam", Opts);
+ O ->
+ O
+ end,
+
+    %% If the caller specified an io_device(), there's nothing to do. If a
+    %% filename was specified, the file must be created. Furthermore, the
+    %% created file must be closed before returning.
+ Ret = case Output0 of
+ _ when is_list(Output0) ->
+ case file:delete(Output0) of
+ Ret2 when Ret2 =:= ok; Ret2 =:= {error,enoent} ->
+ case file:open(Output0, [write]) of
+ {ok,IODev} ->
+ {ok,IODev,true};
+ {error,Reason2} ->
+ {error,open,Reason2}
+ end;
+ {error,Reason1} ->
+ {error,delete,Reason1}
+ end;
+ _ ->
+ {ok,Output0,false}
+ end,
+
+ case Ret of
+ {ok,Output1,CloseOutput} ->
+ try
+ %% Write the Makefile.
+ io:fwrite(Output1, "~s", [Code]),
+ %% Close the file if relevant.
+ if
+ CloseOutput -> file:close(Output1);
+ true -> ok
+ end,
+ {ok,St}
+ catch
+ exit:_ ->
+ %% Couldn't write to output Makefile.
+ Err = {St#compile.ifile,[{none,?MODULE,write_error}]},
+ {error,St#compile{errors=St#compile.errors++[Err]}}
+ end;
+ {error,open,Reason} ->
+ %% Couldn't open output Makefile.
+ Err = {St#compile.ifile,[{none,?MODULE,{open,Reason}}]},
+ {error,St#compile{errors=St#compile.errors++[Err]}};
+ {error,delete,Reason} ->
+	    %% Couldn't delete the existing output Makefile.
+ Err = {St#compile.ifile,[{none,?MODULE,{delete,Output0,Reason}}]},
+ {error,St#compile{errors=St#compile.errors++[Err]}}
+ end.
+
%% expand_module(State) -> State'
%% Do the common preprocessing of the input forms.
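(Editorial note: a minimal, standalone sketch, shell-pasteable and not the compiler code itself, of the 76-column wrapping rule implemented in makedep_add_header/6 above: a dependency goes onto a fresh continuation line when appending it would push the current line past 76 columns.)

    AddDep = fun(Rule, LineLen, File) ->
                 case LineLen + 1 + length(File) > 76 of
                     %% Start a continuation line indented by two spaces.
                     true  -> {Rule ++ " \\\n  " ++ File, 2 + length(File)};
                     %% Otherwise append on the current line.
                     false -> {Rule ++ " " ++ File, LineLen + 1 + length(File)}
                 end
             end.
    %% AddDep("module.beam:", length("module.beam:"), "header.hrl")
    %% returns {"module.beam: header.hrl", 23}.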
diff --git a/lib/compiler/src/v3_kernel_pp.erl b/lib/compiler/src/v3_kernel_pp.erl
index 9bd13f7032..e363a5387a 100644
--- a/lib/compiler/src/v3_kernel_pp.erl
+++ b/lib/compiler/src/v3_kernel_pp.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -20,10 +20,12 @@
-module(v3_kernel_pp).
--include("v3_kernel.hrl").
-
-export([format/1]).
+%%-define(INCLUDE_ANNOTATIONS, 1).
+
+-include("v3_kernel.hrl").
+
%% These are "internal" structures in sys_kernel which are here for
%% debugging purposes.
-record(iset, {anno=[],vars,arg,body}).
@@ -62,22 +64,21 @@ format(Node, Ctxt) ->
end)
end.
-format_anno(Anno, Ctxt0, ObjFun) ->
- case annotations_enabled() of
- true ->
- Ctxt1 = ctxt_bump_indent(Ctxt0, 1),
- ["( ",
- ObjFun(Ctxt0),
- nl_indent(Ctxt1),
- "-| ",io_lib:write(Anno),
- " )"];
- false ->
- ObjFun(Ctxt0)
- end.
-%% By default, don't show annotations since they clutter up the output.
-annotations_enabled() ->
- false.
+-ifndef(INCLUDE_ANNOTATIONS).
+%% Don't include annotations (for readability).
+format_anno(_Anno, Ctxt, ObjFun) ->
+ ObjFun(Ctxt).
+-else.
+%% Include annotations (for debugging of annotations).
+format_anno(Anno, Ctxt0, ObjFun) ->
+ Ctxt1 = ctxt_bump_indent(Ctxt0, 1),
+ ["( ",
+ ObjFun(Ctxt0),
+ nl_indent(Ctxt1),
+ "-| ",io_lib:write(Anno),
+ " )"].
+-endif.
%% format_1(Kexpr, Context) -> string().
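(Editorial note: since the code above only checks whether the INCLUDE_ANNOTATIONS macro is defined, a hypothetical way to turn annotation printing back on without editing the file is to define the macro when compiling, for example from the lib/compiler/src directory.)

    %% From the Erlang shell:
    c(v3_kernel_pp, [{d,'INCLUDE_ANNOTATIONS'}]).
    %% Or with erlc:
    %%   erlc -DINCLUDE_ANNOTATIONS v3_kernel_pp.erl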
diff --git a/lib/compiler/test/bs_construct_SUITE.erl b/lib/compiler/test/bs_construct_SUITE.erl
index fe72fbb143..c430b12b70 100644
--- a/lib/compiler/test/bs_construct_SUITE.erl
+++ b/lib/compiler/test/bs_construct_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -231,7 +231,7 @@ one_test({C_bin, E_bin, Str, Result}) ->
ok;
%% For situations where the final bits may not matter, like
%% for floats:
- N when integer(N) ->
+ N when is_integer(N) ->
io:format("Info: compiled and interpreted differ in the"
" last bytes:~n ~p, ~p.~n",
[bitstring_to_list(C_bin), bitstring_to_list(E_bin)]),
diff --git a/lib/compiler/test/compilation_SUITE.erl b/lib/compiler/test/compilation_SUITE.erl
index 6cd133ee39..ba225b66d0 100644
--- a/lib/compiler/test/compilation_SUITE.erl
+++ b/lib/compiler/test/compilation_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -163,7 +163,7 @@ split({int, N}, <<N:16,B:N/binary,T/binary>>) ->
beam_compiler_7(doc) ->
"Code snippet submitted from Ulf Wiger which fails in R3 Beam.";
beam_compiler_7(suite) -> [];
-beam_compiler_7(Config) when list(Config) ->
+beam_compiler_7(Config) when is_list(Config) ->
?line done = empty(2, false).
empty(N, Toggle) when N > 0 ->
@@ -327,7 +327,7 @@ from(_, []) -> [].
vsn_1(doc) ->
"Test generation of 'vsn' attribute";
vsn_1(suite) -> [];
-vsn_1(Conf) when list(Conf) ->
+vsn_1(Conf) when is_list(Conf) ->
?line M = vsn_1,
?line compile_load(M, ?config(data_dir, Conf), Conf),
@@ -351,7 +351,7 @@ vsn_1(Conf) when list(Conf) ->
vsn_2(doc) ->
"Test overriding of generation of 'vsn' attribute";
vsn_2(suite) -> [];
-vsn_2(Conf) when list(Conf) ->
+vsn_2(Conf) when is_list(Conf) ->
?line M = vsn_2,
?line compile_load(M, ?config(data_dir, Conf), Conf),
@@ -367,7 +367,7 @@ vsn_2(Conf) when list(Conf) ->
vsn_3(doc) ->
"Test that different code yields different generated 'vsn'";
vsn_3(suite) -> [];
-vsn_3(Conf) when list(Conf) ->
+vsn_3(Conf) when is_list(Conf) ->
?line M = vsn_3,
?line compile_load(M, ?config(data_dir, Conf), Conf),
diff --git a/lib/compiler/test/compile_SUITE.erl b/lib/compiler/test/compile_SUITE.erl
index a9ba4b0b86..037c078fd0 100644
--- a/lib/compiler/test/compile_SUITE.erl
+++ b/lib/compiler/test/compile_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -26,7 +26,7 @@
init_per_group/2,end_per_group/2,
app_test/1,
file_1/1, module_mismatch/1, big_file/1, outdir/1,
- binary/1, cond_and_ifdef/1, listings/1, listings_big/1,
+ binary/1, makedep/1, cond_and_ifdef/1, listings/1, listings_big/1,
other_output/1, package_forms/1, encrypted_abstr/1,
bad_record_use1/1, bad_record_use2/1, strict_record/1,
missing_testheap/1, cover/1, env/1, core/1, asm/1]).
@@ -38,10 +38,11 @@ suite() -> [{ct_hooks,[ts_install_cth]}].
%% To cover the stripping of 'type' and 'spec' in beam_asm.
-type all_return_type() :: [atom()].
-spec all() -> all_return_type().
+
all() ->
test_lib:recompile(compile_SUITE),
[app_test, file_1, module_mismatch, big_file, outdir,
- binary, cond_and_ifdef, listings, listings_big,
+ binary, makedep, cond_and_ifdef, listings, listings_big,
other_output, package_forms, encrypted_abstr,
{group, bad_record_use}, strict_record,
missing_testheap, cover, env, core, asm].
@@ -148,6 +149,76 @@ binary(Config) when is_list(Config) ->
?line test_server:timetrap_cancel(Dog),
ok.
+%% Tests the options related to dependency Makefile generation (makedep etc.).
+
+makedep(Config) when is_list(Config) ->
+ ?line Dog = test_server:timetrap(test_server:seconds(60)),
+ ?line {Simple,Target} = files(Config, "makedep"),
+ ?line DataDir = ?config(data_dir, Config),
+ ?line SimpleRootname = filename:rootname(Simple),
+ ?line IncludeDir = filename:join(filename:dirname(Simple), "include"),
+ ?line IncludeOptions = [
+ {d,need_foo},
+ {d,foo_value,42},
+ {d,include_generated},
+ {i,IncludeDir}
+ ],
+ %% Basic rule.
+ ?line BasicMf1Name = SimpleRootname ++ "-basic1.mk",
+ ?line {ok,BasicMf1} = file:read_file(BasicMf1Name),
+ ?line {ok,_,Mf1} = compile:file(Simple, [binary,makedep]),
+ ?line BasicMf1 = makedep_canonicalize_result(Mf1, DataDir),
+ %% Basic rule with one existing header.
+ ?line BasicMf2Name = SimpleRootname ++ "-basic2.mk",
+ ?line {ok,BasicMf2} = file:read_file(BasicMf2Name),
+ ?line {ok,_,Mf2} = compile:file(Simple, [binary,makedep|IncludeOptions]),
+ ?line BasicMf2 = makedep_canonicalize_result(Mf2, DataDir),
+ %% Rule with one existing header and one missing header.
+ ?line MissingMfName = SimpleRootname ++ "-missing.mk",
+ ?line {ok,MissingMf} = file:read_file(MissingMfName),
+ ?line {ok,_,Mf3} = compile:file(Simple,
+ [binary,makedep,makedep_add_missing|IncludeOptions]),
+ ?line MissingMf = makedep_canonicalize_result(Mf3, DataDir),
+ %% Rule with modified target.
+ ?line TargetMf1Name = SimpleRootname ++ "-target1.mk",
+ ?line {ok,TargetMf1} = file:read_file(TargetMf1Name),
+ ?line {ok,_,Mf4} = compile:file(Simple,
+ [binary,makedep,{makedep_target,"$target"}|IncludeOptions]),
+ ?line TargetMf1 = makedep_modify_target(
+ makedep_canonicalize_result(Mf4, DataDir), "$$target"),
+ %% Rule with quoted modified target.
+ ?line TargetMf2Name = SimpleRootname ++ "-target2.mk",
+ ?line {ok,TargetMf2} = file:read_file(TargetMf2Name),
+ ?line {ok,_,Mf5} = compile:file(Simple,
+ [binary,makedep,{makedep_target,"$target"},makedep_quote_target|
+ IncludeOptions]),
+ ?line TargetMf2 = makedep_modify_target(
+ makedep_canonicalize_result(Mf5, DataDir), "$$target"),
+ %% Basic rule written to some file.
+ ?line {ok,_} = compile:file(Simple,
+ [makedep,{makedep_output,Target}|IncludeOptions]),
+ ?line {ok,Mf6} = file:read_file(Target),
+ ?line BasicMf2 = makedep_canonicalize_result(Mf6, DataDir),
+
+ ?line ok = file:delete(Target),
+ ?line ok = file:del_dir(filename:dirname(Target)),
+ ?line test_server:timetrap_cancel(Dog),
+ ok.
+
+makedep_canonicalize_result(Mf, DataDir) ->
+ Mf0 = binary_to_list(Mf),
+    %% Replace DataDir with "$(srcdir)".
+ Mf1 = re:replace(Mf0, DataDir, "$(srcdir)/",
+ [global,multiline,{return,list}]),
+    %% Long lines are split; join everything back onto one line.
+ Mf2 = re:replace(Mf1, "\\\\\n ", "", [global,multiline,{return,list}]),
+ list_to_binary(Mf2).
+
+makedep_modify_target(Mf, Target) ->
+ Mf0 = binary_to_list(Mf),
+ Mf1 = re:replace(Mf0, Target, "$target", [{return,list}]),
+ list_to_binary(Mf1).
+
%% Tests that conditional compilation, defining values, including files work.
cond_and_ifdef(Config) when is_list(Config) ->
diff --git a/lib/compiler/test/compile_SUITE_data/simple-basic1.mk b/lib/compiler/test/compile_SUITE_data/simple-basic1.mk
new file mode 100644
index 0000000000..4073fa82d0
--- /dev/null
+++ b/lib/compiler/test/compile_SUITE_data/simple-basic1.mk
@@ -0,0 +1 @@
+simple.beam: $(srcdir)/simple.erl
diff --git a/lib/compiler/test/compile_SUITE_data/simple-basic2.mk b/lib/compiler/test/compile_SUITE_data/simple-basic2.mk
new file mode 100644
index 0000000000..761d1d9582
--- /dev/null
+++ b/lib/compiler/test/compile_SUITE_data/simple-basic2.mk
@@ -0,0 +1 @@
+simple.beam: $(srcdir)/simple.erl $(srcdir)/include/simple.hrl
diff --git a/lib/compiler/test/compile_SUITE_data/simple-missing.mk b/lib/compiler/test/compile_SUITE_data/simple-missing.mk
new file mode 100644
index 0000000000..b13d44ec36
--- /dev/null
+++ b/lib/compiler/test/compile_SUITE_data/simple-missing.mk
@@ -0,0 +1 @@
+simple.beam: $(srcdir)/simple.erl $(srcdir)/include/simple.hrl generated.hrl
diff --git a/lib/compiler/test/compile_SUITE_data/simple-target1.mk b/lib/compiler/test/compile_SUITE_data/simple-target1.mk
new file mode 100644
index 0000000000..dd9fa0d6e5
--- /dev/null
+++ b/lib/compiler/test/compile_SUITE_data/simple-target1.mk
@@ -0,0 +1 @@
+$target: $(srcdir)/simple.erl $(srcdir)/include/simple.hrl
diff --git a/lib/compiler/test/compile_SUITE_data/simple-target2.mk b/lib/compiler/test/compile_SUITE_data/simple-target2.mk
new file mode 100644
index 0000000000..a5fc6f461d
--- /dev/null
+++ b/lib/compiler/test/compile_SUITE_data/simple-target2.mk
@@ -0,0 +1 @@
+$$target: $(srcdir)/simple.erl $(srcdir)/include/simple.hrl
diff --git a/lib/compiler/test/compile_SUITE_data/simple.erl b/lib/compiler/test/compile_SUITE_data/simple.erl
index 2021056388..0c1c70a778 100644
--- a/lib/compiler/test/compile_SUITE_data/simple.erl
+++ b/lib/compiler/test/compile_SUITE_data/simple.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -37,3 +37,7 @@ foo() ->
{?included_value, ?foo_value}.
-endif.
+
+-ifdef(include_generated).
+-include("generated.hrl").
+-endif.
diff --git a/lib/compiler/test/guard_SUITE.erl b/lib/compiler/test/guard_SUITE.erl
index 7201edf4ac..482564a32b 100644
--- a/lib/compiler/test/guard_SUITE.erl
+++ b/lib/compiler/test/guard_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -111,8 +111,8 @@ const_cond(Config) when is_list(Config) ->
const_cond(T, Sz) ->
case T of
_X when false -> never;
- _X when tuple(T), eq == eq, tuple_size(T) == Sz -> ok;
- _X when tuple(T), eq == leq, tuple_size(T) =< Sz -> ok;
+ _X when is_tuple(T), eq == eq, tuple_size(T) == Sz -> ok;
+ _X when is_tuple(T), eq == leq, tuple_size(T) =< Sz -> ok;
_X -> error
end.
diff --git a/lib/compiler/test/inline_SUITE.erl b/lib/compiler/test/inline_SUITE.erl
index 6684b82751..7b9600c2f6 100644
--- a/lib/compiler/test/inline_SUITE.erl
+++ b/lib/compiler/test/inline_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -68,7 +68,7 @@ attribute(Config) when is_list(Config) ->
ok.
-define(comp(Name),
- Name(Config) when list(Config) ->
+ Name(Config) when is_list(Config) ->
try_inline(Name, Config)).
?comp(bsdecode).
diff --git a/lib/compiler/test/lc_SUITE.erl b/lib/compiler/test/lc_SUITE.erl
index 6dd950eade..bcdcf2fd9f 100644
--- a/lib/compiler/test/lc_SUITE.erl
+++ b/lib/compiler/test/lc_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -180,7 +180,7 @@ empty_generator(Config) when is_list(Config) ->
id(I) -> I.
fc(Args, {'EXIT',{function_clause,[{?MODULE,_,Args}|_]}}) -> ok;
-fc(Args, {'EXIT',{function_clause,[{?MODULE,Name,Arity}|_]}})
+fc(Args, {'EXIT',{function_clause,[{?MODULE,_,Arity}|_]}})
when length(Args) =:= Arity ->
true = test_server:is_native(?MODULE);
fc(Args, {'EXIT',{{case_clause,ActualArgs},_}})
diff --git a/lib/compiler/test/test_lib.erl b/lib/compiler/test/test_lib.erl
index d8799952a9..53d8c04169 100644
--- a/lib/compiler/test/test_lib.erl
+++ b/lib/compiler/test/test_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2003-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2003-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -41,7 +41,7 @@ smoke_disasm(Mod) when is_atom(Mod) ->
smoke_disasm(code:which(Mod));
smoke_disasm(File) when is_list(File) ->
Res = beam_disasm:file(File),
- {beam_file,Mod} = {element(1, Res),element(2, Res)}.
+ {beam_file,_Mod} = {element(1, Res),element(2, Res)}.
%% Retrieve the "interesting" compiler options (options for optimization
%% and compatibility) for the given module.
@@ -62,16 +62,16 @@ opt_opts(Mod) ->
(_) -> false
end, Opts).
-%% Some test suites gets cloned (e.g. to "record_SUITE" to "record_no_opt_SUITE"),
-%% but the data directory is not cloned. This function retrieves the path to
-%% the original data directory.
+%% Some test suites get cloned (e.g. "record_SUITE" is cloned to
+%% "record_no_opt_SUITE"), but the data directory is not cloned.
+%% This function retrieves the path to the original data directory.
get_data_dir(Config) ->
Data0 = ?config(data_dir, Config),
- {ok,Data1,_} = regexp:sub(Data0, "_no_opt_SUITE", "_SUITE"),
- {ok,Data2,_} = regexp:sub(Data1, "_post_opt_SUITE", "_SUITE"),
- {ok,Data,_} = regexp:sub(Data2, "_inline_SUITE", "_SUITE"),
- Data.
+ Opts = [{return,list}],
+ Data1 = re:replace(Data0, "_no_opt_SUITE", "_SUITE", Opts),
+ Data = re:replace(Data1, "_post_opt_SUITE", "_SUITE", Opts),
+ re:replace(Data, "_inline_SUITE", "_SUITE", Opts).
%% p_run(fun(Data) -> ok|error, List) -> ok
%% Will fail the test case if there were any errors.
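(Editorial note: the regexp-to-re migration above follows a common pattern; a hypothetical side-by-side for a single substitution. regexp:sub/3 returned {ok,NewString,RepCount}, while re:replace/4 with {return,list} returns the new string directly.)

    %% Old (regexp module):
    %%   {ok, New, _Count} = regexp:sub(Subject, "_no_opt_SUITE", "_SUITE").
    %% New (re module):
    New = re:replace(Subject, "_no_opt_SUITE", "_SUITE", [{return,list}]).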
diff --git a/lib/cosEvent/doc/src/notes.xml b/lib/cosEvent/doc/src/notes.xml
index b6c4531901..1a5c8afa17 100644
--- a/lib/cosEvent/doc/src/notes.xml
+++ b/lib/cosEvent/doc/src/notes.xml
@@ -4,7 +4,7 @@
<chapter>
<header>
<copyright>
- <year>1999</year><year>2010</year>
+ <year>1999</year><year>2011</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -33,42 +33,48 @@
</header>
<section>
- <title>cosEvent 2.1.9</title>
+ <title>cosEvent 2.1.10</title>
<section>
<title>Improvements and New Features</title>
<list type="bulleted">
<item>
<p>
- Test suites published.</p>
+ Eliminated Dialyzer warnings when using exit or throw.</p>
<p>
- Own Id: OTP-8543 Aux Id:</p>
+ Own Id: OTP-9050 Aux Id:</p>
</item>
</list>
</section>
</section>
<section>
- <title>cosEvent 2.1.8</title>
+ <title>cosEvent 2.1.9</title>
<section>
<title>Improvements and New Features</title>
<list type="bulleted">
<item>
<p>
- Removed the usage of the codeinclude tag in the documentation.</p>
+ Test suites published.</p>
<p>
- Own Id: OTP-8409 Aux Id:</p>
+ Own Id: OTP-8543 Aux Id:</p>
</item>
</list>
</section>
+ </section>
+
+ <section>
+ <title>cosEvent 2.1.8</title>
<section>
- <title>Fixed Bugs and Malfunctions</title>
+ <title>Improvements and New Features</title>
<list type="bulleted">
<item>
- <p>The documentation EIX file was not generated.</p>
- <p>Own id: OTP-8355 Aux Id:</p>
+ <p>
+ Removed the usage of the codeinclude tag in the documentation.</p>
+ <p>
+ Own Id: OTP-8409 Aux Id:</p>
</item>
</list>
</section>
diff --git a/lib/cosEvent/src/cosEventApp.erl b/lib/cosEvent/src/cosEventApp.erl
index 084490f845..143c241448 100644
--- a/lib/cosEvent/src/cosEventApp.erl
+++ b/lib/cosEvent/src/cosEventApp.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -66,26 +66,31 @@
%% Effect : Install necessary data in the IFR DB
%%------------------------------------------------------------
install() ->
- install_loop(?IDL_MODULES, []).
+ case install_loop(?IDL_MODULES, []) of
+ ok ->
+ ok;
+ {error, Reason} ->
+ exit(Reason)
+ end.
install_loop([], _) ->
ok;
install_loop([H|T], Accum) ->
case catch H:'oe_register'() of
{'EXIT',{unregistered,App}} ->
- ?write_ErrorMsg("Unable to register '~p'; application ~p not registered.
-Trying to unregister ~p~n", [H,App,Accum]),
+ ?write_ErrorMsg("Unable to register '~p'; application ~p not registered.\n"
+ "Trying to unregister ~p~n", [H,App,Accum]),
uninstall_loop(Accum, {exit, register});
{'EXCEPTION',_} ->
- ?write_ErrorMsg("Unable to register '~p'; propably already registered.
-You are adviced to confirm this.
-Trying to unregister ~p~n", [H,Accum]),
+ ?write_ErrorMsg("Unable to register '~p'; propably already registered.\n"
+ "You are adviced to confirm this.\n"
+ "Trying to unregister ~p~n", [H,Accum]),
uninstall_loop(Accum, {exit, register});
ok ->
install_loop(T, [H|Accum]);
_ ->
- ?write_ErrorMsg("Unable to register '~p'; reason unknown.
-Trying to unregister ~p~n", [H,Accum]),
+ ?write_ErrorMsg("Unable to register '~p'; reason unknown.\n"
+ "Trying to unregister ~p~n", [H,Accum]),
uninstall_loop(Accum, {exit, register})
end.
@@ -96,27 +101,32 @@ Trying to unregister ~p~n", [H,Accum]),
%% Effect : Remove data related to cosEvent from the IFR DB
%%------------------------------------------------------------
uninstall() ->
- uninstall_loop(lists:reverse(?IDL_MODULES), ok).
+ case uninstall_loop(lists:reverse(?IDL_MODULES), ok) of
+ ok ->
+ ok;
+ {error, Reason} ->
+ exit(Reason)
+ end.
uninstall_loop([],ok) ->
ok;
uninstall_loop([],{exit, register}) ->
- exit({?MODULE, "oe_register failed"});
+ {error, {?MODULE, "oe_register failed"}};
uninstall_loop([],{exit, unregister}) ->
- exit({?MODULE, "oe_unregister failed"});
+ {error, {?MODULE, "oe_unregister failed"}};
uninstall_loop([],{exit, both}) ->
- exit({?MODULE, "oe_register and, for some of those already registered, oe_unregister failed"});
+ {error, {?MODULE, "oe_register and, for some of those already registered, oe_unregister failed"}};
uninstall_loop([H|T], Status) ->
case catch H:'oe_unregister'() of
ok ->
uninstall_loop(T, Status);
_ when Status == ok ->
- ?write_ErrorMsg("Unable to unregister '~p'; propably already unregistered.
-You are adviced to confirm this.~n",[H]),
+ ?write_ErrorMsg("Unable to unregister '~p'; propably already unregistered.\n"
+ "You are adviced to confirm this.\n",[H]),
uninstall_loop(T, {exit, unregister});
_ ->
- ?write_ErrorMsg("Unable to unregister '~p'; propably already unregistered.
-You are adviced to confirm this.~n",[H]),
+ ?write_ErrorMsg("Unable to unregister '~p'; propably already unregistered.\n"
+ "You are adviced to confirm this.\n",[H]),
uninstall_loop(T, {exit, both})
end.
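(Editorial note: a minimal sketch, with made-up names and not the cosEvent code itself, of the refactoring pattern used above and in the other cos* applications in this patch: the recursive helper reports failures as {error, Reason} and only the exported entry point converts that into exit/1, which gives the helper a return type Dialyzer can analyse and removes the warnings referred to in the release notes.)

    -module(install_sketch).   %% made-up module name
    -export([install_all/1]).

    install_all(Modules) ->
        case install_each(Modules) of
            ok              -> ok;
            {error, Reason} -> exit(Reason)
        end.

    install_each([]) ->
        ok;
    install_each([Mod | Rest]) ->
        %% Same idiom as the code above: trap registration failures
        %% and turn them into an error tuple instead of exiting here.
        case catch Mod:oe_register() of
            ok -> install_each(Rest);
            _  -> {error, {Mod, "oe_register failed"}}
        end.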
diff --git a/lib/cosEvent/vsn.mk b/lib/cosEvent/vsn.mk
index 9c00a17100..38999db5fa 100644
--- a/lib/cosEvent/vsn.mk
+++ b/lib/cosEvent/vsn.mk
@@ -1 +1,3 @@
-COSEVENT_VSN = 2.1.9
+
+COSEVENT_VSN = 2.1.10
+
diff --git a/lib/cosEventDomain/doc/src/notes.xml b/lib/cosEventDomain/doc/src/notes.xml
index deb1985c86..522dcea829 100644
--- a/lib/cosEventDomain/doc/src/notes.xml
+++ b/lib/cosEventDomain/doc/src/notes.xml
@@ -4,7 +4,7 @@
<chapter>
<header>
<copyright>
- <year>2001</year><year>2010</year>
+ <year>2001</year><year>2011</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -32,7 +32,24 @@
</header>
<section>
+ <title>cosEventDomain 1.1.10</title>
+
+ <section>
+ <title>Improvements and New Features</title>
+ <list type="bulleted">
+ <item>
+ <p>
+ Eliminated Dialyzer warnings when using exit or throw.</p>
+ <p>
+ Own Id: OTP-9050 Aux Id:</p>
+ </item>
+ </list>
+ </section>
+ </section>
+
+ <section>
<title>cosEventDomain 1.1.9</title>
+
<section>
<title>Improvements and New Features</title>
<list type="bulleted">
@@ -44,7 +61,6 @@
</item>
</list>
</section>
-
</section>
<section>
diff --git a/lib/cosEventDomain/src/CosEventDomainAdmin_EventDomain_impl.erl b/lib/cosEventDomain/src/CosEventDomainAdmin_EventDomain_impl.erl
index 0b73100540..f5dd6d5c14 100644
--- a/lib/cosEventDomain/src/CosEventDomainAdmin_EventDomain_impl.erl
+++ b/lib/cosEventDomain/src/CosEventDomainAdmin_EventDomain_impl.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -778,12 +778,17 @@ get_qos(_OE_This, #state{cyclic = Cyclic, diamonds = Diamonds} = State) ->
%%----------------------------------------------------------------------
set_qos(_OE_This, State, NewQoS) ->
QoS = cosEventDomainApp:get_qos(NewQoS),
- set_qos_helper(QoS, State, []).
+ case set_qos_helper(QoS, State, []) of
+ {ok, NewState} ->
+ {reply, ok, NewState};
+ {error, Errors} ->
+ corba:raise(#'CosNotification_UnsupportedQoS'{qos_err = Errors})
+ end.
set_qos_helper([], State, []) ->
- {reply, ok, State};
+    {ok, State};
set_qos_helper([], _, Errors) ->
- corba:raise(#'CosNotification_UnsupportedQoS'{qos_err = Errors});
+ {error, Errors};
set_qos_helper([{?DiamondDetection, Diamonds}|T], #state{diamonds = Diamonds} = State,
Errors) ->
set_qos_helper(T, State, Errors);
@@ -828,12 +833,17 @@ set_qos_helper([{?CycleDetection, _}|T], #state{cyclic = Cyclic} = State, Errors
%%----------------------------------------------------------------------
validate_qos(_OE_This, State, WantedQoS) ->
QoS = cosEventDomainApp:get_qos(WantedQoS),
- {reply, {ok, validate_qos_helper(QoS, State, [], [])}, State}.
+ case validate_qos_helper(QoS, State, [], []) of
+ {ok, Properties} ->
+ {reply, {ok, Properties}, State};
+ {error, Errors} ->
+ corba:raise(#'CosNotification_UnsupportedQoS'{qos_err = Errors})
+ end.
validate_qos_helper([], _, Properties, []) ->
- Properties;
+ {ok, Properties};
validate_qos_helper([], _, _, Errors) ->
- corba:raise(#'CosNotification_UnsupportedQoS'{qos_err = Errors});
+ {error, Errors};
validate_qos_helper([{?DiamondDetection, ?ForbidDiamonds}|T], State, Properties,
Errors) ->
case get_diamonds_helper(State, false) of
diff --git a/lib/cosEventDomain/src/cosEventDomainApp.erl b/lib/cosEventDomain/src/cosEventDomainApp.erl
index d57f51443c..734e4deccb 100644
--- a/lib/cosEventDomain/src/cosEventDomainApp.erl
+++ b/lib/cosEventDomain/src/cosEventDomainApp.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -270,12 +270,17 @@ create_id() ->
get_qos([]) ->
[];
get_qos(Properties) ->
- get_qos(Properties, [], []).
+ case get_qos(Properties, [], []) of
+ {ok, Supported} ->
+ Supported;
+ {error, Unsupported} ->
+ corba:raise(#'CosNotification_UnsupportedQoS'{qos_err = Unsupported})
+ end.
get_qos([], Supported, []) ->
- Supported;
+ {ok, Supported};
get_qos([], _, Unsupported) ->
- corba:raise(#'CosNotification_UnsupportedQoS'{qos_err = Unsupported});
+ {error, Unsupported};
get_qos([#'CosNotification_Property'{name = ?CycleDetection,
value= #any{value = ?AuthorizeCycles}}|T],
Supported, Unsupported) ->
diff --git a/lib/cosEventDomain/vsn.mk b/lib/cosEventDomain/vsn.mk
index bd21133fe5..f4a77ab7a8 100644
--- a/lib/cosEventDomain/vsn.mk
+++ b/lib/cosEventDomain/vsn.mk
@@ -1 +1,3 @@
-COSEVENTDOMAIN_VSN = 1.1.9
+
+COSEVENTDOMAIN_VSN = 1.1.10
+
diff --git a/lib/cosNotification/doc/src/notes.xml b/lib/cosNotification/doc/src/notes.xml
index 04c0c2accd..125e25e67e 100644
--- a/lib/cosNotification/doc/src/notes.xml
+++ b/lib/cosNotification/doc/src/notes.xml
@@ -4,7 +4,7 @@
<chapter>
<header>
<copyright>
- <year>2000</year><year>2010</year>
+ <year>2000</year><year>2011</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -31,6 +31,20 @@
<file>notes.xml</file>
</header>
+ <section><title>cosNotification 1.1.16</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Eliminated Dialyzer warnings when using exit or throw.</p>
+ <p>
+ Own Id: OTP-9050</p>
+ </item>
+ </list>
+ </section>
+</section>
+
<section><title>cosNotification 1.1.15</title>
<section><title>Improvements and New Features</title>
diff --git a/lib/cosNotification/src/CosNotification_Common.erl b/lib/cosNotification/src/CosNotification_Common.erl
index 0e0f1da0d5..af9b2d4368 100644
--- a/lib/cosNotification/src/CosNotification_Common.erl
+++ b/lib/cosNotification/src/CosNotification_Common.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -28,7 +28,6 @@
%%--------------- INCLUDES -----------------------------------
-include_lib("orber/include/corba.hrl").
--include_lib("orber/include/ifr_types.hrl").
%% Application files
-include("CosNotification.hrl").
-include("CosNotifyChannelAdmin.hrl").
@@ -945,14 +944,19 @@ check_limits(LQS, NPR) ->
%% supported.
%%------------------------------------------------------------
validate_event_qos(Wanted, Curr) ->
- v_e_q_helper(Wanted, Curr, []),
- [].
+ case v_e_q_helper(Wanted, Curr, []) of
+ ok ->
+ [];
+ {error, Unsupp} ->
+ corba:raise(#'CosNotification_UnsupportedQoS'{qos_err = Unsupp})
+ end.
+
v_e_q_helper([], _Curr, []) ->
- %% Parsed all and foynd no conflicts.
+ %% Parsed all and found no conflicts.
ok;
v_e_q_helper([], _Curr, Unsupp) ->
%% Not possible to use these requested QoS.
- corba:raise(#'CosNotification_UnsupportedQoS'{qos_err = Unsupp});
+ {error, Unsupp};
%%--- EventReliability ---%%
v_e_q_helper([#'CosNotification_Property'{name=?not_EventReliability,
@@ -1071,30 +1075,38 @@ v_e_q_helper(What, _, _) ->
%% LQS - local representation of QoS.
%% Returns : {NewOMGStyleQoS, NewLocalQoS} | #'CosNotification_UnsupportedQoS'{}
%%------------------------------------------------------------
-set_properties([], Curr, channelAdm, _, [], NewQoS,_,_,LAS) ->
+set_properties(Wanted, Current, Type, Supported, Unsupp, NewQoS, Parent, Childs, LQS) ->
+ case do_set_properties(Wanted, Current, Type, Supported, Unsupp, NewQoS, Parent, Childs, LQS) of
+ {error, Exc} ->
+ corba:raise(Exc);
+ Result ->
+ Result
+ end.
+
+do_set_properties([], Curr, channelAdm, _, [], NewQoS,_,_,LAS) ->
merge_properties(NewQoS, Curr, LAS);
-set_properties([], Curr, _, _, [], NewQoS,_,_,LQS) ->
+do_set_properties([], Curr, _, _, [], NewQoS,_,_,LQS) ->
%% set_local_qos and merge_properties are help functions found at the end of QoS
%% functions.
NewLQS = set_local_qos(NewQoS, LQS),
merge_properties(NewQoS, Curr, NewLQS);
-set_properties([], _, channelAdm, _, Unsupp, _,_,_,_) ->
- corba:raise(#'CosNotification_UnsupportedAdmin'{admin_err = Unsupp});
-set_properties([], _, _, _, Unsupp, _,_,_,_) ->
- corba:raise(#'CosNotification_UnsupportedQoS'{qos_err = Unsupp});
+do_set_properties([], _, channelAdm, _, Unsupp, _,_,_,_) ->
+ {error, #'CosNotification_UnsupportedAdmin'{admin_err = Unsupp}};
+do_set_properties([], _, _, _, Unsupp, _,_,_,_) ->
+ {error, #'CosNotification_UnsupportedQoS'{qos_err = Unsupp}};
-set_properties([Req|Tail], Curr, Type, Supported, Unsupp, NewQoS, Parent, Childs,LQS) ->
+do_set_properties([Req|Tail], Curr, Type, Supported, Unsupp, NewQoS, Parent, Childs,LQS) ->
%% set_values and is_supported are help functions found at the end of QoS
%% functions.
case set_values(is_supported(Supported, Req), Req, Type, Curr, Parent, Childs,LQS) of
{unsupported, U} ->
- set_properties(Tail, Curr, Type, Supported, [U|Unsupp], NewQoS, Parent, Childs,LQS);
+ do_set_properties(Tail, Curr, Type, Supported, [U|Unsupp], NewQoS, Parent, Childs,LQS);
{ok, S, NewLQS} ->
- set_properties(Tail, Curr, Type, Supported, Unsupp, [S|NewQoS], Parent, Childs,NewLQS);
+ do_set_properties(Tail, Curr, Type, Supported, Unsupp, [S|NewQoS], Parent, Childs,NewLQS);
{ok, S} ->
- set_properties(Tail, Curr, Type, Supported, Unsupp, [S|NewQoS], Parent, Childs,LQS);
+ do_set_properties(Tail, Curr, Type, Supported, Unsupp, [S|NewQoS], Parent, Childs,LQS);
ok ->
- set_properties(Tail, Curr, Type, Supported, Unsupp, NewQoS, Parent, Childs,LQS)
+ do_set_properties(Tail, Curr, Type, Supported, Unsupp, NewQoS, Parent, Childs,LQS)
end.
diff --git a/lib/cosNotification/vsn.mk b/lib/cosNotification/vsn.mk
index cfd5948dfc..6613385579 100644
--- a/lib/cosNotification/vsn.mk
+++ b/lib/cosNotification/vsn.mk
@@ -1 +1,2 @@
-COSNOTIFICATION_VSN = 1.1.15
+COSNOTIFICATION_VSN = 1.1.16
+
diff --git a/lib/cosProperty/doc/src/notes.xml b/lib/cosProperty/doc/src/notes.xml
index 11e6205ee9..d7d52514ce 100644
--- a/lib/cosProperty/doc/src/notes.xml
+++ b/lib/cosProperty/doc/src/notes.xml
@@ -4,7 +4,7 @@
<chapter>
<header>
<copyright>
- <year>2000</year><year>2010</year>
+ <year>2000</year><year>2011</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -54,6 +54,34 @@
<title>Fixed Bugs and Malfunctions</title>
<list type="bulleted">
<item>
+ <p>Eliminated Dialyzer warnings when using exit or throw.</p>
+ <p>Own id: OTP-9050 Aux Id:</p>
+ </item>
+ </list>
+ </section>
+ </section>
+
+ <section>
+ <title>cosProperty 1.1.12</title>
+
+ <section>
+ <title>Fixed Bugs and Malfunctions</title>
+ <list type="bulleted">
+ <item>
+ <p>Test suites published.</p>
+ <p>Own id: OTP-8543 Aux Id:</p>
+ </item>
+ </list>
+ </section>
+ </section>
+
+ <section>
+ <title>cosProperty 1.1.11</title>
+
+ <section>
+ <title>Fixed Bugs and Malfunctions</title>
+ <list type="bulleted">
+ <item>
<p>The documentation EIX file was not generated.</p>
<p>Own id: OTP-8355 Aux Id:</p>
</item>
diff --git a/lib/cosProperty/src/CosPropertyService_PropertySetDefFactory_impl.erl b/lib/cosProperty/src/CosPropertyService_PropertySetDefFactory_impl.erl
index b099026b88..202df42b61 100644
--- a/lib/cosProperty/src/CosPropertyService_PropertySetDefFactory_impl.erl
+++ b/lib/cosProperty/src/CosPropertyService_PropertySetDefFactory_impl.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -152,12 +152,18 @@ create_initial_propertysetdef(_OE_This, State, PropDefs) ->
%% Internal functions
%%======================================================================
evaluate_propertysetdef(SetDefs) ->
- evaluate_propertysetdef(SetDefs, [], []).
+ case evaluate_propertysetdef(SetDefs, [], []) of
+ {ok, NewProperties} ->
+ NewProperties;
+ {error, Exc} ->
+ corba:raise(#'CosPropertyService_MultipleExceptions'{exceptions = Exc})
+ end.
+
evaluate_propertysetdef([], NewProperties, []) ->
%% No exceptions found.
- NewProperties;
+ {ok, NewProperties};
evaluate_propertysetdef([], _, Exc) ->
- corba:raise(#'CosPropertyService_MultipleExceptions'{exceptions = Exc});
+ {error, Exc};
evaluate_propertysetdef([#'CosPropertyService_PropertyDef'
{property_name = Name,
property_value = Value,
diff --git a/lib/cosProperty/src/CosPropertyService_PropertySetFactory_impl.erl b/lib/cosProperty/src/CosPropertyService_PropertySetFactory_impl.erl
index ad3cdb62d4..4bc29b99ac 100644
--- a/lib/cosProperty/src/CosPropertyService_PropertySetFactory_impl.erl
+++ b/lib/cosProperty/src/CosPropertyService_PropertySetFactory_impl.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -153,12 +153,18 @@ create_initial_propertyset(_OE_This, State, Properties) ->
%% Internal functions
%%======================================================================
evaluate_propertyset(Sets) ->
- evaluate_propertyset(Sets, [], []).
+ case evaluate_propertyset(Sets, [], []) of
+ {ok, NewProperties} ->
+ NewProperties;
+ {error, Exc} ->
+ corba:raise(#'CosPropertyService_MultipleExceptions'{exceptions = Exc})
+ end.
+
evaluate_propertyset([], NewProperties, []) ->
%% No exceptions found.
- NewProperties;
+ {ok, NewProperties};
evaluate_propertyset([], _, Exc) ->
- corba:raise(#'CosPropertyService_MultipleExceptions'{exceptions = Exc});
+ {error, Exc};
evaluate_propertyset([#'CosPropertyService_Property'
{property_name = Name,
property_value = Value}|T], X, Exc) ->
diff --git a/lib/cosProperty/vsn.mk b/lib/cosProperty/vsn.mk
index ca9a7ca77e..deb1eb0450 100644
--- a/lib/cosProperty/vsn.mk
+++ b/lib/cosProperty/vsn.mk
@@ -1 +1,2 @@
-COSPROPERTY_VSN = 1.1.12
+COSPROPERTY_VSN = 1.1.13
+
diff --git a/lib/cosTime/doc/src/notes.xml b/lib/cosTime/doc/src/notes.xml
index 40ebf42753..718ca23bc5 100644
--- a/lib/cosTime/doc/src/notes.xml
+++ b/lib/cosTime/doc/src/notes.xml
@@ -4,7 +4,7 @@
<chapter>
<header>
<copyright>
- <year>2000</year><year>2010</year>
+ <year>2000</year><year>2011</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -33,6 +33,22 @@
</header>
<section>
+ <title>cosTime 1.1.10</title>
+
+ <section>
+ <title>Improvements and New Features</title>
+ <list type="bulleted">
+ <item>
+ <p>
+ Eliminated Dialyzer warnings when using exit or throw.</p>
+ <p>
+ Own Id: OTP-9050 Aux Id:</p>
+ </item>
+ </list>
+ </section>
+ </section>
+
+ <section>
<title>cosTime 1.1.9</title>
<section>
diff --git a/lib/cosTime/src/cosTime.erl b/lib/cosTime/src/cosTime.erl
index f4e67570ad..f7d03650af 100644
--- a/lib/cosTime/src/cosTime.erl
+++ b/lib/cosTime/src/cosTime.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -71,29 +71,39 @@
%%------------------------------------------------------------
install_time() ->
- install_loop(?IDL_TIME_MODULES,[]).
+ case install_loop(?IDL_TIME_MODULES,[]) of
+ ok ->
+ ok;
+ {error, Reason} ->
+ exit(Reason)
+ end.
install_timerevent() ->
- install_loop(?IDL_TIMEREVENT_MODULES,[]).
+ case install_loop(?IDL_TIMEREVENT_MODULES,[]) of
+ ok ->
+ ok;
+ {error, Reason} ->
+ exit(Reason)
+ end.
install_loop([], _) ->
ok;
install_loop([H|T], Accum) ->
case catch H:'oe_register'() of
{'EXIT',{unregistered,App}} ->
- ?write_ErrorMsg("Unable to register '~p'; application ~p not registered.
-Trying to unregister ~p~n", [H,App,Accum]),
+ ?write_ErrorMsg("Unable to register '~p'; application ~p not registered.\n"
+ "Trying to unregister ~p\n", [H,App,Accum]),
uninstall_loop(Accum, {exit, register});
{'EXCEPTION',_} ->
- ?write_ErrorMsg("Unable to register '~p'; propably already registered.
-You are adviced to confirm this.
-Trying to unregister ~p~n", [H,Accum]),
+ ?write_ErrorMsg("Unable to register '~p'; propably already registered.\n"
+ "You are adviced to confirm this.\n"
+ "Trying to unregister ~p\n", [H,Accum]),
uninstall_loop(Accum, {exit, register});
ok ->
install_loop(T, [H|Accum]);
_ ->
- ?write_ErrorMsg("Unable to register '~p'; reason unknown.
-Trying to unregister ~p~n", [H,Accum]),
+ ?write_ErrorMsg("Unable to register '~p'; reason unknown.\n"
+ "Trying to unregister ~p\n", [H,Accum]),
uninstall_loop(Accum, {exit, register})
end.
@@ -105,33 +115,43 @@ Trying to unregister ~p~n", [H,Accum]),
%%------------------------------------------------------------
uninstall_time() ->
- uninstall_loop(lists:reverse(?IDL_TIME_MODULES),ok).
+ case uninstall_loop(lists:reverse(?IDL_TIME_MODULES),ok) of
+ ok ->
+ ok;
+ {error, Reason} ->
+ exit(Reason)
+ end.
uninstall_timerevent() ->
- uninstall_loop(lists:reverse(?IDL_TIMEREVENT_MODULES),ok).
+ case uninstall_loop(lists:reverse(?IDL_TIMEREVENT_MODULES),ok) of
+ ok ->
+ ok;
+ {error, Reason} ->
+ exit(Reason)
+ end.
uninstall_loop([],ok) ->
ok;
uninstall_loop([],{exit, register}) ->
- exit({?MODULE, "oe_register failed"});
+ {error, {?MODULE, "oe_register failed"}};
uninstall_loop([],{exit, unregister}) ->
- exit({?MODULE, "oe_unregister failed"});
+ {error, {?MODULE, "oe_unregister failed"}};
uninstall_loop([],{exit, both}) ->
- exit({?MODULE, "oe_register and, for some of those already registered, oe_unregister failed"});
+ {error, {?MODULE, "oe_register and, for some of those already registered, oe_unregister failed"}};
uninstall_loop([H|T], Status) ->
case catch H:'oe_unregister'() of
ok ->
uninstall_loop(T, Status);
_ when Status == ok ->
- ?write_ErrorMsg("Unable to unregister '~p'; propably already unregistered.
-You are adviced to confirm this.~n",[H]),
+ ?write_ErrorMsg("Unable to unregister '~p'; propably already unregistered.\n"
+ "You are adviced to confirm this.~n",[H]),
uninstall_loop(T, {exit, unregister});
_ ->
- ?write_ErrorMsg("Unable to unregister '~p'; propably already unregistered.
-You are adviced to confirm this.~n",[H]),
+ ?write_ErrorMsg("Unable to unregister '~p'; propably already unregistered.\n"
+ "You are adviced to confirm this.~n",[H]),
uninstall_loop(T, {exit, both})
end.
-
+
%%------------------------------------------------------------
%% function : start/stop
%% Arguments:
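
Besides the same {ok, _}/{error, _} refactoring as in cosProperty, the ?write_ErrorMsg calls above are rewritten to build their messages from adjacent string literals instead of literals with embedded newlines; the compiler concatenates adjacent literals, so the output is identical while the source stays on sensible lines. A small illustration using io:format directly (the cosTime macro is assumed to wrap something similar):

-module(adjacent_literals).
-export([print_error/2]).

%% Adjacent string literals are joined at compile time, so both calls
%% below print exactly the same two lines.
print_error(Mod, Acc) ->
    io:format("Unable to register '~p'; reason unknown.\n"
              "Trying to unregister ~p\n", [Mod, Acc]),
    io:format("Unable to register '~p'; reason unknown.\nTrying to unregister ~p\n",
              [Mod, Acc]).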
diff --git a/lib/cosTime/vsn.mk b/lib/cosTime/vsn.mk
index 429613fb61..ebc5aff1cc 100644
--- a/lib/cosTime/vsn.mk
+++ b/lib/cosTime/vsn.mk
@@ -1 +1,2 @@
-COSTIME_VSN = 1.1.9
+COSTIME_VSN = 1.1.10
+
diff --git a/lib/crypto/c_src/crypto.c b/lib/crypto/c_src/crypto.c
index 92cc2b4dd9..0e7e63eb73 100644
--- a/lib/crypto/c_src/crypto.c
+++ b/lib/crypto/c_src/crypto.c
@@ -62,10 +62,16 @@
# define ERL_VALGRIND_MAKE_MEM_DEFINED(ptr,size) \
VALGRIND_MAKE_MEM_DEFINED(ptr,size)
- # define ERL_VALGRIND_ASSERT_MEM_DEFINED(ptr,size) \
- ((void) ((VALGRIND_CHECK_MEM_IS_DEFINED(ptr,size) == 0) ? 1 : \
- (fprintf(stderr,"\r\n####### VALGRIND_ASSSERT(%p,%ld) failed at %s:%d\r\n",\
- (ptr),(long)(size), __FILE__, __LINE__), abort(), 0)))
+ # define ERL_VALGRIND_ASSERT_MEM_DEFINED(Ptr,Size) \
+ do { \
+ int __erl_valgrind_mem_defined = VALGRIND_CHECK_MEM_IS_DEFINED((Ptr),(Size)); \
+ if (__erl_valgrind_mem_defined != 0) { \
+ fprintf(stderr,"\r\n####### VALGRIND_ASSSERT(%p,%ld) failed at %s:%d\r\n", \
+ (Ptr),(long)(Size), __FILE__, __LINE__); \
+ abort(); \
+ } \
+ } while (0)
+
#else
# define ERL_VALGRIND_MAKE_MEM_DEFINED(ptr,size)
# define ERL_VALGRIND_ASSERT_MEM_DEFINED(ptr,size)
diff --git a/lib/debugger/src/dbg_ui_view.erl b/lib/debugger/src/dbg_ui_view.erl
index 7350a830a8..be998f22ff 100644
--- a/lib/debugger/src/dbg_ui_view.erl
+++ b/lib/debugger/src/dbg_ui_view.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1998-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1998-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -42,6 +42,9 @@ start(GS, Mod) ->
false -> spawn(fun () -> init(GS, Mod, Title) end)
end.
+-spec stop() -> no_return().
+stop() ->
+ exit(stop).
%%====================================================================
%% Main loop and message handling
@@ -90,7 +93,7 @@ loop(State) ->
dbg_ui_winman:update_windows_menu(Data),
loop(State);
{dbg_ui_winman, destroy} ->
- exit(stop);
+ stop();
%% Help window termination -- ignore
{'EXIT', _Pid, _Reason} ->
@@ -104,7 +107,7 @@ gui_cmd(ignore, State) ->
gui_cmd({win, Win}, State) ->
State#state{win=Win};
gui_cmd(stopped, _State) ->
- exit(stop);
+ stop();
gui_cmd({coords, Coords}, State) ->
State#state{coords=Coords};
@@ -115,8 +118,8 @@ gui_cmd({shortcut, Key}, State) ->
end;
%% File menu
-gui_cmd('Close', State) ->
- gui_cmd(stopped, State);
+gui_cmd('Close', _State) ->
+ stop();
%% Edit menu
gui_cmd('Go To Line...', State) ->
diff --git a/lib/debugger/src/dbg_wx_view.erl b/lib/debugger/src/dbg_wx_view.erl
index 8ff89a4847..6242b9d0e0 100644
--- a/lib/debugger/src/dbg_wx_view.erl
+++ b/lib/debugger/src/dbg_wx_view.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -46,6 +46,9 @@ start(GS, Mod) ->
spawn_link(fun () -> init(GS, Env, Mod, Title) end)
end.
+-spec stop() -> no_return().
+stop() ->
+ exit(normal).
%%====================================================================
%% Main loop and message handling
@@ -113,13 +116,13 @@ loop(State) ->
end.
%%--Commands from the GUI---------------------------------------------
-
+
gui_cmd(ignore, State) ->
State;
gui_cmd({win, Win}, State) ->
State#state{win=Win};
gui_cmd(stopped, _State) ->
- exit(normal);
+ stop();
gui_cmd({coords, Coords}, State) ->
State#state{coords=Coords};
@@ -132,7 +135,7 @@ gui_cmd({shortcut, Key}, State) ->
%% File menu
gui_cmd('Close', State) ->
dbg_wx_trace_win:stop(State#state.win),
- gui_cmd(stopped, State);
+ stop();
%% Edit menu
gui_cmd('Go To Line', State) ->
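
dbg_ui_view and dbg_wx_view get the same treatment: the exit/1 calls that deliberately terminate the window process are funnelled through a local stop/0 declared with -spec stop() -> no_return(), which is what removes the Dialyzer warnings these clauses used to trigger. A sketch of the idiom with made-up names:

-module(no_return_sketch).
-export([loop/1]).

-spec stop() -> no_return().
stop() ->
    exit(stop).

%% Because stop/0 is declared no_return(), Dialyzer treats the destroy
%% clause as terminating instead of as a clause with a missing result.
loop(State) ->
    receive
        {cmd, Cmd} -> loop([Cmd | State]);
        destroy    -> stop()
    end.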
diff --git a/lib/dialyzer/doc/manual.txt b/lib/dialyzer/doc/manual.txt
index cc6f9130c7..1d7a1a6222 100644
--- a/lib/dialyzer/doc/manual.txt
+++ b/lib/dialyzer/doc/manual.txt
@@ -129,7 +129,7 @@ Usage: dialyzer [--help] [--version] [--shell] [--quiet] [--verbose]
[--apps applications] [-o outfile]
[--build_plt] [--add_to_plt] [--remove_from_plt]
[--check_plt] [--no_check_plt] [--plt_info] [--get_warnings]
- [--no_native]
+ [--no_native] [--fullpath]
Options:
files_or_dirs (for backwards compatibility also as: -c files_or_dirs)
@@ -231,6 +231,8 @@ Options:
Bypass the native code compilation of some key files that Dialyzer
heuristically performs when dialyzing many files; this avoids the
compilation time but it may result in (much) longer analysis time.
+ --fullpath
+ Display the full path names of files for which warnings are emitted.
--gui
Use the gs-based GUI.
--wx
diff --git a/lib/dialyzer/doc/src/dialyzer.xml b/lib/dialyzer/doc/src/dialyzer.xml
index 01a7e478bc..8813d51f1f 100644
--- a/lib/dialyzer/doc/src/dialyzer.xml
+++ b/lib/dialyzer/doc/src/dialyzer.xml
@@ -71,7 +71,7 @@
[--apps applications] [-o outfile]
[--build_plt] [--add_to_plt] [--remove_from_plt]
[--check_plt] [--no_check_plt] [--plt_info] [--get_warnings]
- [--no_native]
+ [--no_native] [--fullpath]
]]></code>
<p>Options:</p>
<taglist>
@@ -198,10 +198,12 @@
heuristically performs when dialyzing many files; this avoids the
compilation time but it may result in (much) longer analysis
time.</item>
+ <tag><c><![CDATA[--fullpath]]></c></tag>
+ <item>Display the full path names of files for which warnings are emitted.</item>
<tag><c><![CDATA[--gui]]></c></tag>
<item>Use the gs-based GUI.</item>
<tag><c><![CDATA[--wx]]></c></tag>
- <item>Use the wx-based GUI..</item>
+ <item>Use the wx-based GUI.</item>
</taglist>
<note>
<p>* denotes that multiple occurrences of these options are possible.</p>
diff --git a/lib/dialyzer/src/dialyzer.erl b/lib/dialyzer/src/dialyzer.erl
index 471f9fccd2..dde0c17c39 100644
--- a/lib/dialyzer/src/dialyzer.erl
+++ b/lib/dialyzer/src/dialyzer.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -38,7 +38,8 @@
gui/0,
gui/1,
plt_info/1,
- format_warning/1]).
+ format_warning/1,
+ format_warning/2]).
-include("dialyzer.hrl").
@@ -48,6 +49,8 @@
%% - run/1: Erlang interface for a command line-like analysis
%% - gui/0/1: Erlang interface for the gui.
%% - format_warning/1: Get the string representation of a warning.
+%% - format_warning/2: Likewise, but with an option whether
+%% to display full path names or not
%% - plt_info/1: Get information of the specified plt.
%%--------------------------------------------------------------------
@@ -281,11 +284,19 @@ cl_check_log(Output) ->
-spec format_warning(dial_warning()) -> string().
-format_warning({_Tag, {File, Line}, Msg}) when is_list(File),
- is_integer(Line) ->
- BaseName = filename:basename(File),
+format_warning(W) ->
+ format_warning(W, basename).
+
+-spec format_warning(dial_warning(), fopt()) -> string().
+
+format_warning({_Tag, {File, Line}, Msg}, FOpt) when is_list(File),
+ is_integer(Line) ->
+ F = case FOpt of
+ fullpath -> File;
+ basename -> filename:basename(File)
+ end,
String = lists:flatten(message_to_string(Msg)),
- lists:flatten(io_lib:format("~s:~w: ~s", [BaseName, Line, String])).
+ lists:flatten(io_lib:format("~s:~w: ~s", [F, Line, String])).
%%-----------------------------------------------------------------------------
@@ -325,6 +336,8 @@ message_to_string({guard_fail, [Arg1, Infix, Arg2]}) ->
io_lib:format("Guard test ~s ~s ~s can never succeed\n", [Arg1, Infix, Arg2]);
message_to_string({guard_fail, [Guard, Args]}) ->
io_lib:format("Guard test ~w~s can never succeed\n", [Guard, Args]);
+message_to_string({neg_guard_fail, [Guard, Args]}) ->
+ io_lib:format("Guard test not(~w~s) can never succeed\n", [Guard, Args]);
message_to_string({guard_fail_pat, [Pat, Type]}) ->
io_lib:format("Clause guard cannot succeed. The ~s was matched"
" against the type ~s\n", [Pat, Type]);
diff --git a/lib/dialyzer/src/dialyzer.hrl b/lib/dialyzer/src/dialyzer.hrl
index 1d98574585..aa3f703af2 100644
--- a/lib/dialyzer/src/dialyzer.hrl
+++ b/lib/dialyzer/src/dialyzer.hrl
@@ -2,7 +2,7 @@
%%%
%%% %CopyrightBegin%
%%%
-%%% Copyright Ericsson AB 2006-2010. All Rights Reserved.
+%%% Copyright Ericsson AB 2006-2011. All Rights Reserved.
%%%
%%% The contents of this file are subject to the Erlang Public License,
%%% Version 1.1, (the "License"); you may not use this file except in
@@ -31,7 +31,7 @@
-define(RET_DISCREPANCIES, 2).
-type dial_ret() :: ?RET_NOTHING_SUSPICIOUS
- | ?RET_INTERNAL_ERROR
+ | ?RET_INTERNAL_ERROR
| ?RET_DISCREPANCIES.
%%--------------------------------------------------------------------
@@ -87,7 +87,7 @@
%%--------------------------------------------------------------------
%% THIS TYPE SHOULD ONE DAY DISAPPEAR -- IT DOES NOT BELONG HERE
%%--------------------------------------------------------------------
-
+
-type ordset(T) :: [T] . %% XXX: temporarily
%%--------------------------------------------------------------------
@@ -102,6 +102,8 @@
-type dial_define() :: {atom(), term()}.
-type dial_option() :: {atom(), term()}.
-type dial_options() :: [dial_option()].
+-type fopt() :: 'basename' | 'fullpath'.
+-type format() :: 'formatted' | 'raw'.
-type label() :: non_neg_integer().
-type rep_mode() :: 'quiet' | 'normal' | 'verbose'.
-type start_from() :: 'byte_code' | 'src_code'.
@@ -137,10 +139,10 @@
erlang_mode = false :: boolean(),
use_contracts = true :: boolean(),
output_file = none :: 'none' | file:filename(),
- output_format = formatted :: 'raw' | 'formatted',
+ output_format = formatted :: format(),
+ filename_opt = basename :: fopt(),
callgraph_file = "" :: file:filename(),
- check_plt = true :: boolean()
- }).
+ check_plt = true :: boolean()}).
-record(contract, {contracts = [] :: [contract_pair()],
args = [] :: [erl_types:erl_type()],
diff --git a/lib/dialyzer/src/dialyzer_cl.erl b/lib/dialyzer/src/dialyzer_cl.erl
index 2a9de7886f..8d61216b7a 100644
--- a/lib/dialyzer/src/dialyzer_cl.erl
+++ b/lib/dialyzer/src/dialyzer_cl.erl
@@ -2,7 +2,7 @@
%%-------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -46,7 +46,8 @@
legal_warnings = ordsets:new() :: [dial_warn_tag()],
mod_deps = dict:new() :: dict(),
output = standard_io :: io:device(),
- output_format = formatted :: 'raw' | 'formatted',
+ output_format = formatted :: format(),
+ filename_opt = basename :: fopt(),
output_plt = none :: 'none' | file:filename(),
plt_info = none :: 'none' | dialyzer_plt:plt_info(),
report_mode = normal :: rep_mode(),
@@ -538,8 +539,10 @@ hc(Mod) ->
new_state() ->
#cl_state{}.
-init_output(State0, #options{output_file = OutFile, output_format = OutFormat}) ->
- State = State0#cl_state{output_format = OutFormat},
+init_output(State0, #options{output_file = OutFile,
+ output_format = OutFormat,
+ filename_opt = FOpt}) ->
+ State = State0#cl_state{output_format = OutFormat, filename_opt = FOpt},
case OutFile =:= none of
true ->
State;
@@ -772,6 +775,7 @@ print_warnings(#cl_state{stored_warnings = []}) ->
ok;
print_warnings(#cl_state{output = Output,
output_format = Format,
+ filename_opt = FOpt,
stored_warnings = Warnings}) ->
PrWarnings = process_warnings(Warnings),
case PrWarnings of
@@ -779,7 +783,7 @@ print_warnings(#cl_state{output = Output,
[_|_] ->
S = case Format of
formatted ->
- [dialyzer:format_warning(W) || W <- PrWarnings];
+ [dialyzer:format_warning(W, FOpt) || W <- PrWarnings];
raw ->
[io_lib:format("~p. \n", [W]) || W <- PrWarnings]
end,
diff --git a/lib/dialyzer/src/dialyzer_cl_parse.erl b/lib/dialyzer/src/dialyzer_cl_parse.erl
index 2867b522fd..b68d6d190e 100644
--- a/lib/dialyzer/src/dialyzer_cl_parse.erl
+++ b/lib/dialyzer/src/dialyzer_cl_parse.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -133,6 +133,9 @@ cl(["-o"++Output|T]) ->
cl(["--raw"|T]) ->
put(dialyzer_output_format, raw),
cl(T);
+cl(["--fullpath"|T]) ->
+ put(dialyzer_filename_opt, fullpath),
+ cl(T);
cl(["-pa", Path|T]) ->
case code:add_patha(Path) of
true -> cl(T);
@@ -243,6 +246,7 @@ init() ->
put(dialyzer_options_defines, DefaultOpts#options.defines),
put(dialyzer_options_files, DefaultOpts#options.files),
put(dialyzer_output_format, formatted),
+ put(dialyzer_filename_opt, basename),
put(dialyzer_options_check_plt, DefaultOpts#options.check_plt),
ok.
@@ -281,6 +285,7 @@ cl_options() ->
{files_rec, get(dialyzer_options_files_rec)},
{output_file, get(dialyzer_output)},
{output_format, get(dialyzer_output_format)},
+ {filename_opt, get(dialyzer_filename_opt)},
{analysis_type, get(dialyzer_options_analysis_type)},
{get_warnings, get(dialyzer_options_get_warnings)},
{callgraph_file, get(dialyzer_callgraph_file)}
@@ -338,7 +343,7 @@ help_message() ->
[--apps applications] [-o outfile]
[--build_plt] [--add_to_plt] [--remove_from_plt]
[--check_plt] [--no_check_plt] [--plt_info] [--get_warnings]
- [--no_native]
+ [--no_native] [--fullpath]
Options:
files_or_dirs (for backwards compatibility also as: -c files_or_dirs)
Use Dialyzer from the command line to detect defects in the
@@ -440,6 +445,8 @@ Options:
Bypass the native code compilation of some key files that Dialyzer
heuristically performs when dialyzing many files; this avoids the
compilation time but it may result in (much) longer analysis time.
+ --fullpath
+ Display the full path names of files for which warnings are emitted.
--gui
Use the gs-based GUI.
--wx
diff --git a/lib/dialyzer/src/dialyzer_dataflow.erl b/lib/dialyzer/src/dialyzer_dataflow.erl
index b80c7efc1a..2ffbcd3e82 100644
--- a/lib/dialyzer/src/dialyzer_dataflow.erl
+++ b/lib/dialyzer/src/dialyzer_dataflow.erl
@@ -1457,6 +1457,7 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map,
false ->
WarnType = case Msg of
{guard_fail, _} -> ?WARN_MATCHING;
+ {neg_guard_fail, _} -> ?WARN_MATCHING;
{opaque_guard, _} -> ?WARN_OPAQUE
end,
state__add_warning(State1, WarnType, FailGuard, Msg);
@@ -1748,7 +1749,7 @@ bind_opaque_pats(GenType, Type, Pat, Map, State, Rev) ->
bind_guard(Guard, Map, State) ->
try bind_guard(Guard, Map, dict:new(), pos, State) of
- {Map1, _Type} -> Map1
+ {Map1, _Type} -> Map1
catch
throw:{fail, Warning} -> {error, Warning};
throw:{fatal_fail, Warning} -> {error, Warning}
@@ -1869,8 +1870,8 @@ handle_guard_gen_fun({M, F, A}, Guard, Map, Env, Eval, State) ->
true ->
%% Is this an error-bif?
case t_is_none(erl_bif_types:type(M, F, A)) of
- true -> signal_guard_fail(Guard, As, State);
- false -> signal_guard_fatal_fail(Guard, As, State)
+ true -> signal_guard_fail(Eval, Guard, As, State);
+ false -> signal_guard_fatal_fail(Eval, Guard, As, State)
end;
false ->
BifArgs = case erl_bif_types:arg_types(M, F, A) of
@@ -1887,7 +1888,7 @@ handle_guard_gen_fun({M, F, A}, Guard, Map, Env, Eval, State) ->
case t_is_none(Ret) of
true ->
case Eval =:= pos of
- true -> signal_guard_fail(Guard, As, State);
+ true -> signal_guard_fail(Eval, Guard, As, State);
false -> throw({fail, none})
end;
false -> {Map2, Ret}
@@ -1900,7 +1901,7 @@ handle_guard_type_test(Guard, F, Map, Env, Eval, State) ->
case bind_type_test(Eval, F, ArgType, State) of
error ->
?debug("Type test: ~w failed\n", [F]),
- signal_guard_fail(Guard, [ArgType], State);
+ signal_guard_fail(Eval, Guard, [ArgType], State);
{ok, NewArgType, Ret} ->
?debug("Type test: ~w succeeded, NewType: ~s, Ret: ~s\n",
[F, t_to_string(NewArgType), t_to_string(Ret)]),
@@ -1963,18 +1964,19 @@ handle_guard_comp(Guard, Comp, Map, Env, Eval, State) ->
true when Eval =:= pos -> {Map, t_atom(true)};
true when Eval =:= dont_know -> {Map, t_atom(true)};
true when Eval =:= neg -> {Map, t_atom(true)};
- false when Eval =:= pos -> signal_guard_fail(Guard, ArgTypes, State);
+ false when Eval =:= pos ->
+ signal_guard_fail(Eval, Guard, ArgTypes, State);
false when Eval =:= dont_know -> {Map, t_atom(false)};
false when Eval =:= neg -> {Map, t_atom(false)}
end;
{literal, var} when IsInt1 andalso IsInt2 andalso (Eval =:= pos) ->
case bind_comp_literal_var(Arg1, Arg2, Type2, Comp, Map1) of
- error -> signal_guard_fail(Guard, ArgTypes, State);
+ error -> signal_guard_fail(Eval, Guard, ArgTypes, State);
{ok, NewMap} -> {NewMap, t_atom(true)}
end;
{var, literal} when IsInt1 andalso IsInt2 andalso (Eval =:= pos) ->
case bind_comp_literal_var(Arg2, Arg1, Type1, invert_comp(Comp), Map1) of
- error -> signal_guard_fail(Guard, ArgTypes, State);
+ error -> signal_guard_fail(Eval, Guard, ArgTypes, State);
{ok, NewMap} -> {NewMap, t_atom(true)}
end;
{_, _} ->
@@ -2014,7 +2016,7 @@ handle_guard_is_function(Guard, Map, Env, Eval, State) ->
[FunType0, ArityType0] = ArgTypes0,
ArityType = t_inf(ArityType0, t_integer()),
case t_is_none(ArityType) of
- true -> signal_guard_fail(Guard, ArgTypes0, State);
+ true -> signal_guard_fail(Eval, Guard, ArgTypes0, State);
false ->
FunTypeConstr =
case t_number_vals(ArityType) of
@@ -2026,7 +2028,7 @@ handle_guard_is_function(Guard, Map, Env, Eval, State) ->
case t_is_none(FunType) of
true ->
case Eval of
- pos -> signal_guard_fail(Guard, ArgTypes0, State);
+ pos -> signal_guard_fail(Eval, Guard, ArgTypes0, State);
neg -> {Map1, t_atom(false)};
dont_know -> {Map1, t_atom(false)}
end;
@@ -2062,7 +2064,7 @@ handle_guard_is_record(Guard, Map, Env, Eval, State) ->
case t_is_none(Type) of
true ->
case Eval of
- pos -> signal_guard_fail(Guard,
+ pos -> signal_guard_fail(Eval, Guard,
[RecType, t_from_term(Tag),
t_from_term(Arity)],
State);
@@ -2095,7 +2097,7 @@ handle_guard_eq(Guard, Map, Env, Eval, State) ->
Eval =:= pos ->
ArgTypes = [t_from_term(cerl:concrete(Arg1)),
t_from_term(cerl:concrete(Arg2))],
- signal_guard_fail(Guard, ArgTypes, State)
+ signal_guard_fail(Eval, Guard, ArgTypes, State)
end
end;
{literal, _} when Eval =:= pos ->
@@ -2150,7 +2152,7 @@ handle_guard_eqeq(Guard, Map, Env, Eval, State) ->
Eval =:= pos ->
ArgTypes = [t_from_term(cerl:concrete(Arg1)),
t_from_term(cerl:concrete(Arg2))],
- signal_guard_fail(Guard, ArgTypes, State)
+ signal_guard_fail(Eval, Guard, ArgTypes, State)
end
end;
{literal, _} when Eval =:= pos ->
@@ -2172,7 +2174,7 @@ bind_eqeq_guard(Guard, Arg1, Arg2, Map, Env, Eval, State) ->
case Eval of
neg -> {Map2, t_atom(false)};
dont_know -> {Map2, t_atom(false)};
- pos -> signal_guard_fail(Guard, [Type1, Type2], State)
+ pos -> signal_guard_fail(Eval, Guard, [Type1, Type2], State)
end;
false ->
case Eval of
@@ -2199,29 +2201,29 @@ bind_eqeq_guard(Guard, Arg1, Arg2, Map, Env, Eval, State) ->
end.
bind_eqeq_guard_lit_other(Guard, Arg1, Arg2, Map, Env, State) ->
- %% Assumes positive evaluation
+ Eval = dont_know,
case cerl:concrete(Arg1) of
true ->
{_, Type} = MT = bind_guard(Arg2, Map, Env, pos, State),
case t_is_atom(true, Type) of
true -> MT;
false ->
- {_, Type0} = bind_guard(Arg2, Map, Env, dont_know, State),
- signal_guard_fail(Guard, [Type0, t_atom(true)], State)
+ {_, Type0} = bind_guard(Arg2, Map, Env, Eval, State),
+ signal_guard_fail(Eval, Guard, [Type0, t_atom(true)], State)
end;
false ->
{Map1, Type} = bind_guard(Arg2, Map, Env, neg, State),
case t_is_atom(false, Type) of
true -> {Map1, t_atom(true)};
false ->
- {_, Type0} = bind_guard(Arg2, Map, Env, dont_know, State),
- signal_guard_fail(Guard, [Type0, t_atom(true)], State)
+ {_, Type0} = bind_guard(Arg2, Map, Env, Eval, State),
+ signal_guard_fail(Eval, Guard, [Type0, t_atom(true)], State)
end;
Term ->
LitType = t_from_term(Term),
- {Map1, Type} = bind_guard(Arg2, Map, Env, dont_know, State),
+ {Map1, Type} = bind_guard(Arg2, Map, Env, Eval, State),
case t_is_subtype(LitType, Type) of
- false -> signal_guard_fail(Guard, [Type, LitType], State);
+ false -> signal_guard_fail(Eval, Guard, [Type, LitType], State);
true ->
case cerl:is_c_var(Arg2) of
true -> {enter_type(Arg2, LitType, Map1), t_atom(true)};
@@ -2250,31 +2252,37 @@ handle_guard_and(Guard, Map, Env, Eval, State) ->
catch throw:{fail, _} -> bind_guard(Arg2, Map, Env, pos, State)
end,
{Map2, Type2} =
- try bind_guard(Arg1, Map, Env, neg, State)
- catch throw:{fail, _} -> bind_guard(Arg2, Map, Env, pos, State)
+ try bind_guard(Arg2, Map, Env, neg, State)
+ catch throw:{fail, _} -> bind_guard(Arg1, Map, Env, pos, State)
end,
case t_is_atom(false, Type1) orelse t_is_atom(false, Type2) of
true -> {join_maps([Map1, Map2], Map), t_atom(false)};
false -> throw({fail, none})
end;
dont_know ->
- True = t_atom(true),
{Map1, Type1} = bind_guard(Arg1, Map, Env, dont_know, State),
- case t_is_none(t_inf(Type1, t_boolean())) of
- true -> throw({fail, none});
+ {Map2, Type2} = bind_guard(Arg2, Map, Env, dont_know, State),
+ Bool1 = t_inf(Type1, t_boolean()),
+ Bool2 = t_inf(Type2, t_boolean()),
+ case t_is_none(Bool1) orelse t_is_none(Bool2) of
+ true -> throw({fatal_fail, none});
false ->
- {Map2, Type2} = bind_guard(Arg2, Map1, Env, Eval, State),
- case t_is_none(t_inf(Type2, t_boolean())) of
- true -> throw({fail, none});
- false -> {Map2, True}
- end
+ NewMap = join_maps([Map1, Map2], Map),
+ NewType =
+ case {t_atom_vals(Bool1), t_atom_vals(Bool2)} of
+ {['true'] , ['true'] } -> t_atom(true);
+ {['false'], _ } -> t_atom(false);
+ {_ , ['false']} -> t_atom(false);
+ {_ , _ } -> t_boolean()
+ end,
+ {NewMap, NewType}
end
end.
handle_guard_or(Guard, Map, Env, Eval, State) ->
[Arg1, Arg2] = cerl:call_args(Guard),
case Eval of
- pos ->
+ pos ->
{Map1, Bool1} =
try bind_guard(Arg1, Map, Env, pos, State)
catch
@@ -2303,11 +2311,22 @@ handle_guard_or(Guard, Map, Env, Eval, State) ->
end
end;
dont_know ->
- {Map1, Bool1} = bind_guard(Arg1, Map, Env, dont_know, State),
- {Map2, Bool2} = bind_guard(Arg2, Map, Env, dont_know, State),
- case t_is_boolean(Bool1) andalso t_is_boolean(Bool2) of
- true -> {join_maps([Map1, Map2], Map), t_sup(Bool1, Bool2)};
- false -> throw({fail, none})
+ {Map1, Type1} = bind_guard(Arg1, Map, Env, dont_know, State),
+ {Map2, Type2} = bind_guard(Arg2, Map, Env, dont_know, State),
+ Bool1 = t_inf(Type1, t_boolean()),
+ Bool2 = t_inf(Type2, t_boolean()),
+ case t_is_none(Bool1) orelse t_is_none(Bool2) of
+ true -> throw({fatal_fail, none});
+ false ->
+ NewMap = join_maps([Map1, Map2], Map),
+ NewType =
+ case {t_atom_vals(Bool1), t_atom_vals(Bool2)} of
+ {['false'], ['false']} -> t_atom(false);
+ {['true'] , _ } -> t_atom(true);
+ {_ , ['true'] } -> t_atom(true);
+ {_ , _ } -> t_boolean()
+ end,
+ {NewMap, NewType}
end
end.
@@ -2349,10 +2368,12 @@ bind_guard_list([G|Gs], Map, Env, Eval, State, Acc) ->
bind_guard_list([], Map, _Env, _Eval, _State, Acc) ->
{Map, lists:reverse(Acc)}.
--spec signal_guard_fail(cerl:c_call(), [erl_types:erl_type()], state()) ->
- no_return().
+-type eval() :: 'pos' | 'neg' | 'dont_know'.
+
+-spec signal_guard_fail(eval(), cerl:c_call(), [erl_types:erl_type()],
+ state()) -> no_return().
-signal_guard_fail(Guard, ArgTypes, State) ->
+signal_guard_fail(Eval, Guard, ArgTypes, State) ->
Args = cerl:call_args(Guard),
F = cerl:atom_val(cerl:call_name(Guard)),
MFA = {cerl:atom_val(cerl:call_module(Guard)), F, length(Args)},
@@ -2365,7 +2386,7 @@ signal_guard_fail(Guard, ArgTypes, State) ->
atom_to_list(F),
format_args_1([Arg2], [ArgType2], State)]};
false ->
- mk_guard_msg(F, Args, ArgTypes, State)
+ mk_guard_msg(Eval, F, Args, ArgTypes, State)
end,
throw({fail, {Guard, Msg}}).
@@ -2380,20 +2401,25 @@ is_infix_op({erlang, '>=', 2}) -> true;
is_infix_op({M, F, A}) when is_atom(M), is_atom(F),
is_integer(A), 0 =< A, A =< 255 -> false.
--spec signal_guard_fatal_fail(cerl:c_call(), [erl_types:erl_type()], state()) ->
- no_return().
+-spec signal_guard_fatal_fail(eval(), cerl:c_call(), [erl_types:erl_type()],
+ state()) -> no_return().
-signal_guard_fatal_fail(Guard, ArgTypes, State) ->
+signal_guard_fatal_fail(Eval, Guard, ArgTypes, State) ->
Args = cerl:call_args(Guard),
F = cerl:atom_val(cerl:call_name(Guard)),
- Msg = mk_guard_msg(F, Args, ArgTypes, State),
+ Msg = mk_guard_msg(Eval, F, Args, ArgTypes, State),
throw({fatal_fail, {Guard, Msg}}).
-mk_guard_msg(F, Args, ArgTypes, State) ->
+mk_guard_msg(Eval, F, Args, ArgTypes, State) ->
FArgs = [F, format_args(Args, ArgTypes, State)],
case any_has_opaque_subtype(ArgTypes) of
true -> {opaque_guard, FArgs};
- false -> {guard_fail, FArgs}
+ false ->
+ case Eval of
+ neg -> {neg_guard_fail, FArgs};
+ pos -> {guard_fail, FArgs};
+ dont_know -> {guard_fail, FArgs}
+ end
end.
bind_guard_case_clauses(Arg, Clauses, Map, Env, Eval, State) ->
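
Threading Eval through signal_guard_fail/4 and mk_guard_msg/5 is what lets the new neg_guard_fail message (added to dialyzer.erl above) be chosen when a guard fails under negation. A hedged example of the sort of code the new wording is aimed at; with X known to be an atom, the negated test should now be reported as "Guard test not(is_atom(...)) can never succeed" rather than with the positive wording:

-module(neg_guard_example).
-export([f/1]).

f(X) when is_atom(X) ->
    if
        %% X is an atom here, so this negated guard can never succeed.
        not is_atom(X) -> impossible;
        true -> ok
    end.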
diff --git a/lib/dialyzer/src/dialyzer_options.erl b/lib/dialyzer/src/dialyzer_options.erl
index d64e44a814..29e164628a 100644
--- a/lib/dialyzer/src/dialyzer_options.erl
+++ b/lib/dialyzer/src/dialyzer_options.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -175,6 +175,9 @@ build_options([{OptionName, Value} = Term|Rest], Options) ->
output_format ->
assert_output_format(Value),
build_options(Rest, Options#options{output_format = Value});
+ filename_opt ->
+ assert_filename_opt(Value),
+ build_options(Rest, Options#options{filename_opt = Value});
output_plt ->
assert_filename(Value),
build_options(Rest, Options#options{output_plt = Value});
@@ -229,6 +232,13 @@ assert_output_format(formatted) ->
assert_output_format(Term) ->
bad_option("Illegal value for output_format", Term).
+assert_filename_opt(basename) ->
+ ok;
+assert_filename_opt(fullpath) ->
+ ok;
+assert_filename_opt(Term) ->
+ bad_option("Illegal value for filename_opt", Term).
+
assert_plt_op(#options{analysis_type = OldVal},
#options{analysis_type = NewVal}) ->
case is_plt_mode(OldVal) andalso is_plt_mode(NewVal) of
diff --git a/lib/edoc/src/edoc_lib.erl b/lib/edoc/src/edoc_lib.erl
index c1f95a7a67..6705ccd356 100644
--- a/lib/edoc/src/edoc_lib.erl
+++ b/lib/edoc/src/edoc_lib.erl
@@ -16,7 +16,6 @@
%%
%% $Id$
%%
-%% @private
%% @copyright 2001-2003 Richard Carlsson
%% @author Richard Carlsson <[email protected]>
%% @see edoc
@@ -49,14 +48,17 @@
%% ---------------------------------------------------------------------
%% List and string utilities
+%% @private
timestr({H,M,Sec}) ->
lists:flatten(io_lib:fwrite("~2.2.0w:~2.2.0w:~2.2.0w",[H,M,Sec])).
+%% @private
datestr({Y,M,D}) ->
Ms = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep",
"Oct", "Nov", "Dec"],
lists:flatten(io_lib:fwrite("~s ~w ~w",[lists:nth(M, Ms),D,Y])).
+%% @private
count(X, Xs) ->
count(X, Xs, 0).
@@ -67,6 +69,7 @@ count(X, [_ | Xs], N) ->
count(_X, [], N) ->
N.
+%% @private
lines(Cs) ->
lines(Cs, [], []).
@@ -77,6 +80,7 @@ lines([C | Cs], As, Ls) ->
lines([], As, Ls) ->
lists:reverse([lists:reverse(As) | Ls]).
+%% @private
split_at(Cs, K) ->
split_at(Cs, K, []).
@@ -87,6 +91,7 @@ split_at([C | Cs], K, As) ->
split_at([], _K, As) ->
{lists:reverse(As), []}.
+%% @private
split_at_stop(Cs) ->
split_at_stop(Cs, []).
@@ -103,6 +108,7 @@ split_at_stop([C | Cs], As) ->
split_at_stop([], As) ->
{lists:reverse(As), []}.
+%% @private
split_at_space(Cs) ->
split_at_space(Cs, []).
@@ -117,17 +123,20 @@ split_at_space([C | Cs], As) ->
split_at_space([], As) ->
{lists:reverse(As), []}.
+%% @private
is_space([$\s | Cs]) -> is_space(Cs);
is_space([$\t | Cs]) -> is_space(Cs);
is_space([$\n | Cs]) -> is_space(Cs);
is_space([_C | _Cs]) -> false;
is_space([]) -> true.
+%% @private
strip_space([$\s | Cs]) -> strip_space(Cs);
strip_space([$\t | Cs]) -> strip_space(Cs);
strip_space([$\n | Cs]) -> strip_space(Cs);
strip_space(Cs) -> Cs.
+%% @private
segment(Es, N) ->
segment(Es, [], [], 0, N).
@@ -140,6 +149,7 @@ segment([], [], Cs, _N, _M) ->
segment([], As, Cs, _N, _M) ->
lists:reverse([lists:reverse(As) | Cs]).
+%% @private
transpose([]) -> [];
transpose([[] | Xss]) -> transpose(Xss);
transpose([[X | Xs] | Xss]) ->
@@ -151,6 +161,7 @@ transpose([[X | Xs] | Xss]) ->
%% end of the summary sentence only if it is also the last segment in
%% the list, or is followed by a 'p' or 'br' ("whitespace") element.
+%% @private
get_first_sentence([#xmlElement{name = p, content = Es} | _]) ->
%% Descend into initial paragraph.
get_first_sentence_1(Es);
@@ -230,6 +241,7 @@ end_of_sentence_1(_, false, _) ->
%% Names must begin with a lowercase letter and contain only
%% alphanumerics and underscores.
+%% @private
is_name([C | Cs]) when C >= $a, C =< $z ->
is_name_1(Cs);
is_name([C | Cs]) when C >= $\337, C =< $\377, C =/= $\367 ->
@@ -252,6 +264,7 @@ is_name_1(_) -> false.
to_atom(A) when is_atom(A) -> A;
to_atom(S) when is_list(S) -> list_to_atom(S).
+%% @private
unique([X | Xs]) -> [X | unique(Xs, X)];
unique([]) -> [].
@@ -267,6 +280,7 @@ unique([], _) -> [].
%% content of <a href="overview-summary.html#ftag-equiv">`@equiv'</a>
%% tags, and strings denoting file names, e.g. in @headerfile. Also used
%% by {@link edoc_run}.
+%% @private
parse_expr(S, L) ->
case erl_scan:string(S ++ ".", L) of
@@ -287,10 +301,11 @@ parse_expr(S, L) ->
%% @doc EDoc "contact information" parsing. This is the type of the
%% content in e.g.
%% <a href="overview-summary.html#mtag-author">`@author'</a> tags.
+%% @private
-%% @type info() = #info{name = string(),
-%% email = string(),
-%% uri = string()}
+%% % @type info() = #info{name = string(),
+%% % email = string(),
+%% % uri = string()}
-record(info, {name = "" :: string(),
email = "" :: string(),
@@ -367,6 +382,7 @@ strip_and_reverse(As) ->
%%
%% TODO: general utf-8 encoding for all of Unicode (0-16#10ffff)
+%% @private
escape_uri([C | Cs]) when C >= $a, C =< $z ->
[C | escape_uri(Cs)];
escape_uri([C | Cs]) when C >= $A, C =< $Z ->
@@ -409,6 +425,7 @@ hex_octet(N) ->
%% Please note that URI are *not* file names. Don't use the stdlib
%% 'filename' module for operations on (any parts of) URI.
+%% @private
join_uri(Base, "") ->
Base;
join_uri("", Path) ->
@@ -418,6 +435,7 @@ join_uri(Base, Path) ->
%% Check for relative URI; "network paths" ("//...") not included!
+%% @private
is_relative_uri([$: | _]) ->
false;
is_relative_uri([$/, $/ | _]) ->
@@ -433,6 +451,7 @@ is_relative_uri([_ | Cs]) ->
is_relative_uri([]) ->
true.
+%% @private
uri_get("file:///" ++ Path) ->
uri_get_file(Path);
uri_get("file://localhost/" ++ Path) ->
@@ -532,6 +551,7 @@ uri_get_ftp(URI) ->
Msg = io_lib:format("cannot access ftp scheme yet: '~s'.", [URI]),
{error, Msg}.
+%% @private
to_label([$\s | Cs]) ->
to_label(Cs);
to_label([$\t | Cs]) ->
@@ -564,6 +584,7 @@ to_label_2(Cs) ->
%% ---------------------------------------------------------------------
%% Files
+%% @private
filename([C | T]) when is_integer(C), C > 0 ->
[C | filename(T)];
filename([H|T]) ->
@@ -576,6 +597,7 @@ filename(N) ->
report("bad filename: `~P'.", [N, 25]),
exit(error).
+%% @private
copy_file(From, To) ->
case file:copy(From, To) of
{ok, _} -> ok;
@@ -600,6 +622,7 @@ list_dir(Dir, Error) ->
F("could not read directory '~s': ~s.", [filename(Dir), R1])
end.
+%% @private
simplify_path(P) ->
case filename:basename(P) of
"." ->
@@ -636,6 +659,7 @@ simplify_path(P) ->
%% exit(error)
%% end.
+%% @private
try_subdir(Dir, Subdir) ->
D = filename:join(Dir, Subdir),
case filelib:is_dir(D) of
@@ -648,6 +672,7 @@ try_subdir(Dir, Subdir) ->
%%
%% @doc Write the given `Text' to the file named by `Name' in directory
%% `Dir'. If the target directory does not exist, it will be created.
+%% @private
write_file(Text, Dir, Name) ->
write_file(Text, Dir, Name, '').
@@ -657,6 +682,7 @@ write_file(Text, Dir, Name) ->
%% Name::edoc:filename(), Package::atom()|string()) -> ok
%% @doc Like {@link write_file/3}, but adds path components to the target
%% directory corresponding to the specified package.
+%% @private
write_file(Text, Dir, Name, Package) ->
Dir1 = filename:join([Dir | packages:split(Package)]),
@@ -672,6 +698,7 @@ write_file(Text, Dir, Name, Package) ->
exit(error)
end.
+%% @private
write_info_file(App, Packages, Modules, Dir) ->
Ts = [{packages, Packages},
{modules, Modules}],
@@ -703,6 +730,7 @@ info_file_data(Ts) ->
%% Local file access - don't complain if file does not exist.
+%% @private
read_info_file(Dir) ->
File = filename:join(Dir, ?INFO_FILE),
case filelib:is_file(File) of
@@ -769,11 +797,13 @@ parse_terms_1([], _As, _Vs) ->
%% ---------------------------------------------------------------------
%% Source files and packages
+%% @private
find_sources(Path, Opts) ->
find_sources(Path, "", Opts).
%% @doc See {@link edoc:run/3} for a description of the options
%% `subpackages', `source_suffix' and `exclude_packages'.
+%% @private
%% NEW-OPTIONS: subpackages, source_suffix, exclude_packages
%% DEFER-OPTIONS: edoc:run/3
@@ -827,6 +857,7 @@ is_package_dir(Name, Dir) ->
is_name(filename:rootname(filename:basename(Name)))
andalso filelib:is_dir(filename:join(Dir, Name)).
+%% @private
find_file([P | Ps], Pkg, Name) ->
Dir = filename:join(P, filename:join(packages:split(Pkg))),
File = filename:join(Dir, Name),
@@ -839,6 +870,7 @@ find_file([P | Ps], Pkg, Name) ->
find_file([], _Pkg, _Name) ->
"".
+%% @private
find_doc_dirs() ->
find_doc_dirs(code:get_path()).
@@ -904,6 +936,7 @@ add_new(K, V, D) ->
%% @spec (Options::proplist()) -> edoc_env()
%% @equiv get_doc_env([], [], [], Opts)
+%% @private
get_doc_env(Opts) ->
get_doc_env([], [], [], Opts).
@@ -952,6 +985,7 @@ get_doc_env(App, Packages, Modules, Opts) ->
%% NEW-OPTIONS: doclet
%% DEFER-OPTIONS: edoc:run/3
+%% @private
run_doclet(Fun, Opts) ->
run_plugin(doclet, ?DEFAULT_DOCLET, Fun, Opts).
@@ -961,6 +995,7 @@ run_doclet(Fun, Opts) ->
%% NEW-OPTIONS: layout
%% DEFER-OPTIONS: edoc:layout/2
+%% @private
run_layout(Fun, Opts) ->
run_plugin(layout, ?DEFAULT_LAYOUT, Fun, Opts).
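
edoc_lib drops the module-wide @private tag in favour of per-function @private tags, so the module page is generated again while the internal helpers stay out of it. For reference, the per-function form looks like this in any module (names are illustrative); functions tagged @private are only documented when edoc is run with the 'private' option:

-module(edoc_private_example).
-export([join/2, strip_slash/1]).

%% @doc Join two URI fragments. Appears in the generated documentation.
join(Base, Path) -> Base ++ "/" ++ strip_slash(Path).

%% @private
%% Exported for internal use only; hidden from the generated docs.
strip_slash([$/ | Cs]) -> Cs;
strip_slash(Cs) -> Cs.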
diff --git a/lib/edoc/src/edoc_wiki.erl b/lib/edoc/src/edoc_wiki.erl
index e4a3d74734..b36aaae6ce 100644
--- a/lib/edoc/src/edoc_wiki.erl
+++ b/lib/edoc/src/edoc_wiki.erl
@@ -82,7 +82,8 @@ parse_xml(Data, Line) ->
parse_xml_1(Text, Line) ->
Text1 = "<doc>" ++ Text ++ "</doc>",
- case catch {ok, xmerl_scan:string(Text1, [{line, Line}])} of
+ Options = [{line, Line}, {encoding, "iso-8859-1"}],
+ case catch {ok, xmerl_scan:string(Text1, Options)} of
{ok, {E, _}} ->
E#xmlElement.content;
{'EXIT', {fatal, {Reason, L, _C}}} ->
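
The edoc_wiki change pins the xmerl scanner to Latin-1 so that 8-bit characters in doc comments are not rejected when xmerl guesses a different encoding. Roughly, the call now amounts to the following (shell sketch; the input string is made up):

%% Scan a Latin-1 snippet; without the encoding option xmerl may
%% reject the 8-bit character (\344 is "a with diaeresis").
Opts = [{line, 1}, {encoding, "iso-8859-1"}],
{Doc, _Rest} = xmerl_scan:string("<doc>s\344ger hej</doc>", Opts).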
diff --git a/lib/erl_docgen/doc/src/notes.xml b/lib/erl_docgen/doc/src/notes.xml
index c7a7926c40..7c8a2c8208 100644
--- a/lib/erl_docgen/doc/src/notes.xml
+++ b/lib/erl_docgen/doc/src/notes.xml
@@ -4,7 +4,7 @@
<chapter>
<header>
<copyright>
- <year>2004</year><year>2009</year>
+ <year>2004</year><year>2011</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -29,7 +29,22 @@
<file>notes.xml</file>
</header>
<p>This document describes the changes made to the erl_docgen application.</p>
- <section><title>Erl_Docgen 0.2.3</title>
+
+ <section><title>erl_docgen 0.2.4</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+          <p>Subsections below level 2 were not handled correctly when generating HTML and PDF.</p>
+ <p>
+ Own Id: OTP-90730</p>
+ </item>
+ </list>
+ </section>
+
+ </section>
+
+ <section><title>erl_docgen 0.2.3</title>
<section><title>Fixed Bugs and Malfunctions</title>
<list>
@@ -56,9 +71,9 @@
</list>
</section>
-</section>
+ </section>
-<section><title>Erl_Docgen 0.2.2</title>
+ <section><title>erl_docgen 0.2.2</title>
<section><title>Fixed Bugs and Malfunctions</title>
<list>
@@ -71,9 +86,9 @@
</list>
</section>
-</section>
+ </section>
-<section><title>erl_docgen 0.2.1</title>
+ <section><title>erl_docgen 0.2.1</title>
<section><title>Fixed Bugs and Malfunctions</title>
<list>
diff --git a/lib/erl_docgen/priv/bin/xref_mod_app.escript b/lib/erl_docgen/priv/bin/xref_mod_app.escript
index fcc3a96ada..13671ef2f8 100755
--- a/lib/erl_docgen/priv/bin/xref_mod_app.escript
+++ b/lib/erl_docgen/priv/bin/xref_mod_app.escript
@@ -2,7 +2,7 @@
%% -*- erlang -*-
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -86,8 +86,6 @@ appmods(D) ->
end,
{App, [filename:basename(EF, ".erl") || EF <- ErlFiles]}.
--include_lib("xmerl/include/xmerl.hrl").
-
-define(IND(N), lists:duplicate(N, $\s)).
-define(NL, "\n").
diff --git a/lib/erl_docgen/priv/xsl/db_html.xsl b/lib/erl_docgen/priv/xsl/db_html.xsl
index 732560e303..c6375ea621 100644
--- a/lib/erl_docgen/priv/xsl/db_html.xsl
+++ b/lib/erl_docgen/priv/xsl/db_html.xsl
@@ -3,7 +3,7 @@
#
# %CopyrightBegin%
#
- # Copyright Ericsson AB 2009-2010. All Rights Reserved.
+ # Copyright Ericsson AB 2009-2011. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -556,8 +556,8 @@
</xsl:apply-templates>
</xsl:template>
- <!-- Chapter/Subsection -->
- <xsl:template match="chapter/section/section">
+ <!-- Subsections lvl 3 and ... -->
+ <xsl:template match="section/section">
<xsl:param name="chapnum"/>
<xsl:param name="sectnum"/>
<h4>
@@ -569,8 +569,6 @@
</xsl:apply-templates>
</xsl:template>
-
-
<!-- *ref/Section -->
<xsl:template match="erlref/section|cref/section|comref/section|fileref/section|appref/section">
<xsl:param name="chapnum"/>
diff --git a/lib/erl_docgen/priv/xsl/db_pdf.xsl b/lib/erl_docgen/priv/xsl/db_pdf.xsl
index 1e80c360b8..f500cd3fee 100644
--- a/lib/erl_docgen/priv/xsl/db_pdf.xsl
+++ b/lib/erl_docgen/priv/xsl/db_pdf.xsl
@@ -3,7 +3,7 @@
#
# %CopyrightBegin%
#
- # Copyright Ericsson AB 2009-2010. All Rights Reserved.
+ # Copyright Ericsson AB 2009-2011. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -827,7 +827,7 @@
</xsl:template>
- <!-- Chapter/Subsection -->
+ <!-- Chapter/Subsection -->
<xsl:template match="chapter/section/section">
<xsl:param name="partnum"/>
<xsl:param name="chapnum"/>
@@ -844,6 +844,22 @@
</xsl:template>
+ <!-- Subsection below level 2 -->
+ <xsl:template match="section/section/section">
+ <xsl:param name="partnum"/>
+ <xsl:param name="chapnum"/>
+ <xsl:param name="sectnum"/>
+ <fo:block xsl:use-attribute-sets="h5" id="{generate-id(title)}">
+ <!-- xsl:value-of select="$partnum"/>.<xsl:value-of select="$chapnum"/>.<xsl:value-of select="$sectnum"/>.<xsl:number/ -->
+ <xsl:value-of select="title"/>
+ </fo:block>
+ <xsl:apply-templates>
+ <xsl:with-param name="partnum" select="$partnum"/>
+ <xsl:with-param name="chapnum" select="$chapnum"/>
+ <xsl:with-param name="sectnum" select="$sectnum"/>
+ </xsl:apply-templates>
+ </xsl:template>
+
<!-- *ref/Section -->
<xsl:template match="erlref/section|comref/section|cref/section|fileref/section|appref/section">
diff --git a/lib/erl_docgen/vsn.mk b/lib/erl_docgen/vsn.mk
index fb0f5ca0cd..29585d8520 100644
--- a/lib/erl_docgen/vsn.mk
+++ b/lib/erl_docgen/vsn.mk
@@ -1 +1,2 @@
-ERL_DOCGEN_VSN = 0.2.3
+ERL_DOCGEN_VSN = 0.2.4
+
diff --git a/lib/erl_interface/src/decode/decode_atom.c b/lib/erl_interface/src/decode/decode_atom.c
index b247bd4e17..ef28838b79 100644
--- a/lib/erl_interface/src/decode/decode_atom.c
+++ b/lib/erl_interface/src/decode/decode_atom.c
@@ -31,6 +31,8 @@ int ei_decode_atom(const char *buf, int *index, char *p)
len = get16be(s);
+ if (len > MAXATOMLEN) return -1;
+
if (p) {
memmove(p,s,len);
p[len] = (char)0;
diff --git a/lib/erl_interface/src/decode/decode_pid.c b/lib/erl_interface/src/decode/decode_pid.c
index 5f2aec3b44..48a0c68240 100644
--- a/lib/erl_interface/src/decode/decode_pid.c
+++ b/lib/erl_interface/src/decode/decode_pid.c
@@ -33,6 +33,8 @@ int ei_decode_pid(const char *buf, int *index, erlang_pid *p)
if (get8(s) != ERL_ATOM_EXT) return -1;
len = get16be(s);
+
+ if (len > MAXATOMLEN) return -1;
if (p) {
memmove(p->node, s, len);
diff --git a/lib/erl_interface/src/decode/decode_port.c b/lib/erl_interface/src/decode/decode_port.c
index 7fb7d8d414..296ebae024 100644
--- a/lib/erl_interface/src/decode/decode_port.c
+++ b/lib/erl_interface/src/decode/decode_port.c
@@ -34,6 +34,8 @@ int ei_decode_port(const char *buf, int *index, erlang_port *p)
len = get16be(s);
+ if (len > MAXATOMLEN) return -1;
+
if (p) {
memmove(p->node, s, len);
p->node[len] = (char)0;
diff --git a/lib/erl_interface/src/decode/decode_ref.c b/lib/erl_interface/src/decode/decode_ref.c
index 6fc2cd6533..691b51fe2d 100644
--- a/lib/erl_interface/src/decode/decode_ref.c
+++ b/lib/erl_interface/src/decode/decode_ref.c
@@ -35,6 +35,8 @@ int ei_decode_ref(const char *buf, int *index, erlang_ref *p)
len = get16be(s);
+ if (len > MAXATOMLEN) return -1;
+
if (p) {
memmove(p->node, s, len);
p->node[len] = (char)0;
@@ -62,6 +64,7 @@ int ei_decode_ref(const char *buf, int *index, erlang_ref *p)
/* then the nodename */
if (get8(s) != ERL_ATOM_EXT) return -1;
len = get16be(s);
+ if (len > MAXATOMLEN) return -1;
if (p) {
memmove(p->node, s, len);
diff --git a/lib/erl_interface/src/legacy/erl_connect.c b/lib/erl_interface/src/legacy/erl_connect.c
index 3c8c946506..e77bd5db37 100644
--- a/lib/erl_interface/src/legacy/erl_connect.c
+++ b/lib/erl_interface/src/legacy/erl_connect.c
@@ -180,9 +180,7 @@ int erl_xconnect(Erl_IpAddr addr, char *alivename)
*
* Close a connection. FIXME call ei_close_connection() later.
*
- * Returns valid file descriptor on success and < 0 on failure.
- * Set erl_errno to EHOSTUNREACH, ENOMEM, EIO or errno from socket(2)
- * or connect(2).
+ * Returns 0 on success and -1 on failure.
*
***************************************************************************/
@@ -250,7 +248,8 @@ int erl_send(int fd, ETERM *to ,ETERM *msg)
return -1;
}
- strcpy(topid.node, (char *)ERL_PID_NODE(to));
+ strncpy(topid.node, (char *)ERL_PID_NODE(to), sizeof(topid.node));
+ topid.node[sizeof(topid.node)-1] = '\0';
topid.num = ERL_PID_NUMBER(to);
topid.serial = ERL_PID_SERIAL(to);
topid.creation = ERL_PID_CREATION(to);
diff --git a/lib/erl_interface/src/legacy/erl_format.c b/lib/erl_interface/src/legacy/erl_format.c
index 9848e9296a..b17269213f 100644
--- a/lib/erl_interface/src/legacy/erl_format.c
+++ b/lib/erl_interface/src/legacy/erl_format.c
@@ -116,7 +116,7 @@ static lvar *lvar_alloc(void)
lvar *tmp;
if ((tmp = ef.idle) == NULL) {
- tmp = (lvar *) malloc(sizeof(lvar)); /* FIXME check result */
+ tmp = (lvar *) erl_malloc(sizeof(lvar));
}
else {
tmp = ef.idle;
diff --git a/lib/erl_interface/src/legacy/erl_marshal.c b/lib/erl_interface/src/legacy/erl_marshal.c
index 70949a7adf..5cfb5e2124 100644
--- a/lib/erl_interface/src/legacy/erl_marshal.c
+++ b/lib/erl_interface/src/legacy/erl_marshal.c
@@ -662,7 +662,7 @@ len = i
#define STATIC_NODE_BUF_SZ 30
#define SET_NODE(node,node_buf,cp,len) \
-if (len >= STATIC_NODE_BUF_SZ) node = malloc(len+1); \
+if (len >= STATIC_NODE_BUF_SZ) node = erl_malloc(len+1); \
else node = node_buf; \
memcpy(node, cp, len); \
node[len] = '\0'
@@ -1534,7 +1534,7 @@ static int cmp_string_list(unsigned char **e1, unsigned char **e2) {
if ( e1_len < 256 ) {
bp = buf;
} else {
- bp = malloc(5+(2*e1_len)+1);
+ bp = erl_malloc(5+(2*e1_len)+1);
}
bp[0] = ERL_LIST_EXT;
diff --git a/lib/erl_interface/src/legacy/erl_timeout.c b/lib/erl_interface/src/legacy/erl_timeout.c
index af1a4a1f3a..6ef5d258ed 100644
--- a/lib/erl_interface/src/legacy/erl_timeout.c
+++ b/lib/erl_interface/src/legacy/erl_timeout.c
@@ -74,7 +74,7 @@ jmp_buf *timeout_setup(int ms)
t.it_value.tv_usec = (ms % 1000) * 1000;
/* get a jump buffer and save it */
- j = malloc(sizeof(*j)); /* FIXME check result */
+ j = erl_malloc(sizeof(*j));
j->siginfo = s;
push(j);
diff --git a/lib/erl_interface/src/misc/ei_decode_term.c b/lib/erl_interface/src/misc/ei_decode_term.c
index 75c5dc9460..9b238c1e90 100644
--- a/lib/erl_interface/src/misc/ei_decode_term.c
+++ b/lib/erl_interface/src/misc/ei_decode_term.c
@@ -49,6 +49,7 @@ int ei_decode_ei_term(const char* buf, int* index, ei_term* term)
return ei_decode_double(buf, index, &term->value.d_val);
case ERL_ATOM_EXT:
len = get16be(s);
+ if (len > MAXATOMLEN) return -1;
memcpy(term->value.atom_name, s, len);
term->value.atom_name[len] = '\0';
s += len;
@@ -57,6 +58,7 @@ int ei_decode_ei_term(const char* buf, int* index, ei_term* term)
/* first the nodename */
if (get8(s) != ERL_ATOM_EXT) return -1;
len = get16be(s);
+ if (len > MAXATOMLEN) return -1;
memcpy(term->value.ref.node, s, len);
term->value.ref.node[len] = '\0';
s += len;
@@ -71,6 +73,7 @@ int ei_decode_ei_term(const char* buf, int* index, ei_term* term)
/* then the nodename */
if (get8(s) != ERL_ATOM_EXT) return -1;
len = get16be(s);
+ if (len > MAXATOMLEN) return -1;
memcpy(term->value.ref.node, s, len);
term->value.ref.node[len] = '\0';
s += len;
@@ -87,6 +90,7 @@ int ei_decode_ei_term(const char* buf, int* index, ei_term* term)
case ERL_PORT_EXT:
if (get8(s) != ERL_ATOM_EXT) return -1;
len = get16be(s);
+ if (len > MAXATOMLEN) return -1;
memcpy(term->value.port.node, s, len);
term->value.port.node[len] = '\0';
term->value.port.id = get32be(s) & 0x0fffffff; /* 28 bits */;
@@ -96,6 +100,7 @@ int ei_decode_ei_term(const char* buf, int* index, ei_term* term)
if (get8(s) != ERL_ATOM_EXT) return -1;
/* name first */
len = get16be(s);
+ if (len > MAXATOMLEN) return -1;
memcpy(term->value.pid.node, s, len);
term->value.pid.node[len] = '\0';
s += len;
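
All of the ei/erl_interface decoders above gain the same guard: an atom or node-name length read from the wire is checked against MAXATOMLEN before it is copied into a fixed-size buffer, so over-long input now yields an ordinary decode error (-1) instead of overrunning the buffer. On the Erlang side the corresponding limit is the 255-character cap on atom names; a quick shell check, for orientation only:

%% Creating a 255-character atom works; one character more is refused
%% by the emulator itself with a system_limit error.
_Ok  = list_to_atom(lists:duplicate(255, $a)),
_Err = (catch list_to_atom(lists:duplicate(256, $a))).   %% {'EXIT',{system_limit,_}}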
diff --git a/lib/hipe/cerl/erl_bif_types.erl b/lib/hipe/cerl/erl_bif_types.erl
index 309c118107..30b911c41b 100644
--- a/lib/hipe/cerl/erl_bif_types.erl
+++ b/lib/hipe/cerl/erl_bif_types.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2003-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2003-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -191,127 +191,19 @@ type(binary, referenced_byte_size, 1, Xs) ->
strict(arg_types(binary, referenced_byte_size, 1), Xs,
fun(_) -> t_non_neg_integer() end);
%%-- code ---------------------------------------------------------------------
-type(code, add_path, 1, Xs) ->
- strict(arg_types(code, add_path, 1), Xs,
- fun (_) ->
- t_sup(t_atom('true'),
- t_tuple([t_atom('error'), t_atom('bad_directory')]))
- end);
-type(code, add_patha, 1, Xs) ->
- type(code, add_path, 1, Xs);
-type(code, add_paths, 1, Xs) ->
- strict(arg_types(code, add_paths, 1), Xs, fun(_) -> t_atom('ok') end);
-type(code, add_pathsa, 1, Xs) ->
- type(code, add_paths, 1, Xs);
-type(code, add_pathsz, 1, Xs) ->
- type(code, add_paths, 1, Xs);
-type(code, add_pathz, 1, Xs) ->
- type(code, add_path, 1, Xs);
-type(code, all_loaded, 0, _) ->
- t_list(t_tuple([t_atom(), t_code_loaded_fname_or_status()]));
-type(code, compiler_dir, 0, _) ->
- t_string();
-type(code, del_path, 1, Xs) ->
- strict(arg_types(code, del_path, 1), Xs,
- fun (_) ->
- t_sup(t_boolean(),
- t_tuple([t_atom('error'), t_atom('bad_name')]))
- end);
-type(code, delete, 1, Xs) ->
- strict(arg_types(code, delete, 1), Xs, fun (_) -> t_boolean() end);
-type(code, ensure_loaded, 1, Xs) ->
- type(code, load_file, 1, Xs);
type(code, get_chunk, 2, Xs) ->
strict(arg_types(code, get_chunk, 2), Xs,
fun (_) -> t_sup(t_binary(), t_atom('undefined')) end);
-type(code, get_object_code, 1, Xs) ->
- strict(arg_types(code, get_object_code, 1), Xs,
- fun (_) ->
- t_sup(t_tuple([t_atom(), t_binary(), t_string()]),
- t_atom('error'))
- end);
-type(code, get_path, 0, _) ->
- t_list(t_string());
-type(code, is_loaded, 1, Xs) ->
- strict(arg_types(code, is_loaded, 1), Xs,
- fun (_) ->
- t_sup([t_tuple([t_atom('file'), t_code_loaded_fname_or_status()]),
- t_atom('false')])
- end);
-type(code, is_sticky, 1, Xs) ->
- strict(arg_types(code, is_sticky, 1), Xs, fun (_) -> t_boolean() end);
type(code, is_module_native, 1, Xs) ->
strict(arg_types(code, is_module_native, 1), Xs,
fun (_) -> t_sup(t_boolean(), t_atom('undefined')) end);
-type(code, lib_dir, 0, _) ->
- t_string();
-type(code, lib_dir, 1, Xs) ->
- strict(arg_types(code, lib_dir, 1), Xs,
- fun (_) ->
- t_sup(t_string(),
- t_tuple([t_atom('error'), t_atom('bad_name')]))
- end);
-type(code, load_abs, 1, Xs) ->
- strict(arg_types(code, load_abs, 1), Xs,
- fun ([_File]) -> t_code_load_return(t_atom()) end); % XXX: cheating
-type(code, load_abs, 2, Xs) ->
- strict(arg_types(code, load_abs, 2), Xs,
- fun ([_File,Mod]) -> t_code_load_return(Mod) end);
-type(code, load_binary, 3, Xs) ->
- strict(arg_types(code, load_binary, 3), Xs,
- fun ([Mod,_File,_Bin]) -> t_code_load_return(Mod) end);
-type(code, load_file, 1, Xs) ->
- strict(arg_types(code, load_file, 1), Xs,
- fun ([Mod]) -> t_code_load_return(Mod) end);
-type(code, load_native_partial, 2, Xs) ->
- strict(arg_types(code, load_native_partial, 2), Xs,
- fun ([Mod,_Bin]) -> t_code_load_return(Mod) end);
-type(code, load_native_sticky, 3, Xs) ->
- strict(arg_types(code, load_native_sticky, 3), Xs,
- fun ([Mod,_Bin,_]) -> t_code_load_return(Mod) end);
type(code, module_md5, 1, Xs) ->
strict(arg_types(code, module_md5, 1), Xs,
fun (_) -> t_sup(t_binary(), t_atom('undefined')) end);
type(code, make_stub_module, 3, Xs) ->
strict(arg_types(code, make_stub_module, 3), Xs, fun ([Mod,_,_]) -> Mod end);
-type(code, priv_dir, 1, Xs) ->
- strict(arg_types(code, priv_dir, 1), Xs,
- fun (_) ->
- t_sup(t_string(), t_tuple([t_atom('error'), t_atom('bad_name')]))
- end);
-type(code, purge, 1, Xs) ->
- type(code, delete, 1, Xs);
-type(code, rehash, 0, _) -> t_atom('ok');
-type(code, replace_path, 2, Xs) ->
- strict(arg_types(code, replace_path, 2), Xs,
- fun (_) ->
- t_sup([t_atom('true'),
- t_tuple([t_atom('error'), t_atom('bad_name')]),
- t_tuple([t_atom('error'), t_atom('bad_directory')]),
- t_tuple([t_atom('error'),
- t_tuple([t_atom('badarg'), t_any()])])])
- end);
-type(code, root_dir, 0, _) ->
- t_string();
-type(code, set_path, 1, Xs) ->
- strict(arg_types(code, set_path, 1), Xs,
- fun (_) ->
- t_sup([t_atom('true'),
- t_tuple([t_atom('error'), t_atom('bad_path')]),
- t_tuple([t_atom('error'), t_atom('bad_directory')])])
- end);
-type(code, soft_purge, 1, Xs) ->
- type(code, delete, 1, Xs);
-type(code, stick_mod, 1, Xs) ->
- strict(arg_types(code, stick_mod, 1), Xs, fun (_) -> t_atom('true') end);
-type(code, unstick_mod, 1, Xs) ->
- type(code, stick_mod, 1, Xs);
-type(code, which, 1, Xs) ->
- strict(arg_types(code, which, 1), Xs,
- fun (_) ->
- t_sup([t_code_loaded_fname_or_status(),
- t_atom('non_existing')])
- end);
+type(code, rehash, 0, _) ->
+ t_atom('ok');
%%-- erl_ddll -----------------------------------------------------------------
type(erl_ddll, demonitor, 1, Xs) ->
type(erlang, demonitor, 1, Xs);
@@ -3334,80 +3226,16 @@ arg_types(binary, part, 3) ->
arg_types(binary, referenced_byte_size, 1) ->
[t_binary()];
%%------- code ----------------------------------------------------------------
-arg_types(code, add_path, 1) ->
- [t_string()];
-arg_types(code, add_patha, 1) ->
- arg_types(code, add_path, 1);
-arg_types(code, add_paths, 1) ->
- [t_list(t_string())];
-arg_types(code, add_pathsa, 1) ->
- arg_types(code, add_paths, 1);
-arg_types(code, add_pathsz, 1) ->
- arg_types(code, add_paths, 1);
-arg_types(code, add_pathz, 1) ->
- arg_types(code, add_path, 1);
-arg_types(code, all_loaded, 0) ->
- [];
-arg_types(code, compiler_dir, 0) ->
- [];
-arg_types(code, del_path, 1) ->
- [t_sup(t_string(), t_atom())]; % OBS: differs from add_path/1
-arg_types(code, delete, 1) ->
- [t_atom()];
-arg_types(code, ensure_loaded, 1) ->
- arg_types(code, load_file, 1);
arg_types(code, get_chunk, 2) ->
[t_binary(), t_string()];
-arg_types(code, get_object_code, 1) ->
- [t_atom()];
-arg_types(code, get_path, 0) ->
- [];
-arg_types(code, is_loaded, 1) ->
- [t_atom()];
-arg_types(code, is_sticky, 1) ->
- [t_atom()];
arg_types(code, is_module_native, 1) ->
[t_atom()];
-arg_types(code, lib_dir, 0) ->
- [];
-arg_types(code, lib_dir, 1) ->
- [t_atom()];
-arg_types(code, load_abs, 1) ->
- [t_string()];
-arg_types(code, load_abs, 2) ->
- [t_code_loaded_fname_or_status(), t_atom()];
-arg_types(code, load_binary, 3) ->
- [t_atom(), t_code_loaded_fname_or_status(), t_binary()];
-arg_types(code, load_file, 1) ->
- [t_atom()];
-arg_types(code, load_native_partial, 2) ->
- [t_atom(), t_binary()];
-arg_types(code, load_native_sticky, 3) ->
- [t_atom(), t_binary(), t_sup(t_binary(), t_atom('false'))];
arg_types(code, module_md5, 1) ->
[t_binary()];
arg_types(code, make_stub_module, 3) ->
[t_atom(), t_binary(), t_tuple([t_list(), t_list()])];
-arg_types(code, priv_dir, 1) ->
- [t_atom()];
-arg_types(code, purge, 1) ->
- arg_types(code, delete, 1);
arg_types(code, rehash, 0) ->
[];
-arg_types(code, replace_path, 2) ->
- [t_atom(), t_string()];
-arg_types(code, root_dir, 0) ->
- [];
-arg_types(code, set_path, 1) ->
- [t_list(t_string())];
-arg_types(code, soft_purge, 1) ->
- arg_types(code, delete, 1);
-arg_types(code, stick_mod, 1) ->
- [t_atom()];
-arg_types(code, unstick_mod, 1) ->
- arg_types(code, stick_mod, 1);
-arg_types(code, which, 1) ->
- [t_atom()];
%%------- erl_ddll ------------------------------------------------------------
arg_types(erl_ddll, demonitor, 1) ->
arg_types(erlang, demonitor, 1);
@@ -3536,9 +3364,9 @@ arg_types(erlang, atom_to_binary, 2) ->
arg_types(erlang, atom_to_list, 1) ->
[t_atom()];
arg_types(erlang, binary_part, 2) ->
- [t_binary(), t_tuple([t_integer(),t_integer()])];
+ [t_binary(), t_tuple([t_non_neg_integer(), t_integer()])];
arg_types(erlang, binary_part, 3) ->
- [t_binary(), t_integer(), t_integer()];
+ [t_binary(), t_non_neg_integer(), t_integer()];
arg_types(erlang, binary_to_atom, 2) ->
[t_binary(), t_encoding_a2b()];
arg_types(erlang, binary_to_existing_atom, 2) ->
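The tightened specs above only require the start position to be non-negative; the length stays a plain integer because a negative length extracts backwards from the position. Two illustrative calls (values picked arbitrarily):

    <<3,4>> = binary_part(<<1,2,3,4,5>>, {2, 2}),
    <<3,4>> = binary_part(<<1,2,3,4,5>>, 4, -2).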
@@ -3801,9 +3629,10 @@ arg_types(erlang, nodes, 1) ->
arg_types(erlang, now, 0) ->
[];
arg_types(erlang, open_port, 2) ->
+ ArgT = t_sup(t_unicode_string(), t_binary()),
[t_sup(t_atom(), t_sup([t_tuple([t_atom('spawn'), t_string()]),
t_tuple([t_atom('spawn_driver'), t_string()]),
- t_tuple([t_atom('spawn_executable'), t_sup(t_unicode_string(),t_binary())]),
+ t_tuple([t_atom('spawn_executable'), ArgT]),
t_tuple([t_atom('fd'), t_integer(), t_integer()])])),
t_list(t_sup(t_sup([t_atom('stream'),
t_atom('exit_status'),
@@ -3819,8 +3648,8 @@ arg_types(erlang, open_port, 2) ->
t_tuple([t_atom('line'), t_integer()]),
t_tuple([t_atom('cd'), t_string()]),
t_tuple([t_atom('env'), t_list(t_tuple(2))]), % XXX: More
- t_tuple([t_atom('args'), t_list(t_sup(t_unicode_string(),t_binary()))]),
- t_tuple([t_atom('arg0'),t_sup(t_unicode_string(),t_binary())])])))];
+ t_tuple([t_atom('args'), t_list(ArgT)]),
+ t_tuple([t_atom('arg0'), ArgT])])))];
arg_types(erlang, phash, 2) ->
[t_any(), t_pos_integer()];
arg_types(erlang, phash2, 1) ->
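The ArgT binding introduced above captures that spawn_executable, args and arg0 all accept either a unicode string or a binary. A hedged sketch of a call matching these types (the executable path and arguments are examples only):

    Port = open_port({spawn_executable, "/bin/echo"},
                     [{args, ["hello", <<"world">>]}, exit_status, stream]),
    receive
        {Port, {exit_status, Status}} -> Status
    end.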
@@ -4643,10 +4472,10 @@ t_endian() ->
%% =====================================================================
t_binary_part() ->
- t_tuple([t_non_neg_integer(),t_integer()]).
+ t_tuple([t_non_neg_integer(), t_integer()]).
t_binary_canonical_part() ->
- t_tuple([t_non_neg_integer(),t_non_neg_integer()]).
+ t_tuple([t_non_neg_integer(), t_non_neg_integer()]).
t_binary_pattern() ->
t_sup([t_binary(),
@@ -4654,10 +4483,10 @@ t_binary_pattern() ->
t_binary_compiled_pattern()]).
t_binary_compiled_pattern() ->
- t_tuple([t_atom('cp'),t_binary()]).
+ t_tuple([t_atom('cp'), t_binary()]).
t_binary_options() ->
- t_list(t_tuple([t_atom('scope'),t_binary_part()])).
+ t_list(t_tuple([t_atom('scope'), t_binary_part()])).
%% =====================================================================
%% HTTP types documented in R12B-4
@@ -4807,7 +4636,8 @@ t_process_priority_level() ->
t_sup([t_atom('max'), t_atom('high'), t_atom('normal'), t_atom('low')]).
t_process_status() ->
- t_sup([t_atom('runnable'), t_atom('running'),
+ t_sup([t_atom('exiting'), t_atom('garbage_collecting'),
+ t_atom('runnable'), t_atom('running'),
t_atom('suspended'), t_atom('waiting')]).
t_raise_errorclass() ->
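The two atoms added to t_process_status() above mirror the extra values that process_info(Pid, status) can report; a process asking about itself always sees 'running':

    {status, running} = process_info(self(), status).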
diff --git a/lib/hipe/icode/hipe_icode_range.erl b/lib/hipe/icode/hipe_icode_range.erl
index c7e6a451af..c222e8a5d5 100644
--- a/lib/hipe/icode/hipe_icode_range.erl
+++ b/lib/hipe/icode/hipe_icode_range.erl
@@ -1,20 +1,20 @@
%% -*- erlang-indent-level: 2 -*-
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2009. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2007-2011. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
%%%-------------------------------------------------------------------
@@ -59,15 +59,17 @@
-record(range, {range :: range_rep(),
other :: boolean()}).
+-type range() :: #range{}.
--record(ann, {range :: #range{},
+-record(ann, {range :: range(),
type :: erl_types:erl_type(),
count :: integer()}).
+-type ann() :: #ann{}.
--type range_anno() :: {range_anno, #ann{}, fun((#ann{}) -> string())}.
--type args_fun() :: fun((mfa(),cfg()) -> [#range{}]).
--type call_fun() :: fun((mfa(),[#range{}]) -> #range{}).
--type final_fun() :: fun((mfa(),[#range{}]) -> ok).
+-type range_anno() :: {'range_anno', ann(), fun((ann()) -> string())}.
+-type args_fun() :: fun((mfa(), cfg()) -> [range()]).
+-type call_fun() :: fun((mfa(), [range()]) -> range()).
+-type final_fun() :: fun((mfa(), [range()]) -> 'ok').
-type data() :: {mfa(), args_fun(), call_fun(), final_fun()}.
-type label() :: non_neg_integer().
-type info() :: gb_tree().
@@ -75,15 +77,15 @@
-type variable() :: #icode_variable{}.
-type annotated_variable() :: #icode_variable{}.
-type argument() :: #icode_const{} | variable().
--type three_range_fun() :: fun((#range{},#range{},#range{}) -> #range{}).
+-type three_range_fun() :: fun((range(),range(),range()) -> range()).
-type instr_split_info() :: {icode_instr(), [{label(),info()}]}.
--type last_instr_return() :: {instr_split_info(), #range{}}.
+-type last_instr_return() :: {instr_split_info(), range()}.
-record(state, {info_map = gb_trees:empty() :: info(),
counter = dict:new() :: dict(),
cfg :: cfg(),
liveness = gb_trees:empty() :: gb_tree(),
- ret_type :: #range{},
+ ret_type :: range(),
lookup_fun :: call_fun(),
result_action :: final_fun()}).
@@ -108,8 +110,8 @@ cfg(Cfg, MFA, Options, Servers) ->
-spec concurrent_cfg(cfg(), mfa(), pid()) -> cfg().
concurrent_cfg(Cfg, MFA, CompServer) ->
- CompServer ! {ready, {MFA,self()}},
- {ArgsFun,CallFun,FinalFun} = do_analysis(Cfg, MFA),
+ CompServer ! {ready, {MFA, self()}},
+ {ArgsFun, CallFun, FinalFun} = do_analysis(Cfg, MFA),
Ans = do_rewrite(Cfg, MFA, ArgsFun, CallFun, FinalFun),
CompServer ! {done_rewrite, MFA},
Ans.
@@ -227,7 +229,7 @@ analyse_block(Label, Info, State, Rewrite) ->
state__update_info(State2, InfoList, Rewrite).
-spec analyse_BB([icode_instr()], info(), [icode_instr()], boolean(), call_fun()) ->
- {[icode_instr()], [{label(),info()}], #range{}}.
+ {[icode_instr()], [{label(),info()}], range()}.
analyse_BB([Last], Info, Code, Rewrite, LookupFun) ->
{{NewI, LabelInfoList}, RetType} =
@@ -266,9 +268,9 @@ handle_args(I, Info, WidenFun) ->
%% io:format("Uses: ~p~nRanges: ~p~n", [Uses, PresentRanges]),
JoinFun = fun(Var, Range) -> update_info(Var, Range, WidenFun) end,
NewUses = lists:zipwith(JoinFun, Uses, PresentRanges),
- hipe_icode:subst_uses(lists:zip(Uses, NewUses),I).
+ hipe_icode:subst_uses(lists:zip(Uses, NewUses), I).
--spec join_info(#ann{}, #range{}, three_range_fun()) -> #ann{}.
+-spec join_info(ann(), range(), three_range_fun()) -> ann().
join_info(Ann = #ann{range = R1, type = Type, count = ?WIDEN}, R2, Fun) ->
Ann#ann{range = Fun(R1, R2, range_from_simple_type(Type))};
@@ -278,17 +280,17 @@ join_info(Ann = #ann{range = R1, type = Type, count = C}, R2, _Fun) when C < ?WI
NewR -> Ann#ann{range = NewR, count = C+1}
end.
--spec join_three(#range{}, #range{}, #range{}) -> #range{}.
+-spec join_three(range(), range(), range()) -> range().
join_three(R1, R2, R3) ->
inf(sup(R1, R2), R3).
--spec update_info(variable(), #range{}) -> annotated_variable().
+-spec update_info(variable(), range()) -> annotated_variable().
update_info(Var, Range) ->
update_info(Var, Range, fun update_three/3).
--spec update_info(variable(), #range{}, three_range_fun()) -> annotated_variable().
+-spec update_info(variable(), range(), three_range_fun()) -> annotated_variable().
update_info(Arg, R, Fun) ->
case hipe_icode:is_annotated_variable(Arg) of
@@ -299,7 +301,7 @@ update_info(Arg, R, Fun) ->
Arg
end.
--spec update_info1(any(), #range{}, three_range_fun()) -> range_anno().
+-spec update_info1(any(), range(), three_range_fun()) -> range_anno().
update_info1({range_anno, Ann, _}, R2, Fun) ->
make_range_anno(update_ann(Ann,R2,Fun));
@@ -314,71 +316,71 @@ update_ann(Ann = #ann{range = R1, type = Type, count = C}, R2, _Fun) ->
NewR -> Ann#ann{range = NewR, count = C+1}
end.
--spec type_to_ann(erl_types:erl_type()) -> #ann{}.
+-spec type_to_ann(erl_types:erl_type()) -> ann().
type_to_ann(Type) ->
- #ann{range = range_from_simple_type(Type), type = t_limit(Type,1), count=1}.
+ #ann{range = range_from_simple_type(Type), type = t_limit(Type,1), count = 1}.
--spec make_range_anno(#ann{}) -> range_anno().
+-spec make_range_anno(ann()) -> range_anno().
make_range_anno(Ann) ->
{range_anno, Ann, fun pp_ann/1}.
--spec update_three(#range{}, #range{}, #range{}) -> #range{}.
+-spec update_three(range(), range(), range()) -> range().
update_three(_R1, R2, R3) ->
inf(R2, R3).
--spec safe_widen(#range{}, #range{}, #range{}) -> #range{}.
+-spec safe_widen(range(), range(), range()) -> range().
safe_widen(#range{range=Old}, #range{range=New}, T = #range{range=Wide}) ->
ResRange =
- case {Old,New,Wide} of
- {{Min,Max1},{Min,Max2},{_,Max}} ->
- case inf_geq(OMax = next_up_limit(inf_max([Max1,Max2])),Max) of
+ case {Old, New, Wide} of
+ {{Min,Max1}, {Min,Max2}, {_,Max}} ->
+ case inf_geq(OMax = next_up_limit(inf_max([Max1, Max2])), Max) of
true -> {Min,Max};
false -> {Min,OMax}
end;
- {{Min1,Max},{Min2,Max},{Min,_}} ->
- case inf_geq(Min, OMin = next_down_limit(inf_min([Min1,Min2]))) of
+ {{Min1,Max}, {Min2,Max}, {Min,_}} ->
+ case inf_geq(Min, OMin = next_down_limit(inf_min([Min1, Min2]))) of
true -> {Min,Max};
false -> {OMin,Max}
end;
- {{Min1,Max1},{Min2,Max2},{Min,Max}} ->
+ {{Min1,Max1}, {Min2,Max2}, {Min,Max}} ->
RealMax =
- case inf_geq(OMax = next_up_limit(inf_max([Max1,Max2])),Max) of
+ case inf_geq(OMax = next_up_limit(inf_max([Max1, Max2])), Max) of
true -> Max;
false -> OMax
end,
RealMin =
- case inf_geq(Min, OMin = next_down_limit(inf_min([Min1,Min2]))) of
+ case inf_geq(Min, OMin = next_down_limit(inf_min([Min1, Min2]))) of
true -> Min;
false -> OMin
end,
- {RealMin,RealMax};
+ {RealMin, RealMax};
_ ->
Wide
end,
- T#range{range=ResRange}.
+ T#range{range = ResRange}.
--spec widen(#range{}, #range{}, #range{}) -> #range{}.
+-spec widen(range(), range(), range()) -> range().
widen(#range{range=Old}, #range{range=New}, T = #range{range=Wide}) ->
ResRange =
- case {Old,New,Wide} of
- {{Min,_},{Min,Max2},{_,Max}} ->
- case inf_geq(OMax = next_up_limit(Max2),Max) of
+ case {Old, New, Wide} of
+ {{Min,_}, {Min,Max2}, {_,Max}} ->
+ case inf_geq(OMax = next_up_limit(Max2), Max) of
true -> {Min,Max};
false -> {Min,OMax}
end;
- {{_,Max},{Min2,Max},{Min,_}} ->
+ {{_,Max}, {Min2,Max}, {Min,_}} ->
case inf_geq(Min, OMin = next_down_limit(Min2)) of
true -> {Min,Max};
false -> {OMin,Max}
end;
- {_,{Min2,Max2},{Min,Max}} ->
+ {_, {Min2,Max2}, {Min,Max}} ->
RealMax =
- case inf_geq(OMax = next_up_limit(Max2),Max) of
+ case inf_geq(OMax = next_up_limit(Max2), Max) of
true -> Max;
false -> OMax
end,
@@ -387,11 +389,11 @@ widen(#range{range=Old}, #range{range=New}, T = #range{range=Wide}) ->
true -> Min;
false -> OMin
end,
- {RealMin,RealMax};
+ {RealMin, RealMax};
_ ->
Wide
end,
- T#range{range=ResRange}.
+ T#range{range = ResRange}.
-spec analyse_call(#icode_call{}, call_fun()) -> #icode_call{}.
@@ -421,7 +423,7 @@ analyse_move(Move) ->
analyse_begin_handler(Handler) ->
SubstList =
- [{Dst,update_info(Dst,any_type())} ||
+ [{Dst, update_info(Dst, any_type())} ||
Dst <- hipe_icode:begin_handler_dstlist(Handler)],
hipe_icode:subst_defines(SubstList, Handler).
@@ -494,14 +496,14 @@ analyse_switch_val(Switch, Info, Rewrite) ->
end
end.
--spec update_infos(argument(), info(), [{#range{},label()}]) -> [{label(),info()}].
+-spec update_infos(argument(), info(), [{range(),label()}]) -> [{label(),info()}].
update_infos(Arg, Info, [{Range, Label}|Rest]) ->
- [{Label,enter_define({Arg,Range},Info)} | update_infos(Arg,Info,Rest)];
+ [{Label,enter_define({Arg,Range},Info)} | update_infos(Arg, Info, Rest)];
update_infos(_, _, []) -> [].
--spec get_range_label_list([{argument(),label()}], #range{}, [{#range{},label()}]) ->
- {#range{},[{#range{},label()}]}.
+-spec get_range_label_list([{argument(),label()}], range(), [{range(),label()}]) ->
+ {range(),[{range(),label()}]}.
get_range_label_list([{Val,Label}|Cases], SRange, Acc) ->
VRange = get_range_from_arg(Val),
@@ -516,7 +518,7 @@ get_range_label_list([], SRange, Acc) ->
{PointTypes, _} = lists:unzip(Acc),
{remove_point_types(SRange, PointTypes), Acc}.
--spec update_switch(#icode_switch_val{}, [{#range{},label()}], boolean()) ->
+-spec update_switch(#icode_switch_val{}, [{range(),label()}], boolean()) ->
#icode_switch_val{}.
update_switch(Switch, LabelRangeList, KeepFail) ->
@@ -524,14 +526,14 @@ update_switch(Switch, LabelRangeList, KeepFail) ->
case label_range_list_to_cases(LabelRangeList, []) of
no_update ->
Switch;
- Cases ->
+ Cases ->
hipe_icode:switch_val_cases_update(Switch, Cases)
end,
if KeepFail -> S2;
true -> S2
end.
--spec label_range_list_to_cases([{#range{},label()}], [{#icode_const{},label()}]) ->
+-spec label_range_list_to_cases([{range(),label()}], [{#icode_const{},label()}]) ->
'no_update' | [{#icode_const{},label()}].
label_range_list_to_cases([{#range{range={C,C},other=false},Label}|Rest],
@@ -586,9 +588,9 @@ analyse_last_call(Call, Info, LookupFun) ->
NewInfo = enter_vals(NewI, Info),
case hipe_icode:call_fail_label(Call) of
[] ->
- {NewI, [{Continuation,NewInfo}]};
+ {NewI, [{Continuation, NewInfo}]};
Fail ->
- {NewI, [{Continuation,NewInfo}, {Fail,Info}]}
+ {NewI, [{Continuation, NewInfo}, {Fail, Info}]}
end.
-spec analyse_if(#icode_if{}, info(), boolean()) ->
@@ -596,16 +598,16 @@ analyse_last_call(Call, Info, LookupFun) ->
analyse_if(If, Info, Rewrite) ->
case hipe_icode:if_args(If) of
- Args = [_,_] ->
+ [_, _] = Args ->
analyse_sane_if(If, Info, Args, get_range_from_args(Args), Rewrite);
_ ->
TrueLabel = hipe_icode:if_true_label(If),
FalseLabel = hipe_icode:if_false_label(If),
- {If, [{TrueLabel,Info},{FalseLabel,Info}]}
+ {If, [{TrueLabel, Info}, {FalseLabel, Info}]}
end.
-spec analyse_sane_if(#icode_if{}, info(), [argument(),...],
- [#range{},...], boolean()) ->
+ [range(),...], boolean()) ->
{#icode_goto{} | #icode_if{}, [{label(), info()}]}.
analyse_sane_if(If, Info, [Arg1, Arg2], [Range1, Range2], Rewrite) ->
@@ -613,59 +615,61 @@ analyse_sane_if(If, Info, [Arg1, Arg2], [Range1, Range2], Rewrite) ->
'>' ->
{TrueRange2, TrueRange1, FalseRange2, FalseRange1} =
range_inequality_propagation(Range2, Range1);
- '==' ->
- {TempTrueRange1, TempTrueRange2, FalseRange1, FalseRange2}=
- range_equality_propagation(Range1, Range2),
- TrueRange1 = set_other(TempTrueRange1,other(Range1)),
- TrueRange2 = set_other(TempTrueRange2,other(Range2));
'<' ->
- {TrueRange1, TrueRange2, FalseRange1, FalseRange2} =
+ {TrueRange1, TrueRange2, FalseRange1, FalseRange2} =
range_inequality_propagation(Range1, Range2);
'>=' ->
{FalseRange1, FalseRange2, TrueRange1, TrueRange2} =
range_inequality_propagation(Range1, Range2);
'=<' ->
- {FalseRange2, FalseRange1, TrueRange2, TrueRange1} =
+ {FalseRange2, FalseRange1, TrueRange2, TrueRange1} =
range_inequality_propagation(Range2, Range1);
'=:=' ->
- {TrueRange1, TrueRange2, FalseRange1, FalseRange2}=
+ {TrueRange1, TrueRange2, FalseRange1, FalseRange2} =
range_equality_propagation(Range1, Range2);
'=/=' ->
{FalseRange1, FalseRange2, TrueRange1, TrueRange2} =
range_equality_propagation(Range1, Range2);
+ '==' ->
+ {TempTrueRange1, TempTrueRange2, FalseRange1, FalseRange2} =
+ range_equality_propagation(Range1, Range2),
+ TrueRange1 = set_other(TempTrueRange1, other(Range1)),
+ TrueRange2 = set_other(TempTrueRange2, other(Range2));
'/=' ->
- {TempFalseRange1, TempFalseRange2, TrueRange1, TrueRange2}=
+ {TempFalseRange1, TempFalseRange2, TrueRange1, TrueRange2} =
range_equality_propagation(Range1, Range2),
- FalseRange1 = set_other(TempFalseRange1,other(Range1)),
- FalseRange2 = set_other(TempFalseRange2,other(Range2))
+ FalseRange1 = set_other(TempFalseRange1, other(Range1)),
+ FalseRange2 = set_other(TempFalseRange2, other(Range2))
end,
- TrueLabel = hipe_icode:if_true_label(If),
- FalseLabel = hipe_icode:if_false_label(If),
- TrueInfo =
- enter_defines([{Arg1,TrueRange1}, {Arg2,TrueRange2}],Info),
- FalseInfo =
- enter_defines([{Arg1,FalseRange1}, {Arg2,FalseRange2}],Info),
- True =
- case lists:any(fun range__is_none/1,[TrueRange1,TrueRange2]) of
+ %% io:format("TR1 = ~w\nTR2 = ~w\n", [TrueRange1, TrueRange2]),
+ True =
+ case lists:all(fun range__is_none/1, [TrueRange1, TrueRange2]) of
true -> [];
- false -> [{TrueLabel,TrueInfo}]
+ false ->
+ TrueLabel = hipe_icode:if_true_label(If),
+ TrueArgRanges = [{Arg1, TrueRange1}, {Arg2, TrueRange2}],
+ TrueInfo = enter_defines(TrueArgRanges, Info),
+ [{TrueLabel, TrueInfo}]
end,
- False =
- case lists:any(fun range__is_none/1, [FalseRange1,FalseRange2]) of
+ %% io:format("FR1 = ~w\nFR2 = ~w\n", [FalseRange1, FalseRange2]),
+ False =
+ case lists:all(fun range__is_none/1, [FalseRange1, FalseRange2]) of
true -> [];
- false -> [{FalseLabel,FalseInfo}]
+ false ->
+ FalseLabel = hipe_icode:if_false_label(If),
+ FalseArgRanges = [{Arg1, FalseRange1}, {Arg2, FalseRange2}],
+ FalseInfo = enter_defines(FalseArgRanges, Info),
+ [{FalseLabel, FalseInfo}]
end,
- UpdateInfo = True++False,
+ UpdateInfo = True ++ False,
NewIF =
if Rewrite ->
- %%io:format("~w~n~w~n", [{Arg1,FalseRange1},{Arg2,FalseRange2}]),
- %%io:format("Any none: ~w~n", [lists:any(fun range__is_none/1,[FalseRange1,FalseRange2])]),
case UpdateInfo of
- [] -> %%This is weird
+ [] -> %% This is weird
If;
- [{Label,_Info}] ->
+ [{Label, _Info}] ->
hipe_icode:mk_goto(Label);
- [_,_] ->
+ [_, _] ->
If
end;
true ->
@@ -686,13 +690,13 @@ normalize_name(Name) ->
Name -> Name
end.
--spec range_equality_propagation(#range{}, #range{}) ->
- {#range{}, #range{}, #range{}, #range{}}.
+-spec range_equality_propagation(range(), range()) ->
+ {range(), range(), range(), range()}.
range_equality_propagation(Range_1, Range_2) ->
True_range = inf(Range_1, Range_2),
case {range(Range_1), range(Range_2)} of
- {{N,N},{ N,N}} ->
+ {{N,N}, {N,N}} ->
False_range_1 = none_range(),
False_range_2 = none_range();
{{N1,N1}, {N2,N2}} ->
@@ -710,8 +714,8 @@ range_equality_propagation(Range_1, Range_2) ->
end,
{True_range, True_range, False_range_1, False_range_2}.
--spec range_inequality_propagation(#range{}, #range{}) ->
- {#range{}, #range{}, #range{}, #range{}}.
+-spec range_inequality_propagation(range(), range()) ->
+ {range(), range(), range(), range()}.
%% Range1 < Range2
range_inequality_propagation(Range1, Range2) ->
@@ -781,26 +785,24 @@ analyse_type(Type, Info, Rewrite) ->
TrueRange = inf(any_range(), OldVarRange),
FalseRange = inf(none_range(), OldVarRange);
_ ->
- TrueRange = inf(none_range(),OldVarRange),
+ TrueRange = inf(none_range(), OldVarRange),
FalseRange = OldVarRange
end,
TrueLabel = hipe_icode:type_true_label(Type),
FalseLabel = hipe_icode:type_false_label(Type),
- TrueInfo =
- enter_define({Arg,TrueRange},Info),
- FalseInfo =
- enter_define({Arg,FalseRange},Info),
- True =
+ TrueInfo = enter_define({Arg, TrueRange}, Info),
+ FalseInfo = enter_define({Arg, FalseRange}, Info),
+ True =
case range__is_none(TrueRange) of
true -> [];
- false -> [{TrueLabel,TrueInfo}]
+ false -> [{TrueLabel, TrueInfo}]
end,
- False =
+ False =
case range__is_none(FalseRange) of
true -> [];
- false -> [{FalseLabel,FalseInfo}]
+ false -> [{FalseLabel, FalseInfo}]
end,
- UpdateInfo = True++False,
+ UpdateInfo = True ++ False,
NewType =
if Rewrite ->
case UpdateInfo of
@@ -808,15 +810,15 @@ analyse_type(Type, Info, Rewrite) ->
Type;
[{Label,_Info}] ->
hipe_icode:mk_goto(Label);
- [_,_] ->
+ [_, _] ->
Type
end;
true ->
Type
end,
- {NewType,True ++ False}.
+ {NewType, True ++ False}.
--spec compare_with_integer(integer(), #range{}) -> {#range{}, #range{}}.
+-spec compare_with_integer(integer(), range()) -> {range(), range()}.
compare_with_integer(N, OldVarRange) ->
TestRange = range_init({N, N}, false),
@@ -843,13 +845,13 @@ compare_with_integer(N, OldVarRange) ->
%%== Ranges ==================================================================
--spec pp_ann(#ann{} | erl_types:erl_type()) -> string().
+-spec pp_ann(ann() | erl_types:erl_type()) -> string().
-pp_ann(#ann{range=#range{range=R, other=false}}) ->
+pp_ann(#ann{range = #range{range = R, other = false}}) ->
pp_range(R);
-pp_ann(#ann{range=#range{range=empty, other=true}, type=Type}) ->
+pp_ann(#ann{range = #range{range = empty, other = true}, type = Type}) ->
t_to_string(Type);
-pp_ann(#ann{range=#range{range=R, other=true}, type=Type}) ->
+pp_ann(#ann{range = #range{range = R, other = true}, type = Type}) ->
pp_range(R) ++ " | " ++ t_to_string(Type);
pp_ann(Type) ->
t_to_string(Type).
@@ -867,12 +869,12 @@ val_to_string(pos_inf) -> "inf";
val_to_string(neg_inf) -> "-inf";
val_to_string(X) when is_integer(X) -> integer_to_list(X).
--spec range_from_type(erl_types:erl_type()) -> [#range{}].
+-spec range_from_type(erl_types:erl_type()) -> [range()].
range_from_type(Type) ->
[range_from_simple_type(T) || T <- t_to_tlist(Type)].
--spec range_from_simple_type(erl_types:erl_type()) -> #range{}.
+-spec range_from_simple_type(erl_types:erl_type()) -> range().
range_from_simple_type(Type) ->
None = t_none(),
@@ -887,7 +889,7 @@ range_from_simple_type(Type) ->
#range{range = Range, other = true}
end.
--spec range_init(range_rep(), boolean()) -> #range{}.
+-spec range_init(range_rep(), boolean()) -> range().
range_init({Min, Max} = Range, Other) ->
case inf_geq(Max, Min) of
@@ -899,39 +901,39 @@ range_init({Min, Max} = Range, Other) ->
range_init(empty, Other) ->
#range{range = empty, other = Other}.
--spec range(#range{}) -> range_rep().
+-spec range(range()) -> range_rep().
range(#range{range = R}) -> R.
--spec other(#range{}) -> boolean().
+-spec other(range()) -> boolean().
other(#range{other = O}) -> O.
--spec set_other(#range{}, boolean()) -> #range{}.
+-spec set_other(range(), boolean()) -> range().
set_other(R, O) -> R#range{other = O}.
--spec range__min(#range{}) -> 'empty' | 'neg_inf' | integer().
+-spec range__min(range()) -> 'empty' | 'neg_inf' | integer().
-range__min(#range{range=empty}) -> empty;
-range__min(#range{range={Min,_}}) -> Min.
+range__min(#range{range = empty}) -> empty;
+range__min(#range{range = {Min,_}}) -> Min.
--spec range__max(#range{}) -> 'empty' | 'pos_inf' | integer().
+-spec range__max(range()) -> 'empty' | 'pos_inf' | integer().
-range__max(#range{range=empty}) -> empty;
-range__max(#range{range={_,Max}}) -> Max.
+range__max(#range{range = empty}) -> empty;
+range__max(#range{range = {_,Max}}) -> Max.
--spec range__is_none(#range{}) -> boolean().
+-spec range__is_none(range()) -> boolean().
-range__is_none(#range{range=empty, other=false}) -> true;
+range__is_none(#range{range = empty, other = false}) -> true;
range__is_none(#range{}) -> false.
--spec range__is_empty(#range{}) -> boolean().
+-spec range__is_empty(range()) -> boolean().
-range__is_empty(#range{range=empty}) -> true;
-range__is_empty(#range{range={_,_}}) -> false.
+range__is_empty(#range{range = empty}) -> true;
+range__is_empty(#range{range = {_,_}}) -> false.
--spec remove_point_types(#range{}, [#range{}]) -> #range{}.
+-spec remove_point_types(range(), [range()]) -> range().
remove_point_types(Range, Ranges) ->
Sorted = lists:sort(Ranges),
@@ -939,35 +941,35 @@ remove_point_types(Range, Ranges) ->
Range1 = lists:foldl(FoldFun, Range, Sorted),
lists:foldl(FoldFun, Range1, lists:reverse(Sorted)).
--spec range__remove_constant(#range{}, #range{}) -> #range{}.
+-spec range__remove_constant(range(), range()) -> range().
-range__remove_constant(R = #range{range={C,C}}, #range{range={C,C}}) ->
- R#range{range=empty};
-range__remove_constant(R = #range{range={C,H}}, #range{range={C,C}}) ->
- R#range{range={C+1,H}};
-range__remove_constant(R = #range{range={L,C}}, #range{range={C,C}}) ->
- R#range{range={L,C-1}};
-range__remove_constant(R = #range{}, #range{range={C,C}}) ->
+range__remove_constant(#range{range = {C, C}} = R, #range{range = {C, C}}) ->
+ R#range{range = empty};
+range__remove_constant(#range{range = {C, H}} = R, #range{range = {C, C}}) ->
+ R#range{range = {C+1, H}};
+range__remove_constant(#range{range = {L, C}} = R, #range{range = {C, C}}) ->
+ R#range{range = {L, C-1}};
+range__remove_constant(#range{} = R, #range{range = {C,C}}) ->
R;
-range__remove_constant(R = #range{}, _) ->
+range__remove_constant(#range{} = R, _) ->
R.
--spec any_type() -> #range{}.
+-spec any_type() -> range().
any_type() ->
- #range{range=any_r(), other=true}.
+ #range{range = any_r(), other = true}.
--spec any_range() -> #range{}.
+-spec any_range() -> range().
any_range() ->
- #range{range=any_r(), other=false}.
+ #range{range = any_r(), other = false}.
--spec none_range() -> #range{}.
+-spec none_range() -> range().
none_range() ->
- #range{range=empty, other=true}.
+ #range{range = empty, other = true}.
--spec none_type() -> #range{}.
+-spec none_type() -> range().
none_type() ->
#range{range = empty, other = false}.
@@ -976,12 +978,12 @@ none_type() ->
any_r() -> {neg_inf, pos_inf}.
--spec get_range_from_args([argument()]) -> [#range{}].
+-spec get_range_from_args([argument()]) -> [range()].
get_range_from_args(Args) ->
[get_range_from_arg(Arg) || Arg <- Args].
--spec get_range_from_arg(argument()) -> #range{}.
+-spec get_range_from_arg(argument()) -> range().
get_range_from_arg(Arg) ->
case hipe_icode:is_const(Arg) of
@@ -989,15 +991,15 @@ get_range_from_arg(Arg) ->
Value = hipe_icode:const_value(Arg),
case is_integer(Value) of
true ->
- #range{range={Value,Value}, other=false};
+ #range{range = {Value, Value}, other = false};
false ->
- #range{range=empty, other=true}
+ #range{range = empty, other = true}
end;
false ->
case hipe_icode:is_annotated_variable(Arg) of
true ->
case hipe_icode:variable_annotation(Arg) of
- {range_anno, #ann{range=Range}, _} ->
+ {range_anno, #ann{range = Range}, _} ->
Range;
{type_anno, Type, _} ->
range_from_simple_type(Type)
@@ -1012,7 +1014,7 @@ get_range_from_arg(Arg) ->
%% inf([R1,R2|Rest]) ->
%% inf([inf(R1,R2)|Rest]).
--spec inf(#range{}, #range{}) -> #range{}.
+-spec inf(range(), range()) -> range().
inf(#range{range=R1, other=O1}, #range{range=R2, other=O2}) ->
#range{range=range_inf(R1,R2), other=other_inf(O1,O2)}.
@@ -1022,8 +1024,8 @@ inf(#range{range=R1, other=O1}, #range{range=R2, other=O2}) ->
range_inf(empty, _) -> empty;
range_inf(_, empty) -> empty;
range_inf({Min1,Max1}, {Min2,Max2}) ->
- NewMin = inf_max([Min1,Min2]),
- NewMax = inf_min([Max1,Max2]),
+ NewMin = inf_max([Min1, Min2]),
+ NewMax = inf_min([Max1, Max2]),
case inf_geq(NewMax, NewMin) of
true ->
{NewMin, NewMax};
@@ -1035,14 +1037,14 @@ range_inf({Min1,Max1}, {Min2,Max2}) ->
other_inf(O1, O2) -> O1 and O2.
--spec sup([#range{},...]) -> #range{}.
+-spec sup([range(),...]) -> range().
sup([R]) ->
R;
sup([R1,R2|Rest]) ->
sup([sup(R1, R2)|Rest]).
--spec sup(#range{}, #range{}) -> #range{}.
+-spec sup(range(), range()) -> range().
sup(#range{range=R1,other=O1}, #range{range=R2,other=O2}) ->
#range{range=range_sup(R1,R2), other=other_sup(O1,O2)}.
@@ -1063,7 +1065,7 @@ other_sup(O1, O2) -> O1 or O2.
%%== Call Support =============================================================
-spec analyse_call_or_enter_fun(fun_name(), [argument()],
- icode_call_type(), call_fun()) -> [#range{}].
+ icode_call_type(), call_fun()) -> [range()].
analyse_call_or_enter_fun(Fun, Args, CallType, LookupFun) ->
%%io:format("Fun: ~p~n Args: ~p~n CT: ~p~n LF: ~p~n", [Fun, Args, CallType, LookupFun]),
@@ -1105,19 +1107,19 @@ analyse_call_or_enter_fun(Fun, Args, CallType, LookupFun) ->
[any_type()];
{hipe_bs_primop, {bs_get_integer, Size, Flags}} ->
{Min, Max} = analyse_bs_get_integer(Size, Flags, length(Args) =:= 1),
- [#range{range={Min, Max}, other=false}, any_type()];
+ [#range{range = {Min, Max}, other = false}, any_type()];
{hipe_bs_primop, _} = Primop ->
Type = hipe_icode_primops:type(Primop),
range_from_type(Type)
end.
--type bin_operation() :: fun((#range{},#range{}) -> #range{}).
--type unary_operation() :: fun((#range{}) -> #range{}).
+-type bin_operation() :: fun((range(), range()) -> range()).
+-type unary_operation() :: fun((range()) -> range()).
-spec basic_type(fun_name()) -> 'not_int' | 'not_analysed'
- | {bin, bin_operation()}
- | {unary, unary_operation()}
- | {fcall, mfa()} | {hipe_bs_primop, _}.
+ | {'bin', bin_operation()}
+ | {'unary', unary_operation()}
+ | {'fcall', mfa()} | {'hipe_bs_primop', _}.
%% Arithmetic operations
basic_type('+') -> {bin, fun(R1, R2) -> range_add(R1, R2) end};
@@ -1214,7 +1216,7 @@ analyse_bs_get_integer(Size, Flags, false) when is_integer(Size),
%% Arithmetic
--spec range_add(#range{}, #range{}) -> #range{}.
+-spec range_add(range(), range()) -> range().
range_add(Range1, Range2) ->
NewMin = inf_add(range__min(Range1), range__min(Range2)),
@@ -1222,7 +1224,7 @@ range_add(Range1, Range2) ->
Other = other(Range1) orelse other(Range2),
range_init({NewMin, NewMax}, Other).
--spec range_sub(#range{}, #range{}) -> #range{}.
+-spec range_sub(range(), range()) -> range().
range_sub(Range1, Range2) ->
Min_sub = inf_min([inf_inv(range__max(Range2)),
@@ -1234,7 +1236,7 @@ range_sub(Range1, Range2) ->
Other = other(Range1) orelse other(Range2),
range_init({NewMin, NewMax}, Other).
--spec range_mult(#range{}, #range{}) -> #range{}.
+-spec range_mult(range(), range()) -> range().
range_mult(#range{range=empty, other=true}, _Range2) ->
range_init(empty, true);
@@ -1274,7 +1276,7 @@ range_mult(Range1, Range2) ->
Other = other(Range1) orelse other(Range2),
range_init(Range, Other).
--spec extreme_divisors(#range{}) -> range_tuple().
+-spec extreme_divisors(range()) -> range_tuple().
extreme_divisors(#range{range={0,0}}) -> {0,0};
extreme_divisors(#range{range={0,Max}}) -> {1,Max};
@@ -1289,7 +1291,7 @@ extreme_divisors(#range{range={Min,Max}}) ->
end
end.
--spec range_div(#range{}, #range{}) -> #range{}.
+-spec range_div(range(), range()) -> range().
%% this is div, not /.
range_div(_, #range{range={0,0}}) ->
@@ -1306,7 +1308,7 @@ range_div(Range1, Den) ->
inf_div(Max1, Min2), inf_div(Max1, Max2)],
range_init({inf_min(Min_max_list), inf_max(Min_max_list)}, false).
--spec range_rem(#range{}, #range{}) -> #range{}.
+-spec range_rem(range(), range()) -> range().
range_rem(Range1, Range2) ->
%% Range1 desides the sign of the answer.
@@ -1332,7 +1334,7 @@ range_rem(Range1, Range2) ->
%%--- Bit operations ----------------------------
--spec range_bsr(#range{}, #range{}) -> #range{}.
+-spec range_bsr(range(), range()) -> range().
range_bsr(Range1, Range2=#range{range={Min, Max}}) ->
New_Range2 = range_init({inf_inv(Max), inf_inv(Min)}, other(Range2)),
@@ -1340,7 +1342,7 @@ range_bsr(Range1, Range2=#range{range={Min, Max}}) ->
%% io:format("bsr res:~w~nInput:= ~w~n", [Ans, {Range1,Range2}]),
Ans.
--spec range_bsl(#range{}, #range{}) -> #range{}.
+-spec range_bsl(range(), range()) -> range().
range_bsl(Range1, Range2) ->
Min1 = range__min(Range1),
@@ -1359,7 +1361,7 @@ range_bsl(Range1, Range2) ->
end,
range_init(MinMax, false).
--spec range_bnot(#range{}) -> #range{}.
+-spec range_bnot(range()) -> range().
range_bnot(Range) ->
Minus_one = range_init({-1,-1}, false),
@@ -1389,7 +1391,7 @@ negwidth(X, N) ->
false -> negwidth(X, N+1)
end.
--spec range_band(#range{}, #range{}) -> #range{}.
+-spec range_band(range(), range()) -> range().
range_band(R1, R2) ->
{_Min1, Max1} = MM1 = range(R1),
@@ -1423,7 +1425,7 @@ range_band(R1, R2) ->
end,
range_init(Range, false).
--spec range_bor(#range{}, #range{}) -> #range{}.
+-spec range_bor(range(), range()) -> range().
range_bor(R1, R2) ->
{Min1, _Max1} = MM1 = range(R1),
@@ -1457,7 +1459,7 @@ range_bor(R1, R2) ->
end,
range_init(Range, false).
--spec classify_range(#range{}) -> 'minus_minus' | 'minus_plus' | 'plus_plus'.
+-spec classify_range(range()) -> 'minus_minus' | 'minus_plus' | 'plus_plus'.
classify_range(Range) ->
case range(Range) of
@@ -1480,7 +1482,7 @@ classify_int_range(_Number1, Number2) when Number2 < 0 ->
classify_int_range(_Number1, _Number2) ->
minus_plus.
--spec range_bxor(#range{}, #range{}) -> #range{}.
+-spec range_bxor(range(), range()) -> range().
range_bxor(R1, R2) ->
{Min1, Max1} = MM1 = range(R1),
@@ -1895,7 +1897,7 @@ convert_ann_to_types(#ann{range=#range{other=true}, type=Type}) ->
%% Icode Coordinator Callbacks
%%=====================================================================
--spec replace_nones([#range{}]) -> [#range{}].
+-spec replace_nones([range()]) -> [range()].
replace_nones(Args) ->
[replace_none(Arg) || Arg <- Args].
@@ -1905,7 +1907,7 @@ replace_none(Arg) ->
false -> Arg
end.
--spec update__info([#range{}], [#range{}]) -> {boolean(), [#ann{}]}.
+-spec update__info([range()], [range()]) -> {boolean(), [ann()]}.
update__info(NewRanges, OldRanges) ->
SupFun = fun (Ann, Range) ->
join_info(Ann, Range, fun safe_widen/3)
@@ -1915,19 +1917,19 @@ update__info(NewRanges, OldRanges) ->
Change = lists:zipwith(EqFun, ResRanges, OldRanges),
{lists:all(fun (X) -> X end, Change), ResRanges}.
--spec new__info/1 :: ([#range{}]) -> [#ann{}].
+-spec new__info([range()]) -> [ann()].
new__info(NewRanges) ->
[#ann{range=Range,count=1,type=t_any()} || Range <- NewRanges].
--spec return__info/1 :: ([#ann{}]) -> [#range{}].
+-spec return__info([ann()]) -> [range()].
return__info(Ranges) ->
[Range || #ann{range=Range} <- Ranges].
--spec return_none/0 :: () -> [#range{},...].
+-spec return_none() -> [range(),...].
return_none() ->
[none_type()].
--spec return_none_args/2 :: (#cfg{}, mfa()) -> [#range{}].
+-spec return_none_args(cfg(), mfa()) -> [range()].
return_none_args(Cfg, {_M,_F,A}) ->
NoArgs =
case hipe_icode_cfg:is_closure(Cfg) of
@@ -1936,7 +1938,7 @@ return_none_args(Cfg, {_M,_F,A}) ->
end,
lists:duplicate(NoArgs, none_type()).
--spec return_any_args/2 :: (#cfg{}, mfa()) -> [#range{}].
+-spec return_any_args(cfg(), mfa()) -> [range()].
return_any_args(Cfg, {_M,_F,A}) ->
NoArgs =
case hipe_icode_cfg:is_closure(Cfg) of
diff --git a/lib/inets/doc/src/http_client.xml b/lib/inets/doc/src/http_client.xml
index 672ea3fa98..4542211d71 100644
--- a/lib/inets/doc/src/http_client.xml
+++ b/lib/inets/doc/src/http_client.xml
@@ -42,10 +42,10 @@
dynamically in runtime. Each client profile will spawn a new
process to handle each request unless there is a possibility to use
a persistent connection with or without pipelining.
- The client will add a host header and an empty
- te header if there are no such headers present in the request.</p>
+ The client will add a <c>host</c> header and an empty
+ <c>te</c> header if there are no such headers present in the request.</p>
- <p>The clients supports ipv6 as long as the underlying mechanisms also do
+ <p>The client supports ipv6 as long as the underlying mechanisms also do
so.</p>
</section>
@@ -87,7 +87,7 @@
httpc:request("http://www.erlang.org").
</code>
<p>An ordinary asynchronous request. The result will be sent
- to the calling process on the form {http, {ReqestId, Result}}</p>
+    to the calling process in the form <c>{http, {RequestId, Result}}</c>.</p>
<code type="erl">
5 > {ok, RequestId} =
httpc:request(get, {"http://www.erlang.org", []}, [], [{sync, false}]).
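As the text above says, the reply to an asynchronous request arrives as a message of the form {http, {RequestId, Result}}. A minimal sketch of issuing the request and collecting the reply (error handling and the timeout value are illustrative only):

    {ok, RequestId} =
        httpc:request(get, {"http://www.erlang.org", []}, [], [{sync, false}]),
    receive
        {http, {RequestId, Result}} -> Result
    after 5000 ->
        timeout
    end.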
diff --git a/lib/inets/doc/src/httpc.xml b/lib/inets/doc/src/httpc.xml
index c20358178b..bcdd2913e0 100644
--- a/lib/inets/doc/src/httpc.xml
+++ b/lib/inets/doc/src/httpc.xml
@@ -36,7 +36,7 @@
<note>
<p>When starting the Inets application a manager process for the
default profile will be started. The functions in this API
- that does not explicitly use a profile will accesses the
+ that do not explicitly use a profile will access the
default profile. A profile keeps track of proxy options,
cookies and other options that can be applied to more than one
request. </p>
@@ -117,7 +117,7 @@ ssl_options() = {verify, code()} |
application or started dynamically in runtime by calling the
inets application API <c>inets:start(httpc, ServiceConfig)</c>, or
<c>inets:start(httpc, ServiceConfig, How)</c>
- see <seealso marker="inets">inets(3)</seealso> Below follows a
+ see <seealso marker="inets">inets(3)</seealso>. Below follows a
description of the available configuration options.</p>
<taglist>
<tag>{profile, profile()}</tag>
@@ -129,8 +129,8 @@ ssl_options() = {verify, code()} |
as session cookies.</item>
</taglist>
- <p>The client can be stopped using inets:stop(httpc, Pid) or
- inets:stop(httpc, Profile).</p>
+ <p>The client can be stopped using <c>inets:stop(httpc, Pid)</c> or
+ <c>inets:stop(httpc, Profile)</c>.</p>
<marker id="request1"></marker>
</section>
@@ -148,7 +148,7 @@ ssl_options() = {verify, code()} |
<v>Reason = term() </v>
</type>
<desc>
- <p>Equivalent to httpc:request(get, {Url, []}, [], []).</p>
+ <p>Equivalent to <c>httpc:request(get, {Url, []}, [], [])</c>.</p>
<marker id="request2"></marker>
</desc>
@@ -201,7 +201,7 @@ ssl_options() = {verify, code()} |
<desc>
<p>Sends a HTTP-request. The function can be both synchronous
and asynchronous. In the later case the function will return
- {ok, RequestId} and later on the information will be delivered
+ <c>{ok, RequestId}</c> and later on the information will be delivered
to the <c>receiver</c> depending on that value. </p>
<p>Http option (<c>http_option()</c>) details: </p>
@@ -209,7 +209,7 @@ ssl_options() = {verify, code()} |
<tag><c><![CDATA[timeout]]></c></tag>
<item>
<p>Timeout time for the request. </p>
- <p>The clock start ticking as soon as the request has been
+ <p>The clock starts ticking as soon as the request has been
sent. </p>
<p>Time is in milliseconds. </p>
<p>Defaults to <c>infinity</c>. </p>
@@ -246,11 +246,11 @@ ssl_options() = {verify, code()} |
<tag><c><![CDATA[autoredirect]]></c></tag>
<item>
- <p>Should the client automatically retreive the information
+ <p>Should the client automatically retrieve the information
from the new URI and return that as the result instead
of a 30X-result code. </p>
<p>Note that for some 30X-result codes automatic redirect
- is not allowed in these cases the 30X-result will always
+ is not allowed. In these cases the 30X-result will always
be returned. </p>
<p>Defaults to <c>true</c>. </p>
</item>
@@ -267,12 +267,12 @@ ssl_options() = {verify, code()} |
<c>HTTP/0.9</c> client. By default this is an <c>HTTP/1.1</c>
client. When using <c>HTTP/1.0</c> persistent connections will
not be used. </p>
- <p>Defaults to the trsing <c>"HTTP/1.1"</c>. </p>
+ <p>Defaults to the string <c>"HTTP/1.1"</c>. </p>
</item>
<tag><c><![CDATA[relaxed]]></c></tag>
<item>
- <p>If set to true workarounds for known server deviations from
+ <p>If set to <c>true</c> workarounds for known server deviations from
the HTTP-standard are enabled. </p>
<p>Defaults to <c>false</c>. </p>
</item>
@@ -296,21 +296,21 @@ ssl_options() = {verify, code()} |
<item>
<p>Streams the body of a 200 or 206 response to the calling
process or to a file. When streaming to the calling process
- using the option <c>self</c> the the following stream messages
- will be sent to that process: {http, {RequestId,
+ using the option <c>self</c> the following stream messages
+ will be sent to that process: <c>{http, {RequestId,
stream_start, Headers}, {http, {RequestId, stream,
- BinBodyPart}, {http, {RequestId, stream_end, Headers}. When
+ BinBodyPart}, {http, {RequestId, stream_end, Headers}</c>. When
streaming to to the calling processes using the option
<c>{self, once}</c> the first message will have an additional
- element e.i. {http, {RequestId, stream_start, Headers, Pid},
+	    element, i.e. <c>{http, {RequestId, stream_start, Headers, Pid}}</c>;
this is the process id that should be used as an argument to
- http:stream_next/1 to trigger the next message to be sent to
+ <c>http:stream_next/1</c> to trigger the next message to be sent to
the calling process. </p>
<p>Note that it is possible that chunked encoding will add
- headers so that there are more headers in the stream_end
- message than in the stream_start.
+ headers so that there are more headers in the <c>stream_end</c>
+ message than in the <c>stream_start</c>.
When streaming to a file and the request is asynchronous the
- message {http, {RequestId, saved_to_file}} will be sent. </p>
+ message <c>{http, {RequestId, saved_to_file}}</c> will be sent. </p>
<p>Defaults to <c>none</c>. </p>
</item>
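A minimal sketch of the streaming flow documented in this item, assuming the {stream, self} option together with an asynchronous request; note that each of the messages listed above nests two tuples, e.g. {http, {RequestId, stream_start, Headers}}. stream_loop/1 is a hypothetical helper, not part of the httpc API:

    {ok, RequestId} =
        httpc:request(get, {"http://www.erlang.org", []}, [],
                      [{sync, false}, {stream, self}]),
    stream_loop(RequestId).

    stream_loop(RequestId) ->
        receive
            {http, {RequestId, stream_start, _Headers}} ->
                stream_loop(RequestId);
            {http, {RequestId, stream, BinBodyPart}} ->
                io:format("got ~p bytes~n", [byte_size(BinBodyPart)]),
                stream_loop(RequestId);
            {http, {RequestId, stream_end, _Headers}} ->
                done
        end.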
@@ -338,7 +338,7 @@ ssl_options() = {verify, code()} |
case insenstive. This feature should only be used if there is
no other way to communicate with the server or for testing
purpose. Also note that when this option is used no headers
- will be automatically added, all necessary headers has to be
+ will be automatically added, all necessary headers have to be
provided by the user. </p>
<p>Defaults to <c>false</c>. </p>
</item>
@@ -354,16 +354,16 @@ ssl_options() = {verify, code()} |
checked in any way. </p>
<p>Note that this may change the socket behaviour
(see <seealso marker="kernel:inet#setopts">inet:setopts/2</seealso>)
- for an already existing, and therefor already connected
+ for an already existing one, and therefore an already connected
request handler. </p>
- <p>By defaults the socket options set by the
+ <p>By default the socket options set by the
<seealso marker="#set_options">set_options/1,2</seealso>
- function is used when establishing connection. </p>
+ function are used when establishing a connection. </p>
</item>
<tag><c><![CDATA[receiver]]></c></tag>
<item>
- <p>Defines how the client will deliver the result for a
+ <p>Defines how the client will deliver the result of an
asynchroneous request (<c>sync</c> has the value
<c>false</c>). </p>
@@ -395,7 +395,7 @@ apply(Module, Function, [ReplyInfo | Args])
</item>
</taglist>
- <p>In all cases above, <c>ReplyInfo</c> has the following
+ <p>In all of the above cases, <c>ReplyInfo</c> has the following
structure: </p>
<pre>
@@ -470,46 +470,46 @@ apply(Module, Function, [ReplyInfo | Args])
<v>IpDesc = string()</v>
<d>ex: "134.138" or "[FEDC:BA98" (all IP-addresses starting with 134.138 or FEDC:BA98), "66.35.250.150" or "[2010:836B:4179::836B:4179]" (a complete IP-address).</d>
<v>MaxSessions = integer() </v>
- <d>Default is <em>2</em>.
+ <d>Default is <c>2</c>.
Maximum number of persistent connections to a host.</d>
<v>MaxKeepAlive = integer() </v>
- <d>Default is <em>5</em>.
+ <d>Default is <c>5</c>.
Maximum number of outstanding requests on the same connection to
a host.</d>
<v>KeepAliveTimeout = integer() </v>
- <d>Default is <em>120000</em> (= 2 min).
+ <d>Default is <c>120000</c> (= 2 min).
If a persistent connection is idle longer than the
- keep_alive_timeout the client will close the connection.
- The server may also have a such a time out but you should
+ <c>keep_alive_timeout</c> the client will close the connection.
+ The server may also have such a time out but you should
not count on it!</d>
<v>MaxPipeline = integer() </v>
- <d>Default is <em>2</em>.
+ <d>Default is <c>2</c>.
Maximum number of outstanding requests on a pipelined connection to a host.</d>
<v>PipelineTimeout = integer() </v>
- <d>Default is <em>0</em>,
+ <d>Default is <c>0</c>,
which will result in pipelining not being used.
If a persistent connection is idle longer than the
- pipeline_timeout the client will close the connection. </d>
+ <c>pipeline_timeout</c> the client will close the connection. </d>
<v>CookieMode = enabled | disabled | verify </v>
- <d>Default is <em>disabled</em>.
+ <d>Default is <c>disabled</c>.
If Cookies are enabled all valid cookies will automatically be
saved in the client manager's cookie database.
- If the option verify is used the function http:verify_cookie/2
- has to be called for the cookie to be saved.</d>
+ If the option <c>verify</c> is used the function <c>store_cookies/2</c>
+ has to be called for the cookies to be saved.</d>
<v>IpFamily = inet | inet6 | inet6fb4 </v>
- <d>By default <em>inet</em>.
+ <d>By default <c>inet</c>.
When it is set to <c>inet6fb4</c> you can use both ipv4 and ipv6.
It first tries <c>inet6</c> and if that does not works falls back to <c>inet</c>.
The option is here to provide a workaround for buggy ipv6 stacks to ensure that
ipv4 will always work.</d>
<v>IpAddress = ip_address() </v>
<d>If the host has several network interfaces, this option specifies which one to use.
- See gen_tcp:connect/3,4 for more info. </d>
+ See <seealso marker="kernel:gen_tcp#connect">gen_tcp:connect/3,4</seealso> for more info. </d>
<v>Port = integer() </v>
<d>Specify which local port number to use.
- See gen_tcp:connect/3,4 for more info. </d>
+ See <seealso marker="kernel:gen_tcp#connect">gen_tcp:connect/3,4</seealso> for more info. </d>
<v>VerboseMode = false | verbose | debug | trace </v>
- <d>Default is <em>false</em>.
+ <d>Default is <c>false</c>.
This option is used to switch on (or off)
different levels of erlang trace on the client.
It is a debug feature.</d>
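A small sketch of configuring a few of the options listed above through httpc:set_options/1; only option names that appear on this page are used, and the values are arbitrary (they happen to match the documented defaults except for max_sessions):

    ok = httpc:set_options([{max_sessions, 4},
                            {keep_alive_timeout, 120000},
                            {pipeline_timeout, 0},
                            {cookies, verify}]).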
@@ -523,14 +523,14 @@ apply(Module, Function, [ReplyInfo | Args])
alive and use persistent connections
with or without pipeline depending on configuration
and current circumstances. The HTTP/1.1 specification does not
- provide a guideline for how many requests that would be
+ provide a guideline for how many requests would be
ideal to be sent on a persistent connection,
this very much depends on the
application. Note that a very long queue of requests may cause a
- user perceived delays as earlier request may take a long time
+ user perceived delay as earlier requests may take a long time
to complete. The HTTP/1.1 specification does suggest a
limit of 2 persistent connections per server, which is the
- default value of the max_sessions option. </p>
+ default value of the <c>max_sessions</c> option. </p>
</note>
<marker id="stream_next"></marker>
@@ -549,14 +549,14 @@ apply(Module, Function, [ReplyInfo | Args])
<p>Triggers the next message to be streamed, e.i.
same behavior as active once for sockets.</p>
- <marker id="verify_cookie"></marker>
- <marker id="store_cookie"></marker>
+ <marker id="verify_cookies"></marker>
+ <marker id="store_cookies"></marker>
</desc>
</func>
<func>
- <name>store_cookie(SetCookieHeaders, Url) -> </name>
- <name>store_cookie(SetCookieHeaders, Url, Profile) -> ok | {error, Reason}</name>
+ <name>store_cookies(SetCookieHeaders, Url) -> </name>
+ <name>store_cookies(SetCookieHeaders, Url, Profile) -> ok | {error, Reason}</name>
<fsummary>Saves the cookies defined in SetCookieHeaders in the client profile's cookie database.</fsummary>
<type>
<v>SetCookieHeaders = headers() - where field = "set-cookie"</v>
@@ -566,7 +566,7 @@ apply(Module, Function, [ReplyInfo | Args])
<desc>
<p>Saves the cookies defined in SetCookieHeaders
in the client profile's cookie database. You need to
- call this function if you set the option cookies to <c>verify</c>.
+ call this function if you have set the option <c>cookies</c> to <c>verify</c>.
If no profile is specified the default profile will be used.
</p>
@@ -576,16 +576,16 @@ apply(Module, Function, [ReplyInfo | Args])
<func>
<name>cookie_header(Url) -> </name>
- <name>cookie_header(Url, Profile) -> header() | {error, Rason}</name>
+ <name>cookie_header(Url, Profile) -> header() | {error, Reason}</name>
<fsummary>Returns the cookie header that would be sent when
- making a request to Url using the profile Profile.</fsummary>
+ making a request to Url using the profile <c>Profile</c>.</fsummary>
<type>
<v>Url = url()</v>
<v>Profile = profile()</v>
</type>
<desc>
<p>Returns the cookie header that would be sent
- when making a request to Url using the profile Profile.
+ when making a request to <c>Url</c> using the profile <c>Profile</c>.
If no profile is specified the default profile will be used.
</p>
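Tying store_cookies/2 and cookie_header/1 together: with the cookies option set to verify, cookies from a response must be stored explicitly before they are included in later requests. A hedged sketch (whether any set-cookie headers actually come back depends on the server):

    {ok, {_StatusLine, Headers, _Body}} =
        httpc:request("http://www.erlang.org"),
    SetCookieHeaders = [H || {"set-cookie", _} = H <- Headers],
    ok = httpc:store_cookies(SetCookieHeaders, "http://www.erlang.org"),
    CookieHeader = httpc:cookie_header("http://www.erlang.org").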
@@ -602,7 +602,7 @@ apply(Module, Function, [ReplyInfo | Args])
<v>Profile = profile()</v>
</type>
<desc>
- <p>Resets (clears) the cookie database for the specified Profile.
+ <p>Resets (clears) the cookie database for the specified <c>Profile</c>.
If no profile is specified the default profile will be used.
</p>
</desc>
diff --git a/lib/inets/doc/src/notes.xml b/lib/inets/doc/src/notes.xml
index 11b0af4310..5da9d98002 100644
--- a/lib/inets/doc/src/notes.xml
+++ b/lib/inets/doc/src/notes.xml
@@ -1,10 +1,10 @@
-<?xml version="1.0" encoding="latin1" ?>
+<?xml version="1.0" encoding="iso-8859-1" ?>
<!DOCTYPE chapter SYSTEM "chapter.dtd">
<chapter>
<header>
<copyright>
- <year>2002</year><year>2010</year>
+ <year>2002</year><year>2011</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -32,50 +32,78 @@
<file>notes.xml</file>
</header>
- <section><title>Inets 5.5.1</title>
+ <section><title>Inets 5.5.2</title>
- <section><title>Fixed Bugs and Malfunctions</title>
+ <section><title>Improvements and New Features</title>
+ <p>-</p>
+
+<!--
<list>
<item>
- <p> Fix format_man_pages so it handles all man sections
- and remove warnings/errors in various man pages. </p>
<p>
- Own Id: OTP-8600</p>
+ Miscellaneous inet6 related problems.</p>
+ <p>Own Id: OTP-8927</p>
</item>
+ </list>
+-->
+
+ </section>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
<item>
- <p>
- [httpc] Pipelined and queued requests not processed when
- connection closed remotelly.</p>
- <p>
- Own Id: OTP-8906</p>
+      <p>[httpd] httpd_response:send_chunk handled an empty list and
+        an empty binary correctly - i.e. no chunk was sent - but it
+        did not handle a list containing an empty binary
+        [&lt;&lt;&gt;&gt;]. Such a list was sent as an empty chunk,
+        which http_chunk encoded the same way as a final chunk,
+        making the http client believe that the end of the page had
+        been reached.</p>
+ <p>Own Id: OTP-8906</p>
</item>
</list>
</section>
+ </section> <!-- 5.5.2 -->
+
+
+ <section><title>Inets 5.5.1</title>
<section><title>Improvements and New Features</title>
<list>
<item>
- <p>
- Miscellaneous inet6 related problems.</p>
- <p>
- Own Id: OTP-8927</p>
+ <p>Miscellaneous inet6 related problems.</p>
+ <p>Own Id: OTP-8927</p>
</item>
<item>
- <p>
- Updated http-server to make sure URLs in error-messages
- are URL-encoded. Added support in http-client to use
- URL-encoding. Also added the missing include directory
- for the inets application.</p>
- <p>
- Own Id: OTP-8940 Aux Id: seq11735 </p>
+ <p>Updated http-server to make sure URLs in error-messages
+ are URL-encoded. Added support in http-client to use
+ URL-encoding. Also added the missing include directory
+ for the inets application.</p>
+ <p>Own Id: OTP-8940 Aux Id: seq11735 </p>
</item>
</list>
</section>
-</section>
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>Fix format_man_pages so it handles all man sections
+ and remove warnings/errors in various man pages. </p>
+ <p>Own Id: OTP-8600</p>
+ </item>
+ <item>
+ <p>[httpc] Pipelined and queued requests not processed when
+        connection closed remotely.</p>
+ <p>Own Id: OTP-8906</p>
+ </item>
+ </list>
+ </section>
-<section><title>Inets 5.5</title>
+ </section> <!-- 5.5.1 -->
+
+
+ <section><title>Inets 5.5</title>
<section><title>Fixed Bugs and Malfunctions</title>
<list>
@@ -120,9 +148,10 @@
</list>
</section>
-</section>
+ </section> <!-- 5.5 -->
+
-<section><title>Inets 5.4</title>
+ <section><title>Inets 5.4</title>
<section><title>Improvements and New Features</title>
<!--
diff --git a/lib/inets/src/http_client/httpc.erl b/lib/inets/src/http_client/httpc.erl
index ae754fab94..04fae13b20 100644
--- a/lib/inets/src/http_client/httpc.erl
+++ b/lib/inets/src/http_client/httpc.erl
@@ -252,7 +252,7 @@ set_option(Key, Value, Profile) ->
%% Description: Store the cookies from <SetCookieHeaders>
%% in the cookie database
%% for the profile <Profile>. This function shall be used when the option
-%% cookie is set to verify.
+%% cookies is set to verify.
%%-------------------------------------------------------------------------
store_cookies(SetCookieHeaders, Url) ->
store_cookies(SetCookieHeaders, Url, default_profile()).
diff --git a/lib/inets/src/http_lib/http_chunk.erl b/lib/inets/src/http_lib/http_chunk.erl
index 621bc68eae..57647438e9 100644
--- a/lib/inets/src/http_lib/http_chunk.erl
+++ b/lib/inets/src/http_lib/http_chunk.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -17,7 +17,8 @@
%% %CopyrightEnd%
%%
%% Description: Implements chunked transfer encoding see RFC2616 section
-%% 3.6.1
+%% 3.6.1
+
-module(http_chunk).
-include("http_internal.hrl").
@@ -28,6 +29,7 @@
%% little at a time on a socket.
-export([decode_size/1, ignore_extensions/1, decode_data/1, decode_trailer/1]).
+
%%%=========================================================================
%%% API
%%%=========================================================================
@@ -81,6 +83,9 @@ encode(Chunk) when is_binary(Chunk)->
HEXSize = list_to_binary(http_util:integer_to_hexlist(size(Chunk))),
<<HEXSize/binary, ?CR, ?LF, Chunk/binary, ?CR, ?LF>>;
+encode([<<>>]) ->
+ [];
+
encode(Chunk) when is_list(Chunk)->
HEXSize = http_util:integer_to_hexlist(erlang:iolist_size(Chunk)),
[HEXSize, ?CR, ?LF, Chunk, ?CR, ?LF].
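The new encode([<<>>]) clause above returns an empty iolist instead of falling through to the generic list clause, which would have produced the same bytes as encode_last/0 (a zero-sized chunk) and so terminated the response early; this is the OTP-8906 problem described in the release notes. A caller-side sketch (send_chunk/2 is a hypothetical wrapper, not an inets function):

    send_chunk(Socket, Chunk) ->
        case http_chunk:encode(Chunk) of
            []   -> ok;    %% e.g. Chunk =:= [<<>>]: nothing goes on the wire
            Data -> gen_tcp:send(Socket, Data)
        end.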
@@ -88,6 +93,7 @@ encode(Chunk) when is_list(Chunk)->
encode_last() ->
<<$0, ?CR, ?LF, ?CR, ?LF >>.
+
%%-------------------------------------------------------------------------
%% handle_headers(HeaderRecord, ChunkedHeaders) -> NewHeaderRecord
%%
diff --git a/lib/inets/src/inets_app/inets.appup.src b/lib/inets/src/inets_app/inets.appup.src
index 0194c65db9..07da8ca961 100644
--- a/lib/inets/src/inets_app/inets.appup.src
+++ b/lib/inets/src/inets_app/inets.appup.src
@@ -1,7 +1,7 @@
%% This is an -*- erlang -*- file.
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -18,6 +18,11 @@
{"%VSN%",
[
+ {"5.5.1",
+ [
+ {load_module, http_chunk, soft_purge, soft_purge, []}
+ ]
+ },
{"5.5",
[
{restart_application, inets}
@@ -30,7 +35,12 @@
}
],
[
- {"5.4",
+ {"5.5.1",
+ [
+ {load_module, http_chunk, soft_purge, soft_purge, []}
+ ]
+ },
+ {"5.5",
[
{restart_application, inets}
]
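For orientation, the instructions added above mean that an upgrade from 5.5.1 (and a downgrade back to it) only reloads http_chunk, while going to or from 5.5 still restarts the whole application. The load_module tuple follows the standard appup shape, sketched here from the general appup conventions rather than from this file:

    %% {load_module, Module, PrePurge, PostPurge, DepMods}
    %%   soft_purge makes the release installation fail, rather than kill
    %%   processes, if old code of the module is still being executed;
    %%   DepMods ([]) lists modules that must be handled before this one.
    {load_module, http_chunk, soft_purge, soft_purge, []}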
diff --git a/lib/inets/vsn.mk b/lib/inets/vsn.mk
index 67737ee552..b1de3fef43 100644
--- a/lib/inets/vsn.mk
+++ b/lib/inets/vsn.mk
@@ -2,7 +2,7 @@
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2001-2010. All Rights Reserved.
+# Copyright Ericsson AB 2001-2011. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
diff --git a/lib/inviso/src/inviso_tool.erl b/lib/inviso/src/inviso_tool.erl
index 05158f58fe..7d3cfb9da0 100644
--- a/lib/inviso/src/inviso_tool.erl
+++ b/lib/inviso/src/inviso_tool.erl
@@ -1,3324 +1,3255 @@
-% ``The contents of this file are subject to the Erlang Public License,
-%% Version 1.1, (the "License"); you may not use this file except in
-%% compliance with the License. You should have received a copy of the
-%% Erlang Public License along with this software. If not, it can be
-%% retrieved via the world wide web at http://www.erlang.org/.
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
-%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
-%% AB. All Rights Reserved.''
-%%
-%% $Id$
-%%
-%% Description:
-%% The inviso_tool implementation. A tool that uses inviso.
-%%
-%% Authors:
-%% Lennart Öhman, [email protected]
-%% -----------------------------------------------------------------------------
-
--module(inviso_tool).
-
-
-%% This is the inviso tool, which is a tool using the inviso trace application.
-%% It is developed to make tracing using trace cases possible in an environment
-%% of distributed Erlang nodes.
-%% A current restriction is that the Erlang nodes are supposed to have the same
-%% code. This since inviso tool can at this point not handle subsets of nodes.
-%% Instead all participating Erlang nodes are treated the same.
-%%
-%% The main functionality of the inviso tool are:
-%%
-%% (1) Handles start and stop of tracing at participating nodes.
-%% (2) Interprets trace-case files at a distributed network level.
-%% (The inviso runtime component is responsible for interpreting
-%% trace cases at a local level, if run in an autostart).
-%% (3) Keeps a command history log from which:
-%% (a) Sequences easily can be repeated.
-%% (b) Autostart configuration files can be created (understood by the
-%% default inviso autostart mechanism).
-%% (4) Performs reactivation in case tracing is suspended (manually or by
-%% an overload mechanism).
-%% (5) Can reconnect crashed nodes and by using the history bringing them
-%% up to speed.
-
-%% Distributed Erlang
-%% ------------------
-%% Inviso is built to run in a distributed environment.
-%% The inviso tool can also be used in a non distributed environment.
-
-%% Short description
-%% -----------------
-%% Start-up of the inviso tool
-%% During the start-up of the tool, the tool starts runtime components at
-%% all participating nodes. A runtime component can already be running at
-%% a particular node and will then simply be adopted.
-%%
-%% Session
-%% A session is said to start when tracing is initiated, and ends when
-%% made to stop by the user. When a session is stopped, tracing is stopped
-%% at all participating nodes. Note that participating nodes may come and
-%% go though the time-frame of a session. That means that if a node is
-%% reconnected it may resume its tracing in the current session through
-%% a 'restart_session'. A runtime component that is already tracing at the
-%% time start-session will simply be part of the session without its
-%% ingoing tracing being changed.
-%%
-%% Reactivation
-%% A node that is suspended can be reactivated to resume tracing. Note that
-%% tracing has in this situation never been stopped at the node in question.
-%% The inviso tool resumes the node and applies the history to it.
-%%
-%% Reconnect
-%% A node that is "down" from the inviso tool's perspective can be
-%% reconnected. During reconnection the tool restarts the runtime component
-%% at that node but does not (re)initiate tracing. The latter is called
-%% restart_session and must be done explicitly, unless the node in question
-%% is in fact already tracing. If the node is already tracing (due to an autostart
-%% for instance), it automatically becomes part of the ongoing session (if
-%% there is an ongoing session).
-%%
-%% Restart Session
-%% A node that has been down and has been reconnected can be made to
-%% initialize and resume its tracing. This is done by starting the session
-%% at the node in question and redoing the current history.
-
-%% Trace files within a session
-%% Since it is possible to init-tracing (from an inviso perspective) several
-%% times within the same session, a session may leave several trace log files
-%% behind. This must be resolved by the tracer data generator function
-%% (user supplied) by marking filenames in a chronological order but still
-%% making them possible to identify as part of the same session
-
-
-
-%% -----------------------------------------------------------------------------
-%% API exports.
-%% -----------------------------------------------------------------------------
-
--export([start/0,start/1,stop/0,stop/1]).
--export([reconnect_nodes/0,reconnect_nodes/1,
- start_session/0,start_session/1,
- reinitiate_session/0,reinitiate_session/1,
- restore_session/0,restore_session/1,restore_session/2,
- stop_session/0,
- reset_nodes/0,reset_nodes/1,
- atc/3,sync_atc/3,sync_atc/4,
- sync_rtc/2,sync_rtc/3,
- dtc/2,sync_dtc/2,sync_dtc/3,
- inviso/2]).
--export([reactivate/0,reactivate/1,
- save_history/1,
- get_autostart_data/1,get_autostart_data/2,
- get_activities/0,get_node_status/0,get_node_status/1,get_session_data/0]).
--export([flush/0,flush/1]).
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% Debug exports.
-%% -----------------------------------------------------------------------------
-
--export([get_loopdata/0]).
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% OTP exports and call backs.
-%% -----------------------------------------------------------------------------
-
--export([init/1,handle_call/3,handle_cast/2,handle_info/2,terminate/2]).
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% Internal exports.
-%% -----------------------------------------------------------------------------
-
--export([tc_executer/4,reactivator_executer/6]).
--export([std_options_generator/1]).
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% Constants.
-%% -----------------------------------------------------------------------------
-
-%% Defines the inviso function calls that shall be possible to do through the
-%% inviso API in this tool.
--define(INVISO_CMDS,
- [{tp,5},{tp,4},{tp,1},{tpl,5},{tpl,4},{tpl,1},
- {ctp,1},{ctp,2},{ctp,3},{ctpl,1},{ctpl,2},{ctpl,3},
- {tf,2},{tf,1},{ctf,2},{ctf,1},{ctf_all,0},
- {init_tpm,4},{init_tpm,7},
- {tpm,4},{tpm,5},{tpm,8},
- {tpm_tracer,4},{tpm_tracer,5},{init_tpm,8},
- {tpm_ms,5},{tpm_ms_tracer,5},
- {ctpm_ms,4},{ctpm,3},
- {tpm_localnames,0},{ctpm_localnames,0},
- {tpm_globalnames,0},{ctpm_globalnames,0},
- {ctp_all,0},
- {suspend,1},{cancel_suspension,0}]).
-%% -----------------------------------------------------------------------------
-
-%% These inviso functions shall be included in the command history log. Others
-%% are not relevant to be redone during a recactivation, a restart session or
-%% exported to an autostart file.
--define(INVISO_CMD_HISTORY,
- [{tp,5},{tp,4},{tp,1},{tpl,5},{tpl,4},{tpl,1},
- {ctp,1},{ctp,2},{ctp,3},{ctpl,1},{ctpl,2},{ctpl,3},
- {tf,2},{tf,1},{ctf,2},{ctf,1},{ctf_all,0},
- {init_tpm,4},{init_tpm,7},
- {tpm,4},{tpm,5},{tpm,8},
- {tpm_tracer,4},{tpm_tracer,5},{init_tpm,8},
- {tpm_ms,5},{tpm_ms_tracer,5},
- {ctpm_ms,4},{ctpm,3},
- {tpm_localnames,0},{ctpm_localnames,0},
- {tpm_globalnames,0},{ctpm_globalnames,0},
- {ctp_all,0}]).
-%% -----------------------------------------------------------------------------
-
-%% Since many function calls to inviso may take long time, especially if they
-%% involve difficult and many trace patterns to set, the default gen_server:call
-%% time out can not be used. We just do not want to get stuck for ever if some
-%% error occurs.
--define(CALL_TIMEOUT,60000).
-
-%% Default max time to wait for a trace case called synchronously to return.
--define(SYNC_TC_TIMEOUT,10000).
-
-%% Runtime components shall terminate when the tool terminates.
--define(DEFAULT_DEPENDENCY,{dependency,0}).
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% Record definitions.
-%% -----------------------------------------------------------------------------
-
-%% The loopdata record.
--record(ld,{
- dir=".", % Working dir of the tool.
- nodes=down, % The nodesD database, defaults to non-distr.
- c_node, % Location of inviso_c.
- c_pid, % The inviso control component.
- regexp_node, % Node for regexp expansions.
- tc_dict, % Trace case definition db.
- chl, % Command history log.
- session_state=passive, % passive | tracing
- tdg={inviso_tool_lib,std_tdg,[]}, % Tracer data generator func.
- tracer_data, % Current session nr and TDGargs.
- reactivators=[], % Pids of now running reactivators.
- tc_def_file, % Trace case definition file.
- optg={?MODULE,std_options_generator,[]}, % Generates options to add_nodes/3.
- initial_tcs=[], % Initial trace cases.
- started_initial_tcs=[], % Cases that must be stopped when stop_tracing.
- history_dir, % File path for history file.
- keep_nodes=[], % Nodes that shall not be cleared when stopping.
- debug=false % Internal debug mode
- }).
-%% -----------------------------------------------------------------------------
-
-
-%% =============================================================================
-%% API
-%% =============================================================================
-
-%% start()={ok,Pid} | {error,{already_started,pid()}}
-%% start(Config)
-%% Config=[{Opt,Value},...], list of tuple options.
-%% Opt=dir|nodes|c_node|regexp_node|tdg|tc_def_file|optg|initial_tcs|
-%% history_dir|keep_nodes
-%% Starts the inviso_tool process. Options in Config are the same as those
-%% which are kept in the #ld structure.
-start() ->
- start([]).
-start(Config) ->
- gen_server:start({local,?MODULE},?MODULE,Config,[]).
-%% -----------------------------------------------------------------------------
-
-%% stop(UntouchedNodes)=
-%% stop()={ok,NodeResults} | NodeResult | {error,Reason}
-%% UntouchedNodes=list(), nodes where any trace patterns shall not be removed.
-%% NodeResults=[{Node,NodeResult},...]
-%% NodeResult=ok | {error,Reason} | patterns_untouched
-%% Stops the inviso tool and the inviso control component. Runtime components are
-%% stopped by them selves depending on their dependcy of the control component.
-%% All runtime components that are not marked as to be kept will have their
-%% trace patterns cleared before the inviso control component is shutdown.
-%% The NodeResults indicates which nodes were successfullt handled.
-stop() ->
- stop([]).
-stop(UntouchedNodes) ->
- gen_server:call(?MODULE,{stop,UntouchedNodes},?CALL_TIMEOUT).
-%% -----------------------------------------------------------------------------
-
-%% reconnect_nodes()=NodeResult; function for the nod-distributed case.
-%% reconnect_nodes(Nodes)={ok,NodesResults}
-%% NodesResults=[{Node,NodeResult},...]
-%% NodeResult={ok,{State,Status}} | {error,NReason}
-%% State=tracing | inactive
-%% Status=running | suspended
-%% NReason=unknown_node | already_connected | down
-%% (Re)starts the inviso runtime components at Nodes. Depending on its state
-%% (new,idle or tracing) and if the tool is running a session or not, it becomes
-%% part of the tool's ongoing session. If the newly reconnected node is not
-%% tracing but the tool runs a session, the node must be reinitiated to become
-%% tracing.
-reconnect_nodes() ->
- gen_server:call(?MODULE,{reconnect_nodes,local_runtime},?CALL_TIMEOUT).
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2005-2011. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%% Description:
+%% The inviso_tool implementation. A tool that uses inviso.
+%%
+%% Authors:
+%% Lennart Öhman, [email protected]
+%% -----------------------------------------------------------------------------
+
+-module(inviso_tool).
+
+
+%% This is the inviso tool, which is a tool using the inviso trace application.
+%% It is developed to make tracing using trace cases possible in an environment
+%% of distributed Erlang nodes.
+%% A current restriction is that the Erlang nodes are supposed to have the same
+%% code, since the inviso tool can at this point not handle subsets of nodes.
+%% Instead all participating Erlang nodes are treated the same.
+%%
+%% The main functions of the inviso tool are:
+%%
+%% (1) Handles start and stop of tracing at participating nodes.
+%% (2) Interprets trace-case files at a distributed network level.
+%% (The inviso runtime component is responsible for interpreting
+%% trace cases at a local level, if run in an autostart).
+%% (3) Keeps a command history log from which:
+%% (a) Sequences easily can be repeated.
+%% (b) Autostart configuration files can be created (understood by the
+%% default inviso autostart mechanism).
+%% (4) Performs reactivation in case tracing is suspended (manually or by
+%% an overload mechanism).
+%% (5) Can reconnect crashed nodes and, by using the history, bring them
+%% up to speed.
+
+%% Distributed Erlang
+%% ------------------
+%% Inviso is built to run in a distributed environment.
+%% The inviso tool can also be used in a non distributed environment.
+
+%% Short description
+%% -----------------
+%% Start-up of the inviso tool
+%% During the start-up of the tool, the tool starts runtime components at
+%% all participating nodes. A runtime component can already be running at
+%% a particular node and will then simply be adopted.
+%%
+%% Session
+%% A session is said to start when tracing is initiated, and ends when
+%% made to stop by the user. When a session is stopped, tracing is stopped
+%% at all participating nodes. Note that participating nodes may come and
+%% go through the time-frame of a session. That means that if a node is
+%% reconnected it may resume its tracing in the current session through
+%% a 'restart_session'. A runtime component that is already tracing at the
+%% time of start-session will simply be part of the session without its
+%% ongoing tracing being changed.
+%%
+%% Reactivation
+%% A node that is suspended can be reactivated to resume tracing. Note that
+%% tracing has in this situation never been stopped at the node in question.
+%% The inviso tool resumes the node and applies the history to it.
+%%
+%% Reconnect
+%% A node that is "down" from the inviso tool's perspective can be
+%% reconnected. During reconnection the tool restarts the runtime component
+%% at that node but does not (re)initiate tracing. The latter is called
+%% restart_session and must be done explicitly, unless the node in question
+%% is in fact already tracing. If the node is already tracing (due to an autostart
+%% for instance), it automatically becomes part of the ongoing session (if
+%% there is an ongoing session).
+%%
+%% Restart Session
+%% A node that has been down and has been reconnected can be made to
+%% initialize and resume its tracing. This is done by starting the session
+%% at the node in question and redoing the current history.
+
+%% Trace files within a session
+%% Since it is possible to init-tracing (from an inviso perspective) several
+%% times within the same session, a session may leave several trace log files
+%% behind. This must be resolved by the tracer data generator function
+%% (user supplied) by marking filenames in a chronological order but still
+%% making them possible to identify as part of the same session.
+
+
+
+%% -----------------------------------------------------------------------------
+%% API exports.
+%% -----------------------------------------------------------------------------
+
+-export([start/0,start/1,stop/0,stop/1]).
+-export([reconnect_nodes/0,reconnect_nodes/1,
+ start_session/0,start_session/1,
+ reinitiate_session/0,reinitiate_session/1,
+ restore_session/0,restore_session/1,restore_session/2,
+ stop_session/0,
+ reset_nodes/0,reset_nodes/1,
+ atc/3,sync_atc/3,sync_atc/4,
+ sync_rtc/2,sync_rtc/3,
+ dtc/2,sync_dtc/2,sync_dtc/3,
+ inviso/2]).
+-export([reactivate/0,reactivate/1,
+ save_history/1,
+ get_autostart_data/1,get_autostart_data/2,
+ get_activities/0,get_node_status/0,get_node_status/1,get_session_data/0]).
+-export([flush/0,flush/1]).
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% Debug exports.
+%% -----------------------------------------------------------------------------
+
+-export([get_loopdata/0]).
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% OTP exports and call backs.
+%% -----------------------------------------------------------------------------
+
+-export([init/1,handle_call/3,handle_cast/2,handle_info/2,terminate/2]).
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% Internal exports.
+%% -----------------------------------------------------------------------------
+
+-export([tc_executer/4,reactivator_executer/6]).
+-export([std_options_generator/1]).
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% Constants.
+%% -----------------------------------------------------------------------------
+
+%% Defines the inviso function calls that shall be possible to do through the
+%% inviso API in this tool.
+-define(INVISO_CMDS,
+ [{tp,5},{tp,4},{tp,1},{tpl,5},{tpl,4},{tpl,1},
+ {ctp,1},{ctp,2},{ctp,3},{ctpl,1},{ctpl,2},{ctpl,3},
+ {tf,2},{tf,1},{ctf,2},{ctf,1},{ctf_all,0},
+ {init_tpm,4},{init_tpm,7},
+ {tpm,4},{tpm,5},{tpm,8},
+ {tpm_tracer,4},{tpm_tracer,5},{init_tpm,8},
+ {tpm_ms,5},{tpm_ms_tracer,5},
+ {ctpm_ms,4},{ctpm,3},
+ {tpm_localnames,0},{ctpm_localnames,0},
+ {tpm_globalnames,0},{ctpm_globalnames,0},
+ {ctp_all,0},
+ {suspend,1},{cancel_suspension,0}]).
+%% -----------------------------------------------------------------------------
+
+%% These inviso functions shall be included in the command history log. Others
+%% are not relevant to be redone during a reactivation, a restart session or
+%% exported to an autostart file.
+-define(INVISO_CMD_HISTORY,
+ [{tp,5},{tp,4},{tp,1},{tpl,5},{tpl,4},{tpl,1},
+ {ctp,1},{ctp,2},{ctp,3},{ctpl,1},{ctpl,2},{ctpl,3},
+ {tf,2},{tf,1},{ctf,2},{ctf,1},{ctf_all,0},
+ {init_tpm,4},{init_tpm,7},
+ {tpm,4},{tpm,5},{tpm,8},
+ {tpm_tracer,4},{tpm_tracer,5},{init_tpm,8},
+ {tpm_ms,5},{tpm_ms_tracer,5},
+ {ctpm_ms,4},{ctpm,3},
+ {tpm_localnames,0},{ctpm_localnames,0},
+ {tpm_globalnames,0},{ctpm_globalnames,0},
+ {ctp_all,0}]).
+%% -----------------------------------------------------------------------------
+
+%% Since many function calls to inviso may take a long time, especially if they
+%% involve many or complicated trace patterns to set, the default gen_server:call
+%% timeout cannot be used. We just do not want to get stuck forever if some
+%% error occurs.
+-define(CALL_TIMEOUT,60000).
+
+%% Default max time to wait for a trace case called synchronously to return.
+-define(SYNC_TC_TIMEOUT,10000).
+
+%% Runtime components shall terminate when the tool terminates.
+-define(DEFAULT_DEPENDENCY,{dependency,0}).
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% Record definitions.
+%% -----------------------------------------------------------------------------
+
+%% The loopdata record.
+-record(ld,{
+ dir=".", % Working dir of the tool.
+ nodes=down, % The nodesD database, defaults to non-distr.
+ c_node, % Location of inviso_c.
+ c_pid, % The inviso control component.
+ regexp_node, % Node for regexp expansions.
+ tc_dict, % Trace case definition db.
+ chl, % Command history log.
+ session_state=passive, % passive | tracing
+ tdg={inviso_tool_lib,std_tdg,[]}, % Tracer data generator func.
+ tracer_data, % Current session nr and TDGargs.
+ reactivators=[], % Pids of now running reactivators.
+ tc_def_file, % Trace case definition file.
+ optg={?MODULE,std_options_generator,[]}, % Generates options to add_nodes/3.
+ initial_tcs=[], % Initial trace cases.
+ started_initial_tcs=[], % Cases that must be stopped when stop_tracing.
+ history_dir, % File path for history file.
+ keep_nodes=[], % Nodes that shall not be cleared when stopping.
+ debug=false % Internal debug mode
+ }).
+%% -----------------------------------------------------------------------------
+
+
+%% =============================================================================
+%% API
+%% =============================================================================
+
+%% start()={ok,Pid} | {error,{already_started,pid()}}
+%% start(Config)
+%% Config=[{Opt,Value},...], list of tuple options.
+%% Opt=dir|nodes|c_node|regexp_node|tdg|tc_def_file|optg|initial_tcs|
+%% history_dir|keep_nodes
+%% Starts the inviso_tool process. Options in Config are the same as those
+%% which are kept in the #ld structure.
+start() ->
+ start([]).
+start(Config) ->
+ gen_server:start({local,?MODULE},?MODULE,Config,[]).
+%% -----------------------------------------------------------------------------
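+%% Example (editorial sketch with hypothetical node and directory names,
+%% assuming the 'nodes' option takes a plain list of node names):
+%%   {ok,_Pid} = inviso_tool:start([{dir,"/var/trace"},
+%%                                  {nodes,['node1@host','node2@host']}]).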
+
+%% stop(UntouchedNodes)=
+%% stop()={ok,NodeResults} | NodeResult | {error,Reason}
+%% UntouchedNodes=list(), nodes where any trace patterns shall not be removed.
+%% NodeResults=[{Node,NodeResult},...]
+%% NodeResult=ok | {error,Reason} | patterns_untouched
+%% Stops the inviso tool and the inviso control component. Runtime components are
+%% stopped by themselves depending on their dependency on the control component.
+%% All runtime components that are not marked to be kept will have their
+%% trace patterns cleared before the inviso control component is shut down.
+%% The NodeResults indicates which nodes were successfully handled.
+stop() ->
+ stop([]).
+stop(UntouchedNodes) ->
+ gen_server:call(?MODULE,{stop,UntouchedNodes},?CALL_TIMEOUT).
+%% -----------------------------------------------------------------------------
+
+%% reconnect_nodes()=NodeResult; function for the non-distributed case.
+%% reconnect_nodes(Nodes)={ok,NodesResults}
+%% NodesResults=[{Node,NodeResult},...]
+%% NodeResult={ok,{State,Status}} | {error,NReason}
+%% State=tracing | inactive
+%% Status=running | suspended
+%% NReason=unknown_node | already_connected | down
+%% (Re)starts the inviso runtime components at Nodes. Depending on its state
+%% (new,idle or tracing) and if the tool is running a session or not, it becomes
+%% part of the tool's ongoing session. If the newly reconnected node is not
+%% tracing but the tool runs a session, the node must be reinitiated to become
+%% tracing.
+reconnect_nodes() ->
+ gen_server:call(?MODULE,{reconnect_nodes,local_runtime},?CALL_TIMEOUT).
reconnect_nodes(Node) when is_atom(Node) ->
- reconnect_nodes([Node]);
+ reconnect_nodes([Node]);
reconnect_nodes(Nodes) when is_list(Nodes) ->
- gen_server:call(?MODULE,{reconnect_nodes,Nodes},?CALL_TIMEOUT).
-%% -----------------------------------------------------------------------------
-
-%% start_session()={ok,{SessionNr,InvisoReturn}} | {error,Reason}
-%% start_session(MoreTDGargs)=
-%% MoreTDGargs=list(), prepended to the fixed list of args used when calling the
-%% tracer data generator function.
-%% SessionNr=integer(), trace sessions are numbered by the tool.
-%% InvisoReturn=If successful inviso call, the returnvalue from inviso.
-%% Note that individual nodes may be unsuccessful. See inviso:init_tracing/1
-%% Initiates tracing at all participating nodes.
-start_session() ->
- start_session([]).
-start_session(MoreTDGargs) ->
- gen_server:call(?MODULE,{start_session,MoreTDGargs},?CALL_TIMEOUT).
-%% -----------------------------------------------------------------------------
-
-%% reinitiate_session(Nodes)={ok,InvisoReturn} | {error,Reason}
-%% InvisoReturn=If successful inviso call, the returnvalue from inviso:init_tracing/1.
-%% Note that individual nodes may be unsuccessful. Mentioned nodes not part
-%% of the tool or not in state inactive will be marked as failing by the
-%% tool in the InvisoReturn.
-%% To reinitate a node means to (inviso) init tracing at it according to saved
-%% tracer data generator arguments for the current session and then redo the current
-%% history to bring it up to speed. Note that the tool must be running a session
-%% for reinitiate to work.
-reinitiate_session() ->
- gen_server:call(?MODULE,{reinitiate_session,local_runtime},?CALL_TIMEOUT).
-reinitiate_session(Nodes) ->
- gen_server:call(?MODULE,{reinitiate_session,Nodes},?CALL_TIMEOUT).
-%% -----------------------------------------------------------------------------
-
-%% restore_session()=
-%% restore_session(MoreTDGargs)=
-%% restore_session(FileName)=
-%% restore_session(FileName,MoreTDGargs)={ok,{SessionNr,InvisoReturn}} | {error,Reason}
-%% The two first clauses will start a new session using the last history. This
-%% implies that there must have been a session running prior.
-%% The two last clauses starts a session and reads a history file and executes the
-%% tracecases in it at all inactive nodes.
-%% In both cases the reused or read history becomes the current histoy, just if the
-%% session had been initiated manually. The tool may not
-%% have a session ongoing, and nodes already tracing (nodes which were adopted)
-%% are not effected. Just like when starting a session manually.
-restore_session() ->
- restore_session([]).
-restore_session([]) -> % This cant be a filename.
- gen_server:call(?MODULE,{restore_session,[]},?CALL_TIMEOUT);
-restore_session(FileNameOrMoreTDGargs) ->
- case is_string(FileNameOrMoreTDGargs) of
- true -> % Interpret it as a filename.
- restore_session(FileNameOrMoreTDGargs,[]);
- false -> % The we want to use last session history!
- gen_server:call(?MODULE,{restore_session,FileNameOrMoreTDGargs},?CALL_TIMEOUT)
- end.
-restore_session(FileName,MoreTDGargs) ->
- gen_server:call(?MODULE,{restore_session,{FileName,MoreTDGargs}},?CALL_TIMEOUT).
-%% -----------------------------------------------------------------------------
-
-%% stop_session()={ok,{SessionNr,Result}} | {error,Reason}
-%% SessionNr=integer()
-%% Result=[{Node,NodeResult},...] | NonDistributedNodeResult
-%% NodeResult=ok | {error,Reason}
-%% NonDistributedNodeResult=[ok] | []
-%% Stops inviso tracing at all participating nodes. The inviso runtime components
-%% will go to state idle. It is now time to fetch the logfiles. Will most often
-%% succeed. Will only return an error if the entire inviso call returned an
-%% error. Not if an individual node failed stop tracing successfully.
-%% Any running trace case, including reactivator processes will be terminated.
-stop_session() ->
- gen_server:call(?MODULE,stop_session,?CALL_TIMEOUT).
-%% -----------------------------------------------------------------------------
-
-%% reset_nodes()=NodeResult | {error,Reason}
-%% reset_nodes(Nodes)={ok,NodeResults} | {error,Reason}
-%% NodeResults and NodeResult as returned by inviso:clear/1 and /0.
-%% Clear nodes from trace flags, trace patterns and meta trace patterns. The tool
-%% must not be having a running session.
-reset_nodes() ->
- gen_server:call(?MODULE,{reset_nodes,local_runtime},?CALL_TIMEOUT).
-reset_nodes(Nodes) ->
- gen_server:call(?MODULE,{reset_nodes,Nodes},?CALL_TIMEOUT).
-%% -----------------------------------------------------------------------------
-
-%% atc(TC,Id,Vars)=ok | {error,Reason}
-%% TC=atom(), name of the trace case.
-%% Id=term(), given name of this usage of TC.
-%% Vars=list(), list of variable bindings [{Var,Value},...], Var=atom(),Value=term().
-%% Function activating a trace case. The trace case must be defined in the
-%% trace case dictionary. The 'ok' return value is only a signal that the
-%% trace case has started successfully. It may then run for as long as it is
-%% programmed to run. An erroneous return value does not necessarily mean that
-%% the trace case has not been executed. It rather means that is undetermined
-%% what happend.
-atc(TC,Id,Vars) ->
- gen_server:call(?MODULE,{atc,{TC,Id,Vars}},?CALL_TIMEOUT).
-%% -----------------------------------------------------------------------------
-
-%% sync_atc(TC,Id,Vars)=Result | {error,Reason}
-%% sync_atc(TC,Id,Vars,TimeOut)=
-%% Result=term(), what ever is returned be the last expression in the trace case.
-%% TimeOut=interger() | infinity, the max wait time for the trace case to finnish.
-%% As atc/3 but waits for the trace case to finish.
-sync_atc(TC,Id,Vars) ->
- gen_server:call(?MODULE,{sync_atc,{TC,Id,Vars,?SYNC_TC_TIMEOUT}},?CALL_TIMEOUT).
-sync_atc(TC,Id,Vars,TimeOut) ->
- gen_server:call(?MODULE,{sync_atc,{TC,Id,Vars,TimeOut}},?CALL_TIMEOUT).
-%% -----------------------------------------------------------------------------
-
-%% sync_rtc(TC,Vars)=Result | {error,Reason}
-%% sync_rtc(TC,Vars,TimeOut)=
-%% Result=term(), what ever is returned be the last expression in the trace case.
-%% TimeOut=interger() | infinity, the max wait time for the trace case to finnish.
-%% As sync_atc/3 but the trace case is not marked as activated. It is mearly placed
-%% in the history. Hence with sync_rtc a trace case can be "activated" multiple time.
-sync_rtc(TC,Vars) ->
- gen_server:call(?MODULE,{sync_rtc,{TC,Vars,?SYNC_TC_TIMEOUT}},?CALL_TIMEOUT).
-sync_rtc(TC,Vars,TimeOut) ->
- gen_server:call(?MODULE,{sync_rtc,{TC,Vars,TimeOut}},?CALL_TIMEOUT).
-%% -----------------------------------------------------------------------------
-
-%% dtc(TC,Id)=ok | {error,Reason}
-%% Deactivates a previosly activated trace case. This function can only be used
-%% on trace cases that has a deactivation defined in the trace case dictionary.
-%% There is of course really no difference between a file containing an activation
-%% compared to a deactivation. But to be able cancelling activations out from the
-%% history log, a defined deactivation is essential.
-%% As with activation, the returned 'ok' simply indicates the start of the trace
-%% case.
-dtc(TC,Id) ->
- gen_server:call(?MODULE,{dtc,{TC,Id}},?CALL_TIMEOUT).
-%% -----------------------------------------------------------------------------
-
-%% sync_dtc(TC,Id)=Result | {error,Reason}
-%% sync_dtc(TC,Id,TimeOut)=
-%% Synchronous deactivation of trace case. See dtc/2 and sync_atc/3 for
-%% parameters.
-sync_dtc(TC,Id) ->
- gen_server:call(?MODULE,{sync_dtc,{TC,Id,?SYNC_TC_TIMEOUT}},?CALL_TIMEOUT).
-sync_dtc(TC,Id,TimeOut) ->
- gen_server:call(?MODULE,{sync_dtc,{TC,Id,TimeOut}},?CALL_TIMEOUT).
-%% -----------------------------------------------------------------------------
-
-%% inviso(Cmd,Args)=Result
-%% Cmd=atom(), the (inviso) function name that shall be called.
-%% Args=list(), the arguments to Cmd.
-%% Result=term(), the result from the inviso function call.
-%% This function executes a Cmd in the inviso tool context. The inviso call will
-%% be logged in history log and thereby repeated in case of a reactivation.
-%% Note that this function is intended for use with inviso function API without
-%% specifying any nodes, since the function call is supposed to be carried out on
-%% all nodes.
-%% When these functions are written to an autostart config file by the tool there
-%% is supposed to be a translation to inviso_rt functions.
-inviso(Cmd,Args) ->
- gen_server:call(?MODULE,{inviso,{Cmd,Args}},?CALL_TIMEOUT).
-%% -----------------------------------------------------------------------------
-
-%% reactivate()=ok | {error,Reason}
-%% reactivate(Node)=ok | {error,Reason}
-%% Moves a runtime component from suspended to the state running. This can be
-%% done for both tracing and inactive nodes. The later is necessary since you
-%% may have stopped tracing with a node suspended.
-%% In case the node is tracing, commands in the command history log are redone at
-%% the node in questions.
-%% Note that this function returns 'ok' before the node is running. This because the
-%% the reactivated history is done by a separate process and there is no guarantee
-%% when it will be ready. The reactivated node will not be marked as running in
-%% the tool until done reactivating.
-%% Further it is important to understand that if there are "ongoing" tracecases
-%% (i.e tracecase scripts that are currently executing) and this node was running
-%% at the time that tracecase script started to execute, the list of nodes bound
-%% to the Nodes variable in that script executer includes this node. Making it
-%% no longer suspended makes it start executing inviso commands from where ever
-%% such are called. Hence the reactivation may be interferred by that tracecase.
-reactivate() -> % Non-distributed API.
- reactivate(node()).
-reactivate(Node) ->
- gen_server:call(?MODULE,{reactivate,Node},?CALL_TIMEOUT).
-%% -----------------------------------------------------------------------------
-
-%% save_history(FileName)={ok,AbsFileName} | {error,Reason}
-%% Saves the currently collected command history log to a file. The file will
-%% be a binary-file. If FileName is an absolute path, it will be saved to that
-%% file. Otherwise the history dir will be used. If no history dir was specified
-%% the tool dir will be used, prepended to FileName.
-save_history(FileName) ->
- gen_server:call(?MODULE,{save_history,FileName},?CALL_TIMEOUT).
-%% -----------------------------------------------------------------------------
-
-%% get_autostart_data(Nodes,Dependency)={ok,{AutoStartData,NodeResults} |
-%% {ok,{AutoStartData,NodeResult}} | {error,Reason}
-%% Dependency=inviso dependency parameter which will be used for every
-%% autostarted runtime component (included in Options).
-%% NodeResults=[{Node,NodeResult},...]
-%% NodeResult={ok,{Options,{tdg,{M,F,CompleteTDGargs}}}} | {error,Reason}
-%% Options=add_nodes options to the inviso runtime component.
-%% M,F=atom(), the module and function for tracerdata generation.
-%% CompleteTDGargs=list(), all arguments as they are given to the tracer
-%% data generator function.
-%% AutostartData=[CaseSpec,...]
-%% CaseSpec={file,{FileName,Bindings}} | {mfa,{M,F,Args}}
-%% FileName=string(), pointing out the trace case file. Note that this
-%% is the same as the path used by the tool.
-%% Bindings=Var bindings used according to the history for the
-%% invocation.
-%% M,F=atom(), the function that shall be called (normally some inviso).
-%% Args=list(), the actual arguments. Note that this may contain things
-%% which can not be written to file (ports, pids,...).
-%% Function returning information on how to autostart a node to make it trace
-%% according to the current history. The inviso_tool does not know how to write
-%% the necessary files at the nodes in question. That must be done by the user
-%% of the tool, guided by the return value from this function.
-%% Note that there will be two types of trace case files. Regular trace case
-%% files and binaries returned from this function. The latter contains the
-%% inviso commands which have been executed. Note that the order amongst the
-%% trace cases and binaries is of importance (otherwise they will be redone in
-%% an incorrect order).
-get_autostart_data(Dependency) ->
- gen_server:call(?MODULE,{get_autostart_data,Dependency},?CALL_TIMEOUT).
-get_autostart_data(Nodes,Dependency) ->
- gen_server:call(?MODULE,{get_autostart_data,{Nodes,Dependency}},?CALL_TIMEOUT).
-%% -----------------------------------------------------------------------------
-
-%% get_activities()={ok,Ongoing} | {error,Reason}
-%% Ongoing=list(); [ [TraceCases] [,Reactivators] ]
-%% TraceCases={tracecases,TraceCaseList}
-%% TraceCaseList=[{{TCname,Id},Phase},...]
-%% Phase=activating | deactivating
-%% Reactivators={reactivating_nodes,ReactivatingNodes}
-%% ReactivatingNodes=[Node,...]
-%% Returns a list of assynchronous tracecases and nodes doing reactivation at
-%% this momement. This can be useful to implement "home brewn" synchronization,
-%% waiting for the runtime components to reach a certain state.
-get_activities() ->
- gen_server:call(?MODULE,get_activities,?CALL_TIMEOUT).
-%% -----------------------------------------------------------------------------
-
-%% get_status(Node)={ok,StateStatus} | {error,Reason}
-%% StateStatus={State,Status} | reactivating | down
-%% State=tracing | inactive | trace_failure
-%% Status=running | suspended
-get_node_status() ->
- get_node_status(local_runtime).
-get_node_status(Node) ->
- gen_server:call(?MODULE,{get_node_status,Node},?CALL_TIMEOUT).
-%% -----------------------------------------------------------------------------
-
-%% get_session_data()={ok,{Status,SessionNr,TDGargs}} | {error,Reason}
-%% Status=tracing | not_tracing, info about current/last session.
-%% SessionNr=integer()
-%% TDGargs=list(), list of the arguments that will be given to the tracer data
-%% generator function (not including the leading Nodes list).
-%% Returns data about the current or last session.
-get_session_data() ->
- gen_server:call(?MODULE,get_session_data,?CALL_TIMEOUT).
-%% -----------------------------------------------------------------------------
-
-%% flush()={ok,NodeResults} | NodeResult | {error,Reason}
-%% flush(Nodes)={ok,NodesResults} | {error,Reason}
-%% NodeResults=[{Node,NodeResult},...]
-%% NodeResult=ok | {error,Reason}
-%% Makes runtime components flush their trace ports.
-flush() ->
- gen_server:call(?MODULE,flush,?CALL_TIMEOUT).
-flush(Nodes) ->
- gen_server:call(?MODULE,{flush,Nodes},?CALL_TIMEOUT).
-%% -----------------------------------------------------------------------------
-
-%% get_loopdata()=#ld
-%% Debug API returning the internal loopdata structure. See #ld above for details.
-get_loopdata() ->
- gen_server:call(?MODULE,get_loopdata,?CALL_TIMEOUT).
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% Internal APIs.
-%% -----------------------------------------------------------------------------
-
-%% tc_executer_reply(To,Reply)=nothing significant
-%% To=pid()
-%% Reply=term()
-%% Internal API used by a trace case executer process to signal its completion.
-tc_executer_reply(To,Reply) ->
- gen_server:cast(To,{tc_executer_reply,Reply}).
-%% -----------------------------------------------------------------------------
-
-%% Internal API used by a reactivator process indicating it is done with the
-%% history log it has got so far.
-%% Timeout set to infinity since the tool may be busy, then the reactivator just
-%% have to wait. If the tool crashes the reactivator will be go down too automatically.
-reactivator_reply(TPid,Counter) ->
- gen_server:call(TPid,{reactivator_reply,{Counter,self()}},infinity).
-%% -----------------------------------------------------------------------------
-
-
-%% =============================================================================
-%% gen_server implementation.
-%% =============================================================================
-
-init(Config) ->
- case fetch_configuration(Config) of % From conf-file and Config.
+ gen_server:call(?MODULE,{reconnect_nodes,Nodes},?CALL_TIMEOUT).
+%% -----------------------------------------------------------------------------
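+%% Example (editorial sketch, hypothetical node name): reconnecting a node that
+%% has come back after a crash; each node gets its own result tuple:
+%%   {ok,NodeResults} = inviso_tool:reconnect_nodes(['node1@host']),
+%%   %% e.g. NodeResults = [{'node1@host',{ok,{inactive,running}}}]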
+
+%% start_session()={ok,{SessionNr,InvisoReturn}} | {error,Reason}
+%% start_session(MoreTDGargs)=
+%% MoreTDGargs=list(), prepended to the fixed list of args used when calling the
+%% tracer data generator function.
+%% SessionNr=integer(), trace sessions are numbered by the tool.
+%% InvisoReturn=If successful inviso call, the return value from inviso.
+%% Note that individual nodes may be unsuccessful. See inviso:init_tracing/1
+%% Initiates tracing at all participating nodes.
+start_session() ->
+ start_session([]).
+start_session(MoreTDGargs) ->
+ gen_server:call(?MODULE,{start_session,MoreTDGargs},?CALL_TIMEOUT).
+%% -----------------------------------------------------------------------------
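+%% Example (editorial sketch): starting a numbered trace session on all
+%% participating nodes, with no extra tracer data generator arguments:
+%%   {ok,{SessionNr,_InvisoReturn}} = inviso_tool:start_session(),
+%%   io:format("trace session ~p started~n",[SessionNr]).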
+
+%% reinitiate_session(Nodes)={ok,InvisoReturn} | {error,Reason}
+%% InvisoReturn=If successful inviso call, the return value from inviso:init_tracing/1.
+%% Note that individual nodes may be unsuccessful. Mentioned nodes not part
+%% of the tool or not in state inactive will be marked as failing by the
+%% tool in the InvisoReturn.
+%% To reinitiate a node means to (inviso) init tracing at it according to saved
+%% tracer data generator arguments for the current session and then redo the current
+%% history to bring it up to speed. Note that the tool must be running a session
+%% for reinitiate to work.
+reinitiate_session() ->
+ gen_server:call(?MODULE,{reinitiate_session,local_runtime},?CALL_TIMEOUT).
+reinitiate_session(Nodes) ->
+ gen_server:call(?MODULE,{reinitiate_session,Nodes},?CALL_TIMEOUT).
+%% -----------------------------------------------------------------------------
+
+%% restore_session()=
+%% restore_session(MoreTDGargs)=
+%% restore_session(FileName)=
+%% restore_session(FileName,MoreTDGargs)={ok,{SessionNr,InvisoReturn}} | {error,Reason}
+%% The first two clauses will start a new session using the last history. This
+%% implies that there must have been a session running prior to this one.
+%% The last two clauses start a session, read a history file and execute the
+%% trace cases in it at all inactive nodes.
+%% In both cases the reused or read history becomes the current history, just as
+%% if the session had been initiated manually. The tool may not
+%% have a session ongoing, and nodes already tracing (nodes which were adopted)
+%% are not affected. Just like when starting a session manually.
+restore_session() ->
+ restore_session([]).
+restore_session([]) -> % This cant be a filename.
+ gen_server:call(?MODULE,{restore_session,[]},?CALL_TIMEOUT);
+restore_session(FileNameOrMoreTDGargs) ->
+ case is_string(FileNameOrMoreTDGargs) of
+ true -> % Interpret it as a filename.
+ restore_session(FileNameOrMoreTDGargs,[]);
+ false -> % The we want to use last session history!
+ gen_server:call(?MODULE,{restore_session,FileNameOrMoreTDGargs},?CALL_TIMEOUT)
+ end.
+restore_session(FileName,MoreTDGargs) ->
+ gen_server:call(?MODULE,{restore_session,{FileName,MoreTDGargs}},?CALL_TIMEOUT).
+%% -----------------------------------------------------------------------------
+
+%% stop_session()={ok,{SessionNr,Result}} | {error,Reason}
+%% SessionNr=integer()
+%% Result=[{Node,NodeResult},...] | NonDistributedNodeResult
+%% NodeResult=ok | {error,Reason}
+%% NonDistributedNodeResult=[ok] | []
+%% Stops inviso tracing at all participating nodes. The inviso runtime components
+%% will go to state idle. It is now time to fetch the logfiles. Will most often
+%% succeed. Will only return an error if the entire inviso call returned an
+%% error. Not if an individual node failed stop tracing successfully.
+%% Any running trace case, including reactivator processes will be terminated.
+stop_session() ->
+ gen_server:call(?MODULE,stop_session,?CALL_TIMEOUT).
+%% -----------------------------------------------------------------------------
+
+%% reset_nodes()=NodeResult | {error,Reason}
+%% reset_nodes(Nodes)={ok,NodeResults} | {error,Reason}
+%% NodeResults and NodeResult as returned by inviso:clear/1 and /0.
+%% Clear nodes from trace flags, trace patterns and meta trace patterns. The tool
+%% must not be having a running session.
+reset_nodes() ->
+ gen_server:call(?MODULE,{reset_nodes,local_runtime},?CALL_TIMEOUT).
+reset_nodes(Nodes) ->
+ gen_server:call(?MODULE,{reset_nodes,Nodes},?CALL_TIMEOUT).
+%% -----------------------------------------------------------------------------
+
+%% atc(TC,Id,Vars)=ok | {error,Reason}
+%% TC=atom(), name of the trace case.
+%% Id=term(), given name of this usage of TC.
+%% Vars=list(), list of variable bindings [{Var,Value},...], Var=atom(),Value=term().
+%% Function activating a trace case. The trace case must be defined in the
+%% trace case dictionary. The 'ok' return value is only a signal that the
+%% trace case has started successfully. It may then run for as long as it is
+%% programmed to run. An erroneous return value does not necessarily mean that
+%% the trace case has not been executed. It rather means that it is undetermined
+%% what happened.
+atc(TC,Id,Vars) ->
+ gen_server:call(?MODULE,{atc,{TC,Id,Vars}},?CALL_TIMEOUT).
+%% -----------------------------------------------------------------------------
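+%% Example (editorial sketch, hypothetical trace case name and bindings): an
+%% asynchronous activation that is later cancelled with dtc/2 under the same Id:
+%%   ok = inviso_tool:atc(gen_server_calls,my_id,[{'Mod',my_module}]),
+%%   ...
+%%   ok = inviso_tool:dtc(gen_server_calls,my_id).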
+
+%% sync_atc(TC,Id,Vars)=Result | {error,Reason}
+%% sync_atc(TC,Id,Vars,TimeOut)=
+%% Result=term(), whatever is returned by the last expression in the trace case.
+%% TimeOut=integer() | infinity, the max wait time for the trace case to finish.
+%% As atc/3 but waits for the trace case to finish.
+sync_atc(TC,Id,Vars) ->
+ gen_server:call(?MODULE,{sync_atc,{TC,Id,Vars,?SYNC_TC_TIMEOUT}},?CALL_TIMEOUT).
+sync_atc(TC,Id,Vars,TimeOut) ->
+ gen_server:call(?MODULE,{sync_atc,{TC,Id,Vars,TimeOut}},?CALL_TIMEOUT).
+%% -----------------------------------------------------------------------------
+
+%% sync_rtc(TC,Vars)=Result | {error,Reason}
+%% sync_rtc(TC,Vars,TimeOut)=
+%% Result=term(), whatever is returned by the last expression in the trace case.
+%% TimeOut=integer() | infinity, the max wait time for the trace case to finish.
+%% As sync_atc/3 but the trace case is not marked as activated. It is merely placed
+%% in the history. Hence with sync_rtc a trace case can be "activated" multiple times.
+sync_rtc(TC,Vars) ->
+ gen_server:call(?MODULE,{sync_rtc,{TC,Vars,?SYNC_TC_TIMEOUT}},?CALL_TIMEOUT).
+sync_rtc(TC,Vars,TimeOut) ->
+ gen_server:call(?MODULE,{sync_rtc,{TC,Vars,TimeOut}},?CALL_TIMEOUT).
+%% -----------------------------------------------------------------------------
+
+%% dtc(TC,Id)=ok | {error,Reason}
+%% Deactivates a previously activated trace case. This function can only be used
+%% on trace cases that have a deactivation defined in the trace case dictionary.
+%% There is of course really no difference between a file containing an activation
+%% compared to a deactivation. But to be able to cancel activations out of the
+%% history log, a defined deactivation is essential.
+%% As with activation, the returned 'ok' simply indicates the start of the trace
+%% case.
+dtc(TC,Id) ->
+ gen_server:call(?MODULE,{dtc,{TC,Id}},?CALL_TIMEOUT).
+%% -----------------------------------------------------------------------------
+
+%% sync_dtc(TC,Id)=Result | {error,Reason}
+%% sync_dtc(TC,Id,TimeOut)=
+%% Synchronous deactivation of trace case. See dtc/2 and sync_atc/3 for
+%% parameters.
+sync_dtc(TC,Id) ->
+ gen_server:call(?MODULE,{sync_dtc,{TC,Id,?SYNC_TC_TIMEOUT}},?CALL_TIMEOUT).
+sync_dtc(TC,Id,TimeOut) ->
+ gen_server:call(?MODULE,{sync_dtc,{TC,Id,TimeOut}},?CALL_TIMEOUT).
+%% -----------------------------------------------------------------------------
+
+%% inviso(Cmd,Args)=Result
+%% Cmd=atom(), the (inviso) function name that shall be called.
+%% Args=list(), the arguments to Cmd.
+%% Result=term(), the result from the inviso function call.
+%% This function executes a Cmd in the inviso tool context. The inviso call will
+%% be logged in the history log and thereby repeated in case of a reactivation.
+%% Note that this function is intended for use with the inviso function API without
+%% specifying any nodes, since the function call is supposed to be carried out on
+%% all nodes.
+%% When these functions are written to an autostart config file by the tool there
+%% is supposed to be a translation to inviso_rt functions.
+inviso(Cmd,Args) ->
+ gen_server:call(?MODULE,{inviso,{Cmd,Args}},?CALL_TIMEOUT).
+%% -----------------------------------------------------------------------------
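+%% Example (editorial sketch): routing an inviso call through the tool so that
+%% it is recorded in the history log; assumes inviso:tf/2 takes a pid
+%% specification and a flag list, as its inclusion in ?INVISO_CMDS suggests:
+%%   inviso_tool:inviso(tf,[all,[call,timestamp]]).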
+
+%% reactivate()=ok | {error,Reason}
+%% reactivate(Node)=ok | {error,Reason}
+%% Moves a runtime component from suspended to the state running. This can be
+%% done for both tracing and inactive nodes. The latter is necessary since you
+%% may have stopped tracing with a node suspended.
+%% In case the node is tracing, commands in the command history log are redone at
+%% the node in question.
+%% Note that this function returns 'ok' before the node is running. This is because
+%% the reactivated history is done by a separate process and there is no guarantee
+%% when it will be ready. The reactivated node will not be marked as running in
+%% the tool until done reactivating.
+%% Further it is important to understand that if there are "ongoing" trace cases
+%% (i.e. trace case scripts that are currently executing) and this node was running
+%% at the time that tracecase script started to execute, the list of nodes bound
+%% to the Nodes variable in that script executer includes this node. Making it
+%% no longer suspended makes it start executing inviso commands from wherever
+%% such are called. Hence the reactivation may be interfered with by that trace case.
+reactivate() -> % Non-distributed API.
+ reactivate(node()).
+reactivate(Node) ->
+ gen_server:call(?MODULE,{reactivate,Node},?CALL_TIMEOUT).
+%% -----------------------------------------------------------------------------
+
+%% save_history(FileName)={ok,AbsFileName} | {error,Reason}
+%% Saves the currently collected command history log to a file. The file will
+%% be a binary-file. If FileName is an absolute path, it will be saved to that
+%% file. Otherwise the history dir will be used. If no history dir was specified
+%% the tool dir will be used, prepended to FileName.
+save_history(FileName) ->
+ gen_server:call(?MODULE,{save_history,FileName},?CALL_TIMEOUT).
+%% -----------------------------------------------------------------------------
+
+%% get_autostart_data(Nodes,Dependency)={ok,{AutoStartData,NodeResults}} |
+%% {ok,{AutoStartData,NodeResult}} | {error,Reason}
+%% Dependency=inviso dependency parameter which will be used for every
+%% autostarted runtime component (included in Options).
+%% NodeResults=[{Node,NodeResult},...]
+%% NodeResult={ok,{Options,{tdg,{M,F,CompleteTDGargs}}}} | {error,Reason}
+%% Options=add_nodes options to the inviso runtime component.
+%% M,F=atom(), the module and function for tracerdata generation.
+%% CompleteTDGargs=list(), all arguments as they are given to the tracer
+%% data generator function.
+%% AutoStartData=[CaseSpec,...]
+%% CaseSpec={file,{FileName,Bindings}} | {mfa,{M,F,Args}}
+%% FileName=string(), pointing out the trace case file. Note that this
+%% is the same as the path used by the tool.
+%% Bindings=Var bindings used according to the history for the
+%% invocation.
+%% M,F=atom(), the function that shall be called (normally some inviso).
+%% Args=list(), the actual arguments. Note that this may contain things
+%% which can not be written to file (ports, pids,...).
+%% Function returning information on how to autostart a node to make it trace
+%% according to the current history. The inviso_tool does not know how to write
+%% the necessary files at the nodes in question. That must be done by the user
+%% of the tool, guided by the return value from this function.
+%% Note that there will be two types of trace case files. Regular trace case
+%% files and binaries returned from this function. The latter contains the
+%% inviso commands which have been executed. Note that the order amongst the
+%% trace cases and binaries is of importance (otherwise they will be redone in
+%% an incorrect order).
+get_autostart_data(Dependency) ->
+ gen_server:call(?MODULE,{get_autostart_data,Dependency},?CALL_TIMEOUT).
+get_autostart_data(Nodes,Dependency) ->
+ gen_server:call(?MODULE,{get_autostart_data,{Nodes,Dependency}},?CALL_TIMEOUT).
+%% -----------------------------------------------------------------------------
+
+%% get_activities()={ok,Ongoing} | {error,Reason}
+%% Ongoing=list(); [ [TraceCases] [,Reactivators] ]
+%% TraceCases={tracecases,TraceCaseList}
+%% TraceCaseList=[{{TCname,Id},Phase},...]
+%% Phase=activating | deactivating
+%% Reactivators={reactivating_nodes,ReactivatingNodes}
+%% ReactivatingNodes=[Node,...]
+%% Returns a list of asynchronous trace cases and nodes doing reactivation at
+%% this moment. This can be useful to implement "home-brewed" synchronization,
+%% waiting for the runtime components to reach a certain state.
+get_activities() ->
+ gen_server:call(?MODULE,get_activities,?CALL_TIMEOUT).
+%% -----------------------------------------------------------------------------
+
+%% get_node_status(Node)={ok,StateStatus} | {error,Reason}
+%% StateStatus={State,Status} | reactivating | down
+%% State=tracing | inactive | trace_failure
+%% Status=running | suspended
+get_node_status() ->
+ get_node_status(local_runtime).
+get_node_status(Node) ->
+ gen_server:call(?MODULE,{get_node_status,Node},?CALL_TIMEOUT).
+%% -----------------------------------------------------------------------------
+
+%% get_session_data()={ok,{Status,SessionNr,TDGargs}} | {error,Reason}
+%% Status=tracing | not_tracing, info about current/last session.
+%% SessionNr=integer()
+%% TDGargs=list(), list of the arguments that will be given to the tracer data
+%% generator function (not including the leading Nodes list).
+%% Returns data about the current or last session.
+get_session_data() ->
+ gen_server:call(?MODULE,get_session_data,?CALL_TIMEOUT).
+%% -----------------------------------------------------------------------------
+
+%% flush()={ok,NodeResults} | NodeResult | {error,Reason}
+%% flush(Nodes)={ok,NodesResults} | {error,Reason}
+%% NodeResults=[{Node,NodeResult},...]
+%% NodeResult=ok | {error,Reason}
+%% Makes runtime components flush their trace ports.
+flush() ->
+ gen_server:call(?MODULE,flush,?CALL_TIMEOUT).
+flush(Nodes) ->
+ gen_server:call(?MODULE,{flush,Nodes},?CALL_TIMEOUT).
+%% -----------------------------------------------------------------------------
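+%% Example (editorial sketch, hypothetical node name): flushing trace ports
+%% before fetching the log files at the end of a session:
+%%   {ok,_NodeResults} = inviso_tool:flush(['node1@host']).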
+
+%% get_loopdata()=#ld
+%% Debug API returning the internal loopdata structure. See #ld above for details.
+get_loopdata() ->
+ gen_server:call(?MODULE,get_loopdata,?CALL_TIMEOUT).
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% Internal APIs.
+%% -----------------------------------------------------------------------------
+
+%% tc_executer_reply(To,Reply)=nothing significant
+%% To=pid()
+%% Reply=term()
+%% Internal API used by a trace case executer process to signal its completion.
+tc_executer_reply(To,Reply) ->
+ gen_server:cast(To,{tc_executer_reply,Reply}).
+%% -----------------------------------------------------------------------------
+
+%% Internal API used by a reactivator process indicating it is done with the
+%% history log it has got so far.
+%% Timeout is set to infinity since the tool may be busy; then the reactivator just
+%% has to wait. If the tool crashes the reactivator will go down too, automatically.
+reactivator_reply(TPid,Counter) ->
+ gen_server:call(TPid,{reactivator_reply,{Counter,self()}},infinity).
+%% -----------------------------------------------------------------------------
+
+
+%% =============================================================================
+%% gen_server implementation.
+%% =============================================================================
+
+init(Config) ->
+ case fetch_configuration(Config) of % From conf-file and Config.
{ok,LD} when is_record(LD,ld) ->
- case start_inviso_at_c_node(LD) of
- {ok,CPid} ->
- LD2=start_runtime_components(LD),
- LD3=read_trace_case_definitions(LD2),
- process_flag(trap_exit,true),
- start_subscribe_inviso_events(LD3#ld.c_node),
- {ok,LD3#ld{c_pid=CPid}};
- {error,Reason} -> % Most likely already running.
- {stop,{error,Reason}}
- end;
- {error,Reason} ->
- {stop,{error,{start_up,Reason}}}
- end.
-%% -----------------------------------------------------------------------------
-
-%% Help function starting the inviso control component at node c_node, or "here"
-%% if it is not a distributed network.
-start_inviso_at_c_node(#ld{c_node=undefined}) -> % Non distributed case.
- case inviso:start() of
- {ok,Pid} ->
- {ok,Pid};
- {error,Reason} ->
- {error,Reason}
- end;
-start_inviso_at_c_node(#ld{c_node=CNode}) ->
- case rpc:call(CNode,inviso,start,[]) of
- {ok,Pid} ->
- {ok,Pid};
- {error,{already_started,_}} -> % A control component already started.
- {error,{inviso_control_already_running,CNode}};
- {error,Reason} ->
- {error,Reason};
- {badrpc,Reason} ->
- {error,{inviso_control_node_error,Reason}}
- end.
-%% -----------------------------------------------------------------------------
-
-%% Help function starting the runtime components at all particapting nodes.
-%% It also updates the nodes structure in the #ld to indicate which nodes where
-%% successfully started. Returns a new #ld.
-%% Note that a runtime component may actually be running at one or several nodes.
-%% This is supposed to be the result of an (wanted) autostart. Meaning that the
-%% inviso tool can not handle the situation if a runtime component is not doing
-%% what it is supposed to do. In case a runtime component is already running it
-%% will be adopted and therefore marked as running.
-start_runtime_components(LD=#ld{c_node=undefined}) ->
- start_runtime_components_2(local_runtime,undefined,LD);
-start_runtime_components(LD=#ld{c_node=CNode,nodes=NodesD}) ->
- start_runtime_components_2(get_all_nodenames_nodes(NodesD),CNode,LD).
-start_runtime_components(Nodes,LD=#ld{c_node=CNode}) ->
- start_runtime_components_2(Nodes,CNode,LD).
-
-start_runtime_components_2(local_runtime,CNode,LD=#ld{optg=OptG}) ->
- Opts=start_runtime_components_mk_opts(local_runtime,OptG),
- case inviso:add_node(mk_rt_tag(),Opts) of
- {ok,NAnsw} -> % Should be more clever really!
- NewNodesD=update_added_nodes(CNode,{ok,NAnsw},LD#ld.nodes),
- LD#ld{nodes=NewNodesD};
- {error,_Reason} ->
- LD
- end;
-start_runtime_components_2([Node|Rest],CNode,LD=#ld{optg=OptG}) ->
- Opts=start_runtime_components_mk_opts(Node,OptG),
- case rpc:call(CNode,inviso,add_nodes,[[Node],mk_rt_tag(),Opts]) of
- {ok,NodeResults} ->
- NewNodesD=update_added_nodes(CNode,NodeResults,LD#ld.nodes),
- start_runtime_components_2(Rest,CNode,LD#ld{nodes=NewNodesD});
- {error,_Reason} ->
- start_runtime_components_2(Rest,CNode,LD);
- {badrpc,_Reason} ->
- start_runtime_components_2(Rest,CNode,LD)
- end;
-start_runtime_components_2([],_,LD) ->
- LD.
-
-start_runtime_components_mk_opts(Node,{M,F,Args}) ->
- case catch apply(M,F,[Node|Args]) of
+ case start_inviso_at_c_node(LD) of
+ {ok,CPid} ->
+ LD2=start_runtime_components(LD),
+ LD3=read_trace_case_definitions(LD2),
+ process_flag(trap_exit,true),
+ start_subscribe_inviso_events(LD3#ld.c_node),
+ {ok,LD3#ld{c_pid=CPid}};
+ {error,Reason} -> % Most likely already running.
+ {stop,{error,Reason}}
+ end;
+ {error,Reason} ->
+ {stop,{error,{start_up,Reason}}}
+ end.
+%% -----------------------------------------------------------------------------
+
+%% Help function starting the inviso control component at node c_node, or "here"
+%% if it is not a distributed network.
+start_inviso_at_c_node(#ld{c_node=undefined}) -> % Non distributed case.
+ case inviso:start() of
+ {ok,Pid} ->
+ {ok,Pid};
+ {error,Reason} ->
+ {error,Reason}
+ end;
+start_inviso_at_c_node(#ld{c_node=CNode}) ->
+ case rpc:call(CNode,inviso,start,[]) of
+ {ok,Pid} ->
+ {ok,Pid};
+ {error,{already_started,_}} -> % A control component already started.
+ {error,{inviso_control_already_running,CNode}};
+ {error,Reason} ->
+ {error,Reason};
+ {badrpc,Reason} ->
+ {error,{inviso_control_node_error,Reason}}
+ end.
+%% -----------------------------------------------------------------------------
+
+%% Help function starting the runtime components at all participating nodes.
+%% It also updates the nodes structure in the #ld to indicate which nodes were
+%% successfully started. Returns a new #ld.
+%% Note that a runtime component may already be running at one or several nodes.
+%% This is assumed to be the result of a (wanted) autostart, meaning that the
+%% inviso tool can not handle the situation where a runtime component is not doing
+%% what it is supposed to do. If a runtime component is already running it
+%% will be adopted and therefore marked as running.
+start_runtime_components(LD=#ld{c_node=undefined}) ->
+ start_runtime_components_2(local_runtime,undefined,LD);
+start_runtime_components(LD=#ld{c_node=CNode,nodes=NodesD}) ->
+ start_runtime_components_2(get_all_nodenames_nodes(NodesD),CNode,LD).
+start_runtime_components(Nodes,LD=#ld{c_node=CNode}) ->
+ start_runtime_components_2(Nodes,CNode,LD).
+
+start_runtime_components_2(local_runtime,CNode,LD=#ld{optg=OptG}) ->
+ Opts=start_runtime_components_mk_opts(local_runtime,OptG),
+ case inviso:add_node(mk_rt_tag(),Opts) of
+ {ok,NAnsw} -> % Should be more clever really!
+ NewNodesD=update_added_nodes(CNode,{ok,NAnsw},LD#ld.nodes),
+ LD#ld{nodes=NewNodesD};
+ {error,_Reason} ->
+ LD
+ end;
+start_runtime_components_2([Node|Rest],CNode,LD=#ld{optg=OptG}) ->
+ Opts=start_runtime_components_mk_opts(Node,OptG),
+ case rpc:call(CNode,inviso,add_nodes,[[Node],mk_rt_tag(),Opts]) of
+ {ok,NodeResults} ->
+ NewNodesD=update_added_nodes(CNode,NodeResults,LD#ld.nodes),
+ start_runtime_components_2(Rest,CNode,LD#ld{nodes=NewNodesD});
+ {error,_Reason} ->
+ start_runtime_components_2(Rest,CNode,LD);
+ {badrpc,_Reason} ->
+ start_runtime_components_2(Rest,CNode,LD)
+ end;
+start_runtime_components_2([],_,LD) ->
+ LD.
+
+start_runtime_components_mk_opts(Node,{M,F,Args}) ->
+ case catch apply(M,F,[Node|Args]) of
{ok,Opts} when is_list(Opts) ->
- start_runtime_component_mk_opts_add_dependency(Opts);
- _ ->
- [?DEFAULT_DEPENDENCY]
- end.
-
-%% The options generator is not supposed to generate the dependency. Hence this
-%% function adds and if necessary removes an incorrectly added dependency tag.
-start_runtime_component_mk_opts_add_dependency(Opts) ->
- case lists:keysearch(dependency,1,Opts) of
- {value,_} -> % Not allowed!!!
- [?DEFAULT_DEPENDENCY|lists:keydelete(dependecy,1,Opts)];
- false ->
- [?DEFAULT_DEPENDENCY|Opts]
- end.
-%% -----------------------------------------------------------------------------
-
-%% Help function subscribing to inviso events from the inviso controller. This
-%% will make it possible to follow runtime components going down.
-start_subscribe_inviso_events(undefined) ->
- inviso:subscribe();
-start_subscribe_inviso_events(CNode) ->
- rpc:call(CNode,inviso,subscribe,[self()]). % Don't want the rpc-proc to subscribe!
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% gen_server handle call back functions.
-%% -----------------------------------------------------------------------------
-
-handle_call({stop,UntouchedNodes},_From,LD=#ld{nodes=NodesD,c_node=CNode,keep_nodes=KeepNodes})
- when is_list(UntouchedNodes) ->
- {stop,
- normal,
- remove_all_trace_patterns(CNode,
- UntouchedNodes++KeepNodes,
- get_available_nodes(NodesD)),
- LD};
-handle_call({stop,BadArg},_From,LD) ->
- {reply,{error,{badarg,BadArg}},LD};
-
-handle_call({reconnect_nodes,Nodes},_From,LD) ->
- case h_reconnect_nodes(Nodes,LD) of
- {ok,{Nodes2,NodesErr,NewLD}} ->
- if
- Nodes==local_runtime ->
- {reply,
- build_reconnect_nodes_reply(Nodes,Nodes2,NodesErr,NewLD#ld.nodes),
- NewLD};
+ start_runtime_component_mk_opts_add_dependency(Opts);
+ _ ->
+ [?DEFAULT_DEPENDENCY]
+ end.
+
+%% The options generator is not supposed to generate the dependency. Hence this
+%% function adds the default dependency and, if necessary, first removes an
+%% incorrectly added dependency tag.
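+%% For illustration, a minimal options generator registered as {M,F,Args} could
+%% look like the sketch below; the tool applies it as apply(M,F,[Node|Args]) (see
+%% start_runtime_components_mk_opts/2 above). The function name is a placeholder
+%% and the returned option list is deliberately left empty:
+%%
+%%   rt_options(local_runtime) -> {ok,[]};
+%%   rt_options(Node) when is_atom(Node) -> {ok,[]}.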
+start_runtime_component_mk_opts_add_dependency(Opts) ->
+ case lists:keysearch(dependency,1,Opts) of
+ {value,_} -> % Not allowed!!!
+            [?DEFAULT_DEPENDENCY|lists:keydelete(dependency,1,Opts)];
+ false ->
+ [?DEFAULT_DEPENDENCY|Opts]
+ end.
+%% -----------------------------------------------------------------------------
+
+%% Help function subscribing to inviso events from the inviso controller. This
+%% will make it possible to follow runtime components going down.
+start_subscribe_inviso_events(undefined) ->
+ inviso:subscribe();
+start_subscribe_inviso_events(CNode) ->
+ rpc:call(CNode,inviso,subscribe,[self()]). % Don't want the rpc-proc to subscribe!
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% gen_server callback functions.
+%% -----------------------------------------------------------------------------
+
+handle_call({stop,UntouchedNodes},_From,LD=#ld{nodes=NodesD,c_node=CNode,keep_nodes=KeepNodes})
+ when is_list(UntouchedNodes) ->
+ {stop,
+ normal,
+ remove_all_trace_patterns(CNode,
+ UntouchedNodes++KeepNodes,
+ get_available_nodes(NodesD)),
+ LD};
+handle_call({stop,BadArg},_From,LD) ->
+ {reply,{error,{badarg,BadArg}},LD};
+
+handle_call({reconnect_nodes,Nodes},_From,LD) ->
+ case h_reconnect_nodes(Nodes,LD) of
+ {ok,{Nodes2,NodesErr,NewLD}} ->
+ if
+ Nodes==local_runtime ->
+ {reply,
+ build_reconnect_nodes_reply(Nodes,Nodes2,NodesErr,NewLD#ld.nodes),
+ NewLD};
is_list(Nodes) ->
- {reply,
- {ok,build_reconnect_nodes_reply(Nodes,Nodes2,NodesErr,NewLD#ld.nodes)},
- NewLD}
- end;
- {error,Reason} ->
- {reply,{error,Reason},LD}
- end;
-
-handle_call({start_session,MoreTDGargs},_From,LD=#ld{session_state=SState}) ->
- case is_tracing(SState) of
- false -> % No session running.
- if
+ {reply,
+ {ok,build_reconnect_nodes_reply(Nodes,Nodes2,NodesErr,NewLD#ld.nodes)},
+ NewLD}
+ end;
+ {error,Reason} ->
+ {reply,{error,Reason},LD}
+ end;
+
+handle_call({start_session,MoreTDGargs},_From,LD=#ld{session_state=SState}) ->
+ case is_tracing(SState) of
+ false -> % No session running.
+ if
is_list(MoreTDGargs) ->
- DateTime=calendar:universal_time(),
- {M,F,Args}=LD#ld.tdg,
- TDGargs=inviso_tool_lib:mk_tdg_args(DateTime,MoreTDGargs++Args),
- case h_start_session(M,F,TDGargs,LD) of
- {ok,{SessionNr,ReturnVal,NewLD}} -> % No nodes to initiate.
- NewLD2=add_initial_tcs_to_history(NewLD#ld.initial_tcs,
- NewLD#ld{chl=mk_chl(LD#ld.chl)}),
- {reply,
- {ok,{SessionNr,ReturnVal}},
- NewLD2#ld{session_state=tracing_sessionstate()}};
- {ok,{SessionNr,ReturnVal,Nodes2,NewLD}} ->
- NewLD2=do_initial_tcs(NewLD#ld.initial_tcs,
- Nodes2,
- NewLD#ld{chl=mk_chl(LD#ld.chl)}),
- {reply,
- {ok,{SessionNr,ReturnVal}},
- NewLD2#ld{session_state=tracing_sessionstate()}};
- {error,Reason} ->
- {reply,{error,Reason},LD}
- end;
- true -> % Faulty TDGargs.
- {reply,{error,{badarg,MoreTDGargs}},LD}
- end;
- true ->
- {reply,{error,session_already_started},LD}
- end;
-
-handle_call({reinitiate_session,Nodes},_From,LD=#ld{session_state=SState}) ->
- case is_tracing(SState) of
- true -> % The tool must be tracing.
- {M,F,_Args}=LD#ld.tdg,
- TDGargs=get_latest_tdgargs_tracer_data(LD#ld.tracer_data),
- case h_reinitiate_session(Nodes,M,F,TDGargs,LD) of
- {ok,{NodesErr,ReturnVal,NewLD}} ->
- {reply,
- {ok,build_reinitiate_session_reply(Nodes,NodesErr,ReturnVal)},
- NewLD};
- {error,Reason} ->
- {reply,{error,Reason},LD}
- end;
- false -> % Must have a running session!
- {reply,{error,no_session},LD}
- end;
-
-handle_call({restore_session,{FileName,MoreTDGargs}},_From,LD=#ld{chl=OldCHL})
+ DateTime=calendar:universal_time(),
+ {M,F,Args}=LD#ld.tdg,
+ TDGargs=inviso_tool_lib:mk_tdg_args(DateTime,MoreTDGargs++Args),
+ case h_start_session(M,F,TDGargs,LD) of
+ {ok,{SessionNr,ReturnVal,NewLD}} -> % No nodes to initiate.
+ NewLD2=add_initial_tcs_to_history(NewLD#ld.initial_tcs,
+ NewLD#ld{chl=mk_chl(LD#ld.chl)}),
+ {reply,
+ {ok,{SessionNr,ReturnVal}},
+ NewLD2#ld{session_state=tracing_sessionstate()}};
+ {ok,{SessionNr,ReturnVal,Nodes2,NewLD}} ->
+ NewLD2=do_initial_tcs(NewLD#ld.initial_tcs,
+ Nodes2,
+ NewLD#ld{chl=mk_chl(LD#ld.chl)}),
+ {reply,
+ {ok,{SessionNr,ReturnVal}},
+ NewLD2#ld{session_state=tracing_sessionstate()}};
+ {error,Reason} ->
+ {reply,{error,Reason},LD}
+ end;
+ true -> % Faulty TDGargs.
+ {reply,{error,{badarg,MoreTDGargs}},LD}
+ end;
+ true ->
+ {reply,{error,session_already_started},LD}
+ end;
+
+handle_call({reinitiate_session,Nodes},_From,LD=#ld{session_state=SState}) ->
+ case is_tracing(SState) of
+ true -> % The tool must be tracing.
+ {M,F,_Args}=LD#ld.tdg,
+ TDGargs=get_latest_tdgargs_tracer_data(LD#ld.tracer_data),
+ case h_reinitiate_session(Nodes,M,F,TDGargs,LD) of
+ {ok,{NodesErr,ReturnVal,NewLD}} ->
+ {reply,
+ {ok,build_reinitiate_session_reply(Nodes,NodesErr,ReturnVal)},
+ NewLD};
+ {error,Reason} ->
+ {reply,{error,Reason},LD}
+ end;
+ false -> % Must have a running session!
+ {reply,{error,no_session},LD}
+ end;
+
+handle_call({restore_session,{FileName,MoreTDGargs}},_From,LD=#ld{chl=OldCHL})
when is_list(MoreTDGargs) ->
- case is_tracing(LD#ld.session_state) of
- false ->
- case catch make_absolute_path(FileName,LD#ld.dir) of
+ case is_tracing(LD#ld.session_state) of
+ false ->
+ case catch make_absolute_path(FileName,LD#ld.dir) of
AbsFileName when is_list(AbsFileName) ->
- case file:read_file(AbsFileName) of
- {ok,Bin} ->
- if
+ case file:read_file(AbsFileName) of
+ {ok,Bin} ->
+ if
is_list(MoreTDGargs) ->
- case catch replace_history_chl(OldCHL,
- binary_to_term(Bin)) of
- {ok,CHL} -> % The file was well formatted.
- case h_restore_session(MoreTDGargs,
- LD#ld{chl=CHL}) of
- {ok,{SessionNr,ReturnVal,NewLD}} ->
- {reply,
- {ok,{SessionNr,ReturnVal}},
- NewLD#ld{session_state=
- tracing_sessionstate()}};
- {error,Reason} ->
- {reply,{error,Reason},LD}
- end;
- Error -> % Badly formatted file.
- {reply,
- {error,{bad_file,{AbsFileName,Error}}},
- LD}
- end;
- true ->
- {reply,{error,{badarg,MoreTDGargs}},LD}
- end;
- {error,Reason} ->
- {reply,{error,{read_file,Reason}},LD}
- end;
- Error ->
- {reply,{error,{bad_filename,{FileName,Error}}},LD}
- end;
- true ->
- {reply,{error,session_already_started},LD}
- end;
-%% This is doing restore session on the current history.
-handle_call({restore_session,MoreTDGargs},_From,LD=#ld{chl=CHL}) ->
- case is_tracing(LD#ld.session_state) of
- false ->
- case history_exists_chl(CHL) of
- true -> % There is a history to redo.
- if
+ case catch replace_history_chl(OldCHL,
+ binary_to_term(Bin)) of
+ {ok,CHL} -> % The file was well formatted.
+ case h_restore_session(MoreTDGargs,
+ LD#ld{chl=CHL}) of
+ {ok,{SessionNr,ReturnVal,NewLD}} ->
+ {reply,
+ {ok,{SessionNr,ReturnVal}},
+ NewLD#ld{session_state=
+ tracing_sessionstate()}};
+ {error,Reason} ->
+ {reply,{error,Reason},LD}
+ end;
+ Error -> % Badly formatted file.
+ {reply,
+ {error,{bad_file,{AbsFileName,Error}}},
+ LD}
+ end;
+ true ->
+ {reply,{error,{badarg,MoreTDGargs}},LD}
+ end;
+ {error,Reason} ->
+ {reply,{error,{read_file,Reason}},LD}
+ end;
+ Error ->
+ {reply,{error,{bad_filename,{FileName,Error}}},LD}
+ end;
+ true ->
+ {reply,{error,session_already_started},LD}
+ end;
+%% This clause does a restore_session based on the current history.
+handle_call({restore_session,MoreTDGargs},_From,LD=#ld{chl=CHL}) ->
+ case is_tracing(LD#ld.session_state) of
+ false ->
+ case history_exists_chl(CHL) of
+ true -> % There is a history to redo.
+ if
is_list(MoreTDGargs) ->
- case h_restore_session(MoreTDGargs,LD) of
- {ok,{SessionNr,ReturnVal,NewLD}} ->
- {reply,
- {ok,{SessionNr,ReturnVal}},
- NewLD#ld{session_state=tracing_sessionstate()}};
- {error,Reason} ->
- {reply,{error,Reason},LD}
- end;
- true ->
- {reply,{error,{badarg,MoreTDGargs}},LD}
- end;
- false ->
- {reply,{error,no_history},LD}
- end;
- true ->
- {reply,{error,session_already_started},LD}
- end;
-
-%% To stop tracing means stop_tracing through the inviso API. But we must also
-%% remove any help processes executing inviso commands (trace case executers
-%% and reactivators).
-%% Note that to be really sure we should actually wait for EXIT-signals from those
-%% processes before returning a successful returnvalue to the caller. In theory
-%% those processes could issue an inviso call effecting a new trace session started
-%% with init_tracing shortly after the call to stop_tracing. But too complicated! :-)
-%% Further, stop-tracing is done on all nodes in our nodes structure. Regardless
-%% if the node is tracing or not
-handle_call(stop_session,_From,LD=#ld{session_state=SState,chl=CHL,reactivators=ReAct}) ->
- case is_tracing(SState) of
- true ->
- NewCHL=stop_all_tc_executer_chl(CHL), % Stop any running trace case proc.
- NewReAct=stop_all_reactivators(ReAct), % Stop any running reactivators.
- case h_stop_session(LD) of
- {ok,{SessionNr,Result}} ->
- NewNodesD=set_inactive_nodes(Result,LD#ld.nodes),
- {reply,
- {ok,{SessionNr,Result}},
- LD#ld{session_state=passive_sessionstate(),
- nodes=NewNodesD,
- chl=NewCHL,
- reactivators=NewReAct,
- started_initial_tcs=[]}};
- {error,Reason} -> % Now we're really in deep shit :-)
- {reply,{error,{unrecoverable,Reason}},LD}
- end;
- false ->
- {reply,{error,no_session},LD}
- end;
-
-handle_call({reset_nodes,Nodes},_From,LD=#ld{session_state=SState}) ->
- case is_tracing(SState) of
- false -> % We can not be in a session.
- {reply,h_reset_nodes(Nodes,LD#ld.c_node),LD};
- true ->
- {reply,{error,session_active},LD}
- end;
-
-%% Calling a trace-case, or "turning it on".
-handle_call({atc,{TC,Id,Vars}},_From,LD=#ld{session_state=SState}) ->
- case is_tracing(SState) of % Check that we are tracing now.
- true ->
- case h_atc(TC,Id,Vars,LD) of
- {ok,NewLD} -> % Trace case executed.
- {reply,ok,NewLD};
- {error,Reason} ->
- {reply,{error,Reason},LD}
- end;
- false -> % Can't activate if not tracing.
- {reply,{error,no_session},LD}
- end;
-
-handle_call({sync_atc,{TC,Id,Vars,TimeOut}},_From,LD=#ld{session_state=SState}) ->
- case is_tracing(SState) of
- true ->
- if
+ case h_restore_session(MoreTDGargs,LD) of
+ {ok,{SessionNr,ReturnVal,NewLD}} ->
+ {reply,
+ {ok,{SessionNr,ReturnVal}},
+ NewLD#ld{session_state=tracing_sessionstate()}};
+ {error,Reason} ->
+ {reply,{error,Reason},LD}
+ end;
+ true ->
+ {reply,{error,{badarg,MoreTDGargs}},LD}
+ end;
+ false ->
+ {reply,{error,no_history},LD}
+ end;
+ true ->
+ {reply,{error,session_already_started},LD}
+ end;
+
+%% To stop tracing means stop_tracing through the inviso API. But we must also
+%% remove any help processes executing inviso commands (trace case executers
+%% and reactivators).
+%% Note that to be really sure we should actually wait for EXIT-signals from those
+%% processes before returning a successful return value to the caller. In theory
+%% those processes could issue an inviso call affecting a new trace session started
+%% with init_tracing shortly after the call to stop_tracing. But too complicated! :-)
+%% Further, stop-tracing is done on all nodes in our nodes structure, regardless
+%% of whether the node is tracing or not.
+handle_call(stop_session,_From,LD=#ld{session_state=SState,chl=CHL,reactivators=ReAct}) ->
+ case is_tracing(SState) of
+ true ->
+ NewCHL=stop_all_tc_executer_chl(CHL), % Stop any running trace case proc.
+ NewReAct=stop_all_reactivators(ReAct), % Stop any running reactivators.
+ case h_stop_session(LD) of
+ {ok,{SessionNr,Result}} ->
+ NewNodesD=set_inactive_nodes(Result,LD#ld.nodes),
+ {reply,
+ {ok,{SessionNr,Result}},
+ LD#ld{session_state=passive_sessionstate(),
+ nodes=NewNodesD,
+ chl=NewCHL,
+ reactivators=NewReAct,
+ started_initial_tcs=[]}};
+ {error,Reason} -> % Now we're really in deep shit :-)
+ {reply,{error,{unrecoverable,Reason}},LD}
+ end;
+ false ->
+ {reply,{error,no_session},LD}
+ end;
+
+handle_call({reset_nodes,Nodes},_From,LD=#ld{session_state=SState}) ->
+ case is_tracing(SState) of
+ false -> % We can not be in a session.
+ {reply,h_reset_nodes(Nodes,LD#ld.c_node),LD};
+ true ->
+ {reply,{error,session_active},LD}
+ end;
+
+%% Calling a trace-case, or "turning it on".
+handle_call({atc,{TC,Id,Vars}},_From,LD=#ld{session_state=SState}) ->
+ case is_tracing(SState) of % Check that we are tracing now.
+ true ->
+ case h_atc(TC,Id,Vars,LD) of
+ {ok,NewLD} -> % Trace case executed.
+ {reply,ok,NewLD};
+ {error,Reason} ->
+ {reply,{error,Reason},LD}
+ end;
+ false -> % Can't activate if not tracing.
+ {reply,{error,no_session},LD}
+ end;
+
+handle_call({sync_atc,{TC,Id,Vars,TimeOut}},_From,LD=#ld{session_state=SState}) ->
+ case is_tracing(SState) of
+ true ->
+ if
is_integer(TimeOut);TimeOut==infinity ->
- case h_sync_atc(TC,Id,Vars,TimeOut,LD) of
- {ok,NewLD,Result} ->
- {reply,Result,NewLD};
- {error,Reason} ->
- {reply,{error,Reason},LD}
- end;
- true ->
- {reply,{error,{badarg,TimeOut}},LD}
- end;
- false ->
- {reply,{error,no_session},LD}
- end;
-
-handle_call({sync_rtc,{TC,Vars,TimeOut}},_From,LD=#ld{session_state=SState}) ->
- case is_tracing(SState) of
- true ->
- if
+ case h_sync_atc(TC,Id,Vars,TimeOut,LD) of
+ {ok,NewLD,Result} ->
+ {reply,Result,NewLD};
+ {error,Reason} ->
+ {reply,{error,Reason},LD}
+ end;
+ true ->
+ {reply,{error,{badarg,TimeOut}},LD}
+ end;
+ false ->
+ {reply,{error,no_session},LD}
+ end;
+
+handle_call({sync_rtc,{TC,Vars,TimeOut}},_From,LD=#ld{session_state=SState}) ->
+ case is_tracing(SState) of
+ true ->
+ if
is_integer(TimeOut);TimeOut==infinity ->
- case h_sync_rtc(TC,Vars,TimeOut,LD) of
- {ok,NewLD,Result} ->
- {reply,Result,NewLD};
- {error,Reason} ->
- {reply,{error,Reason},LD}
- end;
- true ->
- {reply,{error,{badarg,TimeOut}},LD}
- end;
- false ->
- {reply,{error,no_session},LD}
- end;
-
-
-handle_call({dtc,{TC,Id}},_From,LD=#ld{session_state=SState}) ->
- case is_tracing(SState) of % Check that we are tracing now.
- true ->
- case h_dtc(TC,Id,LD) of
- {ok,NewLD} ->
- {reply,ok,NewLD};
- {error,Reason} ->
- {reply,{error,Reason},LD}
- end;
- false -> % Can't activate if not tracing.
- {reply,{error,no_session},LD}
- end;
-
-handle_call({sync_dtc,{TC,Id,TimeOut}},_From,LD=#ld{session_state=SState}) ->
- case is_tracing(SState) of % Check that we are tracing now.
- true ->
- if
+ case h_sync_rtc(TC,Vars,TimeOut,LD) of
+ {ok,NewLD,Result} ->
+ {reply,Result,NewLD};
+ {error,Reason} ->
+ {reply,{error,Reason},LD}
+ end;
+ true ->
+ {reply,{error,{badarg,TimeOut}},LD}
+ end;
+ false ->
+ {reply,{error,no_session},LD}
+ end;
+
+
+handle_call({dtc,{TC,Id}},_From,LD=#ld{session_state=SState}) ->
+ case is_tracing(SState) of % Check that we are tracing now.
+ true ->
+ case h_dtc(TC,Id,LD) of
+ {ok,NewLD} ->
+ {reply,ok,NewLD};
+ {error,Reason} ->
+ {reply,{error,Reason},LD}
+ end;
+ false -> % Can't activate if not tracing.
+ {reply,{error,no_session},LD}
+ end;
+
+handle_call({sync_dtc,{TC,Id,TimeOut}},_From,LD=#ld{session_state=SState}) ->
+ case is_tracing(SState) of % Check that we are tracing now.
+ true ->
+ if
is_integer(TimeOut);TimeOut==infinity ->
- case h_sync_dtc(TC,Id,TimeOut,LD) of
- {ok,NewLD,Result} ->
- {reply,Result,NewLD};
- {error,Reason} ->
- {reply,{error,Reason},LD}
- end;
- true ->
- {reply,{error,{badarg,TimeOut}},LD}
- end;
- false -> % Can't activate if not tracing.
- {reply,{error,no_session},LD}
- end;
-
-handle_call({inviso,{Cmd,Args}},_From,LD=#ld{session_state=SState}) ->
- case is_tracing(SState) of
- true ->
- if
+ case h_sync_dtc(TC,Id,TimeOut,LD) of
+ {ok,NewLD,Result} ->
+ {reply,Result,NewLD};
+ {error,Reason} ->
+ {reply,{error,Reason},LD}
+ end;
+ true ->
+ {reply,{error,{badarg,TimeOut}},LD}
+ end;
+ false -> % Can't activate if not tracing.
+ {reply,{error,no_session},LD}
+ end;
+
+handle_call({inviso,{Cmd,Args}},_From,LD=#ld{session_state=SState}) ->
+ case is_tracing(SState) of
+ true ->
+ if
is_list(Args) ->
- case h_inviso(Cmd,Args,LD) of
- {ok,{Reply,NewLD}} ->
- {reply,Reply,NewLD};
- {error,Reason} ->
- {reply,{error,Reason},LD}
- end;
- true ->
- {reply,{error,{badarg,Args}},LD}
- end;
- false -> % Can't do if not tracing.
- {reply,{error,no_session},LD}
- end;
-
-handle_call({reactivate,Node},_From,LD=#ld{nodes=NodesD,c_node=CNode}) ->
- case get_state_nodes(Node,NodesD) of
- {trace_failure,_} ->
- {reply,{error,trace_failure},LD};
- {State,suspended} -> % The node is infact suspended.
- case h_reactivate(Node,CNode) of
- ok ->
- case {State,is_tracing(LD#ld.session_state)} of
- {tracing,true} -> % Only then shall we redo cmds.
- {reply,ok,redo_cmd_history(Node,LD)};
- _ -> % All other just no longer suspended.
- {reply,ok,LD#ld{nodes=set_running_nodes(Node,NodesD)}}
- end;
- {error,Reason} ->
- {reply,{error,Reason},LD}
- end;
- reactivating ->
- {reply,{error,reactivating},LD};
- {_,running} ->
- {reply,{error,already_running},LD};
- down ->
- {reply,{error,not_available},LD};
- false ->
- {reply,{error,unknown_node},LD}
- end;
-
-handle_call({save_history,FileName},_From,LD=#ld{chl=CHL,dir=Dir,history_dir=HDir}) ->
- case lists:keysort(2,get_loglist_chl(CHL)) of
- [] -> % Empty history or no history.
- {reply,{error,no_history},LD};
- Log ->
- case h_save_history(HDir,Dir,FileName,Log) of
- {ok,AbsFileName} ->
- {reply,{ok,AbsFileName},LD};
- {error,Reason} ->
- {reply,{error,Reason},LD}
- end
- end;
-
-
-handle_call({get_autostart_data,{Nodes,Dependency}},_From,LD=#ld{chl=CHL}) ->
- case build_autostart_data(lists:keysort(2,get_loglist_chl(CHL)),LD#ld.tc_dict) of
- {ok,ASD} ->
- TDGargs=get_latest_tdgargs_tracer_data(LD#ld.tracer_data),
- {M,F,_}=LD#ld.tdg,
- OptsG=LD#ld.optg, % Addnodes options generator.
- {reply,
- h_get_autostart_data(Nodes,LD#ld.c_node,Dependency,ASD,M,F,TDGargs,OptsG),
- LD};
- {error,Reason} -> % Bad datatypes in command args.
- {reply,{error,Reason},LD}
- end;
-
-handle_call({get_autostart_data,Dependency},From,LD=#ld{c_node=undefined}) ->
- handle_call({get_autostart_data,{local_runtime,Dependency}},From,LD);
-handle_call({get_autostart_data,Dependency},From,LD=#ld{nodes=NodesD}) ->
- Nodes=get_all_nodenames_nodes(NodesD),
- handle_call({get_autostart_data,{local_runtime,{Nodes,Dependency}}},From,LD);
-
-handle_call(get_activities,_From,LD=#ld{chl=CHL,reactivators=Reactivators}) ->
- TraceCases=get_ongoing_chl(CHL),
- RNodes=get_all_nodes_reactivators(Reactivators),
- ReturnList1=
- if
- TraceCases==[] ->
- [];
- true ->
- [{tracecases,TraceCases}]
- end,
- ReturnList2=
- if
- RNodes==[] ->
- ReturnList1;
- true ->
- [{reactivating_nodes,RNodes}|ReturnList1]
- end,
- {reply,{ok,ReturnList2},LD};
-
-handle_call({get_node_status,Node},_Node,LD) ->
- case get_state_nodes(Node,LD#ld.nodes) of
- false ->
- {reply,{error,unknown_node},LD};
- StateStatus ->
- {reply,{ok,StateStatus},LD}
- end;
-
-handle_call(get_session_data,_From,LD=#ld{session_state=SState,tracer_data=TD}) ->
- case get_latest_session_nr_tracer_data(TD) of
- undefined ->
- {reply,{error,no_session},LD};
- SessionNr ->
- TDGargs=get_latest_tdgargs_tracer_data(TD),
- case is_tracing(SState) of
- true ->
- {reply,{ok,{tracing,SessionNr,TDGargs}},LD};
- false ->
- {reply,{ok,{not_tracing,SessionNr,TDGargs}},LD}
- end
- end;
-
-handle_call(flush,_From,LD=#ld{c_node=CNode,nodes=NodesD}) ->
- Nodes=get_tracing_nodes(NodesD),
- {reply,h_flush(CNode,Nodes),LD};
-handle_call({flush,Nodes},_From,LD=#ld{c_node=CNode}) ->
- {reply,h_flush(CNode,Nodes),LD};
-
-handle_call(get_loopdata,_From,LD) ->
- {reply,LD,LD};
-
-%% Internal handle_call callbacks.
-
-handle_call({reactivator_reply,{Counter,RPid}},_From,LD=#ld{chl=CHL}) ->
- HighestUsedCounter=get_highest_used_counter_chl(CHL),
- if
- HighestUsedCounter>Counter -> % There are now more log entries.
- NewUnsortedLog=get_loglist_chl(CHL),
- {reply,{more,NewUnsortedLog},LD};
- true -> % No Counter is youngest log entry.
- NodesD=LD#ld.nodes,
- Node=get_node_reactivators(RPid,LD#ld.reactivators),
- {reply,
- done,
- LD#ld{nodes=set_running_nodes(Node,NodesD),
- reactivators=del_reactivators(RPid,LD#ld.reactivators)}}
- end.
-%% -----------------------------------------------------------------------------
-
-%% Handling a notification from a trace case execution process. Receiving this
-%% indicated that this phase of the trace case is finnished.
-handle_cast({tc_executer_reply,{Phase,ProcH,Result}},LD) ->
- case Phase of
- activating -> % The trace case is running now.
- {ok,NewLD}=h_tc_activation_done(ProcH,Result,LD),
- {noreply,NewLD};
- stopping ->
- {ok,NewLD}=h_tc_stopping_done(ProcH,Result,LD),
- {noreply,NewLD};
- _ ->
- {noreply,LD}
- end;
-handle_cast(_,LD) ->
- {noreply,LD}.
-%% -----------------------------------------------------------------------------
-
-%% This is the case when a runtime component goes down. We stop all running
-%% reactivators for this node. Note that there can also be tracecases ongoing
-%% where this node is part of the Nodes variable. But there is not much we can
-%% do about that. Other then informing the user that it is unwise to reconnect
-%% this node before those tracecases have stopped being ongoing.
-handle_info({inviso_event,_CNode,_Time,{disconnected,Node,_}},LD) ->
- {noreply,LD#ld{nodes=set_down_nodes(Node,LD#ld.nodes),
- reactivators=stop_node_reactivators(Node,LD#ld.reactivators)}};
-
-%% This is the case when a runtime component gets suspended. Much of the same
-%% problem as described above applies.
-handle_info({inviso_event,_CNode,_Time,{state_change,Node,{_,{suspended,_}}}},LD) ->
- {noreply,LD#ld{nodes=set_suspended_nodes(Node,LD#ld.nodes),
- reactivators=stop_node_reactivators(Node,LD#ld.reactivators)}};
-
-handle_info(_,LD) ->
- {noreply,LD}.
-%% -----------------------------------------------------------------------------
-
-%% Called when the tool server stops. First clause, termination is initiated by
-%% our self and therefore controlled another way. In the second case we are
-%% stopping for some external reason, and we must then do more here in terminate/2.
-terminate(normal,#ld{c_node=CNode}) -> % This is when we are stopping our self.
- stop_inviso_at_c_node(CNode);
-terminate(_,#ld{c_node=CNode,nodes=NodesD,keep_nodes=KeepNodes}) ->
- remove_all_trace_patterns(CNode,KeepNodes,get_all_nodenames_nodes(NodesD)),
- stop_inviso_at_c_node(CNode).
-%% -----------------------------------------------------------------------------
-
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-%% =============================================================================
-%% Handler first level help functions.
-%% =============================================================================
-
-%% -----------------------------------------------------------------------------
-%% reconnect_nodes
-%% -----------------------------------------------------------------------------
-
-%% Help function reconnecting the nodes in Nodes. Listed nodes must be part of
-%% the set of nodes handled by the tool. It is not possible to reconnect a node
-%% that is not marked as down. This partly because we otherwise risk losing the
-%% trace_failure state (which can not be rediscovered).
-h_reconnect_nodes(local_runtime,LD=#ld{nodes=NodesD}) -> % Non-distributed.
- case get_state_nodes(local_runtime,NodesD) of
- down ->
- {ok,{local_runtime,[],start_runtime_components(local_runtime,LD)}};
- _ -> % Allready connected!
- {ok,{[],{error,already_connected},LD}}
- end;
+ case h_inviso(Cmd,Args,LD) of
+ {ok,{Reply,NewLD}} ->
+ {reply,Reply,NewLD};
+ {error,Reason} ->
+ {reply,{error,Reason},LD}
+ end;
+ true ->
+ {reply,{error,{badarg,Args}},LD}
+ end;
+ false -> % Can't do if not tracing.
+ {reply,{error,no_session},LD}
+ end;
+
+handle_call({reactivate,Node},_From,LD=#ld{nodes=NodesD,c_node=CNode}) ->
+ case get_state_nodes(Node,NodesD) of
+ {trace_failure,_} ->
+ {reply,{error,trace_failure},LD};
+        {State,suspended} ->                 % The node is in fact suspended.
+ case h_reactivate(Node,CNode) of
+ ok ->
+ case {State,is_tracing(LD#ld.session_state)} of
+ {tracing,true} -> % Only then shall we redo cmds.
+ {reply,ok,redo_cmd_history(Node,LD)};
+ _ -> % All other just no longer suspended.
+ {reply,ok,LD#ld{nodes=set_running_nodes(Node,NodesD)}}
+ end;
+ {error,Reason} ->
+ {reply,{error,Reason},LD}
+ end;
+ reactivating ->
+ {reply,{error,reactivating},LD};
+ {_,running} ->
+ {reply,{error,already_running},LD};
+ down ->
+ {reply,{error,not_available},LD};
+ false ->
+ {reply,{error,unknown_node},LD}
+ end;
+
+handle_call({save_history,FileName},_From,LD=#ld{chl=CHL,dir=Dir,history_dir=HDir}) ->
+ case lists:keysort(2,get_loglist_chl(CHL)) of
+ [] -> % Empty history or no history.
+ {reply,{error,no_history},LD};
+ Log ->
+ case h_save_history(HDir,Dir,FileName,Log) of
+ {ok,AbsFileName} ->
+ {reply,{ok,AbsFileName},LD};
+ {error,Reason} ->
+ {reply,{error,Reason},LD}
+ end
+ end;
+
+handle_call({get_autostart_data,{Nodes,Dependency}},_From,LD=#ld{chl=CHL}) ->
+ {ok,ASD} = build_autostart_data(lists:keysort(2,get_loglist_chl(CHL)),LD#ld.tc_dict),
+ TDGargs=get_latest_tdgargs_tracer_data(LD#ld.tracer_data),
+ {M,F,_}=LD#ld.tdg,
+ OptsG=LD#ld.optg, % Addnodes options generator.
+ {reply,
+ h_get_autostart_data(Nodes,LD#ld.c_node,Dependency,ASD,M,F,TDGargs,OptsG),
+ LD};
+
+handle_call({get_autostart_data,Dependency},From,LD=#ld{c_node=undefined}) ->
+ handle_call({get_autostart_data,{local_runtime,Dependency}},From,LD);
+handle_call({get_autostart_data,Dependency},From,LD=#ld{nodes=NodesD}) ->
+ Nodes=get_all_nodenames_nodes(NodesD),
+ handle_call({get_autostart_data,{local_runtime,{Nodes,Dependency}}},From,LD);
+
+handle_call(get_activities,_From,LD=#ld{chl=CHL,reactivators=Reactivators}) ->
+ TraceCases=get_ongoing_chl(CHL),
+ RNodes=get_all_nodes_reactivators(Reactivators),
+ ReturnList1=
+ if
+ TraceCases==[] ->
+ [];
+ true ->
+ [{tracecases,TraceCases}]
+ end,
+ ReturnList2=
+ if
+ RNodes==[] ->
+ ReturnList1;
+ true ->
+ [{reactivating_nodes,RNodes}|ReturnList1]
+ end,
+ {reply,{ok,ReturnList2},LD};
+
+handle_call({get_node_status,Node},_Node,LD) ->
+ case get_state_nodes(Node,LD#ld.nodes) of
+ false ->
+ {reply,{error,unknown_node},LD};
+ StateStatus ->
+ {reply,{ok,StateStatus},LD}
+ end;
+
+handle_call(get_session_data,_From,LD=#ld{session_state=SState,tracer_data=TD}) ->
+ case get_latest_session_nr_tracer_data(TD) of
+ undefined ->
+ {reply,{error,no_session},LD};
+ SessionNr ->
+ TDGargs=get_latest_tdgargs_tracer_data(TD),
+ case is_tracing(SState) of
+ true ->
+ {reply,{ok,{tracing,SessionNr,TDGargs}},LD};
+ false ->
+ {reply,{ok,{not_tracing,SessionNr,TDGargs}},LD}
+ end
+ end;
+
+handle_call(flush,_From,LD=#ld{c_node=CNode,nodes=NodesD}) ->
+ Nodes=get_tracing_nodes(NodesD),
+ {reply,h_flush(CNode,Nodes),LD};
+handle_call({flush,Nodes},_From,LD=#ld{c_node=CNode}) ->
+ {reply,h_flush(CNode,Nodes),LD};
+
+handle_call(get_loopdata,_From,LD) ->
+ {reply,LD,LD};
+
+%% Internal handle_call callbacks.
+
+handle_call({reactivator_reply,{Counter,RPid}},_From,LD=#ld{chl=CHL}) ->
+ HighestUsedCounter=get_highest_used_counter_chl(CHL),
+ if
+ HighestUsedCounter>Counter -> % There are now more log entries.
+ NewUnsortedLog=get_loglist_chl(CHL),
+ {reply,{more,NewUnsortedLog},LD};
+        true ->                              % No; Counter is the youngest log entry.
+ NodesD=LD#ld.nodes,
+ Node=get_node_reactivators(RPid,LD#ld.reactivators),
+ {reply,
+ done,
+ LD#ld{nodes=set_running_nodes(Node,NodesD),
+ reactivators=del_reactivators(RPid,LD#ld.reactivators)}}
+ end.
+%% -----------------------------------------------------------------------------
+
+%% Handling a notification from a trace case execution process. Receiving this
+%% indicates that this phase of the trace case is finished.
+handle_cast({tc_executer_reply,{Phase,ProcH,Result}},LD) ->
+ case Phase of
+ activating -> % The trace case is running now.
+ {ok,NewLD}=h_tc_activation_done(ProcH,Result,LD),
+ {noreply,NewLD};
+ stopping ->
+ {ok,NewLD}=h_tc_stopping_done(ProcH,Result,LD),
+ {noreply,NewLD};
+ _ ->
+ {noreply,LD}
+ end;
+handle_cast(_,LD) ->
+ {noreply,LD}.
+%% -----------------------------------------------------------------------------
+
+%% This is the case when a runtime component goes down. We stop all running
+%% reactivators for this node. Note that there can also be tracecases ongoing
+%% where this node is part of the Nodes variable. But there is not much we can
+%% do about that, other than informing the user that it is unwise to reconnect
+%% this node before those tracecases have stopped being ongoing.
+handle_info({inviso_event,_CNode,_Time,{disconnected,Node,_}},LD) ->
+ {noreply,LD#ld{nodes=set_down_nodes(Node,LD#ld.nodes),
+ reactivators=stop_node_reactivators(Node,LD#ld.reactivators)}};
+
+%% This is the case when a runtime component gets suspended. Much of the same
+%% problem as described above applies.
+handle_info({inviso_event,_CNode,_Time,{state_change,Node,{_,{suspended,_}}}},LD) ->
+ {noreply,LD#ld{nodes=set_suspended_nodes(Node,LD#ld.nodes),
+ reactivators=stop_node_reactivators(Node,LD#ld.reactivators)}};
+
+handle_info(_,LD) ->
+ {noreply,LD}.
+%% -----------------------------------------------------------------------------
+
+%% Called when the tool server stops. In the first clause, termination was initiated
+%% by the tool itself and is therefore controlled another way. In the second case we are
+%% stopping for some external reason, and we must then do more here in terminate/2.
+terminate(normal,#ld{c_node=CNode}) ->      % This is when we are stopping ourselves.
+ stop_inviso_at_c_node(CNode);
+terminate(_,#ld{c_node=CNode,nodes=NodesD,keep_nodes=KeepNodes}) ->
+ remove_all_trace_patterns(CNode,KeepNodes,get_all_nodenames_nodes(NodesD)),
+ stop_inviso_at_c_node(CNode).
+%% -----------------------------------------------------------------------------
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% =============================================================================
+%% Handler first level help functions.
+%% =============================================================================
+
+%% -----------------------------------------------------------------------------
+%% reconnect_nodes
+%% -----------------------------------------------------------------------------
+
+%% Help function reconnecting the nodes in Nodes. Listed nodes must be part of
+%% the set of nodes handled by the tool. It is not possible to reconnect a node
+%% that is not marked as down. This is partly because we otherwise risk losing the
+%% trace_failure state (which can not be rediscovered).
+h_reconnect_nodes(local_runtime,LD=#ld{nodes=NodesD}) -> % Non-distributed.
+ case get_state_nodes(local_runtime,NodesD) of
+ down ->
+ {ok,{local_runtime,[],start_runtime_components(local_runtime,LD)}};
+        _ ->                                 % Already connected!
+ {ok,{[],{error,already_connected},LD}}
+ end;
h_reconnect_nodes(Nodes,LD=#ld{nodes=NodesD}) when is_list(Nodes) ->
- {Nodes2,NodesErr}=
- lists:foldl(fun(N,{Nodes2,NodesErr})->
- case get_state_nodes(N,NodesD) of
- down -> % Yes this node can be reconnected.
- {[N|Nodes2],NodesErr};
- false -> % Not part of the node-set!
- {Nodes2,[{N,{error,unknown_node}}|NodesErr]};
- _ -> % Allready connected!
- {Nodes2,[{N,{error,already_connected}}|NodesErr]}
- end
- end,
- {[],[]},
- Nodes),
- LD2=start_runtime_components(Nodes2,LD), % Inpect the #ld.nodes for result.
- {ok,{Nodes2,NodesErr,LD2}};
-h_reconnect_nodes(Nodes,_LD) ->
- {error,{badarg,Nodes}}.
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% start_session
-%% -----------------------------------------------------------------------------
-
-%% Help function starting the tracing at all nodes. Note that the tracer data
-%% is calculated using a user defined function. This is how for instance the
-%% file names (of the log files) are determined.
-%% Before the nodes are initiated their (possibly remaining) trace patterns are
-%% cleared, both local and global.
-h_start_session(M,F,TDGargs,LD=#ld{c_node=CNode,nodes=NodesD,tracer_data=TDs}) ->
- case get_inactive_running_nodes(NodesD) of
- [] -> % There are no nodes to initiate!
- h_start_session_nonodes(TDGargs,LD,[]);
- Nodes -> % List of nodes or 'local_runtime'.
- case h_start_session_ctp_all(CNode,Nodes) of
- {ok,Errors,[]} -> % Now no nodes to initiate!
- h_start_session_nonodes(TDGargs,LD,Errors);
- {ok,Errors,Nodes2} -> % Now these nodes are fresh.
- case call_tracer_data_generator(CNode,M,F,TDGargs,Nodes2) of
- {ok,TracerList} -> % Generated our tracerdata.
- case h_start_session_2(CNode,TracerList,Errors) of
- {ok,ReturnValue} -> % Some nodes are initialized now.
- {NewNodesD,Nodes3}=
- set_tracing_running_nodes(CNode,ReturnValue,NodesD),
- {SessionNr,NewTDs}=insert_td_tracer_data(TDGargs,TDs),
- {ok,{SessionNr,
- ReturnValue,
- Nodes3, % The nodes that shall get initial tracases.
- LD#ld{nodes=NewNodesD,tracer_data=NewTDs}}};
- {error,Reason} ->
- {error,Reason}
- end;
- {error,Reason} -> % Faulty tracer data generator func.
- {error,{bad_tdg,Reason}}
- end;
- {error,Reason} -> % Error clearing patterns.
- {error,Reason}
- end
- end.
-
-h_start_session_nonodes(TDGargs,LD=#ld{c_node=CNode,tracer_data=TDs},Errors) ->
- {SessionNr,NewTDs}=insert_td_tracer_data(TDGargs,TDs),
- if
- CNode==undefined ->
- {ok,{SessionNr,[],LD#ld{tracer_data=NewTDs}}};
- true ->
- {ok,{SessionNr,{ok,Errors},LD#ld{tracer_data=NewTDs}}}
- end.
-
-%% Help function clearing all trace patterns on all nodes.
-h_start_session_ctp_all(CNode,Nodes) ->
- case remove_all_trace_patterns(CNode,[],Nodes) of
- ok -> % Non-distributed case1.
- {ok,[],local_runtime};
- {error,Reason} -> % Non-distributed case2 and general failure.
- {error,Reason};
- {ok,NodeResults} ->
- h_start_session_ctp_all_2(NodeResults,[],[])
- end.
-
-h_start_session_ctp_all_2([{Node,{error,Reason}}|Rest],Errors,Nodes) ->
- h_start_session_ctp_all_2(Rest,[{Node,{error,Reason}}|Errors],Nodes);
-h_start_session_ctp_all_2([{Node,_OkOrPatternsUntouched}|Rest],Errors,Nodes) ->
- h_start_session_ctp_all_2(Rest,Errors,[Node|Nodes]);
-h_start_session_ctp_all_2([],Errors,Nodes) ->
- {ok,Errors,Nodes}.
-
-%% Help function doing the actual init_tracing.
-h_start_session_2(undefined,TracerData,_Errors) -> % Non distributed case.
- case inviso:init_tracing(TracerData) of
+ {Nodes2,NodesErr}=
+ lists:foldl(fun(N,{Nodes2,NodesErr})->
+ case get_state_nodes(N,NodesD) of
+ down -> % Yes this node can be reconnected.
+ {[N|Nodes2],NodesErr};
+ false -> % Not part of the node-set!
+ {Nodes2,[{N,{error,unknown_node}}|NodesErr]};
+                        _ ->                 % Already connected!
+ {Nodes2,[{N,{error,already_connected}}|NodesErr]}
+ end
+ end,
+ {[],[]},
+ Nodes),
+    LD2=start_runtime_components(Nodes2,LD), % Inspect the #ld.nodes for result.
+ {ok,{Nodes2,NodesErr,LD2}};
+h_reconnect_nodes(Nodes,_LD) ->
+ {error,{badarg,Nodes}}.
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% start_session
+%% -----------------------------------------------------------------------------
+
+%% Help function starting the tracing at all nodes. Note that the tracer data
+%% is calculated using a user-defined function. This is, for instance, how the
+%% file names (of the log files) are determined.
+%% Before the nodes are initiated their (possibly remaining) trace patterns are
+%% cleared, both local and global.
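+%% As a sketch of that contract (the exact argument passing and the tracer data
+%% format are assumptions here): the generator must return {ok,TracerList}, and in
+%% the distributed case TracerList is handed as-is to inviso:init_tracing/1 on the
+%% control node (see h_start_session_2/3 below), i.e. roughly
+%%
+%%   my_tdg(Nodes,TDGargs) ->
+%%       {ok,[{Node,tracer_data_for(Node,TDGargs)} || Node <- Nodes]}.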
+h_start_session(M,F,TDGargs,LD=#ld{c_node=CNode,nodes=NodesD,tracer_data=TDs}) ->
+ case get_inactive_running_nodes(NodesD) of
+ [] -> % There are no nodes to initiate!
+ h_start_session_nonodes(TDGargs,LD,[]);
+ Nodes -> % List of nodes or 'local_runtime'.
+ case h_start_session_ctp_all(CNode,Nodes) of
+ {ok,Errors,[]} -> % Now no nodes to initiate!
+ h_start_session_nonodes(TDGargs,LD,Errors);
+ {ok,Errors,Nodes2} -> % Now these nodes are fresh.
+ case call_tracer_data_generator(CNode,M,F,TDGargs,Nodes2) of
+ {ok,TracerList} -> % Generated our tracerdata.
+ case h_start_session_2(CNode,TracerList,Errors) of
+ {ok,ReturnValue} -> % Some nodes are initialized now.
+ {NewNodesD,Nodes3}=
+ set_tracing_running_nodes(CNode,ReturnValue,NodesD),
+ {SessionNr,NewTDs}=insert_td_tracer_data(TDGargs,TDs),
+ {ok,{SessionNr,
+ ReturnValue,
+                              Nodes3,   % The nodes that shall get initial tracecases.
+ LD#ld{nodes=NewNodesD,tracer_data=NewTDs}}};
+ {error,Reason} ->
+ {error,Reason}
+ end;
+ {error,Reason} -> % Faulty tracer data generator func.
+ {error,{bad_tdg,Reason}}
+ end;
+ {error,Reason} -> % Error clearing patterns.
+ {error,Reason}
+ end
+ end.
+
+h_start_session_nonodes(TDGargs,LD=#ld{c_node=CNode,tracer_data=TDs},Errors) ->
+ {SessionNr,NewTDs}=insert_td_tracer_data(TDGargs,TDs),
+ if
+ CNode==undefined ->
+ {ok,{SessionNr,[],LD#ld{tracer_data=NewTDs}}};
+ true ->
+ {ok,{SessionNr,{ok,Errors},LD#ld{tracer_data=NewTDs}}}
+ end.
+
+%% Help function clearing all trace patterns on all nodes.
+h_start_session_ctp_all(CNode,Nodes) ->
+ case remove_all_trace_patterns(CNode,[],Nodes) of
+ ok -> % Non-distributed case1.
+ {ok,[],local_runtime};
+ {error,Reason} -> % Non-distributed case2 and general failure.
+ {error,Reason};
+ {ok,NodeResults} ->
+ h_start_session_ctp_all_2(NodeResults,[],[])
+ end.
+
+h_start_session_ctp_all_2([{Node,{error,Reason}}|Rest],Errors,Nodes) ->
+ h_start_session_ctp_all_2(Rest,[{Node,{error,Reason}}|Errors],Nodes);
+h_start_session_ctp_all_2([{Node,_OkOrPatternsUntouched}|Rest],Errors,Nodes) ->
+ h_start_session_ctp_all_2(Rest,Errors,[Node|Nodes]);
+h_start_session_ctp_all_2([],Errors,Nodes) ->
+ {ok,Errors,Nodes}.
+
+%% Help function doing the actual init_tracing.
+h_start_session_2(undefined,TracerData,_Errors) -> % Non distributed case.
+ case inviso:init_tracing(TracerData) of
{ok,LogResult} when is_list(LogResult) ->
- {ok,{ok,LogResult}};
- {error,already_initated} -> % Perhaps adopted!?
- {ok,{error,already_initiated}}; % Not necessarily wrong.
- {error,Reason} ->
- {error,Reason}
- end;
-h_start_session_2(CNode,TracerList,Errors) ->
- case rpc:call(CNode,inviso,init_tracing,[TracerList]) of
- {ok,NodeResults} ->
- {ok,{ok,Errors++NodeResults}};
- {error,Reason} ->
- {error,Reason};
- {badrpc,Reason} ->
- {error,{inviso_control_node_error,Reason}}
- end.
-%% -----------------------------------------------------------------------------
-
-%% Help function starting all initial trace cases. They are actually handled
-%% the same way as user started trace cases. We actually only start initial
-%% tracecases at Nodes (if Nodes is a list of nodes). This because we may have
-%% adopted some nodes some already tracing nodes, and such are supposed to have
-%% the correct patterns and flags set.
-do_initial_tcs([{TC,Vars}|Rest],Nodes,LD) ->
- Id=make_ref(), % Trace case ID.
- case h_atc(TC,Id,Vars,LD,Nodes) of % Start using regular start methods.
- {ok,NewLD} -> % Trace case was successfully started.
- NewInitialTcs=add_initial_tcs(TC,Id,NewLD#ld.started_initial_tcs),
- do_initial_tcs(Rest,Nodes,NewLD#ld{started_initial_tcs=NewInitialTcs});
- {error,_Reason} ->
- do_initial_tcs(Rest,Nodes,LD)
- end;
-do_initial_tcs([_|Rest],Nodes,LD) ->
- do_initial_tcs(Rest,Nodes,LD);
-do_initial_tcs([],_Nodes,LD) ->
- LD.
-%% -----------------------------------------------------------------------------
-
-%% This help functio is used instead of do_initial_tcs/3 if there actually are no
-%% nodes to do the trace cases on. The reason we must have this function is that
-%% the tracecases must still be entered into the history with bindings and all.
-%% But we let them be marked as 'running' immediately (no need for the activator
-%% process).
-add_initial_tcs_to_history([{TC,Vars}|Rest],LD=#ld{tc_dict=TCdict,chl=CHL}) ->
- case get_tracecase_tc_dict(TC,TCdict) of
- {ok,TraceCase} ->
- case check_bindings(Vars,TraceCase) of
- {ok,Bindings} ->
- Id=make_ref(), % Trace case ID.
- FakeProcH=make_ref(), % Need something to enter as activator.
- NewCHL=set_activating_chl(TC,Id,CHL,Bindings,FakeProcH),
- NewCHL2=set_running_chl(FakeProcH,TC,Id,void,NewCHL), % Result=void.
- NewInitialTcs=add_initial_tcs(TC,Id,LD#ld.started_initial_tcs),
- add_initial_tcs_to_history(Rest,LD#ld{chl=NewCHL2,
- started_initial_tcs=NewInitialTcs});
- {error,_Reason} -> % Not much we can do about that.
- add_initial_tcs_to_history(Rest,LD)
- end;
- false ->
- add_initial_tcs_to_history(Rest,LD)
- end;
-add_initial_tcs_to_history([],LD) ->
- LD.
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% reinitiate_session
-%% -----------------------------------------------------------------------------
-
-%% Function doing the reinitiation. That means first do init_tracing at the nodes
-%% in question. Then redo the command history to bring them up to speed.
-%% But first the runtime component is cleared of all trace patterns.
-h_reinitiate_session(Nodes,M,F,TDGargs,LD=#ld{c_node=CNode,nodes=NodesD}) ->
- case h_reinitiate_session_2(Nodes,NodesD,CNode) of
- {ok,{[],NodesErr}} -> % No nodes to reinitiate.
- {ok,{NodesErr,{ok,[]},LD}};
- {ok,{Nodes2,NodesErr}} -> % List of nodes or local_runtime.
- case call_tracer_data_generator(CNode,M,F,TDGargs,Nodes2) of
- {ok,TracerList} ->
- case h_start_session_2(CNode,TracerList,[]) of % Borrow from start_session.
- {ok,ReturnValue} -> % Ok, now we must redo cmd history.
- {NewNodesD,_Nodes}=
- set_tracing_running_nodes(CNode,ReturnValue,NodesD),
- NewLD=h_reinitiate_session_chl(Nodes2,LD#ld{nodes=NewNodesD}),
- {ok,{NodesErr,ReturnValue,NewLD}};
- {error,Reason} ->
- {error,Reason}
- end;
- {error,Reason} ->
- {error,{bad_tdg,Reason}}
- end;
- {error,Reason} ->
- {error,Reason}
- end.
-
-%% Help function finding out which nodes in Nodes actually can be reinitiated.
-%% A node must be up, inactive and not suspended in order for this to work. All the
-%% rest is just a matter of how detailed error return values we want to generate.
-h_reinitiate_session_2(local_runtime,NodesD,undefined) -> % Non distributed case.
- case get_state_nodes(local_runtime,NodesD) of
- {inactive,running} -> % Only ok case.
- case inviso:ctp_all() of
- ok ->
- {ok,{local_runtime,[]}};
- {error,Reason} -> % This is strange.
- {error,Reason}
- end;
- {_,suspended} ->
- {ok,{[],{error,suspended}}};
- down ->
- {ok,{[],{error,down}}};
- _ ->
- {ok,{[],{error,already_in_session}}}
- end;
+ {ok,{ok,LogResult}};
+ {error,already_initated} -> % Perhaps adopted!?
+ {ok,{error,already_initiated}}; % Not necessarily wrong.
+ {error,Reason} ->
+ {error,Reason}
+ end;
+h_start_session_2(CNode,TracerList,Errors) ->
+ case rpc:call(CNode,inviso,init_tracing,[TracerList]) of
+ {ok,NodeResults} ->
+ {ok,{ok,Errors++NodeResults}};
+ {error,Reason} ->
+ {error,Reason};
+ {badrpc,Reason} ->
+ {error,{inviso_control_node_error,Reason}}
+ end.
+%% -----------------------------------------------------------------------------
+
+%% Help function starting all initial trace cases. They are actually handled
+%% the same way as user-started trace cases. We actually only start initial
+%% tracecases at Nodes (if Nodes is a list of nodes). This is because we may have
+%% adopted some already tracing nodes, and such nodes are supposed to have
+%% the correct patterns and flags set.
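+%% The initial trace cases arrive here as a list of {TraceCaseName,Vars} tuples,
+%% where Vars is a list of variable bindings. For instance (the trace case name,
+%% variable and value below are illustrative assumptions only):
+%%
+%%   do_initial_tcs([{connections_tc,[{'Mod',my_mod}]}],Nodes,LD)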
+do_initial_tcs([{TC,Vars}|Rest],Nodes,LD) ->
+ Id=make_ref(), % Trace case ID.
+ case h_atc(TC,Id,Vars,LD,Nodes) of % Start using regular start methods.
+ {ok,NewLD} -> % Trace case was successfully started.
+ NewInitialTcs=add_initial_tcs(TC,Id,NewLD#ld.started_initial_tcs),
+ do_initial_tcs(Rest,Nodes,NewLD#ld{started_initial_tcs=NewInitialTcs});
+ {error,_Reason} ->
+ do_initial_tcs(Rest,Nodes,LD)
+ end;
+do_initial_tcs([_|Rest],Nodes,LD) ->
+ do_initial_tcs(Rest,Nodes,LD);
+do_initial_tcs([],_Nodes,LD) ->
+ LD.
+%% -----------------------------------------------------------------------------
+
+%% This help function is used instead of do_initial_tcs/3 if there actually are no
+%% nodes to do the trace cases on. The reason we must have this function is that
+%% the tracecases must still be entered into the history with bindings and all.
+%% But we let them be marked as 'running' immediately (no need for the activator
+%% process).
+add_initial_tcs_to_history([{TC,Vars}|Rest],LD=#ld{tc_dict=TCdict,chl=CHL}) ->
+ case get_tracecase_tc_dict(TC,TCdict) of
+ {ok,TraceCase} ->
+ case check_bindings(Vars,TraceCase) of
+ {ok,Bindings} ->
+ Id=make_ref(), % Trace case ID.
+ FakeProcH=make_ref(), % Need something to enter as activator.
+ NewCHL=set_activating_chl(TC,Id,CHL,Bindings,FakeProcH),
+ NewCHL2=set_running_chl(FakeProcH,TC,Id,void,NewCHL), % Result=void.
+ NewInitialTcs=add_initial_tcs(TC,Id,LD#ld.started_initial_tcs),
+ add_initial_tcs_to_history(Rest,LD#ld{chl=NewCHL2,
+ started_initial_tcs=NewInitialTcs});
+ {error,_Reason} -> % Not much we can do about that.
+ add_initial_tcs_to_history(Rest,LD)
+ end;
+ false ->
+ add_initial_tcs_to_history(Rest,LD)
+ end;
+add_initial_tcs_to_history([],LD) ->
+ LD.
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% reinitiate_session
+%% -----------------------------------------------------------------------------
+
+%% Function doing the reinitiation. That means first do init_tracing at the nodes
+%% in question. Then redo the command history to bring them up to speed.
+%% But first the runtime component is cleared of all trace patterns.
+h_reinitiate_session(Nodes,M,F,TDGargs,LD=#ld{c_node=CNode,nodes=NodesD}) ->
+ case h_reinitiate_session_2(Nodes,NodesD,CNode) of
+ {ok,{[],NodesErr}} -> % No nodes to reinitiate.
+ {ok,{NodesErr,{ok,[]},LD}};
+ {ok,{Nodes2,NodesErr}} -> % List of nodes or local_runtime.
+ case call_tracer_data_generator(CNode,M,F,TDGargs,Nodes2) of
+ {ok,TracerList} ->
+ case h_start_session_2(CNode,TracerList,[]) of % Borrow from start_session.
+ {ok,ReturnValue} -> % Ok, now we must redo cmd history.
+ {NewNodesD,_Nodes}=
+ set_tracing_running_nodes(CNode,ReturnValue,NodesD),
+ NewLD=h_reinitiate_session_chl(Nodes2,LD#ld{nodes=NewNodesD}),
+ {ok,{NodesErr,ReturnValue,NewLD}};
+ {error,Reason} ->
+ {error,Reason}
+ end;
+ {error,Reason} ->
+ {error,{bad_tdg,Reason}}
+ end;
+ {error,Reason} ->
+ {error,Reason}
+ end.
+
+%% Help function finding out which nodes in Nodes actually can be reinitiated.
+%% A node must be up, inactive and not suspended in order for this to work. All the
+%% rest is just a matter of how detailed error return values we want to generate.
+h_reinitiate_session_2(local_runtime,NodesD,undefined) -> % Non distributed case.
+ case get_state_nodes(local_runtime,NodesD) of
+ {inactive,running} -> % Only ok case.
+ case inviso:ctp_all() of
+ ok ->
+ {ok,{local_runtime,[]}};
+ {error,Reason} -> % This is strange.
+ {error,Reason}
+ end;
+ {_,suspended} ->
+ {ok,{[],{error,suspended}}};
+ down ->
+ {ok,{[],{error,down}}};
+ _ ->
+ {ok,{[],{error,already_in_session}}}
+ end;
h_reinitiate_session_2(Nodes,NodesD,CNode) when is_list(Nodes) ->
- {ok,lists:foldl(fun(N,{Nodes2,NodesErr})->
- case get_state_nodes(N,NodesD) of
- {inactive,running} -> % Only ok case.
- case rpc:call(CNode,inviso,ctp_all,[[N]]) of
- {ok,[{N,ok}]} ->
- {[N|Nodes2],NodesErr};
- {ok,[{N,{error,Reason}}]} ->
- {Nodes2,[{N,{error,Reason}}|NodesErr]};
- {error,Reason} ->
- {Nodes2,[{N,{error,Reason}}|NodesErr]};
- {badrpc,Reason} ->
- {Nodes2,[{N,{error,{badrpc,Reason}}}|NodesErr]}
- end;
- {_,suspended} ->
- {Nodes2,[{N,{error,suspended}}|NodesErr]};
- down ->
- {Nodes2,[{N,{error,down}}|NodesErr]};
- false ->
- {Nodes2,[{N,{error,unknown_node}}|NodesErr]};
- _ ->
- {Nodes2,[{N,{error,already_in_session}}|NodesErr]}
- end
- end,
- {[],[]},
- Nodes)};
-h_reinitiate_session_2(Nodes,_NodesD,_CNode) ->
- {error,{badarg7,Nodes}}.
-
-%% Help function redoing the command history log at all nodes that actually
-%% started to trace. Note that we do not modify the return value which will be
-%% given to the caller just because we decide not to redo commands. The user
-%% must conclude him self from the inviso return value that commands were not
-%% redone at a particular node.
-h_reinitiate_session_chl(local_runtime,LD) ->
- h_reinitiate_session_chl([local_runtime],LD);
-h_reinitiate_session_chl([Node|Rest],LD=#ld{nodes=NodesD}) ->
- case get_state_nodes(Node,NodesD) of
- {tracing,running} -> % Only case when we shall redo!
- h_reinitiate_session_chl(Rest,redo_cmd_history(Node,LD));
- _ -> % No redo of chl in other cases.
- h_reinitiate_session_chl(Rest,LD)
- end;
-h_reinitiate_session_chl([],LD) ->
- LD.
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% restore_session
-%% -----------------------------------------------------------------------------
-
-%% Help function starting a session (init tracing) and redoes the history
-%% found in CHL.
-h_restore_session(MoreTDGargs,LD) ->
- DateTime=calendar:universal_time(),
- {M,F,Args}=LD#ld.tdg,
- TDGargs=inviso_tool_lib:mk_tdg_args(DateTime,MoreTDGargs++Args),
- case h_start_session(M,F,TDGargs,LD) of
- {ok,{SessionNr,ReturnVal,NewLD}} -> % There were no available nodes.
- {ok,{SessionNr,ReturnVal,NewLD}};
- {ok,{SessionNr,ReturnVal,Nodes2,NewLD}} ->
- NewLD2=h_reinitiate_session_chl(Nodes2,NewLD),
- {ok,{SessionNr,ReturnVal,NewLD2}};
- {error,Reason} -> % Risk of out of control.
- {error,Reason}
- end.
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% stop_session
-%% -----------------------------------------------------------------------------
-
-%% Help function stopping tracing at tracing nodes.
-h_stop_session(#ld{c_node=CNode,nodes=NodesD,tracer_data=TDs}) ->
- case h_stop_session_2(CNode,NodesD) of
- {ok,Result} ->
- {ok,{get_latest_session_nr_tracer_data(TDs),Result}};
- {error,Reason} ->
- {error,Reason}
- end.
-
-h_stop_session_2(undefined,NodesD) -> % The non distributed case.
- case get_tracing_nodes(NodesD) of
- {up,{inactive,_}} -> % Already not tracing!
- {ok,[]};
- {up,_} ->
- case inviso:stop_tracing() of
- {ok,_State} ->
- {ok,[ok]};
- {error,no_response} ->
- {ok,[]};
- {error,Reason} ->
- {error,Reason}
- end;
- down ->
- {ok,[]}
- end;
-h_stop_session_2(CNode,NodesD) ->
- Nodes=get_tracing_nodes(NodesD),
- case rpc:call(CNode,inviso,stop_tracing,[Nodes]) of
- {ok,NodeResults} ->
- {ok,lists:map(fun({N,{ok,_}})->{N,ok};
- (NodeError)->NodeError
- end,
- NodeResults)};
- {error,Reason} ->
- {error,Reason};
- {badrpc,Reason} ->
- {error,{inviso_control_node_error,Reason}}
- end.
-%% -----------------------------------------------------------------------------
-
-%% Help function removing any trace flags, trace patterns and meta trace patterns
-%% at Nodes. This will cause the nodes to become "fresh".
-h_reset_nodes(local_runtime,_CNode) ->
- inviso:clear([keep_log_files]);
-h_reset_nodes(Nodes,CNode) ->
- case inviso_tool_lib:inviso_cmd(CNode,clear,[Nodes,[keep_log_files]]) of
- {ok,NodeResults} ->
- {ok,NodeResults};
- {error,Reason} ->
- {error,Reason}
- end.
-%% -----------------------------------------------------------------------------
-
-
-%% -----------------------------------------------------------------------------
-%% atc
-%% -----------------------------------------------------------------------------
-
-%% Function handling ativating a trace case. Trace cases that do not have a
-%% particular on/off handling (but just on in some scense) are handled here too.
-%% The trace case is entered into the Command History Log.
-%% Note that the trace case can not be executed at this node but must be
-%% executed where the inviso control component is.
-%% Further it is possible to either activated the tracecase for all running and
-%% tracing nodes, or just for a specified list of nodes.
-%% TC=tracecase_name(),
-%% Id=term(), identifiying this usage so we can turn it off later.
-%% Vars=list(), list of variable-value bindnings.
-h_atc(TC,Id,Vars,LD) ->
- h_atc(TC,Id,Vars,LD,void). % For all running-tracing nodes.
-
-h_atc(TC,Id,Vars,LD=#ld{c_node=CNode,tc_dict=TCdict,chl=CHL},Nodes) ->
- case find_id_chl(TC,Id,CHL) of
- activating -> % Already started.
- {error,activating};
- stopping -> % Not yet stopped.
- {error,deactivating};
- false ->
- case get_tracecase_tc_dict(TC,TCdict) of
- {ok,TraceCase} -> % Such a trace case exists.
- case check_bindings(Vars,TraceCase) of
- {ok,Bindings} -> % Necessary vars exists in Vars.
- if
+ {ok,lists:foldl(fun(N,{Nodes2,NodesErr})->
+ case get_state_nodes(N,NodesD) of
+ {inactive,running} -> % Only ok case.
+ case rpc:call(CNode,inviso,ctp_all,[[N]]) of
+ {ok,[{N,ok}]} ->
+ {[N|Nodes2],NodesErr};
+ {ok,[{N,{error,Reason}}]} ->
+ {Nodes2,[{N,{error,Reason}}|NodesErr]};
+ {error,Reason} ->
+ {Nodes2,[{N,{error,Reason}}|NodesErr]};
+ {badrpc,Reason} ->
+ {Nodes2,[{N,{error,{badrpc,Reason}}}|NodesErr]}
+ end;
+ {_,suspended} ->
+ {Nodes2,[{N,{error,suspended}}|NodesErr]};
+ down ->
+ {Nodes2,[{N,{error,down}}|NodesErr]};
+ false ->
+ {Nodes2,[{N,{error,unknown_node}}|NodesErr]};
+ _ ->
+ {Nodes2,[{N,{error,already_in_session}}|NodesErr]}
+ end
+ end,
+ {[],[]},
+ Nodes)};
+h_reinitiate_session_2(Nodes,_NodesD,_CNode) ->
+ {error,{badarg7,Nodes}}.
+
+%% Help function redoing the command history log at all nodes that actually
+%% started to trace. Note that we do not modify the return value given to the
+%% caller just because we decide not to redo commands. The user must conclude
+%% from the inviso return value that commands were not redone at a particular
+%% node.
+h_reinitiate_session_chl(local_runtime,LD) ->
+ h_reinitiate_session_chl([local_runtime],LD);
+h_reinitiate_session_chl([Node|Rest],LD=#ld{nodes=NodesD}) ->
+ case get_state_nodes(Node,NodesD) of
+ {tracing,running} -> % Only case when we shall redo!
+ h_reinitiate_session_chl(Rest,redo_cmd_history(Node,LD));
+ _ -> % No redo of chl in other cases.
+ h_reinitiate_session_chl(Rest,LD)
+ end;
+h_reinitiate_session_chl([],LD) ->
+ LD.
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% restore_session
+%% -----------------------------------------------------------------------------
+
+%% Help function starting a session (init tracing) and redoing the history
+%% found in the CHL.
+h_restore_session(MoreTDGargs,LD) ->
+ DateTime=calendar:universal_time(),
+ {M,F,Args}=LD#ld.tdg,
+ TDGargs=inviso_tool_lib:mk_tdg_args(DateTime,MoreTDGargs++Args),
+ case h_start_session(M,F,TDGargs,LD) of
+ {ok,{SessionNr,ReturnVal,NewLD}} -> % There were no available nodes.
+ {ok,{SessionNr,ReturnVal,NewLD}};
+ {ok,{SessionNr,ReturnVal,Nodes2,NewLD}} ->
+ NewLD2=h_reinitiate_session_chl(Nodes2,NewLD),
+ {ok,{SessionNr,ReturnVal,NewLD2}};
+ {error,Reason} -> % Risk of out of control.
+ {error,Reason}
+ end.
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% stop_session
+%% -----------------------------------------------------------------------------
+
+%% Help function stopping tracing at tracing nodes.
+h_stop_session(#ld{c_node=CNode,nodes=NodesD,tracer_data=TDs}) ->
+ case h_stop_session_2(CNode,NodesD) of
+ {ok,Result} ->
+ {ok,{get_latest_session_nr_tracer_data(TDs),Result}};
+ {error,Reason} ->
+ {error,Reason}
+ end.
+
+h_stop_session_2(undefined,NodesD) -> % The non distributed case.
+ case get_tracing_nodes(NodesD) of
+ {up,{inactive,_}} -> % Already not tracing!
+ {ok,[]};
+ {up,_} ->
+ case inviso:stop_tracing() of
+ {ok,_State} ->
+ {ok,[ok]};
+ {error,no_response} ->
+ {ok,[]};
+ {error,Reason} ->
+ {error,Reason}
+ end;
+ down ->
+ {ok,[]}
+ end;
+h_stop_session_2(CNode,NodesD) ->
+ Nodes=get_tracing_nodes(NodesD),
+ case rpc:call(CNode,inviso,stop_tracing,[Nodes]) of
+ {ok,NodeResults} ->
+ {ok,lists:map(fun({N,{ok,_}})->{N,ok};
+ (NodeError)->NodeError
+ end,
+ NodeResults)};
+ {error,Reason} ->
+ {error,Reason};
+ {badrpc,Reason} ->
+ {error,{inviso_control_node_error,Reason}}
+ end.
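+%% Illustrative example (an assumption for clarity, not from the original code;
+%% node names are hypothetical): in the distributed case the result returned by
+%% h_stop_session/1 is expected to look like
+%%   {ok,{SessionNr,[{n1@host,ok},{n2@host,{error,Reason}}]}}
+%% since each per-node {ok,_} from inviso:stop_tracing/1 is collapsed to 'ok'
+%% while error tuples are passed through unchanged.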
+%% -----------------------------------------------------------------------------
+
+%% Help function removing any trace flags, trace patterns and meta trace patterns
+%% at Nodes. This will cause the nodes to become "fresh".
+h_reset_nodes(local_runtime,_CNode) ->
+ inviso:clear([keep_log_files]);
+h_reset_nodes(Nodes,CNode) ->
+ case inviso_tool_lib:inviso_cmd(CNode,clear,[Nodes,[keep_log_files]]) of
+ {ok,NodeResults} ->
+ {ok,NodeResults};
+ {error,Reason} ->
+ {error,Reason}
+ end.
+%% -----------------------------------------------------------------------------
+
+
+%% -----------------------------------------------------------------------------
+%% atc
+%% -----------------------------------------------------------------------------
+
+%% Function handling activating a trace case. Trace cases that do not have a
+%% particular on/off handling (but are just "on" in some sense) are handled here
+%% too. The trace case is entered into the Command History Log.
+%% Note that the trace case cannot be executed at this node but must be
+%% executed where the inviso control component is.
+%% Further it is possible to activate the trace case either for all running and
+%% tracing nodes, or just for a specified list of nodes.
+%% TC=tracecase_name(),
+%% Id=term(), identifying this usage so we can turn it off later.
+%% Vars=list(), list of variable-value bindings.
+h_atc(TC,Id,Vars,LD) ->
+ h_atc(TC,Id,Vars,LD,void). % For all running-tracing nodes.
+
+h_atc(TC,Id,Vars,LD=#ld{c_node=CNode,tc_dict=TCdict,chl=CHL},Nodes) ->
+ case find_id_chl(TC,Id,CHL) of
+ activating -> % Already started.
+ {error,activating};
+ stopping -> % Not yet stopped.
+ {error,deactivating};
+ false ->
+ case get_tracecase_tc_dict(TC,TCdict) of
+ {ok,TraceCase} -> % Such a trace case exists.
+ case check_bindings(Vars,TraceCase) of
+ {ok,Bindings} -> % Necessary vars exist in Vars.
+ if
is_list(Nodes) -> % Nodes predefined.
- h_atc_2(TC,Id,CNode,CHL,LD,TraceCase,Bindings,Nodes);
- true -> % Use all tracing and running nodes.
- Nodes1=get_nodenames_running_nodes(LD#ld.nodes),
- h_atc_2(TC,Id,CNode,CHL,LD,TraceCase,Bindings,Nodes1)
- end;
- {error,Reason} -> % Variable def missing.
- {error,Reason}
- end;
- false ->
- {error,unknown_tracecase}
- end;
- {ok,_Bindings} -> % Already activated and running.
- {error,already_started}
- end.
-
-h_atc_2(TC,Id,CNode,CHL,LD,TraceCase,Bindings,Nodes) ->
- case exec_trace_case_on(CNode,TraceCase,Bindings,Nodes) of
- {ok,ProcH} -> % Trace cases have no return values.
- NewCHL=set_activating_chl(TC,Id,CHL,Bindings,ProcH),
- {ok,LD#ld{chl=NewCHL}};
- {error,Reason} ->
- {error,Reason}
- end.
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% sync_atc
-%% -----------------------------------------------------------------------------
-
-h_sync_atc(TC,Id,Vars,TimeOut,LD=#ld{c_node=CNode,tc_dict=TCdict,chl=CHL}) ->
- case find_id_chl(TC,Id,CHL) of
- activating -> % Already started.
- {error,activating};
- stopping -> % Not yet stopped.
- {error,deactivating};
- false ->
- case get_tracecase_tc_dict(TC,TCdict) of
- {ok,TraceCase} -> % Such a trace case exists.
- case check_bindings(Vars,TraceCase) of
- {ok,Bindings} -> % Necessary vars exists in Vars.
- {ok,TcFName}=get_tc_activate_fname(TraceCase),
- Nodes=get_nodenames_running_nodes(LD#ld.nodes),
- Bindings2=erl_eval:add_binding('Nodes',Nodes,Bindings),
- RpcNode=get_rpc_nodename(CNode),
- case rpc:call(RpcNode,file,script,[TcFName,Bindings2],TimeOut) of
- {ok,Value} ->
- FakeProcH=make_ref(),
- NewCHL1=set_activating_chl(TC,Id,CHL,Bindings,FakeProcH),
- NewCHL2=set_running_chl(FakeProcH,TC,Id,Value,NewCHL1),
- {ok,LD#ld{chl=NewCHL2},Value};
- {error,Reason} ->
- {error,{faulty_tracecase,{TcFName,Reason}}};
- {badrpc,Reason} ->
- {error,{badrpc,Reason}}
- end;
- {error,Reason} -> % Variable def missing.
- {error,Reason}
- end;
- false ->
- {error,unknown_tracecase}
- end;
- {ok,_Bindings} -> % Already activated and running.
- {error,already_started}
- end.
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% rtc
-%% -----------------------------------------------------------------------------
-
-%% Function handling running a trace case without marking it as activated. It
-%% is in the history mearly indicated as activated
-h_sync_rtc(TC,Vars,TimeOut,LD=#ld{c_node=CNode,tc_dict=TCdict,chl=CHL}) ->
- case get_tracecase_tc_dict(TC,TCdict) of
- {ok,TraceCase} -> % Such a trace case exists.
- case check_bindings(Vars,TraceCase) of
- {ok,Bindings} -> % Necessary vars exists in Vars.
- {ok,TcFName}=get_tc_activate_fname(TraceCase),
- Nodes=get_nodenames_running_nodes(LD#ld.nodes),
- Bindings2=erl_eval:add_binding('Nodes',Nodes,Bindings),
- RpcNode=get_rpc_nodename(CNode),
- case rpc:call(RpcNode,file,script,[TcFName,Bindings2],TimeOut) of
- {ok,Value} ->
- {ok,LD#ld{chl=add_rtc_chl(TC,Bindings2,CHL)},Value};
- {error,Reason} ->
- {error,{faulty_tracecase,{TcFName,Reason}}};
- {badrpc,Reason} ->
- {error,{badrpc,Reason}}
- end;
- {error,Reason} -> % Variable def missing.
- {error,Reason}
- end;
- false ->
- {error,unknown_tracecase}
- end.
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% dtc
-%% -----------------------------------------------------------------------------
-
-%% Function handling turning a trace case off. The trace case must be registered
-%% as having an off mechanism. If it has an off mechanism and was previously entered
-%% into the Command History Log and is done with its activation phase, it will be
-%% executed and removed from the CHL.
-h_dtc(TC,Id,LD=#ld{c_node=CNode,tc_dict=TCdict,chl=CHL}) ->
- case find_id_chl(TC,Id,CHL) of
- {ok,Bindings} -> % Yes, we have turned it on before.
- case get_tracecase_tc_dict(TC,TCdict) of
- {ok,TraceCase} ->
- Nodes=get_nodenames_running_nodes(LD#ld.nodes),
- case exec_trace_case_off(CNode,TraceCase,Bindings,Nodes) of
- {ok,ProcH} ->
- NewCHL=set_stopping_chl(TC,Id,CHL,ProcH),
- {ok,LD#ld{chl=NewCHL}};
- {error,Reason} ->
- {error,Reason}
- end;
- false -> % Strange, Id ok but no such trace case.
- {error,unknown_tracecase}
- end;
- false -> % Not previously turned on.
- {error,unknown_id};
- activating ->
- {error,activating};
- stopping ->
- {error,already_deactivating}
- end.
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% sync_dtc
-%% -----------------------------------------------------------------------------
-
-h_sync_dtc(TC,Id,TimeOut,LD=#ld{c_node=CNode,tc_dict=TCdict,chl=CHL}) ->
- case find_id_chl(TC,Id,CHL) of
- {ok,Bindings} -> % Yes, we have turned it on before.
- case get_tracecase_tc_dict(TC,TCdict) of
- {ok,TraceCase} ->
- case get_tc_deactivate_fname(TraceCase) of
- {ok,TcFName} ->
- Nodes=get_nodenames_running_nodes(LD#ld.nodes),
- Bindings2=erl_eval:add_binding('Nodes',Nodes,Bindings),
- RpcNode=get_rpc_nodename(CNode),
- case rpc:call(RpcNode,file,script,[TcFName,Bindings2],TimeOut) of
- {ok,Value} ->
- FakeProcH=make_ref(),
- NewCHL1=set_stopping_chl(TC,Id,CHL,FakeProcH),
- NewCHL2=nullify_chl(FakeProcH,TC,Id,NewCHL1),
- {ok,LD#ld{chl=NewCHL2},Value};
- {error,Reason} -> % Script fault.
- {error,{faulty_tracecase,{TcFName,Reason}}};
- {badrpc,Reason} ->
- {error,{badrpc,Reason}}
- end;
- false ->
- {error,no_deactivation}
- end;
- false -> % Strange, Id ok but no such trace case.
- {error,unknown_tracecase}
- end;
- false -> % Not previously turned on.
- {error,unknown_id};
- activating ->
- {error,activating};
- stopping ->
- {error,already_deactivating}
- end.
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% inviso
-%% -----------------------------------------------------------------------------
-
-%% Function executing one inviso command. The returnvalue from the inviso
-%% function call will be the return value to the client. The command is
-%% entered into the history command log.
-%% Note that the inviso call may have to be done at another node, dictated
-%% by the c_node field. Further, if the module name is not an atom it is
-%% most likely a regexp, which must be expanded at the regexp_node. Note
-%% this is only relevant for tp and tpl.
-h_inviso(Cmd,Args,LD=#ld{c_node=CNode,regexp_node=RegExpNode,chl=CHL}) ->
- Arity=length(Args),
- case check_proper_inviso_call(Cmd,Arity) of
- {true,RegExpFlag} -> % Yes it is an inviso call.
- Nodes=get_nodenames_running_nodes(LD#ld.nodes),
- case h_inviso_2(Cmd,Args,CNode,RegExpNode,RegExpFlag,Nodes) of
- {ok,Result} ->
- case check_inviso_call_to_history(Cmd,Arity) of
- true -> % This function shall be added to chl.
- {ok,{Result,LD#ld{chl=add_inviso_call_chl(Cmd,Args,CHL)}}};
- false -> % Do not add it.
- {ok,{Result,LD}}
- end;
- {error,Reason} ->
- {error,Reason}
- end;
- false -> % Not an inviso function.
- {error,invalid_function_name}
- end.
-
-h_inviso_2(Cmd,Args,undefined,_,_,_) -> % A non distributed system.
- case catch apply(inviso,Cmd,Args) of % Regexp expansion only relevant when
- {'EXIT',Reason} -> % distributed, here let inviso_rt expand.
- {error,{'EXIT',Reason}};
- Result ->
- {ok,Result}
- end;
-h_inviso_2(Cmd,Args,CNode,RegExpNode,RegExpFlag,Nodes) ->
- case expand_module_regexps(Args,RegExpNode,Nodes,RegExpFlag) of
- {ok,NewArgs} ->
- case catch inviso_tool_lib:inviso_cmd(CNode,Cmd,[Nodes|NewArgs]) of
- {'EXIT',Reason} ->
- {error,{'EXIT',Reason}};
- {error,{badrpc,Reason}} -> % Includes runtime failure.
- {error,{badrpc,Reason}};
- Result ->
- {ok,Result}
- end;
- {error,Reason} ->
- {error,Reason}
- end.
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% reactivate
-%% -----------------------------------------------------------------------------
-
-h_reactivate(_Node,undefined) -> % The non-distributed case.
- case inviso:cancel_suspension() of
- ok ->
- ok;
- {error,Reason} ->
- {error,Reason}
- end;
-h_reactivate(Node,CNode) ->
- case inviso_tool_lib:inviso_cmd(CNode,cancel_suspension,[[Node]]) of
- {ok,[{Node,ok}]} ->
- ok;
- {ok,[{Node,{error,Reason}}]} ->
- {error,Reason};
- {error,Reason} ->
- {error,Reason}
- end.
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% save_history
-%% -----------------------------------------------------------------------------
-
-h_save_history(HDir,Dir,FileName,SortedLog) ->
- Dir0=
- if
+ h_atc_2(TC,Id,CNode,CHL,LD,TraceCase,Bindings,Nodes);
+ true -> % Use all tracing and running nodes.
+ Nodes1=get_nodenames_running_nodes(LD#ld.nodes),
+ h_atc_2(TC,Id,CNode,CHL,LD,TraceCase,Bindings,Nodes1)
+ end;
+ {error,Reason} -> % Variable def missing.
+ {error,Reason}
+ end;
+ false ->
+ {error,unknown_tracecase}
+ end;
+ {ok,_Bindings} -> % Already activated and running.
+ {error,already_started}
+ end.
+
+h_atc_2(TC,Id,CNode,CHL,LD,TraceCase,Bindings,Nodes) ->
+ case exec_trace_case_on(CNode,TraceCase,Bindings,Nodes) of
+ {ok,ProcH} -> % Trace cases have no return values.
+ NewCHL=set_activating_chl(TC,Id,CHL,Bindings,ProcH),
+ {ok,LD#ld{chl=NewCHL}};
+ {error,Reason} -> % Propagate the error to the caller.
+ {error,Reason}
+ end.
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% sync_atc
+%% -----------------------------------------------------------------------------
+
+h_sync_atc(TC,Id,Vars,TimeOut,LD=#ld{c_node=CNode,tc_dict=TCdict,chl=CHL}) ->
+ case find_id_chl(TC,Id,CHL) of
+ activating -> % Already started.
+ {error,activating};
+ stopping -> % Not yet stopped.
+ {error,deactivating};
+ false ->
+ case get_tracecase_tc_dict(TC,TCdict) of
+ {ok,TraceCase} -> % Such a trace case exists.
+ case check_bindings(Vars,TraceCase) of
+ {ok,Bindings} -> % Necessary vars exist in Vars.
+ {ok,TcFName}=get_tc_activate_fname(TraceCase),
+ Nodes=get_nodenames_running_nodes(LD#ld.nodes),
+ Bindings2=erl_eval:add_binding('Nodes',Nodes,Bindings),
+ RpcNode=get_rpc_nodename(CNode),
+ case rpc:call(RpcNode,file,script,[TcFName,Bindings2],TimeOut) of
+ {ok,Value} ->
+ FakeProcH=make_ref(),
+ NewCHL1=set_activating_chl(TC,Id,CHL,Bindings,FakeProcH),
+ NewCHL2=set_running_chl(FakeProcH,TC,Id,Value,NewCHL1),
+ {ok,LD#ld{chl=NewCHL2},Value};
+ {error,Reason} ->
+ {error,{faulty_tracecase,{TcFName,Reason}}};
+ {badrpc,Reason} ->
+ {error,{badrpc,Reason}}
+ end;
+ {error,Reason} -> % Variable def missing.
+ {error,Reason}
+ end;
+ false ->
+ {error,unknown_tracecase}
+ end;
+ {ok,_Bindings} -> % Already activated and running.
+ {error,already_started}
+ end.
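+%% Illustrative sketch (not part of the original code; the file name and the
+%% 'Pid' variable are hypothetical) of how a trace case file is evaluated above:
+%% it is run as an Erlang script with a bindings structure, where the tool
+%% always adds the 'Nodes' binding itself.
+%%   B0=erl_eval:add_binding('Pid',Pid,erl_eval:new_bindings()),
+%%   B1=erl_eval:add_binding('Nodes',[node()],B0),
+%%   {ok,Value}=file:script("my_tracecase.tc",B1).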
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% rtc
+%% -----------------------------------------------------------------------------
+
+%% Function handling running a trace case without marking it as activated. It
+%% is merely recorded in the history as having been run.
+h_sync_rtc(TC,Vars,TimeOut,LD=#ld{c_node=CNode,tc_dict=TCdict,chl=CHL}) ->
+ case get_tracecase_tc_dict(TC,TCdict) of
+ {ok,TraceCase} -> % Such a trace case exists.
+ case check_bindings(Vars,TraceCase) of
+ {ok,Bindings} -> % Necessary vars exist in Vars.
+ {ok,TcFName}=get_tc_activate_fname(TraceCase),
+ Nodes=get_nodenames_running_nodes(LD#ld.nodes),
+ Bindings2=erl_eval:add_binding('Nodes',Nodes,Bindings),
+ RpcNode=get_rpc_nodename(CNode),
+ case rpc:call(RpcNode,file,script,[TcFName,Bindings2],TimeOut) of
+ {ok,Value} ->
+ {ok,LD#ld{chl=add_rtc_chl(TC,Bindings2,CHL)},Value};
+ {error,Reason} ->
+ {error,{faulty_tracecase,{TcFName,Reason}}};
+ {badrpc,Reason} ->
+ {error,{badrpc,Reason}}
+ end;
+ {error,Reason} -> % Variable def missing.
+ {error,Reason}
+ end;
+ false ->
+ {error,unknown_tracecase}
+ end.
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% dtc
+%% -----------------------------------------------------------------------------
+
+%% Function handling turning a trace case off. The trace case must be registered
+%% as having an off mechanism. If it has an off mechanism, was previously entered
+%% into the Command History Log and is done with its activation phase, it will be
+%% executed and removed from the CHL.
+h_dtc(TC,Id,LD=#ld{c_node=CNode,tc_dict=TCdict,chl=CHL}) ->
+ case find_id_chl(TC,Id,CHL) of
+ {ok,Bindings} -> % Yes, we have turned it on before.
+ case get_tracecase_tc_dict(TC,TCdict) of
+ {ok,TraceCase} ->
+ Nodes=get_nodenames_running_nodes(LD#ld.nodes),
+ case exec_trace_case_off(CNode,TraceCase,Bindings,Nodes) of
+ {ok,ProcH} ->
+ NewCHL=set_stopping_chl(TC,Id,CHL,ProcH),
+ {ok,LD#ld{chl=NewCHL}};
+ {error,Reason} ->
+ {error,Reason}
+ end;
+ false -> % Strange, Id ok but no such trace case.
+ {error,unknown_tracecase}
+ end;
+ false -> % Not previously turned on.
+ {error,unknown_id};
+ activating ->
+ {error,activating};
+ stopping ->
+ {error,already_deactivating}
+ end.
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% sync_dtc
+%% -----------------------------------------------------------------------------
+
+h_sync_dtc(TC,Id,TimeOut,LD=#ld{c_node=CNode,tc_dict=TCdict,chl=CHL}) ->
+ case find_id_chl(TC,Id,CHL) of
+ {ok,Bindings} -> % Yes, we have turned it on before.
+ case get_tracecase_tc_dict(TC,TCdict) of
+ {ok,TraceCase} ->
+ case get_tc_deactivate_fname(TraceCase) of
+ {ok,TcFName} ->
+ Nodes=get_nodenames_running_nodes(LD#ld.nodes),
+ Bindings2=erl_eval:add_binding('Nodes',Nodes,Bindings),
+ RpcNode=get_rpc_nodename(CNode),
+ case rpc:call(RpcNode,file,script,[TcFName,Bindings2],TimeOut) of
+ {ok,Value} ->
+ FakeProcH=make_ref(),
+ NewCHL1=set_stopping_chl(TC,Id,CHL,FakeProcH),
+ NewCHL2=nullify_chl(FakeProcH,TC,Id,NewCHL1),
+ {ok,LD#ld{chl=NewCHL2},Value};
+ {error,Reason} -> % Script fault.
+ {error,{faulty_tracecase,{TcFName,Reason}}};
+ {badrpc,Reason} ->
+ {error,{badrpc,Reason}}
+ end;
+ false ->
+ {error,no_deactivation}
+ end;
+ false -> % Strange, Id ok but no such trace case.
+ {error,unknown_tracecase}
+ end;
+ false -> % Not previously turned on.
+ {error,unknown_id};
+ activating ->
+ {error,activating};
+ stopping ->
+ {error,already_deactivating}
+ end.
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% inviso
+%% -----------------------------------------------------------------------------
+
+%% Function executing one inviso command. The return value from the inviso
+%% function call will be the return value to the client. The command is
+%% entered into the command history log.
+%% Note that the inviso call may have to be done at another node, dictated
+%% by the c_node field. Further, if the module name is not an atom it is
+%% most likely a regexp, which must be expanded at the regexp_node. Note
+%% this is only relevant for tp and tpl.
+h_inviso(Cmd,Args,LD=#ld{c_node=CNode,regexp_node=RegExpNode,chl=CHL}) ->
+ Arity=length(Args),
+ case check_proper_inviso_call(Cmd,Arity) of
+ {true,RegExpFlag} -> % Yes it is an inviso call.
+ Nodes=get_nodenames_running_nodes(LD#ld.nodes),
+ case h_inviso_2(Cmd,Args,CNode,RegExpNode,RegExpFlag,Nodes) of
+ {ok,Result} ->
+ case check_inviso_call_to_history(Cmd,Arity) of
+ true -> % This function shall be added to chl.
+ {ok,{Result,LD#ld{chl=add_inviso_call_chl(Cmd,Args,CHL)}}};
+ false -> % Do not add it.
+ {ok,{Result,LD}}
+ end;
+ {error,Reason} ->
+ {error,Reason}
+ end;
+ false -> % Not an inviso function.
+ {error,invalid_function_name}
+ end.
+
+h_inviso_2(Cmd,Args,undefined,_,_,_) -> % A non distributed system.
+ case catch apply(inviso,Cmd,Args) of % Regexp expansion only relevant when
+ {'EXIT',Reason} -> % distributed, here let inviso_rt expand.
+ {error,{'EXIT',Reason}};
+ Result ->
+ {ok,Result}
+ end;
+h_inviso_2(Cmd,Args,CNode,RegExpNode,RegExpFlag,Nodes) ->
+ case expand_module_regexps(Args,RegExpNode,Nodes,RegExpFlag) of
+ {ok,NewArgs} ->
+ case catch inviso_tool_lib:inviso_cmd(CNode,Cmd,[Nodes|NewArgs]) of
+ {'EXIT',Reason} ->
+ {error,{'EXIT',Reason}};
+ {error,{badrpc,Reason}} -> % Includes runtime failure.
+ {error,{badrpc,Reason}};
+ Result ->
+ {ok,Result}
+ end;
+ {error,Reason} ->
+ {error,Reason}
+ end.
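+%% Illustrative note (an assumption, provided only as an example): for a command
+%% such as ctp_all with Args=[] (assuming {ctp_all,0} is among ?INVISO_CMDS), the
+%% distributed branch above boils down to
+%%   inviso_tool_lib:inviso_cmd(CNode,ctp_all,[Nodes])
+%% i.e. the list of running-and-tracing nodes is always prepended to the
+%% (possibly regexp-expanded) argument list before the command is issued at the
+%% control node.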
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% reactivate
+%% -----------------------------------------------------------------------------
+
+h_reactivate(_Node,undefined) -> % The non-distributed case.
+ case inviso:cancel_suspension() of
+ ok ->
+ ok;
+ {error,Reason} ->
+ {error,Reason}
+ end;
+h_reactivate(Node,CNode) ->
+ case inviso_tool_lib:inviso_cmd(CNode,cancel_suspension,[[Node]]) of
+ {ok,[{Node,ok}]} ->
+ ok;
+ {ok,[{Node,{error,Reason}}]} ->
+ {error,Reason};
+ {error,Reason} ->
+ {error,Reason}
+ end.
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% save_history
+%% -----------------------------------------------------------------------------
+
+h_save_history(HDir,Dir,FileName,SortedLog) ->
+ Dir0=
+ if
is_list(HDir) -> % There is a history dir specified.
- HDir; % Use it then.
- true ->
- Dir % Else use the tool dir.
- end,
- case catch make_absolute_path(FileName,Dir0) of
+ HDir; % Use it then.
+ true ->
+ Dir % Else use the tool dir.
+ end,
+ case catch make_absolute_path(FileName,Dir0) of
AbsFileName when is_list(AbsFileName) ->
- Log2=build_saved_history_data(SortedLog), % Remove stopped tracecases.
- case file:write_file(AbsFileName,term_to_binary(Log2)) of
- ok ->
- {ok,AbsFileName};
- {error,Reason} ->
- {error,{write_file,Reason}}
- end;
- {'EXIT',_Reason} ->
- {error,{bad_filename,FileName}}
- end.
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% get_autostart_data
-%% -----------------------------------------------------------------------------
-
-%% Help function building the structures used when exporting autostart information
-%% from the tool. Note that we remove the tool-dependency and insert the one
-%% specify in the get_autostart_data call.
-h_get_autostart_data(local_runtime,_,Dependency,ASD,M,F,TDGargs,OptsG) ->
- CompleteTDGargs=call_tracer_data_generator_mkargs(local_runtime,TDGargs),
- Opts0=start_runtime_components_mk_opts(local_runtime,OptsG),
- Opts=[Dependency|lists:keydelete(dependency,1,Opts0)],
- {ok,{ASD,{ok,{Opts,{tdg,{M,F,CompleteTDGargs}}}}}};
-
+ Log2=build_saved_history_data(SortedLog), % Remove stopped tracecases.
+ case file:write_file(AbsFileName,term_to_binary(Log2)) of
+ ok ->
+ {ok,AbsFileName};
+ {error,Reason} ->
+ {error,{write_file,Reason}}
+ end;
+ {'EXIT',_Reason} ->
+ {error,{bad_filename,FileName}}
+ end.
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% get_autostart_data
+%% -----------------------------------------------------------------------------
+
+%% Help function building the structures used when exporting autostart information
+%% from the tool. Note that we remove the tool dependency and insert the one
+%% specified in the get_autostart_data call.
+h_get_autostart_data(local_runtime,_,Dependency,ASD,M,F,TDGargs,OptsG) ->
+ CompleteTDGargs=call_tracer_data_generator_mkargs(local_runtime,TDGargs),
+ Opts0=start_runtime_components_mk_opts(local_runtime,OptsG),
+ Opts=[Dependency|lists:keydelete(dependency,1,Opts0)],
+ {ok,{ASD,{ok,{Opts,{tdg,{M,F,CompleteTDGargs}}}}}};
+
h_get_autostart_data(Nodes,CNode,Dependency,ASD,M,F,TDGargs,OptsG) when is_list(Nodes) ->
- {ok,{ASD,h_get_autostart_data_2(Nodes,CNode,Dependency,M,F,TDGargs,OptsG)}};
-h_get_autostart_data(Nodes,_CNode,_Dependency,_ASD,_M,_F,_TDGargs,_OptsG) ->
- {error,{badarg,Nodes}}.
-
-h_get_autostart_data_2([Node|Rest],CNode,Dependency,M,F,TDGargs,OptsG) ->
- CompleteTDGargs=call_tracer_data_generator_mkargs(Node,TDGargs),
- Opts0=start_runtime_components_mk_opts(Node,OptsG),
- Opts=[Dependency|lists:keydelete(dependency,1,Opts0)],
- [{Node,{ok,{Opts,{tdg,{M,F,CompleteTDGargs}}}}}|
- h_get_autostart_data_2(Rest,CNode,Dependency,M,F,TDGargs,OptsG)];
-h_get_autostart_data_2([],_CNode,_Dependency,_M,_F,_TDGargs,_OptsG) ->
- [].
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% flush
-%% -----------------------------------------------------------------------------
-
-h_flush(undefined,_Nodes) ->
- inviso:flush();
-h_flush(CNode,Nodes) ->
- inviso_tool_lib:inviso_cmd(CNode,flush,[Nodes]).
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% tc_executer_reply
-%% -----------------------------------------------------------------------------
-
-%% Function handling that a trace case has completed its activation phase and
-%% shall now be marked in the Command History Log as running.
-h_tc_activation_done(ProcH,Result,LD=#ld{chl=CHL}) ->
- case find_tc_executer_chl(ProcH,CHL) of
- {activating,{TC,Id}} ->
- case Result of
- {ok,Value} -> % The trace case is successful activated.
- {ok,LD#ld{chl=set_running_chl(ProcH,TC,Id,Value,CHL)}};
- {error,_} -> % Then pretend it never happend :-)
- {ok,LD#ld{chl=del_tc_chl(ProcH,TC,Id,CHL)}} % Remove it.
- end;
- _ -> % Where did this come from?
- {ok,LD} % Well just ignore it then.
- end.
-%% -----------------------------------------------------------------------------
-
-%% Function handling that a trace case has completed its stopping phase and
-%% shall now be nulled in the Command History Log (meaning that it will not
-%% be repeated in the event of a reactivation).
-h_tc_stopping_done(ProcH,Result,LD=#ld{chl=CHL}) ->
- case find_tc_executer_chl(ProcH,CHL) of
- {stopping,{TC,Id}} ->
- case Result of
- {ok,_Result} -> % _Result is returned from the tracecase.
- {ok,LD#ld{chl=nullify_chl(ProcH,TC,Id,CHL)}};
- {error,_} -> % This is difficult, is it still active?
- {ok,LD#ld{chl=nullify_chl(ProcH,TC,Id,CHL)}}
- end;
- _ -> % Strange.
- {ok,LD}
- end.
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% Terminate.
-%% -----------------------------------------------------------------------------
-
-%% Help function stopping the inviso control component. Does not return
-%% anything significant.
-stop_inviso_at_c_node(undefined) -> % Non distributed case.
- inviso:stop();
-stop_inviso_at_c_node(CNode) ->
- rpc:call(CNode,inviso,stop,[]).
-%% -----------------------------------------------------------------------------
-
-%% Help function that removes all trace patterns from the nodes that are not
-%% marked as such were patterns shall be left after stopping of inviso.
-%% Returns {ok,NodeResult} or {error,Reason}. In the non-distributed case
-%% 'ok' is returned incase of success, ot 'patterns_untouched'.
-remove_all_trace_patterns(undefined,KeepNodes,_Nodes) ->
- case KeepNodes of
- undefined -> % No, remove patterns from localruntime.
- inviso:ctp_all();
- _ ->
- patterns_untouched
- end;
-remove_all_trace_patterns(CNode,KeepNodes,Nodes) ->
- Nodes2=lists:filter(fun(N)->not(lists:member(N,KeepNodes)) end,Nodes),
- case inviso_tool_lib:inviso_cmd(CNode,ctp_all,[Nodes2]) of
- {ok,NodeResults} ->
- F=fun(N) ->
- case lists:member(N,KeepNodes) of
- true ->
- {N,patterns_untouched};
- false ->
- case lists:keysearch(N,1,NodeResults) of
- {value,Result} ->
- Result; % {Node,ok}
- false -> % Extremely strange.
- {N,{error,general_error}}
- end
- end
- end,
- {ok,lists:map(F,Nodes)};
- {error,{badrpc,Reason}} ->
- {error,{inviso_control_node_error,Reason}};
- {error,Reason} ->
- {error,Reason}
- end.
-%% -----------------------------------------------------------------------------
-
-%% =============================================================================
-%% Second level help functions.
-%% =============================================================================
-
-%% Help function building a reply to a reconnection call based on which nodes
-%% where asked to be reconnected and which of those are actually now working.
-%% We actually make an effort to serve the return value in the same order as the
-%% nodes were mentioned in the original call (Nodes).
-build_reconnect_nodes_reply(local_runtime,local_runtime,_NodesErr,NodesD) ->
- case get_state_nodes(local_runtime,NodesD) of
- down ->
- {error,down};
- {State,Status} ->
- {ok,{State,Status}}
- end;
-build_reconnect_nodes_reply(local_runtime,_,NodesErr,_NodesD) ->
- NodesErr;
-build_reconnect_nodes_reply([Node|Rest],Nodes2,NodesErr,NodesD) ->
- case lists:member(Node,Nodes2) of
- true -> % Ok, look in the #ld.nodes.
- case get_state_nodes(Node,NodesD) of
- down -> % Somekind of failure, still down.
- [{Node,{error,down}}|
- build_reconnect_nodes_reply(Rest,Nodes2,NodesErr,NodesD)];
- {State,Status} -> % {State,Status}
- [{Node,{ok,{State,Status}}}|
- build_reconnect_nodes_reply(Rest,Nodes2,NodesErr,NodesD)]
- end;
- false -> % Error already from the beginning.
- {value,{_,Error}}=lists:keysearch(Node,1,NodesErr),
- [{Node,Error}|build_reconnect_nodes_reply(Rest,Nodes2,NodesErr,NodesD)]
- end;
-build_reconnect_nodes_reply([],_,_,_) ->
- [].
-%% -----------------------------------------------------------------------------
-
-%% Help function building a return value to reinitiate_session. Nodes contains
-%% all involved nodes. If the node occurrs in NodesErr, we choose the error in
-%% NodesErr. Otherwise the returnvalue in ReturnVal is used.
-build_reinitiate_session_reply(Nodes,NodesErr,{ok,NodesResults}) ->
- {ok,build_reinitiate_session_reply_2(Nodes,NodesErr,NodesResults)};
-build_reinitiate_session_reply(local_runtime,[],NodeResult) ->
- NodeResult;
-build_reinitiate_session_reply(local_runtime,NodesErr,_NodeResult) ->
- NodesErr.
-build_reinitiate_session_reply_2([Node|Rest],NodesErr,NodeResults) ->
- case lists:keysearch(Node,1,NodesErr) of
- {value,{_,Error}} ->
- [{Node,Error}|build_reinitiate_session_reply_2(Rest,NodesErr,NodeResults)];
- false ->
- case lists:keysearch(Node,1,NodeResults) of
- {value,Value} ->
- [Value|build_reinitiate_session_reply_2(Rest,NodesErr,NodeResults)]
- end
- end;
-build_reinitiate_session_reply_2([],_NodesErr,_NodeResults) ->
- [].
-%% -----------------------------------------------------------------------------
-
-%% Help function returning a history log where stop and stopping entries have
-%% been removed. Further all tracecase log entries must be set to running since
-%% there can not be such a thing as an activating tracecase stored away in a
-%% saved historyfile!
-%% We must also take away any #Ref.
-build_saved_history_data(SortedLog) ->
- CleanedLog=
- lists:filter(fun({_,_,Stop,_}) when Stop==stop;Stop==stopping -> false;
- (_) -> true
- end,
- SortedLog),
- lists:map(fun({{TC,Id},C,activating,B}) -> {{TC,Id},C,running,B};
- ({{TC,Id},C,S,B}) -> {{TC,Id},C,S,B};
- ({{M,F,Args,_Ref},C}) -> {{M,F,Args},C};
- ({{TC,_Ref},C,B}) -> {TC,C,B} % An rtc.
- end,
- CleanedLog).
-%% -----------------------------------------------------------------------------
-
-%% This help function builds the AutoStartData structure which is returned from
-%% get_austostart_data. An AutoStartData structure is a list of trace-files and
-%% inviso commands. The order is significant since it is the idea that doing
-%% the trace case files and inviso commands in that order will bring a node to
-%% a certain state in a trace perspective.
-%% Returns {ok,AutoStartData} or {error,Reason}
-build_autostart_data(SortedLog,TCdict) ->
- build_autostart_data_2(SortedLog,TCdict,[]).
-
-build_autostart_data_2([{_,_C,Stop,_B}|Rest],TCdict,Accum) when Stop==stop;Stop==stopping->
- build_autostart_data_2(Rest,TCdict,Accum); % Simply skip deactivated/deativating.
-build_autostart_data_2([{{TCname,_},_C,activating,Bindings}|Rest],TCdict,Accum) ->
- build_autostart_data_tc(TCname,Bindings,TCdict,Rest,Accum);
-build_autostart_data_2([{{TCname,_},_C,running,Bindings}|Rest],TCdict,Accum) ->
- build_autostart_data_tc(TCname,Bindings,TCdict,Rest,Accum);
-build_autostart_data_2([{{TCname,_Ref},_C,Bindings}|Rest],TCdict,Accum) ->
- build_autostart_data_tc(TCname,Bindings,TCdict,Rest,Accum);
-build_autostart_data_2([{{M,F,Args,_Ref},_C}|Rest],TCdict,Accum) ->
- build_autostart_data_2(Rest,TCdict,[{mfa,{M,F,Args}}|Accum]);
-build_autostart_data_2([],_TCdict,Accum) ->
- {ok,lists:reverse(Accum)}.
-
-%% Help function placing the filename in the AutoStartData structure.
-build_autostart_data_tc(TCname,Bindings,TCdict,Rest,Accum) ->
- {ok,TC}=get_tracecase_tc_dict(TCname,TCdict),
- {ok,FName}=get_tc_activate_fname(TC),
- build_autostart_data_2(Rest,TCdict,[{file,{FName,Bindings}}|Accum]).
-%% -----------------------------------------------------------------------------
-
-%% Help function generating tracerdata to init inviso tracing. The generation
-%% is done by the TracerDataGenerator, TDG, function.
-%% Individual tracerdata is generated for each node in Nodes.
-%% Returns {ok,TracerData} or {error,Reason}.
-call_tracer_data_generator(undefined,M,F,TDGargs,_Nodes) -> % Non distributed.
- case catch call_tracer_data_generator_3(M,F,TDGargs,local_runtime) of
- {'EXIT',Reason} ->
- {error,{'EXIT',Reason}};
- TracerData ->
- {ok,TracerData}
- end;
-call_tracer_data_generator(_CNode,M,F,TDGargs,Nodes) ->
- case catch call_tracer_data_generator_2(M,F,TDGargs,Nodes) of
- {'EXIT',Reason} ->
- {error,{'EXIT',Reason}};
- TracerList ->
- {ok,TracerList}
- end.
-
-call_tracer_data_generator_2(M,F,TDGargs,[Node|Rest]) ->
- [{Node,call_tracer_data_generator_3(M,F,TDGargs,Node)}|
- call_tracer_data_generator_2(M,F,TDGargs,Rest)];
-call_tracer_data_generator_2(_,_,_,[]) ->
- [].
-
-call_tracer_data_generator_3(M,F,TDGargs,Node) ->
- apply(M,F,call_tracer_data_generator_mkargs(Node,TDGargs)).
-
-%% This function creates the arguments that the tracer data generator function
-%% accepts (in an apply call). The reason for making it a sepparate function is
-%% that the arguments are constructed in more situations than just when actually
-%% doing the apply. By having a function it will become obvious where to change
-%% should the arguments change.
-call_tracer_data_generator_mkargs(Node,TDGargs) ->
- inviso_tool_lib:mk_complete_tdg_args(Node,TDGargs).
-%% -----------------------------------------------------------------------------
-
-%% This function acts as standard options generator function. That is returning
-%% the options argument to inviso:add_node/3. Note that this function must not
-%% return the dependency part of that option.
-std_options_generator(_Node) ->
- []. % No particular options(!)
-%% -----------------------------------------------------------------------------
-
-
-%% Help function checking that Vars contains a binding for every variable
-%% listed in the VarNames field in TraceCase. Note that the special variable 'Nodes'
-%% is disregarded, since it is always added by the inviso_tool.
-%% Returns {ok,Bindings} or {error,Reason}. Where Bindings is a bindngs structure
-%% according to file:eval functionality.
-check_bindings(Vars,TraceCase) ->
- case catch check_bindings_2(Vars,
- get_tc_varnames(TraceCase),
- erl_eval:new_bindings()) of
- {'EXIT',_Reason} ->
- {error,variable_error};
- {error,Reason} -> % Missing a bindning.
- {error,Reason};
- {ok,Bindings} ->
- {ok,Bindings}
- end.
-
-check_bindings_2(Vars,['Nodes'|Rest],Bindings) ->
- check_bindings_2(Vars,Rest,Bindings); % Disregard Nodes since it is automatic.
-check_bindings_2(Vars,[VarName|Rest],Bindings) ->
- case lists:keysearch(VarName,1,Vars) of
- {value,{_,Val}} ->
- check_bindings_2(Vars,Rest,erl_eval:add_binding(VarName,Val,Bindings));
- false -> % Mandatory variable missing.
- {error,{missing_variable,VarName}} % Quite here then.
- end;
-check_bindings_2(_,[],Bindings) ->
- {ok,Bindings}.
-%% -----------------------------------------------------------------------------
-
-%% This help function checks that the command the user tries to do is amongst
-%% the inviso API. It at the same time returns what kind of command it is.
-%% {true,RegExpFlag} or 'false' where RegExpFlag indicates if this command
-%% needs to have its argument modified by module regexp expansion or not.
-check_proper_inviso_call(Cmd,Arity) ->
- case lists:member({Cmd,Arity},?INVISO_CMDS) of
- true -> % It is part of inviso API.
- {true,check_proper_inviso_call_regexp(Cmd,Arity)};
- false ->
- false
- end.
-
-%% Returns {Type,Arity,PlaceOfModuleSpec} or 'false'.
-check_proper_inviso_call_regexp(tp,5) -> {tp,5,1};
-check_proper_inviso_call_regexp(tp,4) -> {tp,4,1};
-check_proper_inviso_call_regexp(tp,1) -> {tp,1,1};
-check_proper_inviso_call_regexp(tpl,5) -> {tp,5,1};
-check_proper_inviso_call_regexp(tpl,4) -> {tp,4,1};
-check_proper_inviso_call_regexp(tpl,1) -> {tp,1,1};
-check_proper_inviso_call_regexp(ctp,3) -> {ctp,3,1};
-check_proper_inviso_call_regexp(ctp,1) -> {ctp,1,1};
-check_proper_inviso_call_regexp(ctpl,3) -> {ctp,3,1};
-check_proper_inviso_call_regexp(ctpl,1) -> {ctp,1,1};
-check_proper_inviso_call_regexp(_,_) -> % No regexp expansion.
- false.
-%% -----------------------------------------------------------------------------
-
-%% Help function checking if this inviso command shall be added to the command
-%% history log. Returns true or false.
-check_inviso_call_to_history(Cmd,Arity) ->
- case lists:member({Cmd,Arity},?INVISO_CMD_HISTORY) of
- true ->
- true;
- false ->
- false
- end.
-%% -----------------------------------------------------------------------------
-
-%% Help function traversing the arguments and expanding module names stated
-%% as regular expressions. This means that the resulting arguments may be longer
-%% than the orginal ones.
-%% When we run this function it has been determined that we are a distributed
-%% system.
-%% Also note that if there are no regexps in Args, no regexpansion will be
-%% made and RegExpNode may be 'undefined' (as it is if not set at start-up).
-%% If RegExpNode is unavailable the nodes found in Nodes will be used until
-%% one that works is found.
-expand_module_regexps(Args,_RegExpNode,_Nodes,false) ->
- {ok,Args};
-expand_module_regexps([PatternList],RegExpNode,Nodes,{tp,1,1}) ->
- case catch expand_module_regexps_tp(PatternList,RegExpNode,Nodes) of
+ {ok,{ASD,h_get_autostart_data_2(Nodes,CNode,Dependency,M,F,TDGargs,OptsG)}};
+h_get_autostart_data(Nodes,_CNode,_Dependency,_ASD,_M,_F,_TDGargs,_OptsG) ->
+ {error,{badarg,Nodes}}.
+
+h_get_autostart_data_2([Node|Rest],CNode,Dependency,M,F,TDGargs,OptsG) ->
+ CompleteTDGargs=call_tracer_data_generator_mkargs(Node,TDGargs),
+ Opts0=start_runtime_components_mk_opts(Node,OptsG),
+ Opts=[Dependency|lists:keydelete(dependency,1,Opts0)],
+ [{Node,{ok,{Opts,{tdg,{M,F,CompleteTDGargs}}}}}|
+ h_get_autostart_data_2(Rest,CNode,Dependency,M,F,TDGargs,OptsG)];
+h_get_autostart_data_2([],_CNode,_Dependency,_M,_F,_TDGargs,_OptsG) ->
+ [].
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% flush
+%% -----------------------------------------------------------------------------
+
+h_flush(undefined,_Nodes) ->
+ inviso:flush();
+h_flush(CNode,Nodes) ->
+ inviso_tool_lib:inviso_cmd(CNode,flush,[Nodes]).
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% tc_executer_reply
+%% -----------------------------------------------------------------------------
+
+%% Function handling that a trace case has completed its activation phase and
+%% shall now be marked in the Command History Log as running.
+h_tc_activation_done(ProcH,Result,LD=#ld{chl=CHL}) ->
+ case find_tc_executer_chl(ProcH,CHL) of
+ {activating,{TC,Id}} ->
+ case Result of
+ {ok,Value} -> % The trace case is successfully activated.
+ {ok,LD#ld{chl=set_running_chl(ProcH,TC,Id,Value,CHL)}};
+ {error,_} -> % Then pretend it never happened :-)
+ {ok,LD#ld{chl=del_tc_chl(ProcH,TC,Id,CHL)}} % Remove it.
+ end;
+ _ -> % Where did this come from?
+ {ok,LD} % Well just ignore it then.
+ end.
+%% -----------------------------------------------------------------------------
+
+%% Function handling that a trace case has completed its stopping phase and
+%% shall now be nulled in the Command History Log (meaning that it will not
+%% be repeated in the event of a reactivation).
+h_tc_stopping_done(ProcH,Result,LD=#ld{chl=CHL}) ->
+ case find_tc_executer_chl(ProcH,CHL) of
+ {stopping,{TC,Id}} ->
+ case Result of
+ {ok,_Result} -> % _Result is returned from the tracecase.
+ {ok,LD#ld{chl=nullify_chl(ProcH,TC,Id,CHL)}};
+ {error,_} -> % This is difficult, is it still active?
+ {ok,LD#ld{chl=nullify_chl(ProcH,TC,Id,CHL)}}
+ end;
+ _ -> % Strange.
+ {ok,LD}
+ end.
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% Terminate.
+%% -----------------------------------------------------------------------------
+
+%% Help function stopping the inviso control component. Does not return
+%% anything significant.
+stop_inviso_at_c_node(undefined) -> % Non distributed case.
+ inviso:stop();
+stop_inviso_at_c_node(CNode) ->
+ rpc:call(CNode,inviso,stop,[]).
+%% -----------------------------------------------------------------------------
+
+%% Help function that removes all trace patterns from the nodes that are not
+%% marked as nodes where patterns shall be left after inviso is stopped.
+%% Returns {ok,NodeResult} or {error,Reason}. In the non-distributed case
+%% 'ok' is returned in case of success, or 'patterns_untouched'.
+remove_all_trace_patterns(undefined,KeepNodes,_Nodes) ->
+ case KeepNodes of
+ undefined -> % No, remove patterns from localruntime.
+ inviso:ctp_all();
+ _ ->
+ patterns_untouched
+ end;
+remove_all_trace_patterns(CNode,KeepNodes,Nodes) ->
+ Nodes2=lists:filter(fun(N)->not(lists:member(N,KeepNodes)) end,Nodes),
+ case inviso_tool_lib:inviso_cmd(CNode,ctp_all,[Nodes2]) of
+ {ok,NodeResults} ->
+ F=fun(N) ->
+ case lists:member(N,KeepNodes) of
+ true ->
+ {N,patterns_untouched};
+ false ->
+ case lists:keysearch(N,1,NodeResults) of
+ {value,Result} ->
+ Result; % {Node,ok}
+ false -> % Extremely strange.
+ {N,{error,general_error}}
+ end
+ end
+ end,
+ {ok,lists:map(F,Nodes)};
+ {error,{badrpc,Reason}} ->
+ {error,{inviso_control_node_error,Reason}};
+ {error,Reason} ->
+ {error,Reason}
+ end.
+%% -----------------------------------------------------------------------------
+
+%% =============================================================================
+%% Second level help functions.
+%% =============================================================================
+
+%% Help function building a reply to a reconnection call based on which nodes
+%% were asked to be reconnected and which of those are actually now working.
+%% We make an effort to return the results in the same order as the nodes were
+%% mentioned in the original call (Nodes).
+build_reconnect_nodes_reply(local_runtime,local_runtime,_NodesErr,NodesD) ->
+ case get_state_nodes(local_runtime,NodesD) of
+ down ->
+ {error,down};
+ {State,Status} ->
+ {ok,{State,Status}}
+ end;
+build_reconnect_nodes_reply(local_runtime,_,NodesErr,_NodesD) ->
+ NodesErr;
+build_reconnect_nodes_reply([Node|Rest],Nodes2,NodesErr,NodesD) ->
+ case lists:member(Node,Nodes2) of
+ true -> % Ok, look in the #ld.nodes.
+ case get_state_nodes(Node,NodesD) of
+ down -> % Some kind of failure, still down.
+ [{Node,{error,down}}|
+ build_reconnect_nodes_reply(Rest,Nodes2,NodesErr,NodesD)];
+ {State,Status} -> % {State,Status}
+ [{Node,{ok,{State,Status}}}|
+ build_reconnect_nodes_reply(Rest,Nodes2,NodesErr,NodesD)]
+ end;
+ false -> % Error already from the beginning.
+ {value,{_,Error}}=lists:keysearch(Node,1,NodesErr),
+ [{Node,Error}|build_reconnect_nodes_reply(Rest,Nodes2,NodesErr,NodesD)]
+ end;
+build_reconnect_nodes_reply([],_,_,_) ->
+ [].
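+%% Illustrative example (an assumption; node names are hypothetical): for a call
+%% asking to reconnect [n1@host,n2@host] where only n1 came back, the reply
+%% built above is expected to look like
+%%   [{n1@host,{ok,{inactive,running}}},{n2@host,{error,down}}]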
+%% -----------------------------------------------------------------------------
+
+%% Help function building a return value to reinitiate_session. Nodes contains
+%% all involved nodes. If the node occurs in NodesErr, we choose the error in
+%% NodesErr. Otherwise the return value in ReturnVal is used.
+build_reinitiate_session_reply(Nodes,NodesErr,{ok,NodesResults}) ->
+ {ok,build_reinitiate_session_reply_2(Nodes,NodesErr,NodesResults)};
+build_reinitiate_session_reply(local_runtime,[],NodeResult) ->
+ NodeResult;
+build_reinitiate_session_reply(local_runtime,NodesErr,_NodeResult) ->
+ NodesErr.
+build_reinitiate_session_reply_2([Node|Rest],NodesErr,NodeResults) ->
+ case lists:keysearch(Node,1,NodesErr) of
+ {value,{_,Error}} ->
+ [{Node,Error}|build_reinitiate_session_reply_2(Rest,NodesErr,NodeResults)];
+ false ->
+ case lists:keysearch(Node,1,NodeResults) of
+ {value,Value} ->
+ [Value|build_reinitiate_session_reply_2(Rest,NodesErr,NodeResults)]
+ end
+ end;
+build_reinitiate_session_reply_2([],_NodesErr,_NodeResults) ->
+ [].
+%% -----------------------------------------------------------------------------
+
+%% Help function returning a history log where stop and stopping entries have
+%% been removed. Further, all tracecase log entries must be set to running since
+%% there cannot be such a thing as an activating tracecase stored away in a
+%% saved history file!
+%% We must also remove any #Ref.
+build_saved_history_data(SortedLog) ->
+ CleanedLog=
+ lists:filter(fun({_,_,Stop,_}) when Stop==stop;Stop==stopping -> false;
+ (_) -> true
+ end,
+ SortedLog),
+ lists:map(fun({{TC,Id},C,activating,B}) -> {{TC,Id},C,running,B};
+ ({{TC,Id},C,S,B}) -> {{TC,Id},C,S,B};
+ ({{M,F,Args,_Ref},C}) -> {{M,F,Args},C};
+ ({{TC,_Ref},C,B}) -> {TC,C,B} % An rtc.
+ end,
+ CleanedLog).
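+%% Illustrative example (an assumption; entry values are hypothetical): a sorted
+%% log such as
+%%   [{{tc1,id1},1,activating,B1},{{M,F,Args,Ref},2},{{tc2,id2},3,stop,B2}]
+%% is expected to be saved as
+%%   [{{tc1,id1},1,running,B1},{{M,F,Args},2}]
+%% i.e. stop/stopping entries are dropped, 'activating' becomes 'running' and
+%% the references are stripped.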
+%% -----------------------------------------------------------------------------
+
+%% This help function builds the AutoStartData structure which is returned from
+%% get_autostart_data. An AutoStartData structure is a list of trace case files and
+%% inviso commands. The order is significant since the idea is that executing
+%% the trace case files and inviso commands in that order will bring a node to
+%% a certain state from a tracing perspective.
+%% Returns {ok,AutoStartData} or {error,Reason}.
+build_autostart_data(SortedLog,TCdict) ->
+ build_autostart_data_2(SortedLog,TCdict,[]).
+
+build_autostart_data_2([{_,_C,Stop,_B}|Rest],TCdict,Accum) when Stop==stop;Stop==stopping->
+ build_autostart_data_2(Rest,TCdict,Accum); % Simply skip deactivated/deactivating.
+build_autostart_data_2([{{TCname,_},_C,activating,Bindings}|Rest],TCdict,Accum) ->
+ build_autostart_data_tc(TCname,Bindings,TCdict,Rest,Accum);
+build_autostart_data_2([{{TCname,_},_C,running,Bindings}|Rest],TCdict,Accum) ->
+ build_autostart_data_tc(TCname,Bindings,TCdict,Rest,Accum);
+build_autostart_data_2([{{TCname,_Ref},_C,Bindings}|Rest],TCdict,Accum) ->
+ build_autostart_data_tc(TCname,Bindings,TCdict,Rest,Accum);
+build_autostart_data_2([{{M,F,Args,_Ref},_C}|Rest],TCdict,Accum) ->
+ build_autostart_data_2(Rest,TCdict,[{mfa,{M,F,Args}}|Accum]);
+build_autostart_data_2([],_TCdict,Accum) ->
+ {ok,lists:reverse(Accum)}.
+
+%% Help function placing the filename in the AutoStartData structure.
+build_autostart_data_tc(TCname,Bindings,TCdict,Rest,Accum) ->
+ {ok,TC}=get_tracecase_tc_dict(TCname,TCdict),
+ {ok,FName}=get_tc_activate_fname(TC),
+ build_autostart_data_2(Rest,TCdict,[{file,{FName,Bindings}}|Accum]).
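+%% Illustrative example (an assumption; the file name is hypothetical): the
+%% resulting AutoStartData may look like
+%%   [{file,{"tc1.tc",Bindings}},{mfa,{M,F,Args}}]
+%% i.e. trace case activation files with their bindings, interleaved with
+%% recorded inviso calls, in the order they shall be redone at a node.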
+%% -----------------------------------------------------------------------------
+
+%% Help function generating tracerdata to init inviso tracing. The generation
+%% is done by the TracerDataGenerator, TDG, function.
+%% Individual tracerdata is generated for each node in Nodes.
+%% Returns {ok,TracerData} or {error,Reason}.
+call_tracer_data_generator(undefined,M,F,TDGargs,_Nodes) -> % Non distributed.
+ case catch call_tracer_data_generator_3(M,F,TDGargs,local_runtime) of
+ {'EXIT',Reason} ->
+ {error,{'EXIT',Reason}};
+ TracerData ->
+ {ok,TracerData}
+ end;
+call_tracer_data_generator(_CNode,M,F,TDGargs,Nodes) ->
+ case catch call_tracer_data_generator_2(M,F,TDGargs,Nodes) of
+ {'EXIT',Reason} ->
+ {error,{'EXIT',Reason}};
+ TracerList ->
+ {ok,TracerList}
+ end.
+
+call_tracer_data_generator_2(M,F,TDGargs,[Node|Rest]) ->
+ [{Node,call_tracer_data_generator_3(M,F,TDGargs,Node)}|
+ call_tracer_data_generator_2(M,F,TDGargs,Rest)];
+call_tracer_data_generator_2(_,_,_,[]) ->
+ [].
+
+call_tracer_data_generator_3(M,F,TDGargs,Node) ->
+ apply(M,F,call_tracer_data_generator_mkargs(Node,TDGargs)).
+
+%% This function creates the arguments that the tracer data generator function
+%% accepts (in an apply call). The reason for making it a separate function is
+%% that the arguments are constructed in more situations than just when actually
+%% doing the apply. Having a separate function makes it obvious what to change
+%% should the arguments change.
+call_tracer_data_generator_mkargs(Node,TDGargs) ->
+ inviso_tool_lib:mk_complete_tdg_args(Node,TDGargs).
+%% -----------------------------------------------------------------------------
+
+%% This function acts as the standard options generator function; that is, it
+%% returns the options argument to inviso:add_node/3. Note that this function must not
+%% return the dependency part of that option.
+std_options_generator(_Node) ->
+ []. % No particular options(!)
+%% -----------------------------------------------------------------------------
+
+
+%% Help function checking that Vars contains a binding for every variable
+%% listed in the VarNames field in TraceCase. Note that the special variable 'Nodes'
+%% is disregarded, since it is always added by the inviso_tool.
+%% Returns {ok,Bindings} or {error,Reason}, where Bindings is a bindings structure
+%% as used by the file:eval functionality.
+check_bindings(Vars,TraceCase) ->
+ case catch check_bindings_2(Vars,
+ get_tc_varnames(TraceCase),
+ erl_eval:new_bindings()) of
+ {'EXIT',_Reason} ->
+ {error,variable_error};
+ {error,Reason} -> % Missing a binding.
+ {error,Reason};
+ {ok,Bindings} ->
+ {ok,Bindings}
+ end.
+
+check_bindings_2(Vars,['Nodes'|Rest],Bindings) ->
+ check_bindings_2(Vars,Rest,Bindings); % Disregard Nodes since it is automatic.
+check_bindings_2(Vars,[VarName|Rest],Bindings) ->
+ case lists:keysearch(VarName,1,Vars) of
+ {value,{_,Val}} ->
+ check_bindings_2(Vars,Rest,erl_eval:add_binding(VarName,Val,Bindings));
+ false -> % Mandatory variable missing.
+ {error,{missing_variable,VarName}} % Quit here then.
+ end;
+check_bindings_2(_,[],Bindings) ->
+ {ok,Bindings}.
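+%% Illustrative example (not part of the original code; the trace case and the
+%% 'Pid' variable are hypothetical): for a trace case whose variable names are
+%% ['Nodes','Pid'], a call
+%%   check_bindings([{'Pid',Pid}],TraceCase)
+%% is expected to return {ok,Bindings} where only 'Pid' is bound, built as
+%%   erl_eval:add_binding('Pid',Pid,erl_eval:new_bindings())
+%% since 'Nodes' is always added later by the tool itself.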
+%% -----------------------------------------------------------------------------
+
+%% This help function checks that the command the user tries to execute is part
+%% of the inviso API. At the same time it returns what kind of command it is.
+%% Returns {true,RegExpFlag} or 'false', where RegExpFlag indicates whether this
+%% command needs to have its arguments modified by module regexp expansion or not.
+check_proper_inviso_call(Cmd,Arity) ->
+ case lists:member({Cmd,Arity},?INVISO_CMDS) of
+ true -> % It is part of inviso API.
+ {true,check_proper_inviso_call_regexp(Cmd,Arity)};
+ false ->
+ false
+ end.
+
+%% Returns {Type,Arity,PlaceOfModuleSpec} or 'false'.
+check_proper_inviso_call_regexp(tp,5) -> {tp,5,1};
+check_proper_inviso_call_regexp(tp,4) -> {tp,4,1};
+check_proper_inviso_call_regexp(tp,1) -> {tp,1,1};
+check_proper_inviso_call_regexp(tpl,5) -> {tp,5,1};
+check_proper_inviso_call_regexp(tpl,4) -> {tp,4,1};
+check_proper_inviso_call_regexp(tpl,1) -> {tp,1,1};
+check_proper_inviso_call_regexp(ctp,3) -> {ctp,3,1};
+check_proper_inviso_call_regexp(ctp,1) -> {ctp,1,1};
+check_proper_inviso_call_regexp(ctpl,3) -> {ctp,3,1};
+check_proper_inviso_call_regexp(ctpl,1) -> {ctp,1,1};
+check_proper_inviso_call_regexp(_,_) -> % No regexp expansion.
+ false.
+%% -----------------------------------------------------------------------------
+
+%% Help function checking if this inviso command shall be added to the command
+%% history log. Returns true or false.
+check_inviso_call_to_history(Cmd,Arity) ->
+ case lists:member({Cmd,Arity},?INVISO_CMD_HISTORY) of
+ true ->
+ true;
+ false ->
+ false
+ end.
+%% -----------------------------------------------------------------------------
+
+%% Help function traversing the arguments and expanding module names stated
+%% as regular expressions. This means that the resulting arguments may be longer
+%% than the original ones.
+%% When we run this function it has been determined that we are a distributed
+%% system.
+%% Also note that if there are no regexps in Args, no regexp expansion will be
+%% made and RegExpNode may be 'undefined' (as it is if not set at start-up).
+%% If RegExpNode is unavailable the nodes found in Nodes will be used until
+%% one that works is found.
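+%%
+%% Illustration only (assumed regexp syntax): a tp argument such as
+%%   [{"^mnesia",'_','_',[],[]}]
+%% may expand to one {Mod,'_','_',[],[]} entry per module known at RegExpNode
+%% whose name matches the regexp, e.g. mnesia, mnesia_tm, ...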
+expand_module_regexps(Args,_RegExpNode,_Nodes,false) ->
+ {ok,Args};
+expand_module_regexps([PatternList],RegExpNode,Nodes,{tp,1,1}) ->
+ case catch expand_module_regexps_tp(PatternList,RegExpNode,Nodes) of
NewPatternList when is_list(NewPatternList) ->
- {ok,[NewPatternList]};
- {error,Reason} ->
- {error,Reason}
- end;
-expand_module_regexps([PatternList],RegExpNode,Nodes,{ctp,1,1}) ->
- case catch expand_module_regexps_ctp(PatternList,RegExpNode,Nodes) of
+ {ok,[NewPatternList]};
+ {error,Reason} ->
+ {error,Reason}
+ end;
+expand_module_regexps([PatternList],RegExpNode,Nodes,{ctp,1,1}) ->
+ case catch expand_module_regexps_ctp(PatternList,RegExpNode,Nodes) of
NewPatternList when is_list(NewPatternList) ->
- {ok,[NewPatternList]};
- {error,Reason} ->
- {error,Reason}
- end;
-expand_module_regexps([M,F,Arity,MS,Opts],RegExpNode,Nodes,{tp,5,1}) ->
- expand_module_regexps([[{M,F,Arity,MS,Opts}]],RegExpNode,Nodes,{tp,1,1});
-expand_module_regexps([M,F,Arity,MS],RegExpNode,Nodes,{tp,4,1}) ->
- expand_module_regexps([[{M,F,Arity,MS,[]}]],RegExpNode,Nodes,{tp,1,1});
-expand_module_regexps([M,F,Arity],RegExpNode,Nodes,{ctp,3,1}) ->
- expand_module_regexps([[{M,F,Arity}]],RegExpNode,Nodes,{ctp,1,1}).
-
-
+ {ok,[NewPatternList]};
+ {error,Reason} ->
+ {error,Reason}
+ end;
+expand_module_regexps([M,F,Arity,MS,Opts],RegExpNode,Nodes,{tp,5,1}) ->
+ expand_module_regexps([[{M,F,Arity,MS,Opts}]],RegExpNode,Nodes,{tp,1,1});
+expand_module_regexps([M,F,Arity,MS],RegExpNode,Nodes,{tp,4,1}) ->
+ expand_module_regexps([[{M,F,Arity,MS,[]}]],RegExpNode,Nodes,{tp,1,1});
+expand_module_regexps([M,F,Arity],RegExpNode,Nodes,{ctp,3,1}) ->
+ expand_module_regexps([[{M,F,Arity}]],RegExpNode,Nodes,{ctp,1,1}).
+
+
expand_module_regexps_tp([E={M,_,_,_,_}|Rest],RegExpNode,Nodes) when is_atom(M) ->
- [E|expand_module_regexps_tp(Rest,RegExpNode,Nodes)];
+ [E|expand_module_regexps_tp(Rest,RegExpNode,Nodes)];
expand_module_regexps_tp([{M,F,Arity,MS,Opts}|Rest],RegExpNode,Nodes) when is_list(M);is_tuple(M) ->
- case inviso_tool_lib:expand_module_names([RegExpNode],
- M,
- [{expand_only_at,RegExpNode}]) of
- {singlenode_expansion,Modules} ->
- expand_module_regexps_tp_2(Modules,F,Arity,MS,Opts,Rest,RegExpNode,Nodes);
- {error,{faulty_node,RegExpNode}} -> % RegExpNode probably down.
- case Nodes of
- [NewRegExpNode|RestNodes] -> % Ok, just choose a node.
- expand_module_regexps_tp([{M,F,Arity,MS,Opts}|Rest],NewRegExpNode,RestNodes);
- [] -> % No more nodes to choose from.
- throw({error,no_available_regexpnode})
- end;
- {error,_Reason} ->
- expand_module_regexps_tp(Rest,RegExpNode,Nodes)
- end;
-expand_module_regexps_tp([_|Rest],RegExpNode,Nodes) ->
- expand_module_regexps_tp(Rest,RegExpNode,Nodes); % Skip faulty module specification.
-expand_module_regexps_tp([],_RegExpNodes,_Nodes) ->
- [].
-
-expand_module_regexps_tp_2([M|MRest],F,Arity,MS,Opts,Rest,RegExpNode,Nodes) ->
- [{M,F,Arity,MS,Opts}|
- expand_module_regexps_tp_2(MRest,F,Arity,MS,Opts,Rest,RegExpNode,Nodes)];
-expand_module_regexps_tp_2([],_,_,_,_,Rest,RegExpNode,Nodes) ->
- expand_module_regexps_tp(Rest,RegExpNode,Nodes).
-
+ case inviso_tool_lib:expand_module_names([RegExpNode],
+ M,
+ [{expand_only_at,RegExpNode}]) of
+ {singlenode_expansion,Modules} ->
+ expand_module_regexps_tp_2(Modules,F,Arity,MS,Opts,Rest,RegExpNode,Nodes);
+ {error,{faulty_node,RegExpNode}} -> % RegExpNode probably down.
+ case Nodes of
+ [NewRegExpNode|RestNodes] -> % Ok, just choose a node.
+ expand_module_regexps_tp([{M,F,Arity,MS,Opts}|Rest],NewRegExpNode,RestNodes);
+ [] -> % No more nodes to choose from.
+ throw({error,no_available_regexpnode})
+ end;
+ {error,_Reason} ->
+ expand_module_regexps_tp(Rest,RegExpNode,Nodes)
+ end;
+expand_module_regexps_tp([_|Rest],RegExpNode,Nodes) ->
+ expand_module_regexps_tp(Rest,RegExpNode,Nodes); % Skip faulty module specification.
+expand_module_regexps_tp([],_RegExpNodes,_Nodes) ->
+ [].
+
+expand_module_regexps_tp_2([M|MRest],F,Arity,MS,Opts,Rest,RegExpNode,Nodes) ->
+ [{M,F,Arity,MS,Opts}|
+ expand_module_regexps_tp_2(MRest,F,Arity,MS,Opts,Rest,RegExpNode,Nodes)];
+expand_module_regexps_tp_2([],_,_,_,_,Rest,RegExpNode,Nodes) ->
+ expand_module_regexps_tp(Rest,RegExpNode,Nodes).
+
expand_module_regexps_ctp([E={M,_,_}|Rest],RegExpNode,Nodes) when is_atom(M) ->
- [E|expand_module_regexps_ctp(Rest,RegExpNode,Nodes)];
+ [E|expand_module_regexps_ctp(Rest,RegExpNode,Nodes)];
expand_module_regexps_ctp([{M,F,Arity}|Rest],RegExpNode,Nodes) when is_list(M);is_tuple(M) ->
- case inviso_tool_lib:expand_module_names([RegExpNode],
- M,
- [{expand_only_at,RegExpNode}]) of
- {singlenode_expansion,badrpc} -> % RegExpNode probably down.
- case Nodes of
- [NewRegExpNode|RestNodes] -> % Ok, just choose a node.
- expand_module_regexps_ctp([{M,F,Arity}|Rest],NewRegExpNode,RestNodes);
- [] -> % No more nodes to choose from.
- throw({error,no_available_regexpnode})
- end;
- {singlenode_expansion,Modules} ->
- expand_module_regexps_ctp_2(Modules,F,Arity,Rest,RegExpNode,Nodes);
- {error,_Reason} ->
- expand_module_regexps_ctp(Rest,RegExpNode,Nodes)
- end;
-expand_module_regexps_ctp([_|Rest],RegExpNode,Nodes) ->
- expand_module_regexps_tp(Rest,RegExpNode,Nodes); % Skip faulty module specification.
-expand_module_regexps_ctp([],_RegExpNodes,_Nodes) ->
- [].
-
-expand_module_regexps_ctp_2([M|MRest],F,Arity,Rest,RegExpNode,Nodes) ->
- [{M,F,Arity}|expand_module_regexps_ctp_2(MRest,F,Arity,Rest,RegExpNode,Nodes)];
-expand_module_regexps_ctp_2([],_,_,Rest,RegExpNode,Nodes) ->
- expand_module_regexps_ctp(Rest,RegExpNode,Nodes).
-%% -----------------------------------------------------------------------------
-
-
-
-%% Help function running the activation of a trace case. Note that this must
-%% be done at the inviso control component's Erlang node *and* that it must be
-%% done in its own process since there is no telling for how long a trace case
-%% may run.
-%% Returns {ok,ActivationHandler}.
-exec_trace_case_on(CNode,TraceCase,Bindings,Nodes) ->
- {ok,TcFName}=get_tc_activate_fname(TraceCase),
- {ok,exec_trace_case_2(CNode,
- TcFName,
- erl_eval:add_binding('Nodes',Nodes,Bindings),
- activating)}.
-
-%% Help function running the deactivation of a trace case.
-exec_trace_case_off(CNode,TraceCase,Bindings,Nodes) ->
- case get_tc_deactivate_fname(TraceCase) of
- {ok,TcFName} -> % There is a deactivation.
- {ok,exec_trace_case_2(CNode,
- TcFName,
- erl_eval:add_binding('Nodes',Nodes,Bindings),
- stopping)};
- false ->
- {error,no_deactivation}
- end.
-
-exec_trace_case_2(CNode,TcFName,Bindings,Phase) ->
- if
- CNode==undefined -> % The non distributed case.
- spawn_link(?MODULE,tc_executer,[TcFName,Bindings,Phase,self()]);
- true ->
- spawn_link(CNode,?MODULE,tc_executer,[TcFName,Bindings,Phase,self()])
- end.
-
-%% This function is run in its own process and is responsible for executing
-%% the trace case.
-tc_executer(TcFName,Bindings,Phase,Parent) ->
- case catch file:script(TcFName,Bindings) of
- {ok,Value} ->
- tc_executer_reply(Parent,{Phase,self(),{ok,Value}});
- {'EXIT',Reason} ->
- tc_executer_reply(Parent,{Phase,self(),{error,{'EXIT',Reason}}});
- Error ->
- tc_executer_reply(Parent,{Phase,self(),Error})
- end.
-%% -----------------------------------------------------------------------------
-
-%% Help function which starts a reactivator process redoing command history at
-%% Node. It also updates the loopdata to indicate that Node is now in state
-%% reactivating. It is a good idea to only handle one node per reactivator process.
-%% This because if the node terminates and comes back up, the reactivator must be
-%% stopped.
-redo_cmd_history(Node,LD=#ld{c_node=CNode,tc_dict=TCdict,chl=CHL,nodes=NodesD}) ->
- P=start_reactivator(Node,CNode,TCdict,CHL),
- LD#ld{nodes=set_reactivating_nodes(Node,NodesD),
- reactivators=add_reactivators(Node,P,LD#ld.reactivators)}.
-
-%% Help function starting a reactivator process replaying the command history log.
-%% Returns a pid of the reactivator process.
-start_reactivator(Node,CNode,TCdict,CHL) ->
- UnsortedLog=get_loglist_chl(CHL), % Must fetch here, later on wrong node.
- if
- CNode==undefined -> % The non-distributed case.
- spawn_link(?MODULE,
- reactivator_executer,
- [Node,TCdict,UnsortedLog,self(),0,[]]);
- true ->
- spawn_link(CNode,
- ?MODULE,
- reactivator_executer,
- [Node,TCdict,UnsortedLog,self(),0,[]])
- end.
-
-%% The strategy is to traverse the CHL ETS table in Counter order, redoing the
-%% commands one by one. We wait until one command is finished until we do the
-%% next. Commands marked as nullified are not performed. In fact when a command
-%% is nullified only the stop will be found in the CHL. Its activation will be
-%% removed.
-reactivator_executer(Node,TCdict,UnsortedLog,TPid,StartCounter,DoneCases) ->
- SortedLog=lists:keysort(2,UnsortedLog), % Sort on Counter, oldest first.
- Log=reactivator_skip_log_entries(SortedLog,StartCounter),
- case reactivator_executer_2(Node,TCdict,TPid,StartCounter,DoneCases,Log) of
- done ->
- true; % Simply terminate the reactivator then.
- {more,{NewStartCounter,NewDoneCases,NewUnsortedLog}} ->
- reactivator_executer(Node,TCdict,NewUnsortedLog,TPid,NewStartCounter,NewDoneCases)
- end.
-
-reactivator_executer_2(Node,TCdict,TPid,_Counter,DoneCases,
- [{{TCname,Id},NextC,running,Bindings}|Rest]) ->
- reactivator_executer_3(Node,TCdict,TPid,DoneCases,Rest,TCname,Id,NextC,Bindings,Rest);
-reactivator_executer_2(Node,TCdict,TPid,_Counter,DoneCases,
- [{{TCname,_Ref},NextC,Bindings}|Rest]) ->
- reactivator_executer_rtc(Node,TCdict,TPid,DoneCases,Rest,TCname,NextC,Bindings,Rest);
-reactivator_executer_2(Node,TCdict,TPid,_Counter,DoneCases,
- [{{TCname,Id},NextC,activating,Bindings}|Rest]) ->
- reactivator_executer_3(Node,TCdict,TPid,DoneCases,Rest,TCname,Id,NextC,Bindings,Rest);
-reactivator_executer_2(Node,TCdict,TPid,_Counter,DoneCases,
- [{{M,F,Args,_Ref},NextC}|Rest]) ->
- reactivator_executer_cmd(Node,M,F,Args),
- reactivator_executer_2(Node,TCdict,TPid,NextC,DoneCases,Rest);
-reactivator_executer_2(Node,TCdict,TPid,_Counter,DoneCases,
- [{{_TCname,_Id},NextC,stopping,_Bindings}|Rest]) ->
- reactivator_executer_2(Node,TCdict,TPid,NextC,DoneCases,Rest);
-reactivator_executer_2(Node,TCdict,TPid,_Counter,DoneCases,
- [{{TCname,Id,_Ref},NextC,stop,Bindings}|Rest]) ->
- case lists:member({TCname,Id},DoneCases) of
- true -> % We have activated it, must stop then.
- case get_tracecase_tc_dict(TCname,TCdict) of
- {ok,{_,_,_,_,FNameOff}} ->
- reactivator_executer_tc(Node,Bindings,FNameOff),
- NewDoneCases=lists:delete({TCname,Id},DoneCases),
- reactivator_executer_2(Node,TCdict,TPid,NextC,NewDoneCases,Rest);
- {ok,_} -> % No stop-filename, strange!
- reactivator_executer_2(Node,TCdict,TPid,NextC,DoneCases,Rest);
- false -> % Even stranger, does not exist!?
- reactivator_executer_2(Node,TCdict,TPid,NextC,DoneCases,Rest)
- end;
- false -> % Never activated in the first place.
- reactivator_executer_2(Node,TCdict,TPid,NextC,DoneCases,Rest)
- end;
-%% Done all log entries found this lap. See if there are more entries by now.
-reactivator_executer_2(_Node,_TCdict,TPid,Counter,DoneCases,[]) ->
- case reactivator_reply(TPid,Counter) of % Ask the tool process for more entries.
- done -> % No more entries in the CHL.
- done;
- {more,NewUnsortedLog} -> % Repeat the procedure
- {more,{Counter+1,DoneCases,NewUnsortedLog}} % with log entries from Counter+1.
- end.
-
-%% This help function activates a tracecase.
-reactivator_executer_3(Node,TCdict,TPid,DoneCases,Rest,TCname,Id,NextC,Bindings,Rest) ->
- case get_tracecase_tc_dict(TCname,TCdict) of
- {ok,{_,_,_,FNameOn}} -> % A case with just on functionality.
- reactivator_executer_tc(Node,Bindings,FNameOn),
- reactivator_executer_2(Node,TCdict,TPid,NextC,[{TCname,Id}|DoneCases],Rest);
- {ok,{_,_,_,FNameOn,_}} ->
- reactivator_executer_tc(Node,Bindings,FNameOn),
- reactivator_executer_2(Node,TCdict,TPid,NextC,[{TCname,Id}|DoneCases],Rest);
- false -> % Strange, does not exist anylonger!?
- reactivator_executer_2(Node,TCdict,TPid,NextC,DoneCases,Rest)
- end.
-
-%% Help function executing a trace case in the reactivators context. Does not
-%% return anything significant.
-reactivator_executer_tc(Node,Bindings,FileName) ->
- catch file:eval(FileName,erl_eval:add_binding('Nodes',[Node],Bindings)).
-
-%% Help function handling trace case that are simply executed - rtc.
-reactivator_executer_rtc(Node,TCdict,TPid,DoneCases,Rest,TCname,NextC,Bindings,Rest) ->
- case get_tracecase_tc_dict(TCname,TCdict) of
- {ok,{_,_,_,FNameOn}} -> % A case with just on functionality.
- reactivator_executer_tc(Node,Bindings,FNameOn),
- reactivator_executer_2(Node,TCdict,TPid,NextC,DoneCases,Rest);
- {ok,{_,_,_,FNameOn,_}} ->
- reactivator_executer_tc(Node,Bindings,FNameOn),
- reactivator_executer_2(Node,TCdict,TPid,NextC,DoneCases,Rest);
- false -> % Strange, does not exist anylonger!?
- reactivator_executer_2(Node,TCdict,TPid,NextC,DoneCases,Rest)
- end.
-
-reactivator_executer_cmd(nonode@nohost,M,F,Args) ->
- catch apply(M,F,Args); % Non-distributed.
-reactivator_executer_cmd(Node,M,F,Args) ->
- catch apply(M,F,[[Node]|Args]).
-
-%% Help function returning a list of log entries missing the first entries
-%% having a counter less or equal to C1.
-reactivator_skip_log_entries([{_,C,_,_}|Rest],C1) when C<C1 ->
- reactivator_skip_log_entries(Rest,C1);
-reactivator_skip_log_entries([{_,C}|Rest],C1) when C<C1 ->
- reactivator_skip_log_entries(Rest,C1);
-reactivator_skip_log_entries(Log,_) ->
- Log.
-%% -----------------------------------------------------------------------------
-
-%% Help function returning the node name to use in an rpc call.
-get_rpc_nodename(undefined) ->
- node();
-get_rpc_nodename(CNode) ->
- CNode.
-%% -----------------------------------------------------------------------------
-
-mk_rt_tag() ->
- inviso_tool.
-%% -----------------------------------------------------------------------------
-
-is_string([C|Rest]) when C>=32, C=<255 ->
- is_string(Rest);
-is_string([]) ->
- true;
-is_string(_) ->
- false.
-%% -----------------------------------------------------------------------------
-
-
-%% -----------------------------------------------------------------------------
-%% Functions for handling the configuration file.
-%% -----------------------------------------------------------------------------
-
-%% The inviso tool is configured via start arguments and/or a configuration file.
-%% Start arguments will override any definitions in a configuration file.
-%% The configuration file is pointed out by either a start argument or the
-%% inviso application parameter 'inviso_tool_config_file'.
-
-%% Help function building the internal configuration structure. Configurations
-%% in the start argument will override parameters found in a configuration file.
-fetch_configuration(Config) ->
- case fetch_config_filename(Config) of
- {ok,FName} -> % We are supposed to use a conf-file.
- case read_config_file(FName) of
- {ok,LD} -> % Managed to open a file.
- NewLD=read_config_list(LD,Config),
- {ok,NewLD};
- {error,_Reason} -> % Problem finding/opening file.
- LD=read_config_list(#ld{},Config),
- {ok,LD}
- end;
- false -> % No filename specified.
- LD=read_config_list(#ld{},Config),
- {ok,LD}
- end.
-
-%% Help function determining the name of the file which shall be consulted as
-%% the main configuration file.
-%% Returns {ok,FileName} or 'false'. The latter if no name could be determined.
-fetch_config_filename(Config) ->
- case catch lists:keysearch(config_file,1,Config) of
+ case inviso_tool_lib:expand_module_names([RegExpNode],
+ M,
+ [{expand_only_at,RegExpNode}]) of
+ {singlenode_expansion,Modules} ->
+ expand_module_regexps_ctp_2(Modules,F,Arity,Rest,RegExpNode,Nodes);
+ {error,_Reason} ->
+ expand_module_regexps_ctp(Rest,RegExpNode,Nodes)
+ end;
+expand_module_regexps_ctp([_|Rest],RegExpNode,Nodes) ->
+ expand_module_regexps_ctp(Rest,RegExpNode,Nodes); % Skip faulty module specification.
+expand_module_regexps_ctp([],_RegExpNodes,_Nodes) ->
+ [].
+
+expand_module_regexps_ctp_2([M|MRest],F,Arity,Rest,RegExpNode,Nodes) ->
+ [{M,F,Arity}|expand_module_regexps_ctp_2(MRest,F,Arity,Rest,RegExpNode,Nodes)];
+expand_module_regexps_ctp_2([],_,_,Rest,RegExpNode,Nodes) ->
+ expand_module_regexps_ctp(Rest,RegExpNode,Nodes).
+%% -----------------------------------------------------------------------------
+
+
+
+%% Help function running the activation of a trace case. Note that this must
+%% be done at the inviso control component's Erlang node *and* that it must be
+%% done in its own process since there is no telling for how long a trace case
+%% may run.
+%% Returns {ok,ActivationHandler}.
+exec_trace_case_on(CNode,TraceCase,Bindings,Nodes) ->
+ {ok,TcFName}=get_tc_activate_fname(TraceCase),
+ {ok,exec_trace_case_2(CNode,
+ TcFName,
+ erl_eval:add_binding('Nodes',Nodes,Bindings),
+ activating)}.
+
+%% Help function running the deactivation of a trace case.
+exec_trace_case_off(CNode,TraceCase,Bindings,Nodes) ->
+ case get_tc_deactivate_fname(TraceCase) of
+ {ok,TcFName} -> % There is a deactivation.
+ {ok,exec_trace_case_2(CNode,
+ TcFName,
+ erl_eval:add_binding('Nodes',Nodes,Bindings),
+ stopping)};
+ false ->
+ {error,no_deactivation}
+ end.
+
+exec_trace_case_2(CNode,TcFName,Bindings,Phase) ->
+ if
+ CNode==undefined -> % The non distributed case.
+ spawn_link(?MODULE,tc_executer,[TcFName,Bindings,Phase,self()]);
+ true ->
+ spawn_link(CNode,?MODULE,tc_executer,[TcFName,Bindings,Phase,self()])
+ end.
+
+%% This function is run in its own process and is responsible for executing
+%% the trace case.
+tc_executer(TcFName,Bindings,Phase,Parent) ->
+ case catch file:script(TcFName,Bindings) of
+ {ok,Value} ->
+ tc_executer_reply(Parent,{Phase,self(),{ok,Value}});
+ {'EXIT',Reason} ->
+ tc_executer_reply(Parent,{Phase,self(),{error,{'EXIT',Reason}}});
+ Error ->
+ tc_executer_reply(Parent,{Phase,self(),Error})
+ end.
+%% -----------------------------------------------------------------------------
+
+%% Help function which starts a reactivator process redoing command history at
+%% Node. It also updates the loopdata to indicate that Node is now in state
+%% reactivating. It is a good idea to only handle one node per reactivator process.
+%% This is because if the node terminates and comes back up, the reactivator must be
+%% stopped.
+redo_cmd_history(Node,LD=#ld{c_node=CNode,tc_dict=TCdict,chl=CHL,nodes=NodesD}) ->
+ P=start_reactivator(Node,CNode,TCdict,CHL),
+ LD#ld{nodes=set_reactivating_nodes(Node,NodesD),
+ reactivators=add_reactivators(Node,P,LD#ld.reactivators)}.
+
+%% Help function starting a reactivator process replaying the command history log.
+%% Returns a pid of the reactivator process.
+start_reactivator(Node,CNode,TCdict,CHL) ->
+ UnsortedLog=get_loglist_chl(CHL), % Must fetch here, later on wrong node.
+ if
+ CNode==undefined -> % The non-distributed case.
+ spawn_link(?MODULE,
+ reactivator_executer,
+ [Node,TCdict,UnsortedLog,self(),0,[]]);
+ true ->
+ spawn_link(CNode,
+ ?MODULE,
+ reactivator_executer,
+ [Node,TCdict,UnsortedLog,self(),0,[]])
+ end.
+
+%% The strategy is to traverse the CHL ETS table in Counter order, redoing the
+%% commands one by one. We wait until one command is finished before we do the
+%% next. Commands marked as nullified are not performed. In fact when a command
+%% is nullified only the stop will be found in the CHL. Its activation will be
+%% removed.
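+%%
+%% Entry forms handled below (summarised from the clauses that follow):
+%%   {{TCname,Id},Counter,running|activating,Bindings}  activate a trace case
+%%   {{TCname,Ref},Counter,Bindings}                    rtc, just execute
+%%   {{M,F,Args,Ref},Counter}                           plain inviso command
+%%   {{TCname,Id},Counter,stopping,Bindings}            skipped here
+%%   {{TCname,Id,Ref},Counter,stop,Bindings}            stop an activated case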
+reactivator_executer(Node,TCdict,UnsortedLog,TPid,StartCounter,DoneCases) ->
+ SortedLog=lists:keysort(2,UnsortedLog), % Sort on Counter, oldest first.
+ Log=reactivator_skip_log_entries(SortedLog,StartCounter),
+ case reactivator_executer_2(Node,TCdict,TPid,StartCounter,DoneCases,Log) of
+ done ->
+ true; % Simply terminate the reactivator then.
+ {more,{NewStartCounter,NewDoneCases,NewUnsortedLog}} ->
+ reactivator_executer(Node,TCdict,NewUnsortedLog,TPid,NewStartCounter,NewDoneCases)
+ end.
+
+reactivator_executer_2(Node,TCdict,TPid,_Counter,DoneCases,
+ [{{TCname,Id},NextC,running,Bindings}|Rest]) ->
+ reactivator_executer_3(Node,TCdict,TPid,DoneCases,Rest,TCname,Id,NextC,Bindings,Rest);
+reactivator_executer_2(Node,TCdict,TPid,_Counter,DoneCases,
+ [{{TCname,_Ref},NextC,Bindings}|Rest]) ->
+ reactivator_executer_rtc(Node,TCdict,TPid,DoneCases,Rest,TCname,NextC,Bindings,Rest);
+reactivator_executer_2(Node,TCdict,TPid,_Counter,DoneCases,
+ [{{TCname,Id},NextC,activating,Bindings}|Rest]) ->
+ reactivator_executer_3(Node,TCdict,TPid,DoneCases,Rest,TCname,Id,NextC,Bindings,Rest);
+reactivator_executer_2(Node,TCdict,TPid,_Counter,DoneCases,
+ [{{M,F,Args,_Ref},NextC}|Rest]) ->
+ reactivator_executer_cmd(Node,M,F,Args),
+ reactivator_executer_2(Node,TCdict,TPid,NextC,DoneCases,Rest);
+reactivator_executer_2(Node,TCdict,TPid,_Counter,DoneCases,
+ [{{_TCname,_Id},NextC,stopping,_Bindings}|Rest]) ->
+ reactivator_executer_2(Node,TCdict,TPid,NextC,DoneCases,Rest);
+reactivator_executer_2(Node,TCdict,TPid,_Counter,DoneCases,
+ [{{TCname,Id,_Ref},NextC,stop,Bindings}|Rest]) ->
+ case lists:member({TCname,Id},DoneCases) of
+ true -> % We have activated it, must stop then.
+ case get_tracecase_tc_dict(TCname,TCdict) of
+ {ok,{_,_,_,_,FNameOff}} ->
+ reactivator_executer_tc(Node,Bindings,FNameOff),
+ NewDoneCases=lists:delete({TCname,Id},DoneCases),
+ reactivator_executer_2(Node,TCdict,TPid,NextC,NewDoneCases,Rest);
+ {ok,_} -> % No stop-filename, strange!
+ reactivator_executer_2(Node,TCdict,TPid,NextC,DoneCases,Rest);
+ false -> % Even stranger, does not exist!?
+ reactivator_executer_2(Node,TCdict,TPid,NextC,DoneCases,Rest)
+ end;
+ false -> % Never activated in the first place.
+ reactivator_executer_2(Node,TCdict,TPid,NextC,DoneCases,Rest)
+ end;
+%% Done with all log entries found this lap. See if there are more entries by now.
+reactivator_executer_2(_Node,_TCdict,TPid,Counter,DoneCases,[]) ->
+ case reactivator_reply(TPid,Counter) of % Ask the tool process for more entries.
+ done -> % No more entries in the CHL.
+ done;
+ {more,NewUnsortedLog} -> % Repeat the procedure
+ {more,{Counter+1,DoneCases,NewUnsortedLog}} % with log entries from Counter+1.
+ end.
+
+%% This help function activates a tracecase.
+reactivator_executer_3(Node,TCdict,TPid,DoneCases,Rest,TCname,Id,NextC,Bindings,Rest) ->
+ case get_tracecase_tc_dict(TCname,TCdict) of
+ {ok,{_,_,_,FNameOn}} -> % A case with just 'on' functionality.
+ reactivator_executer_tc(Node,Bindings,FNameOn),
+ reactivator_executer_2(Node,TCdict,TPid,NextC,[{TCname,Id}|DoneCases],Rest);
+ {ok,{_,_,_,FNameOn,_}} ->
+ reactivator_executer_tc(Node,Bindings,FNameOn),
+ reactivator_executer_2(Node,TCdict,TPid,NextC,[{TCname,Id}|DoneCases],Rest);
+ false -> % Strange, does not exist any longer!?
+ reactivator_executer_2(Node,TCdict,TPid,NextC,DoneCases,Rest)
+ end.
+
+%% Help function executing a trace case in the reactivator's context. Does not
+%% return anything significant.
+reactivator_executer_tc(Node,Bindings,FileName) ->
+ catch file:eval(FileName,erl_eval:add_binding('Nodes',[Node],Bindings)).
+
+%% Help function handling trace cases that are simply executed - rtc.
+reactivator_executer_rtc(Node,TCdict,TPid,DoneCases,Rest,TCname,NextC,Bindings,Rest) ->
+ case get_tracecase_tc_dict(TCname,TCdict) of
+ {ok,{_,_,_,FNameOn}} -> % A case with just 'on' functionality.
+ reactivator_executer_tc(Node,Bindings,FNameOn),
+ reactivator_executer_2(Node,TCdict,TPid,NextC,DoneCases,Rest);
+ {ok,{_,_,_,FNameOn,_}} ->
+ reactivator_executer_tc(Node,Bindings,FNameOn),
+ reactivator_executer_2(Node,TCdict,TPid,NextC,DoneCases,Rest);
+ false -> % Strange, does not exist any longer!?
+ reactivator_executer_2(Node,TCdict,TPid,NextC,DoneCases,Rest)
+ end.
+
+reactivator_executer_cmd(nonode@nohost,M,F,Args) ->
+ catch apply(M,F,Args); % Non-distributed.
+reactivator_executer_cmd(Node,M,F,Args) ->
+ catch apply(M,F,[[Node]|Args]).
+
+%% Help function returning the list of log entries with the leading entries
+%% having a counter less than C1 removed.
+reactivator_skip_log_entries([{_,C,_,_}|Rest],C1) when C<C1 ->
+ reactivator_skip_log_entries(Rest,C1);
+reactivator_skip_log_entries([{_,C}|Rest],C1) when C<C1 ->
+ reactivator_skip_log_entries(Rest,C1);
+reactivator_skip_log_entries(Log,_) ->
+ Log.
+%% -----------------------------------------------------------------------------
+
+%% Help function returning the node name to use in an rpc call.
+get_rpc_nodename(undefined) ->
+ node();
+get_rpc_nodename(CNode) ->
+ CNode.
+%% -----------------------------------------------------------------------------
+
+mk_rt_tag() ->
+ inviso_tool.
+%% -----------------------------------------------------------------------------
+
+is_string([C|Rest]) when C>=32, C=<255 ->
+ is_string(Rest);
+is_string([]) ->
+ true;
+is_string(_) ->
+ false.
+%% -----------------------------------------------------------------------------
+
+
+%% -----------------------------------------------------------------------------
+%% Functions for handling the configuration file.
+%% -----------------------------------------------------------------------------
+
+%% The inviso tool is configured via start arguments and/or a configuration file.
+%% Start arguments will override any definitions in a configuration file.
+%% The configuration file is pointed out by either a start argument or the
+%% inviso application parameter 'inviso_tool_config_file'.
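+%%
+%% Illustration only (hypothetical values): the configuration file is read with
+%% file:consult/1 and could contain terms such as
+%%
+%%   {nodes,[node1@host,node2@host]}.
+%%   {c_node,control@host}.
+%%   {tc_def_file,"trace_cases.def"}.
+%%   {dir,"/var/tmp/inviso_tool"}.
+%%
+%% The recognised tags are those handled by read_config_list/2 below.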
+
+%% Help function building the internal configuration structure. Configurations
+%% in the start argument will override parameters found in a configuration file.
+fetch_configuration(Config) ->
+ case fetch_config_filename(Config) of
+ {ok,FName} -> % We are supposed to use a conf-file.
+ case read_config_file(FName) of
+ {ok,LD} -> % Managed to open a file.
+ NewLD=read_config_list(LD,Config),
+ {ok,NewLD};
+ Error = {error,_Reason} -> % Problem finding/opening file.
+ Error
+ end;
+ false -> % No filename specified.
+ LD=read_config_list(#ld{},Config),
+ {ok,LD}
+ end.
+
+%% Help function determining the name of the file which shall be consulted as
+%% the main configuration file.
+%% Returns {ok,FileName} or 'false'. The latter if no name could be determined.
+fetch_config_filename(Config) ->
+ case catch lists:keysearch(config_file,1,Config) of
{value,{_,FName}} when is_list(FName) ->
- {ok,FName};
- _ -> % No filename in the start argument.
- fetch_config_filename_2()
- end.
-
-fetch_config_filename_2() ->
- case application:get_env(inviso_tool_config_file) of
+ {ok,FName};
+ _ -> % No filename in the start argument.
+ fetch_config_filename_2()
+ end.
+
+fetch_config_filename_2() ->
+ case application:get_env(inviso_tool_config_file) of
{ok,FName} when is_list(FName) ->
- {ok,FName};
- _ -> % Application parameter not specified.
- false % Means no config file will be used.
- end.
-
-%% Help function reading the configuration file. Returns a #conf or {error,Reason}.
-read_config_file(FName) ->
- case catch file:consult(FName) of
- {ok,Terms} ->
- {ok,read_config_list(#ld{},Terms)};
- {error,Reason} ->
- {error,{file_consult,Reason}};
- {'EXIT',Reason} ->
- {error,{failure,Reason}}
- end.
-
-%% Help function traversing the Terms list entering known tag-values into #ld.
-read_config_list(LD,Terms) ->
- LD1=read_config_list_2(LD,Terms,nodes),
- LD2=read_config_list_2(LD1,Terms,c_node),
- LD3=read_config_list_2(LD2,Terms,regexp_node),
- LD4=read_config_list_2(LD3,Terms,tc_def_file),
- LD6=read_config_list_2(LD4,Terms,tdg),
- LD8=read_config_list_2(LD6,Terms,debug),
- LD10=read_config_list_2(LD8,Terms,initial_tcs),
- LD11=read_config_list_2(LD10,Terms,dir),
- _LD12=read_config_list_2(LD11,Terms,optg).
-
-read_config_list_2(LD,Terms,Tag) ->
- case catch lists:keysearch(Tag,1,Terms) of
- {value,{_,Value}} ->
- update_ld_record(LD,Tag,Value);
- _ ->
- LD % Tag not found in Terms (or error!)
- end.
-%% -----------------------------------------------------------------------------
-
-%% Function updating a named field in a record. Returns a new record. Note that
-%% this function must be maintained due the fact that field names are removed
-%% at compile time.
-update_ld_record(LD,nodes,Value) when is_record(LD,ld) ->
- case mk_nodes(Value) of
- {ok,NodesD} ->
- LD#ld{nodes=NodesD};
- error ->
- LD
- end;
-update_ld_record(LD,Tag,Value) when is_record(LD,ld) ->
- Index=
- case Tag of
- c_node -> % atom()
- #ld.c_node;
- regexp_node -> % atom()
- #ld.regexp_node;
- tc_def_file -> % string()
- #ld.tc_def_file;
- initial_tcs -> % [{TCname,VarList},...]
- #ld.initial_tcs;
- history_dir -> % string()
- #ld.history_dir;
- debug -> % true | false
- #ld.debug;
- dir -> % string()
- #ld.dir;
- optg -> % {Mod,Func,Args}
- #ld.optg;
- tdg -> % {Mod,Func,Args}
- #ld.tdg;
- keep_nodes -> % [Nodes,...]
- #ld.keep_nodes
- end,
- setelement(Index,LD,Value). % Cheeting!
-%% -----------------------------------------------------------------------------
-
-
-%% Help function which, if it exists, consults the trace definition file. The
-%% idea behind the trace definition file is to point out which trace cases there
-%% are, where to find them and how to turn them on and off.
-%% Trace case definitions are:
-%% {TCname,Type,VariableNameList,ActivatioFileName} |
-%% {TCname,Type,VariableNameList,ActivationFileName,DeactivationFileName}
-%% TCname=atom()
-%% Type=on | on_off
-%% VariableNameList=[atom(),...]
-%% ActivationFileName=DeactivationFileName=string()
-read_trace_case_definitions(LD) ->
- case LD#ld.tc_def_file of
+ {ok,FName};
+ _ -> % Application parameter not specified.
+ false % Means no config file will be used.
+ end.
+
+%% Help function reading the configuration file. Returns {ok,#ld{}} or {error,Reason}.
+read_config_file(FName) ->
+ case catch file:consult(FName) of
+ {ok,Terms} ->
+ {ok,read_config_list(#ld{},Terms)};
+ {error,Reason} ->
+ {error,{file_consult,Reason}};
+ {'EXIT',Reason} ->
+ {error,{failure,Reason}}
+ end.
+
+%% Help function traversing the Terms list entering known tag-values into #ld.
+read_config_list(LD,Terms) ->
+ LD#ld{
+ nodes = case mk_nodes(proplists:get_value(nodes,Terms,LD#ld.nodes)) of
+ {ok,Nodes} -> Nodes;
+ _ -> LD#ld.nodes
+ end,
+ c_node = proplists:get_value(c_node,Terms,LD#ld.c_node), % atom()
+ regexp_node = proplists:get_value(regexp_node,Terms,LD#ld.regexp_node), % atom()
+ tc_def_file = proplists:get_value(tc_def_file,Terms,LD#ld.tc_def_file),
+ tdg = proplists:get_value(tdg,Terms,LD#ld.tdg),
+ debug = proplists:get_value(debug,Terms,LD#ld.debug),
+ initial_tcs = proplists:get_value(initial_tcs,Terms,LD#ld.initial_tcs),
+ dir = proplists:get_value(dir,Terms,LD#ld.dir),
+ optg = proplists:get_value(optg,Terms,LD#ld.optg)
+ }.
+
+%% -----------------------------------------------------------------------------
+
+
+%% Help function which consults the trace definition file, if one exists. The
+%% idea behind the trace definition file is to point out which trace cases there
+%% are, where to find them and how to turn them on and off.
+%% Trace case definitions are:
+%% {TCname,Type,VariableNameList,ActivationFileName} |
+%% {TCname,Type,VariableNameList,ActivationFileName,DeactivationFileName}
+%% TCname=atom()
+%% Type=on | on_off
+%% VariableNameList=[atom(),...]
+%% ActivationFileName=DeactivationFileName=string()
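+%%
+%% Illustration only (hypothetical names): a consulted trace definition file
+%% could contain entries such as
+%%
+%%   {proc_tracing,on_off,['Pids'],"proc_on.tc","proc_off.tc"}.
+%%   {dump_state,on,['Mods'],"dump_state.tc"}.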
+read_trace_case_definitions(LD) ->
+ case LD#ld.tc_def_file of
TCfileName when is_list(TCfileName) ->
- case catch file:consult(TCfileName) of
- {ok,Terms} ->
- Dir=LD#ld.dir, % The working directory of the tool.
- TCdict=read_trace_case_definitions_2(Terms,Dir,mk_tc_dict()),
- LD#ld{tc_dict=TCdict};
- _ ->
- LD
- end;
- _ ->
- LD
- end.
-
-read_trace_case_definitions_2([{TCname,on,VarNames,FName}|Rest],Dir,TCdict) ->
- FileName=make_absolute_path(FName,Dir),
- read_trace_case_definitions_2(Rest,
- Dir,
- insert_tracecase_tc_dict(TCname,
- on,
- VarNames,
- FileName,
- TCdict));
-read_trace_case_definitions_2([{TCname,on_off,VarNames,FNameOn,FNameOff}|Rest],Dir,TCdict) ->
- FileNameOn=make_absolute_path(FNameOn,Dir),
- FileNameOff=make_absolute_path(FNameOff,Dir),
- read_trace_case_definitions_2(Rest,
- Dir,
- insert_tracecase_tc_dict(TCname,
- on_off,
- VarNames,
- FileNameOn,
- FileNameOff,
- TCdict));
-read_trace_case_definitions_2([_|Rest],Dir,TCdict) ->
- read_trace_case_definitions_2(Rest,Dir,TCdict);
-read_trace_case_definitions_2([],_Dir,TCdict) ->
- TCdict.
-
-%% Help function returning an absolute path to FName if FName is not already
-%% absolute. Dir is the working dir of the tool and supposed to be absolute.
-make_absolute_path(FName,Dir) ->
- case filename:pathtype(FName) of
- absolute -> % Then do nothing, allready absolute.
- FName;
- _ ->
- filename:join(Dir,FName)
- end.
-%% -----------------------------------------------------------------------------
-
-get_status(undefined,_Node) ->
- inviso:get_status();
-get_status(CNode,Nodes) ->
- inviso_tool_lib:inviso_cmd(CNode,get_status,[Nodes]).
-%% -----------------------------------------------------------------------------
-
-
-%% =============================================================================
-%% Internal data structure functions.
-%% =============================================================================
-
-%% -----------------------------------------------------------------------------
-%% The nodes database structure.
-%% -----------------------------------------------------------------------------
-
-%% The purpose of the nodes database structure is to keep track of what runtime
-%% nodes we have, and their current status.
-%% Implementation:
-%% [{NodeName,AvailableStatus},...] or AvailableStatus in the
-%% non-distributed case.
-%% AvailableStatus={up,Status1} | down
-%% Status1={State,Status} | reactivating
-%% State=tracing | inactive | trace_failure
-%% Status=running | suspended
-%% reactivating=the node is now being brought up to date.
-%% inactive=not tracing, can be initiated and then reactivated.
-%% The following states can occure.
-%% {inactive,running}
-%% Mainly when we start the tool, before a session has been started.
-%% {tracing,running}
-%% When a trace session is on-going.
-%% {trace_failure,running}
-%% If init_tracing failed for some reason.
-%% {tracing,suspended}
-%% reactivating
-%% The node is tracing (has always been) but was suspended. It is now
-%% no longer suspended and the tool is redong commands.
-%% {inactive,suspended}
-%% We can end up here if a session is stopped with this node suspended.
-
-%% Returns a nodes database structure filled with the nodes Nodes.
+ case catch file:consult(TCfileName) of
+ {ok,Terms} ->
+ Dir=LD#ld.dir, % The working directory of the tool.
+ TCdict=read_trace_case_definitions_2(Terms,Dir,mk_tc_dict()),
+ LD#ld{tc_dict=TCdict};
+ _ ->
+ LD
+ end;
+ _ ->
+ LD
+ end.
+
+read_trace_case_definitions_2([{TCname,on,VarNames,FName}|Rest],Dir,TCdict) ->
+ FileName=make_absolute_path(FName,Dir),
+ read_trace_case_definitions_2(Rest,
+ Dir,
+ insert_tracecase_tc_dict(TCname,
+ on,
+ VarNames,
+ FileName,
+ TCdict));
+read_trace_case_definitions_2([{TCname,on_off,VarNames,FNameOn,FNameOff}|Rest],Dir,TCdict) ->
+ FileNameOn=make_absolute_path(FNameOn,Dir),
+ FileNameOff=make_absolute_path(FNameOff,Dir),
+ read_trace_case_definitions_2(Rest,
+ Dir,
+ insert_tracecase_tc_dict(TCname,
+ on_off,
+ VarNames,
+ FileNameOn,
+ FileNameOff,
+ TCdict));
+read_trace_case_definitions_2([_|Rest],Dir,TCdict) ->
+ read_trace_case_definitions_2(Rest,Dir,TCdict);
+read_trace_case_definitions_2([],_Dir,TCdict) ->
+ TCdict.
+
+%% Help function returning an absolute path to FName if FName is not already
+%% absolute. Dir is the working dir of the tool and is supposed to be absolute.
+make_absolute_path(FName,Dir) ->
+ case filename:pathtype(FName) of
+ absolute -> % Then do nothing, already absolute.
+ FName;
+ _ ->
+ filename:join(Dir,FName)
+ end.
+%% -----------------------------------------------------------------------------
+
+get_status(undefined,_Node) ->
+ inviso:get_status();
+get_status(CNode,Nodes) ->
+ inviso_tool_lib:inviso_cmd(CNode,get_status,[Nodes]).
+%% -----------------------------------------------------------------------------
+
+
+%% =============================================================================
+%% Internal data structure functions.
+%% =============================================================================
+
+%% -----------------------------------------------------------------------------
+%% The nodes database structure.
+%% -----------------------------------------------------------------------------
+
+%% The purpose of the nodes database structure is to keep track of what runtime
+%% nodes we have, and their current status.
+%% Implementation:
+%% [{NodeName,AvailableStatus},...] or AvailableStatus in the
+%% non-distributed case.
+%% AvailableStatus={up,Status1} | down
+%% Status1={State,Status} | reactivating
+%% State=tracing | inactive | trace_failure
+%% Status=running | suspended
+%% reactivating=the node is now being brought up to date.
+%% inactive=not tracing, can be initiated and then reactivated.
+%% The following states can occur.
+%% {inactive,running}
+%% Mainly when we start the tool, before a session has been started.
+%% {tracing,running}
+%% When a trace session is on-going.
+%% {trace_failure,running}
+%% If init_tracing failed for some reason.
+%% {tracing,suspended}
+%% reactivating
+%% The node is tracing (has always been) but was suspended. It is now
+%% no longer suspended and the tool is redoing commands.
+%% {inactive,suspended}
+%% We can end up here if a session is stopped with this node suspended.
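+%%
+%% Example structure in the distributed case (hypothetical node names):
+%%   [{node1@host,{up,{tracing,running}}},
+%%    {node2@host,{up,reactivating}},
+%%    {node3@host,down}]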
+
+%% Returns a nodes database structure filled with the nodes Nodes.
mk_nodes(Nodes) when is_list(Nodes) ->
{ok,lists:map(fun(N) when is_atom(N)->{N,down} end,Nodes)};
-mk_nodes(local_runtime) -> % The non-distributed case.
- down;
-mk_nodes(_Nodes) ->
- error.
-%% -----------------------------------------------------------------------------
-
-%% Updates the nodes database structure for each node that has been added.
-%% This is the case when we start the tool or reactivate a node. Note that a node
-%% may have become adopted instead of started.
-%% Returns a new nodes database structure.
-update_added_nodes(CNode,[{Node,NodeResult}|Rest],NodesD) ->
- case update_added_nodes_3(NodeResult) of
- already_added -> % Already added to the control component.
- case get_status(CNode,[Node]) of % Examine if it is tracing or not.
- {ok,[{Node,NodeResult2}]} ->
- Result=mk_nodes_state_from_status(NodeResult2),
- update_added_nodes_2(CNode,Node,Result,NodesD,Rest);
- {error,_Reason} -> % Strange, mark it as down now.
- update_added_nodes_2(CNode,Node,down,NodesD,Rest)
- end;
- Result ->
- update_added_nodes_2(CNode,Node,Result,NodesD,Rest)
- end;
-update_added_nodes(_CNode,[],NodesD) ->
- NodesD;
-update_added_nodes(_CNode,NodeResult,_NodesD) -> % Non distributed case.
- case update_added_nodes_3(NodeResult) of
- already_added -> % Already added, most likely autostart.
- mk_nodes_state_from_status(inviso:get_status());
- Result ->
- Result % Simply replace NodesD.
- end.
-
-update_added_nodes_2(CNode,Node,Result,NodesD,Rest) ->
- case lists:keysearch(Node,1,NodesD) of
- {value,_} -> % Node already exists, replace!
- update_added_nodes(CNode,Rest,lists:keyreplace(Node,1,NodesD,{Node,Result}));
- false -> % Strange, unknown node!
- update_added_nodes(CNode,Rest,NodesD)
- end.
-
-update_added_nodes_3({ok,{adopted,tracing,running,_Tag}}) ->
- {up,{tracing,running}};
-update_added_nodes_3({ok,{adopted,tracing,{suspended,_SReason},_Tag}}) ->
- {up,{tracing,suspended}};
-update_added_nodes_3({ok,{adopted,_,running,_Tag}}) ->
- {up,{inactive,running}};
-update_added_nodes_3({ok,{adopted,_,{suspended,_SReason},_Tag}}) ->
- {up,{inactive,suspended}};
-update_added_nodes_3({ok,new}) ->
- {up,{inactive,running}};
-update_added_nodes_3({ok,already_added}) ->
- already_added; % This is an error value!
-update_added_nodes_3({error,_Reason}) ->
- down.
-%% -----------------------------------------------------------------------------
-
-%% Function marking all nodes that, according to the returnvalue from init_tracing,
-%% now are successfully initiated as tracing and running. Note that nodes that
-%% does not fully respond 'ok' when init_tracing are marked as 'trace_failure'.
-%% Also note that we assume that the nodes must be running to have made it this far.
-%% A node can of course have become suspended in the process, but that node will
-%% be marked as suspended later when that inviso event message arrives to the tool.
-%% Returns {NewNodesD,Nodes} where Nodes are the nodes that actually got initiated
-%% as a result of the init_tracing call (judged from the LogResults).
-set_tracing_running_nodes(undefined,{ok,LogResults},_AvailableStatus) -> % Non-distr. case.
- case set_tracing_running_nodes_checkresult(LogResults) of
- ok ->
- {{up,{tracing,running}},local_runtime};
- error ->
- {down,[]}
- end;
-set_tracing_running_nodes(undefined,{error,already_initiated},_) -> % Non-distributed case.
- {mk_nodes_state_from_status(inviso:get_status()),[]}; % Ask it for its status.
-set_tracing_running_nodes(undefined,{error,_Reason},_) -> % Non-distributed case.
- {down,[]}; % This is questionable!
-set_tracing_running_nodes(CNode,{ok,NodeResults},NodesD) ->
- set_tracing_running_nodes_2(CNode,NodeResults,NodesD,[]).
-
-set_tracing_running_nodes_2(CNode,[{Node,{ok,LogResults}}|Rest],NodesD,Nodes) ->
- case set_tracing_running_nodes_checkresult(LogResults) of
- ok -> % The result is good.
- case lists:keysearch(Node,1,NodesD) of
- {value,_} ->
- NewNodesD=lists:keyreplace(Node,1,NodesD,{Node,{up,{tracing,running}}}),
- set_tracing_running_nodes_2(CNode,Rest,NewNodesD,[Node|Nodes]);
- false -> % Strange.
- set_tracing_running_nodes_2(CNode,Rest,NodesD,Nodes)
- end;
- error -> % This node is not tracing correctly.
- NewNodesD=lists:keyreplace(Node,1,NodesD,{Node,down}),
- set_tracing_running_nodes_2(CNode,Rest,NewNodesD,Nodes)
- end;
-set_tracing_running_nodes_2(CNode,[{Node,{error,already_initiated}}|Rest],NodesD,Nodes) ->
- case get_status(CNode,[Node]) of % Then we must ask what it is doing now.
- {ok,[{Node,NodeResult}]} ->
- Result=mk_nodes_state_from_status(NodeResult),
- NewNodesD=lists:keyreplace(Node,1,NodesD,{Node,Result}),
- set_tracing_running_nodes_2(CNode,Rest,NewNodesD,Nodes);
- {error,_Reason} -> % Strange, mark it as down.
- NewNodesD=lists:keyreplace(Node,1,NodesD,{Node,down}),
- set_tracing_running_nodes_2(CNode,Rest,NewNodesD,Nodes)
- end;
-set_tracing_running_nodes_2(CNode,[{Node,{error,_Reason}}|Rest],NodesD,Nodes) ->
- NewNodesD=lists:keyreplace(Node,1,NodesD,{Node,{up,{trace_failure,running}}}),
- set_tracing_running_nodes_2(CNode,Rest,NewNodesD,Nodes);
-set_tracing_running_nodes_2(_CNode,[],NodesD,Nodes) ->
- {NodesD,Nodes}. % New NodesD and nodes successfully initiated.
-
-%% Help function checking if a returnvalue from inviso:init_tracing really
-%% means that tracing has started as requested.
-set_tracing_running_nodes_checkresult(_LogResults) -> ok. % Should really be better!
-%% -----------------------------------------------------------------------------
-
-%% Function updating Node in the NodesD structure and sets it to 'down'.
-%% Returns a new nodes structure.
-set_down_nodes(Node,[{Node,_}|Rest]) ->
- [{Node,down}|Rest];
-set_down_nodes(Node,[NodeStruct|Rest]) ->
- [NodeStruct|set_down_nodes(Node,Rest)];
-set_down_nodes(_,[]) ->
- [];
-set_down_nodes(_,_) -> % Non-distributed case.
- down. % One can argue if this can happend.
-%% -----------------------------------------------------------------------------
-
-%% Function updating Node in NodesD to now be suspended. Note that if the node is
-%% reactivating it must be moved to state tracing because that is what is doing.
-set_suspended_nodes(Node,[{Node,{up,reactivating}}|Rest]) ->
- [{Node,{up,{tracing,suspended}}}|Rest];
-set_suspended_nodes(Node,[{Node,{up,{State,_}}}|Rest]) ->
- [{Node,{up,{State,suspended}}}|Rest];
-set_suspended_nodes(Node,[NodesData|Rest]) ->
- [NodesData|set_suspended_nodes(Node,Rest)];
-set_suspended_nodes(_Node,[]) -> % Hmm, strange why did we end up here?
- [];
-set_suspended_nodes(_,{up,reactivating}) -> % Non-distributed case.
- {up,{tracing,suspended}};
-set_suspended_nodes(_,{up,{State,_}}) ->
- {up,{State,suspended}}.
-%% -----------------------------------------------------------------------------
-
-%% This function is called when reactivation is completed. Hence it moves the
-%% node to no longer suspended. Note this can mean that the node is either
-%% tracing or inactive. Reactivation is not allowed for a node have trace_failure.
+mk_nodes(local_runtime) -> % The non-distributed case.
+ down;
+mk_nodes(_Nodes) ->
+ error.
+%% -----------------------------------------------------------------------------
+
+%% Updates the nodes database structure for each node that has been added.
+%% This is the case when we start the tool or reactivate a node. Note that a node
+%% may have become adopted instead of started.
+%% Returns a new nodes database structure.
+update_added_nodes(CNode,[{Node,NodeResult}|Rest],NodesD) ->
+ case update_added_nodes_3(NodeResult) of
+ already_added -> % Already added to the control component.
+ case get_status(CNode,[Node]) of % Examine if it is tracing or not.
+ {ok,[{Node,NodeResult2}]} ->
+ Result=mk_nodes_state_from_status(NodeResult2),
+ update_added_nodes_2(CNode,Node,Result,NodesD,Rest);
+ {error,_Reason} -> % Strange, mark it as down now.
+ update_added_nodes_2(CNode,Node,down,NodesD,Rest)
+ end;
+ Result ->
+ update_added_nodes_2(CNode,Node,Result,NodesD,Rest)
+ end;
+update_added_nodes(_CNode,[],NodesD) ->
+ NodesD;
+update_added_nodes(_CNode,NodeResult,_NodesD) -> % Non distributed case.
+ case update_added_nodes_3(NodeResult) of
+ already_added -> % Already added, most likely autostart.
+ mk_nodes_state_from_status(inviso:get_status());
+ Result ->
+ Result % Simply replace NodesD.
+ end.
+
+update_added_nodes_2(CNode,Node,Result,NodesD,Rest) ->
+ case lists:keysearch(Node,1,NodesD) of
+ {value,_} -> % Node already exists, replace!
+ update_added_nodes(CNode,Rest,lists:keyreplace(Node,1,NodesD,{Node,Result}));
+ false -> % Strange, unknown node!
+ update_added_nodes(CNode,Rest,NodesD)
+ end.
+
+update_added_nodes_3({ok,{adopted,tracing,running,_Tag}}) ->
+ {up,{tracing,running}};
+update_added_nodes_3({ok,{adopted,tracing,{suspended,_SReason},_Tag}}) ->
+ {up,{tracing,suspended}};
+update_added_nodes_3({ok,{adopted,_,running,_Tag}}) ->
+ {up,{inactive,running}};
+update_added_nodes_3({ok,{adopted,_,{suspended,_SReason},_Tag}}) ->
+ {up,{inactive,suspended}};
+update_added_nodes_3({ok,new}) ->
+ {up,{inactive,running}};
+update_added_nodes_3({ok,already_added}) ->
+ already_added; % This is an error value!
+update_added_nodes_3({error,_Reason}) ->
+ down.
+%% -----------------------------------------------------------------------------
+
+%% Function marking all nodes that, according to the return value from init_tracing,
+%% now are successfully initiated as tracing and running. Note that nodes that
+%% do not fully respond 'ok' to init_tracing are marked as 'trace_failure'.
+%% Also note that we assume that the nodes must be running to have made it this far.
+%% A node can of course have become suspended in the process, but that node will
+%% be marked as suspended later when that inviso event message arrives to the tool.
+%% Returns {NewNodesD,Nodes} where Nodes are the nodes that actually got initiated
+%% as a result of the init_tracing call (judged from the LogResults).
+set_tracing_running_nodes(undefined,{ok,_LogResults},_AvailableStatus) -> % Non-distr. case.
+ {{up,{tracing,running}},local_runtime};
+set_tracing_running_nodes(undefined,{error,already_initiated},_) -> % Non-distributed case.
+ {mk_nodes_state_from_status(inviso:get_status()),[]}; % Ask it for its status.
+set_tracing_running_nodes(undefined,{error,_Reason},_) -> % Non-distributed case.
+ {down,[]}; % This is questionable!
+set_tracing_running_nodes(CNode,{ok,NodeResults},NodesD) ->
+ set_tracing_running_nodes_2(CNode,NodeResults,NodesD,[]).
+
+set_tracing_running_nodes_2(CNode,[{Node,{ok,_LogResults}}|Rest],NodesD,Nodes) ->
+ case lists:keysearch(Node,1,NodesD) of
+ {value,_} ->
+ NewNodesD=lists:keyreplace(Node,1,NodesD,{Node,{up,{tracing,running}}}),
+ set_tracing_running_nodes_2(CNode,Rest,NewNodesD,[Node|Nodes]);
+ false -> % Strange.
+ set_tracing_running_nodes_2(CNode,Rest,NodesD,Nodes)
+ end;
+set_tracing_running_nodes_2(CNode,[{Node,{error,already_initiated}}|Rest],NodesD,Nodes) ->
+ case get_status(CNode,[Node]) of % Then we must ask what it is doing now.
+ {ok,[{Node,NodeResult}]} ->
+ Result=mk_nodes_state_from_status(NodeResult),
+ NewNodesD=lists:keyreplace(Node,1,NodesD,{Node,Result}),
+ set_tracing_running_nodes_2(CNode,Rest,NewNodesD,Nodes);
+ {error,_Reason} -> % Strange, mark it as down.
+ NewNodesD=lists:keyreplace(Node,1,NodesD,{Node,down}),
+ set_tracing_running_nodes_2(CNode,Rest,NewNodesD,Nodes)
+ end;
+set_tracing_running_nodes_2(CNode,[{Node,{error,_Reason}}|Rest],NodesD,Nodes) ->
+ NewNodesD=lists:keyreplace(Node,1,NodesD,{Node,{up,{trace_failure,running}}}),
+ set_tracing_running_nodes_2(CNode,Rest,NewNodesD,Nodes);
+set_tracing_running_nodes_2(_CNode,[],NodesD,Nodes) ->
+ {NodesD,Nodes}. % New NodesD and nodes successfully initiated.
+
+%% -----------------------------------------------------------------------------
+
+%% Function updating Node in the NodesD structure and sets it to 'down'.
+%% Returns a new nodes structure.
+set_down_nodes(Node,[{Node,_}|Rest]) ->
+ [{Node,down}|Rest];
+set_down_nodes(Node,[NodeStruct|Rest]) ->
+ [NodeStruct|set_down_nodes(Node,Rest)];
+set_down_nodes(_,[]) ->
+ [];
+set_down_nodes(_,_) -> % Non-distributed case.
+ down. % One can argue if this can happen.
+%% -----------------------------------------------------------------------------
+
+%% Function updating Node in NodesD to now be suspended. Note that if the node is
+%% reactivating it must be moved to state tracing because that is what it is doing.
+set_suspended_nodes(Node,[{Node,{up,reactivating}}|Rest]) ->
+ [{Node,{up,{tracing,suspended}}}|Rest];
+set_suspended_nodes(Node,[{Node,{up,{State,_}}}|Rest]) ->
+ [{Node,{up,{State,suspended}}}|Rest];
+set_suspended_nodes(Node,[NodesData|Rest]) ->
+ [NodesData|set_suspended_nodes(Node,Rest)];
+set_suspended_nodes(_Node,[]) -> % Hmm, strange why did we end up here?
+ [];
+set_suspended_nodes(_,{up,reactivating}) -> % Non-distributed case.
+ {up,{tracing,suspended}};
+set_suspended_nodes(_,{up,{State,_}}) ->
+ {up,{State,suspended}}.
+%% -----------------------------------------------------------------------------
+
+%% This function is called when reactivation is completed. Hence it marks the
+%% node as no longer suspended. Note that this can mean that the node is either
+%% tracing or inactive. Reactivation is not allowed for a node with trace_failure.
set_running_nodes(Node,NodesD) when is_list(NodesD) ->
- case lists:keysearch(Node,1,NodesD) of
- {value,{_,AvailableStatus}} ->
- lists:keyreplace(Node,1,NodesD,{Node,set_running_nodes_2(AvailableStatus)});
- false -> % Very strange!
- NodesD
- end;
-set_running_nodes(_,NodesD) -> % The non-distributed case.
- set_running_nodes_2(NodesD).
-
-set_running_nodes_2({up,reactivating}) ->
- {up,{tracing,running}};
-set_running_nodes_2({up,{State,suspended}}) ->
- {up,{State,running}}.
-%% -----------------------------------------------------------------------------
-
-%% Function marking node as now reactivating. That means it is not suspended
-%% any longer (and tracing), but still not part of the set of nodes which shall
-%% get all commands. Returns a new NodesD.
-set_reactivating_nodes(Node,[{Node,_}|Rest]) ->
- [{Node,{up,reactivating}}|Rest];
-set_reactivating_nodes(Node,[NodesData|Rest]) ->
- [NodesData|set_reactivating_nodes(Node,Rest)];
-set_reactivating_nodes(_,[]) ->
- [];
-set_reactivating_nodes(_,{up,_}) -> % The non-distributed case.
- {up,reactivating}.
-%% -----------------------------------------------------------------------------
-
-%% Function called when stop-tracing is done. That is all nodes in Nodes shall
-%% be inactive now. Note that an inactive node can still be suspended.
-%% Returns a new NodesD.
-set_inactive_nodes(_,{up,reactivating}) -> % Non-distributed case.
- {up,{inactive,running}};
-set_inactive_nodes(_,{up,{_,Status}}) -> % Tracing or trace_failure.
- {up,{inactive,Status}};
-set_inactive_nodes(_,down) ->
- down;
-set_inactive_nodes([{Node,ok}|Rest],NodesD) ->
- case lists:keysearch(Node,1,NodesD) of
- {value,{_,{up,reactivating}}} ->
- set_inactive_nodes(Rest,lists:keyreplace(Node,1,NodesD,{Node,{up,{inactive,running}}}));
- {value,{_,{up,{_,Status}}}} -> % Tracing or trace_failure.
- set_inactive_nodes(Rest,lists:keyreplace(Node,1,NodesD,{Node,{up,{inactive,Status}}}));
- _ -> % This should not happend.
- set_inactive_nodes(Rest,NodesD)
- end;
-set_inactive_nodes([{_Node,_Error}|Rest],NodesD) ->
- set_inactive_nodes(Rest,NodesD);
-set_inactive_nodes([],NodesD) ->
- NodesD.
-%% -----------------------------------------------------------------------------
-
-%% Returns a list of all node names. Note that it can only be used in the
-%% distributed case.
-get_all_nodenames_nodes(NodesD) ->
- lists:map(fun({Node,_})->Node end,NodesD).
-%% -----------------------------------------------------------------------------
-
-%% Returns a list of all nodes that are up, tracing and running (not suspended),
-%% or 'void' in the non-distributed case. This is the list of nodes that shall get
-%% inviso commands.
-get_nodenames_running_nodes([{Node,{up,{tracing,running}}}|Rest]) ->
- [Node|get_nodenames_running_nodes(Rest)];
-get_nodenames_running_nodes([{_Node,_}|Rest]) ->
- get_nodenames_running_nodes(Rest);
-get_nodenames_running_nodes([]) ->
- [];
-get_nodenames_running_nodes(_) ->
- void. % When non distributed, N/A.
-%% -----------------------------------------------------------------------------
-
-%% Returns a list of nodes that can be made to initiate tracing.
-get_inactive_running_nodes({up,{inactive,running}}) ->
- local_runtime;
-get_inactive_running_nodes(NonDistributed) when not(is_list(NonDistributed)) ->
- [];
-get_inactive_running_nodes([{Node,{up,{inactive,running}}}|Rest]) ->
- [Node|get_inactive_running_nodes(Rest)];
-get_inactive_running_nodes([{_Node,_}|Rest]) ->
- get_inactive_running_nodes(Rest);
-get_inactive_running_nodes([]) ->
- [].
-%% -----------------------------------------------------------------------------
-
-%% Returns a list of nodes that are currently tracing (not necessarily running).
-%% In the non-distributed case the status of the runtime component will be
-%% returned.
-%% Note that nodes showing trace_failure will be included since we like to stop
-%% tracing at those nodes too.
-get_tracing_nodes([{Node,{up,{tracing,_}}}|Rest]) ->
- [Node|get_tracing_nodes(Rest)];
-get_tracing_nodes([{Node,{up,{trace_failure,_}}}|Rest]) ->
- [Node|get_tracing_nodes(Rest)];
-get_tracing_nodes([{Node,{up,reactivating}}|Rest]) ->
- [Node|get_tracing_nodes(Rest)];
-get_tracing_nodes([_|Rest]) ->
- get_tracing_nodes(Rest);
-get_tracing_nodes([]) ->
- [];
-get_tracing_nodes(AvailableStatus) ->
- AvailableStatus.
-%% -----------------------------------------------------------------------------
-
-%% Returns a list of all nodes that are currently up.
-get_available_nodes(down) ->
- undefined;
-get_available_nodes([{_Node,down}|Rest]) ->
- get_available_nodes(Rest);
-get_available_nodes([{Node,_}|Rest]) ->
- [Node|get_available_nodes(Rest)];
-get_available_nodes([]) ->
- [].
-%% -----------------------------------------------------------------------------
-
-%% Function returning the "state" of Node. Mainly used to check if the node is
-%% suspended or not.
-%% Returns {State,Status} | reactivating | down
-%% where
+ case lists:keysearch(Node,1,NodesD) of
+ {value,{_,AvailableStatus}} ->
+ lists:keyreplace(Node,1,NodesD,{Node,set_running_nodes_2(AvailableStatus)});
+ false -> % Very strange!
+ NodesD
+ end;
+set_running_nodes(_,NodesD) -> % The non-distributed case.
+ set_running_nodes_2(NodesD).
+
+set_running_nodes_2({up,reactivating}) ->
+ {up,{tracing,running}};
+set_running_nodes_2({up,{State,suspended}}) ->
+ {up,{State,running}}.
+%% -----------------------------------------------------------------------------
+
+%% Function marking node as now reactivating. That means it is not suspended
+%% any longer (and tracing), but still not part of the set of nodes which shall
+%% get all commands. Returns a new NodesD.
+set_reactivating_nodes(Node,[{Node,_}|Rest]) ->
+ [{Node,{up,reactivating}}|Rest];
+set_reactivating_nodes(Node,[NodesData|Rest]) ->
+ [NodesData|set_reactivating_nodes(Node,Rest)];
+set_reactivating_nodes(_,[]) ->
+ [];
+set_reactivating_nodes(_,{up,_}) -> % The non-distributed case.
+ {up,reactivating}.
+%% -----------------------------------------------------------------------------
+
+%% Function called when stop-tracing is done. That is all nodes in Nodes shall
+%% be inactive now. Note that an inactive node can still be suspended.
+%% Returns a new NodesD.
+set_inactive_nodes(_,{up,reactivating}) -> % Non-distributed case.
+ {up,{inactive,running}};
+set_inactive_nodes(_,{up,{_,Status}}) -> % Tracing or trace_failure.
+ {up,{inactive,Status}};
+set_inactive_nodes(_,down) ->
+ down;
+set_inactive_nodes([{Node,ok}|Rest],NodesD) ->
+ case lists:keysearch(Node,1,NodesD) of
+ {value,{_,{up,reactivating}}} ->
+ set_inactive_nodes(Rest,lists:keyreplace(Node,1,NodesD,{Node,{up,{inactive,running}}}));
+ {value,{_,{up,{_,Status}}}} -> % Tracing or trace_failure.
+ set_inactive_nodes(Rest,lists:keyreplace(Node,1,NodesD,{Node,{up,{inactive,Status}}}));
+ _ -> % This should not happen.
+ set_inactive_nodes(Rest,NodesD)
+ end;
+set_inactive_nodes([{_Node,_Error}|Rest],NodesD) ->
+ set_inactive_nodes(Rest,NodesD);
+set_inactive_nodes([],NodesD) ->
+ NodesD.
+%% -----------------------------------------------------------------------------
+
+%% Returns a list of all node names. Note that it can only be used in the
+%% distributed case.
+get_all_nodenames_nodes(NodesD) ->
+ lists:map(fun({Node,_})->Node end,NodesD).
+%% -----------------------------------------------------------------------------
+
+%% Returns a list of all nodes that are up, tracing and running (not suspended),
+%% or 'void' in the non-distributed case. This is the list of nodes that shall get
+%% inviso commands.
+get_nodenames_running_nodes([{Node,{up,{tracing,running}}}|Rest]) ->
+ [Node|get_nodenames_running_nodes(Rest)];
+get_nodenames_running_nodes([{_Node,_}|Rest]) ->
+ get_nodenames_running_nodes(Rest);
+get_nodenames_running_nodes([]) ->
+ [];
+get_nodenames_running_nodes(_) ->
+ void. % When non-distributed, N/A.
+%% -----------------------------------------------------------------------------
+
+%% Returns a list of nodes that can be made to initiate tracing.
+get_inactive_running_nodes({up,{inactive,running}}) ->
+ local_runtime;
+get_inactive_running_nodes(NonDistributed) when not(is_list(NonDistributed)) ->
+ [];
+get_inactive_running_nodes([{Node,{up,{inactive,running}}}|Rest]) ->
+ [Node|get_inactive_running_nodes(Rest)];
+get_inactive_running_nodes([{_Node,_}|Rest]) ->
+ get_inactive_running_nodes(Rest);
+get_inactive_running_nodes([]) ->
+ [].
+%% -----------------------------------------------------------------------------
+
+%% Returns a list of nodes that are currently tracing (not necessarily running).
+%% In the non-distributed case the status of the runtime component will be
+%% returned.
+%% Note that nodes showing trace_failure will be included since we want to stop
+%% tracing at those nodes too.
+get_tracing_nodes([{Node,{up,{tracing,_}}}|Rest]) ->
+ [Node|get_tracing_nodes(Rest)];
+get_tracing_nodes([{Node,{up,{trace_failure,_}}}|Rest]) ->
+ [Node|get_tracing_nodes(Rest)];
+get_tracing_nodes([{Node,{up,reactivating}}|Rest]) ->
+ [Node|get_tracing_nodes(Rest)];
+get_tracing_nodes([_|Rest]) ->
+ get_tracing_nodes(Rest);
+get_tracing_nodes([]) ->
+ [];
+get_tracing_nodes(AvailableStatus) ->
+ AvailableStatus.
+%% -----------------------------------------------------------------------------
+
+%% Returns a list of all nodes that are currently up.
+get_available_nodes(down) ->
+ undefined;
+get_available_nodes([{_Node,down}|Rest]) ->
+ get_available_nodes(Rest);
+get_available_nodes([{Node,_}|Rest]) ->
+ [Node|get_available_nodes(Rest)];
+get_available_nodes([]) ->
+ [].
+%% -----------------------------------------------------------------------------
+
+%% Function returning the "state" of Node. Mainly used to check if the node is
+%% suspended or not.
+%% Returns {State,Status} | reactivating | down
+%% where
get_state_nodes(Node,NodesD) when is_list(NodesD) ->
- case lists:keysearch(Node,1,NodesD) of
- {value,{_,AvailableStatus}} ->
- get_state_nodes_2(AvailableStatus);
- false ->
- false
- end;
-get_state_nodes(_,NodesD) -> % Non distributed case.
- get_state_nodes_2(NodesD).
-
-get_state_nodes_2({up,{trace_failure,Status}}) ->
- {trace_failure,Status};
-get_state_nodes_2({up,{State,suspended}}) -> % {tracing|inactive,suspended}
- {State,suspended};
-get_state_nodes_2({up,reactivating}) ->
- reactivating;
-get_state_nodes_2({up,{State,running}}) ->
- {State,running};
-get_state_nodes_2(down) ->
- down.
-%% -----------------------------------------------------------------------------
-
-%% Help function in the case we need to consult the state/status of a runtime
-%% component. Returns a nodesD value that can be added to the nodes database.
-mk_nodes_state_from_status({ok,{tracing,running}}) ->
- {up,{tracing,running}};
-mk_nodes_state_from_status({ok,{tracing,{suspended,_SReason}}}) ->
- {up,{tracing,suspended}};
-mk_nodes_state_from_status({ok,{_,running}}) ->
- {up,{inactive,running}};
-mk_nodes_state_from_status({ok,{_,{suspended,_SReason}}}) ->
- {up,{inactive,suspended}};
-mk_nodes_state_from_status({error,_Reason}) ->
- down.
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% The session_state.
-%% -----------------------------------------------------------------------------
-
-%% The session state reflects if the inviso_tool is tracing or not.
-%% This means that if the tool is tracing a reconnected node can be made to
-%% restart_session.
-
-%% Returns the correct value indicating that we are tracing now.
-tracing_sessionstate() ->
- tracing.
-%% -----------------------------------------------------------------------------
-
-%% Returns true or false depending on if we are tracing now or not.
-is_tracing(tracing) ->
- true;
-is_tracing(_) ->
- false.
-%% -----------------------------------------------------------------------------
-
-%% Returns the correct value indicating that the tool is not tracing.
-passive_sessionstate() ->
- idle.
-%% -----------------------------------------------------------------------------
-
-%% -----------------------------------------------------------------------------
-%% The tracer_data datastructure.
-%% -----------------------------------------------------------------------------
-
-%% The tracer_data structure collects the tracer data arguments used to init tracing
-%% by this inviso tool. The args are saved per session. Each session has
-%% a number.
-%% Implementation:
-%% Sessions=[{SessionNr,TDGargs},...]
-%% SessionNr=integer()
-%% TDGargs=list(), args given to the tracer data generator
-%% minus the first argument which is the Node name.
-
-%% Function taking tracerdata args structure inserting yet another session.
-%% Returns {SessionNr,NewTDs}.
-insert_td_tracer_data(TDGargs,TDs=[{SNr,_}|_]) ->
- {SNr+1,[{SNr+1,TDGargs}|TDs]};
-insert_td_tracer_data(TDGargs,undefined) ->
- {1,[{1,TDGargs}]}.
-%% -----------------------------------------------------------------------------
-
-%% Returns the latest session nr.
-get_latest_session_nr_tracer_data(undefined) ->
- undefined;
-get_latest_session_nr_tracer_data([{SessionNr,_}|_]) ->
- SessionNr.
-%% -----------------------------------------------------------------------------
-
-%% Returns the tracer data arguments used when creating the trace data for the
-%% latest session.
-get_latest_tdgargs_tracer_data(undefined) ->
- undefined;
-get_latest_tdgargs_tracer_data([{_,TDGargs}|_]) ->
- TDGargs.
-%% -----------------------------------------------------------------------------
-
-
-%% -----------------------------------------------------------------------------
-%% The tc_dict or trace case dictionary datastructure.
-%% -----------------------------------------------------------------------------
-
-%% The tc_dict stores information about all available trace cases.
-%% Implementation:
-%% [{TCname,Type,VarNames,FNameOn [,FNameOff]},...]
-%% TCname=atom()
-%% Type=on | on_off
-%% VarNames=[atom(),...]
-%% FNameOn=FNameOff=string()
-
-%% Returns the empty trace case dictionary.
-mk_tc_dict() ->
- [].
-%% -----------------------------------------------------------------------------
-
-%% Function inserting a new trace case into the trace case dictionary.
-insert_tracecase_tc_dict(TCname,on,VarNames,FNameOn,TCdict) ->
- [{TCname,on,VarNames,FNameOn}|TCdict].
-insert_tracecase_tc_dict(TCname,on_off,VarNames,FNameOn,FNameOff,TCdict) ->
- [{TCname,on_off,VarNames,FNameOn,FNameOff}|TCdict].
-%% -----------------------------------------------------------------------------
-
-%% Function finding a trace case definition in the tc_dict structure.
-%% Returns {ok,{TCname,Type,VarNames,FNameOn [,FNameOff]}} or 'false'.
-get_tracecase_tc_dict(TCname,[Tuple|_]) when element(1,Tuple)==TCname ->
- {ok,Tuple};
-get_tracecase_tc_dict(TCname,[_|Rest]) ->
- get_tracecase_tc_dict(TCname,Rest);
-get_tracecase_tc_dict(_,[]) ->
- false;
-get_tracecase_tc_dict(_,_) -> % There are no trace cases!
- false.
-%% -----------------------------------------------------------------------------
-
-%% Function working on the trace case definition returned by get_tracecase_tc_dict/2
-%% function.
-%% Returning {ok,ActivationFileName}.
-get_tc_activate_fname({_TCname,_Type,_VarNames,FNameOn}) ->
- {ok,FNameOn};
-get_tc_activate_fname({_TCname,_Type,_VarNames,FNameOn,_FNameOff}) ->
- {ok,FNameOn}.
-
-get_tc_deactivate_fname({_TCname,_Type,_VarNames,_FNameOn,FNameOff}) ->
- {ok,FNameOff};
-get_tc_deactivate_fname(_) -> % Not a case with off function.
- false.
-
-get_tc_varnames({_TCname,_Type,VarNames,_FNameOn}) ->
- VarNames;
-get_tc_varnames({_TCname,_Type,VarNames,_FNameOn,_FNameOff}) ->
- VarNames.
-
-%% -----------------------------------------------------------------------------
-
-
-%% The Command History Log (CHL) stores commands to make it possible to
-%% reactivate suspended nodes, reconnect restarted nodes, and to make
-%% autostart files.
-%% Each time tracing is initiated (that is started) the CHL is cleared since
-%% it would not make sense to repeat commands from an earlier tracing at
-%% reactivation for instance.
-
-%% Implementation: {NextCounter,OnGoingList,ETStable}
-%% NextCounter=integer(), next command number - to be able to sort them in order.
-%% OnGoingList=[{ProcH,{TCname,ID}},...]
-%% ID=term(), instance id for this execution of this trace case.
-%% ETStable=tid() -> {{TCname,Id},Counter,State1,Bindings}
-%% ETStable=tid() -> {{TCname,Id},Counter,running,Bindings,Result} |
-%% {{TCname,Id,#Ref},Counter,stop,Bindings} |
-%% {{TCname,#Ref},Counter,Bindings} % An rtc
-%% {{M,F,Args,#Ref},Counter}
-%% Counter=integer(), the order-counter for this logged entry.
-%% State1=activating | stopping
-%% Where:
-%% activating: the activation file for the tracecase is running.
-%% running : activation is completed.
-%% stopping : set on the previously running ETS entry when deactivation
-%% file is currently executing.
-%% stop : entered with own Counter into the ETS table when
-%% deactivation file is executing. Remains afterwards too.
-%% Result=term(), the result returned from the tr-case or inviso call.
-
-
-%% Returning an initial empty CHL.
-mk_chl(undefined) ->
- {1,[],ets:new(inviso_tool_chl,[set,protected])};
-mk_chl({_,_,TId}) ->
- ets:delete(TId),
- mk_chl(undefined).
-
-%% Help function returning 'true' if there is a current history.
-history_exists_chl(undefined) ->
- false;
-history_exists_chl({_,_,_}) ->
- true.
-
-%% Function looking up the state of this trace case.
-find_id_chl(TCname,Id,{_NextCounter,_OnGoingList,TId}) ->
- case ets:lookup(TId,{TCname,Id}) of
- [{_,_,running,Bindings,_Result}] -> % The trace case is tracing.
- {ok,Bindings};
- [{_,_,State,_}] -> % activating or stopping.
- State;
- [] ->
- false
- end.
-
-%% Function finding the Trace case associated with a process handle
-%% doing this trace case's activation or stopping.
-find_tc_executer_chl(ProcH,{_,OnGoingList,TId}) ->
- case lists:keysearch(ProcH,1,OnGoingList) of
- {value,{_,{TCname,Id}}} ->
- [{_,_,State,_}]=ets:lookup(TId,{TCname,Id}),
- {State,{TCname,Id}}; % Should be activating or stopping.
- false ->
- false
- end.
-
-%% Adds a Trace case to the CHL. This is done when it is turned on. Or when it
-%% is called for trace cases that do not have on/off functionality.
-set_activating_chl(TCname,Id,{Counter,OnGoingList,TId},Bindings,ProcH) ->
- ets:insert(TId,{{TCname,Id},Counter,activating,Bindings}),
- {Counter+1,[{ProcH,{TCname,Id}}|OnGoingList],TId}.
-
-%% Function marking a trace case as now running. That is the activation
-%% phase is completed. It is normally completed when the process executing
-%% the trace case signals that it is done.
-set_running_chl(ProcH,TCname,Id,Result,{NextCounter,OnGoingList,TId}) ->
- [{_,Counter,_,Bindings}]=ets:lookup(TId,{TCname,Id}),
- ets:insert(TId,{{TCname,Id},Counter,running,Bindings,Result}),
- NewOnGoingList=lists:keydelete(ProcH,1,OnGoingList),
- {NextCounter,NewOnGoingList,TId}.
-
-%% Function marking trace case TCname with identifier Id as now in its stopping
-%% state. Where ProcH is the handler to the process running the stopping
-%% trace case.
-set_stopping_chl(TCname,Id,{NextCounter,OnGoingList,TId},ProcH)->
- [{_,Counter,_,Bindings,_}]=ets:lookup(TId,{TCname,Id}),
- ets:insert(TId,{{TCname,Id},Counter,stopping,Bindings}),
- ets:insert(TId,{{TCname,Id,make_ref()},NextCounter,stop,Bindings}),
- {NextCounter+1,[{ProcH,{TCname,Id}}|OnGoingList],TId}.
-
-%% Function removing a TCname-Id from the CHL. This is mostly used
-%% if activating the trace case failed for some reason. We do not then
-%% expect the user to stop the trace case. Hence it must be removed now.
-%% A reactivation process may have noticed the activating-entry and started
-%% to activate it. But since the general state reached after an unsuccessful
-%% activation can not easily be determined, we don't try to do much about it.
-del_tc_chl(ProcH,TCname,Id,{NextCounter,OnGoingList,TId}) ->
- ets:delete(TId,{TCname,Id}),
- NewOnGoingList=lists:keydelete(ProcH,1,OnGoingList),
- {NextCounter,NewOnGoingList,TId}.
-
-%% Function removing the entry TCname+Id from the CHL. This makes it
-%% possible to activate a tracecase with this id again. The entry was
-%% previously marked as stopping.
-nullify_chl(ProcH,TCname,Id,{NextCounter,OnGoingList,TId}) ->
- ets:delete(TId,{TCname,Id}),
- NewOnGoingList=lists:keydelete(ProcH,1,OnGoingList),
- {NextCounter+1,NewOnGoingList,TId}.
-
-%% Function stopping all processes saved as being now running tc executers.
-%% This is useful as cleanup during stop tracing for instance.
-%% Returns a new CHL which is not in all parts correct. Entries in the
-%% ETS table are for instance not properly state-changed. But the CHL will
-%% from now on only be used to create command files and similar.
-stop_all_tc_executer_chl({NextCounter,[{ProcH,_}|Rest],TId}) ->
- exit(ProcH,kill),
- stop_all_tc_executer_chl({NextCounter,Rest,TId});
-stop_all_tc_executer_chl({NextCounter,[],TId}) ->
- {NextCounter,[],TId}.
-
-%% Function adding a "plain" inviso call to the CHL.
-add_inviso_call_chl(Cmd,Args,{NextCounter,OnGoingList,TId}) ->
- ets:insert(TId,{{inviso,Cmd,Args,make_ref()},NextCounter}),
- {NextCounter+1,OnGoingList,TId}.
-
-%% Function adding a run trace case entry to the chl.
-add_rtc_chl(TCname,Bindings,{NextCounter,OnGoingList,TId}) ->
- ets:insert(TId,{{TCname,make_ref()},NextCounter,Bindings}),
- {NextCounter+1,OnGoingList,TId}.
-%% Returns the highest used counter number in the command history log.
-get_highest_used_counter_chl({NextCounter,_,_}) ->
- NextCounter-1.
-
-%% Help function returning a list of {{TCname,Id},Phase} for all ongoing
-%% asynchronous tracecases.
-get_ongoing_chl(undefined) ->
- [];
-get_ongoing_chl({_,OngoingList,TId}) ->
- get_ongoing_chl_2(OngoingList,TId).
-
-get_ongoing_chl_2([{_ProcH,{TCname,Id}}|Rest],TId) ->
- case ets:lookup(TId,{TCname,Id}) of
- [{_,_C,activating,_B}] ->
- [{{TCname,Id},activating}|get_ongoing_chl_2(Rest,TId)];
- [{_,_C,stopping,_B}] ->
- [{{TCname,Id},deactivating}|get_ongoing_chl_2(Rest,TId)]
- end;
-get_ongoing_chl_2([],_) ->
- [].
-
-%% Function returning a list of log entries. Note that the list is unsorted
-%% in respect to Counter.
-get_loglist_chl({_,_,TId}) ->
- L=ets:tab2list(TId),
- lists:map(fun({{TC,Id},C,S,B,_Result}) -> {{TC,Id},C,S,B}; % running
- (Tuple={{_TC,_Id},_C,_S,_B}) -> Tuple; % activating | stopping
- (Tuple={{_TC,_Id,_Ref},_C,_S,_B}) -> Tuple; % stop
- (Tuple={{_M,_F,_Args,_Ref},_C}) -> Tuple;
- (Tuple={{_TC,_Ref},_C,_B}) -> Tuple
- end,
- L);
-get_loglist_chl(_) -> % The history is not initiated, ever!
- [].
-
-%% Function returning a list of log entries, but only those which are not
-%% cancelled out by deactivations.
-% get_loglist_active_chl({_,_,TId}) ->
-% L=ets:tab2list(TId),
-% lists:zf(fun({{TC,Id},C,S,B,_Result}) -> {true,{{TC,Id},C,S,B}}; % running
-% (Tuple={{_TC,_Id},_C,_S,_B}) -> Tuple; % activating | stopping
-% (Tuple={{_TC,_Id,_Ref},_C,_S,_B}) -> Tuple; % stop
-% (Tuple={{_M,_F,_Args,_Ref},_C}) -> Tuple
-% end,
-% L);
-% get_loglist_chl(_) -> % The history is not initiated, ever!
-% [].
-
-
-%% This help function recreates a history from a saved history list. This function
-%% is supposed to crash if the log is not well formatted. Note that we must restore
-%% the counter in order for the counter to work if new commands are added to the
-%% history.
-replace_history_chl(OldCHL,SortedLog) ->
- {_,Ongoing,TId}=mk_chl(OldCHL),
- {NewTId,Counter}=replace_history_chl_2(TId,SortedLog,0),
- {ok,{Counter+1,Ongoing,NewTId}}.
-
-replace_history_chl_2(TId,[{{TC,Id},C,running,B}|Rest],_Counter) ->
- ets:insert(TId,{{TC,Id},C,running,B,undefined}),
- replace_history_chl_2(TId,Rest,C);
-replace_history_chl_2(TId,[{{M,F,Args},C}|Rest],_Counter) ->
- ets:insert(TId,{{M,F,Args,make_ref()},C}),
- replace_history_chl_2(TId,Rest,C);
-replace_history_chl_2(TId,[{TC,C,B}|Rest],_Counter) ->
- ets:insert(TId,{{TC,make_ref()},C,B}),
- replace_history_chl_2(TId,Rest,C);
-replace_history_chl_2(TId,[],Counter) ->
- {TId,Counter}.
-%% -----------------------------------------------------------------------------
-
-
-%% -----------------------------------------------------------------------------
-%% Reactivators data structure.
-%% -----------------------------------------------------------------------------
-
-%% Function adding a new {Node,ReactivatorPid} pair to the reactivators structure.
-%% In this way we know which reactivators to remove if Node terminates, or when
-%% a node is fully updated when a reactivator is done.
-add_reactivators(Node,Pid,Reactivators) ->
- [{Node,Pid}|Reactivators].
-
-%% Function removing a reactivator entry from the reactivators structure.
-del_reactivators(RPid,[{_Node,RPid}|Rest]) ->
- Rest;
-del_reactivators(RPid,[Element|Rest]) ->
- [Element|del_reactivators(RPid,Rest)];
-del_reactivators(_,[]) -> % This should not happen.
- [].
-
-get_node_reactivators(RPid,Reactivators) ->
- case lists:keysearch(RPid,2,Reactivators) of
- {value,{Node,_}} ->
- Node;
- false -> % This should not happen.
- false
- end.
-
-%% Returns a list of all nodes that are currently reactivating.
-get_all_nodes_reactivators([{Nodes,_Pid}|Rest]) ->
- [Nodes|get_all_nodes_reactivators(Rest)];
-get_all_nodes_reactivators([]) ->
- [].
-
-%% Function stopping all running reactivator processes. Returns a new empty
-%% reactivators structure. Note that this function does not set the state of
-%% Nodes. It must most often be set to running.
-stop_all_reactivators([{_Nodes,Pid}|Rest]) ->
- exit(Pid,kill),
- stop_all_reactivators(Rest);
-stop_all_reactivators([]) ->
- []. % Returns an empty reactivators.
-
-%% Help function stopping the reactivator (if any) that reactivates Node.
-%% Returns a new list of reactivators structure.
-stop_node_reactivators(Node,[{Node,Pid}|Rest]) ->
- exit(Pid,kill),
- Rest;
-stop_node_reactivators(Node,[NodePid|Rest]) ->
- [NodePid|stop_node_reactivators(Node,Rest)];
-stop_node_reactivators(_,[]) ->
- [].
-%% -----------------------------------------------------------------------------
-
-
-%% -----------------------------------------------------------------------------
-%% Started initial trace cases data structure.
-%% -----------------------------------------------------------------------------
-
-%% This datastructure keeps information about ongoing trace cases started
-%% automatically at init_tracing. These must be automatically stopped when calling
-%% stop_tracing.
-
-add_initial_tcs(TCname,Id,StartedInitialTcs) ->
- [{TCname,Id}|StartedInitialTcs].
-%% -----------------------------------------------------------------------------
-
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+ case lists:keysearch(Node,1,NodesD) of
+ {value,{_,AvailableStatus}} ->
+ get_state_nodes_2(AvailableStatus);
+ false ->
+ false
+ end;
+get_state_nodes(_,NodesD) -> % Non distributed case.
+ get_state_nodes_2(NodesD).
+
+get_state_nodes_2({up,{trace_failure,Status}}) ->
+ {trace_failure,Status};
+get_state_nodes_2({up,{State,suspended}}) -> % {tracing|inactive,suspended}
+ {State,suspended};
+get_state_nodes_2({up,reactivating}) ->
+ reactivating;
+get_state_nodes_2({up,{State,running}}) ->
+ {State,running};
+get_state_nodes_2(down) ->
+ down.
+%% -----------------------------------------------------------------------------
+
+%% Help function in the case we need to consult the state/status of a runtime
+%% component. Returns a nodesD value that can be added to the nodes database.
+mk_nodes_state_from_status({ok,{tracing,running}}) ->
+ {up,{tracing,running}};
+mk_nodes_state_from_status({ok,{tracing,{suspended,_SReason}}}) ->
+ {up,{tracing,suspended}};
+mk_nodes_state_from_status({ok,{_,running}}) ->
+ {up,{inactive,running}};
+mk_nodes_state_from_status({ok,{_,{suspended,_SReason}}}) ->
+ {up,{inactive,suspended}};
+mk_nodes_state_from_status({error,_Reason}) ->
+ down.
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% The session_state.
+%% -----------------------------------------------------------------------------
+
+%% The session state reflects if the inviso_tool is tracing or not.
+%% This means that if the tool is tracing a reconnected node can be made to
+%% restart_session.
+
+%% Returns the correct value indicating that we are tracing now.
+tracing_sessionstate() ->
+ tracing.
+%% -----------------------------------------------------------------------------
+
+%% Returns true or false depending on if we are tracing now or not.
+is_tracing(tracing) ->
+ true;
+is_tracing(_) ->
+ false.
+%% -----------------------------------------------------------------------------
+
+%% Returns the correct value indicating that the tool is not tracing.
+passive_sessionstate() ->
+ idle.
+%% -----------------------------------------------------------------------------
+
+%% -----------------------------------------------------------------------------
+%% The tracer_data datastructure.
+%% -----------------------------------------------------------------------------
+
+%% The tracer_data structure collects the tracer data arguments used to init tracing
+%% by this inviso tool. The args are saved per session. Each session has
+%% a number.
+%% Implementation:
+%% Sessions=[{SessionNr,TDGargs},...]
+%% SessionNr=integer()
+%% TDGargs=list(), args given to the tracer data generator
+%% minus the first argument which is the Node name.
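+%% A minimal sketch of the structure after two sessions, assuming two purely
+%% hypothetical TDGargs lists TDGargs1 and TDGargs2 (session 2 started last):
+%%   [{2,TDGargs2},{1,TDGargs1}]
+%% i.e. the latest session is always the head of the list.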
+
+%% Function taking tracerdata args structure inserting yet another session.
+%% Returns {SessionNr,NewTDs}.
+insert_td_tracer_data(TDGargs,TDs=[{SNr,_}|_]) ->
+ {SNr+1,[{SNr+1,TDGargs}|TDs]};
+insert_td_tracer_data(TDGargs,undefined) ->
+ {1,[{1,TDGargs}]}.
+%% -----------------------------------------------------------------------------
+
+%% Returns the latest session nr.
+get_latest_session_nr_tracer_data(undefined) ->
+ undefined;
+get_latest_session_nr_tracer_data([{SessionNr,_}|_]) ->
+ SessionNr.
+%% -----------------------------------------------------------------------------
+
+%% Returns the tracer data arguments used when creating the trace data for the
+%% latest session.
+get_latest_tdgargs_tracer_data(undefined) ->
+ undefined;
+get_latest_tdgargs_tracer_data([{_,TDGargs}|_]) ->
+ TDGargs.
+%% -----------------------------------------------------------------------------
+
+
+%% -----------------------------------------------------------------------------
+%% The tc_dict or trace case dictionary datastructure.
+%% -----------------------------------------------------------------------------
+
+%% The tc_dict stores information about all available trace cases.
+%% Implementation:
+%% [{TCname,Type,VarNames,FNameOn [,FNameOff]},...]
+%% TCname=atom()
+%% Type=on | on_off
+%% VarNames=[atom(),...]
+%% FNameOn=FNameOff=string()
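+%% A minimal sketch of a dictionary holding one trace case of each type,
+%% where all names and file names below are hypothetical:
+%%   [{gc_flags,on_off,['Nodes'],"gc_flags_on.tc","gc_flags_off.tc"},
+%%    {dump_procs,on,['File'],"dump_procs.tc"}]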
+
+%% Returns the empty trace case dictionary.
+mk_tc_dict() ->
+ [].
+%% -----------------------------------------------------------------------------
+
+%% Function inserting a new trace case into the trace case dictionary.
+insert_tracecase_tc_dict(TCname,on,VarNames,FNameOn,TCdict) ->
+ [{TCname,on,VarNames,FNameOn}|TCdict].
+insert_tracecase_tc_dict(TCname,on_off,VarNames,FNameOn,FNameOff,TCdict) ->
+ [{TCname,on_off,VarNames,FNameOn,FNameOff}|TCdict].
+%% -----------------------------------------------------------------------------
+
+%% Function finding a trace case definition in the tc_dict structure.
+%% Returns {ok,{TCname,Type,VarNames,FNameOn [,FNameOff]}} or 'false'.
+get_tracecase_tc_dict(TCname,[Tuple|_]) when element(1,Tuple)==TCname ->
+ {ok,Tuple};
+get_tracecase_tc_dict(TCname,[_|Rest]) ->
+ get_tracecase_tc_dict(TCname,Rest);
+get_tracecase_tc_dict(_,[]) ->
+ false;
+get_tracecase_tc_dict(_,_) -> % There are no trace cases!
+ false.
+%% -----------------------------------------------------------------------------
+
+%% Function working on the trace case definition returned by get_tracecase_tc_dict/2
+%% function.
+%% Returning {ok,ActivationFileName}.
+get_tc_activate_fname({_TCname,_Type,_VarNames,FNameOn}) ->
+ {ok,FNameOn};
+get_tc_activate_fname({_TCname,_Type,_VarNames,FNameOn,_FNameOff}) ->
+ {ok,FNameOn}.
+
+get_tc_deactivate_fname({_TCname,_Type,_VarNames,_FNameOn,FNameOff}) ->
+ {ok,FNameOff};
+get_tc_deactivate_fname(_) -> % Not a case with off function.
+ false.
+
+get_tc_varnames({_TCname,_Type,VarNames,_FNameOn}) ->
+ VarNames;
+get_tc_varnames({_TCname,_Type,VarNames,_FNameOn,_FNameOff}) ->
+ VarNames.
+
+%% -----------------------------------------------------------------------------
+
+
+%% The Command History Log (CHL) stores commands to make it possible to
+%% reactivate suspended nodes, reconnect restarted nodes, and to make
+%% autostart files.
+%% Each time tracing is initiated (that is started) the CHL is cleared since
+%% it would not make sense to repeat commands from an earlier tracing at
+%% reactivation for instance.
+
+%% Implementation: {NextCounter,OnGoingList,ETStable}
+%% NextCounter=integer(), next command number - to be able to sort them in order.
+%% OnGoingList=[{ProcH,{TCname,ID}},...]
+%% ID=term(), instance id for this execution of this trace case.
+%% ETStable=tid() -> {{TCname,Id},Counter,State1,Bindings}
+%% ETStable=tid() -> {{TCname,Id},Counter,running,Bindings,Result} |
+%% {{TCname,Id,#Ref},Counter,stop,Bindings} |
+%% {{TCname,#Ref},Counter,Bindings} % An rtc
+%% {{M,F,Args,#Ref},Counter}
+%% Counter=integer(), the order-counter for this logged entry.
+%% State1=activating | stopping
+%% Where:
+%% activating: the activation file for the tracecase is running.
+%% running : activation is completed.
+%% stopping : set on the previously running ETS entry when deactivation
+%% file is currently executing.
+%% stop : entered with own Counter into the ETS table when
+%% deactivation file is executing. Remains afterwards too.
+%% Result=term(), the result returned from the tr-case or inviso call.
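+%% A minimal sketch of a populated CHL, where all concrete values are
+%% hypothetical: one trace case gc_flags is still activating (run by the
+%% process with handle ProcH) and one plain inviso call was logged before it:
+%%   {3,                                % next counter to use
+%%    [{ProcH,{gc_flags,1}}],           % ongoing asynchronous trace cases
+%%    Tid}                              % ETS table holding:
+%%      {{gc_flags,1},2,activating,[{'Nodes',[mynode@myhost]}]}
+%%      {{inviso,tf,[[{all,[call]}]],Ref},1}
+%% where Ref is a reference created by make_ref/0.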
+
+
+%% Returning an initial empty CHL.
+mk_chl(undefined) ->
+ {1,[],ets:new(inviso_tool_chl,[set,protected])};
+mk_chl({_,_,TId}) ->
+ ets:delete(TId),
+ mk_chl(undefined).
+
+%% Help function returning 'true' if there is a current history.
+history_exists_chl(undefined) ->
+ false;
+history_exists_chl({_,_,_}) ->
+ true.
+
+%% Function looking up the state of this trace case.
+find_id_chl(TCname,Id,{_NextCounter,_OnGoingList,TId}) ->
+ case ets:lookup(TId,{TCname,Id}) of
+ [{_,_,running,Bindings,_Result}] -> % The trace case is tracing.
+ {ok,Bindings};
+ [{_,_,State,_}] -> % activating or stopping.
+ State;
+ [] ->
+ false
+ end.
+
+%% Function finding the Trace case associated with a process handle
+%% doing this trace case's activation or stopping.
+find_tc_executer_chl(ProcH,{_,OnGoingList,TId}) ->
+ case lists:keysearch(ProcH,1,OnGoingList) of
+ {value,{_,{TCname,Id}}} ->
+ [{_,_,State,_}]=ets:lookup(TId,{TCname,Id}),
+ {State,{TCname,Id}}; % Should be activating or stopping.
+ false ->
+ false
+ end.
+
+%% Adds a Trace case to the CHL. This is done when it is turned on. Or when it
+%% is called for trace cases that do not have on/off functionality.
+set_activating_chl(TCname,Id,{Counter,OnGoingList,TId},Bindings,ProcH) ->
+ ets:insert(TId,{{TCname,Id},Counter,activating,Bindings}),
+ {Counter+1,[{ProcH,{TCname,Id}}|OnGoingList],TId}.
+
+%% Function marking a trace case as now running. That is the activation
+%% phase is completed. It is normally completed when the process executing
+%% the trace case signals that it is done.
+set_running_chl(ProcH,TCname,Id,Result,{NextCounter,OnGoingList,TId}) ->
+ [{_,Counter,_,Bindings}]=ets:lookup(TId,{TCname,Id}),
+ ets:insert(TId,{{TCname,Id},Counter,running,Bindings,Result}),
+ NewOnGoingList=lists:keydelete(ProcH,1,OnGoingList),
+ {NextCounter,NewOnGoingList,TId}.
+
+%% Function marking trace case TCname with identifier Id as now in its stopping
+%% state. Where ProcH is the handler to the process running the stopping
+%% trace case.
+set_stopping_chl(TCname,Id,{NextCounter,OnGoingList,TId},ProcH)->
+ [{_,Counter,_,Bindings,_}]=ets:lookup(TId,{TCname,Id}),
+ ets:insert(TId,{{TCname,Id},Counter,stopping,Bindings}),
+ ets:insert(TId,{{TCname,Id,make_ref()},NextCounter,stop,Bindings}),
+ {NextCounter+1,[{ProcH,{TCname,Id}}|OnGoingList],TId}.
+
+%% Function removing a TCname-Id from the CHL. This is mostly used
+%% if activating the trace case failed for some reason. We do not then
+%% expect the user to stop the trace case. Hence it must be removed now.
+%% A reactivation process may have noticed the activating-entry and started
+%% to activate it. But since the general state reached after an unsuccessful
+%% activation can not easily be determined, we don't try to do much about it.
+del_tc_chl(ProcH,TCname,Id,{NextCounter,OnGoingList,TId}) ->
+ ets:delete(TId,{TCname,Id}),
+ NewOnGoingList=lists:keydelete(ProcH,1,OnGoingList),
+ {NextCounter,NewOnGoingList,TId}.
+
+%% Function removing the entry TCname+Id from the CHL. This makes it
+%% possible to activate a tracecase with this id again. The entry was
+%% previously marked as stopping.
+nullify_chl(ProcH,TCname,Id,{NextCounter,OnGoingList,TId}) ->
+ ets:delete(TId,{TCname,Id}),
+ NewOnGoingList=lists:keydelete(ProcH,1,OnGoingList),
+ {NextCounter+1,NewOnGoingList,TId}.
+
+%% Function stopping all processes saved as being now running tc executers.
+%% This is useful as cleanup during stop tracing for instance.
+%% Returns a new CHL which is not in all parts correct. Entries in the
+%% ETS table are for instance not properly state-changed. But the CHL will
+%% from now on only be used to create command files and similar.
+stop_all_tc_executer_chl({NextCounter,[{ProcH,_}|Rest],TId}) ->
+ exit(ProcH,kill),
+ stop_all_tc_executer_chl({NextCounter,Rest,TId});
+stop_all_tc_executer_chl({NextCounter,[],TId}) ->
+ {NextCounter,[],TId}.
+
+%% Function adding a "plain" inviso call to the CHL.
+add_inviso_call_chl(Cmd,Args,{NextCounter,OnGoingList,TId}) ->
+ ets:insert(TId,{{inviso,Cmd,Args,make_ref()},NextCounter}),
+ {NextCounter+1,OnGoingList,TId}.
+
+%% Function adding a run trace case entry to the chl.
+add_rtc_chl(TCname,Bindings,{NextCounter,OnGoingList,TId}) ->
+ ets:insert(TId,{{TCname,make_ref()},NextCounter,Bindings}),
+ {NextCounter+1,OnGoingList,TId}.
+%% Returns the highest used counter number in the command history log.
+get_highest_used_counter_chl({NextCounter,_,_}) ->
+ NextCounter-1.
+
+%% Help function returning a list of {{TCname,Id},Phase} for all ongoing
+%% asynchronous tracecases.
+get_ongoing_chl(undefined) ->
+ [];
+get_ongoing_chl({_,OngoingList,TId}) ->
+ get_ongoing_chl_2(OngoingList,TId).
+
+get_ongoing_chl_2([{_ProcH,{TCname,Id}}|Rest],TId) ->
+ case ets:lookup(TId,{TCname,Id}) of
+ [{_,_C,activating,_B}] ->
+ [{{TCname,Id},activating}|get_ongoing_chl_2(Rest,TId)];
+ [{_,_C,stopping,_B}] ->
+ [{{TCname,Id},deactivating}|get_ongoing_chl_2(Rest,TId)]
+ end;
+get_ongoing_chl_2([],_) ->
+ [].
+
+%% Function returning a list of log entries. Note that the list is unsorted
+%% in respect to Counter.
+get_loglist_chl({_,_,TId}) ->
+ L=ets:tab2list(TId),
+ lists:map(fun({{TC,Id},C,S,B,_Result}) -> {{TC,Id},C,S,B}; % running
+ (Tuple={{_TC,_Id},_C,_S,_B}) -> Tuple; % activating | stopping
+ (Tuple={{_TC,_Id,_Ref},_C,_S,_B}) -> Tuple; % stop
+ (Tuple={{_M,_F,_Args,_Ref},_C}) -> Tuple;
+ (Tuple={{_TC,_Ref},_C,_B}) -> Tuple
+ end,
+ L);
+get_loglist_chl(_) -> % The history is not initiated, ever!
+ [].
+
+%% Function returning a list of log entries, but only those which are not
+%% cancelled out by deactivations.
+% get_loglist_active_chl({_,_,TId}) ->
+% L=ets:tab2list(TId),
+% lists:zf(fun({{TC,Id},C,S,B,_Result}) -> {true,{{TC,Id},C,S,B}}; % running
+% (Tuple={{_TC,_Id},_C,_S,_B}) -> Tuple; % activating | stopping
+% (Tuple={{_TC,_Id,_Ref},_C,_S,_B}) -> Tuple; % stop
+% (Tuple={{_M,_F,_Args,_Ref},_C}) -> Tuple
+% end,
+% L);
+% get_loglist_chl(_) -> % The history is not initiated, ever!
+% [].
+
+
+%% This help function recreates a history from a saved history list. This function
+%% is supposed to crash if the log is not well formatted. Note that we must restore
+%% the counter in order for the counter to work if new commands are added to the
+%% history.
+replace_history_chl(OldCHL,SortedLog) ->
+ {_,Ongoing,TId}=mk_chl(OldCHL),
+ {NewTId,Counter}=replace_history_chl_2(TId,SortedLog,0),
+ {ok,{Counter+1,Ongoing,NewTId}}.
+
+replace_history_chl_2(TId,[{{TC,Id},C,running,B}|Rest],_Counter) ->
+ ets:insert(TId,{{TC,Id},C,running,B,undefined}),
+ replace_history_chl_2(TId,Rest,C);
+replace_history_chl_2(TId,[{{M,F,Args},C}|Rest],_Counter) ->
+ ets:insert(TId,{{M,F,Args,make_ref()},C}),
+ replace_history_chl_2(TId,Rest,C);
+replace_history_chl_2(TId,[{TC,C,B}|Rest],_Counter) ->
+ ets:insert(TId,{{TC,make_ref()},C,B}),
+ replace_history_chl_2(TId,Rest,C);
+replace_history_chl_2(TId,[],Counter) ->
+ {TId,Counter}.
+%% -----------------------------------------------------------------------------
+
+
+%% -----------------------------------------------------------------------------
+%% Reactivators data structure.
+%% -----------------------------------------------------------------------------
+
+%% Function adding a new {Node,ReactivatorPid} pair to the reactivators structure.
+%% In this way we know which reactivators to remove if Node terminates, or when
+%% a node is fully updated when a reactivator is done.
+add_reactivators(Node,Pid,Reactivators) ->
+ [{Node,Pid}|Reactivators].
+
+%% Function removing a reactivator entry from the reactivators structure.
+del_reactivators(RPid,[{_Node,RPid}|Rest]) ->
+ Rest;
+del_reactivators(RPid,[Element|Rest]) ->
+ [Element|del_reactivators(RPid,Rest)];
+del_reactivators(_,[]) -> % This should not happen.
+ [].
+
+get_node_reactivators(RPid,Reactivators) ->
+ case lists:keysearch(RPid,2,Reactivators) of
+ {value,{Node,_}} ->
+ Node;
+ false -> % This should not happen.
+ false
+ end.
+
+%% Returns a list of all nodes that are currently reactivating.
+get_all_nodes_reactivators([{Nodes,_Pid}|Rest]) ->
+ [Nodes|get_all_nodes_reactivators(Rest)];
+get_all_nodes_reactivators([]) ->
+ [].
+
+%% Function stopping all running reactivator processes. Returns a new empty
+%% reactivators structure. Note that this function does not set the state of
+%% Nodes. It must most often be set to running.
+stop_all_reactivators([{_Nodes,Pid}|Rest]) ->
+ exit(Pid,kill),
+ stop_all_reactivators(Rest);
+stop_all_reactivators([]) ->
+ []. % Returns an empty reactivators.
+
+%% Help function stopping the reactivator (if any) that reactivates Node.
+%% Returns a new list of reactivators structure.
+stop_node_reactivators(Node,[{Node,Pid}|Rest]) ->
+ exit(Pid,kill),
+ Rest;
+stop_node_reactivators(Node,[NodePid|Rest]) ->
+ [NodePid|stop_node_reactivators(Node,Rest)];
+stop_node_reactivators(_,[]) ->
+ [].
+%% -----------------------------------------------------------------------------
+
+
+%% -----------------------------------------------------------------------------
+%% Started initial trace cases data structure.
+%% -----------------------------------------------------------------------------
+
+%% This datastructure keeps information about ongoing trace cases started
+%% automatically at init_tracing. These must be automatically stopped when calling
+%% stop_tracing.
+
+add_initial_tcs(TCname,Id,StartedInitialTcs) ->
+ [{TCname,Id}|StartedInitialTcs].
+%% -----------------------------------------------------------------------------
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
diff --git a/lib/inviso/src/inviso_tool_sh.erl b/lib/inviso/src/inviso_tool_sh.erl
index fe876b955a..b02f498c5b 100644
--- a/lib/inviso/src/inviso_tool_sh.erl
+++ b/lib/inviso/src/inviso_tool_sh.erl
@@ -1,1731 +1,1749 @@
-%%%------------------------------------------------------------------------------
-%%% File : inviso_tool_sh.erl
-%%% Author : Lennart Öhman <[email protected]>
-%%% Description :
-%%%
-%%% Created : 24 Oct 2005 by Lennart Öhman
-%%%------------------------------------------------------------------------------
--module(inviso_tool_sh).
-
-%% Inviso Session Handler.
-%% This is the code for the session handler process. There is one session handler
-%% process for each trace session started through the start_session inviso tool
-%% API. The session handler process is responsible for:
-%%
-%% -Knowing the state/status of all participating runtime components.
-%% -Keeping storage of all tracerdata all our participants have used. This means
-%% also to find out the tracerdata of runtime components connecting by
-%% themselves.
-%%
-%% STORAGE STRATEGY
-%% ----------------
-%% The local information storage can be changed by two things: either by executing
-%% commands issued through our APIs, or by receiving a trace_event from the control
-%% component. When we execute commands, a corresponding event will also follow,
-%% meaning that in those situations we are informed twice.
-%% A simple strategy could be to wait for the event even when doing the changes
-%% to the runtime components ourselves (through commands). But that may result in
-%% a small time frame where someone might do yet another command and fail
-%% because the local information storage is not up to date as it would have been
-%% expected to be. Therefore we always update the local storage when making changes
-%% to a runtime component ourselves. There will eventually be a double update
-%% through an incoming event. But the storage must cope with that, preventing
-%% inconsistencies from happening. An example of a strategy is that the tracerdata table
-%% is a bag, not allowing for double entries of the same kind. Therefore a double
-%% update is harmless there.
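-%% For instance (the table name and tuple shape below are only illustrative),
-%% inserting the very same object twice into a bag table leaves a single copy:
-%%   T = ets:new(trd, [bag]),
-%%   ets:insert(T, {Node,TracerData}),   % update done by our own command
-%%   ets:insert(T, {Node,TracerData}),   % the later event-driven update
-%%   ets:lookup(T, Node) =:= [{Node,TracerData}]   % still just one entry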
-
-%% ------------------------------------------------------------------------------
-%% Module wide constants.
-%% ------------------------------------------------------------------------------
--define(LOCAL_RUNTIME,local_runtime). % Used as node name when non-distributed.
--define(TRACING,tracing). % A state defined by the control component.
--define(RUNNING,running). % A status according to control component.
-
--define(COPY_LOG_FROM,copy_log_from). % Common filesystem option.
-%% ------------------------------------------------------------------------------
-
-%% ------------------------------------------------------------------------------
-%% API exports.
-%% ------------------------------------------------------------------------------
--export([start_link/5,start_link/8]).
--export([cancel_session/1,stop_session/3]).
--export([reactivate/1,reactivate/2]).
--export([tpl/5,tpl/6,tpl/7,
- tf/2,tf/3,
- tpm_localnames/2,init_tpm/6,init_tpm/9,tpm/6,tpm/7,tpm/10,
- tpm_ms/7,ctpm_ms/6,ctpm/5
- ]).
-%% ------------------------------------------------------------------------------
-
-
-%% ------------------------------------------------------------------------------
-%% Internal exports.
-%% ------------------------------------------------------------------------------
--export([init/1,handle_call/3,handle_info/2,terminate/2]).
-
--export([get_loopdata/1]).
-%% ------------------------------------------------------------------------------
-
-
-%% ------------------------------------------------------------------------------
-%% Includes.
-%% ------------------------------------------------------------------------------
--include_lib("kernel/include/file.hrl"). % Necessary for file module.
-%% ------------------------------------------------------------------------------
-
-
-%% ==============================================================================
-%% Exported API functions.
-%% ==============================================================================
-
-%% start_link(From,NodeParams,CtrlNode,CtrlPid,SafetyCatches,NodesIn,NodesNotIn) =
-%% {ok,Pid} | {error,Reason}
-%% From= pid(), the initial client expecting the reply.
-%% NodeParams=[{Node,TracerData},{Node,TracerData,Opts}...]
-%% CtrlNode=atom() | 'void', the node where the trace control component is.
-%% CtrlPid=pid(), the pid of the trace control component.
-%% SafetyCatches=
-%% Dir=string(), where to place fetched logs and the merged log.
-%% Dbg=debug structure.
-%% NodesIn=[Node,...], list of nodes already in another session.
-%% NodesNotIn=[Node,...], list of nodes not in another session.
-%%
-%% Starts a session-handler. It keeps track of the state and status of all
-%% participating runtime components. Note that there is a non-distributed case too.
-%% In the non-distributed case there is no such thing as CtrlNode.
-start_link(From,TracerData,CtrlPid,SafetyCatches,Dbg) ->
- gen_server:start_link(?MODULE,
- {self(),From,TracerData,CtrlPid,SafetyCatches,Dbg},
- []).
-
-start_link(From,NodeParams,CtrlNode,CtrlPid,SafetyCatches,Dbg,NodesIn,NodesNotIn) ->
- gen_server:start_link(?MODULE,
- {self(),From,NodeParams,CtrlNode,CtrlPid,
- SafetyCatches,Dbg,NodesIn,NodesNotIn},
- []).
-%% ------------------------------------------------------------------------------
-
-%% Stops tracing where it is ongoing. Fetches all logfiles.
-stop_session(SID,Dir,Prefix) ->
- gen_server:call(SID,{stop_session,Dir,Prefix}).
-%% ------------------------------------------------------------------------------
-
-%% cancel_session(SID) = ok
-%%
-%% Cancels the session brutally. All runtime components are made to stop tracing,
-%% all local log files are removed using the tracerdata we know for them.
-cancel_session(SID) ->
- gen_server:call(SID,cancel_session).
-%% ------------------------------------------------------------------------------
-
-%% reactivate(SID) = {ok,
-%% reactivate(SID,Nodes) = {ok,NodeResults} | {error,Reason}.
-%% SID=session id, pid().
-%% Nodes=[Node,...]
-%% NodeResult=[{Node,Result},...]
-%% Result={Good,Bad}
-%% Good,Bad=integer(), the number of redone activities.
-%%
-%% Function which reactivates runtime components being suspended. This is done by
-%% replaying all trace flags (in the correct order) to the corresponding nodes.
-%% Note that this may also mean turning flags off. Like first turning them on
-%% then off a split second later.
-reactivate(SID) ->
- gen_server:call(SID,reactivate). %% NOT IMPLEMENTED YET.
-reactivate(SID,Nodes) ->
- gen_server:call(SID,{reactivate,Nodes}).
-%% ------------------------------------------------------------------------------
-
-
-%% tpl(SessionID,Mod,Func,Arity,MS)=
-%% tpl(SessionID,Mod,Func,Arity,MS,Opts)={ok,N}|{error,Reason}.
-%% tpl(SessionID,Nodes,Mod,Func,Arity,MS)=
-%% tpl(SessionID,Nodes,Mod,Func,Arity,MS,Opts)={ok,Result}|{error,Reason}
-%% Mod='_' | ModuleName | ModRegExp | {DirRegExp,ModRegExp}
-%% ModRegExp=DirRegExp= string()
-%% Func='_' | FunctionName
-%% Arity='_' | integer()
-%% MS=[] | false | a match specification
-%% Opts=[Opts,...]
-%% Opt={arg,Arg}, disable_safety, {expand_regexp_at,NodeName}, only_loaded
-%% Nodes=[NodeName,...]
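-%% For example, a call such as the following (module and function made up)
-%% sets a local trace pattern on mymod:init/1 on all session nodes:
-%%   tpl(SID, mymod, init, 1, []).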
-tpl(SID,Mod,Func,Arity,MS) ->
- gen_server:call(SID,{tp,tpl,Mod,Func,Arity,MS,[]}).
-tpl(SID,Mod,Func,Arity,MS,Opts) when list(MS);MS==true;MS==false ->
- gen_server:call(SID,{tp,tpl,Mod,Func,Arity,MS,Opts});
-tpl(SID,Nodes,Mod,Func,Arity,MS) when integer(Arity);Arity=='_' ->
- gen_server:call(SID,{tp,tpl,Nodes,Mod,Func,Arity,MS,[]}).
-tpl(SID,Nodes,Mod,Func,Arity,MS,Opts) ->
- gen_server:call(SID,{tp,tpl,Nodes,Mod,Func,Arity,MS,Opts}).
-%% ------------------------------------------------------------------------------
-
-%% ctpl(SessionID,Nodes,Mod,Func,Arity)=
-%% See tpl/X for arguments.
-%%
-%% Removes local trace-patterns from functions.
-ctpl(SID,Nodes,Mod,Func,Arity) ->
- gen_server:call(SID,{ctp,ctpl,Nodes,Mod,Func,Arity}).
-%% ------------------------------------------------------------------------------
-
-
-tpm_localnames(SID,Nodes) ->
- gen_server:call(SID,{tpm_localnames,Nodes}).
-tpm_globalnames(SID,Nodes) ->
- gen_server:call(SID,{tpm_globalnames,Nodes}).
-
-init_tpm(SID,Nodes,Mod,Func,Arity,CallFunc) ->
- gen_server:call(SID,{init_tpm,Nodes,Mod,Func,Arity,CallFunc}).
-init_tpm(SID,Nodes,Mod,Func,Arity,InitFunc,CallFunc,ReturnFunc,RemoveFunc) ->
- gen_server:call(SID,
- {init_tpm,Nodes,Mod,Func,Arity,InitFunc,CallFunc,ReturnFunc,RemoveFunc}).
-tpm(SID,Nodes,Mod,Func,Arity,MS) ->
- gen_server:call(SID,{tpm,Nodes,Mod,Func,Arity,MS}).
-tpm(SID,Nodes,Mod,Func,Arity,MS,CallFunc) ->
- gen_server:call(SID,{tpm,Nodes,Mod,Func,Arity,MS,CallFunc}).
-tpm(SID,Nodes,Mod,Func,Arity,MS,InitFunc,CallFunc,ReturnFunc,RemoveFunc) ->
- gen_server:call(SID,{tpm,Nodes,Mod,Func,Arity,MS,InitFunc,CallFunc,ReturnFunc,RemoveFunc}).
-
-tpm_ms(SID,Nodes,Mod,Func,Arity,MSname,MS) ->
- gen_server:call(SID,{tpm_ms,Nodes,Mod,Func,Arity,MSname,MS}).
-
-ctpm_ms(SID,Nodes,Mod,Func,Arity,MSname) ->
- gen_server:call(SID,{ctpm_ms,Nodes,Mod,Func,Arity,MSname}).
-
-ctpm(SID,Nodes,Mod,Func,Arity) ->
- gen_server:call(SID,{ctpm,Nodes,Mod,Func,Arity}).
-%% ------------------------------------------------------------------------------
-
-
-%% tf(SessionID,Nodes,TraceConfList)=
-%% TraceConfList=[{PidSpec,Flags},...]
-%% PidSpec=pid()|atom()|all|new|existing
-%% Flags=[Flag,...]
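-%% For example, the following call (flag list chosen only for illustration)
-%% switches on call tracing with timestamps for all processes:
-%%   tf(SID, [{all,[call,timestamp]}]).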
-tf(SID,TraceConfList) ->
- gen_server:call(SID,{tf,TraceConfList}).
-tf(SID,Nodes,TraceConfList) ->
- gen_server:call(SID,{tf,Nodes,TraceConfList}).
-%% ------------------------------------------------------------------------------
-
-
-get_loopdata(SID) ->
- gen_server:call(SID,get_loopdata).
-%% ------------------------------------------------------------------------------
-
-%% ==============================================================================
-%% Genserver call-backs.
-%% ==============================================================================
-
-%% Initial function for the session handler process. The nodes participating in
-%% the session must previously have been added to our control component by the tool.
-%% The session handler first finds out the state/status of the specified runtime
-%% components, then it tries to initiate tracing on those where it is applicable.
-%% Note that a reply to the initial (tool)client is done from here instead from
-%% the tool-server.
-init({Parent,From,TracerData,CtrlPid,SafetyCatches,Dbg}) -> % The non-distributed case.
- {ok,StateStatus}=init_rtcomponent_states([],void,CtrlPid,[?LOCAL_RUNTIME]),
- case is_tool_internal_tracerdata(TracerData) of
- false -> % We shall initiate local runtime.
- case inviso:init_tracing(TracerData) of
- ok ->
- gen_server:reply(From,{ok,{self(),ok}}),
- {ok,mk_ld(Parent,
- void,
- CtrlPid,
- to_rtstates([{?LOCAL_RUNTIME,{tracing,?RUNNING},[]}]),
- [{?LOCAL_RUNTIME,TracerData}],
- [],
- SafetyCatches,
- Dbg)};
- {error,Reason} -> % It might have become suspended?!
- gen_server:reply(From,{error,Reason}),
- {ok,mk_ld(Parent,
- void,
- CtrlPid,
- to_rtstates([{?LOCAL_RUNTIME,StateStatus,[]}]),
- [{?LOCAL_RUNTIME,TracerData}],
- [],
- SafetyCatches,
- Dbg)}
- end;
- true -> % We shall not pass this one on.
- gen_server:reply(From,{ok,{self(),ok}}), % Then it is ok.
- {ok,mk_ld(Parent,
- void,
- CtrlPid,
- to_rtstates([{?LOCAL_RUNTIME,StateStatus,[]}]),
- [],
- [?LOCAL_RUNTIME],
- SafetyCatches,
- Dbg)}
- end;
-init({Parent,From,NodeParams,CtrlNode,CtrlPid,SafetyCatches,Dbg,NodesIn,NodesNotIn}) ->
- case init_rtcomponent_states(NodeParams,CtrlNode,CtrlPid,NodesNotIn) of
- {ok,States} -> % A list of {Node,{State,Status},Opts}.
- {NodeParams2,Nodes2}=remove_nodeparams(NodesIn,NodeParams),
- case inviso_tool_lib:inviso_cmd(CtrlNode,init_tracing,[NodeParams2]) of
- {ok,Result} -> % Resulted in state changes!
- RTStates=set_tracing_rtstates(to_rtstates(States),Result),
- ReplyValue=init_fix_resultnodes(NodesIn,Nodes2,Result),
- gen_server:reply(From,{ok,{self(),ReplyValue}}),
- {ok,mk_ld(Parent,CtrlNode,CtrlPid,RTStates,
- NodeParams2,Nodes2,SafetyCatches,Dbg)};
- {error,Reason} -> % Some general failure.
- inviso_tool_lib:inviso_cmd(CtrlNode,unsubscribe,[]),
- gen_server:reply(From,{error,{init_tracing,Reason}}),
- {stop,{init_tracing,Reason}};
- What ->
- io:format("GOT:~n~w~n",[What]),
- exit(foo)
- end;
- {error,Reason} -> % Unable to get the state/status.
- inviso_tool_lib:inviso_cmd(CtrlNode,unsubscribe,[]),
- gen_server:reply(From,{error,Reason}),
- {stop,{error,Reason}};
- What ->
- io:format("GOT:~n~w~n",[What]),
- exit(foo)
- end.
-%% ------------------------------------------------------------------------------
-
-%% To stop a session means stopping the tracing and removing all local files on the
-%% runtime nodes. We do have a table with all tracer data and that is how we are
-%% going to recreate what files to remove.
-%% Since runtime components may actually change state when this procedure is
-%% on-going, we do not care! It is the state in the session handling process at
-%% the time of start of this procedure which is used.
-handle_call(cancel_session,_From,LD) ->
- CtrlNode=get_ctrlnode_ld(LD),
- RTStates=get_rtstates_ld(LD),
- Dbg=get_dbg_ld(LD),
- TracingNodes=get_all_tracing_nodes_rtstates(RTStates),
- case stop_all_tracing(CtrlNode,Dbg,TracingNodes) of
- ok-> % Hopefully all nodes are stopped now.
- AvailableNodes=get_all_available_nodes_rtstates(RTStates),
- TRDstorage=get_trdstorage_ld(LD),
- remove_all_local_logs(CtrlNode,TRDstorage,AvailableNodes,Dbg),
- {stop,normal,ok,LD}; % LD actually not correct now!
- {error,Reason} -> % Some serious error when stop_tracing.
- {stop,normal,{error,Reason},LD}
- end;
-%% ------------------------------------------------------------------------------
-
-%% *Stop all tracing on runtime components still tracing.
-%% *Copy all local log files to the collection directory.
-handle_call({stop_session,Dir,Prefix},_From,LD) ->
- case check_directory_exists(Dir) of % Check that this directory exists here.
- true ->
- RTStates=get_rtstates_ld(LD),
- CtrlNode=get_ctrlnode_ld(LD),
- Dbg=get_dbg_ld(LD),
- TracingNodes=get_all_tracing_nodes_rtstates(RTStates),
- case stop_all_tracing(CtrlNode,Dbg,TracingNodes) of
- ok -> % Hopefully no node is still tracing now.
- TRDstorage=get_trdstorage_ld(LD),
- AvailableNodes=get_all_available_nodes_rtstates(RTStates),
- {FailedNodes,FetchedFiles}=
- transfer_logfiles(RTStates,CtrlNode,Dir,Prefix,
- TRDstorage,Dbg,AvailableNodes),
- RemoveNodes= % We only delete local logs where fetch ok.
- lists:filter(fun(N)->
- case lists:keysearch(N,1,FailedNodes) of
- {value,_} ->
- false;
- false ->
- true
- end
- end,
- AvailableNodes),
- remove_all_local_logs(CtrlNode,TRDstorage,RemoveNodes,Dbg),
- {stop,normal,{ok,{FailedNodes,FetchedFiles}},LD};
- {error,Reason} -> % Some general failure, quit.
- {stop,normal,{error,Reason},LD}
- end;
- false -> % You specified a non-existing directory!
- {reply,{error,{faulty_dir,Dir}},LD}
- end;
-%% ------------------------------------------------------------------------------
-
-handle_call({reactivate,Nodes},_From,LD) ->
- RTStates=get_rtstates_ld(LD),
- {OurNodes,OtherNodes}=
- remove_nodes_not_ours(Nodes,get_all_session_nodes_rtstates(RTStates)),
- CtrlNode=get_ctrlnode_ld(LD),
- ACTstorage=get_actstorage_ld(LD),
- case h_reactivate(CtrlNode,OurNodes,ACTstorage) of
- {ok,Results} -> % A list of {Node,Result}.
- if
- OtherNodes==[] -> % Normal case, no non-session nodes.
- {reply,{ok,Results},LD};
- true -> % Add error values for non-session nodes.
- {reply,
- {ok,
- lists:map(fun(N)->{N,{error,not_in_session}} end,OtherNodes)++
- Results},
- LD}
- end;
- {error,Reason} -> % Then this error takes precedence.
- {reply,{error,Reason},LD}
- end;
-%% ------------------------------------------------------------------------------
-
-%% Call-back for set trace-pattern for both global and local functions.
-handle_call({tp,PatternFunc,Mod,F,A,MS,Opts},_From,LD) ->
- Reply=h_tp(all,PatternFunc,Mod,F,A,MS,Opts,LD), % For all active nodes in the session.
- {reply,Reply,LD};
-handle_call({tp,PatternFunc,Nodes,Mod,F,A,MS,Opts},_From,LD) ->
- RTStates=get_rtstates_ld(LD),
- SNodes=get_all_session_nodes_rtstates(RTStates), % Nodes belonging to the session.
- {Nodes2,FaultyNodes}=remove_nodes_not_ours(Nodes,SNodes),
- Reply=h_tp(Nodes2,PatternFunc,Mod,F,A,MS,Opts,LD),
- ErrorReply=lists:map(fun(N)->{N,{error,not_in_session}} end,FaultyNodes),
- {reply,ErrorReply++Reply,LD};
-%% ------------------------------------------------------------------------------
-
-%% Call-back handling the removal of both local and global trace-patterns.
-%% NOT IMPLEMENTED YET.
-handle_call({ctp,PatternFunc,Nodes,Mod,F,A},_From,LD) ->
- Reply=h_ctp(Nodes,PatternFunc,Mod,F,A,LD),
- {reply,Reply,LD};
-%% ------------------------------------------------------------------------------
-
-handle_call({tpm_localnames,Nodes},_From,LD) ->
- RTStates=get_rtstates_ld(LD),
- OurNodes=get_all_session_nodes_rtstates(RTStates),
- {Nodes2,NotOurNodes}=remove_nodes_not_ours(Nodes,OurNodes),
- ACTstorage=get_actstorage_ld(LD),
- {Reply,NewACTstorage}=
- h_tpm_localnames(get_ctrlnode_ld(LD),Nodes2,RTStates,ACTstorage),
- ErrorReply=lists:map(fun(N)->{N,{error,not_in_session}} end,NotOurNodes),
- {reply,ErrorReply++Reply,put_actstorage_ld(NewACTstorage,LD)};
-
-handle_call({init_tpm,Nodes,Mod,Func,Arity,CallFunc},_From,LD) ->
- RTStates=get_rtstates_ld(LD),
- OurNodes=get_all_session_nodes_rtstates(RTStates),
- {Nodes2,NotOurNodes}=remove_nodes_not_ours(Nodes,OurNodes),
- ACTstorage=get_actstorage_ld(LD),
- {Reply,NewACTstorage}=
- h_all_tpm(get_ctrlnode_ld(LD),
- Nodes2,
- init_tpm,
- [Mod,Func,Arity,CallFunc],
- RTStates,
- ACTstorage),
- ErrorReply=lists:map(fun(N)->{N,{error,not_in_session}} end,NotOurNodes),
- {reply,ErrorReply++Reply,put_actstorage_ld(NewACTstorage,LD)};
-
-handle_call({init_tpm,Nodes,Mod,Func,Arity,InitFunc,CallFunc,ReturnFunc,RemoveFunc},_From,LD) ->
- RTStates=get_rtstates_ld(LD),
- OurNodes=get_all_session_nodes_rtstates(RTStates),
- {Nodes2,NotOurNodes}=remove_nodes_not_ours(Nodes,OurNodes),
- ACTstorage=get_actstorage_ld(LD),
- {Reply,NewACTstorage}=
- h_all_tpm(get_ctrlnode_ld(LD),
- Nodes2,
- init_tpm,
- [Mod,Func,Arity,InitFunc,CallFunc,ReturnFunc,RemoveFunc],
- RTStates,
- ACTstorage),
- ErrorReply=lists:map(fun(N)->{N,{error,not_in_session}} end,NotOurNodes),
- {reply,ErrorReply++Reply,put_actstorage_ld(NewACTstorage,LD)};
-
-handle_call({tpm,Nodes,Mod,Func,Arity,MS},_From,LD) ->
- RTStates=get_rtstates_ld(LD),
- OurNodes=get_all_session_nodes_rtstates(RTStates),
- {Nodes2,NotOurNodes}=remove_nodes_not_ours(Nodes,OurNodes),
- ACTstorage=get_actstorage_ld(LD),
- {Reply,NewACTstorage}=
- h_all_tpm(get_ctrlnode_ld(LD),Nodes2,tpm,[Mod,Func,Arity,MS],RTStates,ACTstorage),
- ErrorReply=lists:map(fun(N)->{N,{error,not_in_session}} end,NotOurNodes),
- {reply,ErrorReply++Reply,put_actstorage_ld(NewACTstorage,LD)};
-
-handle_call({tpm,Nodes,Mod,Func,Arity,MS,CallFunc},_From,LD) ->
- RTStates=get_rtstates_ld(LD),
- OurNodes=get_all_session_nodes_rtstates(RTStates),
- {Nodes2,NotOurNodes}=remove_nodes_not_ours(Nodes,OurNodes),
- ACTstorage=get_actstorage_ld(LD),
- {Reply,NewACTstorage}=
- h_all_tpm(get_ctrlnode_ld(LD),
- Nodes2,
- tpm,
- [Mod,Func,Arity,MS,CallFunc],
- RTStates,
- ACTstorage),
- ErrorReply=lists:map(fun(N)->{N,{error,not_in_session}} end,NotOurNodes),
- {reply,ErrorReply++Reply,put_actstorage_ld(NewACTstorage,LD)};
-
-handle_call({tpm,Nodes,Mod,Func,Arity,MS,InitFunc,CallFunc,ReturnFunc,RemoveFunc},_From,LD) ->
- RTStates=get_rtstates_ld(LD),
- OurNodes=get_all_session_nodes_rtstates(RTStates),
- {Nodes2,NotOurNodes}=remove_nodes_not_ours(Nodes,OurNodes),
- ACTstorage=get_actstorage_ld(LD),
- {Reply,NewACTstorage}=
- h_all_tpm(get_ctrlnode_ld(LD),
- Nodes2,
- tpm,
- [Mod,Func,Arity,MS,InitFunc,CallFunc,ReturnFunc,RemoveFunc],
- RTStates,
- ACTstorage),
- ErrorReply=lists:map(fun(N)->{N,{error,not_in_session}} end,NotOurNodes),
- {reply,ErrorReply++Reply,put_actstorage_ld(NewACTstorage,LD)};
-
-handle_call({tpm_ms,Nodes,Mod,Func,Arity,MSname,MS},_From,LD) ->
- RTStates=get_rtstates_ld(LD),
- OurNodes=get_all_session_nodes_rtstates(RTStates),
- {Nodes2,NotOurNodes}=remove_nodes_not_ours(Nodes,OurNodes),
- ACTstorage=get_actstorage_ld(LD),
- {Reply,NewACTstorage}=
- h_all_tpm(get_ctrlnode_ld(LD),
- Nodes2,
- tpm_ms,
- [Mod,Func,Arity,MSname,MS],
- RTStates,
- ACTstorage),
- ErrorReply=lists:map(fun(N)->{N,{error,not_in_session}} end,NotOurNodes),
- {reply,ErrorReply++Reply,put_actstorage_ld(NewACTstorage,LD)};
-
-handle_call({ctpm_ms,Nodes,Mod,Func,Arity,MSname},_From,LD) ->
- RTStates=get_rtstates_ld(LD),
- OurNodes=get_all_session_nodes_rtstates(RTStates),
- {Nodes2,NotOurNodes}=remove_nodes_not_ours(Nodes,OurNodes),
- ACTstorage=get_actstorage_ld(LD),
- {Reply,NewACTstorage}=
- h_all_tpm(get_ctrlnode_ld(LD),
- Nodes2,
- ctpm_ms,
- [Mod,Func,Arity,MSname],
- RTStates,
- ACTstorage),
- ErrorReply=lists:map(fun(N)->{N,{error,not_in_session}} end,NotOurNodes),
- {reply,ErrorReply++Reply,put_actstorage_ld(NewACTstorage,LD)};
-
-handle_call({ctpm,Nodes,Mod,Func,Arity},_From,LD) ->
- RTStates=get_rtstates_ld(LD),
- OurNodes=get_all_session_nodes_rtstates(RTStates),
- {Nodes2,NotOurNodes}=remove_nodes_not_ours(Nodes,OurNodes),
- ACTstorage=get_actstorage_ld(LD),
- {Reply,NewACTstorage}=
- h_all_tpm(get_ctrlnode_ld(LD),Nodes2,ctpm,[Mod,Func,Arity],RTStates,ACTstorage),
- ErrorReply=lists:map(fun(N)->{N,{error,not_in_session}} end,NotOurNodes),
- {reply,ErrorReply++Reply,put_actstorage_ld(NewACTstorage,LD)};
-%% ------------------------------------------------------------------------------
-
-%% Call-back for setting process trace-flags. Handles both distributed and non-
-%% distributed case.
-handle_call({tf,TraceConfList},From,LD) ->
- handle_call({tf,all,TraceConfList},From,LD);
-handle_call({tf,Nodes,TraceConfList},_From,LD) ->
- {Reply,NewACTstorage}=h_tf(get_ctrlnode_ld(LD),
- Nodes,
- TraceConfList,
- get_actstorage_ld(LD),
- get_rtstates_ld(LD)),
- {reply,Reply,put_actstorage_ld(NewACTstorage,LD)};
-%% ------------------------------------------------------------------------------
-
-
-
-handle_call(get_loopdata,_From,LD) ->
- io:format("The loopdata:~n~p~n",[LD]),
- {reply,ok,LD}.
-%% ------------------------------------------------------------------------------
-
-
-%% Clause handling an incoming state-change event from the control component.
-%% Note that it does not have to be one of our nodes since it is not possible
-%% to subscribe only to events for certain nodes.
-%% We may very well get state-change events for state-changes that we ourselves
-%% are the source of. Those state-changes are already incorporated into the RTStates.
-%% There is however no harm in doing them again, since we know that this event
-%% message will reach us before the reply to any subsequent state-change
-%% request does. Hence we will apply all state-changes in the correct order,
-%% even if some are applied twice.
-handle_info({trace_event,CtrlPid,_Time,{state_change,Node,{State,Status}}},LD) ->
- case get_ctrlpid_ld(LD) of
- CtrlPid -> % It is from our control component.
- case {State,Status} of
- {?TRACING,?RUNNING} -> % This is the only case where there is new tracerdata!
- NewTracerData=add_current_tracerdata_ld(get_ctrlnode_ld(LD),
- Node,
- get_rtstates_ld(LD),
- get_trdstorage_ld(LD)),
- NewRTStates=statechange_rtstates(Node,State,Status,get_rtstates_ld(LD)),
- {noreply,put_trdstorage_ld(NewTracerData,
- put_rtstates_ld(NewRTStates,LD))};
- _ -> % In all other cases, just fix rtstates.
- NewRTStates=statechange_rtstates(Node,State,Status,get_rtstates_ld(LD)),
- {noreply,put_rtstates_ld(NewRTStates,LD)}
- end;
- _ ->
- {noreply,LD}
- end;
-%% If a new runtime component connects to our trace control component, and it is
-%% in our list of runtime components belonging to this session, we may update its
-%% state to now being present. Otherwise it does not belong to this session.
-%% Note that we avoid updating an already connected runtime component. This
-%% can happen if it connected by itself after we started the session handler,
-%% but before we managed to initiate tracing. Updating or not makes no difference
-%% in the long run, but during a short period of time we might be prevented from
-%% doing things with the runtime even though it actually is tracing.
-handle_info({trace_event,CtrlPid,_Time,{connected,Node,{_Tag,{State,Status}}}},LD) ->
- case get_ctrlpid_ld(LD) of
- CtrlPid -> % It is from our control component.
- case get_statestatus_rtstates(Node,get_rtstates_ld(LD)) of
- {ok,unavailable} -> % This is the situation when we update!
- NewRTStates=statechange_rtstates(Node,State,Status,get_rtstates_ld(LD)),
- {noreply,put_rtstates_ld(NewRTStates,LD)};
- _ -> % In all other cases, let it be.
- {noreply,LD}
- end;
- _ -> % Not from our control component.
- {noreply,LD}
- end;
-%% If a runtime component disconnects we mark it as unavailable. We must also
-%% remove all saved trace-flags so that they are not accidentally reactivated
-%% should the runtime component reconnect and then suspend.
-handle_info({trace_event,CtrlPid,_Time,{disconnected,Node,_}},LD) ->
- case get_ctrlpid_ld(LD) of
- CtrlPid -> % It is from our control component.
- NewRTStates=set_unavailable_rtstates(Node,get_rtstates_ld(LD)),
- NewACTstorage=del_node_actstorage(Node,get_actstorage_ld(LD)),
- {noreply,put_actstorage_ld(NewACTstorage,put_rtstates_ld(NewRTStates,LD))};
- _ ->
- {noreply,LD}
- end;
-handle_info(_,LD) ->
- {noreply,LD}.
-%% ------------------------------------------------------------------------------
-
-%% In terminate we cancel our subscription to events from the trace control component.
-%% That should actually not be necessary, but let's do it the correct way!
-terminate(_,LD) ->
- case get_ctrlnode_ld(LD) of
- void -> % Non-distributed.
- inviso:unsubscribe();
- Node ->
- inviso_tool_lib:inviso_cmd(Node,unsubscribe,[])
- end.
-%% ------------------------------------------------------------------------------
-
-
-
-%% ==============================================================================
-%% First level help functions to call-backs.
-%% ==============================================================================
-
-%% ------------------------------------------------------------------------------
-%% Help functions to init.
-%% ------------------------------------------------------------------------------
-
-%% Help function which finds out the state/status of the runtime components.
-%% Note that since we have just started subscribing to state changes we must
-%% check our message queue to see that we have no waiting messages for the nodes
-%% we learned the state/status of. If there is a waiting message we don't
-%% know whether that was a state change received before or after the state
-%% check was done. We will then redo the state-check.
-%% Returns {ok,States} or {error,Reason}.
-%% Where States is [{Node,{State,Status},Opts},...].
-%% Note that {error,Reason} cannot occur in the non-distributed case.
-init_rtcomponent_states(NodeParams,void,CtrlPid,Nodes) -> % The non-distributed case.
- ok=inviso:subscribe(),
- init_rtcomponent_states_2(NodeParams,void,CtrlPid,Nodes,[]);
-init_rtcomponent_states(NodeParams,CtrlNode,CtrlPid,Nodes) ->
- ok=inviso_tool_lib:inviso_cmd(CtrlNode,subscribe,[]),
- init_rtcomponent_states_2(NodeParams,CtrlNode,CtrlPid,Nodes,[]).
-
-init_rtcomponent_states_2(_,_,_,[],States) ->
- {ok,States};
-init_rtcomponent_states_2(NodeParams,void,CtrlPid,_Nodes,States) ->
- case inviso:get_status() of
- {ok,StateStatus} -> % Got its state/status, now...
- {ProblemNodes,NewStates}=
- init_rtcomponent_states_3(NodeParams,CtrlPid,[{?LOCAL_RUNTIME,{ok,StateStatus}}],
- [],States),
- init_rtcomponent_states_2(NodeParams,void,CtrlPid,ProblemNodes,NewStates);
- {error,_Reason} -> % The runtime is not available!?
- {ok,[{?LOCAL_RUNTIME,unavailable,[]}]} % Create the return value immediately.
- end;
-init_rtcomponent_states_2(NodeParams,CtrlNode,CtrlPid,Nodes,States) ->
- case inviso_tool_lib:inviso_cmd(CtrlNode,get_status,[Nodes]) of
- {ok,NodeResult} ->
- {ProblemNodes,NewStates}=
- init_rtcomponent_states_3(NodeParams,CtrlPid,NodeResult,[],States),
- init_rtcomponent_states_2(NodeParams,CtrlNode,CtrlPid,ProblemNodes,NewStates);
- {error,Reason} -> % Severe problem, abort the session.
- {error,{get_status,Reason}}
- end.
-
-%% Traverses the list of return values and checks that we do not have an event
-%% waiting in the message queue. If we do, it is a problem. That node will
-%% be asked about its state again.
-%% Note that it is here we construct the RTStatesList.
-init_rtcomponent_states_3(NodeParams,CtrlPid,[{Node,{ok,{State,Status}}}|Rest],Problems,States) ->
- receive
- {trace_event,CtrlPid,_Time,{state_change,Node,_}} ->
- init_rtcomponent_states_3(NodeParams,CtrlPid,Rest,[Node|Problems],States)
- after
- 0 -> % Not in msg queue, then we're safe!
- RTState=case lists:keysearch(Node,1,NodeParams) of
- {value,{_Node,_TracerData,Opts}} ->
- {Node,{State,Status},Opts};
- _ -> % No option available, use [].
- {Node,{State,Status},[]}
- end,
- init_rtcomponent_states_3(NodeParams,CtrlPid,Rest,Problems,[RTState|States])
- end;
-init_rtcomponent_states_3(NodeParams,CtrlPid,[{Node,{error,_Reason}}|Rest],Problems,States) ->
- RTState=case lists:keysearch(Node,1,NodeParams) of
- {value,{_Node,_TracerData,Opts}} ->
- {Node,unavailable,Opts};
- _ -> % No option available, use [].
- {Node,unavailable,[]}
- end,
- init_rtcomponent_states_3(NodeParams,CtrlPid,Rest,Problems,[RTState|States]);
-init_rtcomponent_states_3(_,_,[],Problems,States) ->
- {Problems,States}.
-%% ------------------------------------------------------------------------------
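-
-%% Illustrative sketch (not part of the original module): the receive ... after 0
-%% construct used above is the standard Erlang idiom for polling the message
-%% queue without blocking. A minimal standalone version, with a hypothetical
-%% Marker term standing in for the node name, could look like:
-%%
-%%   has_pending_state_change(CtrlPid,Marker) ->
-%%       receive
-%%           {trace_event,CtrlPid,_Time,{state_change,Marker,_}} -> true
-%%       after 0 -> false
-%%       end.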
-
-%% Help function removing nodes from NodeParams. The reason for this can either
-%% be that we are using a tool internal tracerdata that shall not be forwarded to
-%% the trace control component, or that the node is actually already part of
-%% another session.
-%% Returns {NewNodeParams,NodesWhichShallNotBeInitiated}.
-remove_nodeparams(Nodes,NodesParams) ->
- remove_nodeparams_2(Nodes,NodesParams,[],[]).
-
-remove_nodeparams_2(Nodes,[NodeParam|Rest],NPAcc,NAcc) when % NPAcc=NodeParamsAcc.
- (is_tuple(NodeParam) and ((size(NodeParam)==2) or (size(NodeParam)==3))) ->
- Node=element(1,NodeParam),
- Params=element(2,NodeParam), % This is tracerdata!
- case lists:member(Node,Nodes) of
- true -> % Remove this one, in another session.
- remove_nodeparams_2(Nodes,Rest,NPAcc,NAcc);
- false -> % Ok so far...
- case is_tool_internal_tracerdata(Params) of
- false -> % Then keep it and use it later!
- remove_nodeparams_2(Nodes,Rest,[{Node,Params}|NPAcc],NAcc);
- true -> % Since it is, remove it from the list.
- remove_nodeparams_2(Nodes,Rest,NPAcc,[Node|NAcc])
- end
- end;
-remove_nodeparams_2(Nodes,[_|Rest],NPAcc,NAcc) -> % Faulty NodeParam, skip it!
- remove_nodeparams_2(Nodes,Rest,NPAcc,NAcc);
-remove_nodeparams_2(_,[],NPAcc,NAcc) ->
- {lists:reverse(NPAcc),NAcc}.
-%% ------------------------------------------------------------------------------
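-
-%% Illustrative example (with assumed node names and tracerdata terms): given
-%%   Nodes=[n2@host] and
-%%   NodesParams=[{n1@host,TD1},{n2@host,TD2,[]},{n3@host,TD3}]
-%% remove_nodeparams/2 above would return {[{n1@host,TD1},{n3@host,TD3}],[]},
-%% since n2@host is in another session and no tracerdata is currently
-%% considered tool-internal.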
-
-%% Help function which adds both the nodes which were already part of another
-%% session and the nodes that we actually did not issue any init_tracing for.
-%% Returns a new Result list of [{Node,NodeResult},...].
-init_fix_resultnodes(NodesOtherSes,NodesNotInit,Result) ->
- NewResult=init_fix_resultnodes_2(NodesOtherSes,{error,in_other_session},Result),
- init_fix_resultnodes_2(NodesNotInit,ok,NewResult).
-
-init_fix_resultnodes_2([Node|Rest],NodeResult,Result) ->
- [{Node,NodeResult}|init_fix_resultnodes_2(Rest,NodeResult,Result)];
-init_fix_resultnodes_2([],_,Result) ->
- Result. % Append Result to the end of the list.
-%% ------------------------------------------------------------------------------
-
-
-%% ------------------------------------------------------------------------------
-%% Help functions to reactivate.
-%% ------------------------------------------------------------------------------
-
-h_reactivate(CtrlNode,Nodes,ACTstorage) -> % Distributed case.
- case inviso_tool_lib:inviso_cmd(CtrlNode,cancel_suspension,[Nodes]) of
- {ok,CSuspResults} ->
- {GoodNodes,BadResults}= % Sort out nodes no longer suspended.
- lists:foldl(fun({Node,ok},{GoodNs,BadNs})->
- {[Node|GoodNs],BadNs};
- ({Node,{error,Reason}},{GoodNs,BadNs})->
- {GoodNs,[{Node,{error,{cancel_suspension,Reason}}}|BadNs]}
- end,
- {[],[]},
- CSuspResults),
- Results=h_reactivate_redo_activity(CtrlNode,GoodNodes,ACTstorage,[]),
- {ok,BadResults++Results};
- {error,Reason} -> % General failure cancelling suspend.
- {error,{cancel_suspension,Reason}}
- end.
-%% ------------------------------------------------------------------------------
-
-%% Help function which traverses the list of nodes known to be ours and have
-%% cancelled their suspend. If we fail redoing one of the activities associated
-%% with a node, the node will be reported in the return value as failed. From
-%% that point on its state must be considered unknown since we do not know how
-%% many of the activities were successfully redone.
-h_reactivate_redo_activity(CtrlNode,[Node|Rest],ACTstorage,Acc) ->
- case get_activities_actstorage(Node,ACTstorage) of
- {ok,Activities} -> % The node existed in activity storage.
- {Good,Bad}=h_reactivate_redo_activity_2(CtrlNode,Node,Activities,0,0),
- h_reactivate_redo_activity(CtrlNode,Rest,ACTstorage,[{Node,{Good,Bad}}|Acc]);
- false -> % Node not present in activity storage.
- h_reactivate_redo_activity(CtrlNode,Rest,ACTstorage,[{Node,{0,0}}|Acc])
- end;
-h_reactivate_redo_activity(_CtrlNode,[],_,Acc) ->
- lists:reverse(Acc).
-
-%% Help function actually redoing the activity. Note that there must be one
-%% clause here for every type of activity.
-%% Returns {NrGoodCmds,NrBadCmds}.
-%% The number of good or bad commands refers to inviso commands done. If any
-%% of the subparts of such a command returned an error, the command is considered
-%% no good.
-h_reactivate_redo_activity_2(CtrlNode,Node,[{tf,{Op,TraceConfList}}|Rest],Good,Bad) ->
- case inviso_tool_lib:inviso_cmd(CtrlNode,Op,[[Node],TraceConfList]) of
- {ok,[{_Node,{ok,Answers}}]} ->
- case h_reactivate_redo_activity_check_tf(Answers) of
- ok ->
- h_reactivate_redo_activity_2(CtrlNode,Node,Rest,Good+1,Bad);
- error -> % At least one of the answers was an error.
- h_reactivate_redo_activity_2(CtrlNode,Node,Rest,Good,Bad+1)
- end;
- {ok,[{_Node,{error,_Reason}}]} ->
- h_reactivate_redo_activity_2(CtrlNode,Node,Rest,Good,Bad+1);
- {error,_Reason} -> % General error when doing cmd.
- h_reactivate_redo_activity_2(CtrlNode,Node,Rest,Good,Bad+1)
- end;
-h_reactivate_redo_activity_2(CtrlNode,Node,[{tpm,{Op,InvisoCmdParams}}|Rest],Good,Bad) ->
- case inviso_tool_lib:inviso_cmd(CtrlNode,Op,[[Node]|InvisoCmdParams]) of
- {ok,[{_Node,ok}]} ->
- h_reactivate_redo_activity_2(CtrlNode,Node,Rest,Good+1,Bad);
- {ok,[{_Node,{error,_Reason}}]} ->
- h_reactivate_redo_activity_2(CtrlNode,Node,Rest,Good,Bad+1);
- {error,_Reason} -> % General error when doing cmd.
- h_reactivate_redo_activity_2(CtrlNode,Node,Rest,Good,Bad+1)
- end;
-h_reactivate_redo_activity_2(_CtrlNode,_Node,[],Good,Bad) ->
- {Good,Bad}.
-
-%% Help function traversing a list of results from inviso:tf/2 or inviso:ctf/2
-%% to see if there were any errors.
-h_reactivate_redo_activity_check_tf([N|Rest]) when integer(N) ->
- h_reactivate_redo_activity_check_tf(Rest);
-h_reactivate_redo_activity_check_tf([{error,_Reason}|_]) ->
- error;
-h_reactivate_redo_activity_check_tf([]) ->
- ok.
-%% ------------------------------------------------------------------------------
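-
-%% Illustrative example (assumed answer lists): a result list from a trace-flag
-%% call is a mix of integers and error tuples. With the function above,
-%%   h_reactivate_redo_activity_check_tf([2,0,1])              -> ok
-%%   h_reactivate_redo_activity_check_tf([2,{error,badarg},1]) -> error
-%% so a redone command is counted as bad as soon as one answer is an error.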
-
-
-%% ------------------------------------------------------------------------------
-%% Help functions to tp (setting trace patterns, both local and global).
-%% ------------------------------------------------------------------------------
-
-%% Help function which handles both tpl and tp. Note that the non-distributed case
-%% is handled with Nodes='all'.
-%% Returns what shall be the reply to the client.
-h_tp(all,PatternFunc,Mod,F,A,MS,Opts,LD) -> % All available runtime nodes.
- Nodes=get_all_available_nodes_rtstates(get_rtstates_ld(LD)),
- h_tp(Nodes,PatternFunc,Mod,F,A,MS,Opts,LD);
-h_tp(Nodes,PatternFunc,Mod,F,A,MS,Opts,LD) -> % Only certain nodes in the session.
- CtrlNode=get_ctrlnode_ld(LD),
- Dbg=get_dbg_ld(LD),
- SafetyCatches=get_safetycatches_ld(LD),
- case inviso_tool_lib:expand_module_names(Nodes,Mod,Opts) of % Take care of any reg-exps.
- {multinode_expansion,NodeMods} ->
- NodeTPs=inviso_tool_lib:make_patterns(SafetyCatches,Opts,Dbg,NodeMods,F,A,MS),
- h_tp_node_by_node(CtrlNode,PatternFunc,Dbg,NodeTPs,[]);
- {singlenode_expansion,Modules} ->
- TPs=inviso_tool_lib:make_patterns(SafetyCatches,Opts,Dbg,Modules,F,A,MS),
- h_tp_do_tps(CtrlNode,Nodes,TPs,PatternFunc,Dbg);
- module ->
- TPs=inviso_tool_lib:make_patterns(SafetyCatches,Opts,Dbg,[Mod],F,A,MS),
- h_tp_do_tps(CtrlNode,Nodes,TPs,PatternFunc,Dbg);
- wildcard -> % Means do for all modules, no safety.
- h_tp_do_tps(CtrlNode,Nodes,[{Mod,F,A,MS}],PatternFunc,Dbg);
- {error,Reason} ->
- {error,Reason}
- end.
-
-%% Note that this function can never be called in the non-distributed case.
-h_tp_node_by_node(CtrlNode,PatternFunc,Dbg,[{Node,TPs}|Rest],Accum) ->
- case h_tp_do_tps(CtrlNode,[Node],TPs,PatternFunc,Dbg) of
- {ok,[{Node,Result}]} ->
- h_tp_node_by_node(CtrlNode,PatternFunc,Dbg,Rest,[{Node,Result}|Accum]);
- {error,Reason} -> % Failure, but don't stop.
- h_tp_node_by_node(CtrlNode,PatternFunc,Dbg,Rest,[{Node,{error,Reason}}|Accum])
- end;
-h_tp_node_by_node(_,_,_,[],Accum) ->
- {ok,lists:reverse(Accum)}.
-
-%% Help function which does the actual call to the trace control component.
-%% Note that Nodes can be a list of nodes (including a single one) or
-%% ?LOCAL_RUNTIME if we are not distributed. The non-distributed case is otherwise
-%% detected by the 'void' CtrlNode.
-%% Returns {ok,[{Node,{ok,{NrOfFunctions,NrOfErrors}}},{Node,{error,Reason}},...]} or
-%% {error,Reason}. In the non-distributed case {ok,{NrOfFunctions,NrOfErrors}} or
-%% {error,Reason}.
-h_tp_do_tps(void,_Nodes,TPs,PatternFunc,Dbg) -> % Non distributed case!
- inviso_tool_lib:debug(tp,Dbg,[TPs,PatternFunc]),
- case inviso:PatternFunc(TPs) of
- {ok,Result} -> % A list of [Nr1,Nr2,error,...].
- {ok,
- lists:foldl(fun(N,{AccNr,AccErr}) when integer(N) ->
- {AccNr+N,AccErr};
- (error,{AccNr,AccErr}) ->
- {AccNr,AccErr+1}
- end,
- {0,0},
- Result)};
- {error,Reason} ->
- {error,{PatternFunc,Reason}}
- end;
-h_tp_do_tps(CtrlNode,Nodes,TPs,PatternFunc,Dbg) ->
- inviso_tool_lib:debug(tp,Dbg,[Nodes,TPs,PatternFunc]),
- case inviso_tool_lib:inviso_cmd(CtrlNode,PatternFunc,[Nodes,TPs]) of
- {ok,Result} -> % Result is [{Node,Result},...].
- {ok,
- lists:map(fun({Node,{ok,Res}})->
- {Node,lists:foldl(fun(N,{ok,{AccNr,AccErr}}) when integer(N) ->
- {ok,{AccNr+N,AccErr}};
- (error,{AccNr,AccErr}) ->
- {ok,{AccNr,AccErr+1}}
- end,
- {ok,{0,0}},
- Res)};
- ({_Node,{error,Reason}})->
- {error,Reason}
- end,
- Result)};
- {error,Reason} ->
- {error,{PatternFunc,Reason}}
- end.
-%% ------------------------------------------------------------------------------
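-
-%% Illustrative example (assumed result list): in the non-distributed clause
-%% above, a Result such as [3,error,2] from inviso would be folded into
-%% {ok,{5,1}}, i.e. five functions and one error, matching the
-%% {NrOfFunctions,NrOfErrors} description above.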
-
-%% ------------------------------------------------------------------------------
-%% Help functions for removing trace-patterns.
-%% ------------------------------------------------------------------------------
-
-%% NOT IMPLEMENTED YET.
-h_ctp(Node,PatternFunc,Mod,F,A,LD) ->
- tbd.
-%% ------------------------------------------------------------------------------
-
-
-%% ------------------------------------------------------------------------------
-%% Help functions for calling the trace information facility.
-%% ------------------------------------------------------------------------------
-
-
-%% Function handling the meta trace pattern for capturing registration of local
-%% process names.
-h_tpm_localnames(CtrlNode,Nodes,RTStates,ACTstorage) ->
- AvailableNodes=get_all_available_nodes_rtstates(RTStates),
- {Nodes3,FaultyNodes}=remove_nodes_not_ours(Nodes,AvailableNodes),
- case inviso_tool_lib:inviso_cmd(CtrlNode,tpm_localnames,[Nodes3]) of
- {ok,Result} -> % Went well, so we want to modify the tpm storage!
- NewACTstorage=add_tpm_actstorage(Result,tpm_localnames,[],ACTstorage),
- ErrorResult=lists:map(fun(N)->{N,{error,not_available}} end,FaultyNodes),
- {{ok,ErrorResult++Result},NewACTstorage};
- {error,Reason} -> % If general failure, do not modify storage.
- {{error,Reason},ACTstorage}
- end.
-%% ------------------------------------------------------------------------------
-
-%% Function calling meta trace functions for specified nodes. This function is
-%% intended for use with all tpm function calls: init_tpm, tpm, tpm_ms, ctpm_ms and
-%% ctpm.
-%% Note that we must store called meta trace functions and their parameters in the
-%% activity storage in order to be able to redo them in case of a reactivate.
-h_all_tpm(CtrlNode,Nodes,TpmCmd,InvisoCmdParams,RTStates,ACTstorage) ->
- AvailableNodes=get_all_available_nodes_rtstates(RTStates),
- {Nodes3,FaultyNodes}=remove_nodes_not_ours(Nodes,AvailableNodes),
- case inviso_tool_lib:inviso_cmd(CtrlNode,TpmCmd,[Nodes3|InvisoCmdParams]) of
- {ok,Result} -> % Went well, so we want to modify the tpm storage!
- NewACTstorage=add_tpm_actstorage(Result,TpmCmd,InvisoCmdParams,ACTstorage),
- ErrorResult=lists:map(fun(N)->{N,{error,not_available}} end,FaultyNodes),
- {{ok,ErrorResult++Result},NewACTstorage};
- {error,Reason} -> % If general failure, do not modify storage.
- {{error,Reason},ACTstorage}
- end.
-%% ------------------------------------------------------------------------------
-
-
-%% ------------------------------------------------------------------------------
-%% Help functions for setting trace flags.
-%% ------------------------------------------------------------------------------
-
-%% Help function which sets the trace flags in TraceConfList for all nodes
-%% mentioned in Nodes. Note that the non-distributed case is handled with Nodes='all'.
-%% Returns {Reply,NewACTstorage} where Reply is whatever shall be returned to the caller
-%% and NewACTstorage is the trace-flag storage modified with the flags added to the
-%% corresponding nodes.
-h_tf(void,_Nodes,TraceConfList,ACTstorage,_RTStates) -> % The non-distributed case.
- Reply=inviso:tf(TraceConfList),
- NewACTstorage=add_tf_actstorage([{?LOCAL_RUNTIME,Reply}],tf,TraceConfList,ACTstorage),
- {Reply,NewACTstorage};
-h_tf(CtrlNode,all,TraceConfList,ACTstorage,RTStates) ->
- AllNodes=get_all_session_nodes_rtstates(RTStates),
- h_tf(CtrlNode,AllNodes,TraceConfList,ACTstorage,RTStates);
-h_tf(CtrlNode,Nodes,TraceConfList,ACTstorage,_RTStates) ->
- case inviso_tool_lib:inviso_cmd(CtrlNode,tf,[Nodes,TraceConfList]) of
- {ok,Result} -> % Went well, so we want to modify the activity storage!
- NewACTstorage=add_tf_actstorage(Result,tf,TraceConfList,ACTstorage),
- {{ok,Result},NewACTstorage};
- {error,Reason} -> % If general failure, do not modify actstorage.
- {{error,Reason},ACTstorage}
- end.
-%% ------------------------------------------------------------------------------
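-
-%% Illustrative example (assumed flag list): a TraceConfList is on the form
-%% [{Proc,Flags},...] (see the activity storage description further down), so a
-%% call could look like
-%%   h_tf(void,all,[{all,[call,timestamp]}],ACTstorage,RTStates)
-%% which sets the call and timestamp flags on all processes of the local
-%% runtime component.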
-
-%% ------------------------------------------------------------------------------
-%% Help functions to stop_session.
-%% ------------------------------------------------------------------------------
-
-%% This function fetches all local log-files using our stored tracerdata. Note
-%% that there are two major ways of transferring logfiles: either via distributed
-%% Erlang or via a common filesystem (like NFS). The default is distributed Erlang,
-%% but there may be info in the RTStates structure about a common file-system.
-%% Returns {FailedNodes,FetchedFileNames} where FailedNodes is a list of
-%% nodenames where problems occurred. Note that problems do not necessarily
-%% mean that no files were copied.
-%% FetchedFileNames contains one or two of the tuples {trace_log,Files} and/or
-%% {ti_log,Files}, listing all files successfully fetched. Note that the
-%% list of fetched files contains sublists of filenames, one for each node and
-%% tracerdata.
-%% In the non-distributed system we always use copy (since the files always
-%% reside locally).
-transfer_logfiles(RTStates,CtrlNode,Dir,Prefix,TRDstorage,Dbg,AvailableNodes) ->
- if
- CtrlNode==void -> % When non-distributed, always copy!
- fetch_logfiles_copy(CtrlNode,Dir,Prefix,TRDstorage,Dbg,[?LOCAL_RUNTIME]);
- true -> % The distributed case.
- {FetchNodes,CopyNodes}=find_logfile_transfer_methods(AvailableNodes,RTStates),
- {FailedFetchNodes,FetchedFiles}=
- case fetch_logfiles_distributed(CtrlNode,Dir,Prefix,TRDstorage,Dbg,FetchNodes) of
- {ok,Failed,Files} -> % So far no disasters.
- {Failed,Files};
- {error,Reason} -> % Means all fetch-nodes failed!
- inviso_tool_lib:debug(transfer_logfiles,Dbg,[FetchNodes,Reason]),
- {lists:map(fun(N)->{N,error} end,FetchNodes),[]}
- end,
- {FailedCopyNodes,CopiedFiles}=
- fetch_logfiles_copy(CtrlNode,Dir,Prefix,TRDstorage,Dbg,CopyNodes),
- {FailedFetchNodes++FailedCopyNodes,FetchedFiles++CopiedFiles}
- end.
-
-%% Help function which finds out which nodes we have a common file system with
-%% and from which nodes we must transfer files using distributed Erlang.
-%% Returns {DistributedNodes,CopyNodes} where CopyNodes is [{Node,CopyFromDir},...].
-find_logfile_transfer_methods(Nodes,RTStates) ->
- find_logfile_transfer_methods_2(Nodes,RTStates,[],[]).
-
-find_logfile_transfer_methods_2([Node|Rest],RTStates,FetchAcc,CopyAcc) ->
- {ok,Opts}=get_opts_rtstates(Node,RTStates), % Node must be in RTStates!
- case lists:keysearch(?COPY_LOG_FROM,1,Opts) of
- {value,{_,FromDir}} when list(FromDir) -> % Node has common filesystem.
- find_logfile_transfer_methods_2(Rest,RTStates,FetchAcc,[{Node,FromDir}|CopyAcc]);
- {value,_} -> % Can't understand dir option.
- find_logfile_transfer_methods_2(Rest,RTStates,[Node|FetchAcc],CopyAcc);
- false -> % Then we want to use fetch instead.
- find_logfile_transfer_methods_2(Rest,RTStates,[Node|FetchAcc],CopyAcc)
- end;
-find_logfile_transfer_methods_2([],_,FetchAcc,CopyAcc) ->
- {FetchAcc,CopyAcc}.
-%% ------------------------------------------------------------------------------
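-
-%% Illustrative example (assumed option values): a node whose Opts contain
-%% {?COPY_LOG_FROM,"/net/n1/logs"} ends up in the copy list as
-%% {Node,"/net/n1/logs"}, while a node without that option (or with a value
-%% that is not a string) ends up in the fetch list.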
-
-%% Help function which transfers all local logfiles according to the tracerdata
-%% stored for the nodes in Nodes.
-%% Returns {ok,FailedNodes,FileNodeSpecs} or {error,Reason}.
-%% FailedNodes is a list of nodes where fetching logs did not succeed, partially
-%% or not at all.
-%% FileNames is a list of lists of actually fetched files (with names as they appear
-%% here, including Dir). The sublists are files which belong together.
-fetch_logfiles_distributed(CtrlNode,Dir,Prefix,TRDstorage,Dbg,Nodes) ->
- LogSpecList=build_logspeclist(Nodes,TRDstorage),
- case inviso_fetch_log(inviso_tool_lib:inviso_cmd(CtrlNode,
- fetch_log,
- [LogSpecList,Dir,Prefix])) of
- {ok,Result} ->
- Files=get_all_filenames_fetchlog_result(Result,Dbg),
- FailedNodes=get_all_failednodes_fetchlog_result(Result),
- {ok,FailedNodes,Files};
- {error,Reason} -> % Some general failure!
- {error,{fetch_log,Reason}}
- end.
-
-%% Help function which constructs a list {Node,TracerData} for all nodes in Nodes.
-%% Note that there may be more than one tracerdata for a node, resulting in multiple
-%% tuples for that node.
-build_logspeclist(Nodes,TRDstorage) ->
- build_logspeclist_2(Nodes,TRDstorage,[]).
-
-build_logspeclist_2([Node|Rest],TRDstorage,Acc) ->
- TRDlist=find_tracerdata_for_node_trd(Node,TRDstorage), % A list of all tracerdata.
- build_logspeclist_2(Rest,
- TRDstorage,
- [lists:map(fun(TRD)->{Node,TRD} end,TRDlist)|Acc]);
-build_logspeclist_2([],_,Acc) ->
- lists:flatten(Acc).
-
-%% Help function which translates inviso:fetch_log return values to what I
-%% want!
-inviso_fetch_log({error,Reason}) ->
- {error,Reason};
-inviso_fetch_log({_Success,ResultList}) ->
- {ok,ResultList}.
-
-%% Help function which collects all filenames mentioned in a noderesult structure.
-%% The files may or may not be complete.
-%% Returns a list of lists of filenames. Each sublist contains files which belong
-%% together, for instance because they form a wrap-set.
-get_all_filenames_fetchlog_result(NodeResult,Dbg) ->
- get_all_filenames_fetchlog_result_2(NodeResult,Dbg,[]).
-
-get_all_filenames_fetchlog_result_2([{Node,{Success,FileInfo}}|Rest],Dbg,Accum)
- when Success=/=error, list(FileInfo) ->
- SubAccum=get_all_filenames_fetchlog_result_3(FileInfo,[]),
- get_all_filenames_fetchlog_result_2(Rest,Dbg,[{Node,SubAccum}|Accum]);
-get_all_filenames_fetchlog_result_2([{Node,{error,FReason}}|Rest],Dbg,Accum) ->
- inviso_tool_lib:debug(fetch_files,Dbg,[Node,FReason]),
- get_all_filenames_fetchlog_result_2(Rest,Dbg,Accum);
-get_all_filenames_fetchlog_result_2([],_Dbg,Accum) ->
- Accum.
-
-get_all_filenames_fetchlog_result_3([{FType,Files}|Rest],SubAccum) ->
- FilesOnly=lists:foldl(fun({ok,FName},Acc)->[FName|Acc];(_,Acc)->Acc end,[],Files),
- get_all_filenames_fetchlog_result_3(Rest,[{FType,FilesOnly}|SubAccum]);
-get_all_filenames_fetchlog_result_3([],SubAccum) ->
- SubAccum.
-
-%% Help function which traverses a noderesult and builds a list as return
-%% value containing the nodenames of all nodes not being complete.
-%% Note that a node may occur multiple times since we may have fetched logfiles
-%% for several tracerdata from the same node. Makes sure the list contains
-%% unique node names.
-%% Returns a list of nodes.
-get_all_failednodes_fetchlog_result(NodeResult) ->
- get_all_failednodes_fetchlog_result_2(NodeResult,[]).
-
-get_all_failednodes_fetchlog_result_2([{_Node,{complete,_}}|Rest],Acc) ->
- get_all_failednodes_fetchlog_result_2(Rest,Acc);
-get_all_failednodes_fetchlog_result_2([{Node,{_Severity,_}}|Rest],Acc) ->
- case lists:member(Node,Acc) of
- true -> % Already in the list.
- get_all_failednodes_fetchlog_result_2(Rest,Acc);
- false -> % Not in Acc, add it!
- get_all_failednodes_fetchlog_result_2(Rest,[Node|Acc])
- end;
-get_all_failednodes_fetchlog_result_2([],Acc) ->
- Acc.
-%% ------------------------------------------------------------------------------
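-
-%% Illustrative example (assumed node names): with
-%%   NodeResult=[{n1@host,{complete,Files1}},
-%%               {n2@host,{incomplete,Files2}},
-%%               {n2@host,{error,Reason}}]
-%% the function above returns [n2@host], since only non-complete entries count
-%% and duplicates are removed.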
-
-%% Help function which copies files from one location to Dir and at the same time
-%% adds the Prefix to the filename. NodeSpecs contains full paths to the files. The
-%% reason the node information is still part of NodeSpecs is that otherwise we
-%% cannot report faulty nodes. Note that one node may occur multiple times since there
-%% may be more than one tracerdata for a node.
-%% Returns {FailedNodes,Files} where FailedNodes is a list of nodes where problems
-%% occurred. Files is a tuple list of [{Node,[{FType,FileNames},...]},...].
-fetch_logfiles_copy(CtrlNode,Dir,Prefix,TRDstorage,Dbg,NodeSpecs) ->
- CopySpecList=build_copylist(CtrlNode,Dbg,NodeSpecs,TRDstorage),
- fetch_logfiles_copy_2(Dir,Prefix,Dbg,CopySpecList,[],[]).
-
-fetch_logfiles_copy_2(Dir,Prefix,Dbg,[{Node,CopySpecs}|Rest],FailedNodes,Files) ->
- case fetch_logfiles_copy_3(Dir,Prefix,Dbg,CopySpecs,[],0) of
- {0,LocalFiles} -> % Copy went ok and zero errors.
- fetch_logfiles_copy_2(Dir,Prefix,Dbg,Rest,FailedNodes,[{Node,LocalFiles}|Files]);
- {_N,LocalFiles} -> % Copied files, but some went wrong.
- case lists:member(Node,FailedNodes) of
- true -> % Node already in FailedNodes.
- fetch_logfiles_copy_2(Dir,Prefix,Dbg,Rest,FailedNodes,
- [{Node,LocalFiles}|Files]);
- false -> % Node not marked as failed, yet.
- fetch_logfiles_copy_2(Dir,Prefix,Dbg,Rest,[Node|FailedNodes],
- [{Node,LocalFiles}|Files])
- end
- end;
-fetch_logfiles_copy_2(_,_,_,[],FailedNodes,Files) ->
- {FailedNodes,Files}. % The return value from fetch_logfiles_copy.
-
-fetch_logfiles_copy_3(Dir,Prefix,Dbg,[{FType,RemoteFiles}|Rest],Results,Errors) ->
- {Err,LocalFiles}=fetch_logfiles_copy_3_1(Dir,Prefix,Dbg,RemoteFiles,[],0),
- fetch_logfiles_copy_3(Dir,Prefix,Dbg,Rest,[{FType,LocalFiles}|Results],Errors+Err);
-fetch_logfiles_copy_3(_,_,_,[],Results,Errors) ->
- {Errors,Results}.
-
-%% For each file of one file-type (e.g. trace_log).
-fetch_logfiles_copy_3_1(Dir,Prefix,Dbg,[File|Rest],LocalFiles,Errors) ->
- DestName=Prefix++filename:basename(File),
- Destination=filename:join(Dir,DestName),
- case do_copy_file(File,Destination) of
- ok ->
- fetch_logfiles_copy_3_1(Dir,Prefix,Dbg,Rest,[DestName|LocalFiles],Errors);
- {error,Reason} ->
- inviso_tool_lib:debug(copy_files,Dbg,[File,Destination,Reason]),
- fetch_logfiles_copy_3_1(Dir,Prefix,Dbg,Rest,LocalFiles,Errors+1)
- end;
-fetch_logfiles_copy_3_1(_,_,_,[],LocalFiles,Errors) ->
- {Errors,LocalFiles}.
-
-%% Help function which builds a [{Node,[{Type,ListOfRemoteFiles},...]},...] list,
-%% where Type describes trace_log or ti_log and each entry in ListOfRemoteFiles
-%% is a complete path to a file to be copied.
-build_copylist(CtrlNode,Dbg,NodeSpecList,TRDstorage) ->
- build_copylist_2(CtrlNode,Dbg,NodeSpecList,TRDstorage,[]).
-
-%% For each node specified in the NodeSpecList.
-build_copylist_2(CtrlNode,Dbg,[{Node,SourceDir}|Rest],TRDstorage,Acc) ->
- TRDlist=find_tracerdata_for_node_trd(Node,TRDstorage),
- CopySpecList=build_copylist_3(CtrlNode,Dbg,SourceDir,Node,TRDlist),
- build_copylist_2(CtrlNode,Dbg,Rest,TRDstorage,[CopySpecList|Acc]);
-build_copylist_2(_,_,[],_,Acc) ->
- lists:flatten(Acc).
-
-%% For each tracerdata found for the node.
-build_copylist_3(void,Dbg,SourceDir,Node,[TRD|Rest]) -> % The non-distributed case.
- case inviso:list_logs(TRD) of
- {ok,FileSpec} when list(FileSpec) -> % [{trace_log,Dir,Files},...]
- NewFileSpec=build_copylist_4(SourceDir,FileSpec,[]),
- [{Node,NewFileSpec}|build_copylist_3(void,Dbg,SourceDir,Node,Rest)];
- {ok,no_log} -> % This tracedata not associated with any log.
- build_copylist_3(void,Dbg,SourceDir,Node,Rest);
- {error,Reason} ->
- inviso_tool_lib:debug(list_logs,Dbg,[Node,TRD,Reason]),
- build_copylist_3(void,Dbg,SourceDir,Node,Rest)
- end;
-build_copylist_3(CtrlNode,Dbg,SourceDir,Node,[TRD|Rest]) -> % The distributed case.
- case inviso_tool_lib:inviso_cmd(CtrlNode,list_logs,[[{Node,TRD}]]) of
- {ok,[{Node,{ok,FileSpec}}]} when list(FileSpec) ->
- NewFileSpec=build_copylist_4(SourceDir,FileSpec,[]),
- [{Node,NewFileSpec}|build_copylist_3(CtrlNode,Dbg,SourceDir,Node,Rest)];
- {ok,[{Node,{ok,no_log}}]} -> % It relays to another node, no files!
- build_copylist_3(CtrlNode,Dbg,SourceDir,Node,Rest);
- {ok,[{Node,{error,Reason}}]} ->
- inviso_tool_lib:debug(list_logs,Dbg,[Node,TRD,Reason]),
- build_copylist_3(CtrlNode,Dbg,SourceDir,Node,Rest);
- {error,Reason} -> % Some general failure.
- inviso_tool_lib:debug(list_logs,Dbg,[Node,TRD,Reason]),
- build_copylist_3(CtrlNode,Dbg,SourceDir,Node,Rest)
- end;
-build_copylist_3(_,_,_,_,[]) ->
- [].
-
-%% Help function which makes a [{Type,Files},...] list where each file in Files
-%% is with full path as found from our file-system.
-build_copylist_4(SourceDir,[{Type,_Dir,Files}|Rest],Accum) ->
- NewFiles=
- lists:foldl(fun(FName,LocalAcc)->[filename:join(SourceDir,FName)|LocalAcc] end,
- [],
- Files),
- build_copylist_4(SourceDir,Rest,[{Type,NewFiles}|Accum]);
-build_copylist_4(_,[],Accum) ->
- Accum.
-
-
-%% Help function which copies a file using os:cmd.
-%% Returns 'ok' or {error,Reason}.
-do_copy_file(Source,Destination) ->
- case os:type() of
- {win32,_} ->
- os:cmd("copy "++Source++" "++Destination), % Perhaps a test on success?
- ok;
- {unix,_} ->
- os:cmd("cp "++Source++" "++Destination), % Perhaps a test on success?
- ok
- end.
-%% ------------------------------------------------------------------------------
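-
-%% A possible alternative (sketch, not part of the original code): file:copy/2
-%% avoids shelling out and makes the success test asked for in the comments
-%% above straightforward:
-%%
-%%   do_copy_file_checked(Source,Destination) ->
-%%       case file:copy(Source,Destination) of
-%%           {ok,_BytesCopied} -> ok;
-%%           {error,Reason} -> {error,Reason}
-%%       end.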
-
-
-%% ------------------------------------------------------------------------------
-
-%% ==============================================================================
-%% Various help functions.
-%% ==============================================================================
-
-%% Help function going through the Nodes list and checking that only nodes
-%% mentioned in OurNodes get returned. It also makes the nodes in the return
-%% value unique.
-remove_nodes_not_ours(Nodes,OurNodes) ->
- remove_nodes_not_ours_2(Nodes,OurNodes,[],[]).
-
-remove_nodes_not_ours_2([Node|Rest],OurNodes,OurAcc,OtherAcc) ->
- case lists:member(Node,OurNodes) of
- true -> % Ok it is one of our nodes.
- case lists:member(Node,OurAcc) of
- true -> % Already in the list, skip.
- remove_nodes_not_ours_2(Rest,OurNodes,OurAcc,OtherAcc);
- false ->
- remove_nodes_not_ours_2(Rest,OurNodes,[Node|OurAcc],OtherAcc)
- end;
- false ->
- case lists:member(Node,OtherAcc) of
- true ->
- remove_nodes_not_ours_2(Rest,OurNodes,OurAcc,OtherAcc);
- false ->
- remove_nodes_not_ours_2(Rest,OurNodes,OurAcc,[Node|OtherAcc])
- end
- end;
-remove_nodes_not_ours_2([],_,OurAcc,OtherAcc) ->
- {lists:reverse(OurAcc),lists:reverse(OtherAcc)}.
-%% ------------------------------------------------------------------------------
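-
-%% Illustrative example (assumed node names):
-%%   remove_nodes_not_ours([a@h,b@h,a@h,c@h],[a@h,c@h,d@h])
-%% returns {[a@h,c@h],[b@h]}; duplicates are removed and the order of first
-%% occurrence is kept in both lists.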
-
-%% Help function which returns 'true' or 'false' depending on whether TracerData is
-%% meant to be used by the session handler (true) or is supposed to be passed
-%% on to the trace system.
-is_tool_internal_tracerdata(_) -> % CURRENTLY NO INTERNAL TRACER DATA!
- false.
-%% ------------------------------------------------------------------------------
-
-%% Help function which checks that all nodes in the first list of nodes exist
-%% in the second list of nodes. Returns 'true' or 'false', the latter if even
-%% one incorrect node was found.
-check_our_nodes([Node|Rest],AllNodes) ->
- case lists:member(Node,AllNodes) of
- true ->
- check_our_nodes(Rest,AllNodes);
- false -> % Then we can stop right here.
- false
- end;
-check_our_nodes([],_) ->
- true.
-%% ------------------------------------------------------------------------------
-
-%% Help function which checks that a directory actually exists. Returns 'true' or
-%% 'false'.
-check_directory_exists(Dir) ->
- case file:read_file_info(Dir) of
- {ok,#file_info{type=directory}} ->
- true;
- _ -> % In all other cases it is not valid.
- false
- end.
-%% ------------------------------------------------------------------------------
-
-%% This function stops the tracing on all nodes in Nodes. Preferably Nodes is a list
-%% of only tracing runtime components. Not that it actually makes any difference,
-%% since the return value does not reflect how stopping the nodes went.
-%% Returns 'ok' or {error,Reason}, the latter only in case of general failure.
-stop_all_tracing(void,Dbg,[?LOCAL_RUNTIME]) -> % The non-distributed case, and is tracing.
- case inviso:stop_tracing() of
- {ok,_State} ->
- ok;
- {error,Reason} -> % We actually don't care.
- inviso_tool_lib:debug(stop_tracing,Dbg,[?LOCAL_RUNTIME,Reason]),
- ok
- end;
-stop_all_tracing(void,_,_) -> % There is no local runtime started.
- ok;
-stop_all_tracing(CtrlNode,Dbg,Nodes) ->
- case inviso_tool_lib:inviso_cmd(CtrlNode,stop_tracing,[Nodes]) of
- {ok,Result} -> % The result is only used for debug.
- Failed=lists:foldl(fun({N,{error,Reason}},Acc)->[{N,{error,Reason}}|Acc];
- (_,Acc)->Acc
- end,
- [],
- Result),
- if
- Failed==[] ->
- ok;
- true ->
- inviso_tool_lib:debug(stop_tracing,Dbg,[Nodes,Failed]),
- ok
- end;
- {error,Reason} ->
- {error,{stop_tracing,Reason}}
- end.
-%% ------------------------------------------------------------------------------
-
-%% Help function removing all local logs using the tracerdata to determine what
-%% logs to remove from where.
-%% There is no significant return value since it is not really clear what to do
-%% if removal went wrong. The function can make debug-reports though.
-remove_all_local_logs(CtrlNode,TRDstorage,Nodes,Dbg) ->
- LogSpecList=build_logspeclist_remove_logs(Nodes,TRDstorage),
- case inviso_tool_lib:inviso_cmd(CtrlNode,delete_log,[LogSpecList]) of
- {ok,Results} ->
- case look_for_errors_resultlist(Results) of
- [] -> % No errors found in the result!
- true;
- Errors ->
- inviso_tool_lib:debug(remove_all_local_logs,Dbg,[Errors]),
- true
- end;
- {error,Reason} -> % Some general error.
- inviso_tool_lib:debug(remove_all_local_logs,Dbg,[{error,Reason}]),
- true
- end.
-
-%% Help function which puts together a list of {Node,Tracerdata} tuples. Note that
-%% we must build one tuple for each tracerdata for one node.
-build_logspeclist_remove_logs(Nodes,TRDstorage) ->
- [{Node,TracerData}||Node<-Nodes,TracerData<-find_tracerdata_for_node_trd(Node,TRDstorage)].
-%% ------------------------------------------------------------------------------
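-
-%% Illustrative example (assumed tracerdata terms): with Nodes=[n1@host,n2@host]
-%% and the tracerdata storage holding [T1,T2] for n1@host and [T3] for n2@host,
-%% the comprehension above yields [{n1@host,T1},{n1@host,T2},{n2@host,T3}], i.e.
-%% the same set of {Node,TracerData} pairs that build_logspeclist/2 builds
-%% further up (only the node order may differ).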
-
-%% Help function which traverses a resultlist from an inviso function. Such lists
-%% are built up as [{Node,SubResults},...] where SubResults is a list with one
-%% tuple {FType,FileList} per file-type (e.g. trace_log), and FileList is a list
-%% of {error,Reason} and/or {ok,FileName} tuples.
-%% Returns a list of {Node,[{error,Reason},...]}.
-look_for_errors_resultlist([{Node,{error,Reason}}|Rest]) ->
- [{Node,{error,Reason}}|look_for_errors_resultlist(Rest)];
-look_for_errors_resultlist([{Node,{ok,NResults}}|Rest]) when list(NResults) ->
- case look_for_errors_resultlist_2(NResults,[]) of
- [] ->
- look_for_errors_resultlist(Rest);
- Errors -> % A list of lists.
- [{Node,lists:flatten(Errors)}|look_for_errors_resultlist(Rest)]
- end;
-look_for_errors_resultlist([_|Rest]) ->
- look_for_errors_resultlist(Rest);
-look_for_errors_resultlist([]) ->
- [].
-
-look_for_errors_resultlist_2([{_FType,NSubResult}|Rest],Accum) ->
- case lists:filter(fun({error,_Reason})->true;(_)->false end,NSubResult) of
- [] -> % No errors for this node.
- look_for_errors_resultlist_2(Rest,Accum);
- Errors -> % A list of at least one error.
- look_for_errors_resultlist_2(Rest,[Errors|Accum])
- end;
-look_for_errors_resultlist_2([],Accum) ->
- Accum.
-%% ------------------------------------------------------------------------------
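-
-%% Illustrative example (assumed file names and reasons): with
-%%   Results=[{n1@host,{ok,[{trace_log,[{ok,"n1-td1.log"}]}]}},
-%%            {n2@host,{error,no_response}}]
-%% the function above returns [{n2@host,{error,no_response}}], since n1@host has
-%% no error entries among its file results.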
-
-
-%% ------------------------------------------------------------------------------
-%% Functions working on the loopdata structure.
-%% Its main purpose is to store information about runtime components participating
-%% in the session and their current status.
-%% ------------------------------------------------------------------------------
-
--record(ld,{parent,
- ctrlnode,
- ctrlpid, % To where to send inviso cmd.
- rtstates,
- tracerdata,
- safetycatches,
- dbg,
- actstorage % Activity storage, for reactivate.
- }).
-
-%% Function creating the initial datastructure.
-%% The datastructure is [{Node,State},...].
-%%
-%% The tracerdata table is a bag simply for the reason that if we try to insert
-%% the same tracerdata for a node twice, we will still end up with only one copy.
-%% This is useful when we insert tracerdata ourselves, since the tracerdata will
-%% also arrive as a state-change event.
-mk_ld(Parent,CtrlNode,CtrlPid,RTStates,NodeParams,OtherNodes,SafetyCatches,Dbg) ->
- TRDtableName=list_to_atom("inviso_tool_sh_trdstorage_"++pid_to_list(self())),
- TRDtid=ets:new(TRDtableName,[bag]),
- ACTtableName=list_to_atom("inviso_tool_sh_actstorage_"++pid_to_list(self())),
- ACTtid=ets:new(ACTtableName,[bag]),
- mk_ld_fill_tracerdata(CtrlNode,TRDtid,NodeParams,OtherNodes), % Fill the ETS table.
- #ld{parent=Parent, % The tool main process.
- ctrlnode=CtrlNode, % Node name where the control component is.
- ctrlpid=CtrlPid, % The process id of the control component.
- rtstates=RTStates, % All nodes and their state/status.
- tracerdata=TRDtid,
- safetycatches=SafetyCatches,
- dbg=Dbg,
- actstorage=ACTtid
- }.
-
-%% Help function which inserts tracer data for the nodes. Note that we can get
-%% tracer data either from the return value from init_tracing or by asking the
-%% node for it. The latter is necessary for the nodes which were marked not to
-%% be initiated by the session handler. This may be because those nodes have
-%% autostarted.
-mk_ld_fill_tracerdata(CtrlNode,TId,NodeParams,OtherNodes) ->
- mk_ld_fill_tracerdata_nodeparams(TId,NodeParams),
- mk_ld_fill_tracerdata_othernodes(CtrlNode,TId,OtherNodes).
-
-mk_ld_fill_tracerdata_nodeparams(TId,[{Node,TracerData}|Rest]) ->
- ets:insert(TId,{Node,TracerData}),
- mk_ld_fill_tracerdata_nodeparams(TId,Rest);
-mk_ld_fill_tracerdata_nodeparams(_,[]) ->
- ok.
-
-mk_ld_fill_tracerdata_othernodes(_,_,[]) -> % Then not necessary to do anything.
- ok;
-mk_ld_fill_tracerdata_othernodes(void,TId,[Node]) -> % The non-distributed case.
- case inviso:get_tracerdata() of
- {error,_Reason} -> % Perhaps in state new or disconnected.
- ok; % Do nothing.
- {ok,TracerData} ->
- ets:insert(TId,{Node,TracerData})
- end;
-mk_ld_fill_tracerdata_othernodes(CtrlNode,TId,Nodes) ->
- case inviso_tool_lib:inviso_cmd(CtrlNode,get_tracerdata,[Nodes]) of
- {ok,Results} ->
- mk_ld_fill_tracerdata_othernodes_2(TId,Results);
- {error,_Reason} -> % Strange, we will probably crash later.
- ok
- end.
-
-mk_ld_fill_tracerdata_othernodes_2(TId,[{_Node,{ok,no_tracerdata}}|Rest]) ->
- mk_ld_fill_tracerdata_othernodes_2(TId,Rest); % It was not initiated then!
-mk_ld_fill_tracerdata_othernodes_2(TId,[{Node,{ok,TracerData}}|Rest]) ->
- ets:insert(TId,{Node,TracerData}),
- mk_ld_fill_tracerdata_othernodes_2(TId,Rest);
-mk_ld_fill_tracerdata_othernodes_2(_,[]) ->
- ok.
-%% ------------------------------------------------------------------------------
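-
-%% Illustrative sketch (shell session with assumed values): the bag semantics
-%% that the tracerdata table relies on can be seen directly in the shell:
-%%
-%%   T = ets:new(trd_demo,[bag]),
-%%   ets:insert(T,{n1,td1}),
-%%   ets:insert(T,{n1,td1}),   % identical object, still stored only once
-%%   ets:insert(T,{n1,td2}),   % same key but new object, kept as well
-%%   ets:lookup(T,n1).         % -> [{n1,td1},{n1,td2}]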
-
-get_ctrlnode_ld(#ld{ctrlnode=CtrlNode}) ->
- CtrlNode.
-%% ------------------------------------------------------------------------------
-
-
-get_ctrlpid_ld(#ld{ctrlpid=CtrlPid}) ->
- CtrlPid.
-%% ------------------------------------------------------------------------------
-
-get_rtstates_ld(#ld{rtstates=RTStates}) ->
- RTStates.
-
-put_rtstates_ld(NewRTStates,LD) ->
- LD#ld{rtstates=NewRTStates}.
-%% ------------------------------------------------------------------------------
-
-get_trdstorage_ld(#ld{tracerdata=TId}) ->
- TId.
-
-put_trdstorage_ld(_NewTId,LD) ->
- LD.
-%% ------------------------------------------------------------------------------
-
-%% Help function which adds the current tracerdata of node Node to the tracerdata
-%% storage. We only want to add tracerdata we have not seen before. We therefore
-%% avoid adding it if the node already is in state ?TRACING.
-%% Returns a new tracerdata (whatever it is)!
-add_current_tracerdata_ld(CtrlNode,Node,RTStates,TId) ->
- case get_statestatus_rtstates(Node,RTStates) of
- {ok,{?TRACING,_}} -> % Then we have already added the tracerdata.
- TId; % Then do nothing.
- {ok,_} -> % Since we were not tracing before.
- case add_current_tracerdata_ld_fetchtracerdata(CtrlNode,Node) of
- {ok,TracerData} ->
- ets:insert(TId,{Node,TracerData});
- no_tracerdata -> % Strange, how could we have become tracing?
- ok;
- {error,_Reason} -> % The node perhaps disconnected!?
- ok
- end;
- false -> % Very strange, not our node!
- ok % Do nothing.
- end.
-
-add_current_tracerdata_ld_fetchtracerdata(void,_Node) ->
- case inviso:get_tracerdata() of
- {ok,TracerData} ->
- {ok,TracerData};
- {error,no_tracerdata} ->
- no_tracerdata;
- {error,Reason} ->
- {error,Reason}
- end;
-add_current_tracerdata_ld_fetchtracerdata(CtrlNode,Node) ->
- case inviso_tool_lib:inviso_cmd(CtrlNode,get_tracerdata,[[Node]]) of
- {ok,[{Node,{ok,TracerData}}]} ->
- {ok,TracerData};
- {ok,[{Node,{error,no_tracerdata}}]} ->
- no_tracerdata;
- {ok,[{Node,{error,Reason}}]} ->
- {error,Reason};
- {error,Reason} ->
- {error,Reason}
- end.
-%% ------------------------------------------------------------------------------
-
-
-get_safetycatches_ld(#ld{safetycatches=SCs}) ->
- SCs.
-%% ------------------------------------------------------------------------------
-
-get_dbg_ld(#ld{dbg=Dbg}) ->
- Dbg.
-%% ------------------------------------------------------------------------------
-
-get_actstorage_ld(#ld{actstorage=ACTstorage}) ->
- ACTstorage.
-
-put_actstorage_ld(_NewACTstorage,LD) ->
- LD.
-%% ------------------------------------------------------------------------------
-
-
-
-%% ------------------------------------------------------------------------------
-%% Functions working on the rtstates structure (which is a substructure of loopdata).
-%% It is a list:
-%% [{Node,StateStatus,Opts},...]
-%% Node is either the node name of the runtime component's Erlang node or
-%% ?LOCAL_RUNTIME as returned from the trace control component.
-%% StateStatus is {State,Status}, 'unavailable' or 'unknown'.
-%% Status is the return value from the trace control component,
-%% i.e: running | {suspended,Reason}
-%% ------------------------------------------------------------------------------
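-
-%% Illustrative example (assumed node names and options): an RTStates value for
-%% a session with one tracing node and one lost node could look like
-%%   [{n1@host,{tracing,running},[]},
-%%    {n2@host,unavailable,[{?COPY_LOG_FROM,"/net/n2/logs"}]}]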
-
-%% Function constructing an rtstates structure from a list of [{Node,StateStatus,Opts},...].
-to_rtstates(ListOfStates) when list(ListOfStates) ->
- ListOfStates.
-%% ------------------------------------------------------------------------------
-
-%% Function which takes a rtstates structure and returns a list of [{Node,StateStatus},...].
-from_rtstates(RTStates) ->
- RTStates.
-%% ------------------------------------------------------------------------------
-
-%% Function which takes an rtstates structure and a result as returned from
-%% init_tracing. The RTStates is modified for the nodes that changed state as a
-%% result of successful init_tracing.
-%% Returns a new RTStates.
-set_tracing_rtstates([E={Node,_StateStatus,Opts}|Rest],Result) ->
- case lists:keysearch(Node,1,Result) of
- {value,{_,ok}} -> % Means state-change to tracing!
- [{Node,{tracing,running},Opts}|set_tracing_rtstates(Rest,Result)];
- _ -> % Otherwise, leave it as is.
- [E|set_tracing_rtstates(Rest,Result)]
- end;
-set_tracing_rtstates([],_Result) ->
- [].
-%% ------------------------------------------------------------------------------
-
-%% Function updating the state/status for a certain runtime component.
-%% Returns a new RTStates structure. Note that Node is not necessarily one
-%% of the nodes in the session, meaning that Node shall not be added to RTStates
-%% if it is not already in there.
-statechange_rtstates(Node,State,Status,RTStates) when list(RTStates) ->
- case lists:keysearch(Node,1,RTStates) of
- {value,{_,_,Opts}} ->
- lists:keyreplace(Node,1,RTStates,{Node,{State,Status},Opts});
- _ -> % Then Node does not exist.
- RTStates % Just keep it as is, as keyreplace would have done.
- end.
-%% ------------------------------------------------------------------------------
-
-%% Function updating the state/status for a certain runtime component. The
-%% state/status is set to 'unavailable'.
-%% Returns a new RTStates structure.
-set_unavailable_rtstates(Node,RTStates) when list(RTStates) ->
- case lists:keysearch(Node,1,RTStates) of
- {value,{_,_,Opts}} ->
- lists:keyreplace(Node,1,RTStates,{Node,unavailable,Opts});
- _ -> % Then Node does not exist.
- RTStates % Just keep it as is, as keyreplace would have done.
- end.
-%% ------------------------------------------------------------------------------
-
-%% Function finding the statestatus associated with Node in the RTStates structure.
-%% Returns {ok,StateStatus} or 'false'.
-get_statestatus_rtstates(Node,RTStates) ->
- case lists:keysearch(Node,1,RTStates) of
- {value,{_,StateStatus,_}} ->
- {ok,StateStatus};
- false ->
- false
- end.
-%% ------------------------------------------------------------------------------
-
-%% Help function which returns a list of all nodes that are currently marked
-%% as available to us in the runtime state structure.
-get_all_available_nodes_rtstates(RTStates) ->
- get_all_session_nodes_rtstates(lists:filter(fun({_N,unavailable,_})->false;
- (_)->true
- end,
- RTStates)).
-%% ------------------------------------------------------------------------------
-
-%% Help function returning a list of all nodes belonging to this session.
-get_all_session_nodes_rtstates(RTStates) ->
- lists:map(fun({Node,_,_})->Node end,RTStates).
-%% ------------------------------------------------------------------------------
-
-%% Function which returns a list of nodes that are indicated as tracing in the
-%% RTStates structure.
-get_all_tracing_nodes_rtstates(RTStates) ->
- lists:map(fun({N,_,_})->N end,
- lists:filter(fun({_,{tracing,_},_})->true;(_)->false end,RTStates)).
-%% ------------------------------------------------------------------------------
-
-%% Returns the options associated with Node in the RTStates structure.
-get_opts_rtstates(Node,RTStates) ->
- case lists:keysearch(Node,1,RTStates) of
- {value,{_,_,Opts}} ->
- {ok,Opts};
- false ->
- false
- end.
-
-%% ------------------------------------------------------------------------------
-%% Functions working on the tracerdata structure, which is a part of the loopdata.
-%% The tracerdata structure is an ETS-table of type bag storing:
-%% {Node,TracerData}.
-%% Note that there can of course be multiple entries for a node.
-%% ------------------------------------------------------------------------------
-
-%% Help function which takes a tracerdata loopdata structure and returns a list
-%% of all stored tracerdata for a certain Node.
-find_tracerdata_for_node_trd(Node,TRD) ->
- case ets:lookup(TRD,Node) of
- Result when list(Result) ->
- lists:map(fun({_Node,TracerData})->TracerData end,Result);
- _ -> % Should probably never happen.
- []
- end.
-%% ------------------------------------------------------------------------------
-
-
-%% ------------------------------------------------------------------------------
-%% Functions working on the activity storage structure, which is part of the
-%% loopdata. It stores entries about things that need to be "redone" in case
-%% of a reactivation of the node. The time order is also important.
-%% Note that for every ActivityType there must be a "handler" in the reactivation
-%% functionality.
-%%
-%% The structure is a bag of {Node,ActivityType,What}.
-%% ActivityType/What=tf/{Op,TraceConfList}|tpm/{Op,[Mod,Func,Arity,MS,CallFunc]}
-%% /{Op,[Mod,Func,Arity,MS,CallFunc,ReturnFunc]}
-%% /{Op,[]}
-%% TraceConfList=[{Proc,Flags},...]
-%% How=true|false
-%% ------------------------------------------------------------------------------
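-
-%% Illustrative example (assumed values): after setting process trace-flags and
-%% the local-names meta pattern on a node, the activity storage could contain
-%%   {n1@host,tf,{tf,[{all,[call]}]}}
-%%   {n1@host,tpm,{tpm_localnames,[]}}
-%% which is exactly what h_reactivate_redo_activity_2/5 above replays after a
-%% cancelled suspension.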
-
-%% Function that adds meta-pattern activities to the activity storage. Note
-%% that one of the parameters to the function is a return value from an
-%% inviso call. In that way we do not enter activities that were unsuccessful.
-%% Op can be either the setting or clearing of a meta pattern.
-%% Returns a new ACTstorage.
-add_tpm_actstorage([{Node,ok}|Rest],Op,InvisoCmdParams,ACTstorage) ->
- true=ets:insert(ACTstorage,{Node,tpm,{Op,InvisoCmdParams}}),
- add_tpm_actstorage(Rest,Op,InvisoCmdParams,ACTstorage);
-add_tpm_actstorage([_|Rest],Op,InvisoCmdParams,ACTstorage) ->
- add_tpm_actstorage(Rest,Op,InvisoCmdParams,ACTstorage);
-add_tpm_actstorage([],_,_,ACTstorage) ->
- ACTstorage.
-
-%% Function that adds process trace-flags to the activity storage. Note that one
-%% of the parameters is the return value from an inviso function. Meaning that
-%% if the flags failed in their entirety, no activity will be saved. If only
-%% some of the flags failed, we will not go through the effort of trying to find
-%% out exactly which.
-%% Returns a new activity storage structure.
-add_tf_actstorage([{_Node,{error,_Reason}}|Rest],Op,TraceConfList,ACTstorage) ->
- add_tf_actstorage(Rest,Op,TraceConfList,ACTstorage);
-add_tf_actstorage([{Node,_Result}|Rest],Op,TraceConfList,ACTstorage) ->
- true=ets:insert(ACTstorage,{Node,tf,{Op,TraceConfList}}),
- add_tf_actstorage(Rest,Op,TraceConfList,ACTstorage);
-add_tf_actstorage([],_,_,ACTstorage) ->
- ACTstorage.
-%% ------------------------------------------------------------------------------
-
-%% Finds all activities associated with Node. Returns a list of them in the
-%% same order as they were inserted.
-get_activities_actstorage(Node,ACTstorage) ->
- case ets:lookup(ACTstorage,Node) of
- [] ->
- false;
- Result when list(Result) ->
- {ok,lists:map(fun({_N,Type,What})->{Type,What} end,Result)}
- end.
-%% ------------------------------------------------------------------------------
-
-%% Function removing all activity entries associated with Node. This is useful
-%% if the Node disconnects for instance.
-del_node_actstorage(Node,ACTstorage) ->
- ets:delete(ACTstorage,Node),
- ACTstorage.
-%% ------------------------------------------------------------------------------
-
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2005-2011. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%% Description:
+%% The runtime component of the trace tool Inviso.
+%%
+%% Authors:
+%% Lennart Öhman, [email protected]
+%% -----------------------------------------------------------------------------
+
+-module(inviso_tool_sh).
+
+%% Inviso Session Handler.
+%% This is the code for the session handler process. Its purpose is that we have
+%% one session handler process for each trace session started through the
+%% start_session inviso tool API. The session handler process is responsible for:
+%%
+%% -Knowing the state/status of all participating runtime components.
+%% -Keeping storage of all tracerdata our participants have used. This also
+%%  means finding out the tracerdata of runtime components that connect by
+%%  themselves.
+%%
+%% STORAGE STRATEGY
+%% ----------------
+%% The local information storage can be changed by two things. Either by executing
+%% commands issued through our APIs, or by receiving a trace_event from the control
+%% component. When we execute commands, a corresponding event will also follow,
+%% meaning that in those situations we are informed twice.
+%% A simple strategy could be to wait for the event even when doing the changes
+%% to the runtime components ourselves (through commands). But that may leave a
+%% small time frame where someone might issue yet another command and fail
+%% because the local information storage is not as up to date as would have been
+%% expected. Therefore we always update the local storage when making changes
+%% to a runtime component ourselves. There will eventually be a double update
+%% through an incoming event, but the storage must cope with that, preventing
+%% inconsistencies from happening. One example of such a strategy is that the
+%% tracerdata table is a bag, not allowing double entries of the same kind.
+%% A double update is therefore harmless there.
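+%%
+%% As an illustration of why the double update is harmless (a sketch only, with
+%% Node and TracerData standing for any bound terms): inserting the very same
+%% object twice into a bag table still leaves a single copy, since a bag stores
+%% an exact duplicate of a complete object only once.
+%%
+%%   T = ets:new(trd,[bag]),
+%%   true = ets:insert(T,{Node,TracerData}),    % update from our own command
+%%   true = ets:insert(T,{Node,TracerData}),    % update from the incoming event
+%%   [{Node,TracerData}] = ets:lookup(T,Node).  % still only one entry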
+
+%% ------------------------------------------------------------------------------
+%% Module wide constants.
+%% ------------------------------------------------------------------------------
+-define(LOCAL_RUNTIME,local_runtime). % Used as node name when non-distributed.
+-define(TRACING,tracing). % A state defined by the control component.
+-define(RUNNING,running). % A status according to the control component.
+
+-define(COPY_LOG_FROM,copy_log_from). % Common filesystem option.
+%% ------------------------------------------------------------------------------
+
+%% ------------------------------------------------------------------------------
+%% API exports.
+%% ------------------------------------------------------------------------------
+-export([start_link/5,start_link/8]).
+-export([cancel_session/1,stop_session/3]).
+-export([reactivate/1,reactivate/2]).
+-export([ctpl/5,tpl/5,tpl/6,tpl/7,
+ tf/2,tf/3,
+ tpm_localnames/2,init_tpm/6,init_tpm/9,tpm/6,tpm/7,tpm/10,
+ tpm_ms/7,ctpm_ms/6,ctpm/5
+ ]).
+%% ------------------------------------------------------------------------------
+
+
+%% ------------------------------------------------------------------------------
+%% Internal exports.
+%% ------------------------------------------------------------------------------
+-export([init/1,handle_call/3,handle_info/2,terminate/2]).
+
+-export([get_loopdata/1]).
+%% ------------------------------------------------------------------------------
+
+
+%% ------------------------------------------------------------------------------
+%% Includes.
+%% ------------------------------------------------------------------------------
+-include_lib("kernel/include/file.hrl"). % Necessary for file module.
+%% ------------------------------------------------------------------------------
+
+
+%% ==============================================================================
+%% Exported API functions.
+%% ==============================================================================
+
+%% start_link(From,NodeParams,CtrlNode,CtrlPid,SafetyCatches,NodesIn,NodesNotIn) =
+%% {ok,Pid} | {error,Reason}
+%% From= pid(), the initial client expecting the reply.
+%% NodeParams=[{Node,TracerData},{Node,TracerData,Opts}...]
+%% CtrlNode=atom() | 'void', the node where the trace control component is.
+%% CtrlPid=pid(), the pid of the trace control component.
+%% SafetyCatches=
+%% Dir=string(), where to place fetched logs and the merged log.
+%% Dbg=debug structure.
+%% NodesIn=[Node,...], list of nodes already in another session.
+%% NodesNotIn=[Node,...], list of nodes not in another session.
+%%
+%% Starts a session-handler. It keeps track of the state and status of all
+%% participating runtime components. Note that there is a non-distributed case too.
+%% In the non-distributed case there is no such thing as CtrlNode.
+start_link(From,TracerData,CtrlPid,SafetyCatches,Dbg) ->
+ gen_server:start_link(?MODULE,
+ {self(),From,TracerData,CtrlPid,SafetyCatches,Dbg},
+ []).
+
+start_link(From,NodeParams,CtrlNode,CtrlPid,SafetyCatches,Dbg,NodesIn,NodesNotIn) ->
+ gen_server:start_link(?MODULE,
+ {self(),From,NodeParams,CtrlNode,CtrlPid,
+ SafetyCatches,Dbg,NodesIn,NodesNotIn},
+ []).
+%% ------------------------------------------------------------------------------
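+%% A minimal sketch of how the tool might start a distributed session handler
+%% (illustrative only; all variable bindings below are assumed to exist):
+%%
+%%   {ok,SID} = inviso_tool_sh:start_link(ClientPid,
+%%                                        [{Node,TracerData,Opts}],
+%%                                        CtrlNode,CtrlPid,
+%%                                        SafetyCatches,Dbg,
+%%                                        NodesIn,NodesNotIn),
+%%
+%% The reply to ClientPid is sent from the session handler's init/1.
+%% ------------------------------------------------------------------------------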
+
+%% Stops tracing where it is ongoing. Fetches all logfiles.
+stop_session(SID,Dir,Prefix) ->
+ gen_server:call(SID,{stop_session,Dir,Prefix}).
+%% ------------------------------------------------------------------------------
+
+%% cancel_session(SID) = ok
+%%
+%% Cancels the session brutally. All runtime components are made to stop tracing,
+%% and all local log files are removed using the tracerdata we know for them.
+cancel_session(SID) ->
+ gen_server:call(SID,cancel_session).
+%% ------------------------------------------------------------------------------
+
+%% reactivate(SID) = {ok,
+%% reactivate(SID,Nodes) = {ok,NodeResults} | {error,Reason}.
+%% SID=session id, pid().
+%% Nodes=[Node,...]
+%% NodeResults=[{Node,Result},...]
+%% Result={Good,Bad}
+%% Good,Bad=integer(), the number of redone activities.
+%%
+%% Function which reactivates runtime components that have been suspended. This is
+%% done by replaying all trace flags (in the correct order) to the corresponding
+%% nodes. Note that this may also mean turning flags off, e.g. first turning them
+%% on and then off again a split second later.
+reactivate(SID) ->
+ gen_server:call(SID,reactivate). %% NOT IMPLEMENTED YET.
+reactivate(SID,Nodes) ->
+ gen_server:call(SID,{reactivate,Nodes}).
+%% ------------------------------------------------------------------------------
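+%% Example (illustrative): redo all stored activities on two suspended session
+%% nodes. Each entry in NodeResults is {Node,{Good,Bad}} for nodes in the session.
+%%
+%%   {ok,NodeResults} = inviso_tool_sh:reactivate(SID,[n1@host,n2@host]),
+%% ------------------------------------------------------------------------------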
+
+
+%% tpl(SessionID,Mod,Func,Arity,MS)=
+%% tpl(SessionID,Mod,Func,Arity,MS,Opts)={ok,N}|{error,Reason}.
+%% tpl(SessionID,Nodes,Mod,Func,Arity,MS)=
+%% tpl(SessionID,Nodes,Mod,Func,Arity,MS,Opts)={ok,Result}|{error,Reason}
+%% Mod='_' | ModuleName | ModRegExp | {DirRegExp,ModRegExp}
+%% ModRegExp=DirRegExp= string()
+%% Func='_' | FunctionName
+%% Arity='_' | integer()
+%% MS=[] | false | a match specification
+%% Opts=[Opt,...]
+%% Opt={arg,Arg}, disable_safety, {expand_regexp_at,NodeName}, only_loaded
+%% Nodes=[NodeName,...]
+tpl(SID,Mod,Func,Arity,MS) ->
+ gen_server:call(SID,{tp,tpl,Mod,Func,Arity,MS,[]}).
+tpl(SID,Mod,Func,Arity,MS,Opts) when list(MS);MS==true;MS==false ->
+ gen_server:call(SID,{tp,tpl,Mod,Func,Arity,MS,Opts});
+tpl(SID,Nodes,Mod,Func,Arity,MS) when integer(Arity);Arity=='_' ->
+ gen_server:call(SID,{tp,tpl,Nodes,Mod,Func,Arity,MS,[]}).
+tpl(SID,Nodes,Mod,Func,Arity,MS,Opts) ->
+ gen_server:call(SID,{tp,tpl,Nodes,Mod,Func,Arity,MS,Opts}).
+%% ------------------------------------------------------------------------------
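+%% Example (illustrative): set a local trace pattern on two session nodes for
+%% every function in module lists, with a match spec adding return_trace.
+%%
+%%   MS = [{'_',[],[{return_trace}]}],
+%%   inviso_tool_sh:tpl(SID,[n1@host,n2@host],lists,'_','_',MS),
+%% ------------------------------------------------------------------------------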
+
+%% ctpl(SessionID,Nodes,Mod,Func,Arity)=
+%% See tpl/X for arguments.
+%%
+%% Removes local trace-patterns from functions.
+ctpl(SID,Nodes,Mod,Func,Arity) ->
+ gen_server:call(SID,{ctp,ctpl,Nodes,Mod,Func,Arity}).
+%% ------------------------------------------------------------------------------
+
+
+tpm_localnames(SID,Nodes) ->
+ gen_server:call(SID,{tpm_localnames,Nodes}).
+
+%% tpm_globalnames(SID,Nodes) ->
+%% gen_server:call(SID,{tpm_globalnames,Nodes}).
+
+init_tpm(SID,Nodes,Mod,Func,Arity,CallFunc) ->
+ gen_server:call(SID,{init_tpm,Nodes,Mod,Func,Arity,CallFunc}).
+init_tpm(SID,Nodes,Mod,Func,Arity,InitFunc,CallFunc,ReturnFunc,RemoveFunc) ->
+ gen_server:call(SID,
+ {init_tpm,Nodes,Mod,Func,Arity,InitFunc,CallFunc,ReturnFunc,RemoveFunc}).
+tpm(SID,Nodes,Mod,Func,Arity,MS) ->
+ gen_server:call(SID,{tpm,Nodes,Mod,Func,Arity,MS}).
+tpm(SID,Nodes,Mod,Func,Arity,MS,CallFunc) ->
+ gen_server:call(SID,{tpm,Nodes,Mod,Func,Arity,MS,CallFunc}).
+tpm(SID,Nodes,Mod,Func,Arity,MS,InitFunc,CallFunc,ReturnFunc,RemoveFunc) ->
+ gen_server:call(SID,{tpm,Nodes,Mod,Func,Arity,MS,InitFunc,CallFunc,ReturnFunc,RemoveFunc}).
+
+tpm_ms(SID,Nodes,Mod,Func,Arity,MSname,MS) ->
+ gen_server:call(SID,{tpm_ms,Nodes,Mod,Func,Arity,MSname,MS}).
+
+ctpm_ms(SID,Nodes,Mod,Func,Arity,MSname) ->
+ gen_server:call(SID,{tpm_ms,Nodes,Mod,Func,Arity,MSname}).
+
+ctpm(SID,Nodes,Mod,Func,Arity) ->
+ gen_server:call(SID,{ctpm,Nodes,Mod,Func,Arity}).
+%% ------------------------------------------------------------------------------
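+%% Example (illustrative; mymod, store/2 and CallFunc are hypothetical): first
+%% initialize the meta pattern, then activate it. CallFunc is the call-back the
+%% inviso meta tracer invokes for matching calls (see the inviso documentation).
+%%
+%%   inviso_tool_sh:init_tpm(SID,Nodes,mymod,store,2,CallFunc),
+%%   inviso_tool_sh:tpm(SID,Nodes,mymod,store,2,[{'_',[],[]}]),
+%% ------------------------------------------------------------------------------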
+
+
+%% tf(SessionID,Nodes,TraceConfList)=
+%% TraceConfList=[{PidSpec,Flags},...]
+%% PidSpec=pid()|atom()|all|new|existing
+%% Flags=[Flag,...]
+tf(SID,TraceConfList) ->
+ gen_server:call(SID,{tf,TraceConfList}).
+tf(SID,Nodes,TraceConfList) ->
+ gen_server:call(SID,{tf,Nodes,TraceConfList}).
+%% ------------------------------------------------------------------------------
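+%% Example (illustrative): switch on call and timestamp tracing for all
+%% processes on the session nodes.
+%%
+%%   inviso_tool_sh:tf(SID,Nodes,[{all,[call,timestamp]}]),
+%% ------------------------------------------------------------------------------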
+
+
+get_loopdata(SID) ->
+ gen_server:call(SID,get_loopdata).
+%% ------------------------------------------------------------------------------
+
+%% ==============================================================================
+%% Genserver call-backs.
+%% ==============================================================================
+
+%% Initial function for the session handler process. The nodes participating in
+%% the session must previously have been added to our control component by the tool.
+%% The session handler first finds out the state/status of the specified runtime
+%% components, then it tries to initiate tracing on those where it is applicable.
+%% Note that the reply to the initial (tool) client is sent from here instead of
+%% from the tool-server.
+init({Parent,From,TracerData,CtrlPid,SafetyCatches,Dbg}) -> % The non-distributed case.
+ {ok,StateStatus}=init_rtcomponent_states([],void,CtrlPid,[?LOCAL_RUNTIME]),
+ case is_tool_internal_tracerdata(TracerData) of
+ false -> % We shall initiate local runtime.
+ case inviso:init_tracing(TracerData) of
+ ok ->
+ gen_server:reply(From,{ok,{self(),ok}}),
+ {ok,mk_ld(Parent,
+ void,
+ CtrlPid,
+ to_rtstates([{?LOCAL_RUNTIME,{tracing,?RUNNING},[]}]),
+ [{?LOCAL_RUNTIME,TracerData}],
+ [],
+ SafetyCatches,
+ Dbg)};
+ {error,Reason} -> % It might have become suspended?!
+ gen_server:reply(From,{error,Reason}),
+ {ok,mk_ld(Parent,
+ void,
+ CtrlPid,
+ to_rtstates([{?LOCAL_RUNTIME,StateStatus,[]}]),
+ [{?LOCAL_RUNTIME,TracerData}],
+ [],
+ SafetyCatches,
+ Dbg)}
+ end;
+ true -> % We shall not pass this one on.
+ gen_server:reply(From,{ok,{self(),ok}}), % Then it is ok.
+ {ok,mk_ld(Parent,
+ void,
+ CtrlPid,
+ to_rtstates([{?LOCAL_RUNTIME,StateStatus,[]}]),
+ [],
+ [?LOCAL_RUNTIME],
+ SafetyCatches,
+ Dbg)}
+ end;
+init({Parent,From,NodeParams,CtrlNode,CtrlPid,SafetyCatches,Dbg,NodesIn,NodesNotIn}) ->
+ case init_rtcomponent_states(NodeParams,CtrlNode,CtrlPid,NodesNotIn) of
+ {ok,States} -> % A list of {Node,{State,Status},Opts}.
+ {NodeParams2,Nodes2}=remove_nodeparams(NodesIn,NodeParams),
+ case inviso_tool_lib:inviso_cmd(CtrlNode,init_tracing,[NodeParams2]) of
+ {ok,Result} -> % Resulted in state changes!
+ RTStates=set_tracing_rtstates(to_rtstates(States),Result),
+ ReplyValue=init_fix_resultnodes(NodesIn,Nodes2,Result),
+ gen_server:reply(From,{ok,{self(),ReplyValue}}),
+ {ok,mk_ld(Parent,CtrlNode,CtrlPid,RTStates,
+ NodeParams2,Nodes2,SafetyCatches,Dbg)};
+ {error,Reason} -> % Some general failure.
+ inviso_tool_lib:inviso_cmd(CtrlNode,unsubscribe,[]),
+ gen_server:reply(From,{error,{init_tracing,Reason}}),
+ {stop,{init_tracing,Reason}};
+ What ->
+ io:format("GOT:~n~w~n",[What]),
+ exit(foo)
+ end;
+ {error,Reason} -> % Unable to get the state/status.
+ inviso_tool_lib:inviso_cmd(CtrlNode,unsubscribe,[]),
+ gen_server:reply(From,{error,Reason}),
+ {stop,{error,Reason}};
+ What ->
+ io:format("GOT:~n~w~n",[What]),
+ exit(foo)
+ end.
+%% ------------------------------------------------------------------------------
+
+%% Cancelling a session means stopping the tracing and removing all local log files
+%% on the runtime nodes. We have a table with all tracerdata, and that is how we
+%% recreate which files to remove.
+%% Since runtime components may actually change state while this procedure is
+%% ongoing, we do not care! It is the state in the session handling process at
+%% the time this procedure starts which is used.
+handle_call(cancel_session,_From,LD) ->
+ CtrlNode=get_ctrlnode_ld(LD),
+ RTStates=get_rtstates_ld(LD),
+ Dbg=get_dbg_ld(LD),
+ TracingNodes=get_all_tracing_nodes_rtstates(RTStates),
+ case stop_all_tracing(CtrlNode,Dbg,TracingNodes) of
+ ok-> % Hopefully all nodes are stopped now.
+ AvailableNodes=get_all_available_nodes_rtstates(RTStates),
+ TRDstorage=get_trdstorage_ld(LD),
+ remove_all_local_logs(CtrlNode,TRDstorage,AvailableNodes,Dbg),
+ {stop,normal,ok,LD}; % LD actually not correct now!
+ {error,Reason} -> % Some serious error when stop_tracing.
+ {stop,normal,{error,Reason},LD}
+ end;
+%% ------------------------------------------------------------------------------
+
+%% *Stop all tracing on runtime components still tracing.
+%% *Copy all local log files to the collection directory.
+handle_call({stop_session,Dir,Prefix},_From,LD) ->
+ case check_directory_exists(Dir) of % Check that this directory exists here.
+ true ->
+ RTStates=get_rtstates_ld(LD),
+ CtrlNode=get_ctrlnode_ld(LD),
+ Dbg=get_dbg_ld(LD),
+ TracingNodes=get_all_tracing_nodes_rtstates(RTStates),
+ case stop_all_tracing(CtrlNode,Dbg,TracingNodes) of
+ ok -> % Hopefully no node is still tracing now.
+ TRDstorage=get_trdstorage_ld(LD),
+ AvailableNodes=get_all_available_nodes_rtstates(RTStates),
+ {FailedNodes,FetchedFiles}=
+ transfer_logfiles(RTStates,CtrlNode,Dir,Prefix,
+ TRDstorage,Dbg,AvailableNodes),
+ RemoveNodes= % We only delete local logs where fetch ok.
+ lists:filter(fun(N)->
+ case lists:keysearch(N,1,FailedNodes) of
+ {value,_} ->
+ false;
+ false ->
+ true
+ end
+ end,
+ AvailableNodes),
+ remove_all_local_logs(CtrlNode,TRDstorage,RemoveNodes,Dbg),
+ {stop,normal,{ok,{FailedNodes,FetchedFiles}},LD};
+ {error,Reason} -> % Some general failure, quit.
+ {stop,normal,{error,Reason},LD}
+ end;
+ false -> % You specified a non-existing directory!
+ {reply,{error,{faulty_dir,Dir}},LD}
+ end;
+%% ------------------------------------------------------------------------------
+
+handle_call({reactivate,Nodes},_From,LD) ->
+ RTStates=get_rtstates_ld(LD),
+ {OurNodes,OtherNodes}=
+ remove_nodes_not_ours(Nodes,get_all_session_nodes_rtstates(RTStates)),
+ CtrlNode=get_ctrlnode_ld(LD),
+ ACTstorage=get_actstorage_ld(LD),
+ case h_reactivate(CtrlNode,OurNodes,ACTstorage) of
+ {ok,Results} -> % A list of {Node,Result}.
+ if
+ OtherNodes==[] -> % Normal case, no non-session nodes.
+ {reply,{ok,Results},LD};
+ true -> % Add error values for non-session nodes.
+ {reply,
+ {ok,
+ lists:map(fun(N)->{N,{error,not_in_session}} end,OtherNodes)++
+ Results},
+ LD}
+ end;
+ {error,Reason} -> % Then this error takes precedence.
+ {reply,{error,Reason},LD}
+ end;
+%% ------------------------------------------------------------------------------
+
+%% Call-back for setting trace-patterns for both global and local functions.
+handle_call({tp,PatternFunc,Mod,F,A,MS,Opts},_From,LD) ->
+ Reply=h_tp(all,PatternFunc,Mod,F,A,MS,Opts,LD), % For all active nodes in the session.
+ {reply,Reply,LD};
+handle_call({tp,PatternFunc,Nodes,Mod,F,A,MS,Opts},_From,LD) ->
+ RTStates=get_rtstates_ld(LD),
+ SNodes=get_all_session_nodes_rtstates(RTStates), % Nodes belonging to the session.
+ {Nodes2,FaultyNodes}=remove_nodes_not_ours(Nodes,SNodes),
+ Reply=h_tp(Nodes2,PatternFunc,Mod,F,A,MS,Opts,LD),
+ ErrorReply=lists:map(fun(N)->{N,{error,not_in_session}} end,FaultyNodes),
+ {reply,ErrorReply++Reply,LD};
+%% ------------------------------------------------------------------------------
+
+%% Call-back handling the removal of both local and global trace-patterns.
+%% NOT IMPLEMENTED YET.
+handle_call({ctp,PatternFunc,Nodes,Mod,F,A},_From,LD) ->
+ Reply=h_ctp(Nodes,PatternFunc,Mod,F,A,LD),
+ {reply,Reply,LD};
+%% ------------------------------------------------------------------------------
+
+handle_call({tpm_localnames,Nodes},_From,LD) ->
+ RTStates=get_rtstates_ld(LD),
+ OurNodes=get_all_session_nodes_rtstates(RTStates),
+ {Nodes2,NotOurNodes}=remove_nodes_not_ours(Nodes,OurNodes),
+ ACTstorage=get_actstorage_ld(LD),
+ {Reply,NewACTstorage}=
+ h_tpm_localnames(get_ctrlnode_ld(LD),Nodes2,RTStates,ACTstorage),
+ ErrorReply=lists:map(fun(N)->{N,{error,not_in_session}} end,NotOurNodes),
+ {reply,ErrorReply++Reply,put_actstorage_ld(NewACTstorage,LD)};
+
+handle_call({init_tpm,Nodes,Mod,Func,Arity,CallFunc},_From,LD) ->
+ RTStates=get_rtstates_ld(LD),
+ OurNodes=get_all_session_nodes_rtstates(RTStates),
+ {Nodes2,NotOurNodes}=remove_nodes_not_ours(Nodes,OurNodes),
+ ACTstorage=get_actstorage_ld(LD),
+ {Reply,NewACTstorage}=
+ h_all_tpm(get_ctrlnode_ld(LD),
+ Nodes2,
+ init_tpm,
+ [Mod,Func,Arity,CallFunc],
+ RTStates,
+ ACTstorage),
+ ErrorReply=lists:map(fun(N)->{N,{error,not_in_session}} end,NotOurNodes),
+ {reply,ErrorReply++Reply,put_actstorage_ld(NewACTstorage,LD)};
+
+handle_call({init_tpm,Nodes,Mod,Func,Arity,InitFunc,CallFunc,ReturnFunc,RemoveFunc},_From,LD) ->
+ RTStates=get_rtstates_ld(LD),
+ OurNodes=get_all_session_nodes_rtstates(RTStates),
+ {Nodes2,NotOurNodes}=remove_nodes_not_ours(Nodes,OurNodes),
+ ACTstorage=get_actstorage_ld(LD),
+ {Reply,NewACTstorage}=
+ h_all_tpm(get_ctrlnode_ld(LD),
+ Nodes2,
+ init_tpm,
+ [Mod,Func,Arity,InitFunc,CallFunc,ReturnFunc,RemoveFunc],
+ RTStates,
+ ACTstorage),
+ ErrorReply=lists:map(fun(N)->{N,{error,not_in_session}} end,NotOurNodes),
+ {reply,ErrorReply++Reply,put_actstorage_ld(NewACTstorage,LD)};
+
+handle_call({tpm,Nodes,Mod,Func,Arity,MS},_From,LD) ->
+ RTStates=get_rtstates_ld(LD),
+ OurNodes=get_all_session_nodes_rtstates(RTStates),
+ {Nodes2,NotOurNodes}=remove_nodes_not_ours(Nodes,OurNodes),
+ ACTstorage=get_actstorage_ld(LD),
+ {Reply,NewACTstorage}=
+ h_all_tpm(get_ctrlnode_ld(LD),Nodes2,tpm,[Mod,Func,Arity,MS],RTStates,ACTstorage),
+ ErrorReply=lists:map(fun(N)->{N,{error,not_in_session}} end,NotOurNodes),
+ {reply,ErrorReply++Reply,put_actstorage_ld(NewACTstorage,LD)};
+
+handle_call({tpm,Nodes,Mod,Func,Arity,MS,CallFunc},_From,LD) ->
+ RTStates=get_rtstates_ld(LD),
+ OurNodes=get_all_session_nodes_rtstates(RTStates),
+ {Nodes2,NotOurNodes}=remove_nodes_not_ours(Nodes,OurNodes),
+ ACTstorage=get_actstorage_ld(LD),
+ {Reply,NewACTstorage}=
+ h_all_tpm(get_ctrlnode_ld(LD),
+ Nodes2,
+ tpm,
+ [Mod,Func,Arity,MS,CallFunc],
+ RTStates,
+ ACTstorage),
+ ErrorReply=lists:map(fun(N)->{N,{error,not_in_session}} end,NotOurNodes),
+ {reply,ErrorReply++Reply,put_actstorage_ld(NewACTstorage,LD)};
+
+handle_call({tpm,Nodes,Mod,Func,Arity,MS,InitFunc,CallFunc,ReturnFunc,RemoveFunc},_From,LD) ->
+ RTStates=get_rtstates_ld(LD),
+ OurNodes=get_all_session_nodes_rtstates(RTStates),
+ {Nodes2,NotOurNodes}=remove_nodes_not_ours(Nodes,OurNodes),
+ ACTstorage=get_actstorage_ld(LD),
+ {Reply,NewACTstorage}=
+ h_all_tpm(get_ctrlnode_ld(LD),
+ Nodes2,
+ tpm,
+ [Mod,Func,Arity,MS,InitFunc,CallFunc,ReturnFunc,RemoveFunc],
+ RTStates,
+ ACTstorage),
+ ErrorReply=lists:map(fun(N)->{N,{error,not_in_session}} end,NotOurNodes),
+ {reply,ErrorReply++Reply,put_actstorage_ld(NewACTstorage,LD)};
+
+handle_call({tpm_ms,Nodes,Mod,Func,Arity,MSname,MS},_From,LD) ->
+ RTStates=get_rtstates_ld(LD),
+ OurNodes=get_all_session_nodes_rtstates(RTStates),
+ {Nodes2,NotOurNodes}=remove_nodes_not_ours(Nodes,OurNodes),
+ ACTstorage=get_actstorage_ld(LD),
+ {Reply,NewACTstorage}=
+ h_all_tpm(get_ctrlnode_ld(LD),
+ Nodes2,
+ tpm_ms,
+ [Mod,Func,Arity,MSname,MS],
+ RTStates,
+ ACTstorage),
+ ErrorReply=lists:map(fun(N)->{N,{error,not_in_session}} end,NotOurNodes),
+ {reply,ErrorReply++Reply,put_actstorage_ld(NewACTstorage,LD)};
+
+handle_call({ctpm_ms,Nodes,Mod,Func,Arity,MSname},_From,LD) ->
+ RTStates=get_rtstates_ld(LD),
+ OurNodes=get_all_session_nodes_rtstates(RTStates),
+ {Nodes2,NotOurNodes}=remove_nodes_not_ours(Nodes,OurNodes),
+ ACTstorage=get_actstorage_ld(LD),
+ {Reply,NewACTstorage}=
+ h_all_tpm(get_ctrlnode_ld(LD),
+ Nodes2,
+ ctpm_ms,
+ [Mod,Func,Arity,MSname],
+ RTStates,
+ ACTstorage),
+ ErrorReply=lists:map(fun(N)->{N,{error,not_in_session}} end,NotOurNodes),
+ {reply,ErrorReply++Reply,put_actstorage_ld(NewACTstorage,LD)};
+
+handle_call({ctpm,Nodes,Mod,Func,Arity},_From,LD) ->
+ RTStates=get_rtstates_ld(LD),
+ OurNodes=get_all_session_nodes_rtstates(RTStates),
+ {Nodes2,NotOurNodes}=remove_nodes_not_ours(Nodes,OurNodes),
+ ACTstorage=get_actstorage_ld(LD),
+ {Reply,NewACTstorage}=
+ h_all_tpm(get_ctrlnode_ld(LD),Nodes2,ctpm,[Mod,Func,Arity],RTStates,ACTstorage),
+ ErrorReply=lists:map(fun(N)->{N,{error,not_in_session}} end,NotOurNodes),
+ {reply,ErrorReply++Reply,put_actstorage_ld(NewACTstorage,LD)};
+%% ------------------------------------------------------------------------------
+
+%% Call-back for setting process trace-flags. Handles both the distributed and
+%% non-distributed case.
+handle_call({tf,TraceConfList},From,LD) ->
+ handle_call({tf,all,TraceConfList},From,LD);
+handle_call({tf,Nodes,TraceConfList},_From,LD) ->
+ {Reply,NewACTstorage}=h_tf(get_ctrlnode_ld(LD),
+ Nodes,
+ TraceConfList,
+ get_actstorage_ld(LD),
+ get_rtstates_ld(LD)),
+ {reply,Reply,put_actstorage_ld(NewACTstorage,LD)};
+%% ------------------------------------------------------------------------------
+
+
+
+handle_call(get_loopdata,_From,LD) ->
+ io:format("The loopdata:~n~p~n",[LD]),
+ {reply,ok,LD}.
+%% ------------------------------------------------------------------------------
+
+
+%% Clause handling an incoming state-change event from the control component.
+%% Note that it does not have to concern one of our nodes, since it is not
+%% possible to subscribe only to events from certain nodes.
+%% We may very well get state-change events for state-changes we caused
+%% ourselves. Those state-changes are already incorporated into the RTStates.
+%% There is however no harm in doing them again, since we know that this event
+%% message will reach us before the reply to a potentially following state-change
+%% request does. Hence we will do all state-changes in the correct order,
+%% even if sometimes done twice.
+handle_info({trace_event,CtrlPid,_Time,{state_change,Node,{State,Status}}},LD) ->
+ case get_ctrlpid_ld(LD) of
+ CtrlPid -> % It is from our control component.
+ case {State,Status} of
+ {?TRACING,?RUNNING} -> % This is the only case when new tracerdata!
+ NewTracerData=add_current_tracerdata_ld(get_ctrlnode_ld(LD),
+ Node,
+ get_rtstates_ld(LD),
+ get_trdstorage_ld(LD)),
+ NewRTStates=statechange_rtstates(Node,State,Status,get_rtstates_ld(LD)),
+ {noreply,put_trdstorage_ld(NewTracerData,
+ put_rtstates_ld(NewRTStates,LD))};
+ _ -> % In all other cases, just fix rtstates.
+ NewRTStates=statechange_rtstates(Node,State,Status,get_rtstates_ld(LD)),
+ {noreply,put_rtstates_ld(NewRTStates,LD)}
+ end;
+ _ ->
+ {noreply,LD}
+ end;
+%% If a new runtime component connects to our trace control component, and it is
+%% in our list of runtime components belonging to this session, we may update its
+%% state to now being present. Otherwise it does not belong to this session.
+%% Note that we avoid updating an already connected runtime component. This
+%% can happen if it connected by itself after we started the session handler,
+%% but before we managed to initiate tracing. Doing so or not will not result in
+%% any error in the long run, but during a short period of time we might be
+%% prevented from doing things with the runtime even though it actually is tracing.
+handle_info({trace_event,CtrlPid,_Time,{connected,Node,{_Tag,{State,Status}}}},LD) ->
+ case get_ctrlpid_ld(LD) of
+ CtrlPid -> % It is from our control component.
+ case get_statestatus_rtstates(Node,get_rtstates_ld(LD)) of
+ {ok,unavailable} -> % This is the situation when we update!
+ NewRTStates=statechange_rtstates(Node,State,Status,get_rtstates_ld(LD)),
+ {noreply,put_rtstates_ld(NewRTStates,LD)};
+ _ -> % In all other cases, let it be.
+ {noreply,LD}
+ end;
+ _ -> % Not from our control component.
+ {noreply,LD}
+ end;
+%% If a runtime component disconnects we mark it as unavailable. We must also
+%% remove all saved trace-flags so that they are not accidentally reactivated
+%% should the runtime component reconnect and then be suspended.
+handle_info({trace_event,CtrlPid,_Time,{disconnected,Node,_}},LD) ->
+ case get_ctrlpid_ld(LD) of
+ CtrlPid -> % It is from our control component.
+ NewRTStates=set_unavailable_rtstates(Node,get_rtstates_ld(LD)),
+ NewACTstorage=del_node_actstorage(Node,get_actstorage_ld(LD)),
+ {noreply,put_actstorage_ld(NewACTstorage,put_rtstates_ld(NewRTStates,LD))};
+ _ ->
+ {noreply,LD}
+ end;
+handle_info(_,LD) ->
+ {noreply,LD}.
+%% ------------------------------------------------------------------------------
+
+%% In terminate we cancel our subscription to events from the trace control
+%% component. That should actually not be necessary, but let's do it the correct way!
+terminate(_,LD) ->
+ case get_ctrlnode_ld(LD) of
+ void -> % Non-distributed.
+ inviso:unsubscribe();
+ Node ->
+ inviso_tool_lib:inviso_cmd(Node,unsubscribe,[])
+ end.
+%% ------------------------------------------------------------------------------
+
+
+
+%% ==============================================================================
+%% First level help functions to call-backs.
+%% ==============================================================================
+
+%% ------------------------------------------------------------------------------
+%% Help functions to init.
+%% ------------------------------------------------------------------------------
+
+%% Help function which finds out the state/status of the runtime components.
+%% Note that since we have just started subscribing to state changes, we must
+%% check our in-queue to see that we have no waiting messages for the nodes
+%% we learned the state/status of. If there is a waiting message we don't
+%% know whether that state change was received before or after the state
+%% check was done. We will then redo the state-check.
+%% Returns {ok,States} or {error,Reason}.
+%% Where States is [{Node,{State,Status},Opts},...].
+%% Note that {error,Reason} can not occur in the non-distributed case.
+init_rtcomponent_states(NodeParams,void,CtrlPid,Nodes) -> % The non-distributed case.
+ ok=inviso:subscribe(),
+ init_rtcomponent_states_2(NodeParams,void,CtrlPid,Nodes,[]);
+init_rtcomponent_states(NodeParams,CtrlNode,CtrlPid,Nodes) ->
+ ok=inviso_tool_lib:inviso_cmd(CtrlNode,subscribe,[]),
+ init_rtcomponent_states_2(NodeParams,CtrlNode,CtrlPid,Nodes,[]).
+
+init_rtcomponent_states_2(_,_,_,[],States) ->
+ {ok,States};
+init_rtcomponent_states_2(NodeParams,void,CtrlPid,_Nodes,States) ->
+ case inviso:get_status() of
+ {ok,StateStatus} -> % Got its state/status, now...
+ {ProblemNodes,NewStates}=
+ init_rtcomponent_states_3(NodeParams,CtrlPid,[{?LOCAL_RUNTIME,{ok,StateStatus}}],
+ [],States),
+ init_rtcomponent_states_2(NodeParams,void,CtrlPid,ProblemNodes,NewStates);
+ {error,_Reason} -> % The runtime is not available!?
+ {ok,[{?LOCAL_RUNTIME,unavailable,[]}]} % Create the return value immediately.
+ end;
+init_rtcomponent_states_2(NodeParams,CtrlNode,CtrlPid,Nodes,States) ->
+ case inviso_tool_lib:inviso_cmd(CtrlNode,get_status,[Nodes]) of
+ {ok,NodeResult} ->
+ {ProblemNodes,NewStates}=
+ init_rtcomponent_states_3(NodeParams,CtrlPid,NodeResult,[],States),
+ init_rtcomponent_states_2(NodeParams,CtrlNode,CtrlPid,ProblemNodes,NewStates);
+ {error,Reason} -> % Severe problem, abort the session.
+ {error,{get_status,Reason}}
+ end.
+
+%% Traverses the list of return values and checks that we do not have an event
+%% waiting in the message queue. If we do, it is a problem, and that node will
+%% be asked about its state again.
+%% Note that it is here we construct the RTStates list.
+init_rtcomponent_states_3(NodeParams,CtrlPid,[{Node,{ok,{State,Status}}}|Rest],Problems,States) ->
+ receive
+ {trace_event,CtrlPid,_Time,{state_change,Node,_}} ->
+ init_rtcomponent_states_3(NodeParams,CtrlPid,Rest,[Node|Problems],States)
+ after
+ 0 -> % Not in msg queue, then we're safe!
+ RTState=case lists:keysearch(Node,1,NodeParams) of
+ {value,{_Node,_TracerData,Opts}} ->
+ {Node,{State,Status},Opts};
+ _ -> % No option available, use [].
+ {Node,{State,Status},[]}
+ end,
+ init_rtcomponent_states_3(NodeParams,CtrlPid,Rest,Problems,[RTState|States])
+ end;
+init_rtcomponent_states_3(NodeParams,CtrlPid,[{Node,{error,_Reason}}|Rest],Problems,States) ->
+ RTState=case lists:keysearch(Node,1,NodeParams) of
+ {value,{_Node,_TracerData,Opts}} ->
+ {Node,unavailable,Opts};
+ _ -> % No option available, use [].
+ {Node,unavailable,[]}
+ end,
+ init_rtcomponent_states_3(NodeParams,CtrlPid,Rest,Problems,[RTState|States]);
+init_rtcomponent_states_3(_,_,[],Problems,States) ->
+ {Problems,States}.
+%% ------------------------------------------------------------------------------
+
+%% Help function removing nodes from NodeParams. The reason for this can either
+%% be that we are using a tool internal tracerdata that shall not be forwarded to
+%% the trace control component, or that the node is actually already part of
+%% another session.
+%% Returns {NewNodeParams,NodesWhichShallNotBeInitiated}.
+remove_nodeparams(Nodes,NodesParams) ->
+ remove_nodeparams_2(Nodes,NodesParams,[],[]).
+
+remove_nodeparams_2(Nodes,[NodeParam|Rest],NPAcc,NAcc) when % NPAcc=NodeParamsAcc.
+ (is_tuple(NodeParam) and ((size(NodeParam)==2) or (size(NodeParam)==3))) ->
+ Node=element(1,NodeParam),
+ Params=element(2,NodeParam), % This is tracerdata!
+ case lists:member(Node,Nodes) of
+ true -> % Remove this one, in another session.
+ remove_nodeparams_2(Nodes,Rest,NPAcc,NAcc);
+ false -> % Ok so far...
+ case is_tool_internal_tracerdata(Params) of
+ false -> % Then keep it and use it later!
+ remove_nodeparams_2(Nodes,Rest,[{Node,Params}|NPAcc],NAcc);
+ true -> % Since it is, remove it from the list.
+ remove_nodeparams_2(Nodes,Rest,NPAcc,[Node|NAcc])
+ end
+ end;
+remove_nodeparams_2(Nodes,[_|Rest],NPAcc,NAcc) -> % Faulty NodeParam, skip it!
+ remove_nodeparams_2(Nodes,Rest,NPAcc,NAcc);
+remove_nodeparams_2(_,[],NPAcc,NAcc) ->
+ {lists:reverse(NPAcc),NAcc}.
+%% ------------------------------------------------------------------------------
+
+%% Help function which adds both the nodes which were already part of another
+%% session and the nodes that we actually did not issue any init_tracing for.
+%% Returns a new Result list of [{Node,NodeResult},...].
+init_fix_resultnodes(NodesOtherSes,NodesNotInit,Result) ->
+ NewResult=init_fix_resultnodes_2(NodesOtherSes,{error,in_other_session},Result),
+ init_fix_resultnodes_2(NodesNotInit,ok,NewResult).
+
+init_fix_resultnodes_2([Node|Rest],NodeResult,Result) ->
+ [{Node,NodeResult}|init_fix_resultnodes_2(Rest,NodeResult,Result)];
+init_fix_resultnodes_2([],_,Result) ->
+ Result. % Append Result to the end of the list.
+%% ------------------------------------------------------------------------------
+
+
+%% ------------------------------------------------------------------------------
+%% Help functions to reactivate.
+%% ------------------------------------------------------------------------------
+
+h_reactivate(CtrlNode,Nodes,ACTstorage) -> % Distributed case.
+ case inviso_tool_lib:inviso_cmd(CtrlNode,cancel_suspension,[Nodes]) of
+ {ok,CSuspResults} ->
+ {GoodNodes,BadResults}= % Sort out nodes no longer suspended.
+ lists:foldl(fun({Node,ok},{GoodNs,BadNs})->
+ {[Node|GoodNs],BadNs};
+ ({Node,{error,Reason}},{GoodNs,BadNs})->
+ {GoodNs,[{Node,{error,{cancel_suspension,Reason}}}|BadNs]}
+ end,
+ {[],[]},
+ CSuspResults),
+ Results=h_reactivate_redo_activity(CtrlNode,GoodNodes,ACTstorage,[]),
+ {ok,BadResults++Results};
+ {error,Reason} -> % General failure cancelling suspend.
+ {error,{cancel_suspension,Reason}}
+ end.
+%% ------------------------------------------------------------------------------
+
+%% Help function which traverses the list of nodes known to be ours and to have
+%% had their suspension cancelled. If we fail redoing one of the activities
+%% associated with a node, the node will be reported in the return value as
+%% failed. From that point on its state must be considered unknown since we do
+%% not know how many of the activities were successfully redone.
+h_reactivate_redo_activity(CtrlNode,[Node|Rest],ACTstorage,Acc) ->
+ case get_activities_actstorage(Node,ACTstorage) of
+ {ok,Activities} -> % The node existed in activity storage.
+ {Good,Bad}=h_reactivate_redo_activity_2(CtrlNode,Node,Activities,0,0),
+ h_reactivate_redo_activity(CtrlNode,Rest,ACTstorage,[{Node,{Good,Bad}}|Acc]);
+ false -> % Node not present in activity storage.
+ h_reactivate_redo_activity(CtrlNode,Rest,ACTstorage,[{Node,{0,0}}|Acc])
+ end;
+h_reactivate_redo_activity(_CtrlNode,[],_,Acc) ->
+ lists:reverse(Acc).
+
+%% Help function actually redoing the activity. Note that there must be one
+%% clause here for every type of activity.
+%% Returns {NrGoodCmds,NrBadCmds}.
+%% The number of good or bad commands refers to inviso commands done. If any
+%% of the subparts of such a command returned an error, the command is considered
+%% no good.
+h_reactivate_redo_activity_2(CtrlNode,Node,[{tf,{Op,TraceConfList}}|Rest],Good,Bad) ->
+ case inviso_tool_lib:inviso_cmd(CtrlNode,Op,[[Node],TraceConfList]) of
+ {ok,[{_Node,{ok,Answers}}]} ->
+ case h_reactivate_redo_activity_check_tf(Answers) of
+ ok ->
+ h_reactivate_redo_activity_2(CtrlNode,Node,Rest,Good+1,Bad);
+ error -> % At least one of the answers was an error.
+ h_reactivate_redo_activity_2(CtrlNode,Node,Rest,Good,Bad+1)
+ end;
+ {ok,[{_Node,{error,_Reason}}]} ->
+ h_reactivate_redo_activity_2(CtrlNode,Node,Rest,Good,Bad+1);
+ {error,_Reason} -> % General error when doing cmd.
+ h_reactivate_redo_activity_2(CtrlNode,Node,Rest,Good,Bad+1)
+ end;
+h_reactivate_redo_activity_2(CtrlNode,Node,[{tpm,{Op,InvisoCmdParams}}|Rest],Good,Bad) ->
+ case inviso_tool_lib:inviso_cmd(CtrlNode,Op,[[Node]|InvisoCmdParams]) of
+ {ok,[{_Node,ok}]} ->
+ h_reactivate_redo_activity_2(CtrlNode,Node,Rest,Good+1,Bad);
+ {ok,[{_Node,{error,_Reason}}]} ->
+ h_reactivate_redo_activity_2(CtrlNode,Node,Rest,Good,Bad+1);
+ {error,_Reason} -> % General error when doing cmd.
+ h_reactivate_redo_activity_2(CtrlNode,Node,Rest,Good,Bad+1)
+ end;
+h_reactivate_redo_activity_2(_CtrlNode,_Node,[],Good,Bad) ->
+ {Good,Bad}.
+
+%% Help function traversing a list of results from inviso:tf/2 or inviso:ctf/2
+%% to see if there were any errors.
+h_reactivate_redo_activity_check_tf([N|Rest]) when integer(N) ->
+ h_reactivate_redo_activity_check_tf(Rest);
+h_reactivate_redo_activity_check_tf([{error,_Reason}|_]) ->
+ error;
+h_reactivate_redo_activity_check_tf([]) ->
+ ok.
+%% ------------------------------------------------------------------------------
+
+
+%% ------------------------------------------------------------------------------
+%% Help functions to tp (setting trace patterns, both local and global).
+%% ------------------------------------------------------------------------------
+
+%% Help function which handles both tpl and tp. Note that the non-distributed case
+%% is handled with Nodes='all'.
+%% Returns what shall be the reply to the client.
+h_tp(all,PatternFunc,Mod,F,A,MS,Opts,LD) -> % All available runtime nodes.
+ Nodes=get_all_available_nodes_rtstates(get_rtstates_ld(LD)),
+ h_tp(Nodes,PatternFunc,Mod,F,A,MS,Opts,LD);
+h_tp(Nodes,PatternFunc,Mod,F,A,MS,Opts,LD) -> % Only certain nodes in the session.
+ CtrlNode=get_ctrlnode_ld(LD),
+ Dbg=get_dbg_ld(LD),
+ SafetyCatches=get_safetycatches_ld(LD),
+ case inviso_tool_lib:expand_module_names(Nodes,Mod,Opts) of % Take care of any reg-exps.
+ {multinode_expansion,NodeMods} ->
+ NodeTPs=inviso_tool_lib:make_patterns(SafetyCatches,Opts,Dbg,NodeMods,F,A,MS),
+ h_tp_node_by_node(CtrlNode,PatternFunc,Dbg,NodeTPs,[]);
+ {singlenode_expansion,Modules} ->
+ TPs=inviso_tool_lib:make_patterns(SafetyCatches,Opts,Dbg,Modules,F,A,MS),
+ h_tp_do_tps(CtrlNode,Nodes,TPs,PatternFunc,Dbg);
+ module ->
+ TPs=inviso_tool_lib:make_patterns(SafetyCatches,Opts,Dbg,[Mod],F,A,MS),
+ h_tp_do_tps(CtrlNode,Nodes,TPs,PatternFunc,Dbg);
+ wildcard -> % Means do for all modules, no safety.
+ h_tp_do_tps(CtrlNode,Nodes,[{Mod,F,A,MS}],PatternFunc,Dbg);
+ {error,Reason} ->
+ {error,Reason}
+ end.
+
+%% Note that this function can never be called in the non-distributed case.
+h_tp_node_by_node(CtrlNode,PatternFunc,Dbg,[{Node,TPs}|Rest],Accum) ->
+ case h_tp_do_tps(CtrlNode,[Node],TPs,PatternFunc,Dbg) of
+ {ok,[{Node,Result}]} ->
+ h_tp_node_by_node(CtrlNode,PatternFunc,Dbg,Rest,[{Node,Result}|Accum]);
+ {error,Reason} -> % Failure, but don't stop.
+ h_tp_node_by_node(CtrlNode,PatternFunc,Dbg,Rest,[{Node,{error,Reason}}|Accum])
+ end;
+h_tp_node_by_node(_,_,_,[],Accum) ->
+ {ok,lists:reverse(Accum)}.
+
+%% Help function which does the actual call to the trace control component.
+%% Note that Nodes can be a list of nodes (including a single one) or
+%% ?LOCAL_RUNTIME if we are not distributed. The non-distributed case is otherwise
+%% detected by the 'void' CtrlNode.
+%% Returns {ok,[{Node,{ok,{NrOfFunctions,NrOfErrors}}},{Node,{error,Reason}},...]} or
+%% {error,Reason}. In the non-distributed case {ok,{NrOfFunctions,NrOfErrors}} or
+%% {error,Reason}.
+h_tp_do_tps(void,_Nodes,TPs,PatternFunc,Dbg) -> % Non distributed case!
+ inviso_tool_lib:debug(tp,Dbg,[TPs,PatternFunc]),
+ case inviso:PatternFunc(TPs) of
+ {ok,Result} -> % A list of [Nr1,Nr2,error,...].
+ {ok,
+ lists:foldl(fun(N,{AccNr,AccErr}) when integer(N) ->
+ {AccNr+N,AccErr};
+ (error,{AccNr,AccErr}) ->
+ {AccNr,AccErr+1}
+ end,
+ {0,0},
+ Result)};
+ {error,Reason} ->
+ {error,{PatternFunc,Reason}}
+ end;
+h_tp_do_tps(CtrlNode,Nodes,TPs,PatternFunc,Dbg) ->
+ inviso_tool_lib:debug(tp,Dbg,[Nodes,TPs,PatternFunc]),
+ case inviso_tool_lib:inviso_cmd(CtrlNode,PatternFunc,[Nodes,TPs]) of
+ {ok,Result} -> % Result is [{Node,Result},...].
+ {ok,
+ lists:map(fun({Node,{ok,Res}})->
+ {Node,lists:foldl(fun(N,{ok,{AccNr,AccErr}}) when integer(N) ->
+ {ok,{AccNr+N,AccErr}};
+ (error,{AccNr,AccErr}) ->
+ {ok,{AccNr,AccErr+1}}
+ end,
+ {ok,{0,0}},
+ Res)};
+ ({_Node,{error,Reason}})->
+ {error,Reason}
+ end,
+ Result)};
+ {error,Reason} ->
+ {error,{PatternFunc,Reason}}
+ end.
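+%% Worked example (illustrative): in the non-distributed clause above, a result
+%% list [5,0,error,3] from inviso:PatternFunc/1 folds into {ok,{8,1}}, i.e. eight
+%% matched functions in total and one pattern entry in error.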
+%% ------------------------------------------------------------------------------
+
+%% ------------------------------------------------------------------------------
+%% Help functions for removing trace-patterns.
+%% ------------------------------------------------------------------------------
+
+%% NOT IMPLEMENTED YET.
+h_ctp(Node,PatternFunc,Mod,F,A,LD) ->
+ tbd.
+%% ------------------------------------------------------------------------------
+
+
+%% ------------------------------------------------------------------------------
+%% Help functions for calling the trace information facility.
+%% ------------------------------------------------------------------------------
+
+
+%% Function handling the meta trace pattern for capturing registration of local
+%% process names.
+h_tpm_localnames(CtrlNode,Nodes,RTStates,ACTstorage) ->
+ AvailableNodes=get_all_available_nodes_rtstates(RTStates),
+ {Nodes3,FaultyNodes}=remove_nodes_not_ours(Nodes,AvailableNodes),
+ case inviso_tool_lib:inviso_cmd(CtrlNode,tpm_localnames,[Nodes3]) of
+ {ok,Result} -> % Successful enough to update the activity storage!
+ NewACTstorage=add_tpm_actstorage(Result,tpm_localnames,[],ACTstorage),
+ ErrorResult=lists:map(fun(N)->{N,{error,not_available}} end,FaultyNodes),
+ {{ok,ErrorResult++Result},NewACTstorage};
+ {error,Reason} -> % If general failure, do not modify storage.
+ {{error,Reason},ACTstorage}
+ end.
+%% ------------------------------------------------------------------------------
+
+%% Function calling meta trace functions for specified nodes. This function is
+%% intended for use with all tpm function calls: init_tpm, tpm, tpm_ms, ctpm_ms and
+%% ctpm.
+%% Note that we must store called meta trace functions and their parameters in the
+%% activity storage in order to be able to redo them in case of a reactivate.
+h_all_tpm(CtrlNode,Nodes,TpmCmd,InvisoCmdParams,RTStates,ACTstorage) ->
+ AvailableNodes=get_all_available_nodes_rtstates(RTStates),
+ {Nodes3,FaultyNodes}=remove_nodes_not_ours(Nodes,AvailableNodes),
+ case inviso_tool_lib:inviso_cmd(CtrlNode,TpmCmd,[Nodes3|InvisoCmdParams]) of
+ {ok,Result} -> % Successful enough to update the activity storage!
+ NewACTstorage=add_tpm_actstorage(Result,TpmCmd,InvisoCmdParams,ACTstorage),
+ ErrorResult=lists:map(fun(N)->{N,{error,not_available}} end,FaultyNodes),
+ {{ok,ErrorResult++Result},NewACTstorage};
+ {error,Reason} -> % If general failure, do not modify storage.
+ {{error,Reason},ACTstorage}
+ end.
+%% ------------------------------------------------------------------------------
+
+
+%% ------------------------------------------------------------------------------
+%% Help functions for set trace flags.
+%% ------------------------------------------------------------------------------
+
+%% Help function which sets the trace flags in TraceConfList for all nodes
+%% mentioned in Nodes. Note that the non-distributed case is handled with Nodes='all'.
+%% Returns {Reply,NewACTstorage} where Reply is whatever shall be returned to the
+%% caller and NewACTstorage is the activity storage modified with the flags added
+%% to the corresponding nodes.
+h_tf(void,_Nodes,TraceConfList,ACTstorage,_RTStates) -> % The non-distributed case.
+ Reply=inviso:tf(TraceConfList),
+ NewACTstorage=add_tf_actstorage([{?LOCAL_RUNTIME,Reply}],tf,TraceConfList,ACTstorage),
+ {Reply,NewACTstorage};
+h_tf(CtrlNode,all,TraceConfList,ACTstorage,RTStates) ->
+ AllNodes=get_all_session_nodes_rtstates(RTStates),
+ h_tf(CtrlNode,AllNodes,TraceConfList,ACTstorage,RTStates);
+h_tf(CtrlNode,Nodes,TraceConfList,ACTstorage,_RTStates) ->
+ case inviso_tool_lib:inviso_cmd(CtrlNode,tf,[Nodes,TraceConfList]) of
+ {ok,Result} -> % Successful enough to update the activity storage!
+ NewACTstorage=add_tf_actstorage(Result,tf,TraceConfList,ACTstorage),
+ {{ok,Result},NewACTstorage};
+ {error,Reason} -> % If general failure, do not modify actstorage.
+ {{error,Reason},ACTstorage}
+ end.
+%% ------------------------------------------------------------------------------
+
+%% ------------------------------------------------------------------------------
+%% Help functions to stop_session.
+%% ------------------------------------------------------------------------------
+
+%% This function fetches all local log-files using our stored tracerdata. Note
+%% that there are two major ways of transferring logfiles: either via distributed
+%% Erlang or via a common filesystem (like NFS). The default is distributed Erlang,
+%% but there may be info in the RTStates structure about a common file-system.
+%% Returns {FailedNodes,FetchedFileNames} where FailedNodes is a list of
+%% nodenames where problems occurred. Note that a problem does not necessarily
+%% mean that no files were copied.
+%% FetchedFileNames contains one or two of the tuples {trace_log,Files} and/or
+%% {ti_log,Files}, listing all files successfully fetched. Note that the
+%% list of fetched files contains sublists of filenames, one for each node and
+%% tracerdata.
+%% In the non-distributed system we always use copy (since the files always
+%% reside locally).
+transfer_logfiles(RTStates,CtrlNode,Dir,Prefix,TRDstorage,Dbg,AvailableNodes) ->
+ if
+ CtrlNode==void -> % When non-distributed, always copy!
+ fetch_logfiles_copy(CtrlNode,Dir,Prefix,TRDstorage,Dbg,[?LOCAL_RUNTIME]);
+ true -> % The distributed case.
+ {FetchNodes,CopyNodes}=find_logfile_transfer_methods(AvailableNodes,RTStates),
+ {FailedFetchNodes,FetchedFiles}=
+ case fetch_logfiles_distributed(CtrlNode,Dir,Prefix,TRDstorage,Dbg,FetchNodes) of
+ {ok,Failed,Files} -> % So far no disasters.
+ {Failed,Files};
+ {error,Reason} -> % Means all fetch-nodes failed!
+ inviso_tool_lib:debug(transfer_logfiles,Dbg,[FetchNodes,Reason]),
+ {lists:map(fun(N)->{N,error} end,FetchNodes),[]}
+ end,
+ {FailedCopyNodes,CopiedFiles}=
+ fetch_logfiles_copy(CtrlNode,Dir,Prefix,TRDstorage,Dbg,CopyNodes),
+ {FailedFetchNodes++FailedCopyNodes,FetchedFiles++CopiedFiles}
+ end.
+
+%% Help function which finds out which nodes we share a common file system with
+%% and from which nodes we must transfer files using distributed Erlang.
+%% Returns {DistributedNodes,CopyNodes} where CopyNodes is [{Node,CopyFromDir},...].
+find_logfile_transfer_methods(Nodes,RTStates) ->
+ find_logfile_transfer_methods_2(Nodes,RTStates,[],[]).
+
+find_logfile_transfer_methods_2([Node|Rest],RTStates,FetchAcc,CopyAcc) ->
+ {ok,Opts}=get_opts_rtstates(Node,RTStates), % Node must be in RTStates!
+ case lists:keysearch(?COPY_LOG_FROM,1,Opts) of
+ {value,{_,FromDir}} when list(FromDir) -> % Node has common filesystem.
+ find_logfile_transfer_methods_2(Rest,RTStates,FetchAcc,[{Node,FromDir}|CopyAcc]);
+ {value,_} -> % Can't understand dir option.
+ find_logfile_transfer_methods_2(Rest,RTStates,[Node|FetchAcc],CopyAcc);
+ false -> % Then we want to use fetch instead.
+ find_logfile_transfer_methods_2(Rest,RTStates,[Node|FetchAcc],CopyAcc)
+ end;
+find_logfile_transfer_methods_2([],_,FetchAcc,CopyAcc) ->
+ {FetchAcc,CopyAcc}.
+%% ------------------------------------------------------------------------------
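+%% Example (illustrative; the path is hypothetical): a node whose log directory
+%% is reachable through a common file system, e.g. NFS, can be given the option
+%% {copy_log_from,"/net/node1/inviso_logs"} in its Opts. It will then be sorted
+%% into the copy-nodes here rather than the fetch-nodes.
+%% ------------------------------------------------------------------------------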
+
+%% Help function which transfers all local logfiles according to the tracerdata
+%% stored for the nodes in Nodes.
+%% Returns {ok,FailedNodes,FileNodeSpecs} or {error,Reason}.
+%% FailedNodes is a list of nodes where fetching logs did not succeed, partially
+%% or not at all.
+%% FileNames is a list of lists of actually fetched files (the names as they are
+%% here, including Dir). The sublists contain files which belong together.
+fetch_logfiles_distributed(CtrlNode,Dir,Prefix,TRDstorage,Dbg,Nodes) ->
+ LogSpecList=build_logspeclist(Nodes,TRDstorage),
+ case inviso_fetch_log(inviso_tool_lib:inviso_cmd(CtrlNode,
+ fetch_log,
+ [LogSpecList,Dir,Prefix])) of
+ {ok,Result} ->
+ Files=get_all_filenames_fetchlog_result(Result,Dbg),
+ FailedNodes=get_all_failednodes_fetchlog_result(Result),
+ {ok,FailedNodes,Files};
+ {error,Reason} -> % Some general failure!
+ {error,{fetch_log,Reason}}
+ end.
+
+%% Help function which constructs a list of {Node,TracerData} tuples for all nodes in Nodes.
+%% Note that there may be more than one tracerdata for a node, resulting in multiple
+%% tuples for that node.
+build_logspeclist(Nodes,TRDstorage) ->
+ build_logspeclist_2(Nodes,TRDstorage,[]).
+
+build_logspeclist_2([Node|Rest],TRDstorage,Acc) ->
+ TRDlist=find_tracerdata_for_node_trd(Node,TRDstorage), % A list of all tracerdata.
+ build_logspeclist_2(Rest,
+ TRDstorage,
+ [lists:map(fun(TRD)->{Node,TRD} end,TRDlist)|Acc]);
+build_logspeclist_2([],_,Acc) ->
+ lists:flatten(Acc).
+
+%% Help function which translates inviso:fetch_log return values to what I
+%% want!
+inviso_fetch_log({error,Reason}) ->
+ {error,Reason};
+inviso_fetch_log({_Success,ResultList}) ->
+ {ok,ResultList}.
+
+%% Help function which collects all filenames mentioned in a noderesult structure.
+%% The files may or may not be complete.
+%% Returns a list of lists of filenames. Each sublist contains files which belong
+%% together, e.g. because they are a wrap-set.
+get_all_filenames_fetchlog_result(NodeResult,Dbg) ->
+ get_all_filenames_fetchlog_result_2(NodeResult,Dbg,[]).
+
+get_all_filenames_fetchlog_result_2([{Node,{Success,FileInfo}}|Rest],Dbg,Accum)
+ when Success=/=error, list(FileInfo) ->
+ SubAccum=get_all_filenames_fetchlog_result_3(FileInfo,[]),
+ get_all_filenames_fetchlog_result_2(Rest,Dbg,[{Node,SubAccum}|Accum]);
+get_all_filenames_fetchlog_result_2([{Node,{error,FReason}}|Rest],Dbg,Accum) ->
+ inviso_tool_lib:debug(fetch_files,Dbg,[Node,FReason]),
+ get_all_filenames_fetchlog_result_2(Rest,Dbg,Accum);
+get_all_filenames_fetchlog_result_2([],_Dbg,Accum) ->
+ Accum.
+
+get_all_filenames_fetchlog_result_3([{FType,Files}|Rest],SubAccum) ->
+ FilesOnly=lists:foldl(fun({ok,FName},Acc)->[FName|Acc];(_,Acc)->Acc end,[],Files),
+ get_all_filenames_fetchlog_result_3(Rest,[{FType,FilesOnly}|SubAccum]);
+get_all_filenames_fetchlog_result_3([],SubAccum) ->
+ SubAccum.
+
+%% Help function which traverses a noderesult and builds a list as return
+%% value containing the nodenames of all nodes not being complete.
+%% Note that a node may occur multiple times in the input since we may have fetched
+%% logfiles for several tracerdata from the same node. Makes sure the list contains
+%% unique node names.
+%% Returns a list of nodes.
+get_all_failednodes_fetchlog_result(NodeResult) ->
+ get_all_failednodes_fetchlog_result_2(NodeResult,[]).
+
+get_all_failednodes_fetchlog_result_2([{_Node,{complete,_}}|Rest],Acc) ->
+ get_all_failednodes_fetchlog_result_2(Rest,Acc);
+get_all_failednodes_fetchlog_result_2([{Node,{_Severity,_}}|Rest],Acc) ->
+ case lists:member(Node,Acc) of
+ true -> % Already in the list.
+ get_all_failednodes_fetchlog_result_2(Rest,Acc);
+ false -> % Not in Acc, add it!
+ get_all_failednodes_fetchlog_result_2(Rest,[Node|Acc])
+ end;
+get_all_failednodes_fetchlog_result_2([],Acc) ->
+ Acc.
+%% ------------------------------------------------------------------------------
+
+%% Help function which copies files from one location to Dir and at the same time
+%% adds the Prefix to the filenames. NodeSpecs contains full paths to the files. The
+%% reason the node information is still part of NodeSpecs is that otherwise we
+%% cannot report faulty nodes. Note that one node may occur multiple times since
+%% there may be more than one tracerdata for a node.
+%% Returns {FailedNodes,Files} where FailedNodes is a list of nodes where problems
+%% occurred. Files is a list of [{Node,[{FType,FileNames},...]},...].
+fetch_logfiles_copy(CtrlNode,Dir,Prefix,TRDstorage,Dbg,NodeSpecs) ->
+ CopySpecList=build_copylist(CtrlNode,Dbg,NodeSpecs,TRDstorage),
+ fetch_logfiles_copy_2(Dir,Prefix,Dbg,CopySpecList,[],[]).
+
+fetch_logfiles_copy_2(Dir,Prefix,Dbg,[{Node,CopySpecs}|Rest],FailedNodes,Files) ->
+ case fetch_logfiles_copy_3(Dir,Prefix,Dbg,CopySpecs,[],0) of
+ {0,LocalFiles} -> % Copy went ok and zero errors.
+ fetch_logfiles_copy_2(Dir,Prefix,Dbg,Rest,FailedNodes,[{Node,LocalFiles}|Files]);
+ {_N,LocalFiles} -> % Copied files, but some went wrong.
+ case lists:member(Node,FailedNodes) of
+ true -> % Node already in FailedNodes.
+ fetch_logfiles_copy_2(Dir,Prefix,Dbg,Rest,FailedNodes,
+ [{Node,LocalFiles}|Files]);
+ false -> % Node not marked as failed, yet.
+ fetch_logfiles_copy_2(Dir,Prefix,Dbg,Rest,[Node|FailedNodes],
+ [{Node,LocalFiles}|Files])
+ end
+ end;
+fetch_logfiles_copy_2(_,_,_,[],FailedNodes,Files) ->
+ {FailedNodes,Files}. % The return value from fetch_logfiles_copy.
+
+fetch_logfiles_copy_3(Dir,Prefix,Dbg,[{FType,RemoteFiles}|Rest],Results,Errors) ->
+ {Err,LocalFiles}=fetch_logfiles_copy_3_1(Dir,Prefix,Dbg,RemoteFiles,[],0),
+ fetch_logfiles_copy_3(Dir,Prefix,Dbg,Rest,[{FType,LocalFiles}|Results],Errors+Err);
+fetch_logfiles_copy_3(_,_,_,[],Results,Errors) ->
+ {Errors,Results}.
+
+%% For each file of one file-type (e.g. trace_log).
+fetch_logfiles_copy_3_1(Dir,Prefix,Dbg,[File|Rest],LocalFiles,Errors) ->
+ DestName=Prefix++filename:basename(File),
+ Destination=filename:join(Dir,DestName),
+ case do_copy_file(File,Destination) of
+ ok ->
+ fetch_logfiles_copy_3_1(Dir,Prefix,Dbg,Rest,[DestName|LocalFiles],Errors);
+ {error,Reason} ->
+ inviso_tool_lib:debug(copy_files,Dbg,[File,Destination,Reason]),
+ fetch_logfiles_copy_3_1(Dir,Prefix,Dbg,Rest,LocalFiles,Errors+1)
+ end;
+fetch_logfiles_copy_3_1(_,_,_,[],LocalFiles,Errors) ->
+ {Errors,LocalFiles}.
+
+%% Help function which builds a [{Node,[{Type,ListOfRemoteFiles},...]},...] list,
+%% where Type is trace_log or ti_log and each entry in ListOfRemoteFiles
+%% is a complete path to a file to be copied.
+build_copylist(CtrlNode,Dbg,NodeSpecList,TRDstorage) ->
+ build_copylist_2(CtrlNode,Dbg,NodeSpecList,TRDstorage,[]).
+
+%% For each node specified in the NodeSpecList.
+build_copylist_2(CtrlNode,Dbg,[{Node,SourceDir}|Rest],TRDstorage,Acc) ->
+ TRDlist=find_tracerdata_for_node_trd(Node,TRDstorage),
+ CopySpecList=build_copylist_3(CtrlNode,Dbg,SourceDir,Node,TRDlist),
+ build_copylist_2(CtrlNode,Dbg,Rest,TRDstorage,[CopySpecList|Acc]);
+build_copylist_2(_,_,[],_,Acc) ->
+ lists:flatten(Acc).
+
+%% For each tracerdata found for the node.
+build_copylist_3(void,Dbg,SourceDir,Node,[TRD|Rest]) -> % The non-distributed case.
+ case inviso:list_logs(TRD) of
+ {ok,FileSpec} when list(FileSpec) -> % [{trace_log,Dir,Files},...]
+ NewFileSpec=build_copylist_4(SourceDir,FileSpec,[]),
+ [{Node,NewFileSpec}|build_copylist_3(void,Dbg,SourceDir,Node,Rest)];
+ {ok,no_log} -> % This tracerdata is not associated with any log.
+ build_copylist_3(void,Dbg,SourceDir,Node,Rest);
+ {error,Reason} ->
+ inviso_tool_lib:debug(list_logs,Dbg,[Node,TRD,Reason]),
+ build_copylist_3(void,Dbg,SourceDir,Node,Rest)
+ end;
+build_copylist_3(CtrlNode,Dbg,SourceDir,Node,[TRD|Rest]) -> % The distributed case.
+ case inviso_tool_lib:inviso_cmd(CtrlNode,list_logs,[[{Node,TRD}]]) of
+ {ok,[{Node,{ok,FileSpec}}]} when list(FileSpec) ->
+ NewFileSpec=build_copylist_4(SourceDir,FileSpec,[]),
+ [{Node,NewFileSpec}|build_copylist_3(CtrlNode,Dbg,SourceDir,Node,Rest)];
+ {ok,[{Node,{ok,no_log}}]} -> % It relays to another node, no files!
+ build_copylist_3(CtrlNode,Dbg,SourceDir,Node,Rest);
+ {ok,[{Node,{error,Reason}}]} ->
+ inviso_tool_lib:debug(list_logs,Dbg,[Node,TRD,Reason]),
+ build_copylist_3(CtrlNode,Dbg,SourceDir,Node,Rest);
+ {error,Reason} -> % Some general failure.
+ inviso_tool_lib:debug(list_logs,Dbg,[Node,TRD,Reason]),
+ build_copylist_3(CtrlNode,Dbg,SourceDir,Node,Rest)
+ end;
+build_copylist_3(_,_,_,_,[]) ->
+ [].
+
+%% Help function which makes a [{Type,Files},...] list where each file in Files
+%% has its full path as seen from our file-system.
+build_copylist_4(SourceDir,[{Type,_Dir,Files}|Rest],Accum) ->
+ NewFiles=
+ lists:foldl(fun(FName,LocalAcc)->[filename:join(SourceDir,FName)|LocalAcc] end,
+ [],
+ Files),
+ build_copylist_4(SourceDir,Rest,[{Type,NewFiles}|Accum]);
+build_copylist_4(_,[],Accum) ->
+ Accum.
+
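+%% Example only (hypothetical node and file names, not part of this module):
+%% the copy list built by build_copylist/4 has roughly this shape.
+example_copylist() ->
+    [{'node1@host',[{trace_log,["/logs/node1-trace.log"]},
+                    {ti_log,["/logs/node1.ti"]}]}].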
+
+%% Help function which copies a file using os:cmd.
+%% Returns 'ok' or {error,Reason}.
+do_copy_file(Source,Destination) ->
+ case os:type() of
+ {win32,_} ->
+ os:cmd("copy "++Source++" "++Destination), % Perhaps a test on success?
+ ok;
+ {unix,_} ->
+ os:cmd("cp "++Source++" "++Destination), % Perhaps a test on success?
+ ok
+ end.
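+
+%% Example only (hypothetical alternative, not used by this module): the
+%% "test on success" hinted at above could be done portably with file:copy/2,
+%% which returns {ok,BytesCopied} or {error,Reason} on all platforms.
+do_copy_file_checked(Source,Destination) ->
+    case file:copy(Source,Destination) of
+        {ok,_BytesCopied} ->
+            ok;
+        {error,Reason} ->
+            {error,Reason}
+    end.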
+%% ------------------------------------------------------------------------------
+
+
+%% ------------------------------------------------------------------------------
+
+%% ==============================================================================
+%% Various help functions.
+%% ==============================================================================
+
+%% Help function going through the Nodes list and checking that only nodes
+%% mentioned in OurNodes get returned. It also makes the nodes in the return
+%% value unique. (See the worked example after remove_nodes_not_ours_2 below.)
+remove_nodes_not_ours(Nodes,OurNodes) ->
+ remove_nodes_not_ours_2(Nodes,OurNodes,[],[]).
+
+remove_nodes_not_ours_2([Node|Rest],OurNodes,OurAcc,OtherAcc) ->
+ case lists:member(Node,OurNodes) of
+ true -> % Ok it is one of our nodes.
+ case lists:member(Node,OurAcc) of
+ true -> % Already in the list, skip.
+ remove_nodes_not_ours_2(Rest,OurNodes,OurAcc,OtherAcc);
+ false ->
+ remove_nodes_not_ours_2(Rest,OurNodes,[Node|OurAcc],OtherAcc)
+ end;
+ false ->
+ case lists:member(Node,OtherAcc) of
+ true ->
+ remove_nodes_not_ours_2(Rest,OurNodes,OurAcc,OtherAcc);
+ false ->
+ remove_nodes_not_ours_2(Rest,OurNodes,OurAcc,[Node|OtherAcc])
+ end
+ end;
+remove_nodes_not_ours_2([],_,OurAcc,OtherAcc) ->
+ {lists:reverse(OurAcc),lists:reverse(OtherAcc)}.
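+
+%% Worked example (hypothetical node names):
+%%   remove_nodes_not_ours(['n1@h','n2@h','n1@h','x@h'],['n1@h','n2@h'])
+%% returns {['n1@h','n2@h'],['x@h']} -- duplicates removed, order preserved.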
+%% ------------------------------------------------------------------------------
+
+%% Help function which returns 'true' or 'false' depending on whether TracerData
+%% is meant to be used by the session handler (true) or is supposed to be passed
+%% on to the trace system.
+is_tool_internal_tracerdata(_) -> % CURRENTLY NO INTERNAL TRACER DATA!
+ false.
+%% ------------------------------------------------------------------------------
+
+%% Help function which checks that all nodes in the first list of nodes exist
+%% in the second list of nodes. Returns 'true' or 'false', the latter if at
+%% least one incorrect node was found.
+check_our_nodes([Node|Rest],AllNodes) ->
+ case lists:member(Node,AllNodes) of
+ true ->
+ check_our_nodes(Rest,AllNodes);
+ false -> % Then we can stop right here.
+ false
+ end;
+check_our_nodes([],_) ->
+ true.
+%% ------------------------------------------------------------------------------
+
+%% Help function which checks that a directory actually exists. Returns 'true' or
+%% 'false'.
+check_directory_exists(Dir) ->
+ case file:read_file_info(Dir) of
+ {ok,#file_info{type=directory}} ->
+ true;
+ _ -> % In all other cases it is not valid.
+ false
+ end.
+%% ------------------------------------------------------------------------------
+
+%% This function stops the tracing on all nodes in Nodes. Preferably Nodes is a list
+%% of tracing runtime components only. Not that it makes much difference, since the
+%% return value does not reflect how stopping the individual nodes went.
+%% Returns 'ok' or {error,Reason}, the latter only in case of general failure.
+stop_all_tracing(void,Dbg,[?LOCAL_RUNTIME]) -> % The non-distributed case, and is tracing.
+ case inviso:stop_tracing() of
+ {ok,_State} ->
+ ok;
+ {error,Reason} -> % We actually don't care.
+ inviso_tool_lib:debug(stop_tracing,Dbg,[?LOCAL_RUNTIME,Reason]),
+ ok
+ end;
+stop_all_tracing(void,_,_) -> % There is no local runtime started.
+ ok;
+stop_all_tracing(CtrlNode,Dbg,Nodes) ->
+ case inviso_tool_lib:inviso_cmd(CtrlNode,stop_tracing,[Nodes]) of
+ {ok,Result} -> % The result is only used for debug.
+ Failed=lists:foldl(fun({N,{error,Reason}},Acc)->[{N,{error,Reason}}|Acc];
+ (_,Acc)->Acc
+ end,
+ [],
+ Result),
+ if
+ Failed==[] ->
+ ok;
+ true ->
+ inviso_tool_lib:debug(stop_tracing,Dbg,[Nodes,Failed]),
+ ok
+ end;
+ {error,Reason} ->
+ {error,{stop_tracing,Reason}}
+ end.
+%% ------------------------------------------------------------------------------
+
+%% Help function removing all local logs using the tracerdata to determine what
+%% logs to remove from where.
+%% There is no significant return value since it is not really clear what to do
+%% if removal went wrong. The function can, however, make debug reports.
+remove_all_local_logs(CtrlNode,TRDstorage,Nodes,Dbg) ->
+ LogSpecList=build_logspeclist_remove_logs(Nodes,TRDstorage),
+ case inviso_tool_lib:inviso_cmd(CtrlNode,delete_log,[LogSpecList]) of
+ {ok,Results} ->
+ case look_for_errors_resultlist(Results) of
+ [] -> % No errors found in the result!
+ true;
+ Errors ->
+ inviso_tool_lib:debug(remove_all_local_logs,Dbg,[Errors]),
+ true
+ end;
+ {error,Reason} -> % Some general error.
+ inviso_tool_lib:debug(remove_all_local_logs,Dbg,[{error,Reason}]),
+ true
+ end.
+
+%% Help function which puts together a list of {Node,Tracerdata} tuples. Note that
+%% we must build one tuple for each tracerdata for one node.
+build_logspeclist_remove_logs(Nodes,TRDstorage) ->
+ [{Node,TracerData}||Node<-Nodes,TracerData<-find_tracerdata_for_node_trd(Node,TRDstorage)].
+%% ------------------------------------------------------------------------------
+
+%% Help function which traverses a resultlist from an inviso function. Such lists
+%% are built up as [{Node,SubResults},...] where SubResults is a list of
+%% {FType,FileList} tuples, one per file-type (e.g. trace_log), and each element
+%% of FileList is either {error,Reason} or {ok,FileName}.
+%% Returns a list of {Node,[{error,Reason},...]}. (A worked example follows
+%% look_for_errors_resultlist_2 below.)
+look_for_errors_resultlist([{Node,{error,Reason}}|Rest]) ->
+ [{Node,{error,Reason}}|look_for_errors_resultlist(Rest)];
+look_for_errors_resultlist([{Node,{ok,NResults}}|Rest]) when list(NResults) ->
+ case look_for_errors_resultlist_2(NResults,[]) of
+ [] ->
+ look_for_errors_resultlist(Rest);
+ Errors -> % A list of lists.
+ [{Node,lists:flatten(Errors)}|look_for_errors_resultlist(Rest)]
+ end;
+look_for_errors_resultlist([_|Rest]) ->
+ look_for_errors_resultlist(Rest);
+look_for_errors_resultlist([]) ->
+ [].
+
+look_for_errors_resultlist_2([{_FType,NSubResult}|Rest],Accum) ->
+ case lists:filter(fun({error,_Reason})->true;(_)->false end,NSubResult) of
+ [] -> % No errors for this node.
+ look_for_errors_resultlist_2(Rest,Accum);
+ Errors -> % A list of at least one error.
+ look_for_errors_resultlist_2(Rest,[Errors|Accum])
+ end;
+look_for_errors_resultlist_2([],Accum) ->
+ Accum.
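+
+%% Worked example (hypothetical values): given the resultlist
+%%   [{'n1@h',{ok,[{trace_log,[{ok,"f1"},{error,enoent}]}]}}]
+%% look_for_errors_resultlist/1 returns [{'n1@h',[{error,enoent}]}].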
+%% ------------------------------------------------------------------------------
+
+
+%% ------------------------------------------------------------------------------
+%% Functions working on the loopdata structure.
+%% Its main purpose is to store information about runtime components participating
+%% in the session and their current status.
+%% ------------------------------------------------------------------------------
+
+-record(ld,{parent,
+ ctrlnode,
+ ctrlpid, % To where to send inviso cmd.
+ rtstates,
+ tracerdata,
+ safetycatches,
+ dbg,
+ actstorage % Activity storage, for reactivate.
+ }).
+
+%% Function creating the initial loopdata structure (the #ld record above).
+%% The RTStates substructure is [{Node,State},...].
+%%
+%% The tracerdata table is a bag simply because, if we try to insert the same
+%% tracerdata for a node twice, we still end up with only one entry. This is
+%% useful when we insert tracerdata ourselves, since the same tracerdata will
+%% also arrive as a state-change. (See the small illustration after mk_ld/8 below.)
+mk_ld(Parent,CtrlNode,CtrlPid,RTStates,NodeParams,OtherNodes,SafetyCatches,Dbg) ->
+ TRDtableName=list_to_atom("inviso_tool_sh_trdstorage_"++pid_to_list(self())),
+ TRDtid=ets:new(TRDtableName,[bag]),
+ ACTtableName=list_to_atom("inviso_tool_sh_actstorage_"++pid_to_list(self())),
+ ACTtid=ets:new(ACTtableName,[bag]),
+ mk_ld_fill_tracerdata(CtrlNode,TRDtid,NodeParams,OtherNodes), % Fill the ETS table.
+ #ld{parent=Parent, % The tool main process.
+ ctrlnode=CtrlNode, % Node name where the control component is.
+ ctrlpid=CtrlPid, % The process id of the control component.
+ rtstates=RTStates, % All nodes and their state/status.
+ tracerdata=TRDtid,
+ safetycatches=SafetyCatches,
+ dbg=Dbg,
+ actstorage=ACTtid
+ }.
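+
+%% Example only (hypothetical table name and values, not used by this module):
+%% illustrates the 'bag' property relied upon above -- inserting an identical
+%% object twice still yields a single entry.
+example_bag_semantics() ->
+    T=ets:new(example_bag,[bag]),
+    true=ets:insert(T,{n1,some_tracerdata}),
+    true=ets:insert(T,{n1,some_tracerdata}),
+    [{n1,some_tracerdata}]=ets:lookup(T,n1),
+    true=ets:delete(T).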
+
+%% Help function which inserts tracer data for the nodes. Note that we can get
+%% tracer data either from the return value of init_tracing or by asking the
+%% node for it. The latter is necessary for the nodes which were marked not to
+%% be initiated by the session handler. This may be because those nodes have
+%% autostarted.
+mk_ld_fill_tracerdata(CtrlNode,TId,NodeParams,OtherNodes) ->
+ mk_ld_fill_tracerdata_nodeparams(TId,NodeParams),
+ mk_ld_fill_tracerdata_othernodes(CtrlNode,TId,OtherNodes).
+
+mk_ld_fill_tracerdata_nodeparams(TId,[{Node,TracerData}|Rest]) ->
+ ets:insert(TId,{Node,TracerData}),
+ mk_ld_fill_tracerdata_nodeparams(TId,Rest);
+mk_ld_fill_tracerdata_nodeparams(_,[]) ->
+ ok.
+
+mk_ld_fill_tracerdata_othernodes(_,_,[]) -> % Then not necessary to do anything.
+ ok;
+mk_ld_fill_tracerdata_othernodes(void,TId,[Node]) -> % The non-distributed case.
+ case inviso:get_tracerdata() of
+ {error,_Reason} -> % Perhaps in state new or disconnected.
+ ok; % Do nothing.
+ {ok,TracerData} ->
+ ets:insert(TId,{Node,TracerData})
+ end;
+mk_ld_fill_tracerdata_othernodes(CtrlNode,TId,Nodes) ->
+ case inviso_tool_lib:inviso_cmd(CtrlNode,get_tracerdata,[Nodes]) of
+ {ok,Results} ->
+ mk_ld_fill_tracerdata_othernodes_2(TId,Results);
+ {error,_Reason} -> % Strange, we will probably crash later.
+ ok
+ end.
+
+mk_ld_fill_tracerdata_othernodes_2(TId,[{_Node,{ok,no_tracerdata}}|Rest]) ->
+ mk_ld_fill_tracerdata_othernodes_2(TId,Rest); % It was not initiated then!
+mk_ld_fill_tracerdata_othernodes_2(TId,[{Node,{ok,TracerData}}|Rest]) ->
+ ets:insert(TId,{Node,TracerData}),
+ mk_ld_fill_tracerdata_othernodes_2(TId,Rest);
+mk_ld_fill_tracerdata_othernodes_2(_,[]) ->
+ ok.
+%% ------------------------------------------------------------------------------
+
+get_ctrlnode_ld(#ld{ctrlnode=CtrlNode}) ->
+ CtrlNode.
+%% ------------------------------------------------------------------------------
+
+
+get_ctrlpid_ld(#ld{ctrlpid=CtrlPid}) ->
+ CtrlPid.
+%% ------------------------------------------------------------------------------
+
+get_rtstates_ld(#ld{rtstates=RTStates}) ->
+ RTStates.
+
+put_rtstates_ld(NewRTStates,LD) ->
+ LD#ld{rtstates=NewRTStates}.
+%% ------------------------------------------------------------------------------
+
+get_trdstorage_ld(#ld{tracerdata=TId}) ->
+ TId.
+
+put_trdstorage_ld(_NewTId,LD) ->
+ LD.
+%% ------------------------------------------------------------------------------
+
+%% Help function which adds the current tracerdata of node Node to the tracerdata
+%% storage. We only want to add tracerdata we have not seen before. We therefore
+%% avoid adding it if the node is already in state ?TRACING.
+%% Returns a new tracerdata (whatever it is)!
+add_current_tracerdata_ld(CtrlNode,Node,RTStates,TId) ->
+ case get_statestatus_rtstates(Node,RTStates) of
+ {ok,{?TRACING,_}} -> % Then we have already added the tracerdata.
+ TId; % Then do nothing.
+ {ok,_} -> % Since we were not tracing before.
+ case add_current_tracerdata_ld_fetchtracerdata(CtrlNode,Node) of
+ {ok,TracerData} ->
+ ets:insert(TId,{Node,TracerData});
+ no_tracerdata -> % Strange, how could we have become tracing?
+ ok;
+ {error,_Reason} -> % The node perhaps disconnected!?
+ ok
+ end;
+ false -> % Very strange, not our node!
+ ok % Do nothing.
+ end.
+
+add_current_tracerdata_ld_fetchtracerdata(void,_Node) ->
+ case inviso:get_tracerdata() of
+ {ok,TracerData} ->
+ {ok,TracerData};
+ {error,no_tracerdata} ->
+ no_tracerdata;
+ {error,Reason} ->
+ {error,Reason}
+ end;
+add_current_tracerdata_ld_fetchtracerdata(CtrlNode,Node) ->
+ case inviso_tool_lib:inviso_cmd(CtrlNode,get_tracerdata,[[Node]]) of
+ {ok,[{Node,{ok,TracerData}}]} ->
+ {ok,TracerData};
+ {ok,[{Node,{error,no_tracerdata}}]} ->
+ no_tracerdata;
+ {ok,[{Node,{error,Reason}}]} ->
+ {error,Reason};
+ {error,Reason} ->
+ {error,Reason}
+ end.
+%% ------------------------------------------------------------------------------
+
+
+get_safetycatches_ld(#ld{safetycatches=SCs}) ->
+ SCs.
+%% ------------------------------------------------------------------------------
+
+get_dbg_ld(#ld{dbg=Dbg}) ->
+ Dbg.
+%% ------------------------------------------------------------------------------
+
+get_actstorage_ld(#ld{actstorage=ACTstorage}) ->
+ ACTstorage.
+
+put_actstorage_ld(_NewACTstorage,LD) ->
+ LD.
+%% ------------------------------------------------------------------------------
+
+
+
+%% ------------------------------------------------------------------------------
+%% Functions working on the rtstates structure (which is a substructure of loopdata).
+%% It is a list:
+%%   [{Node,StateStatus,Opts},...]
+%% Node is either the node name of the runtime component's Erlang node or
+%% ?LOCAL_RUNTIME, as returned from the trace control component.
+%% StateStatus is {State,Status}, 'unavailable' or 'unknown'.
+%% Status is the return value from the trace control component,
+%% i.e. running | {suspended,Reason}.
+%% ------------------------------------------------------------------------------
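+
+%% Example only (hypothetical nodes; the state/status shapes are taken from the
+%% description above and from set_tracing_rtstates/2):
+example_rtstates() ->
+    [{'n1@host',{tracing,running},[]},
+     {'n2@host',{tracing,{suspended,overload}},[]},
+     {'n3@host',unavailable,[]}].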
+
+%% Function constructing an rtstates structure from a list of [{Node,StateStatus,Opts},...].
+to_rtstates(ListOfStates) when list(ListOfStates) ->
+ ListOfStates.
+%% ------------------------------------------------------------------------------
+
+%% Function which takes an rtstates structure and returns a list of [{Node,StateStatus},...].
+from_rtstates(RTStates) ->
+ RTStates.
+%% ------------------------------------------------------------------------------
+
+%% Function which takes an rtstates structure and a result as returned from
+%% init_tracing. The RTStates is modified for the nodes that changed state as a
+%% result of successful init_tracing.
+%% Returns a new RTStates.
+set_tracing_rtstates([E={Node,_StateStatus,Opts}|Rest],Result) ->
+ case lists:keysearch(Node,1,Result) of
+ {value,{_,ok}} -> % Means state-change to tracing!
+ [{Node,{tracing,running},Opts}|set_tracing_rtstates(Rest,Result)];
+ _ -> % Otherwise, leave it as is.
+ [E|set_tracing_rtstates(Rest,Result)]
+ end;
+set_tracing_rtstates([],_Result) ->
+ [].
+%% ------------------------------------------------------------------------------
+
+%% Function updating the state/status for a certain runtime component.
+%% Returns a new RTStates structure. Note that Node is not necessarily one of
+%% the nodes in the session, which means that Node shall not be added to RTStates
+%% if it is not already in there.
+statechange_rtstates(Node,State,Status,RTStates) when list(RTStates) ->
+ case lists:keysearch(Node,1,RTStates) of
+ {value,{_,_,Opts}} ->
+ lists:keyreplace(Node,1,RTStates,{Node,{State,Status},Opts});
+ _ -> % Then Node does not exist.
+ RTStates % Just keep it as is, as keyreplace would have done.
+ end.
+%% ------------------------------------------------------------------------------
+
+%% Function updating the state/status for a certain runtime component. The
+%% state/status is set to 'unavailable'.
+%% Returns a new RTStates structure.
+set_unavailable_rtstates(Node,RTStates) when list(RTStates) ->
+ case lists:keysearch(Node,1,RTStates) of
+ {value,{_,_,Opts}} ->
+ lists:keyreplace(Node,1,RTStates,{Node,unavailable,Opts});
+ _ -> % Then Node does not exist.
+ RTStates % Just keep it as is, as keyreplace would have done.
+ end.
+%% ------------------------------------------------------------------------------
+
+%% Function finding the statestatus associated with Node in the RTStates structure.
+%% Returns {ok,StateStatus} or 'false'.
+get_statestatus_rtstates(Node,RTStates) ->
+ case lists:keysearch(Node,1,RTStates) of
+ {value,{_,StateStatus,_}} ->
+ {ok,StateStatus};
+ false ->
+ false
+ end.
+%% ------------------------------------------------------------------------------
+
+%% Help function which returns a list of all nodes that are currently marked
+%% as available to us in the runtime state structure.
+get_all_available_nodes_rtstates(RTStates) ->
+ get_all_session_nodes_rtstates(lists:filter(fun({_N,unavailable,_})->false;
+ (_)->true
+ end,
+ RTStates)).
+%% ------------------------------------------------------------------------------
+
+%% Help function returning a list of all nodes belonging to this session.
+get_all_session_nodes_rtstates(RTStates) ->
+ lists:map(fun({Node,_,_})->Node end,RTStates).
+%% ------------------------------------------------------------------------------
+
+%% Function which returns a list of nodes that are indicated as tracing in the
+%% RTStates structure.
+get_all_tracing_nodes_rtstates(RTStates) ->
+ lists:map(fun({N,_,_})->N end,
+ lists:filter(fun({_,{tracing,_},_})->true;(_)->false end,RTStates)).
+%% ------------------------------------------------------------------------------
+
+%% Returns the options associated with Node in the RTStates structure.
+get_opts_rtstates(Node,RTStates) ->
+ case lists:keysearch(Node,1,RTStates) of
+ {value,{_,_,Opts}} ->
+ {ok,Opts};
+ false ->
+ false
+ end.
+
+%% ------------------------------------------------------------------------------
+%% Functions working on the tracerdata structure, which is a part of the loopdata.
+%% The tracerdata structure is an ETS-table of type bag storing:
+%% {Node,TracerData}.
+%% Note that there can of course be multiple entries for a node.
+%% ------------------------------------------------------------------------------
+
+%% Help function which takes a tracerdata loopdata structure and returns a list
+%% of all stored tracerdata for a certain Node.
+find_tracerdata_for_node_trd(Node,TRD) ->
+ case ets:lookup(TRD,Node) of
+ Result when list(Result) ->
+ lists:map(fun({_Node,TracerData})->TracerData end,Result);
+ _ -> % Should probably never happen.
+ []
+ end.
+%% ------------------------------------------------------------------------------
+
+
+%% ------------------------------------------------------------------------------
+%% Functions working on the activity storage structure, which is part of the
+%% loopdata. It stores entries about things that need to be "redone" in case
+%% of a reactivation of the node. The time order is also important.
+%% Note that for every ActivityType there must be a "handler" in the reactivation
+%% functionality.
+%%
+%% The structure is a bag of {Node,ActivityType,What}.
+%% ActivityType/What=tf/{Op,TraceConfList}|tpm/{Op,[Mod,Func,Arity,MS,CallFunc]}
+%% /{Op,[Mod,Func,Arity,MS,CallFunc,ReturnFunc]}
+%% /{Op,[]}
+%% TraceConfList=[{Proc,Flags},...]
+%% How=true|false
+%% ------------------------------------------------------------------------------
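+
+%% Example only (hypothetical values, following the shapes listed above):
+%% objects stored in the activity table could look like
+%%   {'n1@host',tf,{tf,[{all,[call]}]}}
+%%   {'n1@host',tpm,{tpm,[some_mod,some_func,2,[],some_callfunc]}}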
+
+%% Function that adds meta-pattern activities to the activity storage. Note
+%% that one of the parameters to the function is a return value from an
+%% inviso call. In that way we do not enter activities that were unsuccessful.
+%% Op can be either the setting or clearing of a meta pattern.
+%% Returns a new ACTstorage.
+add_tpm_actstorage([{Node,ok}|Rest],Op,InvisoCmdParams,ACTstorage) ->
+ true=ets:insert(ACTstorage,{Node,tpm,{Op,InvisoCmdParams}}),
+ add_tpm_actstorage(Rest,Op,InvisoCmdParams,ACTstorage);
+add_tpm_actstorage([_|Rest],Op,InvisoCmdParams,ACTstorage) ->
+ add_tpm_actstorage(Rest,Op,InvisoCmdParams,ACTstorage);
+add_tpm_actstorage([],_,_,ACTstorage) ->
+ ACTstorage.
+
+%% Function that adds process trace-flags to the activity storage. Note that one
+%% of the parameters is the return value from an inviso function, meaning that
+%% if the flags failed in their entirety, no activity will be saved. If only
+%% some of the flags failed, we will not go through the effort of trying to find
+%% out exactly which.
+%% Returns a new activity storage structure.
+add_tf_actstorage([{_Node,{error,_Reason}}|Rest],Op,TraceConfList,ACTstorage) ->
+ add_tf_actstorage(Rest,Op,TraceConfList,ACTstorage);
+add_tf_actstorage([{Node,_Result}|Rest],Op,TraceConfList,ACTstorage) ->
+ true=ets:insert(ACTstorage,{Node,tf,{Op,TraceConfList}}),
+ add_tf_actstorage(Rest,Op,TraceConfList,ACTstorage);
+add_tf_actstorage([],_,_,ACTstorage) ->
+ ACTstorage.
+%% ------------------------------------------------------------------------------
+
+%% Finds all activities associated with Node. Returns a list of them in the
+%% same order as they were inserted.
+get_activities_actstorage(Node,ACTstorage) ->
+ case ets:lookup(ACTstorage,Node) of
+ [] ->
+ false;
+ Result when list(Result) ->
+ {ok,lists:map(fun({_N,Type,What})->{Type,What} end,Result)}
+ end.
+%% ------------------------------------------------------------------------------
+
+%% Function removing all activity entries associated with Node. This is useful
+%% if the Node disconnects for instance.
+del_node_actstorage(Node,ACTstorage) ->
+ ets:delete(ACTstorage,Node),
+ ACTstorage.
+%% ------------------------------------------------------------------------------
+
diff --git a/lib/inviso/test/inviso_tool_SUITE.erl b/lib/inviso/test/inviso_tool_SUITE.erl
index d59e3b5fa8..6b16e506eb 100644
--- a/lib/inviso/test/inviso_tool_SUITE.erl
+++ b/lib/inviso/test/inviso_tool_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -22,7 +22,7 @@
%% properly.
%%
%% Authors:
-%% Lennart �hman, [email protected]
+%% Lennart Öhman, [email protected]
%% -----------------------------------------------------------------------------
-module(inviso_tool_SUITE).
@@ -103,21 +103,21 @@ end_per_testcase(_Case,Config) ->
?l test_server:stop_node(get_remotenode_config(inviso2,Config)),
?l test_server:timetrap_cancel(get_timetraphandle_config(Config)),
?l case whereis(inviso_tool) of % In case inviso_tool did not stop.
- Pid when pid(Pid) ->
+ Pid when is_pid(Pid) ->
?l io:format("Had to kill inviso_tool!~n",[]),
?l exit(Pid,kill);
_ ->
true
end,
?l case whereis(inviso_rt) of % In case we ran a runtime here.
- Pid2 when pid(Pid2) ->
+ Pid2 when is_pid(Pid2) ->
?l io:format("Had to kill inviso_rt!~n",[]),
?l exit(Pid2,kill);
_ ->
true
end,
?l case whereis(inviso_c) of % In case we ran the controll component here.
- Pid3 when pid(Pid3) ->
+ Pid3 when is_pid(Pid3) ->
?l io:format("Had to kill inviso_c!~n",[]),
?l exit(Pid3,kill);
_ ->
@@ -196,7 +196,7 @@ end_per_testcase(_Case,Config) ->
%% TEST CASE: Basic, distributed, start of inviso_tool with simple tracing.
dist_basic_1(doc) -> ["Simple test"];
dist_basic_1(suite) -> [];
-dist_basic_1(Config) when list(Config) ->
+dist_basic_1(Config) when is_list(Config) ->
RemoteNodes=get_remotenodes_config(Config),
[RegExpNode|_]=RemoteNodes,
CNode=node(),
@@ -325,7 +325,7 @@ dist_basic_1(Config) when list(Config) ->
inviso_tool:get_autostart_data(Nodes,{dependency,infinity}),
?l true=check_noderesults(Nodes,
fun({_N,{ok,{[{dependency,infinity}],{tdg,{_M,_F,TDlist}}}}})
- when list(TDlist)->
+ when is_list(TDlist)->
true;
(_) ->
false
@@ -554,7 +554,7 @@ dist_rtc(Config) when is_list(Config) ->
%% This test case tests mainly that reconnect and reinitiations of a node works.
dist_reconnect(doc) -> [""];
dist_reconnect(suite) -> [];
-dist_reconnect(Config) when list(Config) ->
+dist_reconnect(Config) when is_list(Config) ->
RemoteNodes=get_remotenodes_config(Config),
[RegExpNode|OtherNodes]=RemoteNodes,
CNode=node(),
@@ -595,7 +595,7 @@ dist_reconnect(Config) when list(Config) ->
%% than RexExpNode.
?l {ok,NodeResults1}=inviso_tool:inviso(tp,["application.*",module_info,0,[]]),
?l true=check_noderesults(OtherNodes,
- fun({_N,{ok,Ints}}) when list(Ints) ->
+ fun({_N,{ok,Ints}}) when is_list(Ints) ->
NrOfModules=lists:sum(Ints),
true;
(_) ->
@@ -610,9 +610,9 @@ dist_reconnect(Config) when list(Config) ->
%% Now it is time to restart the crashed node and reconnect it and then
%% finally reinitiate it.
- ?l RegExpNodeString=atom_to_list(RegExpNode),
- ?l {match,Pos,1}=regexp:first_match(RegExpNodeString,"@"),
- ?l RegExpNodeName=list_to_atom(lists:sublist(RegExpNodeString,Pos-1)),
+ ?l RegExpNodeString=atom_to_list(RegExpNode),
+ ?l [NodeNameString,_HostNameString] = string:tokens(RegExpNodeString,[$@]),
+ ?l RegExpNodeName=list_to_atom(NodeNameString),
?l test_server:start_node(RegExpNodeName,peer,[]),
?l ok=poll(net_adm,ping,[RegExpNode],pong,20),
?l SuiteDir=filename:dirname(code:which(?MODULE)),
@@ -626,7 +626,7 @@ dist_reconnect(Config) when list(Config) ->
?l ok=poll(rpc,
call,
[RegExpNode,erlang,whereis,[inviso_tool_test_proc]],
- fun(P) when pid(P) -> true;
+ fun(P) when is_pid(P) -> true;
(undefined) -> false
end,
10),
@@ -685,7 +685,7 @@ dist_reconnect(Config) when list(Config) ->
%% mark it as tracing-running.
dist_adopt(doc) -> [""];
dist_adopt(suite) -> [];
-dist_adopt(Config) when list(Config) ->
+dist_adopt(Config) when is_list(Config) ->
RemoteNodes=get_remotenodes_config(Config),
[RegExpNode|_]=RemoteNodes,
CNode=node(),
@@ -755,7 +755,7 @@ dist_adopt(Config) when list(Config) ->
%% This test tests that saving and restoring a history works.
dist_history(doc) -> [""];
dist_history(suite) -> [];
-dist_history(Config) when list(Config) ->
+dist_history(Config) when is_list(Config) ->
RemoteNodes=get_remotenodes_config(Config),
[RegExpNode|_]=RemoteNodes,
CNode=RegExpNode, % We use a remote control component.
@@ -904,7 +904,7 @@ dist_history(Config) when list(Config) ->
%% are no nodes that can be initiated or reinitiated.
dist_start_session_special(doc) -> [""];
dist_start_session_special(suite) -> [];
-dist_start_session_special(Config) when list(Config) ->
+dist_start_session_special(Config) when is_list(Config) ->
RemoteNodes=get_remotenodes_config(Config),
[RegExpNode|_]=RemoteNodes,
CNode=RegExpNode, % We use a remote control component.
@@ -1019,7 +1019,7 @@ stop_inviso_tool_session(CNode,SessionNr,Nodes) ->
%% Help function checking that there is a Result for each node in Nodes.
%% Returns 'true' if successful.
-check_noderesults(Nodes,Fun,[{Node,Result}|Rest]) when function(Fun) ->
+check_noderesults(Nodes,Fun,[{Node,Result}|Rest]) when is_function(Fun) ->
case Fun({Node,Result}) of
true ->
case lists:member(Node,Nodes) of
@@ -1052,7 +1052,7 @@ poll(_,_,_,_,0) ->
error;
poll(M,F,Args,Result,Times) ->
try apply(M,F,Args) of
- What when function(Result) ->
+ What when is_function(Result) ->
case Result(What) of
true ->
ok;
diff --git a/lib/kernel/doc/src/gen_tcp.xml b/lib/kernel/doc/src/gen_tcp.xml
index 8e7192a496..aa171c77c2 100644
--- a/lib/kernel/doc/src/gen_tcp.xml
+++ b/lib/kernel/doc/src/gen_tcp.xml
@@ -74,6 +74,7 @@ posix()
socket()
as returned by accept/1,2 and connect/3,4</code>
+ <marker id="connect"></marker>
</section>
<funcs>
<func>
diff --git a/lib/kernel/src/code.erl b/lib/kernel/src/code.erl
index feb5131aad..b0f99305f2 100644
--- a/lib/kernel/src/code.erl
+++ b/lib/kernel/src/code.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -72,39 +72,42 @@
%% User interface.
%%
-%% objfile_extension() -> ".beam"
-%% set_path(Dir*) -> true
-%% get_path() -> Dir*
-%% add_path(Dir) -> true | {error, What}
-%% add_patha(Dir) -> true | {error, What}
-%% add_pathz(Dir) -> true | {error, What}
-%% add_paths(DirList) -> true | {error, What}
-%% add_pathsa(DirList) -> true | {error, What}
-%% add_pathsz(DirList) -> true | {error, What}
-%% del_path(Dir) -> true | {error, What}
-%% replace_path(Name,Dir) -> true | {error, What}
-%% load_file(File) -> {error,What} | {module, Mod}
-%% load_abs(File) -> {error,What} | {module, Mod}
-%% load_abs(File,Mod) -> {error,What} | {module, Mod}
-%% load_binary(Mod,File,Bin) -> {error,What} | {module,Mod}
-%% ensure_loaded(Module) -> {error,What} | {module, Mod}
-%% delete(Module)
-%% purge(Module) kills all procs running old code
-%% soft_purge(Module) -> true | false
-%% is_loaded(Module) -> {file, File} | false
-%% all_loaded() -> {Module, File}*
-%% get_object_code(Mod) -> error | {Mod, Bin, Filename}
-%% stop() -> true
-%% root_dir()
-%% compiler_dir()
-%% lib_dir()
-%% priv_dir(Name)
-%% stick_dir(Dir) -> ok | error
-%% unstick_dir(Dir) -> ok | error
-%% is_sticky(Module) -> true | false
-%% which(Module) -> Filename
-%% set_primary_archive((FileName, Bin, FileInfo) -> ok | {error, Reason}
-%% clash() -> -> print out
+%% objfile_extension() -> ".beam"
+%% get_path() -> [Dir]
+%% set_path([Dir]) -> true | {error, bad_directory | bad_path}
+%% add_path(Dir) -> true | {error, bad_directory}
+%% add_patha(Dir) -> true | {error, bad_directory}
+%% add_pathz(Dir) -> true | {error, bad_directory}
+%% add_paths([Dir]) -> ok
+%% add_pathsa([Dir]) -> ok
+%% add_pathsz([Dir]) -> ok
+%% del_path(Dir) -> boolean() | {error, bad_name}
+%% replace_path(Name, Dir) -> true | replace_path_error()
+%% load_file(Module) -> {module, Module} | {error, What :: atom()}
+%% load_abs(File) -> {module, Module} | {error, What :: atom()}
+%% load_abs(File, Module) -> {module, Module} | {error, What :: atom()}
+%% load_binary(Module, File, Bin)-> {module, Module} | {error, What :: atom()}
+%% ensure_loaded(Module) -> {module, Module} | {error, What :: atom()}
+%% delete(Module) -> boolean()
+%% purge(Module) -> boolean() kills all procs running old code
+%% soft_purge(Module) -> boolean()
+%% is_loaded(Module) -> {file, loaded_filename()} | false
+%% all_loaded() -> [{Module, loaded_filename()}]
+%% get_object_code(Module) -> {Module, Bin, Filename} | error
+%% stop() -> no_return()
+%% root_dir() -> Dir
+%% compiler_dir() -> Dir
+%% lib_dir() -> Dir
+%% lib_dir(Application) -> Dir | {error, bad_name}
+%% priv_dir(Application) -> Dir | {error, bad_name}
+%% stick_dir(Dir) -> ok | error
+%% unstick_dir(Dir) -> ok | error
+%% stick_mod(Module) -> true
+%% unstick_mod(Module) -> true
+%% is_sticky(Module) -> boolean()
+%% which(Module) -> Filename | loaded_ret_atoms() | non_existing
+%% set_primary_archive(FileName, Bin, FileInfo) -> ok | {error, Reason}
+%% clash() -> ok prints out number of clashes
%%----------------------------------------------------------------------------
%% Some types for basic exported functions of this module
@@ -120,7 +123,7 @@
%% User interface
%%----------------------------------------------------------------------------
--spec objfile_extension() -> file:filename().
+-spec objfile_extension() -> nonempty_string().
objfile_extension() ->
init:objfile_extension().
@@ -138,21 +141,21 @@ load_abs(File) when is_list(File); is_atom(File) -> call({load_abs,File,[]}).
%% XXX Filename is also an atom(), e.g. 'cover_compiled'
-spec load_abs(Filename :: loaded_filename(), Module :: atom()) -> load_ret().
-load_abs(File,M) when (is_list(File) orelse is_atom(File)), is_atom(M) ->
+load_abs(File, M) when (is_list(File) orelse is_atom(File)), is_atom(M) ->
call({load_abs,File,M}).
%% XXX Filename is also an atom(), e.g. 'cover_compiled'
-spec load_binary(Module :: atom(), Filename :: loaded_filename(), Binary :: binary()) -> load_ret().
-load_binary(Mod,File,Bin)
+load_binary(Mod, File, Bin)
when is_atom(Mod), (is_list(File) orelse is_atom(File)), is_binary(Bin) ->
call({load_binary,Mod,File,Bin}).
-spec load_native_partial(Module :: atom(), Binary :: binary()) -> load_ret().
-load_native_partial(Mod,Bin) when is_atom(Mod), is_binary(Bin) ->
+load_native_partial(Mod, Bin) when is_atom(Mod), is_binary(Bin) ->
call({load_native_partial,Mod,Bin}).
-spec load_native_sticky(Module :: atom(), Binary :: binary(), WholeModule :: 'false' | binary()) -> load_ret().
-load_native_sticky(Mod,Bin,WholeModule)
+load_native_sticky(Mod, Bin, WholeModule)
when is_atom(Mod), is_binary(Bin),
(is_binary(WholeModule) orelse WholeModule =:= false) ->
call({load_native_sticky,Mod,Bin,WholeModule}).
@@ -160,7 +163,7 @@ load_native_sticky(Mod,Bin,WholeModule)
-spec delete(Module :: atom()) -> boolean().
delete(Mod) when is_atom(Mod) -> call({delete,Mod}).
--spec purge/1 :: (Module :: atom()) -> boolean().
+-spec purge(Module :: atom()) -> boolean().
purge(Mod) when is_atom(Mod) -> call({purge,Mod}).
-spec soft_purge(Module :: atom()) -> boolean().
@@ -195,7 +198,7 @@ lib_dir(App, SubDir) when is_atom(App), is_atom(SubDir) -> call({dir,{lib_dir,Ap
compiler_dir() -> call({dir,compiler_dir}).
%% XXX is_list() is for backwards compatibility -- take out in future version
--spec priv_dir(Appl :: atom()) -> file:filename() | {'error', 'bad_name'}.
+-spec priv_dir(App :: atom()) -> file:filename() | {'error', 'bad_name'}.
priv_dir(App) when is_atom(App) ; is_list(App) -> call({dir,{priv_dir,App}}).
-spec stick_dir(Directory :: file:filename()) -> 'ok' | 'error'.
@@ -220,13 +223,14 @@ set_path(PathList) when is_list(PathList) -> call({set_path,PathList}).
-spec get_path() -> [file:filename()].
get_path() -> call(get_path).
--spec add_path(Directory :: file:filename()) -> 'true' | {'error', 'bad_directory'}.
+-type add_path_ret() :: 'true' | {'error', 'bad_directory'}.
+-spec add_path(Directory :: file:filename()) -> add_path_ret().
add_path(Dir) when is_list(Dir) -> call({add_path,last,Dir}).
--spec add_pathz(Directory :: file:filename()) -> 'true' | {'error', 'bad_directory'}.
+-spec add_pathz(Directory :: file:filename()) -> add_path_ret().
add_pathz(Dir) when is_list(Dir) -> call({add_path,last,Dir}).
--spec add_patha(Directory :: file:filename()) -> 'true' | {'error', 'bad_directory'}.
+-spec add_patha(Directory :: file:filename()) -> add_path_ret().
add_patha(Dir) when is_list(Dir) -> call({add_path,first,Dir}).
-spec add_paths(Directories :: [file:filename()]) -> 'ok'.
@@ -243,8 +247,8 @@ del_path(Name) when is_list(Name) ; is_atom(Name) -> call({del_path,Name}).
-type replace_path_error() :: {'error', 'bad_directory' | 'bad_name' | {'badarg',_}}.
-spec replace_path(Name:: atom(), Dir :: file:filename()) -> 'true' | replace_path_error().
-replace_path(Name, Dir) when (is_atom(Name) or is_list(Name)) and
- (is_atom(Dir) or is_list(Dir)) ->
+replace_path(Name, Dir) when (is_atom(Name) orelse is_list(Name)),
+ (is_atom(Dir) orelse is_list(Dir)) ->
call({replace_path,Name,Dir}).
-spec rehash() -> 'ok'.
@@ -275,21 +279,14 @@ start_link(Flags) ->
do_start(Flags) ->
%% The following module_info/1 calls are here to ensure
- %% that the modules are loaded prior to their use elsewhere in
+ %% that these modules are loaded prior to their use elsewhere in
%% the code_server.
%% Otherwise a deadlock may occur when the code_server is starting.
- code_server:module_info(module),
- packages:module_info(module),
+ code_server = code_server:module_info(module),
+ packages = packages:module_info(module),
catch hipe_unified_loader:load_hipe_modules(),
- gb_sets:module_info(module),
- gb_trees:module_info(module),
-
- ets:module_info(module),
- os:module_info(module),
- binary:module_info(module),
- unicode:module_info(module),
- filename:module_info(module),
- lists:module_info(module),
+ Modules2 = [gb_sets, gb_trees, ets, os, binary, unicode, filename, lists],
+ lists:foreach(fun (M) -> M = M:module_info(module) end, Modules2),
Mode = get_mode(Flags),
case init:get_argument(root) of
@@ -297,7 +294,7 @@ do_start(Flags) ->
Root = filename:join([Root0]), % Normalize. Use filename
case code_server:start_link([Root,Mode]) of
{ok,_Pid} = Ok2 ->
- if
+ if
Mode =:= interactive ->
case lists:member(stick, Flags) of
true -> do_stick_dirs();
@@ -306,14 +303,14 @@ do_start(Flags) ->
true ->
ok
end,
- % Quietly load the native code for all modules loaded so far.
+ %% Quietly load native code for all modules loaded so far
catch load_native_code_for_all_loaded(),
Ok2;
Other ->
Other
end;
Other ->
- error_logger:error_msg("Can not start code server ~w ~n",[Other]),
+ error_logger:error_msg("Can not start code server ~w ~n", [Other]),
{error, crash}
end.
@@ -330,7 +327,7 @@ do_s(Lib) ->
%% The return value is intentionally ignored. Missing
%% directories is not a fatal error. (In embedded systems,
%% there is usually no compiler directory.)
- stick_dir(filename:append(Dir, "ebin")),
+ _ = stick_dir(filename:append(Dir, "ebin")),
ok
end.
@@ -428,7 +425,7 @@ where_is_file(Path, File) when is_list(Path), is_list(File) ->
-spec set_primary_archive(ArchiveFile :: file:filename(),
ArchiveBin :: binary(),
- FileInfo :: #file_info{})
+ FileInfo :: file:file_info())
-> 'ok' | {'error', atom()}.
set_primary_archive(ArchiveFile0, ArchiveBin, #file_info{} = FileInfo)
@@ -485,13 +482,13 @@ filter(Ext, _, {ok,Files}) ->
filter2(Ext, length(Ext), Files).
filter2(_Ext, _Extlen, []) -> [];
-filter2(Ext, Extlen,[File|Tail]) ->
- case has_ext(Ext,Extlen, File) of
+filter2(Ext, Extlen, [File|Tail]) ->
+ case has_ext(Ext, Extlen, File) of
true -> [File | filter2(Ext, Extlen, Tail)];
false -> filter2(Ext, Extlen, Tail)
end.
-has_ext(Ext, Extlen,File) ->
+has_ext(Ext, Extlen, File) ->
L = length(File),
case catch lists:nthtail(L - Extlen, File) of
Ext -> true;
diff --git a/lib/kernel/src/file.erl b/lib/kernel/src/file.erl
index bc95359986..3aca9b4b0d 100644
--- a/lib/kernel/src/file.erl
+++ b/lib/kernel/src/file.erl
@@ -240,7 +240,7 @@ write_file(Name, Bin) ->
%% when it is time to change file server protocol again.
%% Meanwhile, it is implemented here, slightly less efficient.
--spec write_file(Name :: name(), Bin :: binary(), Modes :: [mode()]) ->
+-spec write_file(Name :: name(), Bin :: iodata(), Modes :: [mode()]) ->
'ok' | {'error', posix()}.
write_file(Name, Bin, ModeList) when is_list(ModeList) ->
diff --git a/lib/kernel/src/inet6_tcp_dist.erl b/lib/kernel/src/inet6_tcp_dist.erl
index fab00bbb9f..b9c4fa607c 100644
--- a/lib/kernel/src/inet6_tcp_dist.erl
+++ b/lib/kernel/src/inet6_tcp_dist.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -162,8 +162,8 @@ do_accept(Kernel, AcceptPid, Socket, MyNode, Allowed, SetupTime) ->
inet:getll(S)
end,
f_address = fun get_remote_id/2,
- mf_tick = {?MODULE, tick},
- mf_getstat = {?MODULE,getstat}
+ mf_tick = fun ?MODULE:tick/1,
+ mf_getstat = fun ?MODULE:getstat/1
},
dist_util:handshake_other_started(HSData);
{false,IP} ->
diff --git a/lib/kernel/src/net_kernel.erl b/lib/kernel/src/net_kernel.erl
index f5e2820bbe..23db85e1f4 100644
--- a/lib/kernel/src/net_kernel.erl
+++ b/lib/kernel/src/net_kernel.erl
@@ -322,24 +322,19 @@ init({Name, LongOrShortNames, TickT}) ->
process_flag(priority, max),
Ticktime = to_integer(TickT),
Ticker = spawn_link(net_kernel, ticker, [self(), Ticktime]),
- case auth:get_cookie(Node) of
- Cookie when is_atom(Cookie) ->
- {ok, #state{name = Name,
- node = Node,
- type = LongOrShortNames,
- tick = #tick{ticker = Ticker, time = Ticktime},
- connecttime = connecttime(),
- connections =
- ets:new(sys_dist,[named_table,
- protected,
- {keypos, 2}]),
- listen = Listeners,
- allowed = [],
- verbose = 0
- }};
- _ELSE ->
- {stop, {error,{bad_cookie, Node}}}
- end;
+ {ok, #state{name = Name,
+ node = Node,
+ type = LongOrShortNames,
+ tick = #tick{ticker = Ticker, time = Ticktime},
+ connecttime = connecttime(),
+ connections =
+ ets:new(sys_dist,[named_table,
+ protected,
+ {keypos, 2}]),
+ listen = Listeners,
+ allowed = [],
+ verbose = 0
+ }};
Error ->
{stop, Error}
end.
diff --git a/lib/kernel/src/os.erl b/lib/kernel/src/os.erl
index 75a11a8afd..95a2f71ec0 100644
--- a/lib/kernel/src/os.erl
+++ b/lib/kernel/src/os.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -82,8 +82,9 @@ verify_executable(Name0, [Ext|Rest], OrigExtensions) ->
end;
_ ->
case file:read_file_info(Name1) of
- {ok, #file_info{mode=Mode}} when Mode band 8#111 =/= 0 ->
- %% XXX This test for execution permission is not full-proof
+ {ok, #file_info{type=regular,mode=Mode}}
+ when Mode band 8#111 =/= 0 ->
+ %% XXX This test for execution permission is not fool-proof
%% on Unix, since we test if any execution bit is set.
{ok, Name1};
_ ->
@@ -230,9 +231,13 @@ start_port_srv(Request) ->
catch
error:_ -> false
end,
- start_port_srv_loop(Request, StayAlive).
+ start_port_srv_handle(Request),
+ case StayAlive of
+ true -> start_port_srv_loop();
+ false -> exiting
+ end.
-start_port_srv_loop({Ref,Client}, StayAlive) ->
+start_port_srv_handle({Ref,Client}) ->
Reply = try open_port({spawn, ?SHELL},[stream]) of
Port when is_port(Port) ->
(catch port_connect(Port, Client)),
@@ -242,20 +247,18 @@ start_port_srv_loop({Ref,Client}, StayAlive) ->
error:Reason ->
{Reason,erlang:get_stacktrace()}
end,
- Client ! {Ref,Reply},
- case StayAlive of
- true -> start_port_srv_loop(get_open_port_request(), true);
- false -> exiting
- end.
+ Client ! {Ref,Reply}.
-get_open_port_request() ->
+
+start_port_srv_loop() ->
receive
{Ref, Client} = Request when is_reference(Ref),
is_pid(Client) ->
- Request;
+ start_port_srv_handle(Request);
_Junk ->
- get_open_port_request()
- end.
+ ignore
+ end,
+ start_port_srv_loop().
%%
%% unix_get_data(Port) -> Result
diff --git a/lib/kernel/test/erl_distribution_SUITE.erl b/lib/kernel/test/erl_distribution_SUITE.erl
index efce5bd476..9cccdab76b 100644
--- a/lib/kernel/test/erl_distribution_SUITE.erl
+++ b/lib/kernel/test/erl_distribution_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -865,7 +865,7 @@ monitor_nodes_otp_6481_test(Config, TestType) when is_list(Config) ->
% the node mercilessly.
% We then want to ensure that the nodedown message arrives
% last ... without garbage after it.
- Pid = spawn(fun() -> node_loop_send(Me, NodeMsg, 1) end),
+ _ = spawn(fun() -> node_loop_send(Me, NodeMsg, 1) end),
receive {Me, kill_it} -> ok end,
halt()
end),
diff --git a/lib/kernel/test/gen_tcp_api_SUITE.erl b/lib/kernel/test/gen_tcp_api_SUITE.erl
index c8fe602ac2..d9abeb808b 100644
--- a/lib/kernel/test/gen_tcp_api_SUITE.erl
+++ b/lib/kernel/test/gen_tcp_api_SUITE.erl
@@ -180,34 +180,38 @@ t_fdopen(Config) when is_list(Config) ->
%%% implicit inet6 option to api functions
t_implicit_inet6(Config) when is_list(Config) ->
- ?line Hostname = ok(inet:gethostname()),
+ ?line Host = ok(inet:gethostname()),
+ ?line
+ case inet:getaddr(Host, inet6) of
+ {ok,Addr} ->
+ ?line t_implicit_inet6(Host, Addr);
+ {error,Reason} ->
+ {skip,
+ "Can not look up IPv6 address: "
+ ++atom_to_list(Reason)}
+ end.
+
+t_implicit_inet6(Host, Addr) ->
?line
case gen_tcp:listen(0, [inet6]) of
{ok,S1} ->
- ?line
- case inet:getaddr(Hostname, inet6) of
- {ok,Host} ->
- ?line Loopback = {0,0,0,0,0,0,0,1},
- ?line io:format("~s ~p~n", ["Loopback",Loopback]),
- ?line implicit_inet6(S1, Loopback),
- ?line ok = gen_tcp:close(S1),
- %%
- ?line Localhost =
- ok(inet:getaddr("localhost", inet6)),
- ?line io:format("~s ~p~n", ["localhost",Localhost]),
- ?line S2 = ok(gen_tcp:listen(0, [{ip,Localhost}])),
- ?line implicit_inet6(S2, Localhost),
- ?line ok = gen_tcp:close(S2),
- %%
- ?line io:format("~s ~p~n", [Hostname,Host]),
- ?line S3 = ok(gen_tcp:listen(0, [{ifaddr,Host}])),
- ?line implicit_inet6(S3, Host),
- ?line ok = gen_tcp:close(S1);
- {error,eafnosupport} ->
- ?line ok = gen_tcp:close(S1),
- {skip,"Can not look up IPv6 address"}
- end;
- _ ->
+ ?line Loopback = {0,0,0,0,0,0,0,1},
+ ?line io:format("~s ~p~n", ["::1",Loopback]),
+ ?line implicit_inet6(S1, Loopback),
+ ?line ok = gen_tcp:close(S1),
+ %%
+ ?line Localhost = "localhost",
+ ?line Localaddr = ok(inet:getaddr(Localhost, inet6)),
+ ?line io:format("~s ~p~n", [Localhost,Localaddr]),
+ ?line S2 = ok(gen_tcp:listen(0, [{ip,Localaddr}])),
+ ?line implicit_inet6(S2, Localaddr),
+ ?line ok = gen_tcp:close(S2),
+ %%
+ ?line io:format("~s ~p~n", [Host,Addr]),
+ ?line S3 = ok(gen_tcp:listen(0, [{ifaddr,Addr}])),
+ ?line implicit_inet6(S3, Addr),
+ ?line ok = gen_tcp:close(S3);
+ {error,_} ->
{skip,"IPv6 not supported"}
end.
diff --git a/lib/kernel/test/gen_udp_SUITE.erl b/lib/kernel/test/gen_udp_SUITE.erl
index ee9288bc75..0ea2d53af0 100644
--- a/lib/kernel/test/gen_udp_SUITE.erl
+++ b/lib/kernel/test/gen_udp_SUITE.erl
@@ -450,36 +450,38 @@ connect(Config) when is_list(Config) ->
ok.
implicit_inet6(Config) when is_list(Config) ->
- ?line Hostname = ok(inet:gethostname()),
+ ?line Host = ok(inet:gethostname()),
+ ?line
+ case inet:getaddr(Host, inet6) of
+ {ok,Addr} ->
+ ?line implicit_inet6(Host, Addr);
+ {error,Reason} ->
+ {skip,
+ "Can not look up IPv6 address: "
+ ++atom_to_list(Reason)}
+ end.
+
+implicit_inet6(Host, Addr) ->
?line Active = {active,false},
?line
case gen_udp:open(0, [inet6,Active]) of
{ok,S1} ->
- ?line
- case inet:getaddr(Hostname, inet6) of
- {ok,Host} ->
- ?line Loopback = {0,0,0,0,0,0,0,1},
- ?line io:format("~s ~p~n", ["Loopback",Loopback]),
- ?line implicit_inet6(S1, Active, Loopback),
- ?line ok = gen_udp:close(S1),
- %%
- ?line Localhost =
- ok(inet:getaddr("localhost", inet6)),
- ?line io:format("~s ~p~n", ["localhost",Localhost]),
- ?line S2 =
- ok(gen_udp:open(0, [{ip,Localhost},Active])),
- ?line implicit_inet6(S2, Active, Localhost),
- ?line ok = gen_udp:close(S2),
- %%
- ?line io:format("~s ~p~n", [Hostname,Host]),
- ?line S3 =
- ok(gen_udp:open(0, [{ifaddr,Host},Active])),
- ?line implicit_inet6(S3, Active, Host),
- ?line ok = gen_udp:close(S1);
- {error,eafnosupport} ->
- ?line ok = gen_udp:close(S1),
- {skip,"Can not look up IPv6 address"}
- end;
+ ?line Loopback = {0,0,0,0,0,0,0,1},
+ ?line io:format("~s ~p~n", ["::1",Loopback]),
+ ?line implicit_inet6(S1, Active, Loopback),
+ ?line ok = gen_udp:close(S1),
+ %%
+ ?line Localhost = "localhost",
+ ?line Localaddr = ok(inet:getaddr(Localhost, inet6)),
+ ?line io:format("~s ~p~n", [Localhost,Localaddr]),
+ ?line S2 = ok(gen_udp:open(0, [{ip,Localaddr},Active])),
+ ?line implicit_inet6(S2, Active, Localaddr),
+ ?line ok = gen_udp:close(S2),
+ %%
+ ?line io:format("~s ~p~n", [Host,Addr]),
+ ?line S3 = ok(gen_udp:open(0, [{ifaddr,Addr},Active])),
+ ?line implicit_inet6(S3, Active, Addr),
+ ?line ok = gen_udp:close(S3);
_ ->
{skip,"IPv6 not supported"}
end.
@@ -500,5 +502,4 @@ implicit_inet6(S1, Active, Addr) ->
?line {Addr,P2,"pong"} = ok(gen_udp:recv(S1, 1024)),
?line ok = gen_udp:close(S2).
-
ok({ok,V}) -> V.
diff --git a/lib/kernel/test/os_SUITE.erl b/lib/kernel/test/os_SUITE.erl
index ecb9a111f6..b08b12c978 100644
--- a/lib/kernel/test/os_SUITE.erl
+++ b/lib/kernel/test/os_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -175,6 +175,21 @@ find_executable(Config) when is_list(Config) ->
?line find_exe(Current, "my_batch", ".bat", Path),
ok;
{unix, _} ->
+ DataDir = ?config(data_dir, Config),
+
+ %% Smoke test.
+ case lib:progname() of
+ erl ->
+ ?line ErlPath = os:find_executable("erl"),
+ ?line true = is_list(ErlPath),
+ ?line true = filelib:is_regular(ErlPath);
+ _ ->
+ %% Don't bother -- the progname could include options.
+ ok
+ end,
+
+ %% Never return a directory name.
+ ?line false = os:find_executable("unix", [DataDir]),
ok;
vxworks ->
ok
diff --git a/lib/megaco/Makefile b/lib/megaco/Makefile
index d4698eb558..10efaf667f 100644
--- a/lib/megaco/Makefile
+++ b/lib/megaco/Makefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 1999-2009. All Rights Reserved.
+# Copyright Ericsson AB 1999-2011. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -97,12 +97,19 @@ endif
CONFIGURE_OPTS = $(FLEX_SCANNER_LINENO_ENABLER) $(FLEX_SCANNER_REENTRANT_ENABLER)
+MEGACO_DIA_PLT = ./priv/megaco.plt
+MEGACO_DIA_PLT_LOG = $(basename $(MEGACO_DIA_PLT)).dialyzer_plt_log
+MEGACO_DIA_LOG = $(basename $(MEGACO_DIA_PLT)).dialyzer_log
+
# ----------------------------------------------------
# Default Subdir Targets
# ----------------------------------------------------
include $(ERL_TOP)/make/otp_subdir.mk
+.PHONY: reconf conf dconf econf configure setup info version \
+ app_install dialyzer
+
reconf:
(cd $(ERL_TOP) && \
./otp_build autoconf && \
@@ -132,6 +139,10 @@ info:
@echo "APP_TAR_FILE: $(APP_TAR_FILE)"
@echo "OTP_INSTALL_DIR: $(OTP_INSTALL_DIR)"
@echo "APP_INSTALL_DIR: $(APP_INSTALL_DIR)"
+ @echo ""
+ @echo "MEGACO_PLT = $(MEGACO_PLT)"
+ @echo "MEGACO_DIA_LOG = $(MEGACO_DIA_LOG)"
+ @echo ""
version:
@echo "$(VSN)"
@@ -190,9 +201,18 @@ tar: $(APP_TAR_FILE)
$(APP_TAR_FILE): $(APP_DIR)
(cd $(APP_RELEASE_DIR); gtar zcf $(APP_TAR_FILE) $(DIR_NAME))
-dialyzer:
- (cd ./ebin; \
- dialyzer --build_plt \
- --output_plt ../priv/megaco.plt \
- -r ../../megaco/ebin \
- --verbose)
+dialyzer_plt: $(MEGACO_DIA_PLT)
+
+$(MEGACO_DIA_PLT):
+ @echo "Building megaco plt file"
+ @dialyzer --build_plt \
+ --output_plt $@ \
+ -r ../megaco/ebin \
+ -o $(MEGACO_DIA_PLT_LOG) \
+ --verbose
+
+dialyzer: $(MEGACO_DIA_PLT)
+ (dialyzer --plt $< \
+ -o $(MEGACO_DIA_LOG) \
+ ../megaco/ebin \
+ && (shell cat $(MEGACO_DIA_LOG)))
diff --git a/lib/megaco/doc/src/notes.xml b/lib/megaco/doc/src/notes.xml
index 81c9305542..4f678a2a1b 100644
--- a/lib/megaco/doc/src/notes.xml
+++ b/lib/megaco/doc/src/notes.xml
@@ -1,10 +1,10 @@
-<?xml version="1.0" encoding="latin1" ?>
+<?xml version="1.0" encoding="iso-8859-1" ?>
<!DOCTYPE chapter SYSTEM "chapter.dtd">
<chapter>
<header>
<copyright>
- <year>2000</year><year>2010</year>
+ <year>2000</year><year>2011</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -35,22 +35,90 @@
thus constitutes one section in this document. The title of each
section is the version number of Megaco.</p>
+
+ <section><title>Megaco 3.15.1</title>
+
+ <p>Version 3.15.1 supports code replacement in runtime from/to
+ version 3.15, 3.14.1.1, 3.14.1 and 3.14.</p>
+
+ <section>
+ <title>Improvements and new features</title>
+
+ <p>-</p>
+
+<!--
+ <list type="bulleted">
+ <item>
+ <p>Updated the
+ <seealso marker="megaco_performance">performance</seealso>
+ chapter. </p>
+ <p>Own Id: OTP-8696</p>
+ </item>
+
+ </list>
+-->
+
+ </section>
+
+ <section>
+ <title>Fixed bugs and malfunctions</title>
+
+<!--
+ <p>-</p>
+-->
+
+ <list type="bulleted">
+ <item>
+ <p>Fixing miscellaneous things detected by dialyzer. </p>
+ <p>Own Id: OTP-9075</p>
+ <!-- <p>Aux Id: Seq 11579</p> -->
+ </item>
+
+ </list>
+
+ </section>
+
+ </section> <!-- 3.15.1 -->
+
+
<section><title>Megaco 3.15</title>
<section><title>Improvements and New Features</title>
- <list>
+
+<!--
+ <p>-</p>
+-->
+
+ <list type="bulleted">
+ <item>
+ <p>Fixing auto-import issues.</p>
+ <p>Own Id: OTP-8842</p>
+ </item>
+ </list>
+ </section>
+
+ <section>
+ <title>Fixed bugs and malfunctions</title>
+ <p>-</p>
+
+<!--
+ <list type="bulleted">
<item>
- <p>
- Fixing auto-import issues.</p>
- <p>
- Own Id: OTP-8842</p>
+ <p>Eliminated a possible race condition while creating
+ pending counters. </p>
+ <p>Own Id: OTP-8634</p>
+ <p>Aux Id: Seq 11579</p>
</item>
+
</list>
+-->
+
</section>
-</section>
+ </section> <!-- 3.15 -->
-<section>
+
+ <section>
<title>Megaco 3.14.1.1</title>
<p>Version 3.14.1.1 supports code replacement in runtime from/to
diff --git a/lib/megaco/src/app/megaco.app.src b/lib/megaco/src/app/megaco.app.src
index 503fcd7176..c0d8218ac8 100644
--- a/lib/megaco/src/app/megaco.app.src
+++ b/lib/megaco/src/app/megaco.app.src
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -67,6 +67,7 @@
megaco_compact_text_encoder_prev3c,
megaco_compact_text_encoder_v3,
megaco_config,
+ megaco_config_misc,
megaco_digit_map,
megaco_encoder,
megaco_edist_compress,
diff --git a/lib/megaco/src/app/megaco.appup.src b/lib/megaco/src/app/megaco.appup.src
index 66068f650f..01b070d79f 100644
--- a/lib/megaco/src/app/megaco.appup.src
+++ b/lib/megaco/src/app/megaco.appup.src
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -133,50 +133,34 @@
%% |
%% v
%% 3.15
+%% |
+%% v
+%% 3.15.1
%%
%%
{"%VSN%",
[
- {"3.14.1.1",
+ {"3.15",
[
- {load_module, megaco_binary_transformer_prev3a, soft_purge, soft_purge, []},
- {load_module, megaco_binary_transformer_prev3b, soft_purge, soft_purge, []},
- {load_module, megaco_binary_transformer_prev3c, soft_purge, soft_purge, []},
+ {load_module, megaco_flex_scanner, soft_purge, soft_purge, []},
{load_module, megaco_sdp, soft_purge, soft_purge, []},
- {load_module, megaco_compact_text_encoder_v1, soft_purge, soft_purge, []},
- {load_module, megaco_pretty_text_encoder_v1, soft_purge, soft_purge, []},
- {load_module, megaco_compact_text_encoder_v2, soft_purge, soft_purge, []},
- {load_module, megaco_pretty_text_encoder_v2, soft_purge, soft_purge, []},
- {load_module, megaco_compact_text_encoder_prev3a, soft_purge, soft_purge, []},
- {load_module, megaco_pretty_text_encoder_prev3a, soft_purge, soft_purge, []},
- {load_module, megaco_compact_text_encoder_prev3b, soft_purge, soft_purge, []},
- {load_module, megaco_pretty_text_encoder_prev3b, soft_purge, soft_purge, []},
- {load_module, megaco_compact_text_encoder_prev3c, soft_purge, soft_purge, []},
- {load_module, megaco_pretty_text_encoder_prev3c, soft_purge, soft_purge, []},
- {load_module, megaco_compact_text_encoder_v3, soft_purge, soft_purge, []},
- {load_module, megaco_pretty_text_encoder_v3, soft_purge, soft_purge, []}
+ {load_module, megaco_filter, soft_purge, soft_purge, []},
+ {load_module, megaco_timer, soft_purge, soft_purge, [megaco_config_misc]},
+ {update, megaco_config, soft, soft_purge, soft_purge,
+ [megaco_timer, megaco_config_misc]},
+ {add_module, megaco_config_misc}
]
}
],
[
- {"3.14.1.1",
+ {"3.15",
[
- {load_module, megaco_binary_transformer_prev3a, soft_purge, soft_purge, []},
- {load_module, megaco_binary_transformer_prev3b, soft_purge, soft_purge, []},
- {load_module, megaco_binary_transformer_prev3c, soft_purge, soft_purge, []},
+ {load_module, megaco_flex_scanner, soft_purge, soft_purge, []},
{load_module, megaco_sdp, soft_purge, soft_purge, []},
- {load_module, megaco_compact_text_encoder_v1, soft_purge, soft_purge, []},
- {load_module, megaco_pretty_text_encoder_v1, soft_purge, soft_purge, []},
- {load_module, megaco_compact_text_encoder_v2, soft_purge, soft_purge, []},
- {load_module, megaco_pretty_text_encoder_v2, soft_purge, soft_purge, []},
- {load_module, megaco_compact_text_encoder_prev3a, soft_purge, soft_purge, []},
- {load_module, megaco_pretty_text_encoder_prev3a, soft_purge, soft_purge, []},
- {load_module, megaco_compact_text_encoder_prev3b, soft_purge, soft_purge, []},
- {load_module, megaco_pretty_text_encoder_prev3b, soft_purge, soft_purge, []},
- {load_module, megaco_compact_text_encoder_prev3c, soft_purge, soft_purge, []},
- {load_module, megaco_pretty_text_encoder_prev3c, soft_purge, soft_purge, []},
- {load_module, megaco_compact_text_encoder_v3, soft_purge, soft_purge, []},
- {load_module, megaco_pretty_text_encoder_v3, soft_purge, soft_purge, []}
+ {load_module, megaco_filter, soft_purge, soft_purge, []},
+ {load_module, megaco_timer, soft_purge, soft_purge, [megaco_config]},
+ {update, megaco_config, soft, soft_purge, soft_purge, []},
+ {remove, {megaco_config_misc, soft_purge, brutal_purge}}
]
}
]
diff --git a/lib/megaco/src/binary/megaco_binary_encoder_lib.erl b/lib/megaco/src/binary/megaco_binary_encoder_lib.erl
index 842d6b70d1..967ee93935 100644
--- a/lib/megaco/src/binary/megaco_binary_encoder_lib.erl
+++ b/lib/megaco/src/binary/megaco_binary_encoder_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -134,6 +134,12 @@ encode_transaction(EC, {Tag, _} = Trans, AsnMod, TransMod, Type)
encode_transaction(_EC, T, _AsnMod, _TransMod, _Type) ->
{error, {no_megaco_transaction, T}}.
+-spec do_encode_transaction(EC :: list(),
+ Trans :: tuple(),
+                            AsnMod   :: atom(),
+ TransMod :: atom(),
+ Type :: atom()) ->
+ {'ok', binary()} | {'error', any()}.
do_encode_transaction([native], _Trans, _AsnMod, _TransMod, binary) ->
%% asn1rt:encode(AsnMod, element(1, T), T);
{error, not_implemented};
@@ -160,6 +166,12 @@ do_encode_transaction(EC, _Trans, _AsnMod, _TransMod, _Type) ->
%% Convert a list of ActionRequest record's into a binary
%% Return {ok, DeepIoList} | {error, Reason}
%%----------------------------------------------------------------------
+-spec encode_action_requests(EC :: list(),
+ ARs :: list(),
+                             AsnMod   :: atom(),
+ TransMod :: atom(),
+ Type :: atom()) ->
+ {'ok', binary()} | {'error', any()}.
encode_action_requests([native], _ARs, _AsnMod, _TransMod, binary) ->
%% asn1rt:encode(AsnMod, element(1, T), T);
{error, not_implemented};
@@ -183,13 +195,20 @@ encode_action_requests(EC, _ARs, _AsnMod, _TransMod, _Type) ->
%% Convert a ActionRequest record into a binary
%% Return {ok, DeepIoList} | {error, Reason}
%%----------------------------------------------------------------------
-encode_action_request([native], _ARs, _AsnMod, _TransMod, binary) ->
+
+-spec encode_action_request(EC :: list(),
+ AR :: tuple(),
+                            AsnMod   :: atom(),
+ TransMod :: atom(),
+ Type :: atom()) ->
+ {'ok', binary()} | {'error', any()}.
+encode_action_request([native], _AR, _AsnMod, _TransMod, binary) ->
%% asn1rt:encode(AsnMod, element(1, T), T);
{error, not_implemented};
-encode_action_request(_EC, _ARs0, _AsnMod, _TransMod, binary) ->
+encode_action_request(_EC, _AR, _AsnMod, _TransMod, binary) ->
{error, not_implemented};
-encode_action_request(EC, ARs, AsnMod, TransMod, io_list) ->
- case encode_action_request(EC, ARs, AsnMod, TransMod, binary) of
+encode_action_request(EC, AR, AsnMod, TransMod, io_list) ->
+ case encode_action_request(EC, AR, AsnMod, TransMod, binary) of
{ok, Bin} when is_binary(Bin) ->
{ok, Bin};
{ok, DeepIoList} ->
@@ -198,7 +217,7 @@ encode_action_request(EC, ARs, AsnMod, TransMod, io_list) ->
{error, Reason} ->
{error, Reason}
end;
-encode_action_request(EC, _ARs, _AsnMod, _TransMod, _Type) ->
+encode_action_request(EC, _AR, _AsnMod, _TransMod, _Type) ->
{error, {bad_encoding_config, EC}}.
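
A hedged usage sketch of the {'ok', binary()} | {'error', any()} result shape documented by the new specs; encode_one_action_request/4 and its arguments are placeholders invented for the illustration:

encode_one_action_request(EC, AR, AsnMod, TransMod) ->
    %% Per the spec above, both the binary and io_list paths return
    %% {ok, binary()} or {error, Reason}.
    case megaco_binary_encoder_lib:encode_action_request(EC, AR, AsnMod, TransMod, io_list) of
        {ok, Bin} when is_binary(Bin) -> {ok, Bin};
        {error, Reason}               -> {error, {encode_failed, Reason}}
    end.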
diff --git a/lib/megaco/src/engine/depend.mk b/lib/megaco/src/engine/depend.mk
index 8d8c83e923..935eb813e5 100644
--- a/lib/megaco/src/engine/depend.mk
+++ b/lib/megaco/src/engine/depend.mk
@@ -2,7 +2,7 @@
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2003-2009. All Rights Reserved.
+# Copyright Ericsson AB 2003-2011. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -17,6 +17,8 @@
#
# %CopyrightEnd%
+$(EBIN)/megaco_config_misc.$(EMULATOR): megaco_config_misc.erl
+
$(EBIN)/megaco_config.$(EMULATOR): megaco_config.erl \
../../include/megaco.hrl \
../app/megaco_internal.hrl
diff --git a/lib/megaco/src/engine/megaco_config.erl b/lib/megaco/src/engine/megaco_config.erl
index 6805db790d..b65ddbe232 100644
--- a/lib/megaco/src/engine/megaco_config.erl
+++ b/lib/megaco/src/engine/megaco_config.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -46,10 +46,10 @@
%% Verification functions
verify_val/2,
- verify_strict_uint/1,
- verify_strict_int/1, verify_strict_int/2,
- verify_uint/1,
- verify_int/1, verify_int/2,
+%% verify_strict_uint/1,
+%% verify_strict_int/1, verify_strict_int/2,
+%% verify_uint/1,
+%% verify_int/1, verify_int/2,
%% Reply limit counter
@@ -1501,28 +1501,37 @@ verify_val(Item, Val) ->
mid -> true;
local_mid -> true;
remote_mid -> true;
- min_trans_id -> verify_strict_uint(Val, 4294967295); % uint32
- max_trans_id -> verify_uint(Val, 4294967295); % uint32
+ min_trans_id ->
+ megaco_config_misc:verify_strict_uint(Val, 4294967295); % uint32
+ max_trans_id ->
+ megaco_config_misc:verify_uint(Val, 4294967295); % uint32
request_timer -> verify_timer(Val);
long_request_timer -> verify_timer(Val);
- auto_ack -> verify_bool(Val);
+ auto_ack ->
+ megaco_config_misc:verify_bool(Val);
- trans_ack -> verify_bool(Val);
- trans_ack_maxcount -> verify_uint(Val);
+ trans_ack ->
+ megaco_config_misc:verify_bool(Val);
+ trans_ack_maxcount ->
+ megaco_config_misc:verify_uint(Val);
- trans_req -> verify_bool(Val);
- trans_req_maxcount -> verify_uint(Val);
- trans_req_maxsize -> verify_uint(Val);
+ trans_req ->
+ megaco_config_misc:verify_bool(Val);
+ trans_req_maxcount ->
+ megaco_config_misc:verify_uint(Val);
+ trans_req_maxsize ->
+ megaco_config_misc:verify_uint(Val);
- trans_timer -> verify_timer(Val) and (Val >= 0);
- trans_sender when Val == undefined -> true;
+ trans_timer ->
+ verify_timer(Val) and (Val >= 0);
+ trans_sender when Val =:= undefined -> true;
pending_timer -> verify_timer(Val);
- sent_pending_limit -> verify_uint(Val) andalso
- (Val > 0);
- recv_pending_limit -> verify_uint(Val) andalso
- (Val > 0);
+ sent_pending_limit ->
+ megaco_config_misc:verify_uint(Val) andalso (Val > 0);
+ recv_pending_limit ->
+ megaco_config_misc:verify_uint(Val) andalso (Val > 0);
reply_timer -> verify_timer(Val);
control_pid when is_pid(Val) -> true;
monitor_ref -> true; % Internal usage only
@@ -1530,110 +1539,43 @@ verify_val(Item, Val) ->
send_handle -> true;
encoding_mod when is_atom(Val) -> true;
encoding_config when is_list(Val) -> true;
- protocol_version -> verify_strict_uint(Val);
+ protocol_version ->
+ megaco_config_misc:verify_strict_uint(Val);
auth_data -> true;
user_mod when is_atom(Val) -> true;
user_args when is_list(Val) -> true;
reply_data -> true;
- threaded -> verify_bool(Val);
- strict_version -> verify_bool(Val);
- long_request_resend -> verify_bool(Val);
- call_proxy_gc_timeout -> verify_strict_uint(Val);
- cancel -> verify_bool(Val);
+ threaded ->
+ megaco_config_misc:verify_bool(Val);
+ strict_version ->
+ megaco_config_misc:verify_bool(Val);
+ long_request_resend ->
+ megaco_config_misc:verify_bool(Val);
+ call_proxy_gc_timeout ->
+ megaco_config_misc:verify_strict_uint(Val);
+ cancel ->
+ megaco_config_misc:verify_bool(Val);
resend_indication -> verify_resend_indication(Val);
- segment_reply_ind -> verify_bool(Val);
- segment_recv_acc -> verify_bool(Val);
+ segment_reply_ind ->
+ megaco_config_misc:verify_bool(Val);
+ segment_recv_acc ->
+ megaco_config_misc:verify_bool(Val);
segment_recv_timer -> verify_timer(Val);
segment_send -> verify_segmentation_window(Val);
segment_send_timer -> verify_timer(Val);
- max_pdu_size -> verify_int(Val) andalso (Val > 0);
+ max_pdu_size ->
+ megaco_config_misc:verify_int(Val) andalso (Val > 0);
request_keep_alive_timeout ->
- (verify_uint(Val) orelse (Val =:= plain));
+ (megaco_config_misc:verify_uint(Val) orelse (Val =:= plain));
_ -> false
end.
-verify_bool(true) -> true;
-verify_bool(false) -> true;
-verify_bool(_) -> false.
-
verify_resend_indication(flag) -> true;
-verify_resend_indication(Val) -> verify_bool(Val).
-
--spec verify_strict_int(Int :: integer()) -> boolean().
-verify_strict_int(Int) when is_integer(Int) -> true;
-verify_strict_int(_) -> false.
-
--spec verify_strict_int(Int :: integer(),
- Max :: integer() | 'infinity') -> boolean().
-verify_strict_int(Int, infinity) ->
- verify_strict_int(Int);
-verify_strict_int(Int, Max) ->
- verify_strict_int(Int) andalso verify_strict_int(Max) andalso (Int =< Max).
-
--spec verify_strict_uint(Int :: non_neg_integer()) -> boolean().
-verify_strict_uint(Int) when is_integer(Int) andalso (Int >= 0) -> true;
-verify_strict_uint(_) -> false.
-
--spec verify_strict_uint(Int :: non_neg_integer(),
- Max :: non_neg_integer() | 'infinity') -> boolean().
-verify_strict_uint(Int, infinity) ->
- verify_strict_uint(Int);
-verify_strict_uint(Int, Max) ->
- verify_strict_int(Int, 0, Max).
-
--spec verify_uint(Val :: non_neg_integer() | 'infinity') -> boolean().
-verify_uint(infinity) -> true;
-verify_uint(Val) -> verify_strict_uint(Val).
-
--spec verify_int(Val :: integer() | 'infinity') -> boolean().
-verify_int(infinity) -> true;
-verify_int(Val) -> verify_strict_int(Val).
-
--spec verify_int(Int :: integer() | 'infinity',
- Max :: integer() | 'infinity') -> boolean().
-verify_int(Int, infinity) ->
- verify_int(Int);
-verify_int(infinity, _Max) ->
- true;
-verify_int(Int, Max) ->
- verify_strict_int(Int) andalso verify_strict_int(Max) andalso (Int =< Max).
-
--spec verify_uint(Int :: non_neg_integer() | 'infinity',
- Max :: non_neg_integer() | 'infinity') -> boolean().
-verify_uint(Int, infinity) ->
- verify_uint(Int);
-verify_uint(infinity, _Max) ->
- true;
-verify_uint(Int, Max) ->
- verify_strict_int(Int, 0, Max).
-
--spec verify_strict_int(Int :: integer(),
- Min :: integer(),
- Max :: integer()) -> boolean().
-verify_strict_int(Val, Min, Max)
- when (is_integer(Val) andalso
- is_integer(Min) andalso
- is_integer(Max) andalso
- (Val >= Min) andalso
- (Val =< Max)) ->
- true;
-verify_strict_int(_Val, _Min, _Max) ->
- false.
-
--spec verify_int(Val :: integer() | 'infinity',
- Min :: integer(),
- Max :: integer() | 'infinity') -> boolean().
-verify_int(infinity, Min, infinity) ->
- verify_strict_int(Min);
-verify_int(Val, Min, infinity) ->
- verify_strict_int(Val) andalso
- verify_strict_int(Min) andalso (Val >= Min);
-verify_int(Int, Min, Max) ->
- verify_strict_int(Int, Min, Max).
+verify_resend_indication(Val) -> megaco_config_misc:verify_bool(Val).
verify_timer(Timer) ->
megaco_timer:verify(Timer).
@@ -1641,7 +1583,7 @@ verify_timer(Timer) ->
verify_segmentation_window(none) ->
true;
verify_segmentation_window(K) ->
- verify_int(K, 1, infinity).
+ megaco_config_misc:verify_int(K, 1, infinity).
handle_stop_user(UserMid) ->
case catch user_info(UserMid, mid) of
diff --git a/lib/megaco/src/engine/megaco_config_misc.erl b/lib/megaco/src/engine/megaco_config_misc.erl
new file mode 100644
index 0000000000..0a1601c766
--- /dev/null
+++ b/lib/megaco/src/engine/megaco_config_misc.erl
@@ -0,0 +1,113 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2011-2011. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+%%----------------------------------------------------------------------
+%% Purpose: Utility module for megaco_config
+%%----------------------------------------------------------------------
+%%
+
+-module(megaco_config_misc).
+
+%% Application internal exports
+-export([
+ verify_bool/1,
+
+ verify_int/1, verify_int/2, verify_int/3,
+ verify_strict_int/1, verify_strict_int/2, verify_strict_int/3,
+
+ verify_uint/1, verify_uint/2,
+ verify_strict_uint/1, verify_strict_uint/2
+ ]).
+
+
+%%%----------------------------------------------------------------------
+%%% API
+%%%----------------------------------------------------------------------
+
+verify_bool(true) -> true;
+verify_bool(false) -> true;
+verify_bool(_) -> false.
+
+
+%% verify_int(Val) -> boolean()
+verify_int(infinity) -> true;
+verify_int(Val) -> verify_strict_int(Val).
+
+%% verify_int(Val, Max) -> boolean()
+verify_int(Int, infinity) ->
+ verify_int(Int);
+verify_int(infinity, _Max) ->
+ true;
+verify_int(Int, Max) ->
+ verify_strict_int(Int) andalso verify_strict_int(Max) andalso (Int =< Max).
+
+%% verify_int(Val, Min, Max) -> boolean()
+verify_int(infinity, Min, infinity) ->
+ verify_strict_int(Min);
+verify_int(Val, Min, infinity) ->
+ verify_strict_int(Val) andalso
+ verify_strict_int(Min) andalso (Val >= Min);
+verify_int(Int, Min, Max) ->
+ verify_strict_int(Int, Min, Max).
+
+%% verify_strict_int(Val) -> boolean()
+verify_strict_int(Int) when is_integer(Int) -> true;
+verify_strict_int(_) -> false.
+
+%% verify_strict_int(Val, Max) -> boolean()
+verify_strict_int(Int, infinity) ->
+ verify_strict_int(Int);
+verify_strict_int(Int, Max) ->
+ verify_strict_int(Int) andalso verify_strict_int(Max) andalso (Int =< Max).
+
+%% verify_strict_int(Val, Min, Max) -> boolean()
+verify_strict_int(Val, Min, Max)
+ when (is_integer(Val) andalso
+ is_integer(Min) andalso
+ is_integer(Max) andalso
+ (Val >= Min) andalso
+ (Val =< Max)) ->
+ true;
+verify_strict_int(_Val, _Min, _Max) ->
+ false.
+
+
+%% verify_uint(Val) -> boolean()
+verify_uint(infinity) -> true;
+verify_uint(Val) -> verify_strict_uint(Val).
+
+%% verify_uint(Val, Max) -> boolean()
+verify_uint(Int, infinity) ->
+ verify_uint(Int);
+verify_uint(infinity, _Max) ->
+ true;
+verify_uint(Int, Max) ->
+ verify_strict_int(Int, 0, Max).
+
+%% verify_strict_uint(Val) -> boolean()
+verify_strict_uint(Int) when is_integer(Int) andalso (Int >= 0) -> true;
+verify_strict_uint(_) -> false.
+
+%% verify_strict_uint(Val, Max) -> boolean()
+verify_strict_uint(Int, infinity) ->
+ verify_strict_uint(Int);
+verify_strict_uint(Int, Max) ->
+ verify_strict_int(Int, 0, Max).
+
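
The behaviour of the new helpers follows directly from the clauses above; an illustrative shell session (not part of the patch):

1> megaco_config_misc:verify_uint(infinity).
true
2> megaco_config_misc:verify_strict_uint(infinity).
false
3> megaco_config_misc:verify_strict_int(5, 1, 10).
true
4> megaco_config_misc:verify_bool(maybe).
false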
diff --git a/lib/megaco/src/engine/megaco_filter.erl b/lib/megaco/src/engine/megaco_filter.erl
index 9df752789c..fb0c700a82 100644
--- a/lib/megaco/src/engine/megaco_filter.erl
+++ b/lib/megaco/src/engine/megaco_filter.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -21,6 +21,7 @@
%%----------------------------------------------------------------------
%% Purpose : Megaco/H.248 customization of the Event Tracer tool
%%----------------------------------------------------------------------
+%%
-module(megaco_filter).
@@ -33,6 +34,7 @@
-include_lib("megaco/src/app/megaco_internal.hrl").
-include_lib("et/include/et.hrl").
+
%%----------------------------------------------------------------------
%% BUGBUG: There are some opportunities for improvements:
%%
@@ -43,7 +45,8 @@
%% records that already are defined in megaco_message_{v1,v2,v3}.hrl.
%% * The records megaco_udp and megaco_tcp are copied from the files
%% megaco_udp.hrl and megaco_tcp.hrl respectively, as we cannot include
-%% both header files. They both defines the macros HEAP_SIZE and GC_MSG_LIMIT.
+%% both header files.
+%%   They both define the macros HEAP_SIZE and GC_MSG_LIMIT.
%%-include("megaco_message_internal.hrl").
-record('megaco_transaction_reply',
@@ -76,6 +79,8 @@
module = megaco,
serialize = false % false: Spawn a new process for each message
}).
+
+
%%----------------------------------------------------------------------
start() ->
@@ -360,28 +365,24 @@ pretty(_ConnData, MegaMsg) when is_record(MegaMsg, 'MegacoMessage') ->
{ok, Bin} = megaco_pretty_text_encoder:encode_message([], MegaMsg),
term_to_string(Bin);
pretty(_ConnData, CmdReq) when is_record(CmdReq, 'CommandRequest') ->
- {ok, IoList} = megaco_pretty_text_encoder:encode_command_request(CmdReq),
- term_to_string(lists:flatten(IoList));
+ {ok, Bin} = megaco_pretty_text_encoder:encode_command_request(CmdReq),
+ term_to_string(Bin);
pretty(_ConnData, {complete_success, ContextId, RepList}) ->
ActRep = #'ActionReply'{contextId = ContextId,
commandReply = RepList},
- {ok, IoList} = megaco_pretty_text_encoder:encode_action_reply(ActRep),
- term_to_string(lists:flatten(IoList));
+ {ok, Bin} = megaco_pretty_text_encoder:encode_action_reply(ActRep),
+ term_to_string(Bin);
pretty(_ConnData, AR) when is_record(AR, 'ActionReply') ->
- {ok, IoList} = megaco_pretty_text_encoder:encode_action_reply(AR),
- term_to_string(lists:flatten(IoList));
+ {ok, Bin} = megaco_pretty_text_encoder:encode_action_reply(AR),
+ term_to_string(Bin);
pretty(_ConnData, {partial_failure, ContextId, RepList}) ->
ActRep = #'ActionReply'{contextId = ContextId,
commandReply = RepList},
- {ok, IoList} = megaco_pretty_text_encoder:encode_action_reply(ActRep),
- term_to_string(lists:flatten(IoList));
+ {ok, Bin} = megaco_pretty_text_encoder:encode_action_reply(ActRep),
+ term_to_string(Bin);
pretty(_ConnData, {trans, Trans}) ->
- case megaco_pretty_text_encoder:encode_transaction(Trans) of
- {ok, Bin} when is_binary(Bin) ->
- term_to_string(binary_to_list(Bin));
- {ok, IoList} ->
- term_to_string(lists:flatten(IoList))
- end;
+ {ok, Bin} = megaco_pretty_text_encoder:encode_transaction(Trans),
+ term_to_string(Bin);
pretty(__ConnData, Other) ->
term_to_string(Other).
diff --git a/lib/megaco/src/engine/megaco_sdp.erl b/lib/megaco/src/engine/megaco_sdp.erl
index 37f28cac59..96732584fb 100644
--- a/lib/megaco/src/engine/megaco_sdp.erl
+++ b/lib/megaco/src/engine/megaco_sdp.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -877,9 +877,7 @@ decode_bandwidth_bwt("CT") ->
decode_bandwidth_bwt("AS") ->
as;
decode_bandwidth_bwt(BwType) when is_list(BwType) ->
- BwType;
-decode_bandwidth_bwt(BadBwType) ->
- error({invalid_bandwidth_bwtype, BadBwType}).
+ BwType.
encode_bandwidth_bwt(ct) ->
"CT";
diff --git a/lib/megaco/src/engine/megaco_timer.erl b/lib/megaco/src/engine/megaco_timer.erl
index 9f524523a8..1336be0b5b 100644
--- a/lib/megaco/src/engine/megaco_timer.erl
+++ b/lib/megaco/src/engine/megaco_timer.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -42,7 +42,7 @@
%% NewTimer = megaco_timer()
%% TimeoutTime = infinity | integer()
%%
-init(SingleWaitFor) when SingleWaitFor == infinity ->
+init(SingleWaitFor) when SingleWaitFor =:= infinity ->
{SingleWaitFor, timeout};
init(SingleWaitFor) when is_integer(SingleWaitFor) and (SingleWaitFor >= 0) ->
{SingleWaitFor, timeout};
@@ -76,17 +76,17 @@ verify(#megaco_incr_timer{wait_for = WaitFor,
factor = Factor,
incr = Incr,
max_retries = MaxRetries}) ->
- (megaco_config:verify_strict_uint(WaitFor) and
- megaco_config:verify_strict_uint(Factor) and
- megaco_config:verify_strict_int(Incr) and
+ (megaco_config_misc:verify_strict_uint(WaitFor) and
+ megaco_config_misc:verify_strict_uint(Factor) and
+ megaco_config_misc:verify_strict_int(Incr) and
verify_max_retries(MaxRetries));
verify(Timer) ->
- megaco_config:verify_uint(Timer).
+ megaco_config_misc:verify_uint(Timer).
verify_max_retries(infinity_restartable) ->
true;
verify_max_retries(Val) ->
- megaco_config:verify_uint(Val).
+ megaco_config_misc:verify_uint(Val).
%%-----------------------------------------------------------------
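
A hedged sketch of the two verify/1 paths above; the #megaco_incr_timer{} record is assumed to be the one defined in megaco.hrl (load it with rr/1 in the shell or -include_lib in a module), and the field values are arbitrary:

1> megaco_timer:verify(10000).    %% plain timer -> megaco_config_misc:verify_uint/1
true
2> megaco_timer:verify(infinity).
true
3> megaco_timer:verify(#megaco_incr_timer{wait_for = 3000, factor = 2,
                                          incr = 0, max_retries = infinity_restartable}).
true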
diff --git a/lib/megaco/src/engine/modules.mk b/lib/megaco/src/engine/modules.mk
index 44bcadc37b..4bc57cd63e 100644
--- a/lib/megaco/src/engine/modules.mk
+++ b/lib/megaco/src/engine/modules.mk
@@ -2,7 +2,7 @@
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2001-2009. All Rights Reserved.
+# Copyright Ericsson AB 2001-2011. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -24,6 +24,7 @@ BEHAVIOUR_MODULES = \
MODULES = \
$(BEHAVIOUR_MODULES) \
+ megaco_config_misc \
megaco_config \
megaco_digit_map \
megaco_erl_dist_encoder \
diff --git a/lib/megaco/src/flex/megaco_flex_scanner.erl b/lib/megaco/src/flex/megaco_flex_scanner.erl
index e471412c13..508f8905e7 100644
--- a/lib/megaco/src/flex/megaco_flex_scanner.erl
+++ b/lib/megaco/src/flex/megaco_flex_scanner.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -30,21 +30,11 @@
-define(SCHED_ID(), erlang:system_info(scheduler_id)).
-define(SMP_SUPPORT_DEFAULT(), erlang:system_info(smp_support)).
-is_enabled() ->
- case ?ENABLE_MEGACO_FLEX_SCANNER of
- true ->
- true;
- _ ->
- false
- end.
+is_enabled() ->
+ (true =:= ?ENABLE_MEGACO_FLEX_SCANNER).
is_reentrant_enabled() ->
- case ?MEGACO_REENTRANT_FLEX_SCANNER of
- true ->
- true;
- _ ->
- false
- end.
+ (true =:= ?MEGACO_REENTRANT_FLEX_SCANNER).
is_scanner_port(Port, Port) when is_port(Port) ->
true;
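
The rewritten is_enabled/0 and is_reentrant_enabled/0 rely on =:= always yielding a strict boolean, whatever the build-time macro expands to; a trivial illustration with assumed macro values, for the sketch only:

1> true =:= true.
true
2> true =:= false.
false
3> true =:= some_other_value.
false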
diff --git a/lib/megaco/test/megaco_SUITE.erl b/lib/megaco/test/megaco_SUITE.erl
index 4faa6736e6..007677ba4d 100644
--- a/lib/megaco/test/megaco_SUITE.erl
+++ b/lib/megaco/test/megaco_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -46,42 +46,53 @@ init() ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Top test case
-suite() -> [{ct_hooks,[{ts_install_cth,[{nodenames,1}]}]}].
+suite() -> [{ct_hooks, [{ts_install_cth, [{nodenames,1}]}]}].
all() ->
- [{group, app_test}, {group, appup_test},
- {group, config}, {group, flex}, {group, udp},
- {group, tcp}, {group, examples}, {group, digit_map},
- {group, mess}, {group, measure},
- {group, binary_term_id}, {group, codec}, {group, sdp},
- {group, mib}, {group, trans}, {group, actions},
- {group, load}, {group, pending_limit},
- {group, segmented}, {group, timer}].
+ [{group, app_test},
+ {group, appup_test},
+ {group, config},
+ {group, flex},
+ {group, udp},
+ {group, tcp},
+ {group, examples},
+ {group, digit_map},
+ {group, mess},
+ {group, measure},
+ {group, binary_term_id},
+ {group, codec},
+ {group, sdp},
+ {group, mib},
+ {group, trans},
+ {group, actions},
+ {group, load},
+ {group, pending_limit},
+ {group, segmented},
+ {group, timer}].
groups() ->
- [{tickets, [], [{group, mess}, {group, codec}]},
- {app_test, [], [{megaco_app_test, all}]},
- {appup_test, [], [{megaco_appup_test, all}]},
- {config, [], [{megaco_config_test, all}]},
- {call_flow, [], [{megaco_call_flow_test, all}]},
- {digit_map, [], [{megaco_digit_map_test, all}]},
- {mess, [], [{megaco_mess_test, all}]},
- {udp, [], [{megaco_udp_test, all}]},
- {tcp, [], [{megaco_tcp_test, all}]},
- {examples, [], [{megaco_examples_test, all}]},
- {measure, [], [{megaco_measure_test, all}]},
- {binary_term_id, [],
- [{megaco_binary_term_id_test, all}]},
- {codec, [], [{megaco_codec_test, all}]},
- {sdp, [], [{megaco_sdp_test, all}]},
- {mib, [], [{megaco_mib_test, all}]},
- {trans, [], [{megaco_trans_test, all}]},
- {actions, [], [{megaco_actions_test, all}]},
- {load, [], [{megaco_load_test, all}]},
- {pending_limit, [], [{megaco_pending_limit_test, all}]},
- {segmented, [], [{megaco_segment_test, all}]},
- {timer, [], [{megaco_timer_test, all}]},
- {flex, [], [{megaco_flex_test, all}]}].
+ [{tickets, [], [{group, mess}, {group, codec}]},
+ {app_test, [], [{megaco_app_test, all}]},
+ {appup_test, [], [{megaco_appup_test, all}]},
+ {config, [], [{megaco_config_test, all}]},
+ {call_flow, [], [{megaco_call_flow_test, all}]},
+ {digit_map, [], [{megaco_digit_map_test, all}]},
+ {mess, [], [{megaco_mess_test, all}]},
+ {udp, [], [{megaco_udp_test, all}]},
+ {tcp, [], [{megaco_tcp_test, all}]},
+ {examples, [], [{megaco_examples_test, all}]},
+ {measure, [], [{megaco_measure_test, all}]},
+ {binary_term_id, [], [{megaco_binary_term_id_test, all}]},
+ {codec, [], [{megaco_codec_test, all}]},
+ {sdp, [], [{megaco_sdp_test, all}]},
+ {mib, [], [{megaco_mib_test, all}]},
+ {trans, [], [{megaco_trans_test, all}]},
+ {actions, [], [{megaco_actions_test, all}]},
+ {load, [], [{megaco_load_test, all}]},
+ {pending_limit, [], [{megaco_pending_limit_test, all}]},
+ {segmented, [], [{megaco_segment_test, all}]},
+ {timer, [], [{megaco_timer_test, all}]},
+ {flex, [], [{megaco_flex_test, all}]}].
init_per_suite(Config) ->
Config.
diff --git a/lib/megaco/test/megaco_app_test.erl b/lib/megaco/test/megaco_app_test.erl
index 0bfa388ef6..00f7b7fb68 100644
--- a/lib/megaco/test/megaco_app_test.erl
+++ b/lib/megaco/test/megaco_app_test.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2002-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2002-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -42,11 +42,17 @@ init_per_testcase(Case, Config) ->
end_per_testcase(Case, Config) ->
megaco_test_lib:end_per_testcase(Case, Config).
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
all() ->
- [fields, modules, exportall, app_depend,
- undef_funcs].
+ [
+ fields,
+ modules,
+ exportall,
+ app_depend,
+ undef_funcs
+ ].
groups() ->
[].
@@ -112,7 +118,7 @@ fields(doc) ->
[];
fields(Config) when is_list(Config) ->
AppFile = key1search(app_file, Config),
- Fields = [vsn, description, modules, registered, applications],
+ Fields = [vsn, description, modules, registered, applications],
case check_fields(Fields, AppFile, []) of
[] ->
ok;
diff --git a/lib/megaco/test/megaco_appup_test.erl b/lib/megaco/test/megaco_appup_test.erl
index a49ab0a968..40eebcae86 100644
--- a/lib/megaco/test/megaco_appup_test.erl
+++ b/lib/megaco/test/megaco_appup_test.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2002-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2002-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -24,6 +24,7 @@
-module(megaco_appup_test).
-compile(export_all).
+-compile({no_auto_import,[error/1]}).
-include("megaco_test_lib.hrl").
diff --git a/lib/megaco/test/megaco_codec_v1_test.erl b/lib/megaco/test/megaco_codec_v1_test.erl
index 7068d005da..3a548c4d9e 100644
--- a/lib/megaco/test/megaco_codec_v1_test.erl
+++ b/lib/megaco/test/megaco_codec_v1_test.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2003-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2003-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -26,6 +26,10 @@
%% ----
+-compile({no_auto_import,[error/1]}).
+
+%% ----
+
-include_lib("megaco/include/megaco.hrl").
-include_lib("megaco/include/megaco_message_v1.hrl").
-include("megaco_test_lib.hrl").
@@ -458,76 +462,122 @@ end_per_testcase(Case, Config) ->
%% Top test case
all() ->
- [{group, text}, {group, binary}, {group, erl_dist},
- {group, tickets}].
+ [
+ {group, text},
+ {group, binary},
+ {group, erl_dist},
+ {group, tickets}
+ ].
groups() ->
- [{text, [],
- [{group, pretty}, {group, flex_pretty},
- {group, compact}, {group, flex_compact}]},
- {binary, [],
- [{group, bin}, {group, ber}, {group, ber_bin},
- {group, per}, {group, per_bin}]},
- {erl_dist, [], [{group, erl_dist_m}]},
- {pretty, [], [pretty_test_msgs]},
- {compact, [], [compact_test_msgs]},
- {flex_pretty, [], flex_pretty_cases()},
- {flex_compact, [], flex_compact_cases()},
- {bin, [], [bin_test_msgs]}, {ber, [], [ber_test_msgs]},
- {ber_bin, [], [ber_bin_test_msgs]},
- {per, [], [per_test_msgs]},
- {per_bin, [], [per_bin_test_msgs]},
- {erl_dist_m, [], [erl_dist_m_test_msgs]},
- {tickets, [],
- [{group, compact_tickets}, {group, pretty_tickets},
- {group, flex_compact_tickets},
- {group, flex_pretty_tickets}]},
- {compact_tickets, [],
- [compact_otp4011_msg1, compact_otp4011_msg2,
- compact_otp4011_msg3, compact_otp4013_msg1,
- compact_otp4085_msg1, compact_otp4085_msg2,
- compact_otp4280_msg1, compact_otp4299_msg1,
- compact_otp4299_msg2, compact_otp4359_msg1,
- compact_otp4920_msg0, compact_otp4920_msg1,
- compact_otp4920_msg2, compact_otp4920_msg3,
- compact_otp4920_msg4, compact_otp4920_msg5,
- compact_otp4920_msg6, compact_otp4920_msg7,
- compact_otp4920_msg8, compact_otp4920_msg9,
- compact_otp4920_msg10, compact_otp4920_msg11,
- compact_otp4920_msg12, compact_otp4920_msg20,
- compact_otp4920_msg21, compact_otp4920_msg22,
- compact_otp4920_msg23, compact_otp4920_msg24,
- compact_otp4920_msg25, compact_otp5186_msg01,
- compact_otp5186_msg02, compact_otp5186_msg03,
- compact_otp5186_msg04, compact_otp5186_msg05,
- compact_otp5186_msg06, compact_otp5793_msg01,
- compact_otp5993_msg01, compact_otp5993_msg02,
- compact_otp5993_msg03, compact_otp6017_msg01,
- compact_otp6017_msg02, compact_otp6017_msg03]},
- {flex_compact_tickets, [],
- flex_compact_tickets_cases()},
- {pretty_tickets, [],
- [pretty_otp4632_msg1, pretty_otp4632_msg2,
- pretty_otp4632_msg3, pretty_otp4632_msg4,
- pretty_otp4710_msg1, pretty_otp4710_msg2,
- pretty_otp4945_msg1, pretty_otp4945_msg2,
- pretty_otp4945_msg3, pretty_otp4945_msg4,
- pretty_otp4945_msg5, pretty_otp4945_msg6,
- pretty_otp4949_msg1, pretty_otp4949_msg2,
- pretty_otp4949_msg3, pretty_otp5042_msg1,
- pretty_otp5068_msg1, pretty_otp5085_msg1,
- pretty_otp5085_msg2, pretty_otp5085_msg3,
- pretty_otp5085_msg4, pretty_otp5085_msg5,
- pretty_otp5085_msg6, pretty_otp5085_msg7,
- pretty_otp5600_msg1, pretty_otp5600_msg2,
- pretty_otp5601_msg1, pretty_otp5793_msg01,
- pretty_otp5882_msg01, pretty_otp6490_msg01,
- pretty_otp6490_msg02, pretty_otp6490_msg03,
- pretty_otp6490_msg04, pretty_otp6490_msg05,
- pretty_otp6490_msg06, pretty_otp7671_msg01,
- pretty_otp7671_msg02, pretty_otp7671_msg03,
- pretty_otp7671_msg04, pretty_otp7671_msg05]},
- {flex_pretty_tickets, [], flex_pretty_tickets_cases()}].
+ [{text, [], [{group, pretty},
+ {group, flex_pretty},
+ {group, compact},
+ {group, flex_compact}]},
+ {binary, [], [{group, bin},
+ {group, ber},
+ {group, ber_bin},
+ {group, per},
+ {group, per_bin}]},
+ {erl_dist, [], [{group, erl_dist_m}]},
+ {pretty, [], [pretty_test_msgs]},
+ {compact, [], [compact_test_msgs]},
+ {flex_pretty, [], flex_pretty_cases()},
+ {flex_compact, [], flex_compact_cases()},
+ {bin, [], [bin_test_msgs]},
+ {ber, [], [ber_test_msgs]},
+ {ber_bin, [], [ber_bin_test_msgs]},
+ {per, [], [per_test_msgs]},
+ {per_bin, [], [per_bin_test_msgs]},
+ {erl_dist_m, [], [erl_dist_m_test_msgs]},
+ {tickets, [], [{group, compact_tickets},
+ {group, pretty_tickets},
+ {group, flex_compact_tickets},
+ {group, flex_pretty_tickets}]},
+ {compact_tickets, [], [compact_otp4011_msg1,
+ compact_otp4011_msg2,
+ compact_otp4011_msg3,
+ compact_otp4013_msg1,
+ compact_otp4085_msg1,
+ compact_otp4085_msg2,
+ compact_otp4280_msg1,
+ compact_otp4299_msg1,
+ compact_otp4299_msg2,
+ compact_otp4359_msg1,
+ compact_otp4920_msg0,
+ compact_otp4920_msg1,
+ compact_otp4920_msg2,
+ compact_otp4920_msg3,
+ compact_otp4920_msg4,
+ compact_otp4920_msg5,
+ compact_otp4920_msg6,
+ compact_otp4920_msg7,
+ compact_otp4920_msg8,
+ compact_otp4920_msg9,
+ compact_otp4920_msg10,
+ compact_otp4920_msg11,
+ compact_otp4920_msg12,
+ compact_otp4920_msg20,
+ compact_otp4920_msg21,
+ compact_otp4920_msg22,
+ compact_otp4920_msg23,
+ compact_otp4920_msg24,
+ compact_otp4920_msg25,
+ compact_otp5186_msg01,
+ compact_otp5186_msg02,
+ compact_otp5186_msg03,
+ compact_otp5186_msg04,
+ compact_otp5186_msg05,
+ compact_otp5186_msg06,
+ compact_otp5793_msg01,
+ compact_otp5993_msg01,
+ compact_otp5993_msg02,
+ compact_otp5993_msg03,
+ compact_otp6017_msg01,
+ compact_otp6017_msg02,
+ compact_otp6017_msg03]},
+ {flex_compact_tickets, [], flex_compact_tickets_cases()},
+ {pretty_tickets, [], [pretty_otp4632_msg1,
+ pretty_otp4632_msg2,
+ pretty_otp4632_msg3,
+ pretty_otp4632_msg4,
+ pretty_otp4710_msg1,
+ pretty_otp4710_msg2,
+ pretty_otp4945_msg1,
+ pretty_otp4945_msg2,
+ pretty_otp4945_msg3,
+ pretty_otp4945_msg4,
+ pretty_otp4945_msg5,
+ pretty_otp4945_msg6,
+ pretty_otp4949_msg1,
+ pretty_otp4949_msg2,
+ pretty_otp4949_msg3,
+ pretty_otp5042_msg1,
+ pretty_otp5068_msg1,
+ pretty_otp5085_msg1,
+ pretty_otp5085_msg2,
+ pretty_otp5085_msg3,
+ pretty_otp5085_msg4,
+ pretty_otp5085_msg5,
+ pretty_otp5085_msg6,
+ pretty_otp5085_msg7,
+ pretty_otp5600_msg1,
+ pretty_otp5600_msg2,
+ pretty_otp5601_msg1,
+ pretty_otp5793_msg01,
+ pretty_otp5882_msg01,
+ pretty_otp6490_msg01,
+ pretty_otp6490_msg02,
+ pretty_otp6490_msg03,
+ pretty_otp6490_msg04,
+ pretty_otp6490_msg05,
+ pretty_otp6490_msg06,
+ pretty_otp7671_msg01,
+ pretty_otp7671_msg02,
+ pretty_otp7671_msg03,
+ pretty_otp7671_msg04,
+ pretty_otp7671_msg05]},
+ {flex_pretty_tickets, [], flex_pretty_tickets_cases()}].
init_per_group(flex_pretty_tickets, Config) ->
flex_pretty_init(Config);
diff --git a/lib/megaco/test/megaco_test_generator.erl b/lib/megaco/test/megaco_test_generator.erl
index a021d2451b..4fbc86262e 100644
--- a/lib/megaco/test/megaco_test_generator.erl
+++ b/lib/megaco/test/megaco_test_generator.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -26,6 +26,10 @@
-behaviour(gen_server).
+-compile({no_auto_import,[error/2]}).
+
+%% ----
+
-export([
start_link/3,
start_link/4,
diff --git a/lib/megaco/test/megaco_test_lib.erl b/lib/megaco/test/megaco_test_lib.erl
index 0d2b4a3f4e..41f6c2c4cb 100644
--- a/lib/megaco/test/megaco_test_lib.erl
+++ b/lib/megaco/test/megaco_test_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -146,28 +146,28 @@ tickets(Mod, Func, Config) ->
end,
lists:map(Map, Cases);
- {req, _, {conf, Init, Cases, Finish}} ->
- case (catch Mod:Init(Config)) of
- Conf when is_list(Conf) ->
- io:format("Expand: ~p:~p ...~n", [Mod, Func]),
- Map = fun({M,_}) when is_atom(M) ->
- tickets(M, tickets, Config);
- (F) when is_atom(F) ->
- tickets(Mod, F, Config);
- (Case) -> Case
- end,
- Res = lists:map(Map, Cases),
- (catch Mod:Finish(Conf)),
- Res;
+%% {req, _, {conf, Init, Cases, Finish}} ->
+%% case (catch Mod:Init(Config)) of
+%% Conf when is_list(Conf) ->
+%% io:format("Expand: ~p:~p ...~n", [Mod, Func]),
+%% Map = fun({M,_}) when is_atom(M) ->
+%% tickets(M, tickets, Config);
+%% (F) when is_atom(F) ->
+%% tickets(Mod, F, Config);
+%% (Case) -> Case
+%% end,
+%% Res = lists:map(Map, Cases),
+%% (catch Mod:Finish(Conf)),
+%% Res;
- {'EXIT', {skipped, Reason}} ->
- io:format(" => skipping: ~p~n", [Reason]),
- [{skipped, {Mod, Func}, Reason}];
+%% {'EXIT', {skipped, Reason}} ->
+%% io:format(" => skipping: ~p~n", [Reason]),
+%% [{skipped, {Mod, Func}, Reason}];
- Error ->
- io:format(" => init failed: ~p~n", [Error]),
- [{failed, {Mod, Func}, Error}]
- end;
+%% Error ->
+%% io:format(" => init failed: ~p~n", [Error]),
+%% [{failed, {Mod, Func}, Error}]
+%% end;
{'EXIT', {undef, _}} ->
io:format("Undefined: ~p~n", [{Mod, Func}]),
@@ -252,6 +252,8 @@ alloc_instance_mem_info(Key, InstanceInfo) ->
end.
+t([Case]) when is_atom(Case) ->
+ t(Case);
t(Case) ->
process_flag(trap_exit, true),
MEM = fun() -> case (catch erlang:memory()) of
@@ -266,11 +268,65 @@ t(Case) ->
Res = lists:flatten(t(Case, default_config())),
Alloc2 = alloc_info(),
Mem2 = MEM(),
- %% io:format("Res: ~p~n", [Res]),
display_result(Res, Alloc1, Mem1, Alloc2, Mem2),
Res.
-t({Mod, Fun}, Config) when is_atom(Mod) andalso is_atom(Fun) ->
+
+groups(Mod) when is_atom(Mod) ->
+ try Mod:groups() of
+ Groups when is_list(Groups) ->
+ Groups;
+ BadGroups ->
+ exit({bad_groups, Mod, BadGroups})
+ catch
+ _:_ ->
+ []
+ end.
+
+init_suite(Mod, Config) ->
+ Mod:init_per_suite(Config).
+
+end_suite(Mod, Config) ->
+ Mod:end_per_suite(Config).
+
+init_group(Mod, Group, Config) ->
+ Mod:init_per_group(Group, Config).
+
+end_group(Mod, Group, Config) ->
+    Mod:end_per_group(Group, Config).
+
+%% This is for sub-SUITEs
+t({_Mod, {NewMod, all}, _Groups}, _Config) when is_atom(NewMod) ->
+ t(NewMod);
+t({Mod, {group, Name} = Group, Groups}, Config)
+ when is_atom(Mod) andalso is_atom(Name) andalso is_list(Groups) ->
+ case lists:keysearch(Name, 1, Groups) of
+ {value, {Name, _Props, GroupsAndCases}} ->
+ try init_group(Mod, Name, Config) of
+ Config2 when is_list(Config2) ->
+ Res = [t({Mod, Case, Groups}, Config2) ||
+ Case <- GroupsAndCases],
+ (catch end_group(Mod, Name, Config2)),
+ Res;
+ Error ->
+ io:format(" => group (~w) init failed: ~p~n",
+ [Name, Error]),
+ [{failed, {Mod, Group}, Error}]
+ catch
+ exit:{skipped, SkipReason} ->
+ io:format(" => skipping group: ~p~n", [SkipReason]),
+ [{skipped, {Mod, Group}, SkipReason, 0}];
+ exit:{undef, _} ->
+ [t({Mod, Case, Groups}, Config) ||
+ Case <- GroupsAndCases];
+ T:E ->
+ [{failed, {Mod, Group}, {T,E}, 0}]
+ end;
+ false ->
+ exit({unknown_group, Mod, Name, Groups})
+ end;
+t({Mod, Fun, _}, Config)
+ when is_atom(Mod) andalso is_atom(Fun) ->
case catch apply(Mod, Fun, [suite]) of
[] ->
io:format("Eval: ~p:", [{Mod, Fun}]),
@@ -286,26 +342,6 @@ t({Mod, Fun}, Config) when is_atom(Mod) andalso is_atom(Fun) ->
end,
t(lists:map(Map, Cases), Config);
- {req, _, {conf, Init, Cases, Finish}} ->
- case (catch apply(Mod, Init, [Config])) of
- Conf when is_list(Conf) ->
- io:format("Expand: ~p ...~n", [{Mod, Fun}]),
- Map = fun(Case) when is_atom(Case) -> {Mod, Case};
- (Case) -> Case
- end,
- Res = t(lists:map(Map, Cases), Conf),
- (catch apply(Mod, Finish, [Conf])),
- Res;
-
- {'EXIT', {skipped, Reason}} ->
- io:format(" => skipping: ~p~n", [Reason]),
- [{skipped, {Mod, Fun}, Reason, 0}];
-
- Error ->
- io:format(" => failed: ~p~n", [Error]),
- [{failed, {Mod, Fun}, Error, 0}]
- end;
-
{'EXIT', {undef, _}} ->
io:format("Undefined: ~p~n", [{Mod, Fun}]),
[{nyi, {Mod, Fun}, ok, 0}];
@@ -315,10 +351,38 @@ t({Mod, Fun}, Config) when is_atom(Mod) andalso is_atom(Fun) ->
[{failed, {Mod, Fun}, Error, 0}]
end;
t(Mod, Config) when is_atom(Mod) ->
- Res = t({Mod, all}, Config),
- Res;
-t(Cases, Config) when is_list(Cases) ->
- [t(Case, Config) || Case <- Cases];
+ %% This is assumed to be a test suite, so we start by calling
+ %% the top test suite function(s) (all/0 and groups/0).
+ case (catch Mod:all()) of
+ Cases when is_list(Cases) ->
+ %% The list may contain atoms (actual test cases) and
+ %% group-tuples (a tuple naming a group of test cases).
+ %% A group is defined by the (optional) groups/0 function.
+ Groups = groups(Mod),
+ try init_suite(Mod, Config) of
+ Config2 when is_list(Config2) ->
+ Res = [t({Mod, Case, Groups}, Config2) || Case <- Cases],
+ (catch end_suite(Mod, Config2)),
+ Res;
+ Error ->
+ io:format(" => suite init failed: ~p~n", [Error]),
+ [{failed, {Mod, init_per_suite}, Error}]
+ catch
+ exit:{skipped, SkipReason} ->
+ io:format(" => skipping suite: ~p~n", [SkipReason]),
+ [{skipped, {Mod, init_per_suite}, SkipReason, 0}];
+ exit:{undef, _} ->
+ [t({Mod, Case, Groups}, Config) || Case <- Cases];
+ T:E ->
+ [{failed, {Mod, init_per_suite}, {T,E}, 0}]
+ end;
+ {'EXIT', {undef, _}} ->
+ io:format("Undefined: ~p~n", [{Mod, all}]),
+ [{nyi, {Mod, all}, ok, 0}];
+
+ Crap ->
+ Crap
+ end;
t(Bad, _Config) ->
[{badarg, Bad, ok, 0}].
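
A minimal suite skeleton (module and case names are hypothetical, for illustration only) in the shape that the reworked t/1 and t/2 above walk: all/0 lists cases and {group, Name} entries, groups/0 resolves each group, and every case also answers Case(suite) -> [] for the dispatcher:

-module(my_example_SUITE).
-export([all/0, groups/0,
         init_per_suite/1, end_per_suite/1,
         init_per_group/2, end_per_group/2,
         init_per_testcase/2, end_per_testcase/2,
         case_a/1, case_b/1, case_c/1]).

all()    -> [{group, basic}, case_c].
groups() -> [{basic, [], [case_a, case_b]}].

init_per_suite(Config)           -> Config.
end_per_suite(_Config)           -> ok.
init_per_group(_Group, Config)   -> Config.
end_per_group(_Group, _Config)   -> ok.
init_per_testcase(_Case, Config) -> Config.
end_per_testcase(_Case, _Config) -> ok.

case_a(suite) -> [];
case_a(Config) when is_list(Config) -> ok.

case_b(suite) -> [];
case_b(Config) when is_list(Config) -> ok.

case_c(suite) -> [];
case_c(Config) when is_list(Config) -> ok.

%% megaco_test_lib:t(my_example_SUITE) calls all/0, resolves {group, basic}
%% via groups/0, and then runs case_a, case_b and case_c.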
@@ -495,28 +559,56 @@ do_display_memory([{Key, Mem1}|MemInfo1], MemInfo2) ->
display_result([]) ->
io:format("OK~n", []);
display_result(Res) when is_list(Res) ->
- Ok = [{MF, Time} || {ok, MF, _, Time} <- Res],
- Nyi = [MF || {nyi, MF, _, _Time} <- Res],
- Skipped = [{MF, Reason} || {skipped, MF, Reason, _Time} <- Res],
- Failed = [{MF, Reason} || {failed, MF, Reason, _Time} <- Res],
- Crashed = [{MF, Reason} || {crashed, MF, Reason, _Time} <- Res],
- display_summery(Ok, Nyi, Skipped, Failed, Crashed),
+ Ok = [{MF, Time} || {ok, MF, _, Time} <- Res],
+ Nyi = [MF || {nyi, MF, _, _Time} <- Res],
+ SkippedGrps = [{{M,G}, Reason} ||
+ {skipped, {M, {group, G}}, Reason, _Time} <- Res],
+ SkippedCases = [{MF, Reason} ||
+ {skipped, {_M, F} = MF, Reason, _Time} <- Res,
+ is_atom(F)],
+ FailedGrps = [{{M,G}, Reason} ||
+ {failed, {M, {group, G}}, Reason, _Time} <- Res],
+ FailedCases = [{MF, Reason} ||
+ {failed, {_M, F} = MF, Reason, _Time} <- Res,
+ is_atom(F)],
+ Crashed = [{MF, Reason} || {crashed, MF, Reason, _Time} <- Res],
+ display_summery(Ok, Nyi,
+ SkippedGrps, SkippedCases,
+ FailedGrps, FailedCases,
+ Crashed),
display_ok(Ok),
- display_skipped(Skipped),
- display_failed(Failed),
+ display_skipped("groups", SkippedGrps),
+ display_skipped("test cases", SkippedCases),
+ display_failed("groups", FailedGrps),
+ display_failed("test cases", FailedCases),
display_crashed(Crashed).
-display_summery(Ok, Nyi, Skipped, Failed, Crashed) ->
+display_summery(Ok, Nyi,
+ SkippedGrps, SkippedCases,
+ FailedGrps, FailedCases,
+ Crashed) ->
io:format("~nTest case summery:~n", []),
- display_summery(Ok, "successfull"),
- display_summery(Nyi, "not yet implemented"),
- display_summery(Skipped, "skipped"),
- display_summery(Failed, "failed"),
- display_summery(Crashed, "crashed"),
+    display_summery(Ok,           "test case", "successful"),
+ display_summery(Nyi, "test case", "not yet implemented"),
+ display_summery(SkippedGrps, "group", "skipped"),
+ display_summery(SkippedCases, "test case", "skipped"),
+ display_summery(FailedGrps, "group", "failed"),
+ display_summery(FailedCases, "test case", "failed"),
+ display_summery(Crashed, "test case", "crashed"),
io:format("~n", []).
-display_summery(Res, Info) ->
- io:format(" ~w test cases ~s~n", [length(Res), Info]).
+
+display_summery(Res, Kind, Info) ->
+ Len = length(Res),
+ if
+ Len =:= 1 ->
+ display_summery(Len, Kind ++ " " ++ Info);
+ true ->
+ display_summery(Len, Kind ++ "s " ++ Info)
+ end.
+
+display_summery(Len, Info) ->
+ io:format(" ~w ~s~n", [Len, Info]).
display_ok([]) ->
ok;
@@ -528,20 +620,20 @@ display_ok(Ok) ->
lists:foreach(F, Ok),
io:format("~n", []).
-display_skipped([]) ->
+display_skipped(_, []) ->
ok;
-display_skipped(Skipped) ->
- io:format("Skipped test cases:~n", []),
- F = fun({MF, Reason}) -> io:format(" ~p => ~p~n", [MF, Reason]) end,
+display_skipped(Pre, Skipped) ->
+ io:format("Skipped ~s:~n", [Pre]),
+ F = fun({X, Reason}) -> io:format(" ~p => ~p~n", [X, Reason]) end,
lists:foreach(F, Skipped),
io:format("~n", []).
-display_failed([]) ->
+display_failed(_, []) ->
ok;
-display_failed(Failed) ->
- io:format("Failed test cases:~n", []),
- F = fun({MF, Reason}) -> io:format(" ~p => ~p~n", [MF, Reason]) end,
+display_failed(Pre, Failed) ->
+ io:format("Failed ~s:~n", [Pre]),
+ F = fun({X, Reason}) -> io:format(" ~p => ~p~n", [X, Reason]) end,
lists:foreach(F, Failed),
io:format("~n", []).
@@ -837,5 +929,5 @@ start_nodes([Node | Nodes], File, Line) ->
start_nodes([], _File, _Line) ->
ok.
-p(F,A) ->
- io:format("~p" ++ F ++ "~n", [self()|A]).
+p(F, A) ->
+ io:format("~p~w:" ++ F ++ "~n", [self(), ?MODULE |A]).
diff --git a/lib/megaco/test/megaco_test_megaco_generator.erl b/lib/megaco/test/megaco_test_megaco_generator.erl
index 21b33e4abc..f0c723d2cf 100644
--- a/lib/megaco/test/megaco_test_megaco_generator.erl
+++ b/lib/megaco/test/megaco_test_megaco_generator.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -26,6 +26,8 @@
-behaviour(megaco_test_generator).
+-compile({no_auto_import,[error/1]}).
+
%% API
-export([
start_link/1, start_link/2,
diff --git a/lib/megaco/test/megaco_test_msg_prev3a_lib.erl b/lib/megaco/test/megaco_test_msg_prev3a_lib.erl
index 2fb0752865..fad7f29831 100644
--- a/lib/megaco/test/megaco_test_msg_prev3a_lib.erl
+++ b/lib/megaco/test/megaco_test_msg_prev3a_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -26,6 +26,10 @@
%% ----
+-compile({no_auto_import,[error/1]}).
+
+%% ----
+
-include_lib("megaco/include/megaco_message_prev3a.hrl").
-include_lib("megaco/include/megaco.hrl").
diff --git a/lib/megaco/test/megaco_test_msg_prev3b_lib.erl b/lib/megaco/test/megaco_test_msg_prev3b_lib.erl
index 6e042080b7..2f1a093728 100644
--- a/lib/megaco/test/megaco_test_msg_prev3b_lib.erl
+++ b/lib/megaco/test/megaco_test_msg_prev3b_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -26,6 +26,10 @@
%% ----
+-compile({no_auto_import,[error/1]}).
+
+%% ----
+
-include_lib("megaco/include/megaco_message_prev3b.hrl").
-include_lib("megaco/include/megaco.hrl").
diff --git a/lib/megaco/test/megaco_test_msg_prev3c_lib.erl b/lib/megaco/test/megaco_test_msg_prev3c_lib.erl
index c768105194..884e2f2bad 100644
--- a/lib/megaco/test/megaco_test_msg_prev3c_lib.erl
+++ b/lib/megaco/test/megaco_test_msg_prev3c_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -26,6 +26,10 @@
%% ----
+-compile({no_auto_import,[error/1]}).
+
+%% ----
+
-include_lib("megaco/include/megaco_message_prev3c.hrl").
-include_lib("megaco/include/megaco.hrl").
diff --git a/lib/megaco/test/megaco_test_msg_v1_lib.erl b/lib/megaco/test/megaco_test_msg_v1_lib.erl
index 424a66b7c9..76665cb575 100644
--- a/lib/megaco/test/megaco_test_msg_v1_lib.erl
+++ b/lib/megaco/test/megaco_test_msg_v1_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -28,6 +28,10 @@
%% ----
+-compile({no_auto_import,[error/1]}).
+
+%% ----
+
-include_lib("megaco/include/megaco_message_v1.hrl").
-include_lib("megaco/include/megaco.hrl").
diff --git a/lib/megaco/test/megaco_test_msg_v2_lib.erl b/lib/megaco/test/megaco_test_msg_v2_lib.erl
index b29920006d..66e423284a 100644
--- a/lib/megaco/test/megaco_test_msg_v2_lib.erl
+++ b/lib/megaco/test/megaco_test_msg_v2_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -26,6 +26,10 @@
%% ----
+-compile({no_auto_import,[error/1]}).
+
+%% ----
+
-include_lib("megaco/include/megaco_message_v2.hrl").
-include_lib("megaco/include/megaco.hrl").
diff --git a/lib/megaco/test/megaco_test_msg_v3_lib.erl b/lib/megaco/test/megaco_test_msg_v3_lib.erl
index fee61542b7..24492167ff 100644
--- a/lib/megaco/test/megaco_test_msg_v3_lib.erl
+++ b/lib/megaco/test/megaco_test_msg_v3_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -26,6 +26,10 @@
%% ----
+-compile({no_auto_import,[error/1]}).
+
+%% ----
+
-include_lib("megaco/include/megaco_message_v3.hrl").
-include_lib("megaco/include/megaco.hrl").
diff --git a/lib/megaco/test/megaco_test_tcp_generator.erl b/lib/megaco/test/megaco_test_tcp_generator.erl
index 416d56d742..3ed4c49bab 100644
--- a/lib/megaco/test/megaco_test_tcp_generator.erl
+++ b/lib/megaco/test/megaco_test_tcp_generator.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -26,6 +26,8 @@
-behaviour(megaco_test_generator).
+-compile({no_auto_import,[error/1]}).
+
%% API
-export([
start_link/1, start_link/2,
diff --git a/lib/megaco/test/megaco_timer_test.erl b/lib/megaco/test/megaco_timer_test.erl
index cccf4651ab..9b9103c40b 100644
--- a/lib/megaco/test/megaco_timer_test.erl
+++ b/lib/megaco/test/megaco_timer_test.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -23,6 +23,8 @@
%%----------------------------------------------------------------------
-module(megaco_timer_test).
+-compile({no_auto_import,[error/1]}).
+
-export([
t/0, t/1,
init_per_testcase/2, end_per_testcase/2,
diff --git a/lib/megaco/vsn.mk b/lib/megaco/vsn.mk
index 9fc0e0f2fa..5f71712360 100644
--- a/lib/megaco/vsn.mk
+++ b/lib/megaco/vsn.mk
@@ -1,4 +1,23 @@
+#-*-makefile-*- ; force emacs to enter makefile-mode
+
+# %CopyrightBegin%
+#
+# Copyright Ericsson AB 1997-2011. All Rights Reserved.
+#
+# The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved online at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# %CopyrightEnd%
+
APPLICATION = megaco
-MEGACO_VSN = 3.15
-PRE_VSN =
-APP_VSN = "$(APPLICATION)-$(MEGACO_VSN)$(PRE_VSN)"
+MEGACO_VSN = 3.15.1
+PRE_VSN =
+APP_VSN = "$(APPLICATION)-$(MEGACO_VSN)$(PRE_VSN)"
diff --git a/lib/mnesia/src/mnesia.appup.src b/lib/mnesia/src/mnesia.appup.src
index 22ef5178a7..0eff761b61 100644
--- a/lib/mnesia/src/mnesia.appup.src
+++ b/lib/mnesia/src/mnesia.appup.src
@@ -1,12 +1,24 @@
%% -*- erlang -*-
{"%VSN%",
- [
- {"4.4.15",[
- {update, mnesia_dumper, soft, soft_purge, soft_purge, []}
- ]}
+ [
+ {"4.4.16",[
+ {update, mnesia_frag, soft, soft_purge, soft_purge, []},
+ {update, mnesia_schema, soft, soft_purge, soft_purge, []}
+ ]},
+ {"4.4.15",[
+ {update, mnesia_frag, soft, soft_purge, soft_purge, []},
+ {update, mnesia, soft, soft_purge, soft_purge, []},
+ {update, mnesia_dumper, soft, soft_purge, soft_purge, []}
+ ]}
],
[
+ {"4.4.16",[
+ {update, mnesia_frag, soft, soft_purge, soft_purge, []},
+ {update, mnesia_schema, soft, soft_purge, soft_purge, []}
+ ]},
{"4.4.15",[
+ {update, mnesia_frag, soft, soft_purge, soft_purge, []},
+ {update, mnesia, soft, soft_purge, soft_purge, []},
{update, mnesia_dumper, soft, soft_purge, soft_purge, []}
]}
]
diff --git a/lib/mnesia/src/mnesia.erl b/lib/mnesia/src/mnesia.erl
index fb29007780..025b32f506 100644
--- a/lib/mnesia/src/mnesia.erl
+++ b/lib/mnesia/src/mnesia.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -302,7 +302,7 @@ ms() ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Activity mgt
--spec(abort/1 :: (_) -> no_return()).
+-spec abort(_) -> no_return().
abort(Reason) ->
exit({aborted, Reason}).
@@ -1835,6 +1835,7 @@ do_dirty_rpc(Tab, Node, M, F, Args) ->
%% Info
%% Info about one table
+-spec table_info(atom(), any()) -> any().
table_info(Tab, Item) ->
case get(mnesia_activity_state) of
undefined ->
@@ -1868,7 +1869,7 @@ any_table_info(Tab, Item) when is_atom(Tab) ->
type ->
case ?catch_val({Tab, setorbag}) of
{'EXIT', _} ->
- bad_info_reply(Tab, Item);
+ abort({no_exists, Tab, Item});
Val ->
Val
end;
@@ -1886,7 +1887,7 @@ any_table_info(Tab, Item) when is_atom(Tab) ->
_ ->
case ?catch_val({Tab, Item}) of
{'EXIT', _} ->
- bad_info_reply(Tab, Item);
+ abort({no_exists, Tab, Item});
Val ->
Val
end
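
Given abort/1 above (exit({aborted, Reason})), a lookup of a missing table or item now aborts with no_exists instead of going through bad_info_reply/2; a hedged shell sketch, assuming mnesia is running and the call is made outside a transaction:

1> catch mnesia:table_info(no_such_tab, type).
{'EXIT',{aborted,{no_exists,no_such_tab,type}}}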
diff --git a/lib/mnesia/src/mnesia_bup.erl b/lib/mnesia/src/mnesia_bup.erl
index 37a8258d74..47dcdad7ac 100644
--- a/lib/mnesia/src/mnesia_bup.erl
+++ b/lib/mnesia/src/mnesia_bup.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -65,6 +65,8 @@
default_op = keep_tables
}).
+-type fallback_args() :: #fallback_args{}.
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Backup iterator
@@ -108,6 +110,7 @@ iter(R, Header, Schema, Fun, Acc, BupItems) ->
Acc2 = Fun(BupItems, Header, Schema, Acc),
iter(R, Header, Schema, Fun, Acc2, []).
+-spec safe_apply(#restore{}, atom(), list()) -> tuple().
safe_apply(R, write, [_, Items]) when Items =:= [] ->
R;
safe_apply(R, What, Args) ->
@@ -570,6 +573,7 @@ fallback_bup() -> mnesia_lib:dir(fallback_name()).
fallback_tmp_name() -> "FALLBACK.TMP".
%% fallback_full_tmp_name() -> mnesia_lib:dir(fallback_tmp_name()).
+-spec fallback_receiver(pid(), fallback_args()) -> no_return().
fallback_receiver(Master, FA) ->
process_flag(trap_exit, true),
@@ -981,6 +985,7 @@ do_uninstall_fallback(FA) ->
{error, Reason}
end.
+-spec uninstall_fallback_master(pid(), fallback_args()) -> no_return().
uninstall_fallback_master(ClientPid, FA) ->
process_flag(trap_exit, true),
diff --git a/lib/mnesia/src/mnesia_frag.erl b/lib/mnesia/src/mnesia_frag.erl
index a2958ab461..9e77fe0b9f 100644
--- a/lib/mnesia/src/mnesia_frag.erl
+++ b/lib/mnesia/src/mnesia_frag.erl
@@ -1,7 +1,7 @@
%%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1998-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1998-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -209,7 +209,7 @@ first(ActivityId, Opaque, Tab) ->
end
end.
-search_first(ActivityId, Opaque, Tab, N, FH) when N =< FH#frag_state.n_fragments ->
+search_first(ActivityId, Opaque, Tab, N, FH) when N < FH#frag_state.n_fragments ->
NextN = N + 1,
NextFrag = n_to_frag_name(Tab, NextN),
case mnesia:first(ActivityId, Opaque, NextFrag) of
@@ -448,13 +448,15 @@ do_remote_select(_ReplyTo, _Ref, [], _MatchSpec) ->
local_collect(Ref, Pid, Type, LocalMatch, OldSelectFun) ->
receive
- {local_select, Ref, LocalRes} ->
- remote_collect(Ref, Type, LocalRes, LocalMatch, OldSelectFun);
+ {local_select, Ref, ok} ->
+ remote_collect_ok(Ref, Type, LocalMatch, OldSelectFun);
+ {local_select, Ref, {error, Reason}} ->
+ remote_collect_error(Ref, Type, Reason, OldSelectFun);
{'EXIT', Pid, Reason} ->
- remote_collect(Ref, Type, {error, Reason}, [], OldSelectFun)
+ remote_collect_error(Ref, Type, Reason, OldSelectFun)
end.
-remote_collect(Ref, Type, LocalRes = ok, Acc, OldSelectFun) ->
+remote_collect_ok(Ref, Type, Acc, OldSelectFun) ->
receive
{remote_select, Ref, Node, RemoteRes} ->
case RemoteRes of
@@ -463,19 +465,21 @@ remote_collect(Ref, Type, LocalRes = ok, Acc, OldSelectFun) ->
ordered_set -> lists:merge(RemoteMatch, Acc);
_ -> RemoteMatch ++ Acc
end,
- remote_collect(Ref, Type, LocalRes, Matches, OldSelectFun);
+ remote_collect_ok(Ref, Type, Matches, OldSelectFun);
_ ->
- remote_collect(Ref, Type, {error, {node_not_running, Node}}, [], OldSelectFun)
+ Reason = {node_not_running, Node},
+ remote_collect_error(Ref, Type, Reason, OldSelectFun)
end
after 0 ->
Acc
- end;
-remote_collect(Ref, Type, LocalRes = {error, Reason}, _Acc, OldSelectFun) ->
+ end.
+
+remote_collect_error(Ref, Type, Reason, OldSelectFun) ->
receive
{remote_select, Ref, _Node, _RemoteRes} ->
- remote_collect(Ref, Type, LocalRes, [], OldSelectFun)
+ remote_collect_error(Ref, Type, Reason, OldSelectFun)
after 0 ->
- mnesia:abort(Reason)
+ mnesia:abort({error, Reason})
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
diff --git a/lib/mnesia/src/mnesia_index.erl b/lib/mnesia/src/mnesia_index.erl
index 4e6e8a997c..61210d7e55 100644
--- a/lib/mnesia/src/mnesia_index.erl
+++ b/lib/mnesia/src/mnesia_index.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
diff --git a/lib/mnesia/src/mnesia_lib.erl b/lib/mnesia/src/mnesia_lib.erl
index 3da3dd2f5c..36bcfe8de9 100644
--- a/lib/mnesia/src/mnesia_lib.erl
+++ b/lib/mnesia/src/mnesia_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -399,7 +399,7 @@ other_val(Var, Other) ->
pr_other(Var, Other)
end.
--spec(pr_other/2 :: (_,_) -> no_return()).
+-spec pr_other(_,_) -> no_return().
pr_other(Var, Other) ->
Why =
diff --git a/lib/mnesia/src/mnesia_locker.erl b/lib/mnesia/src/mnesia_locker.erl
index 6b5770d91e..ca0cc79c45 100644
--- a/lib/mnesia/src/mnesia_locker.erl
+++ b/lib/mnesia/src/mnesia_locker.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -1104,6 +1104,7 @@ do_stop() ->
system_continue(_Parent, _Debug, State) ->
loop(State).
+-spec system_terminate(_, _, _, _) -> no_return().
system_terminate(_Reason, _Parent, _Debug, _State) ->
do_stop().
diff --git a/lib/mnesia/src/mnesia_recover.erl b/lib/mnesia/src/mnesia_recover.erl
index 7435b6896a..b3eed1de6e 100644
--- a/lib/mnesia/src/mnesia_recover.erl
+++ b/lib/mnesia/src/mnesia_recover.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
diff --git a/lib/mnesia/src/mnesia_schema.erl b/lib/mnesia/src/mnesia_schema.erl
index 17e570b881..d1d892a387 100644
--- a/lib/mnesia/src/mnesia_schema.erl
+++ b/lib/mnesia/src/mnesia_schema.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -2686,7 +2686,8 @@ do_merge_schema(LockTabs0) ->
if
RemoteRunning /= RemoteRunning1 ->
mnesia_lib:error("Mnesia on ~p could not connect to node(s) ~p~n",
- [node(), RemoteRunning1 -- RemoteRunning]);
+ [node(), RemoteRunning1 -- RemoteRunning]),
+ mnesia:abort({node_not_running, RemoteRunning1 -- RemoteRunning});
true -> ok
end,
NeedsLock = RemoteRunning -- LockedAlready,
@@ -3029,7 +3030,9 @@ announce_im_running([N | Ns], SchemaCs) ->
mnesia_lib:add({current, db_nodes}, N),
mnesia_controller:add_active_replica(schema, N, SchemaCs);
false ->
- ignore
+ mnesia_lib:error("Mnesia on ~p could not connect to node ~p~n",
+ [node(), N]),
+ mnesia:abort({node_not_running, N})
end,
announce_im_running(Ns, SchemaCs);
announce_im_running([], _) ->
diff --git a/lib/mnesia/src/mnesia_snmp_hook.erl b/lib/mnesia/src/mnesia_snmp_hook.erl
index 8b4b5231e1..893b39f3c0 100644
--- a/lib/mnesia/src/mnesia_snmp_hook.erl
+++ b/lib/mnesia/src/mnesia_snmp_hook.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
diff --git a/lib/mnesia/src/mnesia_tm.erl b/lib/mnesia/src/mnesia_tm.erl
index f3ffac5493..bb8e788b40 100644
--- a/lib/mnesia/src/mnesia_tm.erl
+++ b/lib/mnesia/src/mnesia_tm.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -1604,6 +1604,7 @@ tell_participants([Pid | Pids], Msg) ->
tell_participants([], _Msg) ->
ok.
+-spec commit_participant(_, _, _, _, _) -> no_return().
%% Trap exit because we can get a shutdown from application manager
commit_participant(Coord, Tid, Bin, DiscNs, RamNs) when is_binary(Bin) ->
process_flag(trap_exit, true),
@@ -2279,6 +2280,7 @@ fixtable(Tab, Lock, Me) ->
system_continue(_Parent, _Debug, State) ->
doit_loop(State).
+-spec system_terminate(_, _, _, _) -> no_return().
system_terminate(_Reason, _Parent, _Debug, State) ->
do_stop(State).
diff --git a/lib/mnesia/test/mnesia_SUITE.erl b/lib/mnesia/test/mnesia_SUITE.erl
index fe7d366eb3..8ba8427213 100644
--- a/lib/mnesia/test/mnesia_SUITE.erl
+++ b/lib/mnesia/test/mnesia_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -30,7 +30,7 @@ end_per_testcase(Func, Conf) ->
mnesia_test_lib:end_per_testcase(Func, Conf).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-suite() -> [{ct_hooks,[{ts_install_cth,[{nodenames,1}]}]}].
+suite() -> [{ct_hooks,[{ts_install_cth,[{nodenames,2}]}]}].
%% Verify that Mnesia really is a distributed real-time DBMS.
diff --git a/lib/mnesia/vsn.mk b/lib/mnesia/vsn.mk
index 5b52bc6075..5247657b68 100644
--- a/lib/mnesia/vsn.mk
+++ b/lib/mnesia/vsn.mk
@@ -1 +1 @@
-MNESIA_VSN = 4.4.16
+MNESIA_VSN = 4.4.17
diff --git a/lib/observer/doc/src/crashdump.xml b/lib/observer/doc/src/crashdump.xml
index f8d7641524..b6056c2ed1 100644
--- a/lib/observer/doc/src/crashdump.xml
+++ b/lib/observer/doc/src/crashdump.xml
@@ -5,7 +5,7 @@
<header>
<copyright>
<year>2003</year>
- <year>2007</year>
+ <year>2011</year>
<holder>Ericsson AB, All Rights Reserved</holder>
</copyright>
<legalnotice>
@@ -38,6 +38,10 @@
<description>
<p>The Crashdump Viewer is an HTML based tool for browsing Erlang
crashdumps. Crashdump Viewer runs under the WebTool application.</p>
+
+ <p>See the <seealso marker="crashdump_ug">user's guide</seealso>
+ for more information about how to get started with the Crashdump
+ Viewer.</p>
</description>
<funcs>
<func>
diff --git a/lib/observer/doc/src/crashdump_help.html b/lib/observer/doc/src/crashdump_help.html
index 736a024288..268b9495d6 100644
--- a/lib/observer/doc/src/crashdump_help.html
+++ b/lib/observer/doc/src/crashdump_help.html
@@ -131,7 +131,7 @@ SRC="min_head.gif"></a>
- <a NAME="ets_tables">
+ <a NAME="ets_tables"><a NAME="internal_ets_tables">
<h3>ETS tables</h3>
<p>The ETS table information page shows all ETS table
@@ -304,4 +304,4 @@ Copyright &copy; 1991-2003
</font>
</center>
</body>
-</html>
\ No newline at end of file
+</html>
diff --git a/lib/observer/doc/src/crashdump_ug.xml b/lib/observer/doc/src/crashdump_ug.xml
index 9913b30e38..dc65fe5b39 100644
--- a/lib/observer/doc/src/crashdump_ug.xml
+++ b/lib/observer/doc/src/crashdump_ug.xml
@@ -4,7 +4,7 @@
<chapter>
<header>
<copyright>
- <year>2003</year><year>2009</year>
+ <year>2003</year><year>2011</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -38,12 +38,31 @@
<section>
<title>Getting Started</title>
- <p>From an erlang node, start Crashdump Viewer by calling
- <c>crashdump_viewer:start()</c>. This will automatically start
- WebTool and display the web address where WebTool can be
- found. See the documentation for the WebTool application for
- further information about how to use WebTool.
- </p>
+
+ <p>The easiest way to start Crashdump Viewer is to use the
+ provided shell script named <c>cdv</c> with the full path to the
+ erlang crashdump as an argument. The script can be found in the
+ priv directory of the <c>observer</c> application. This starts
+ WebTool, Crashdump Viewer and a web browser, and loads the given
+ file. The browser should then display a page named General
+ Information which shows a short summary of the information in
+ the crashdump.</p>
+
+ <p>The default browser is Internet Explorer on Windows or else
+ Firefox. To use another browser, give the browser's start command
+ as the second argument to <c>cdv</c>. If the given browser name is
+ not known to Crashdump Viewer, the browser argument is executed as
+ a command with the start URL as the only argument.</p>
+
+ <p>Under Windows the batch file <c>cdv.bat</c> can be used.</p>
+
+ <p>It is also possible to start the Crashdump Viewer from within
+ an erlang node by calling <seealso
+ marker="crashdump_viewer#start/0">crashdump_viewer:start/0</seealso>. This
+ will automatically start WebTool and display the web address where
+ WebTool can be found. See the documentation for the WebTool
+ application for further information about how to use WebTool.</p>
+
<p>Point your web browser to the address displayed, and you should
now see the start page of WebTool. At the top of the page, you
will see a link to "CrashDumpViewer". Click this link to get to
@@ -52,15 +71,12 @@
connection to the internet, or you must set no proxy for
localhost.)
</p>
- <p>You can also start WebTool, Crashdump Viewer and a browser in
- one go by running the <c>start_webtool</c> script found in the
- <c>priv</c> directory of the WebTool application, e.g.
- <br></br>
-<c>>start_webtool crashdump_viewer</c></p>
<p>From the start page of Crashdump Viewer, push the "Load
Crashdump" button to load a crashdump into the tool. Then enter
- the filename of the crashdump in the entry field and push the "Ok"
- button.
+ the filename of the crashdump in the entry field and push the
+ "Ok" button. This will bring you to the General Information
+ page, i.e. the same page as the <c>cdv</c> script will open in
+ the browser.
</p>
<p>Crashdumps generated by OTP R9C and later are loaded directly
into the Crashdump Viewer, while dumps from earlier releases first
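A minimal sketch (not part of the patch) of the in-node alternative described in the Getting Started text above, using only the documented crashdump_viewer:start/0 and stop/0:

    %% Start the viewer from an already running node; WebTool is started
    %% automatically and the address to point the browser at is displayed.
    1> crashdump_viewer:start().
    %% When finished:
    2> crashdump_viewer:stop().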
diff --git a/lib/observer/doc/src/etop.xml b/lib/observer/doc/src/etop.xml
index 1ea67e6864..78047caab3 100644
--- a/lib/observer/doc/src/etop.xml
+++ b/lib/observer/doc/src/etop.xml
@@ -5,7 +5,7 @@
<header>
<copyright>
<year>2002</year>
- <year>2007</year>
+ <year>2011</year>
<holder>Ericsson AB, All Rights Reserved</holder>
</copyright>
<legalnotice>
@@ -26,27 +26,33 @@
<title>etop</title>
<prepared>Siri hansen</prepared>
<responsible></responsible>
- <docno>1</docno>
+ <docno></docno>
<approved></approved>
<checked></checked>
- <date>2002-03-27</date>
- <rev>PA1</rev>
- <file>etop.sgml</file>
+ <date></date>
+ <rev></rev>
+ <file></file>
</header>
<module>etop</module>
<modulesummary>Erlang Top is a tool for presenting information about erlang processes similar to the information presented by "top" in UNIX.</modulesummary>
<description>
+
<p><c>etop</c> should be started with the provided scripts
<c>etop</c> and <c>getop</c> for text based and graphical
- presentation respectively. Under Windows the batch files
- <c>etop.bat</c> and <c>getop.bat</c> can be used.
- </p>
- <p>All interaction with <c>etop</c> when running the graphical
- presentation should happen via the menus. For the text based
- presentation the functions described below can be used.
- </p>
- <p>The following configuration parameters exist for <c>etop</c>.
- </p>
+ presentation respectively. This will start a hidden erlang node
+ which connects to the node to be measured. The measured node is
+ given with the <c>-node</c> option. If the measured node has a
+ different cookie than the default cookie for the user who
+ invokes the script, the cookie must be explicitly given with
+ the <c>-setcookie</c> option.</p>
+
+ <p>Under Windows the batch files <c>etop.bat</c> and
+ <c>getop.bat</c> can be used.</p>
+
+ <p>The following configuration parameters exist for the
+ <c>etop</c> tool. When executing the <c>etop</c> or <c>getop</c>
+ scripts, these parameters can be given as command line options,
+ e.g. <c>getop -node testnode@myhost -setcookie MyCookie</c>.</p>
<taglist>
<tag>node</tag>
<item>The measured node.
@@ -96,6 +102,15 @@ Default: <c>runtime</c> (<c>reductions</c> if
Value: <c>on | off</c> <br></br>
Default: <c>on</c></item>
</taglist>
+
+ <p>All interaction with <c>etop</c> when running the graphical
+ presentation should happen via the menus. For the text based
+ presentation the functions described below can be used.
+ </p>
+
+ <p>See the <seealso marker="etop_ug">user's guide</seealso> for
+ more information about the <c>etop</c> tool.</p>
+
</description>
<funcs>
<func>
diff --git a/lib/observer/priv/bin/cdv b/lib/observer/priv/bin/cdv
new file mode 100755
index 0000000000..1c44785ac2
--- /dev/null
+++ b/lib/observer/priv/bin/cdv
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+erl -sname cdv -noinput -s crashdump_viewer script_start $@
+
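For context on the script above: erl -s passes its trailing command line arguments to the named function as one list of atoms, which is why crashdump_viewer:script_start/1 (further down in this patch) receives atoms and converts the file name with atom_to_list/1. A sketch with an example file name:

    %% erl -sname cdv -noinput -s crashdump_viewer script_start erl_crash.dump firefox
    %% results in a call equivalent to
    crashdump_viewer:script_start(['erl_crash.dump', firefox]).
    %% With no trailing arguments, script_start/0 is called and prints the usage text.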
diff --git a/lib/observer/priv/bin/cdv.bat b/lib/observer/priv/bin/cdv.bat
new file mode 100644
index 0000000000..efa8bf8687
--- /dev/null
+++ b/lib/observer/priv/bin/cdv.bat
@@ -0,0 +1,2 @@
+@ECHO OFF
+CALL werl -sname cdv -s crashdump_viewer script_start %*
diff --git a/lib/observer/src/Makefile b/lib/observer/src/Makefile
index b4eb518dd7..2d06cb6bc4 100644
--- a/lib/observer/src/Makefile
+++ b/lib/observer/src/Makefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2002-2009. All Rights Reserved.
+# Copyright Ericsson AB 2002-2011. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -59,8 +59,10 @@ BINDIR= $(PRIVDIR)/bin
EXECUTABLES= \
$(BINDIR)/etop \
$(BINDIR)/getop \
+ $(BINDIR)/cdv \
$(BINDIR)/etop.bat \
- $(BINDIR)/getop.bat
+ $(BINDIR)/getop.bat \
+ $(BINDIR)/cdv.bat
CDVDIR= $(PRIVDIR)/crashdump_viewer
GIF_FILES= \
$(CDVDIR)/collapsd.gif \
diff --git a/lib/observer/src/crashdump_viewer.erl b/lib/observer/src/crashdump_viewer.erl
index 978541e470..3b8d17c7d9 100644
--- a/lib/observer/src/crashdump_viewer.erl
+++ b/lib/observer/src/crashdump_viewer.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2003-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2003-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -23,7 +23,7 @@
%% the server started by webtool and the API for the crashdump viewer tool.
%%
%% All functions in the API except configData/0 and start_link/0 are
-%% called from HTML pages via erl_scheme.
+%% called from HTML pages via erl_scheme (mod_esi).
%%
%% Tables
%% ------
@@ -34,18 +34,21 @@
%%
%% cdv_dump_index_table: This table holds all tags read from the crashdump.
%% Each tag indicates where the information about a specific item starts.
-%% The table entry for a tag includes the start and end positions for
-%% this item-information. All tags start with a "=" at the beginning of
+%% The table entry for a tag includes the start position for this
+%% item-information. All tags start with a "=" at the beginning of
%% a line.
%%
%% Process state
%% -------------
%% file: The name of the crashdump currently viewed.
%% procs_summary: Process summary represented by a list of
-%% #proc records. This is used for efficiency reasons when sorting
-%% the process summary table instead of reading all processes from
-%% the dump again.
-%% sorted: atom(), indicated what item was last sorted in process summary.
+%% #proc records. This is used for efficiency reasons when sorting the
+%% process summary table instead of reading all processes from the
+%% dump again. Note that if the dump contains more than
+%% ?max_sort_process_num processes, the sort functionality is not
+%% available, and the procs_summary field in the state will have the
+%% value 'too_many'.
+%% sorted: string(), indicates what item was last sorted in process summary.
%% This is needed so reverse sorting can be done.
%% shared_heap: 'true' if crashdump comes from a system running shared heap,
%% else 'false'.
@@ -54,7 +57,7 @@
%%
%% User API
--export([start/0,stop/0]).
+-export([start/0,stop/0,script_start/0,script_start/1]).
%% Webtool API
-export([configData/0,
@@ -68,26 +71,27 @@
initial_info_frame/2,
toggle/2,
general_info/2,
- processes/2,
+ processes/3,
proc_details/2,
- ports/2,
- ets_tables/2,
- timers/2,
- fun_table/2,
- atoms/2,
+ port/2,
+ ports/3,
+ ets_tables/3,
+ internal_ets_tables/2,
+ timers/3,
+ fun_table/3,
+ atoms/3,
dist_info/2,
- loaded_modules/2,
+ loaded_modules/3,
loaded_mod_details/2,
memory/2,
allocated_areas/2,
allocator_info/2,
hash_tables/2,
index_tables/2,
- sort_procs/2,
+ sort_procs/3,
expand/2,
expand_binary/2,
- expand_memory/2,
- next/2]).
+ expand_memory/2]).
%% gen_server callbacks
@@ -113,24 +117,49 @@
% this, it must be explicitly expanded.
-define(max_display_binary_size,50). % max size of a binary that will be
% directly displayed.
+-define(max_sort_process_num,10000). % Max number of processes that allows
+ % sorting. If more than this number of
+ % processes exist, they will be displayed
+ % in the order they are found in the log.
+-define(items_chunk_size,?max_sort_process_num). % Number of items per chunk
+ % when page of many items
+ % is displayed, e.g. processes,
+ % timers, funs...
+ % Must be equal to
+ % ?max_sort_process_num!
+
+%% All possible tags - use macros in order to avoid misspelling in the code
+-define(allocated_areas,allocated_areas).
+-define(allocator,allocator).
+-define(atoms,atoms).
+-define(binary,binary).
+-define(debug_proc_dictionary,debug_proc_dictionary).
+-define(ende,ende).
+-define(erl_crash_dump,erl_crash_dump).
+-define(ets,ets).
+-define(fu,fu).
+-define(hash_table,hash_table).
+-define(hidden_node,hidden_node).
+-define(index_table,index_table).
+-define(instr_data,instr_data).
+-define(internal_ets,internal_ets).
+-define(loaded_modules,loaded_modules).
+-define(memory,memory).
+-define(mod,mod).
+-define(no_distribution,no_distribution).
+-define(node,node).
+-define(not_connected,not_connected).
+-define(num_atoms,num_atoms).
+-define(old_instr_data,old_instr_data).
+-define(port,port).
+-define(proc,proc).
+-define(proc_dictionary,proc_dictionary).
+-define(proc_heap,proc_heap).
+-define(proc_messages,proc_messages).
+-define(proc_stack,proc_stack).
+-define(timer,timer).
+-define(visible_node,visible_node).
--define(initial_proc_record(Pid),
- #proc{pid=Pid,
- %% msg_q_len, reds and stack_heap are integers because it must
- %% be possible to sort on them. All other fields are strings
- msg_q_len=0,reds=0,stack_heap=0,
- %% for old dumps start_time, parent and number of heap frament
- %% does not exist
- start_time="unknown",
- parent="unknown",
- num_heap_frag="unknown",
- %% current_func can be both "current function" and
- %% "last scheduled in for"
- current_func={"Current Function",?space},
- %% stack_dump, message queue and dictionaries should only be
- %% displayed as a link to "Expand" (if dump is from OTP R9B
- %% or newer)
- _=?space}).
-record(state,{file,procs_summary,sorted,shared_heap=false,
wordsize=4,num_atoms="unknown",binaries,bg_status}).
@@ -177,6 +206,85 @@ stop() ->
webtool:stop().
%%%-----------------------------------------------------------------
+%%% Start crashdump_viewer via the cdv script located in
+%%% $OBSERVER_PRIV_DIR/bin
+script_start() ->
+ usage().
+script_start([File]) ->
+ DefaultBrowser =
+ case os:type() of
+ {win32,_} -> iexplore;
+ _ -> firefox
+ end,
+ script_start([File,DefaultBrowser]);
+script_start([FileAtom,Browser]) ->
+ File = atom_to_list(FileAtom),
+ case filelib:is_regular(File) of
+ true ->
+ io:format("Starting crashdump_viewer...\n"),
+ start(),
+ io:format("Reading crashdump..."),
+ read_file(File),
+ redirect([],[]),
+ io:format("done\n"),
+ start_browser(Browser);
+ false ->
+ io:format("cdv error: the given file does not exist\n"),
+ usage()
+ end.
+
+start_browser(Browser) ->
+ PortStr = integer_to_list(gen_server:call(web_tool,get_port)),
+ Url = "http://localhost:" ++ PortStr ++ ?START_PAGE,
+ {OSType,_} = os:type(),
+ case Browser of
+ none ->
+ ok;
+ iexplore when OSType == win32->
+ io:format("Starting internet explorer...\n"),
+ {ok,R} = win32reg:open(""),
+ Key="\\local_machine\\SOFTWARE\\Microsoft\\IE Setup\\Setup",
+ win32reg:change_key(R,Key),
+ {ok,Val} = win32reg:value(R,"Path"),
+ IExplore=filename:join(win32reg:expand(Val),"iexplore.exe"),
+ os:cmd("\"" ++ IExplore ++ "\" " ++ Url);
+ _ when OSType == win32 ->
+ io:format("Starting ~w...\n",[Browser]),
+ os:cmd("\"" ++ atom_to_list(Browser) ++ "\" " ++ Url);
+ B when B==firefox; B==mozilla ->
+ io:format("Sending URL to ~w...",[Browser]),
+ BStr = atom_to_list(Browser),
+ SendCmd = BStr ++ " -raise -remote \'openUrl(" ++ Url ++ ")\'",
+ Port = open_port({spawn,SendCmd},[exit_status]),
+ receive
+ {Port,{exit_status,0}} ->
+ io:format("done\n");
+ {Port,{exit_status,_Error}} ->
+ io:format(" not running, starting ~w...\n",[Browser]),
+ os:cmd(BStr ++ " " ++ Url)
+ after 5000 ->
+ io:format(" failed, starting ~w...\n",[Browser]),
+ erlang:port_close(Port),
+ os:cmd(BStr ++ " " ++ Url)
+ end;
+ _ ->
+ io:format("Starting ~w...\n",[Browser]),
+ os:cmd(atom_to_list(Browser) ++ " " ++ Url)
+ end,
+ ok.
+
+usage() ->
+ io:format(
+ "\nusage: cdv file [ browser ]\n"
+ "\tThe \'file\' must be an existing erlang crash dump.\n"
+ "\tDefault browser is \'iexplore\' (Internet Explorer) on Windows\n"
+ "\tor else \'firefox\'.\n",
+ []).
+
+
+
+
+%%%-----------------------------------------------------------------
%%% Return config data used by webtool
configData() ->
Dir = filename:join(code:priv_dir(observer),"crashdump_viewer"),
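The firefox/mozilla branch of start_browser/1 above probes for an already running browser by opening a port with the exit_status option and only falls back to starting a new instance if the -remote command fails or times out. A standalone sketch of that pattern (the URL is a placeholder, not the real ?START_PAGE value):

    Url  = "http://localhost:8888/cdv",                      %% placeholder address
    Cmd  = "firefox -raise -remote 'openUrl(" ++ Url ++ ")'",
    Port = open_port({spawn, Cmd}, [exit_status]),
    receive
        {Port, {exit_status, 0}}  -> url_sent_to_running_browser;
        {Port, {exit_status, _E}} -> os:cmd("firefox " ++ Url)
    after 5000 ->
        erlang:port_close(Port),
        os:cmd("firefox " ++ Url)
    end.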
@@ -266,22 +374,24 @@ toggle(_Env,Input) ->
%%% The following functions are called when menu items are clicked.
general_info(_Env,_Input) ->
call(general_info).
-processes(_Env,_Input) ->
- call(procs_summary).
-ports(_Env,Input) -> % this is also called when a link to a port is clicked
- call({ports,Input}).
-ets_tables(_Env,Input) ->
- call({ets_tables,Input}).
-timers(_Env,Input) ->
- call({timers,Input}).
-fun_table(_Env,_Input) ->
- call(funs).
-atoms(_Env,_Input) ->
- call(atoms).
+processes(SessionId,_Env,_Input) ->
+ call({procs_summary,SessionId}).
+ports(SessionId,_Env,_Input) ->
+ call({ports,SessionId}).
+ets_tables(SessionId,_Env,Input) ->
+ call({ets_tables,SessionId,Input}).
+internal_ets_tables(_Env,_Input) ->
+ call(internal_ets_tables).
+timers(SessionId,_Env,Input) ->
+ call({timers,SessionId,Input}).
+fun_table(SessionId,_Env,_Input) ->
+ call({funs,SessionId}).
+atoms(SessionId,_Env,_Input) ->
+ call({atoms,SessionId}).
dist_info(_Env,_Input) ->
call(dist_info).
-loaded_modules(_Env,_Input) ->
- call(loaded_mods).
+loaded_modules(SessionId,_Env,_Input) ->
+ call({loaded_mods,SessionId}).
loaded_mod_details(_Env,Input) ->
call({loaded_mod_details,Input}).
memory(_Env,_Input) ->
@@ -303,8 +413,13 @@ proc_details(_Env,Input) ->
%%%-----------------------------------------------------------------
%%% Called when one of the headings in the process summary table are
%%% clicked. It sorts the processes by the clicked heading.
-sort_procs(_Env,Input) ->
- call({sort_procs,Input}).
+sort_procs(SessionId,_Env,Input) ->
+ call({sort_procs,SessionId,Input}).
+
+%%%-----------------------------------------------------------------
+%%% Called when a link to a port is clicked.
+port(_Env,Input) ->
+ call({port,Input}).
%%%-----------------------------------------------------------------
%%% Called when the "Expand" link in a call stack (Last Calls) is
@@ -325,11 +440,6 @@ expand_binary(_Env,Input) ->
call({expand_binary,Input}).
%%%-----------------------------------------------------------------
-%%% Called when the "Next" link under atoms is clicked.
-next(_Env,Input) ->
- call({next,Input}).
-
-%%%-----------------------------------------------------------------
%%% Called on regular intervals while waiting for a dump to be read
redirect(_Env,_Input) ->
call(redirect).
@@ -348,7 +458,7 @@ redirect(_Env,_Input) ->
%%--------------------------------------------------------------------
init([]) ->
ets:new(cdv_menu_table,[set,named_table,{keypos,#menu_item.index},public]),
- ets:new(cdv_dump_index_table,[bag,named_table,public]),
+ ets:new(cdv_dump_index_table,[ordered_set,named_table,public]),
{ok, #state{}}.
%%--------------------------------------------------------------------
@@ -373,16 +483,7 @@ handle_call(start_page, _From, State) ->
Reply = crashdump_viewer_html:start_page(),
{reply,Reply,State};
handle_call({read_file,Input}, _From, _State) ->
- {ok,File0} = get_value("path",httpd:parse_query(Input)),
- File =
- case File0 of
- [$"|FileAndSome] ->
- %% Opera adds \"\" around the filename!
- [$"|Elif] = lists:reverse(FileAndSome),
- lists:reverse(Elif);
- _ ->
- File0
- end,
+ {ok,File} = get_value("path",httpd:parse_query(Input)),
spawn_link(fun() -> read_file(File) end),
Status = background_status(reading,File),
Reply = crashdump_viewer_html:redirect(Status),
@@ -399,8 +500,17 @@ handle_call(initial_info_frame,_From,State=#state{file=File}) ->
GenInfo = general_info(File),
NumAtoms = GenInfo#general_info.num_atoms,
{WS,SH} = parse_vsn_str(GenInfo#general_info.system_vsn,4,false),
+ NumProcs = list_to_integer(GenInfo#general_info.num_procs),
+ ProcsSummary =
+ if NumProcs > ?max_sort_process_num -> too_many;
+ true -> State#state.procs_summary
+ end,
+ NewState = State#state{shared_heap=SH,
+ wordsize=WS,
+ num_atoms=NumAtoms,
+ procs_summary=ProcsSummary},
Reply = crashdump_viewer_html:general_info(GenInfo),
- {reply,Reply,State#state{shared_heap=SH,wordsize=WS,num_atoms=NumAtoms}};
+ {reply,Reply,NewState};
handle_call({toggle,Input},_From,State) ->
{ok,Index} = get_value("index",httpd:parse_query(Input)),
do_toggle(list_to_integer(Index)),
@@ -429,7 +539,7 @@ handle_call({expand,Input},_From,State=#state{file=File}) ->
handle_call({expand_memory,Input},_From,State=#state{file=File,binaries=B}) ->
[{"pid",Pid},{"what",What}] = httpd:parse_query(Input),
Reply =
- case truncated_warning([{"=proc",Pid}]) of
+ case truncated_warning([{?proc,Pid}]) of
[] ->
Expanded = expand_memory(File,What,Pid,B),
crashdump_viewer_html:expanded_memory(What,Expanded);
@@ -450,149 +560,129 @@ handle_call({expand_binary,Input},_From,State=#state{file=File}) ->
close(Fd),
Reply=crashdump_viewer_html:expanded_binary(io_lib:format("~p",[Bin])),
{reply,Reply,State};
-handle_call({next,Input},_From,State=#state{file=File}) ->
- [{"pos",Pos},{"num",N},{"start",Start},{"what",What}] =
- httpd:parse_query(Input),
- Tags = related_tags(What),
- TW = truncated_warning(Tags),
- Next = get_next(File,list_to_integer(Pos),list_to_integer(N),
- list_to_integer(Start),What),
- Reply = crashdump_viewer_html:next(Next,TW),
- {reply,Reply,State};
handle_call(general_info,_From,State=#state{file=File}) ->
GenInfo=general_info(File),
Reply = crashdump_viewer_html:general_info(GenInfo),
{reply,Reply,State};
-handle_call(procs_summary,_From,State=#state{file=File,shared_heap=SH}) ->
- ProcsSummary =
- case State#state.procs_summary of
- undefined -> procs_summary(File);
- PS -> PS
- end,
- TW = truncated_warning(["=proc"]),
- Reply = crashdump_viewer_html:procs_summary("pid",ProcsSummary,TW,SH),
- {reply,Reply,State#state{procs_summary=ProcsSummary,sorted="pid"}};
-handle_call({sort_procs,Input}, _From, State=#state{shared_heap=SH}) ->
+handle_call({procs_summary,SessionId},_From,State) ->
+ TW = truncated_warning([?proc]),
+ NewState = procs_summary(SessionId,TW,"pid",State#state{sorted=undefined}),
+ {reply,ok,NewState};
+handle_call({sort_procs,SessionId,Input}, _From, State) ->
{ok,Sort} = get_value("sort",httpd:parse_query(Input)),
- {ProcsSummary,Sorted} = do_sort_procs(Sort,
- State#state.procs_summary,
- State#state.sorted),
- TW = truncated_warning(["=proc"]),
- Reply = crashdump_viewer_html:procs_summary(Sort,ProcsSummary,TW,SH),
- {reply,Reply,State#state{sorted=Sorted}};
+ TW = truncated_warning([?proc]),
+ NewState = procs_summary(SessionId,TW,Sort,State),
+ {reply,ok,NewState};
handle_call({proc_details,Input},_From,State=#state{file=File,shared_heap=SH}) ->
{ok,Pid} = get_value("pid",httpd:parse_query(Input)),
Reply =
case get_proc_details(File,Pid) of
{ok,Proc} ->
- TW = truncated_warning([{"=proc",Pid}]),
+ TW = truncated_warning([{?proc,Pid}]),
crashdump_viewer_html:proc_details(Pid,Proc,TW,SH);
{other_node,Node} ->
- TW = truncated_warning(["=visible_node",
- "=hidden_node",
- "=not_connected"]),
+ TW = truncated_warning([?visible_node,
+ ?hidden_node,
+ ?not_connected]),
crashdump_viewer_html:nods(Node,TW);
not_found ->
crashdump_viewer_html:info_page(["Could not find process: ",
Pid],?space)
end,
{reply, Reply, State};
-handle_call({ports,Input},_From,State=#state{file=File}) ->
+handle_call({port,Input},_From,State=#state{file=File}) ->
+ {ok,P} = get_value("port",httpd:parse_query(Input)),
+ Id = [$#|P],
Reply =
- case get_value("port",httpd:parse_query(Input)) of
- {ok,P} ->
- Id = [$#|P],
- case get_port(File,Id) of
- {ok,PortInfo} ->
- TW = truncated_warning([{"=port",Id}]),
- crashdump_viewer_html:ports(Id,[PortInfo],TW);
- {other_node,Node} ->
- TW = truncated_warning(["=visible_node",
- "=hidden_node",
- "=not_connected"]),
- crashdump_viewer_html:nods(Node,TW);
- not_found ->
- crashdump_viewer_html:info_page(
- ["Could not find port: ",Id],?space)
- end;
- error -> % no port identity in Input - get all ports
- Ports=get_ports(File),
- TW = truncated_warning(["=port"]),
- crashdump_viewer_html:ports("Port Information",Ports,TW)
+ case get_port(File,Id) of
+ {ok,PortInfo} ->
+ TW = truncated_warning([{?port,Id}]),
+ crashdump_viewer_html:port(Id,PortInfo,TW);
+ {other_node,Node} ->
+ TW = truncated_warning([?visible_node,
+ ?hidden_node,
+ ?not_connected]),
+ crashdump_viewer_html:nods(Node,TW);
+ not_found ->
+ crashdump_viewer_html:info_page(
+ ["Could not find port: ",Id],?space)
end,
{reply,Reply,State};
-handle_call({ets_tables,Input},_From,State=#state{file=File,wordsize=WS}) ->
- {Pid,Heading,InternalEts} =
+handle_call({ports,SessionId},_From,State=#state{file=File}) ->
+ TW = truncated_warning([?port]),
+ get_ports(SessionId,File,TW),
+ {reply,ok,State};
+handle_call({ets_tables,SessionId,Input},_From,State=#state{file=File,wordsize=WS}) ->
+ {Pid,Heading} =
case get_value("pid",httpd:parse_query(Input)) of
{ok,P} ->
- {P,["ETS Tables for Process ",P],[]};
+ {P,["ETS Tables for Process ",P]};
error ->
- I = get_internal_ets_tables(File,WS),
- {'_',"ETS Table Information",I}
+ {'$2',"ETS Table Information"}
end,
- EtsTables = get_ets_tables(File,Pid,WS),
- TW = truncated_warning(["=ets"]),
- Reply = crashdump_viewer_html:ets_tables(Heading,EtsTables,InternalEts,TW),
+ TW = truncated_warning([?ets]),
+ get_ets_tables(SessionId,File,Heading,TW,Pid,WS),
+ {reply,ok,State};
+handle_call(internal_ets_tables,_From,State=#state{file=File,wordsize=WS}) ->
+ InternalEts = get_internal_ets_tables(File,WS),
+ TW = truncated_warning([?internal_ets]),
+ Reply = crashdump_viewer_html:internal_ets_tables(InternalEts,TW),
{reply,Reply,State};
-handle_call({timers,Input},_From,State=#state{file=File}) ->
+handle_call({timers,SessionId,Input},_From,State=#state{file=File}) ->
{Pid,Heading} =
case get_value("pid",httpd:parse_query(Input)) of
{ok,P} -> {P,["Timers for Process ",P]};
- error -> {'_',"Timer Information"}
+ error -> {'$2',"Timer Information"}
end,
- Timers=get_timers(File,Pid),
- TW = truncated_warning(["=timer"]),
- Reply = crashdump_viewer_html:timers(Heading,Timers,TW),
- {reply,Reply,State};
+ TW = truncated_warning([?timer]),
+ get_timers(SessionId,File,Heading,TW,Pid),
+ {reply,ok,State};
handle_call(dist_info,_From,State=#state{file=File}) ->
Nods=nods(File),
- TW = truncated_warning(["=visible_node","=hidden_node","=not_connected"]),
+ TW = truncated_warning([?visible_node,?hidden_node,?not_connected]),
Reply = crashdump_viewer_html:nods(Nods,TW),
{reply,Reply,State};
-handle_call(loaded_mods,_From,State=#state{file=File}) ->
- LoadedMods=loaded_mods(File),
- TW = truncated_warning(["=mod"]),
- Reply = crashdump_viewer_html:loaded_mods(LoadedMods,TW),
- {reply,Reply,State};
+handle_call({loaded_mods,SessionId},_From,State=#state{file=File}) ->
+ TW = truncated_warning([?mod]),
+ loaded_mods(SessionId,File,TW),
+ {reply,ok,State};
handle_call({loaded_mod_details,Input},_From,State=#state{file=File}) ->
{ok,Mod} = get_value("mod",httpd:parse_query(Input)),
ModInfo = get_loaded_mod_details(File,Mod),
- TW = truncated_warning([{"=mod",Mod}]),
+ TW = truncated_warning([{?mod,Mod}]),
Reply = crashdump_viewer_html:loaded_mod_details(ModInfo,TW),
{reply,Reply,State};
-handle_call(funs,_From,State=#state{file=File}) ->
- Funs=funs(File),
- TW = truncated_warning(["=fun"]),
- Reply = crashdump_viewer_html:funs(Funs,TW),
- {reply,Reply,State};
-handle_call(atoms,_From,State=#state{file=File,num_atoms=Num}) ->
- Atoms=atoms(File),
- TW = truncated_warning(["=atoms","=num_atoms"]),
- Reply = crashdump_viewer_html:atoms(Atoms,Num,TW),
- {reply,Reply,State};
+handle_call({funs,SessionId},_From,State=#state{file=File}) ->
+ TW = truncated_warning([?fu]),
+ funs(SessionId,File,TW),
+ {reply,ok,State};
+handle_call({atoms,SessionId},_From,State=#state{file=File,num_atoms=Num}) ->
+ TW = truncated_warning([?atoms,?num_atoms]),
+ atoms(SessionId,File,TW,Num),
+ {reply,ok,State};
handle_call(memory,_From,State=#state{file=File}) ->
Memory=memory(File),
- TW = truncated_warning(["=memory"]),
+ TW = truncated_warning([?memory]),
Reply = crashdump_viewer_html:memory(Memory,TW),
{reply,Reply,State};
handle_call(allocated_areas,_From,State=#state{file=File}) ->
AllocatedAreas=allocated_areas(File),
- TW = truncated_warning(["=allocated_areas"]),
+ TW = truncated_warning([?allocated_areas]),
Reply = crashdump_viewer_html:allocated_areas(AllocatedAreas,TW),
{reply,Reply,State};
handle_call(allocator_info,_From,State=#state{file=File}) ->
SlAlloc=allocator_info(File),
- TW = truncated_warning(["=allocator"]),
+ TW = truncated_warning([?allocator]),
Reply = crashdump_viewer_html:allocator_info(SlAlloc,TW),
{reply,Reply,State};
handle_call(hash_tables,_From,State=#state{file=File}) ->
HashTables=hash_tables(File),
- TW = truncated_warning(["=hash_table","=index_table"]),
+ TW = truncated_warning([?hash_table,?index_table]),
Reply = crashdump_viewer_html:hash_tables(HashTables,TW),
{reply,Reply,State};
handle_call(index_tables,_From,State=#state{file=File}) ->
IndexTables=index_tables(File),
- TW = truncated_warning(["=hash_table","=index_table"]),
+ TW = truncated_warning([?hash_table,?index_table]),
Reply = crashdump_viewer_html:index_tables(IndexTables,TW),
{reply,Reply,State}.
@@ -682,9 +772,9 @@ truncated_here(Tag) ->
%% Check if the dump was truncated with the same tag, but earlier id.
-%% Eg if this is {"=proc","<0.30.0>"}, we should warn if the dump was
-%% truncated in {"=proc","<0.29.0>"} or earlier
-truncated_earlier({"=proc",Pid}) ->
+%% Eg if this is {?proc,"<0.30.0>"}, we should warn if the dump was
+%% truncated in {?proc,"<0.29.0>"} or earlier
+truncated_earlier({?proc,Pid}) ->
compare_pid(Pid,get(truncated_proc));
truncated_earlier(_Tag) ->
false.
@@ -718,9 +808,37 @@ open(File) ->
close(Fd) ->
erase(chunk),
file:close(Fd).
+
+%% Set position relative to beginning of file
+%% If position is within the already read Chunk, then adjust 'chunk'
+%% and 'pos' in process dictionary. Else set position in file.
pos_bof(Fd,Pos) ->
+ case get(pos) of
+ undefined ->
+ hard_pos_bof(Fd,Pos);
+ OldPos when Pos>=OldPos ->
+ case get(chunk) of
+ undefined ->
+ hard_pos_bof(Fd,Pos);
+ Chunk ->
+ ChunkSize = byte_size(Chunk),
+ ChunkEnd = OldPos+ChunkSize,
+ if Pos=<ChunkEnd ->
+ Diff = Pos-OldPos,
+ put(pos,Pos),
+ put(chunk,binary:part(Chunk,Diff,ChunkEnd-Pos));
+ true ->
+ hard_pos_bof(Fd,Pos)
+ end
+ end;
+ _ ->
+ hard_pos_bof(Fd,Pos)
+ end.
+
+hard_pos_bof(Fd,Pos) ->
reset_chunk(),
- file:position(Fd,{bof,Pos}).
+ file:position(Fd,{bof,Pos}).
+
get_chunk(Fd) ->
case erase(chunk) of
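The new pos_bof/2 above skips the file seek whenever the requested position falls inside the chunk already cached in the process dictionary; hard_pos_bof/2 is only used otherwise. A worked example of the trimming arithmetic, with invented values:

    %% Cached chunk starts at file position 100 and is 10 bytes long.
    Chunk    = <<"abcdefghij">>,
    OldPos   = 100,
    Pos      = 104,                            %% requested position
    ChunkEnd = OldPos + byte_size(Chunk),      %% 110
    Diff     = Pos - OldPos,                   %% 4
    binary:part(Chunk, Diff, ChunkEnd - Pos).  %% <<"efghij">> - no seek needed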
@@ -979,7 +1097,9 @@ initial_menu() ->
[menu_item(0, {"./general_info","General information"},0),
menu_item(0, {"./processes","Processes"}, 0),
menu_item(0, {"./ports","Ports"}, 0),
- menu_item(0, {"./ets_tables","ETS tables"}, 0),
+ menu_item(2, "ETS tables", 0),
+ menu_item(0, {"./ets_tables","ETS tables"}, 1),
+ menu_item(0, {"./internal_ets_tables","Internal ETS tables"}, 1),
menu_item(0, {"./timers","Timers"}, 0),
menu_item(0, {"./fun_table","Fun table"}, 0),
menu_item(0, {"./atoms","Atoms"}, 0),
@@ -1066,9 +1186,9 @@ read_file(File) ->
{ok,<<$=:8,TagAndRest/binary>>} ->
{Tag,Id,Rest,N1} = tag(Fd,TagAndRest,1),
case Tag of
- "=erl_crash_dump" ->
- ets:delete_all_objects(cdv_dump_index_table),
- ets:insert(cdv_dump_index_table,{Tag,Id,N1+1}),
+ ?erl_crash_dump ->
+ reset_index_table(),
+ insert_index(Tag,Id,N1+1),
put(last_tag,{Tag,""}),
Status = background_status(processing,File),
background_status(Status),
@@ -1107,34 +1227,35 @@ read_file(File) ->
background_done({R,undefined,undefined})
end.
-indexify(Fd,<<"\n=",TagAndRest/binary>>,N) ->
- {Tag,Id,Rest,N1} = tag(Fd,TagAndRest,N+2),
- ets:insert(cdv_dump_index_table,{Tag,Id,N1+1}), % +1 to get past newline
- put(last_tag,{Tag,Id}),
- indexify(Fd,Rest,N1);
-indexify(Fd,<<>>,N) ->
- case read(Fd) of
- {ok,Chunk} when is_binary(Chunk) ->
- indexify(Fd,Chunk,N);
- eof ->
- eof
- end;
-indexify(Fd,<<$\n>>,N) ->
- %% This clause is needed in case the chunk ends with a newline and
- %% the next chunk starts with a tag (i.e. "\n=....")
- case read(Fd) of
- {ok,Chunk} when is_binary(Chunk) ->
- indexify(Fd,<<$\n,Chunk/binary>>,N);
- eof ->
- eof
- end;
-indexify(Fd,<<_Char:8,Rest/binary>>,N) ->
- indexify(Fd,Rest,N+1).
+indexify(Fd,Bin,N) ->
+ case binary:match(Bin,<<"\n=">>) of
+ {Start,Len} ->
+ Pos = Start+Len,
+ <<_:Pos/binary,TagAndRest/binary>> = Bin,
+ {Tag,Id,Rest,N1} = tag(Fd,TagAndRest,N+Pos),
+ insert_index(Tag,Id,N1+1), % +1 to get past newline
+ put(last_tag,{Tag,Id}),
+ indexify(Fd,Rest,N1);
+ nomatch ->
+ case read(Fd) of
+ {ok,Chunk0} when is_binary(Chunk0) ->
+ {Chunk,N1} =
+ case binary:last(Bin) of
+ $\n ->
+ {<<$\n,Chunk0/binary>>,N+byte_size(Bin)-1};
+ _ ->
+ {Chunk0,N+byte_size(Bin)}
+ end,
+ indexify(Fd,Chunk,N1);
+ eof ->
+ eof
+ end
+ end.
tag(Fd,Bin,N) ->
tag(Fd,Bin,N,[],[],tag).
tag(_Fd,<<$\n:8,_/binary>>=Rest,N,Gat,Di,_Now) ->
- {[$=|lists:reverse(Gat)],lists:reverse(Di),Rest,N};
+ {tag_to_atom(lists:reverse(Gat)),lists:reverse(Di),Rest,N};
tag(Fd,<<$\r:8,Rest/binary>>,N,Gat,Di,Now) ->
tag(Fd,Rest,N+1,Gat,Di,Now);
tag(Fd,<<$::8,IdAndRest/binary>>,N,Gat,Di,tag) ->
@@ -1148,12 +1269,12 @@ tag(Fd,<<>>,N,Gat,Di,Now) ->
{ok,Chunk} when is_binary(Chunk) ->
tag(Fd,Chunk,N,Gat,Di,Now);
eof ->
- {[$=|lists:reverse(Gat)],lists:reverse(Di),<<>>,N}
+ {tag_to_atom(lists:reverse(Gat)),lists:reverse(Di),<<>>,N}
end.
check_if_truncated() ->
case get(last_tag) of
- {"=end",_} ->
+ {?ende,_} ->
put(truncated,false),
put(truncated_proc,false);
TruncatedTag ->
@@ -1161,32 +1282,29 @@ check_if_truncated() ->
find_truncated_proc(TruncatedTag)
end.
-find_truncated_proc({"=atom",_Id}) ->
+find_truncated_proc({?atoms,_Id}) ->
put(truncated_proc,false);
find_truncated_proc({Tag,Pid}) ->
case is_proc_tag(Tag) of
true ->
put(truncated_proc,Pid);
false ->
- %% This means that the dump is truncated between "=proc" and
- %% "=proc_heap" => memory info is missing for all procs.
+ %% This means that the dump is truncated between ?proc and
+ %% ?proc_heap => memory info is missing for all procs.
put(truncated_proc,"<0.0.0>")
end.
-is_proc_tag(Tag) when Tag=="=proc";
- Tag=="=proc_dictionary";
- Tag=="=proc_messages";
- Tag=="=proc_dictionary";
- Tag=="=debug_proc_dictionary";
- Tag=="=proc_stack";
- Tag=="=proc_heap" ->
+is_proc_tag(Tag) when Tag==?proc;
+ Tag==?proc_dictionary;
+ Tag==?proc_messages;
+ Tag==?proc_dictionary;
+ Tag==?debug_proc_dictionary;
+ Tag==?proc_stack;
+ Tag==?proc_heap ->
true;
is_proc_tag(_) ->
false.
-related_tags("Atoms") ->
- ["=atoms","=num_atoms"].
-
%%% Inform the crashdump_viewer_server that a background job is completed.
background_done(Result) ->
Dict = get(),
@@ -1198,8 +1316,7 @@ background_status(Status) ->
%%%-----------------------------------------------------------------
%%% Functions for reading information from the dump
general_info(File) ->
- [{"=erl_crash_dump",_Id,Start}] =
- ets:lookup(cdv_dump_index_table,"=erl_crash_dump"),
+ [{_Id,Start}] = lookup_index(?erl_crash_dump),
Fd = open(File),
pos_bof(Fd,Start),
Created = case get_rest_of_line(Fd) of
@@ -1207,15 +1324,15 @@ general_info(File) ->
WholeLine -> WholeLine
end,
- GI0 = get_general_info(Fd,#general_info{created=Created,_=?space}),
+ GI0 = get_general_info(Fd,#general_info{created=Created}),
GI = case GI0#general_info.num_atoms of
?space -> GI0#general_info{num_atoms=get_num_atoms(Fd)};
_ -> GI0
end,
{MemTot,MemMax} =
- case ets:lookup(cdv_dump_index_table,"=memory") of
- [{"=memory",_,MemStart}] ->
+ case lookup_index(?memory) of
+ [{_,MemStart}] ->
pos_bof(Fd,MemStart),
Memory = get_meminfo(Fd,[]),
Tot = case lists:keysearch("total",1,Memory) of
@@ -1232,33 +1349,34 @@ general_info(File) ->
end,
close(Fd),
- {NumProcs,NumEts,NumFuns} = count(),
+ {NumProcs,NumEts,NumFuns,NumTimers} = count(),
NodeName =
- case ets:lookup(cdv_dump_index_table,"=node") of
- [{"=node",N,_Start}] ->
+ case lookup_index(?node) of
+ [{N,_Start}] ->
N;
[] ->
- case ets:lookup(cdv_dump_index_table,"=no_distribution") of
+ case lookup_index(?no_distribution) of
[_] -> "nonode@nohost";
[] -> "unknown"
end
end,
InstrInfo =
- case ets:member(cdv_dump_index_table,"=old_instr_data") of
- true ->
- old_instr_data;
- false ->
- case ets:member(cdv_dump_index_table,"=instr_data") of
- true ->
- instr_data;
- false ->
- false
- end
+ case lookup_index(?old_instr_data) of
+ [] ->
+ case lookup_index(?instr_data) of
+ [] ->
+ false;
+ _ ->
+ instr_data
+ end;
+ _ ->
+ old_instr_data
end,
GI#general_info{node_name=NodeName,
num_procs=integer_to_list(NumProcs),
num_ets=integer_to_list(NumEts),
+ num_timers=integer_to_list(NumTimers),
num_fun=integer_to_list(NumFuns),
mem_tot=MemTot,
mem_max=MemMax,
@@ -1285,8 +1403,8 @@ get_general_info(Fd,GenInfo) ->
end.
get_num_atoms(Fd) ->
- case ets:match(cdv_dump_index_table,{"=hash_table","atom_tab",'$1'}) of
- [[Pos]] ->
+ case lookup_index(?hash_table,"atom_tab") of
+ [{_,Pos}] ->
pos_bof(Fd,Pos),
skip_rest_of_line(Fd), % size
skip_rest_of_line(Fd), % used
@@ -1300,10 +1418,10 @@ get_num_atoms(Fd) ->
get_num_atoms2()
end.
get_num_atoms2() ->
- case ets:lookup(cdv_dump_index_table,"=num_atoms") of
+ case lookup_index(?num_atoms) of
[] ->
?space;
- [{"=num_atoms",NA,_Pos}] ->
+ [{NA,_Pos}] ->
%% If dump is translated this will exist
case get(truncated) of
true ->
@@ -1314,43 +1432,70 @@ get_num_atoms2() ->
end.
count() ->
- {ets:select_count(cdv_dump_index_table,count_ms("=proc")),
- ets:select_count(cdv_dump_index_table,count_ms("=ets")),
- ets:select_count(cdv_dump_index_table,count_ms("=fun"))}.
+ {count_index(?proc),count_index(?ets),count_index(?fu),count_index(?timer)}.
-count_ms(Tag) ->
- [{{Tag,'_','_'},[],[true]}].
+%%-----------------------------------------------------------------
+%% Page with all processes
+%%
+%% If there are less than ?max_sort_process_num processes in the dump,
+%% we will store the list of processes in the server state in order to
+%% allow sorting according to the different columns of the
+%% table. Since ?max_sort_process_num=:=?items_chunk_size, there will
+%% never be more than one chunk in this case.
+%%
+%% If there are more than ?max_sort_process_num processes in the dump,
+%% no sorting will be allowed, and the processes must be read (chunk
+%% by chunk) from the file each time the page is opened. This is to
+%% avoid really big data in the server state.
+procs_summary(SessionId,TW,_,State=#state{procs_summary=too_many}) ->
+ chunk_page(SessionId,State#state.file,TW,?proc,processes,
+ {no_sort,State#state.shared_heap},procs_summary_parsefun()),
+ State;
+procs_summary(SessionId,TW,SortOn,State) ->
+ ProcsSummary =
+ case State#state.procs_summary of
+ undefined -> % first time - read from file
+ Fd = open(State#state.file),
+ {PS,_}=lookup_and_parse_index_chunk(first_chunk_pointer(?proc),
+ Fd,procs_summary_parsefun()),
+ close(Fd),
+ PS;
+ PS ->
+ PS
+ end,
+ {SortedPS,NewSorted} = do_sort_procs(SortOn,ProcsSummary,State#state.sorted),
+ HtmlInfo =
+ crashdump_viewer_html:chunk_page(processes,SessionId,TW,
+ {SortOn,State#state.shared_heap},
+ SortedPS),
+ crashdump_viewer_html:chunk(SessionId,done,HtmlInfo),
+ State#state{procs_summary=ProcsSummary,sorted=NewSorted}.
-procs_summary(File) ->
- AllProcs = ets:lookup(cdv_dump_index_table,"=proc"),
- Fd = open(File),
- R = lists:map(fun({"=proc",Pid,Start}) ->
- pos_bof(Fd,Start),
- get_procinfo(Fd,fun main_procinfo/4,
- ?initial_proc_record(Pid))
- end,
- AllProcs),
- close(Fd),
- R.
+procs_summary_parsefun() ->
+ fun(Fd,Pid) ->
+ get_procinfo(Fd,fun main_procinfo/4,#proc{pid=Pid})
+ end.
+%%-----------------------------------------------------------------
+%% Page with one process
get_proc_details(File,Pid) ->
- DumpVsn = ets:lookup_element(cdv_dump_index_table,"=erl_crash_dump",2),
- case ets:match(cdv_dump_index_table,{"=proc",Pid,'$1'}) of
- [[Start]] ->
+ [{DumpVsn,_}] = lookup_index(?erl_crash_dump),
+ case lookup_index(?proc,Pid) of
+ [{_,Start}] ->
Fd = open(File),
pos_bof(Fd,Start),
Proc0 =
case DumpVsn of
"0.0" ->
%% Old version (translated)
- ?initial_proc_record(Pid);
+ #proc{pid=Pid};
_ ->
- (?initial_proc_record(Pid))#proc{
- stack_dump=if_exist("=proc_stack",Pid),
- msg_q=if_exist("=proc_messages",Pid),
- dict=if_exist("=proc_dictionary",Pid),
- debug_dict=if_exist("=debug_proc_dictionary",Pid)}
+ #proc{pid=Pid,
+ stack_dump=if_exist(?proc_stack,Pid),
+ msg_q=if_exist(?proc_messages,Pid),
+ dict=if_exist(?proc_dictionary,Pid),
+ debug_dict=if_exist(?debug_proc_dictionary,Pid)}
end,
Proc = get_procinfo(Fd,fun all_procinfo/4,Proc0),
close(Fd),
@@ -1368,11 +1513,11 @@ get_proc_details(File,Pid) ->
end.
if_exist(Tag,Key) ->
- case ets:select_count(cdv_dump_index_table,[{{Tag,Key,'_'},[],[true]}]) of
+ case count_index(Tag,Key) of
0 ->
Tag1 =
case is_proc_tag(Tag) of
- true -> "=proc";
+ true -> ?proc;
false -> Tag
end,
case truncated_here({Tag1,Key}) of
@@ -1523,13 +1668,14 @@ maybe_other_node(File,Id) ->
N
end,
Ms = ets:fun2ms(
- fun({Tag,Id,Start}) when Tag=:="=visible_node", Id=:=Channel ->
+ fun({{Tag,Start},Ch}) when Tag=:=?visible_node, Ch=:=Channel ->
{"Visible Node",Start};
- ({Tag,Id,Start}) when Tag=:="=hidden_node", Id=:=Channel ->
+ ({{Tag,Start},Ch}) when Tag=:=?hidden_node, Ch=:=Channel ->
{"Hidden Node",Start};
- ({Tag,Id,Start}) when Tag=:="=not_connected", Id=:=Channel ->
+ ({{Tag,Start},Ch}) when Tag=:=?not_connected, Ch=:=Channel ->
{"Not Connected Node",Start}
end),
+
case ets:select(cdv_dump_index_table,Ms) of
[] ->
not_found;
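The match specification above also shows the layout of the reworked index table: with cdv_dump_index_table created as an ordered_set, each entry has the form {{Tag,Start},Id}, so all positions for one tag sort together. A rough sketch of what insert_index/3 (not shown in this hunk) presumably stores, with invented values:

    %% Assumed entry shape, inferred from the ets:fun2ms/1 pattern:
    %% {{Tag, StartPosition}, Id}
    ets:insert(cdv_dump_index_table, {{proc, 12345}, "<0.30.0>"}),
    ets:insert(cdv_dump_index_table, {{ets,  23456}, "<0.30.0>"}).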
@@ -1540,6 +1686,7 @@ maybe_other_node(File,Id) ->
{other_node,Type,NodeInfo}
end.
+
expand_memory(File,What,Pid,Binaries) ->
Fd = open(File),
put(fd,Fd),
@@ -1548,8 +1695,8 @@ expand_memory(File,What,Pid,Binaries) ->
case What of
"StackDump" -> read_stack_dump(Fd,Pid,Dict);
"MsgQueue" -> read_messages(Fd,Pid,Dict);
- "Dictionary" -> read_dictionary(Fd,"=proc_dictionary",Pid,Dict);
- "DebugDictionary" -> read_dictionary(Fd,"=debug_proc_dictionary",Pid,Dict)
+ "Dictionary" -> read_dictionary(Fd,?proc_dictionary,Pid,Dict);
+ "DebugDictionary" -> read_dictionary(Fd,?debug_proc_dictionary,Pid,Dict)
end,
erase(fd),
close(Fd),
@@ -1559,10 +1706,10 @@ expand_memory(File,What,Pid,Binaries) ->
%%% Read binaries.
%%%
read_binaries(Fd) ->
- AllBinaries = ets:match(cdv_dump_index_table,{"=binary",'$1','$2'}),
+ AllBinaries = lookup_index(?binary),
read_binaries(Fd,AllBinaries, gb_trees:empty()).
-read_binaries(Fd,[[Addr0,Pos]|Bins],Dict0) ->
+read_binaries(Fd,[{Addr0,Pos}|Bins],Dict0) ->
pos_bof(Fd,Pos),
{Addr,_} = get_hex(Addr0),
Dict =
@@ -1603,15 +1750,15 @@ parse_binary(Addr, Line0, Dict) ->
%%%
read_stack_dump(Fd,Pid,Dict) ->
- case ets:match(cdv_dump_index_table,{"=proc_stack",Pid,'$1'}) of
- [[Start]] ->
+ case lookup_index(?proc_stack,Pid) of
+ [{_,Start}] ->
pos_bof(Fd,Start),
read_stack_dump1(Fd,Dict,[]);
[] ->
[]
end.
read_stack_dump1(Fd,Dict,Acc) ->
- %% This function is never called if the dump is truncated in "=proc_heap:Pid"
+ %% This function is never called if the dump is truncated in {?proc_heap,Pid}
case val(Fd) of
"=" ++ _next_tag ->
lists:reverse(Acc);
@@ -1631,15 +1778,15 @@ parse_top(Line0, D) ->
%%%
read_messages(Fd,Pid,Dict) ->
- case ets:match(cdv_dump_index_table,{"=proc_messages",Pid,'$1'}) of
- [[Start]] ->
+ case lookup_index(?proc_messages,Pid) of
+ [{_,Start}] ->
pos_bof(Fd,Start),
read_messages1(Fd,Dict,[]);
[] ->
[]
end.
read_messages1(Fd,Dict,Acc) ->
- %% This function is never called if the dump is truncated in "=proc_heap:Pid"
+ %% This function is never called if the dump is truncated in {?proc_heap,Pid}
case val(Fd) of
"=" ++ _next_tag ->
lists:reverse(Acc);
@@ -1659,15 +1806,15 @@ parse_message(Line0, D) ->
%%%
read_dictionary(Fd,Tag,Pid,Dict) ->
- case ets:match(cdv_dump_index_table,{Tag,Pid,'$1'}) of
- [[Start]] ->
+ case lookup_index(Tag,Pid) of
+ [{_,Start}] ->
pos_bof(Fd,Start),
read_dictionary1(Fd,Dict,[]);
[] ->
[]
end.
read_dictionary1(Fd,Dict,Acc) ->
- %% This function is never called if the dump is truncated in "=proc_heap:Pid"
+ %% This function is never called if the dump is truncated in {?proc_heap,Pid}
case val(Fd) of
"=" ++ _next_tag ->
lists:reverse(Acc);
@@ -1686,8 +1833,8 @@ parse_dictionary(Line0, D) ->
%%%
read_heap(Fd,Pid,Dict0) ->
- case ets:match(cdv_dump_index_table,{"=proc_heap",Pid,'$2'}) of
- [[Pos]] ->
+ case lookup_index(?proc_heap,Pid) of
+ [{_,Pos}] ->
pos_bof(Fd,Pos),
read_heap(Dict0);
[] ->
@@ -1695,7 +1842,7 @@ read_heap(Fd,Pid,Dict0) ->
end.
read_heap(Dict0) ->
- %% This function is never called if the dump is truncated in "=proc_heap:Pid"
+ %% This function is never called if the dump is truncated in {?proc_heap,Pid}
case get(fd) of
end_of_heap ->
Dict0;
@@ -1761,12 +1908,14 @@ do_sort_procs("name",Procs,Sorted) ->
_ -> {Result,"name"}
end.
-
+%%-----------------------------------------------------------------
+%% Page with one port
get_port(File,Port) ->
- case ets:match(cdv_dump_index_table,{"=port",Port,'$1'}) of
- [[Start]] ->
+ case lookup_index(?port,Port) of
+ [{_,Start}] ->
Fd = open(File),
- R = get_portinfo(Fd,Port,Start),
+ pos_bof(Fd,Start),
+ R = get_portinfo(Fd,#port{id=Port}),
close(Fd),
{ok,R};
[] ->
@@ -1781,18 +1930,11 @@ get_port(File,Port) ->
end
end.
-get_ports(File) ->
- Ports = ets:lookup(cdv_dump_index_table,"=port"),
- Fd = open(File),
- R = lists:map(fun({"=port",Id,Start}) -> get_portinfo(Fd,Id,Start) end,
- Ports),
- close(Fd),
- R.
-
-
-get_portinfo(Fd,Id,Start) ->
- pos_bof(Fd,Start),
- get_portinfo(Fd,#port{id=Id,_=?space}).
+%%-----------------------------------------------------------------
+%% Page with all ports
+get_ports(SessionId,File,TW) ->
+ ParseFun = fun(Fd,Id) -> get_portinfo(Fd,#port{id=Id}) end,
+ chunk_page(SessionId,File,TW,?port,ports,[],ParseFun).
get_portinfo(Fd,Port) ->
case line_head(Fd) of
@@ -1802,6 +1944,10 @@ get_portinfo(Fd,Port) ->
get_portinfo(Fd,Port#port{connected=val(Fd)});
"Links" ->
get_portinfo(Fd,Port#port{links=val(Fd)});
+ "Registered as" ->
+ get_portinfo(Fd,Port#port{name=val(Fd)});
+ "Monitors" ->
+ get_portinfo(Fd,Port#port{monitors=val(Fd)});
"Port controls linked-in driver" ->
get_portinfo(Fd,Port#port{controls=["Linked in driver: " |
val(Fd)]});
@@ -1820,30 +1966,12 @@ get_portinfo(Fd,Port) ->
Port
end.
-get_ets_tables(File,Pid,WS) ->
- EtsTables = ets:match_object(cdv_dump_index_table,{"=ets",Pid,'_'}),
- Fd = open(File),
- R = lists:map(fun({"=ets",P,Start}) ->
- get_etsinfo(Fd,P,Start,WS)
- end,
- EtsTables),
- close(Fd),
- R.
-get_internal_ets_tables(File,WS) ->
- InternalEts = ets:match_object(cdv_dump_index_table,
- {"=internal_ets",'_','_'}),
- Fd = open(File),
- R = lists:map(fun({"=internal_ets",Descr,Start}) ->
- {Descr,get_etsinfo(Fd,undefined,Start,WS)}
- end,
- InternalEts),
- close(Fd),
- R.
-
-get_etsinfo(Fd,Pid,Start,WS) ->
- pos_bof(Fd,Start),
- get_etsinfo(Fd,#ets_table{pid=Pid,type="hash",_=?space},WS).
+%%-----------------------------------------------------------------
+%% Page with external ets tables
+get_ets_tables(SessionId,File,Heading,TW,Pid,WS) ->
+ ParseFun = fun(Fd,Id) -> get_etsinfo(Fd,#ets_table{pid=Id},WS) end,
+ chunk_page(SessionId,File,TW,{?ets,Pid},ets_tables,Heading,ParseFun).
get_etsinfo(Fd,EtsTable,WS) ->
case line_head(Fd) of
@@ -1875,26 +2003,32 @@ get_etsinfo(Fd,EtsTable,WS) ->
EtsTable
end.
-get_timers(File,Pid) ->
- Timers = ets:match_object(cdv_dump_index_table,{"=timer",Pid,'$1'}),
+
+%% Internal ets table page
+get_internal_ets_tables(File,WS) ->
+ InternalEts = lookup_index(?internal_ets),
Fd = open(File),
- R = lists:map(fun({"=timer",P,Start}) ->
- get_timerinfo(Fd,P,Start)
- end,
- Timers),
+ R = lists:map(
+ fun({Descr,Start}) ->
+ pos_bof(Fd,Start),
+ {Descr,get_etsinfo(Fd,#ets_table{},WS)}
+ end,
+ InternalEts),
close(Fd),
R.
-get_timerinfo(Fd,Pid,Start) ->
- pos_bof(Fd,Start),
- get_timerinfo(Fd,#timer{pid=Pid,_=?space}).
+%%-----------------------------------------------------------------
+%% Page with list of all timers
+get_timers(SessionId,File,Heading,TW,Pid) ->
+ ParseFun = fun(Fd,Id) -> get_timerinfo_1(Fd,#timer{pid=Id}) end,
+ chunk_page(SessionId,File,TW,{?timer,Pid},timers,Heading,ParseFun).
-get_timerinfo(Fd,Timer) ->
+get_timerinfo_1(Fd,Timer) ->
case line_head(Fd) of
"Message" ->
- get_timerinfo(Fd,Timer#timer{msg=val(Fd)});
+ get_timerinfo_1(Fd,Timer#timer{msg=val(Fd)});
"Time left" ->
- get_timerinfo(Fd,Timer#timer{time=val(Fd)});
+ get_timerinfo_1(Fd,Timer#timer{time=val(Fd)});
"=" ++ _next_tag ->
Timer;
Other ->
@@ -1902,25 +2036,27 @@ get_timerinfo(Fd,Timer) ->
Timer
end.
+%%-----------------------------------------------------------------
+%% Page with information about the erlang distribution
nods(File) ->
- case ets:lookup(cdv_dump_index_table,"=no_distribution") of
+ case lookup_index(?no_distribution) of
[] ->
- V = ets:lookup(cdv_dump_index_table,"=visible_node"),
- H = ets:lookup(cdv_dump_index_table,"=hidden_node"),
- N = ets:lookup(cdv_dump_index_table,"=not_connected"),
+ V = lookup_index(?visible_node),
+ H = lookup_index(?hidden_node),
+ N = lookup_index(?not_connected),
Fd = open(File),
Visible = lists:map(
- fun({"=visible_node",Channel,Start}) ->
+ fun({Channel,Start}) ->
get_nodeinfo(Fd,Channel,Start)
end,
V),
Hidden = lists:map(
- fun({"=hidden_node",Channel,Start}) ->
+ fun({Channel,Start}) ->
get_nodeinfo(Fd,Channel,Start)
end,
H),
NotConnected = lists:map(
- fun({"=not_connected",Channel,Start}) ->
+ fun({Channel,Start}) ->
get_nodeinfo(Fd,Channel,Start)
end,
N),
@@ -1932,7 +2068,7 @@ nods(File) ->
get_nodeinfo(Fd,Channel,Start) ->
pos_bof(Fd,Start),
- get_nodeinfo(Fd,#nod{channel=Channel,_=?space}).
+ get_nodeinfo(Fd,#nod{channel=Channel}).
get_nodeinfo(Fd,Nod) ->
case line_head(Fd) of
@@ -1963,26 +2099,37 @@ get_nodeinfo(Fd,Nod) ->
Nod
end.
-loaded_mods(File) ->
- case ets:lookup(cdv_dump_index_table,"=loaded_modules") of
- [{"=loaded_modules",_,StartTotal}] ->
- Fd = open(File),
- pos_bof(Fd,StartTotal),
- {CC,OC} = get_loaded_mod_totals(Fd,{"unknown","unknown"}),
-
- Mods = ets:lookup(cdv_dump_index_table,"=mod"),
- LM = lists:map(fun({"=mod",M,Start}) ->
- pos_bof(Fd,Start),
- InitLM = #loaded_mod{mod=M,_=?space},
- get_loaded_mod_info(Fd,InitLM,
- fun main_modinfo/3)
- end,
- Mods),
- close(Fd),
- {CC,OC,LM};
- [] ->
- {"unknown","unknown",[]}
- end.
+%%-----------------------------------------------------------------
+%% Page with details about one loaded module
+get_loaded_mod_details(File,Mod) ->
+ [{_,Start}] = lookup_index(?mod,Mod),
+ Fd = open(File),
+ pos_bof(Fd,Start),
+ InitLM = #loaded_mod{mod=Mod,old_size="No old code exists"},
+ ModInfo = get_loaded_mod_info(Fd,InitLM,fun all_modinfo/3),
+ close(Fd),
+ ModInfo.
+
+%%-----------------------------------------------------------------
+%% Page with list of all loaded modules
+loaded_mods(SessionId,File,TW) ->
+ ParseFun =
+ fun(Fd,Id) ->
+ get_loaded_mod_info(Fd,#loaded_mod{mod=Id},
+ fun main_modinfo/3)
+ end,
+ {CC,OC} =
+ case lookup_index(?loaded_modules) of
+ [{_,StartTotal}] ->
+ Fd = open(File),
+ pos_bof(Fd,StartTotal),
+ R = get_loaded_mod_totals(Fd,{"unknown","unknown"}),
+ close(Fd),
+ R;
+ [] ->
+ {"unknown","unknown"}
+ end,
+ chunk_page(SessionId,File,TW,?mod,loaded_mods,{CC,OC},ParseFun).
get_loaded_mod_totals(Fd,{CC,OC}) ->
case line_head(Fd) of
@@ -1997,16 +2144,6 @@ get_loaded_mod_totals(Fd,{CC,OC}) ->
{CC,OC} % truncated file
end.
-get_loaded_mod_details(File,Mod) ->
- [[Start]] = ets:match(cdv_dump_index_table,{"=mod",Mod,'$1'}),
- Fd = open(File),
- pos_bof(Fd,Start),
- InitLM = #loaded_mod{mod=Mod,old_size="No old code exists",
- _="No information available"},
- ModInfo = get_loaded_mod_info(Fd,InitLM,fun all_modinfo/3),
- close(Fd),
- ModInfo.
-
get_loaded_mod_info(Fd,LM,Fun) ->
case line_head(Fd) of
"Current size" ->
@@ -2073,39 +2210,26 @@ hex_to_dec("A") -> 10;
hex_to_dec(N) -> list_to_integer(N).
+%%-----------------------------------------------------------------
+%% Page with list of all funs
+funs(SessionId,File,TW) ->
+ ParseFun = fun(Fd,_Id) -> get_funinfo(Fd,#fu{}) end,
+ chunk_page(SessionId,File,TW,?fu,funs,[],ParseFun).
-funs(File) ->
- case ets:lookup(cdv_dump_index_table,"=fun") of
- [] ->
- [];
- AllFuns ->
- Fd = open(File),
- R = lists:map(fun({"=fun",_,Start}) ->
- get_funinfo(Fd,Start)
- end,
- AllFuns),
- close(Fd),
- R
- end.
-
-get_funinfo(Fd,Start) ->
- pos_bof(Fd,Start),
- get_funinfo1(Fd,#fu{_=?space}).
-
-get_funinfo1(Fd,Fu) ->
+get_funinfo(Fd,Fu) ->
case line_head(Fd) of
"Module" ->
- get_funinfo1(Fd,Fu#fu{module=val(Fd)});
+ get_funinfo(Fd,Fu#fu{module=val(Fd)});
"Uniq" ->
- get_funinfo1(Fd,Fu#fu{uniq=val(Fd)});
+ get_funinfo(Fd,Fu#fu{uniq=val(Fd)});
"Index" ->
- get_funinfo1(Fd,Fu#fu{index=val(Fd)});
+ get_funinfo(Fd,Fu#fu{index=val(Fd)});
"Address" ->
- get_funinfo1(Fd,Fu#fu{address=val(Fd)});
+ get_funinfo(Fd,Fu#fu{address=val(Fd)});
"Native_address" ->
- get_funinfo1(Fd,Fu#fu{native_address=val(Fd)});
+ get_funinfo(Fd,Fu#fu{native_address=val(Fd)});
"Refc" ->
- get_funinfo1(Fd,Fu#fu{refc=val(Fd)});
+ get_funinfo(Fd,Fu#fu{refc=val(Fd)});
"=" ++ _next_tag ->
Fu;
Other ->
@@ -2113,28 +2237,53 @@ get_funinfo1(Fd,Fu) ->
Fu
end.
-atoms(File) ->
- case ets:lookup(cdv_dump_index_table,"=atoms") of
- [{_atoms,_Id,Start}] ->
+%%-----------------------------------------------------------------
+%% Page with list of all atoms
+atoms(SessionId,File,TW,Num) ->
+ case lookup_index(?atoms) of
+ [{_Id,Start}] ->
Fd = open(File),
pos_bof(Fd,Start),
- R = case get_n_lines_of_tag(Fd,100) of
- {all,N,Lines} ->
- {n_lines,1,N,"Atoms",Lines};
- {part,100,Lines} ->
- {n_lines,1,100,"Atoms",Lines,get(pos)};
- empty ->
- []
- end,
- close(Fd),
- R;
+ case get_atoms(Fd,?items_chunk_size) of
+ {Atoms,Cont} ->
+ crashdump_viewer_html:atoms(SessionId,TW,Num,Atoms),
+ atoms_chunks(Fd,SessionId,Cont);
+ done ->
+ crashdump_viewer_html:atoms(SessionId,TW,Num,done)
+ end;
_ ->
- []
+ crashdump_viewer_html:atoms(SessionId,TW,Num,done)
+ end.
+
+get_atoms(Fd,Number) ->
+ case get_n_lines_of_tag(Fd,Number) of
+ {all,_,Lines} ->
+ close(Fd),
+ {Lines,done};
+ {part,_,Lines} ->
+ {Lines,Number};
+ empty ->
+ close(Fd),
+ done
end.
+atoms_chunks(_Fd,SessionId,done) ->
+ crashdump_viewer_html:atoms_chunk(SessionId,done);
+atoms_chunks(Fd,SessionId,Number) ->
+ case get_atoms(Fd,Number) of
+ {Atoms,Cont} ->
+ crashdump_viewer_html:atoms_chunk(SessionId,Atoms),
+ atoms_chunks(Fd,SessionId,Cont);
+ done ->
+ atoms_chunks(Fd,SessionId,done)
+ end.
+
+
+%%-----------------------------------------------------------------
+%% Page with memory information
memory(File) ->
- case ets:lookup(cdv_dump_index_table,"=memory") of
- [{"=memory",_,Start}] ->
+ case lookup_index(?memory) of
+ [{_,Start}] ->
Fd = open(File),
pos_bof(Fd,Start),
R = get_meminfo(Fd,[]),
@@ -2153,10 +2302,12 @@ get_meminfo(Fd,Acc) ->
Key ->
get_meminfo(Fd,[{Key,val(Fd)}|Acc])
end.
-
+
+%%-----------------------------------------------------------------
+%% Page with information about allocated areas
allocated_areas(File) ->
- case ets:lookup(cdv_dump_index_table,"=allocated_areas") of
- [{"=allocated_areas",_,Start}] ->
+ case lookup_index(?allocated_areas) of
+ [{_,Start}] ->
Fd = open(File),
pos_bof(Fd,Start),
R = get_allocareainfo(Fd,[]),
@@ -2183,14 +2334,16 @@ get_allocareainfo(Fd,Acc) ->
end,
get_allocareainfo(Fd,[AllocInfo|Acc])
end.
-
+
+%%-----------------------------------------------------------------
+%% Page with information about allocators
allocator_info(File) ->
- case ets:lookup(cdv_dump_index_table,"=allocator") of
+ case lookup_index(?allocator) of
[] ->
[];
AllAllocators ->
Fd = open(File),
- R = lists:map(fun({"=allocator",Heading,Start}) ->
+ R = lists:map(fun({Heading,Start}) ->
{Heading,get_allocatorinfo(Fd,Start)}
end,
AllAllocators),
@@ -2220,14 +2373,15 @@ get_all_vals([],Acc) ->
get_all_vals([Char|Rest],Acc) ->
get_all_vals(Rest,[Char|Acc]).
-
+%%-----------------------------------------------------------------
+%% Page with hash table information
hash_tables(File) ->
- case ets:lookup(cdv_dump_index_table,"=hash_table") of
+ case lookup_index(?hash_table) of
[] ->
[];
AllHashTables ->
Fd = open(File),
- R = lists:map(fun({"=hash_table",Name,Start}) ->
+ R = lists:map(fun({Name,Start}) ->
get_hashtableinfo(Fd,Name,Start)
end,
AllHashTables),
@@ -2237,7 +2391,7 @@ hash_tables(File) ->
get_hashtableinfo(Fd,Name,Start) ->
pos_bof(Fd,Start),
- get_hashtableinfo1(Fd,#hash_table{name=Name,_=?space}).
+ get_hashtableinfo1(Fd,#hash_table{name=Name}).
get_hashtableinfo1(Fd,HashTable) ->
case line_head(Fd) of
@@ -2256,13 +2410,15 @@ get_hashtableinfo1(Fd,HashTable) ->
HashTable
end.
+%%-----------------------------------------------------------------
+%% Page with index table information
index_tables(File) ->
- case ets:lookup(cdv_dump_index_table,"=index_table") of
+ case lookup_index(?index_table) of
[] ->
[];
AllIndexTables ->
Fd = open(File),
- R = lists:map(fun({"=index_table",Name,Start}) ->
+ R = lists:map(fun({Name,Start}) ->
get_indextableinfo(Fd,Name,Start)
end,
AllIndexTables),
@@ -2272,7 +2428,7 @@ index_tables(File) ->
get_indextableinfo(Fd,Name,Start) ->
pos_bof(Fd,Start),
- get_indextableinfo1(Fd,#index_table{name=Name,_=?space}).
+ get_indextableinfo1(Fd,#index_table{name=Name}).
get_indextableinfo1(Fd,IndexTable) ->
case line_head(Fd) of
@@ -2284,6 +2440,8 @@ get_indextableinfo1(Fd,IndexTable) ->
get_indextableinfo1(Fd,IndexTable#index_table{limit=val(Fd)});
"rate" ->
get_indextableinfo1(Fd,IndexTable#index_table{rate=val(Fd)});
+ "entries" ->
+ get_indextableinfo1(Fd,IndexTable#index_table{entries=val(Fd)});
"=" ++ _next_tag ->
IndexTable;
Other ->
@@ -2295,6 +2453,8 @@ get_indextableinfo1(Fd,IndexTable) ->
+%%-----------------------------------------------------------------
+%% Expand a set of data which was shown in a truncated form on a page
get_expanded(File,Pos,Size) ->
Fd = open(File),
R = case file:pread(Fd,Pos,Size) of
@@ -2307,20 +2467,6 @@ get_expanded(File,Pos,Size) ->
R.
-get_next(File,Pos,N0,Start,What) ->
- Fd = open(File),
- pos_bof(Fd,Pos),
- R = case get_n_lines_of_tag(Fd,N0) of
- {all,N,Lines} ->
- {n_lines,Start,N,What,Lines};
- {part,N,Lines} ->
- {n_lines,Start,N,What,Lines,get(pos)}
- end,
- close(Fd),
- R.
-
-
-
replace_all(From,To,[From|Rest],Acc) ->
replace_all(From,To,Rest,[To|Acc]);
replace_all(From,To,[Char|Rest],Acc) ->
@@ -2567,3 +2713,110 @@ get_binary(_N, [], _Acc) ->
cdvbin(Sz,Pos) ->
"#CDVBin<"++integer_to_list(Sz)++","++integer_to_list(Pos)++">".
+
+
+%%-----------------------------------------------------------------
+%% Functions for accessing the cdv_dump_index_table
+reset_index_table() ->
+ ets:delete_all_objects(cdv_dump_index_table).
+
+insert_index(Tag,Id,Pos) ->
+ ets:insert(cdv_dump_index_table,{{Tag,Pos},Id}).
+
+lookup_index(Tag) ->
+ lookup_index(Tag,'$2').
+lookup_index(Tag,Id) ->
+ ets:select(cdv_dump_index_table,[{{{Tag,'$1'},Id},[],[{{Id,'$1'}}]}]).
+
+lookup_index_chunk({'#CDVFirstChunk',Tag,Id}) ->
+ ets:select(cdv_dump_index_table,
+ [{{{Tag,'$1'},Id},[],[{{Id,'$1'}}]}],
+ ?items_chunk_size);
+lookup_index_chunk(Cont) ->
+ ets:select(Cont).
+
+%% Create a tag which can be used instead of an ets Continuation for
+%% the first call to lookup_index_chunk.
+first_chunk_pointer({Tag,Id}) ->
+ {'#CDVFirstChunk',Tag,Id};
+first_chunk_pointer(Tag) ->
+ first_chunk_pointer({Tag,'$2'}).
+
+count_index(Tag) ->
+ ets:select_count(cdv_dump_index_table,[{{{Tag,'_'},'_'},[],[true]}]).
+count_index(Tag,Id) ->
+ ets:select_count(cdv_dump_index_table,[{{{Tag,'_'},Id},[],[true]}]).
+
+
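%% Editor's sketch (not part of this patch): the new index table keys every
%% entry on {Tag,Pos} and stores the tag-specific Id as the value, so
%% lookup_index/2 above returns {Id,Pos} pairs via an ets match spec. A
%% self-contained illustration, with the plain atom proc standing in for the
%% ?proc macro and a throw-away table instead of cdv_dump_index_table:
index_sketch() ->
    T = ets:new(index_sketch, [ordered_set]),
    true = ets:insert(T, {{proc,1024}, "<0.42.0>"}),
    true = ets:insert(T, {{proc,2048}, "<0.43.0>"}),
    %% Same select shape as lookup_index/2 uses above:
    [{"<0.42.0>",1024},{"<0.43.0>",2048}] =
        ets:select(T, [{{{proc,'$1'},'$2'},[],[{{'$2','$1'}}]}]),
    ets:delete(T).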
+%%-----------------------------------------------------------------
+%% Convert tags read from the crashdump to atoms used as the first part
+%% of the key in cdv_dump_index_table
+tag_to_atom("allocated_areas") -> ?allocated_areas;
+tag_to_atom("allocator") -> ?allocator;
+tag_to_atom("atoms") -> ?atoms;
+tag_to_atom("binary") -> ?binary;
+tag_to_atom("debug_proc_dictionary") -> ?debug_proc_dictionary;
+tag_to_atom("end") -> ?ende;
+tag_to_atom("erl_crash_dump") -> ?erl_crash_dump;
+tag_to_atom("ets") -> ?ets;
+tag_to_atom("fun") -> ?fu;
+tag_to_atom("hash_table") -> ?hash_table;
+tag_to_atom("hidden_node") -> ?hidden_node;
+tag_to_atom("index_table") -> ?index_table;
+tag_to_atom("instr_data") -> ?instr_data;
+tag_to_atom("internal_ets") -> ?internal_ets;
+tag_to_atom("loaded_modules") -> ?loaded_modules;
+tag_to_atom("memory") -> ?memory;
+tag_to_atom("mod") -> ?mod;
+tag_to_atom("no_distribution") -> ?no_distribution;
+tag_to_atom("node") -> ?node;
+tag_to_atom("not_connected") -> ?not_connected;
+tag_to_atom("num_atoms") -> ?num_atoms;
+tag_to_atom("old_instr_data") -> ?old_instr_data;
+tag_to_atom("port") -> ?port;
+tag_to_atom("proc") -> ?proc;
+tag_to_atom("proc_dictionary") -> ?proc_dictionary;
+tag_to_atom("proc_heap") -> ?proc_heap;
+tag_to_atom("proc_messages") -> ?proc_messages;
+tag_to_atom("proc_stack") -> ?proc_stack;
+tag_to_atom("timer") -> ?timer;
+tag_to_atom("visible_node") -> ?visible_node;
+tag_to_atom(UnknownTag) ->
+ io:format("WARNING: Found unexpected tag:~s~n",[UnknownTag]),
+ list_to_atom(UnknownTag).
+
+%%%-----------------------------------------------------------------
+%%% Create a page by sending chunk by chunk to crashdump_viewer_html
+chunk_page(SessionId,File,TW,What,HtmlCB,HtmlExtra,ParseFun) ->
+ Fd = open(File),
+ case lookup_and_parse_index_chunk(first_chunk_pointer(What),Fd,ParseFun) of
+ done ->
+ crashdump_viewer_html:chunk_page(HtmlCB,SessionId,TW,HtmlExtra,done);
+ {Chunk,Cont} ->
+ HtmlInfo = crashdump_viewer_html:chunk_page(
+ HtmlCB,
+ SessionId,TW,HtmlExtra,Chunk),
+ chunk_page_1(Fd,HtmlInfo,SessionId,ParseFun,
+ lookup_and_parse_index_chunk(Cont,Fd,ParseFun))
+ end.
+
+chunk_page_1(_Fd,HtmlInfo,SessionId,_ParseFun,done) ->
+ crashdump_viewer_html:chunk(SessionId,done,HtmlInfo);
+chunk_page_1(Fd,HtmlInfo,SessionId,ParseFun,{Chunk,Cont}) ->
+ crashdump_viewer_html:chunk(SessionId,Chunk,HtmlInfo),
+ chunk_page_1(Fd,HtmlInfo,SessionId,ParseFun,
+ lookup_and_parse_index_chunk(Cont,Fd,ParseFun)).
+
+lookup_and_parse_index_chunk(Pointer,Fd,ParseFun) ->
+ case lookup_index_chunk(Pointer) of
+ '$end_of_table' ->
+ close(Fd),
+ done;
+ {Chunk,Cont} ->
+ R = lists:map(fun({Id,Start}) ->
+ pos_bof(Fd,Start),
+ ParseFun(Fd,Id)
+ end,
+ Chunk),
+ {R,Cont}
+ end.
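%% Editor's sketch (not part of this patch): the shape of a page callback
%% wired into chunk_page/7 above, following the same pattern as
%% get_ets_tables and get_timers earlier in this diff. get_portinfo/2 is
%% assumed here; it would parse one "=port" section into a #port{} record.
get_ports_sketch(SessionId,File,TW) ->
    ParseFun = fun(Fd,Id) -> get_portinfo(Fd,#port{id=Id}) end,
    chunk_page(SessionId,File,TW,?port,ports,[],ParseFun).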
diff --git a/lib/observer/src/crashdump_viewer.hrl b/lib/observer/src/crashdump_viewer.hrl
index 6ce727cd3e..466f33b63b 100644
--- a/lib/observer/src/crashdump_viewer.hrl
+++ b/lib/observer/src/crashdump_viewer.hrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2003-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2003-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -17,117 +17,136 @@
%% %CopyrightEnd%
%%
-define(space, "&nbsp;").
+-define(unknown, "unknown").
-record(menu_item,{index,picture,text,depth,children,state,target}).
-record(general_info,
{created,
- slogan,
- system_vsn,
- compile_time,
- taints,
- node_name,
- num_atoms,
- num_procs,
- num_ets,
- num_fun,
- mem_tot,
- mem_max,
- instr_info}).
+ slogan=?space,
+ system_vsn=?space,
+ compile_time=?space,
+ taints=?space,
+ node_name=?space,
+ num_atoms=?space,
+ num_procs=?space,
+ num_ets=?space,
+ num_timers=?space,
+ num_fun=?space,
+ mem_tot=?space,
+ mem_max=?space,
+ instr_info=?space}).
-record(proc,
+	%% Initial data according to the following:
+ %%
+ %% msg_q_len, reds and stack_heap are integers because it must
+ %% be possible to sort on them. All other fields are strings
+ %%
+	%% for old dumps, start_time, parent and the number of heap fragments
+	%% do not exist
+ %%
+ %% current_func can be both "current function" and
+ %% "last scheduled in for"
+ %%
+ %% stack_dump, message queue and dictionaries should only be
+ %% displayed as a link to "Expand" (if dump is from OTP R9B
+ %% or newer)
{pid,
- name,
- init_func,
- parent,
- start_time,
- state,
- current_func,
- msg_q_len,
- msg_q,
- last_calls,
- links,
- prog_count,
- cp,
- arity,
- dict,
- debug_dict,
- reds,
- num_heap_frag,
- heap_frag_data,
- stack_heap,
- old_heap,
- heap_unused,
- old_heap_unused,
- new_heap_start,
- new_heap_top,
- stack_top,
- stack_end,
- old_heap_start,
- old_heap_top,
- old_heap_end,
- stack_dump}).
+ name=?space,
+ init_func=?space,
+ parent=?unknown,
+ start_time=?unknown,
+ state=?space,
+ current_func={"Current Function",?space},
+ msg_q_len=0,
+ msg_q=?space,
+ last_calls=?space,
+ links=?space,
+ prog_count=?space,
+ cp=?space,
+ arity=?space,
+ dict=?space,
+ debug_dict=?space,
+ reds=0,
+ num_heap_frag=?unknown,
+ heap_frag_data=?space,
+ stack_heap=0,
+ old_heap=?space,
+ heap_unused=?space,
+ old_heap_unused=?space,
+ new_heap_start=?space,
+ new_heap_top=?space,
+ stack_top=?space,
+ stack_end=?space,
+ old_heap_start=?space,
+ old_heap_top=?space,
+ old_heap_end=?space,
+ stack_dump=?space}).
-record(port,
{id,
- slot,
- connected,
- links,
- controls}).
+ slot=?space,
+ connected=?space,
+ links=?space,
+ name=?space,
+ monitors=?space,
+ controls=?space}).
-record(ets_table,
{pid,
- slot,
- id,
- name,
- type,
- buckets,
- size,
- memory}).
+ slot=?space,
+ id=?space,
+ name=?space,
+ type="hash",
+ buckets=?space,
+ size=?space,
+ memory=?space}).
-record(timer,
{pid,
- msg,
- time}).
+ msg=?space,
+ time=?space}).
-record(fu,
- {module,
- uniq,
- index,
- address,
- native_address,
- refc}).
+ {module=?space,
+ uniq=?space,
+ index=?space,
+ address=?space,
+ native_address=?space,
+ refc=?space}).
-record(nod,
- {name,
+ {name=?space,
channel,
- controller,
- creation,
- remote_links,
- remote_mon,
- remote_mon_by,
- error}).
+ controller=?space,
+ creation=?space,
+ remote_links=?space,
+ remote_mon=?space,
+ remote_mon_by=?space,
+ error=?space}).
-record(loaded_mod,
{mod,
- current_size,
- current_attrib,
- current_comp_info,
- old_size,
- old_attrib,
- old_comp_info}).
+ current_size=?space,
+ current_attrib=?space,
+ current_comp_info=?space,
+ old_size=?space,
+ old_attrib=?space,
+ old_comp_info=?space}).
-record(hash_table,
{name,
- size,
- used,
- objs,
- depth}).
+ size=?space,
+ used=?space,
+ objs=?space,
+ depth=?space}).
-record(index_table,
{name,
- size,
- used,
- limit,
- rate}).
+ size=?space,
+ used=?space,
+ limit=?space,
+ rate=?space,
+ entries=?space}).
diff --git a/lib/observer/src/crashdump_viewer_html.erl b/lib/observer/src/crashdump_viewer_html.erl
index 5e7bbf62a0..24a80b1916 100644
--- a/lib/observer/src/crashdump_viewer_html.erl
+++ b/lib/observer/src/crashdump_viewer_html.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2003-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2003-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -32,25 +32,23 @@
general_info/1,
pretty_info_page/2,
info_page/2,
- procs_summary/4,
proc_details/4,
expanded_memory/2,
expanded_binary/1,
- next/2,
- ports/3,
- timers/3,
- ets_tables/4,
+ port/3,
+ internal_ets_tables/2,
nods/2,
- loaded_mods/2,
loaded_mod_details/2,
- funs/2,
- atoms/3,
+ atoms/4,
+ atoms_chunk/2,
memory/2,
allocated_areas/2,
allocator_info/2,
hash_tables/2,
index_tables/2,
- error/2]).
+ error/2,
+ chunk_page/5,
+ chunk/3]).
-include("crashdump_viewer.hrl").
@@ -79,23 +77,20 @@ read_file_frame() ->
read_file_frame_body() ->
- Entry =
- case webtool:is_localhost() of
- true -> [input("TYPE=file NAME=browse SIZE=40"),
- input("TYPE=hidden NAME=path")];
- false -> input("TYPE=text NAME=path SIZE=60")
- end,
+    %% Using a plain text input field instead of a file input field
+    %% (e.g. <INPUT TYPE=file NAME=path SIZE=40>) because most
+    %% browsers cannot forward the full path from this dialog even if
+    %% the browser is running on localhost (ref. the 'fakepath' problem)
+ Entry = input("TYPE=text NAME=path SIZE=60"),
Form =
form(
- "NAME=read_file_form METHOD=post ACTION= \"./read_file\"",
+ "NAME=read_file_form METHOD=post ACTION=\"./read_file\"",
table(
"BORDER=0",
[tr(td("COLSPAN=2","Enter file to analyse")),
tr(
[td(Entry),
- td("ALIGN=center",
- input("TYPE=submit onClick=\"path.value=browse.value;\""
- "VALUE=Ok"))])])),
+ td("ALIGN=center",input("TYPE=submit VALUE=Ok"))])])),
table(
"WIDTH=100% HEIGHT=60%",
tr("VALIGN=middle",
@@ -235,6 +230,8 @@ general_info_body(Heading,GenInfo) ->
td(GenInfo#general_info.num_procs)]),
tr([th("ALIGN=left BGCOLOR=\"#8899AA\"","ETS tables"),
td(GenInfo#general_info.num_ets)]),
+ tr([th("ALIGN=left BGCOLOR=\"#8899AA\"","Timers"),
+ td(GenInfo#general_info.num_timers)]),
tr([th("ALIGN=left BGCOLOR=\"#8899AA\"","Funs"),
td(GenInfo#general_info.num_fun)])]),
case GenInfo#general_info.instr_info of
@@ -295,60 +292,6 @@ pretty_info_body(Heading,Info) ->
pre(pretty_format(Info))].
%%%-----------------------------------------------------------------
-%%% Make table with summary of process information
-procs_summary(Sorted,ProcsSummary,TW,SharedHeap) ->
- Heading = "Process Information",
- header(Heading,
- body(
- procs_summary_body(Heading,ProcsSummary,TW,Sorted,SharedHeap))).
-
-procs_summary_body(Heading,[],TW,_Sorted,_SharedHeap) ->
- [h1(Heading),
- warn(TW),
- "No processes were found\n"];
-procs_summary_body(Heading,ProcsSummary,TW,Sorted,SharedHeap) ->
- MemHeading =
- if SharedHeap ->
- "Stack";
- true ->
- "Stack+heap"
- end,
-
- [heading(Heading,"processes"),
- warn(TW),
- table(
- "BORDER=4 CELLPADDING=4",
- [tr(
- [summary_table_head("pid","Pid",Sorted),
- summary_table_head("name_func","Name/Spawned as",Sorted),
- summary_table_head("state","State",Sorted),
- summary_table_head("reds","Reductions",Sorted),
- summary_table_head("mem",MemHeading,Sorted),
- summary_table_head("msg_q_len","MsgQ Length",Sorted)]) |
- lists:map(fun(Proc) -> procs_summary_table(Proc) end,ProcsSummary)])].
-
-summary_table_head(Sorted,Text,Sorted) ->
- %% Mark the sorted column (bigger and italic)
- th(font("SIZE=\"+1\"",em(href("./sort_procs?sort="++Sorted,Text))));
-summary_table_head(SortOn,Text,_Sorted) ->
- th(href("./sort_procs?sort="++SortOn,Text)).
-
-procs_summary_table(Proc) ->
- #proc{pid=Pid,name=Name,state=State,
- reds=Reds,stack_heap=Mem0,msg_q_len=MsgQLen}=Proc,
- Mem = case Mem0 of
- -1 -> "unknown";
- _ -> integer_to_list(Mem0)
- end,
- tr(
- [td(href(["./proc_details?pid=",Pid],Pid)),
- td(Name),
- td(State),
- td("ALIGN=right",integer_to_list(Reds)),
- td("ALIGN=right",Mem),
- td("ALIGN=right",integer_to_list(MsgQLen))]).
-
-%%%-----------------------------------------------------------------
%%% Print details for one process
proc_details(Pid,Proc,TW,SharedHeap) ->
Script =
@@ -594,83 +537,33 @@ expanded_binary_body(Heading,Bin) ->
href("javascript:history.go(-1)","BACK")].
%%%-----------------------------------------------------------------
-%%% Print table of ports
-ports(Heading,Ports,TW) ->
- header(Heading,body(ports_body(Heading,Ports,TW))).
+%%% Print info for one port
+port(Heading,Port,TW) ->
+ header(Heading,body(port_body(Heading,Port,TW))).
-ports_body(Heading,[],TW) ->
- [h1(Heading),
- warn(TW),
- "No ports were found\n"];
-ports_body(Heading,Ports,TW) ->
+port_body(Heading,Port,TW) ->
[heading(Heading,"ports"),
warn(TW),
table(
"BORDER=4 CELLPADDING=4",
- [tr(
- [th("Id"),
- th("Slot"),
- th("Connected"),
- th("Links"),
- th("Controls")]) |
- lists:map(fun(Port) -> ports_table(Port) end, Ports)])].
+ [tr([th(Head) || Head <- port_table_head()]), ports_table(Port)])].
-ports_table(Port) ->
- #port{id=Id,slot=Slot,connected=Connected,links=Links,
- controls=Controls}=Port,
- tr(
- [td(Id),
- td("ALIGHT=right",Slot),
- td(href_proc_port(Connected)),
- td(href_proc_port(Links)),
- td(Controls)]).
-
%%%-----------------------------------------------------------------
-%%% Print table of ETS tables
-ets_tables(Heading,EtsTables,InternalEts,TW) ->
- header(Heading,body(ets_tables_body(Heading,EtsTables,InternalEts,TW))).
+%%% Print table of internal ETS tables
+internal_ets_tables(InternalEts,TW) ->
+ Heading = "Internal ETS tables",
+ header(Heading,body(internal_ets_tables_body(Heading,InternalEts,TW))).
-ets_tables_body(Heading,[],InternalEts,TW) ->
+internal_ets_tables_body(Heading,[],TW) ->
[h1(Heading),
warn(TW),
- "No ETS tables were found\n" |
- internal_ets_tables_table(InternalEts)];
-ets_tables_body(Heading,EtsTables,InternalEts,TW) ->
- [heading(Heading,"ets_tables"),
+ "No internal ETS tables were found\n"];
+internal_ets_tables_body(Heading,InternalEts,TW) ->
+ [heading(Heading,"internal_ets_tables"),
warn(TW),
table(
"BORDER=4 CELLPADDING=4",
[tr(
- [th("Owner"),
- th("Slot"),
- th("Id"),
- th("Name"),
- th("Type"),
- th("Buckets"),
- th("Objects"),
- th("Memory (bytes)")]) |
- lists:map(fun(EtsTable) -> ets_tables_table(EtsTable) end,
- EtsTables)]) |
- internal_ets_tables_table(InternalEts)].
-
-ets_tables_table(EtsTable) ->
- #ets_table{pid=Pid,slot=Slot,id=Id,name=Name,type=Type,
- buckets=Buckets,size=Size,memory=Memory} = EtsTable,
- tr(
- [td(href_proc_port(Pid)),
- td(Slot),
- td(Id),
- td(Name),
- td(Type),
- td("ALIGN=right",Buckets),
- td("ALIGN=right",Size),
- td("ALIGN=right",Memory)]).
-
-internal_ets_tables_table(InternalEtsTables) ->
- [h2("Internal ETS tables"),
- table(
- "BORDER=4 CELLPADDING=4",
- [tr(
[th("Description"),
th("Id"),
th("Name"),
@@ -681,7 +574,7 @@ internal_ets_tables_table(InternalEtsTables) ->
lists:map(fun(InternalEtsTable) ->
internal_ets_tables_table1(InternalEtsTable)
end,
- InternalEtsTables)])].
+ InternalEts)])].
internal_ets_tables_table1({Descr,InternalEtsTable}) ->
#ets_table{id=Id,name=Name,type=Type,buckets=Buckets,
@@ -696,33 +589,6 @@ internal_ets_tables_table1({Descr,InternalEtsTable}) ->
td("ALIGN=right",Memory)]).
%%%-----------------------------------------------------------------
-%%% Print table of timers
-timers(Heading,Timers,TW) ->
- header(Heading,body(timers_body(Heading,Timers,TW))).
-
-timers_body(Heading,[],TW) ->
- [h1(Heading),
- warn(TW),
- "No timers were found\n"];
-timers_body(Heading,Timers,TW) ->
- [heading(Heading,"timers"),
- warn(TW),
- table(
- "BORDER=4 CELLPADDING=4",
- [tr(
- [th("Owner"),
- th("Message"),
- th("Time left")]) |
- lists:map(fun(Timer) -> timers_table(Timer) end, Timers)])].
-
-timers_table(Timer) ->
- #timer{pid=Pid,msg=Msg,time=Time}=Timer,
- tr(
- [td(href_proc_port(Pid)),
- td(Msg),
- td("ALIGN=right",Time)]).
-
-%%%-----------------------------------------------------------------
%%% Print table of nodes in distribution
nods(Nods,TW) ->
header("Distribution Information",body(nodes_body(Nods,TW))).
@@ -826,33 +692,6 @@ format_extra_info(Error) ->
?space -> "";
_ -> font("COLOR=\"#FF0000\"",["ERROR: ",Error,"\n"])
end.
-%%%-----------------------------------------------------------------
-%%% Print loaded modules information
-loaded_mods({CC,OC,LM},TW) ->
- Heading = "Loaded Modules Information",
- header(Heading,body(loaded_mods_body(Heading,CC,OC,LM,TW))).
-
-loaded_mods_body(Heading,"unknown","unknown",[],TW) ->
- [h1(Heading),
- warn(TW),
- "No loaded modules information was found\n"];
-loaded_mods_body(Heading,CC,OC,LM,TW) ->
- [heading(Heading,"loaded_modules"),
- warn(TW),
- p([b("Current code: "),CC," bytes",br(),
- b("Old code: "),OC," bytes"]),
- table(
- "BORDER=4 CELLPADDING=4",
- [tr([th("Module"),
- th("Current size (bytes)"),
- th("Old size (bytes)")]) |
- lists:map(fun(Mod) -> loaded_mods_table(Mod) end,LM)])].
-
-loaded_mods_table(#loaded_mod{mod=Mod,current_size=CS,old_size=OS}) ->
- tr([td(href(["loaded_mod_details?mod=",Mod],Mod)),
- td("ALIGN=right",CS),
- td("ALIGN=right",OS)]).
-
%%%-----------------------------------------------------------------
%%% Print detailed information about one module
@@ -882,107 +721,33 @@ loaded_mod_details_body(ModInfo,TW) ->
%%%-----------------------------------------------------------------
-%%% Print table of funs
-funs(Funs,TW) ->
- Heading = "Fun Information",
- header(Heading,body(funs_body(Heading,Funs,TW))).
-
-funs_body(Heading,[],TW) ->
- [h1(Heading),
- warn(TW),
- "No Fun information was found\n"];
-funs_body(Heading,Funs,TW) ->
- [heading(Heading,"funs"),
- warn(TW),
- table(
- "BORDER=4 CELLPADDING=4",
- [tr(
- [th("Module"),
- th("Uniq"),
- th("Index"),
- th("Address"),
- th("Native_address"),
- th("Refc")]) |
- lists:map(fun(Fun) -> funs_table(Fun) end, Funs)])].
-
-funs_table(Fu) ->
- #fu{module=Module,uniq=Uniq,index=Index,address=Address,
- native_address=NativeAddress,refc=Refc}=Fu,
- tr(
- [td(Module),
- td("ALIGN=right",Uniq),
- td("ALIGN=right",Index),
- td(Address),
- td(NativeAddress),
- td("ALIGN=right",Refc)]).
-
-%%%-----------------------------------------------------------------
%%% Print atoms
-atoms(Atoms,Num,TW) ->
+atoms(SessionId,TW,Num,FirstChunk) ->
Heading = "Atoms",
- header(Heading,body(atoms_body(Heading,Atoms,Num,TW))).
-
-atoms_body(Heading,[],Num,TW) ->
- [h1(Heading),
- warn(TW),
- "No atoms were found in log",br(),
- "Total number of atoms in node was ", Num, br()];
-atoms_body(Heading,Atoms,Num,TW) ->
- [heading(Heading,"atoms"),
- warn(TW),
- "Total number of atoms in node was ", Num,
- br(),
- "The last created atom is shown first",
- br(),br() |
- n_first(Atoms)].
-
-n_first({n_lines,Start,N,What,Lines,Pos}) ->
- NextHref = next_href(N,What,Pos,Start),
- [What," number ",integer_to_list(Start),"-",integer_to_list(Start+N-1),
- br(),
- NextHref,
- pre(Lines),
- NextHref];
-n_first({n_lines,_Start,_N,_What,Lines}) ->
- [pre(Lines)].
-
-%%%-----------------------------------------------------------------
-%%% Print next N lines of "something"
-next(NLines,TW) ->
- header(element(4,NLines),body(next_body(NLines,TW))).
-
-next_body({n_lines,Start,N,What,Lines,Pos},TW) ->
- PrefHref = prev_href(),
- NextHref = next_href(N,What,Pos,Start),
- [warn(TW),
- What," number ",integer_to_list(Start),"-",integer_to_list(Start+N-1),
- br(),
- PrefHref,
- ?space,
- NextHref,
- pre(Lines),
- PrefHref,
- ?space,
- NextHref];
-next_body({n_lines,Start,N,What,Lines},TW) ->
- PrefHref = prev_href(),
- [warn(TW),
- What," number ",integer_to_list(Start),"-",integer_to_list(Start+N-1),
- br(),
- PrefHref,
- pre(Lines),
- PrefHref].
-
-
-prev_href() ->
- href("javascript:history.back()",["Previous"]).
-
-next_href(N,What,Pos,Start) ->
- href(["./next?pos=",integer_to_list(Pos),
- "&num=",integer_to_list(N),
- "&start=",integer_to_list(Start+N),
- "&what=",What],
- "Next").
+ case FirstChunk of
+ done ->
+ deliver_first(SessionId,[start_html_page(Heading),
+ h1(Heading),
+ warn(TW),
+ "No atoms were found in log",br(),
+ "Total number of atoms in node was ", Num,
+ br()]);
+ _ ->
+ deliver_first(SessionId,[start_html_page(Heading),
+ heading(Heading,"atoms"),
+ warn(TW),
+ "Total number of atoms in node was ", Num,
+ br(),
+ "The last created atom is shown first",
+ br(),
+ start_pre()]),
+ atoms_chunk(SessionId,FirstChunk)
+ end.
+
+atoms_chunk(SessionId,done) ->
+ deliver(SessionId,[stop_pre(),stop_html_page()]);
+atoms_chunk(SessionId,Atoms) ->
+ deliver(SessionId,Atoms).
%%%-----------------------------------------------------------------
%%% Print memory information
@@ -1120,52 +885,92 @@ index_tables_body(Heading,IndexTables,TW) ->
th("Size"),
th("Limit"),
th("Used"),
- th("Rate")]) |
+ th("Rate"),
+ th("Entries")]) |
lists:map(fun(IndexTable) -> index_tables_table(IndexTable) end,
IndexTables)])].
index_tables_table(IndexTable) ->
- #index_table{name=Name,size=Size,limit=Limit,used=Used,rate=Rate} =
- IndexTable,
+ #index_table{name=Name,size=Size,limit=Limit,used=Used,
+ rate=Rate,entries=Entries} = IndexTable,
tr(
[td(Name),
td("ALIGN=right",Size),
td("ALIGN=right",Limit),
td("ALIGN=right",Used),
- td("ALIGN=right",Rate)]).
+ td("ALIGN=right",Rate),
+ td("ALIGN=right",Entries)]).
%%%-----------------------------------------------------------------
%%% Internal library
+start_html_page(Title) ->
+ [only_http_header(),
+ start_html(),
+ only_html_header(Title),
+ start_html_body()].
+
+stop_html_page() ->
+ [stop_html_body(),
+ stop_html()].
+
+only_http_header() ->
+ ["Pragma:no-cache\r\n",
+ "Content-type: text/html\r\n\r\n"].
+
+only_html_header(Title) ->
+ only_html_header(Title,"").
+only_html_header(Title,JavaScript) ->
+ ["<HEAD>\n",
+ "<TITLE>", Title, "</TITLE>\n",
+ JavaScript,
+ "</HEAD>\n"].
+
+start_html() ->
+ "<HTML>\n".
+stop_html() ->
+ "</HTML>".
+start_html_body() ->
+ "<BODY BGCOLOR=\"#FFFFFF\">\n".
+stop_html_body() ->
+ "</BODY>\n".
+
header(Body) ->
header("","",Body).
header(Title,Body) ->
header(Title,"",Body).
header(Title,JavaScript,Body) ->
- ["Pragma:no-cache\r\n",
- "Content-type: text/html\r\n\r\n",
+ [only_http_header(),
html_header(Title,JavaScript,Body)].
html_header(Title,JavaScript,Body) ->
- ["<HTML>\n",
- "<HEAD>\n",
- "<TITLE>", Title, "</TITLE>\n",
- JavaScript,
- "</HEAD>\n",
+ [start_html(),
+ only_html_header(Title,JavaScript),
Body,
- "</HTML>"].
+ stop_html()].
body(Text) ->
- ["<BODY BGCOLOR=\"#FFFFFF\">\n",
+ [start_html_body(),
Text,
- "<\BODY>\n"].
+ stop_html_body()].
frameset(Args,Frames) ->
["<FRAMESET ",Args,">\n", Frames, "\n</FRAMESET>\n"].
frame(Args) ->
["<FRAME ",Args, ">\n"].
+start_visible_table() ->
+ start_table("BORDER=\"4\" CELLPADDING=\"4\"").
+start_visible_table(ColTitles) ->
+ [start_visible_table(),
+ tr([th(ColTitle) || ColTitle <- ColTitles])].
+
+start_table(Args) ->
+ ["<TABLE ", Args, ">\n"].
+stop_table() ->
+ "</TABLE>\n".
+
table(Args,Text) ->
- ["<TABLE ", Args, ">\n", Text, "\n</TABLE>\n"].
+ [start_table(Args), Text, stop_table()].
tr(Text) ->
["<TR>\n", Text, "\n</TR>\n"].
tr(Args,Text) ->
@@ -1183,8 +988,12 @@ b(Text) ->
["<B>",Text,"</B>"].
em(Text) ->
["<EM>",Text,"</EM>\n"].
+start_pre() ->
+ "<PRE>".
+stop_pre() ->
+ "</PRE>".
pre(Text) ->
- ["<PRE>",Text,"</PRE>"].
+ [start_pre(),Text,stop_pre()].
href(Link,Text) ->
["<A HREF=\"",Link,"\">",Text,"</A>"].
href(Args,Link,Text) ->
@@ -1199,8 +1008,6 @@ input(Args) ->
["<INPUT ", Args, ">\n"].
h1(Text) ->
["<H1>",Text,"</H1>\n"].
-h2(Text) ->
- ["<H2>",Text,"</H2>\n"].
font(Args,Text) ->
["<FONT ",Args,">\n",Text,"\n</FONT>\n"].
p(Text) ->
@@ -1223,7 +1030,7 @@ href_proc_port([$#,$F,$u,$n,$<|T],Acc) ->
href_proc_port([$#,$P,$o,$r,$t,$<|T],Acc) ->
{[$#|Port]=HashPort,Rest} = to_gt(T,[$;,$t,$l,$&,$t,$r,$o,$P,$#]),
href_proc_port(Rest,[href("TARGET=\"main\"",
- ["./ports?port=",Port],HashPort)|Acc]);
+ ["./port?port=",Port],HashPort)|Acc]);
href_proc_port([$<,$<|T],Acc) ->
%% No links to binaries
href_proc_port(T,[$;,$t,$l,$&,$;,$t,$l,$&|Acc]);
@@ -1243,7 +1050,7 @@ href_proc_port([$",$#,$C,$D,$V,$P,$o,$r,$t,$<|T],Acc) ->
%% Port written by crashdump_viewer:parse_term(...)
{[$#|Port]=HashPort,[$"|Rest]} = to_gt(T,[$;,$t,$l,$&,$t,$r,$o,$P,$#]),
href_proc_port(Rest,[href("TARGET=\"main\"",
- ["./ports?port=",Port],HashPort)|Acc]);
+ ["./port?port=",Port],HashPort)|Acc]);
href_proc_port([$",$#,$C,$D,$V,$P,$i,$d,$<|T],Acc) ->
%% Pid written by crashdump_viewer:parse_term(...)
{Pid,[$"|Rest]} = to_gt(T,[$;,$t,$l,$&]),
@@ -1422,7 +1229,7 @@ replace_insrt("'trsni$'"++Rest,[H|T],Acc) -> % the list is reversed here!
"&lt;" ++ _Pid ->
href("TARGET=\"main\"",["./proc_details?pid=",H],H);
"#Port&lt;" ++ Port ->
- href("TARGET=\"main\"",["./ports?port=","Port&lt;"++Port],H);
+ href("TARGET=\"main\"",["./port?port=","Port&lt;"++Port],H);
"#" ++ _other ->
H
end,
@@ -1431,3 +1238,173 @@ replace_insrt([H|T],Insrt,Acc) ->
replace_insrt(T,Insrt,[H|Acc]);
replace_insrt([],[],Acc) ->
Acc.
+
+%%%-----------------------------------------------------------------
+%%% Create a page with one table by delivering chunk by chunk to
+%%% inets. crashdump_viewer first calls chunk_page/5 once, then
+%%% chunk/3 multiple times until all data is delivered.
+chunk_page(processes,SessionId,TW,{Sorted,SharedHeap},FirstChunk) ->
+ Columns = procs_summary_table_head(Sorted,SharedHeap),
+ chunk_page(SessionId, "Process Information", TW, FirstChunk,
+ "processes", Columns, fun procs_summary_table/1);
+chunk_page(ports,SessionId,TW,_,FirstChunk) ->
+ chunk_page(SessionId, "Port Information", TW, FirstChunk,
+ "ports", port_table_head(), fun ports_table/1);
+chunk_page(ets_tables,SessionId,TW,Heading,FirstChunk) ->
+ Columns = ["Owner",
+ "Slot",
+ "Id",
+ "Name",
+ "Type",
+ "Buckets",
+ "Objects",
+ "Memory (bytes)"],
+ chunk_page(SessionId, Heading, TW, FirstChunk,
+ "ets_tables", Columns, fun ets_tables_table/1);
+chunk_page(timers,SessionId,TW,Heading,FirstChunk) ->
+ chunk_page(SessionId, Heading, TW, FirstChunk, "timers",
+ ["Owner","Message","Time left"], fun timers_table/1);
+chunk_page(loaded_mods,SessionId,TW,{CC,OC},FirstChunk) ->
+ TotalsInfo = p([b("Current code: "),CC," bytes",br(),
+ b("Old code: "),OC," bytes"]),
+ Columns = ["Module","Current size (bytes)","Old size (bytes)"],
+ chunk_page(SessionId, "Loaded Modules Information", TW, FirstChunk,
+ "loaded_modules", TotalsInfo,Columns, fun loaded_mods_table/1);
+chunk_page(funs,SessionId, TW, _, FirstChunk) ->
+ Columns = ["Module",
+ "Uniq",
+ "Index",
+ "Address",
+ "Native_address",
+ "Refc"],
+ chunk_page(SessionId, "Fun Information", TW, FirstChunk,
+ "funs", Columns, fun funs_table/1).
+
+chunk_page(SessionId,Heading,TW,FirstChunk,Type,TableColumns,TableFun) ->
+ chunk_page(SessionId,Heading,TW,FirstChunk,Type,[],TableColumns,TableFun).
+chunk_page(SessionId,Heading,TW,done,Type,_TotalsInfo,_TableColumns,_TableFun) ->
+ no_info_found(SessionId,Heading,TW,Type);
+chunk_page(SessionId,Heading,TW,FirstChunk,Type,TotalsInfo,TableColumns,TableFun) ->
+ deliver_first(SessionId,[start_html_page(Heading),
+ heading(Heading,Type),
+ warn(TW),
+ TotalsInfo,
+ start_visible_table(TableColumns)]),
+ chunk(SessionId,FirstChunk,TableFun),
+ TableFun.
+
+no_info_found(SessionId, Heading, TW, Type) ->
+ Info = ["No ", Type, " were found\n"],
+ deliver_first(SessionId,[start_html_page(Heading),
+ h1(Heading),
+ warn(TW),
+ Info,
+ stop_html_page()]).
+
+chunk(SessionId, done, _TableFun) ->
+ deliver(SessionId,[stop_table(),stop_html_page()]);
+chunk(SessionId, Items, TableFun) ->
+ deliver(SessionId, [lists:map(TableFun, Items),
+ stop_table(), %! Will produce an empty table at the end
+ start_visible_table()]). % of the page :(
+
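%% Editor's sketch (not part of this patch): the call sequence described in
%% the comment above chunk_page/5, with the remaining chunks pre-parsed into
%% a list for brevity (the real caller in crashdump_viewer.erl pulls them
%% from an ets continuation, as shown earlier in this diff).
drive_chunk_page(Type,SessionId,TW,Extra,done,_MoreChunks) ->
    chunk_page(Type,SessionId,TW,Extra,done);
drive_chunk_page(Type,SessionId,TW,Extra,FirstChunk,MoreChunks) ->
    TableFun = chunk_page(Type,SessionId,TW,Extra,FirstChunk),
    lists:foreach(fun(Items) -> chunk(SessionId,Items,TableFun) end,
                  MoreChunks),
    chunk(SessionId,done,TableFun).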
+%%%-----------------------------------------------------------------
+%%% Deliver part of a page to inets
+%%% The first part, which includes the HTTP header, must always be
+%%% delivered as a string (i.e. no binaries). The rest of the page is
+%%% better delivered as binaries in order to avoid data copying.
+deliver_first(SessionId,String) ->
+ mod_esi:deliver(SessionId,String).
+deliver(SessionId,IoList) ->
+ mod_esi:deliver(SessionId,[list_to_binary(IoList)]).
+
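%% Editor's sketch (not part of this patch): the delivery order the comment
%% above prescribes - the first call carries the HTTP header as a plain
%% string, later calls are turned into binaries by deliver/2. SessionId is
%% whatever mod_esi handed to the ESI callback.
deliver_sketch(SessionId) ->
    deliver_first(SessionId, [only_http_header(),
                              start_html(),
                              only_html_header("Sketch"),
                              start_html_body(),
                              start_visible_table(["Column"])]),
    deliver(SessionId, [tr(td("one row per chunk"))]),
    deliver(SessionId, [stop_table(),
                        stop_html_body(),
                        stop_html()]).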
+
+%%%-----------------------------------------------------------------
+%%% Page specific stuff for chunk pages
+procs_summary_table_head(Sorted,SharedHeap) ->
+ MemHeading =
+ if SharedHeap ->
+ "Stack";
+ true ->
+ "Stack+heap"
+ end,
+ [procs_summary_table_head("pid","Pid",Sorted),
+ procs_summary_table_head("name_func","Name/Spawned as",Sorted),
+ procs_summary_table_head("state","State",Sorted),
+ procs_summary_table_head("reds","Reductions",Sorted),
+ procs_summary_table_head("mem",MemHeading,Sorted),
+ procs_summary_table_head("msg_q_len","MsgQ Length",Sorted)].
+
+procs_summary_table_head(_,Text,no_sort) ->
+ Text;
+procs_summary_table_head(Sorted,Text,Sorted) ->
+ %% Mark the sorted column (bigger and italic)
+ font("SIZE=\"+1\"",em(href("./sort_procs?sort="++Sorted,Text)));
+procs_summary_table_head(SortOn,Text,_Sorted) ->
+ href("./sort_procs?sort="++SortOn,Text).
+
+procs_summary_table(Proc) ->
+ #proc{pid=Pid,name=Name,state=State,
+ reds=Reds,stack_heap=Mem0,msg_q_len=MsgQLen}=Proc,
+ Mem = case Mem0 of
+ -1 -> "unknown";
+ _ -> integer_to_list(Mem0)
+ end,
+ tr(
+ [td(href(["./proc_details?pid=",Pid],Pid)),
+ td(Name),
+ td(State),
+ td("ALIGN=right",integer_to_list(Reds)),
+ td("ALIGN=right",Mem),
+ td("ALIGN=right",integer_to_list(MsgQLen))]).
+
+port_table_head() ->
+ ["Id","Slot","Connected","Links","Name","Monitors","Controls"].
+
+ports_table(Port) ->
+ #port{id=Id,slot=Slot,connected=Connected,links=Links,name=Name,
+ monitors=Monitors,controls=Controls}=Port,
+ tr(
+ [td(Id),
+ td("ALIGN=right",Slot),
+ td(href_proc_port(Connected)),
+ td(href_proc_port(Links)),
+ td(Name),
+ td(href_proc_port(Monitors)),
+ td(Controls)]).
+
+ets_tables_table(EtsTable) ->
+ #ets_table{pid=Pid,slot=Slot,id=Id,name=Name,type=Type,
+ buckets=Buckets,size=Size,memory=Memory} = EtsTable,
+ tr(
+ [td(href_proc_port(Pid)),
+ td(Slot),
+ td(Id),
+ td(Name),
+ td(Type),
+ td("ALIGN=right",Buckets),
+ td("ALIGN=right",Size),
+ td("ALIGN=right",Memory)]).
+
+timers_table(Timer) ->
+ #timer{pid=Pid,msg=Msg,time=Time}=Timer,
+ tr(
+ [td(href_proc_port(Pid)),
+ td(Msg),
+ td("ALIGN=right",Time)]).
+
+loaded_mods_table(#loaded_mod{mod=Mod,current_size=CS,old_size=OS}) ->
+ tr([td(href(["loaded_mod_details?mod=",Mod],Mod)),
+ td("ALIGN=right",CS),
+ td("ALIGN=right",OS)]).
+
+funs_table(Fu) ->
+ #fu{module=Module,uniq=Uniq,index=Index,address=Address,
+ native_address=NativeAddress,refc=Refc}=Fu,
+ tr(
+ [td(Module),
+ td("ALIGN=right",Uniq),
+ td("ALIGN=right",Index),
+ td(Address),
+ td(NativeAddress),
+ td("ALIGN=right",Refc)]).
diff --git a/lib/observer/test/Makefile b/lib/observer/test/Makefile
index e15bde7346..6073e6ea00 100644
--- a/lib/observer/test/Makefile
+++ b/lib/observer/test/Makefile
@@ -53,7 +53,7 @@ EBIN = .
make_emakefile:
$(ERL_TOP)/make/make_emakefile $(ERL_COMPILE_FLAGS) -o$(EBIN) \
- $(MODULES) >> $(EMAKEFILE)
+ $(MODULES) > $(EMAKEFILE)
tests debug opt: make_emakefile
cd $(ERL_TOP)/lib/test_server/src && \
diff --git a/lib/observer/test/crashdump_viewer_SUITE.erl b/lib/observer/test/crashdump_viewer_SUITE.erl
index 1a7e6f61fe..fdc4a2f1ff 100644
--- a/lib/observer/test/crashdump_viewer_SUITE.erl
+++ b/lib/observer/test/crashdump_viewer_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2003-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2003-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -68,7 +68,7 @@ init_per_suite(doc) ->
init_per_suite(Config) when is_list(Config) ->
Dog = ?t:timetrap(?default_timeout),
application:start(inets), % will be using the http client later
- http:set_options([{ipv6,disabled}]),
+ httpc:set_options([{ipfamily,inet6fb4}]),
DataDir = ?config(data_dir,Config),
Rels = [R || R <- [r12b,r13b], ?t:is_release_available(R)] ++ [current],
io:format("Creating crash dumps for the following releases: ~p", [Rels]),
@@ -112,7 +112,7 @@ start(Config) when is_list(Config) ->
undefined = whereis(crashdump_viewer_server),
undefined = whereis(web_tool),
Url = cdv_url(Port,"start_page"),
- {error,_} = http:request(get,{Url,[]},[],[]),
+ {error,_} = httpc:request(Url),
% exit(whereis(httpc_manager),kill),
?t:timetrap_cancel(AngryDog),
ok.
@@ -246,7 +246,7 @@ cdv_url(Port,Link) ->
"http://localhost:" ++ Port ++ "/cdv_erl/crashdump_viewer/" ++ Link.
request_sync(Method,HTTPReqCont) ->
- case http:request(Method,
+ case httpc:request(Method,
HTTPReqCont,
[{timeout,30000}],
[{full_result, false}]) of
@@ -254,13 +254,13 @@ request_sync(Method,HTTPReqCont) ->
Html;
{ok,{Code,Html}} ->
io:format("~s\n", [Html]),
- io:format("Received ~w from http:request(...) with\nMethod=~w\n"
+ io:format("Received ~w from httpc:request(...) with\nMethod=~w\n"
"HTTPReqCont=~p\n",
[Code,Method,HTTPReqCont]),
?t:fail();
Other ->
io:format(
- "Received ~w from http:request(...) with\nMethod=~w\n"
+ "Received ~w from httpc:request(...) with\nMethod=~w\n"
"HTTPReqCont=~p\n",
[Other,Method,HTTPReqCont]),
?t:fail()
@@ -414,16 +414,17 @@ special(Port,File) ->
_ ->
ok
end;
- ".250atoms" ->
- Html1 = contents(Port,"atoms"),
- NextLink1 = next_link(Html1),
- "Atoms" = title(Html1),
- Html2 = contents(Port,NextLink1),
- NextLink2 = next_link(Html2),
- "Atoms" = title(Html2),
- Html3 = contents(Port,NextLink2),
- "" = next_link(Html3),
- "Atoms" = title(Html3);
+ %%! No longer needed - all atoms are shown on one page!!
+ %% ".250atoms" ->
+ %% Html1 = contents(Port,"atoms"),
+ %% NextLink1 = next_link(Html1),
+ %% "Atoms" = title(Html1),
+ %% Html2 = contents(Port,NextLink1),
+ %% NextLink2 = next_link(Html2),
+ %% "Atoms" = title(Html2),
+ %% Html3 = contents(Port,NextLink2),
+ %% "" = next_link(Html3),
+ %% "Atoms" = title(Html3);
_ ->
ok
end,
@@ -496,27 +497,27 @@ expand_binary_link(Html) ->
end.
-next_link(Html) ->
- case Html of
- "<A HREF=\"./next?pos=" ++ Rest ->
- "next?pos=" ++ string:sub_word(Rest,1,$");
- [_H|T] ->
- next_link(T);
- [] ->
- []
- end.
+%% next_link(Html) ->
+%% case Html of
+%% "<A HREF=\"./next?pos=" ++ Rest ->
+%% "next?pos=" ++ string:sub_word(Rest,1,$");
+%% [_H|T] ->
+%% next_link(T);
+%% [] ->
+%% []
+%% end.
toggle_menu(Port) ->
- Html = contents(Port,"toggle?index=10"),
+ Html = contents(Port,"toggle?index=4"),
check_toggle(Html).
check_toggle(Html) ->
case Html of
- "<A HREF=\"./toggle?index=10\"><IMG SRC=\"/crashdump_viewer/collapsd.gif\"" ++ _ ->
+ "<A HREF=\"./toggle?index=4\"><IMG SRC=\"/crashdump_viewer/collapsd.gif\"" ++ _ ->
collapsed;
- "<A HREF=\"./toggle?index=10\"><IMG SRC=\"/crashdump_viewer/exploded.gif\"" ++ _ ->
+ "<A HREF=\"./toggle?index=4\"><IMG SRC=\"/crashdump_viewer/exploded.gif\"" ++ _ ->
exploded;
[_H|T] ->
check_toggle(T)
@@ -543,10 +544,10 @@ expand_link(Html) ->
port_details(Port) ->
- Port1 = contents(Port,"ports?port=Port<0.1>"),
+ Port1 = contents(Port,"port?port=Port<0.1>"),
"#Port<0.1>" = title(Port1),
- Port0 = contents(Port,"ports?port=Port<0.0>"),
+ Port0 = contents(Port,"port?port=Port<0.0>"),
"Could not find port: #Port<0.0>" = title(Port0).
is_truncated(File) ->
@@ -668,7 +669,7 @@ rename(From,To) ->
end.
check_complete(File) ->
- check_complete1(File,5).
+ check_complete1(File,10).
check_complete1(_File,0) ->
{error,enoent};
diff --git a/lib/odbc/c_src/odbcserver.c b/lib/odbc/c_src/odbcserver.c
index 077d78bfe5..d61ce940c3 100644
--- a/lib/odbc/c_src/odbcserver.c
+++ b/lib/odbc/c_src/odbcserver.c
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 1999-2010. All Rights Reserved.
+ * Copyright Ericsson AB 1999-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -472,7 +472,7 @@ static db_result_msg db_connect(byte *args, db_state *state)
&stringlength2ptr, SQL_DRIVER_NOPROMPT);
if (!sql_success(result)) {
- diagnos = get_diagnos(SQL_HANDLE_STMT, statement_handle(state));
+ diagnos = get_diagnos(SQL_HANDLE_DBC, connection_handle(state));
strcat((char *)diagnos.error_msg,
" Connection to database failed.");
msg = encode_error_message(diagnos.error_msg);
diff --git a/lib/odbc/src/odbc.appup.src b/lib/odbc/src/odbc.appup.src
index f1a370d925..2a6667ccd3 100644
--- a/lib/odbc/src/odbc.appup.src
+++ b/lib/odbc/src/odbc.appup.src
@@ -1,8 +1,8 @@
%% -*- erlang -*-
{"%VSN%",
[
- {"2.10.8", [{restart_application, ssl}]}
+ {"2.10.9", [{restart_application, ssl}]}
],
[
- {"2.10.8", [{restart_application, ssl}]}
+ {"2.10.9", [{restart_application, ssl}]}
]}.
diff --git a/lib/odbc/vsn.mk b/lib/odbc/vsn.mk
index aacf3924db..42a51be33e 100644
--- a/lib/odbc/vsn.mk
+++ b/lib/odbc/vsn.mk
@@ -1 +1 @@
-ODBC_VSN = 2.10.9
+ODBC_VSN = 2.10.10
diff --git a/lib/orber/doc/src/notes.xml b/lib/orber/doc/src/notes.xml
index ba16682f0b..589123ef73 100644
--- a/lib/orber/doc/src/notes.xml
+++ b/lib/orber/doc/src/notes.xml
@@ -4,7 +4,7 @@
<chapter>
<header>
<copyright>
- <year>1997</year><year>2010</year>
+ <year>1997</year><year>2011</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -33,6 +33,22 @@
</header>
<section>
+ <title>Orber 3.6.20</title>
+
+ <section>
+ <title>Improvements and New Features</title>
+ <list type="bulleted">
+ <item>
+ <p>
+ Eliminated Dialyzer warnings when using exit or throw.</p>
+ <p>
+ Own Id: OTP-9050 Aux Id:</p>
+ </item>
+ </list>
+ </section>
+ </section>
+
+ <section>
<title>Orber 3.6.19</title>
<section>
diff --git a/lib/orber/include/ifr_types.hrl b/lib/orber/include/ifr_types.hrl
index 144ec7f8a1..324b32bd4f 100644
--- a/lib/orber/include/ifr_types.hrl
+++ b/lib/orber/include/ifr_types.hrl
@@ -1,9 +1,9 @@
%%--------------------------------------------------------------------
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
diff --git a/lib/orber/src/corba.erl b/lib/orber/src/corba.erl
index ea1363742c..ecec768544 100644
--- a/lib/orber/src/corba.erl
+++ b/lib/orber/src/corba.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -620,6 +620,8 @@ get_pid(Objkey) ->
%% Returns : Throws the exception.
%% Description:
%%----------------------------------------------------------------------
+%% To avoid dialyzer warnings due to the use of exit/throw.
+-spec raise(term()) -> no_return().
raise(E) ->
throw({'EXCEPTION', E}).
@@ -629,6 +631,8 @@ raise(E) ->
%% Returns : Throws the exception.
%% Description:
%%----------------------------------------------------------------------
+%% To avoid dialyzer warnings due to the use of exit/throw.
+-spec raise_with_state(term(), term()) -> no_return().
raise_with_state(E, State) ->
throw({reply, {'EXCEPTION', E}, State}).
diff --git a/lib/orber/src/orber.app.src b/lib/orber/src/orber.app.src
index fe911d65a4..88df4162b6 100644
--- a/lib/orber/src/orber.app.src
+++ b/lib/orber/src/orber.app.src
@@ -101,7 +101,7 @@
orber_iiop_insup, orber_init, orber_reqno,
orber_objkeyserver, orber_iiop_socketsup,
orber_iiop_pm, orber_env]},
- {applications, [stdlib, kernel]},
+ {applications, [stdlib, kernel, mnesia]},
{env, []},
{mod, {orber, []}}
]}.
diff --git a/lib/orber/src/orber.erl b/lib/orber/src/orber.erl
index c3d37ad1fb..665b3cb383 100644
--- a/lib/orber/src/orber.erl
+++ b/lib/orber/src/orber.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -1027,12 +1027,18 @@ remove_node(Node) when is_atom(Node) ->
remove_tables(Tables, Node) ->
- remove_tables(Tables, Node, []).
+ case remove_tables(Tables, Node, []) of
+ ok ->
+ ok;
+ {error, Node, Failed} ->
+ ?EFORMAT("orber:remove_node(~p) failed. Unable to remove table(s): ~p",
+ [Node, Failed])
+ end.
-remove_tables([], _, []) -> ok;
+remove_tables([], _, []) ->
+ ok;
remove_tables([], Node, Failed) ->
- ?EFORMAT("orber:remove_node(~p) failed. Unable to remove table(s): ~p",
- [Node, Failed]);
+ {error, Node, Failed};
remove_tables([T1|Trest], Node, Failed) ->
case mnesia:del_table_copy(T1, Node) of
{atomic, ok} ->
@@ -1041,8 +1047,6 @@ remove_tables([T1|Trest], Node, Failed) ->
remove_tables(Trest, Node, [{T1, Reason}|Failed])
end.
-
-
%%-----------------------------------------------------------------
%% Internal interface functions
%%-----------------------------------------------------------------
diff --git a/lib/orber/src/orber_socket.erl b/lib/orber/src/orber_socket.erl
index 84ed193ebb..ec2cf8f42a 100644
--- a/lib/orber/src/orber_socket.erl
+++ b/lib/orber/src/orber_socket.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -37,7 +37,7 @@
%%-----------------------------------------------------------------
-export([start/0, connect/4, listen/3, listen/4, accept/2, accept/3, write/3,
controlling_process/3, close/2, peername/2, sockname/2,
- peerdata/2, peercert/2, peercert/3, sockdata/2, setopts/3,
+ peerdata/2, peercert/2, sockdata/2, setopts/3,
clear/2, shutdown/3, post_accept/2, post_accept/3]).
%%-----------------------------------------------------------------
@@ -366,14 +366,6 @@ peercert(Type, _Socket) ->
[?LINE, Type], ?DEBUG_LEVEL),
{error, ebadsocket}.
-peercert(ssl, Socket, Opts) ->
- ssl:peercert(Socket, Opts);
-peercert(Type, _Socket, Opts) ->
- orber:dbg("[~p] orber_socket:peercert(~p, ~p);~n"
- "Only available for SSL sockets.",
- [?LINE, Type, Opts], ?DEBUG_LEVEL),
- {error, ebadsocket}.
-
%%-----------------------------------------------------------------
%% Get peerdata
%%
diff --git a/lib/orber/test/csiv2_SUITE.erl b/lib/orber/test/csiv2_SUITE.erl
index e7c79b9e84..95cd8c56b3 100644
--- a/lib/orber/test/csiv2_SUITE.erl
+++ b/lib/orber/test/csiv2_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -675,8 +675,8 @@ ssl_server_peercert_api(_Config) ->
{ok, Socket} =
?match({ok, _}, fake_client_ORB(ssl, ServerHost, ServerPort, SSLOptions)),
{ok, _PeerCert} = ?match({ok, _}, orber_socket:peercert(ssl, Socket)),
- ?match({ok, {rdnSequence, _}}, orber_socket:peercert(ssl, Socket, [pkix, subject])),
- ?match({ok, {rdnSequence, _}}, orber_socket:peercert(ssl, Socket, [ssl, subject])),
+%% ?match({ok, {rdnSequence, _}}, orber_socket:peercert(ssl, Socket, [pkix, subject])),
+%% ?match({ok, {rdnSequence, _}}, orber_socket:peercert(ssl, Socket, [ssl, subject])),
% ?match({ok, #'Certificate'{}},
% 'OrberCSIv2':decode('Certificate', PeerCert)),
destroy_fake_ORB(ssl, Socket),
@@ -715,8 +715,8 @@ ssl_client_peercert_api(_Config) ->
?match(ok, ssl:ssl_accept(Socket)),
{ok, _PeerCert} = ?match({ok, _}, orber_socket:peercert(ssl, Socket)),
- ?match({ok, {rdnSequence, _}}, orber_socket:peercert(ssl, Socket, [pkix, subject])),
- ?match({ok, {rdnSequence, _}}, orber_socket:peercert(ssl, Socket, [ssl, subject])),
+%% ?match({ok, {rdnSequence, _}}, orber_socket:peercert(ssl, Socket, [pkix, subject])),
+%% ?match({ok, {rdnSequence, _}}, orber_socket:peercert(ssl, Socket, [ssl, subject])),
% ?match({ok, #'Certificate'{}},
% 'OrberCSIv2':decode('Certificate', PeerCert)),
ssl:close(Socket),
diff --git a/lib/orber/test/multi_ORB_SUITE.erl b/lib/orber/test/multi_ORB_SUITE.erl
index 6ac514eb77..608fb23f3e 100644
--- a/lib/orber/test/multi_ORB_SUITE.erl
+++ b/lib/orber/test/multi_ORB_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -1391,7 +1391,7 @@ light_orber2_api(_Config) ->
LocalHost = net_adm:localhost(),
{ok, Node, _Host} =
?match({ok,_,_}, orber_test_lib:js_node([],
- {lightweigth, ["iiop://"++LocalHost++":"++integer_to_list(orber:iiop_port())]})),
+ {lightweight, ["iiop://"++LocalHost++":"++integer_to_list(orber:iiop_port())]})),
?match(ok, orber:info(io)),
?match([_], orber_test_lib:remote_apply(Node, orber_env, get_lightweight_nodes,[])),
diff --git a/lib/orber/test/orber_test_lib.erl b/lib/orber/test/orber_test_lib.erl
index 5053a5fddc..ffc13d0e3c 100644
--- a/lib/orber/test/orber_test_lib.erl
+++ b/lib/orber/test/orber_test_lib.erl
@@ -301,9 +301,11 @@ start_ssl(true, Node) ->
start_ssl(_, _) ->
ok.
-start_orber({lightweigth, Options}, Node) ->
+start_orber({lightweight, Options}, Node) ->
+ ok = rpc:call(Node, mnesia, start, []),
ok = rpc:call(Node, orber, start_lightweight, [Options]);
start_orber(lightweight, Node) ->
+ ok = rpc:call(Node, mnesia, start, []),
ok = rpc:call(Node, orber, start_lightweight, []);
start_orber(_, Node) ->
ok = rpc:call(Node, orber, jump_start, []).
diff --git a/lib/orber/vsn.mk b/lib/orber/vsn.mk
index b0c5a253a2..5f17cda229 100644
--- a/lib/orber/vsn.mk
+++ b/lib/orber/vsn.mk
@@ -1 +1,3 @@
-ORBER_VSN = 3.6.19
+
+ORBER_VSN = 3.6.20
+
diff --git a/lib/os_mon/test/disksup_SUITE.erl b/lib/os_mon/test/disksup_SUITE.erl
index 6e015ef74a..a3c6c7782d 100644
--- a/lib/os_mon/test/disksup_SUITE.erl
+++ b/lib/os_mon/test/disksup_SUITE.erl
@@ -41,10 +41,16 @@ end_per_suite(Config) when is_list(Config) ->
?line ok = application:stop(os_mon),
Config.
+init_per_testcase(unavailable, Config) ->
+ terminate(Config),
+ init_per_testcase(dummy, Config);
init_per_testcase(_Case, Config) ->
Dog = ?t:timetrap(?default_timeout),
[{watchdog,Dog} | Config].
+end_per_testcase(unavailable, Config) ->
+ restart(Config),
+ end_per_testcase(dummy, Config);
end_per_testcase(_Case, Config) ->
Dog = ?config(watchdog, Config),
?t:timetrap_cancel(Dog),
diff --git a/lib/percept/src/egd.erl b/lib/percept/src/egd.erl
index 4fb5b6c46a..63e5c30572 100644
--- a/lib/percept/src/egd.erl
+++ b/lib/percept/src/egd.erl
@@ -42,6 +42,7 @@
%%==========================================================================
%% @type egd_image()
+%% @type font()
%% @type point() = {integer(), integer()}
%% @type color()
%% @type render_option() = {render_engine, opaque} | {render_engine, alpha}
diff --git a/lib/public_key/doc/src/public_key.xml b/lib/public_key/doc/src/public_key.xml
index 91e058f74e..81aedaea56 100644
--- a/lib/public_key/doc/src/public_key.xml
+++ b/lib/public_key/doc/src/public_key.xml
@@ -5,7 +5,7 @@
<header>
<copyright>
<year>2008</year>
- <year>2008</year>
+ <year>2011</year>
<holder>Ericsson AB, All Rights Reserved</holder>
</copyright>
<legalnotice>
@@ -258,7 +258,7 @@
</func>
<func>
- <name> pkix_decode_cert(Cert, otp|plain) -> #'Certificate'{} | #'OTPCertificate'{}</name>
+ <name>pkix_decode_cert(Cert, otp|plain) -> #'Certificate'{} | #'OTPCertificate'{}</name>
<fsummary> Decodes an asn1 der encoded pkix x509 certificate.</fsummary>
<type>
<v>Cert = der_encoded()</v>
diff --git a/lib/public_key/src/public_key.appup.src b/lib/public_key/src/public_key.appup.src
index 6b6b76d0a5..c65ac7bc99 100644
--- a/lib/public_key/src/public_key.appup.src
+++ b/lib/public_key/src/public_key.appup.src
@@ -1,6 +1,13 @@
%% -*- erlang -*-
{"%VSN%",
[
+ {"0.10",
+ [
+ {update, public_key, soft, soft_purge, soft_purge, []},
+ {update, pubkey_pem, soft, soft_purge, soft_purge, []},
+ {update, pubkey_cert_records, soft, soft_purge, soft_purge, []}
+ ]
+ },
{"0.9",
[
{update, public_key, soft, soft_purge, soft_purge, []},
@@ -18,12 +25,19 @@
}
],
[
- {"0.9",
- [
+ {"0.10",
+ [
+ {update, public_key, soft, soft_purge, soft_purge, []},
+ {update, pubkey_pem, soft, soft_purge, soft_purge, []},
+ {update, pubkey_cert_records, soft, soft_purge, soft_purge, []}
+ ]
+ },
+ {"0.9",
+ [
{update, public_key, soft, soft_purge, soft_purge, []},
- {update, pubkey_cert, soft, soft_purge, soft_purge, []}
- ]
- },
+ {update, pubkey_cert, soft, soft_purge, soft_purge, []}
+ ]
+ },
{"0.8",
[
{update, 'OTP-PUB-KEY', soft, soft_purge, soft_purge, []},
@@ -32,5 +46,5 @@
{update, pubkey_cert_records, soft, soft_purge, soft_purge, []},
{update, pubkey_cert, soft, soft_purge, soft_purge, []}
]
- }
+ }
]}.
diff --git a/lib/public_key/test/pkits_SUITE.erl b/lib/public_key/test/pkits_SUITE.erl
index fd976cb2f3..660af4e8ab 100644
--- a/lib/public_key/test/pkits_SUITE.erl
+++ b/lib/public_key/test/pkits_SUITE.erl
@@ -585,10 +585,10 @@ end_per_testcase(_Func, Config) ->
Config.
init_per_suite(Config) ->
- case application:start(crypto) of
+ try crypto:start() of
ok ->
- Config;
- _ ->
+ Config
+ catch _:_ ->
{skip, "Crypto did not start"}
end.
diff --git a/lib/public_key/test/public_key_SUITE.erl b/lib/public_key/test/public_key_SUITE.erl
index d130196c15..6c482f9c30 100644
--- a/lib/public_key/test/public_key_SUITE.erl
+++ b/lib/public_key/test/public_key_SUITE.erl
@@ -41,10 +41,10 @@
%% variable, but should NOT alter/remove any existing entries.
%%--------------------------------------------------------------------
init_per_suite(Config) ->
- case application:start(crypto) of
+ try crypto:start() of
ok ->
- Config;
- _ ->
+ Config
+ catch _:_ ->
{skip, "Crypto did not start"}
end.
%%--------------------------------------------------------------------
diff --git a/lib/public_key/vsn.mk b/lib/public_key/vsn.mk
index 334b9d792e..c99fd6fee1 100644
--- a/lib/public_key/vsn.mk
+++ b/lib/public_key/vsn.mk
@@ -1 +1 @@
-PUBLIC_KEY_VSN = 0.10
+PUBLIC_KEY_VSN = 0.11
diff --git a/lib/runtime_tools/src/inviso_rt.erl b/lib/runtime_tools/src/inviso_rt.erl
index dfab70b42e..ac7ac2a584 100644
--- a/lib/runtime_tools/src/inviso_rt.erl
+++ b/lib/runtime_tools/src/inviso_rt.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -1422,7 +1422,17 @@ do_set_trace_patterns(Args,Flags) ->
do_set_trace_patterns_2([{M,F,Arity,MS}|Rest],Flags,Replies) -> % Option-less.
do_set_trace_patterns_2([{M,F,Arity,MS,[]}|Rest],Flags,Replies);
-do_set_trace_patterns_2([{M,F,Arity,MS,Opts}|Rest],Flags,Replies) when is_atom(M) ->
+do_set_trace_patterns_2(Mlist = [{M,F,Arity,MS,Opts}|Rest],Flags,Replies) when is_atom(M) ->
+    %% Sleep 100 ms for every 10th element in the list to let other
+    %% processes run, since this is a potentially heavy operation that
+    %% might otherwise leave the Erlang VM unresponsive for several seconds.
+    case length(Mlist) rem 10 of
+	0 ->
+	    timer:sleep(100);
+	_ ->
+	    ok
+    end,
case load_module_on_option(M,Opts) of
true -> % Already present, loaded or no option!
case catch erlang:trace_pattern({M,F,Arity},MS,Flags) of
@@ -1438,30 +1448,11 @@ do_set_trace_patterns_2([{M,F,Arity,MS,Opts}|Rest],Flags,Replies) when is_atom(M
do_set_trace_patterns_2(Rest,Flags,[0|Replies])
end;
do_set_trace_patterns_2([{M,F,Arity,MS,Opts}|Rest],Flags,Replies) when is_list(M) ->
- case check_pattern_parameters(void,F,Arity,MS) of % We don't want to repeat bad params.
- ok ->
- case inviso_rt_lib:expand_regexp(M,Opts) of % Get a list of real modulnames.
- Mods when is_list(Mods) ->
- MoreReplies=
- do_set_trace_patterns_2(lists:map(fun(Mod)->
- {Mod,F,Arity,MS,Opts}
- end,
- Mods),
- Flags,
- Replies),
- do_set_trace_patterns_2(Rest,Flags,MoreReplies);
- {error,Reason} ->
- do_set_trace_patterns_2(Rest,Flags,[{error,Reason}|Replies])
- end;
- error -> % Bad pattern parameters.
- do_set_trace_patterns_2(Rest,
- Flags,
- [{error,{bad_trace_args,{M,F,Arity,MS}}}|Replies])
- end;
+ do_set_trace_patterns_2([{{void,M},F,Arity,MS,Opts}|Rest],Flags,Replies);
do_set_trace_patterns_2([{{Dir,M},F,Arity,MS,Opts}|Rest],Flags,Replies)
when is_list(Dir),is_list(M) ->
- case check_pattern_parameters(void,F,Arity,MS) of % We don't want to repeat bad params.
- ok ->
+ case check_pattern_parameters('_',F,Arity,MS) of % We don't want to repeat bad params.
+ true ->
case inviso_rt_lib:expand_regexp(Dir,M,Opts) of % Get a list of real modulnames.
Mods when is_list(Mods) ->
MoreReplies=
@@ -1475,7 +1466,7 @@ do_set_trace_patterns_2([{{Dir,M},F,Arity,MS,Opts}|Rest],Flags,Replies)
{error,Reason} ->
do_set_trace_patterns_2(Rest,Flags,[{error,Reason}|Replies])
end;
- error -> % Bad pattern parameters.
+ false -> % Bad pattern parameters.
do_set_trace_patterns_2(Rest,
Flags,
[{error,{bad_trace_args,{M,F,Arity,MS}}}|Replies])
@@ -2174,21 +2165,20 @@ check_flags_2([Faulty|_],_Flags) -> {error,{bad_flag,Faulty}}.
%% the function is to avoid to get multiple error return values in the return
%% list for a pattern used together with a regexp expanded module name.
check_pattern_parameters(Mod,Func,Arity,MS) ->
- if
- (Mod=='_') and (Func=='_') and (Arity=='_') and
- (is_list(MS) or (MS==true) or (MS==false)) ->
- ok;
- (is_atom(Mod) and (Mod/='_')) and (Func=='_') and (Arity=='_') and
- (is_list(MS) or (MS==true) or (MS==false)) ->
- ok;
- (is_atom(Mod) and (Mod/='_')) and
- (is_atom(Func) and (Func/='_')) and
- ((Arity=='_') or is_integer(Arity)) and
- (is_list(MS) or (MS==true) or (MS==false)) ->
- ok;
- true ->
- error
- end.
+ MSresult = check_MS(MS),
+ MFAresult = check_MFA(Mod,Func,Arity),
+ MFAresult and MSresult.
+
+check_MS(MS) when is_list(MS) -> true;
+check_MS(true) -> true;
+check_MS(false) -> true;
+check_MS(_) -> false.
+
+check_MFA('_','_','_') -> true;
+check_MFA(Mod,'_','_') when is_atom(Mod) -> true;
+check_MFA(Mod,'_',A) when is_atom(Mod), is_integer(A) -> false;
+check_MFA(Mod,F,'_') when is_atom(Mod), is_atom(F) -> true;
+check_MFA(Mod,F,A) when is_atom(Mod), is_atom(F), is_integer(A) -> true;
+check_MFA(_,_,_) -> false.
+
%% -----------------------------------------------------------------------------
%% Help function finding out if Mod is loaded, and if not, if it can successfully
diff --git a/lib/sasl/doc/src/appup.xml b/lib/sasl/doc/src/appup.xml
index 5182889710..48d7b69885 100644
--- a/lib/sasl/doc/src/appup.xml
+++ b/lib/sasl/doc/src/appup.xml
@@ -174,11 +174,19 @@
<c>remove</c> and <c>purge</c>.</p>
<pre>
{add_application, Application}
+{add_application, Application, Type}
Application = atom()
+ Type = permanent | transient | temporary | load | none
</pre>
<p>Adding an application means that the modules defined by
the <c>modules</c> key in the <c>.app</c> file are loaded using
- <c>add_module</c>, then the application is started.</p>
+ <c>add_module</c>.</p>
+      <p><c>Type</c> defaults to <c>permanent</c> and specifies the start type
+      of the application. If <c>Type = permanent | transient | temporary</c>,
+      the application will be loaded and started in the corresponding way
+      (see <c>application(3)</c>). If <c>Type = load</c>, the application will
+      only be loaded. If <c>Type = none</c>, the application will be neither
+      loaded nor started, although the code for its modules will be loaded.</p>
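+      <p>As a purely illustrative example (the application name
+      <c>my_helper_app</c> is fictitious), an <c>.appup</c> file that loads
+      a new helper application at upgrade, without starting it, and removes
+      it again at downgrade, could look like this:</p>
+      <pre>
+{"2.0",
+ [{"1.0", [{add_application, my_helper_app, load}]}],
+ [{"1.0", [{remove_application, my_helper_app}]}]}.
+      </pre>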
<pre>
{remove_application, Application}
Application = atom()
diff --git a/lib/sasl/src/systools_rc.erl b/lib/sasl/src/systools_rc.erl
index 23d1a52b66..daadb79967 100644
--- a/lib/sasl/src/systools_rc.erl
+++ b/lib/sasl/src/systools_rc.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -34,7 +34,7 @@
%% {add_module, Mod, [Mod]}
%% {remove_module, Mod, PrePurge, PostPurge, [Mod]}
%% {restart_application, Appl}
-%% {add_application, Appl}
+%% {add_application, Appl, Type}
%% {remove_application, Appl}
%%
%% Low-level
@@ -109,6 +109,8 @@ expand_script([I|Script]) ->
{delete_module, Mod} ->
[{remove, {Mod, brutal_purge, brutal_purge}},
{purge, [Mod]}];
+ {add_application, Application} ->
+ {add_application, Application, permanent};
_ ->
I
end,
@@ -317,14 +319,18 @@ translate_independent_instrs(Before, After, Appls, PreAppls) ->
translate_application_instrs(Script, Appls, PreAppls) ->
%% io:format("Appls ~n~p~n",[Appls]),
L = lists:map(
- fun({add_application, Appl}) ->
+ fun({add_application, Appl, Type}) ->
case lists:keysearch(Appl, #application.name, Appls) of
{value, Application} ->
Mods =
remove_vsn(Application#application.modules),
+ ApplyL = case Type of
+ none -> [];
+ load -> [{apply, {application, load, [Appl]}}];
+ _ -> [{apply, {application, start, [Appl, Type]}}]
+ end,
[{add_module, M, []} || M <- Mods] ++
- [{apply, {application, start,
- [Appl, permanent]}}];
+ ApplyL;
false ->
throw({error, {no_such_application, Appl}})
end;
@@ -750,8 +756,9 @@ check_op({remove_module, Mod, PrePurge, PostPurge, Mods}) ->
lists:foreach(fun(M) -> check_mod(M) end, Mods);
check_op({remove_application, Appl}) ->
check_appl(Appl);
-check_op({add_application, Appl}) ->
- check_appl(Appl);
+check_op({add_application, Appl, Type}) ->
+ check_appl(Appl),
+ check_start_type(Type);
check_op({restart_application, Appl}) ->
check_appl(Appl);
check_op(restart) -> ok;
@@ -839,6 +846,13 @@ check_node(Node) -> throw({error, {bad_node, Node}}).
check_appl(Appl) when is_atom(Appl) -> ok;
check_appl(Appl) -> throw({error, {bad_application, Appl}}).
+check_start_type(none) -> ok;
+check_start_type(load) -> ok;
+check_start_type(temporary) -> ok;
+check_start_type(transient) -> ok;
+check_start_type(permanent) -> ok;
+check_start_type(T) -> throw({error, {bad_start_type, T}}).
+
check_func(Func) when is_atom(Func) -> ok;
check_func(Func) -> throw({error, {bad_func, Func}}).
diff --git a/lib/sasl/src/systools_relup.erl b/lib/sasl/src/systools_relup.erl
index 177d50be80..71bd3ca491 100644
--- a/lib/sasl/src/systools_relup.erl
+++ b/lib/sasl/src/systools_relup.erl
@@ -370,10 +370,10 @@ collect_appup_scripts(_, [], _, Ws, RUs) -> {RUs, Ws}.
%% ToApps = [#application]
%%
create_add_app_scripts(FromRel, ToRel, RU0s, W0s) ->
- AddedNs = [N || {N, _V, _T} <- ToRel#release.applications,
+ AddedNs = [{N, T} || {N, _V, T} <- ToRel#release.applications,
not lists:keymember(N, 1, FromRel#release.applications)],
%% io:format("Added apps: ~p~n", [AddedNs]),
- RUs = [[{add_application, N}] || N <- AddedNs],
+ RUs = [[{add_application, N, T}] || {N, T} <- AddedNs],
{RUs ++ RU0s, W0s}.
diff --git a/lib/snmp/doc/man1/.gitignore b/lib/snmp/doc/man1/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/snmp/doc/man1/.gitignore
diff --git a/lib/snmp/doc/src/Makefile b/lib/snmp/doc/src/Makefile
index e8d9efb148..70c2e1d09e 100644
--- a/lib/snmp/doc/src/Makefile
+++ b/lib/snmp/doc/src/Makefile
@@ -67,12 +67,15 @@ XML_OUTPUT = $(XML_FILES:%.xml=%.latex.xmls_output) \
INFO_FILE = ../../info
+#HTML_REF1_FILES = $(XML_REF1_FILES:%.xml=$(HTMLDIR)/%.html)
HTML_REF3_FILES = $(XML_REF3_FILES:%.xml=$(HTMLDIR)/%.html)
HTML_REF6_FILES = $(XML_REF6_FILES:%.xml=$(HTMLDIR)/%.html)
HTML_CHAP_FILES = $(XML_CHAPTER_FILES:%.xml=$(HTMLDIR)/%.html)
-EXTRA_FILES = summary.html.src \
+EXTRA_FILES = \
+ summary.html.src \
$(DEFAULT_HTML_FILES) \
+ $(HTML_REF1_FILES) \
$(HTML_REF3_FILES) \
$(HTML_REF6_FILES) \
$(HTML_CHAP_FILES)
@@ -80,6 +83,7 @@ EXTRA_FILES = summary.html.src \
MAN7DIR = $(DOCDIR)/man7
+MAN1_FILES = $(MAN1DIR)/snmpc.1
MAN3_FILES = $(XML_REF3_FILES:%.xml=$(MAN3DIR)/%.3)
MAN6_FILES = $(XML_REF6_FILES:%_app.xml=$(MAN6DIR)/%.6)
MAN7_FILES = $(MIB_FILES:$(MIBSDIR)/%.mib=$(MAN7DIR)/%.7)
@@ -95,6 +99,7 @@ else
TEX_FILES_BOOK = \
$(BOOK_FILES:%.xml=%.tex)
TEX_FILES_REF_MAN = \
+ $(XML_REF1_FILES:%.xml=%.tex) \
$(XML_REF3_FILES:%.xml=%.tex) \
$(XML_REF6_FILES:%.xml=%.tex) \
$(XML_APPLICATION_FILES:%.xml=%.tex)
@@ -169,7 +174,7 @@ ps: $(TOP_PS_FILE)
html: $(HTML_FILES) $(TOP_HTML_FILES) gifs
-html2: gifs $(TOP_HTML_FILES) $(HTML_FILES) $(HTML_REF3_FILES) $(HTML_REF6_FILES) $(HTML_CHAP_FILES)
+html2: gifs $(TOP_HTML_FILES) $(HTML_FILES) $(HTML_REF1_FILES) $(HTML_REF3_FILES) $(HTML_REF6_FILES) $(HTML_CHAP_FILES)
clean: clean_tex clean_html clean_man clean_docs
@@ -195,7 +200,9 @@ endif
$(INDEX_TARGET): $(INDEX_SRC) ../../vsn.mk # Create top make file
sed -e 's;%VSN%;$(VSN);' $< > $@ # inserting version number
-man: man3 man6 man7
+man: man1 man3 man6 man7
+
+man1: $(MAN1_FILES)
man3: $(MAN3_FILES)
@@ -213,6 +220,7 @@ clean_pdf:
clean_man:
@echo "cleaning man:"
+ rm -f $(MAN1DIR)/*
rm -f $(MAN3DIR)/*
rm -f $(MAN6DIR)/*
rm -f $(MAN7DIR)/*
@@ -233,6 +241,11 @@ $(MAN7DIR)/%.7: $(MIBSDIR)/%.mib
# ----------------------------------------------------
# Release Target
# ----------------------------------------------------
+
+$(MAN1DIR)/snmpc.1: snmpc_cmd.xml
+ date=`date +"%B %e %Y"`; \
+ xsltproc --output "$@" --stringparam company "Ericsson AB" --stringparam docgen "$(DOCGEN)" --stringparam gendate "$$date" --stringparam appname "$(APPLICATION)" --stringparam appver "$(VSN)" --xinclude -path $(DOCGEN)/priv/docbuilder_dtd -path $(DOCGEN)/priv/dtd_man_entities $(DOCGEN)/priv/xsl/db_man.xsl $<
+
include $(ERL_TOP)/make/otp_release_targets.mk
ifdef DOCSUPPORT
@@ -244,6 +257,8 @@ release_docs_spec: docs
$(INSTALL_DATA) $(HTMLDIR)/* \
$(RELSYSDIR)/doc/html
$(INSTALL_DATA) $(INFO_FILE) $(RELSYSDIR)
+ $(INSTALL_DIR) $(RELEASE_PATH)/man/man1
+ $(INSTALL_DATA) $(MAN1DIR)/* $(RELEASE_PATH)/man/man1
$(INSTALL_DIR) $(RELEASE_PATH)/man/man3
$(INSTALL_DATA) $(MAN3DIR)/* $(RELEASE_PATH)/man/man3
$(INSTALL_DIR) $(RELEASE_PATH)/man/man6
@@ -269,7 +284,9 @@ release_docs_spec: docs
$(INSTALL_DATA) $(GIF_FILES) $(EXTRA_FILES) $(HTML_FILES) \
$(RELSYSDIR)/doc/html
$(INSTALL_DATA) $(INFO_FILE) $(RELSYSDIR)
- $(INSTALL_DIR) $(RELEASE_PATH)/man/man3
+ $(INSTALL_DIR) $(RELEASE_PATH)/man/man1
+ $(INSTALL_DATA) $(MAN1_FILES) $(RELEASE_PATH)/man/man1
+	$(INSTALL_DIR) $(RELEASE_PATH)/man/man3
$(INSTALL_DATA) $(MAN3_FILES) $(RELEASE_PATH)/man/man3
$(INSTALL_DIR) $(RELEASE_PATH)/man/man6
$(INSTALL_DATA) $(MAN6_FILES) $(RELEASE_PATH)/man/man6
@@ -286,6 +303,10 @@ release_spec:
ifdef DOCSUPPORT
info: info_xml info_man info_html
+ @echo "MAN1DIR: $(MAN1DIR)"
+ @echo "MAN3DIR: $(MAN3DIR)"
+ @echo "MAN6DIR: $(MAN6DIR)"
+ @echo "MAN7DIR: $(MAN7DIR)"
else
info: info_xml info_man info_html info_tex
@echo "DVI2PS = $(DVI2PS)"
@@ -297,6 +318,7 @@ endif
info_man:
@echo "man files:"
+ @echo "MAN1_FILES = $(MAN1_FILES)"
@echo "MAN3_FILES = $(MAN3_FILES)"
@echo "MAN6_FILES = $(MAN6_FILES)"
@echo "MAN7_FILES = $(MAN7_FILES)"
@@ -305,6 +327,7 @@ info_man:
info_xml:
@echo "xml files:"
+# @echo "XML_REF1_FILES = $(XML_REF1_FILES)"
@echo "XML_REF3_FILES = $(XML_REF3_FILES)"
@echo "XML_REF6_FILES = $(XML_REF6_FILES)"
@echo "XML_PART_FILES = $(XML_PART_FILES)"
@@ -333,6 +356,7 @@ info_html:
@echo ""
@echo "DEFAULT_HTML_FILES = $(DEFAULT_HTML_FILES)"
@echo ""
+# @echo "HTML_REF1_FILES = $(HTML_REF1_FILES)"
@echo "HTML_REF3_FILES = $(HTML_REF3_FILES)"
@echo "HTML_REF6_FILES = $(HTML_REF6_FILES)"
@echo "HTML_CHAP_FILES = $(HTML_CHAP_FILES)"
diff --git a/lib/snmp/doc/src/depend.mk b/lib/snmp/doc/src/depend.mk
index bf9833274d..4538f744de 100644
--- a/lib/snmp/doc/src/depend.mk
+++ b/lib/snmp/doc/src/depend.mk
@@ -48,6 +48,7 @@ $(HTMLDIR)/ref_man.html: \
snmp_app.xml \
snmp.xml \
snmpc.xml \
+ snmpc_cmd.xml \
snmpa.xml \
snmpa_conf.xml \
snmpa_discovery_handler.xml \
diff --git a/lib/snmp/doc/src/files.mk b/lib/snmp/doc/src/files.mk
index 293fb52ce0..c906ba0cf2 100644
--- a/lib/snmp/doc/src/files.mk
+++ b/lib/snmp/doc/src/files.mk
@@ -23,6 +23,9 @@ XML_APPLICATION_FILES = \
XML_APP_REF3_FILES = \
snmp.xml
+XML_COMP_REF1_FILES = \
+ snmpc_cmd.xml
+
XML_COMP_REF3_FILES = \
snmpc.xml
@@ -62,6 +65,9 @@ XML_MANAGER_REF3_FILES = \
snmpm_network_interface_filter.xml \
snmpm_user.xml
+XML_REF1_FILES = \
+ $(XML_COMP_REF1_FILES)
+
XML_REF3_FILES = \
$(XML_APP_REF3_FILES) \
$(XML_COMP_REF3_FILES) \
@@ -98,12 +104,13 @@ XML_CHAPTER_FILES = \
BOOK_FILES = book.xml
-XML_FILES = $(BOOK_FILES) \
- $(XML_CHAPTER_FILES) \
- $(XML_PART_FILES) \
- $(XML_REF6_FILES) \
- $(XML_REF3_FILES) \
- $(XML_APPLICATION_FILES)
+XML_FILES = $(BOOK_FILES) \
+ $(XML_CHAPTER_FILES) \
+ $(XML_PART_FILES) \
+ $(XML_REF1_FILES) \
+ $(XML_REF3_FILES) \
+ $(XML_REF6_FILES) \
+ $(XML_APPLICATION_FILES)
GIF_FILES = book.gif \
getnext1.gif \
diff --git a/lib/snmp/doc/src/make.dep b/lib/snmp/doc/src/make.dep
index ccd01b9d3a..6d741ec1b9 100644
--- a/lib/snmp/doc/src/make.dep
+++ b/lib/snmp/doc/src/make.dep
@@ -52,7 +52,7 @@ book.dvi: book.tex part.tex ref_man.tex snmp.tex snmp_advanced_agent.tex \
snmpa_notification_delivery_info_receiver.tex \
snmpa_notification_filter.tex \
snmpa_supervisor.tex \
- snmpc.tex snmpm.tex snmpm_conf.tex snmpm_mpd.tex \
+ snmpc.tex snmpc_cmd.tex snmpm.tex snmpm_conf.tex snmpm_mpd.tex \
snmpm_network_interface.tex snmpm_network_interface_filter.tex \
snmpm_user.tex
diff --git a/lib/snmp/doc/src/notes.xml b/lib/snmp/doc/src/notes.xml
index 493e7aa092..2efeb8ae3f 100644
--- a/lib/snmp/doc/src/notes.xml
+++ b/lib/snmp/doc/src/notes.xml
@@ -1,10 +1,10 @@
-<?xml version="1.0" encoding="latin1" ?>
+<?xml version="1.0" encoding="iso-8859-1" ?>
<!DOCTYPE chapter SYSTEM "chapter.dtd">
<chapter>
<header>
<copyright>
- <year>1996</year><year>2010</year>
+ <year>1996</year><year>2011</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -32,23 +32,135 @@
<file>notes.xml</file>
</header>
- <section><title>SNMP 4.18</title>
+ <section>
+ <title>SNMP Development Toolkit 4.19</title>
+ <p>Version 4.19 supports code replacement in runtime from/to
+ version 4.18.</p>
+
+ <section>
+ <title>Improvements and new features</title>
+<!--
+ <p>-</p>
+-->
+ <list type="bulleted">
+ <item>
+          <p>[compiler] Added support for the <c>AGENT-CAPABILITIES</c>
+            macro and "full" support for the <c>MODULE-COMPLIANCE</c>
+            macro, both defined by the SNMPv2-CONF MIB.</p>
+          <p>The <c>reference</c> and <c>modules</c> parts are
+            stored in the <c>assocList</c> field of the mib-entry
+            (<c>me</c>) record, but only <em>if</em> the options
+            <c>agent_capabilities</c> and <c>module_compliance</c>
+            (respectively) are given to the compiler. </p>
+ <p>See <seealso marker="snmpc#compile">compile/2</seealso>
+ for more info. </p>
+          <p>For backward compatibility, the MIBs provided with
+ this application are <em>not</em> compiled with these
+ options. </p>
+ <p>Own Id: OTP-8966</p>
+ </item>
+
+ <item>
+ <p>[agent] Added a "complete" set of (snmp) table and variable
+ print functions, for each mib handled by the SNMP (agent)
+            application. This will be useful when debugging a running agent.</p>
+ <p>See
+ <seealso marker="snmpa#print_mib_info">print_mib_info/0</seealso>,
+ <seealso marker="snmpa#print_mib_tables">print_mib_tables/0</seealso>
+ and
+ <seealso marker="snmpa#print_mib_variables">print_mib_variables/0</seealso>
+ for more info. </p>
+ <p>Own Id: OTP-8977</p>
+ </item>
+
+ <item>
+ <p>[compiler] Added a MIB compiler (frontend) escript,
+ <c>snmpc</c>. </p>
+ <p>Own Id: OTP-9004</p>
+ </item>
+
+ </list>
+ </section>
+
+ <section>
+ <title>Fixed Bugs and Malfunctions</title>
+<!--
+ <p>-</p>
+-->
+ <list type="bulleted">
+ <item>
+ <p>[agent] For the table vacmAccessTable,
+ when performing the is_set_ok and set operation(s),
+            all values of the vacmAccessSecurityModel column were
+ incorrectly translated to <c>any</c>. </p>
+<!--
+that is when calling:
+snmp_view_basec_acm_mib:vacmAccessTable(set, RowIndex, Cols).
+-->
+ <p>Own Id: OTP-8980</p>
+ </item>
+
+ <item>
+ <p>[agent] When calling
+ <seealso marker="snmp_view_based_acm_mib#reconfigure">snmp_view_based_acm_mib:reconfigure/1</seealso>
+ on a running node, the table <c>vacmAccessTable</c> was not properly
+ cleaned.
+            This meant that if some entries in the vacm.conf file were removed
+            (compared to the <c>current</c> config),
+            while others were modified and/or added, the removed entries
+ would still exist in the <c>vacmAccessTable</c> table. </p>
+ <p>Own Id: OTP-8981</p>
+ <p>Aux Id: Seq 11750</p>
+ </item>
+
+ </list>
+ </section>
+
+
+ <section>
+ <title>Incompatibilities</title>
+ <p>-</p>
+ </section>
+
+ </section> <!-- 4.19 -->
+
+ <section>
+ <title>SNMP Development Toolkit 4.18</title>
+ <p>Version 4.18 supports code replacement in runtime from/to
+ version 4.17.1 and 4.17.</p>
+
+ <section>
+ <title>Improvements and new features</title>
+ <list type="bulleted">
+ <item>
+ <p>Prepared for R14B release.</p>
+ </item>
+ </list>
+ </section>
<section><title>Fixed Bugs and Malfunctions</title>
- <list>
+ <p>-</p>
+<!--
+ <list type="bulleted">
<item>
- <p>
- When the function FilterMod:accept_recv/2 returned false
- the SNMP agent stopped collecting messages from UDP.</p>
- <p>
- Own Id: OTP-8761</p>
+ <p>[agent] When the function FilterMod:accept_recv/2 returned false
+ the SNMP agent stopped collecting messages from UDP.</p>
+ <p>Own Id: OTP-8761</p>
</item>
</list>
+-->
</section>
-</section>
+ <section>
+ <title>Incompatibilities</title>
+ <p>-</p>
+ </section>
+ </section> <!-- 4.18 -->
+
-<section>
+ <section>
<title>SNMP Development Toolkit 4.17.1</title>
<p>Version 4.17.1 supports code replacement in runtime from/to
version 4.17, 4.16.2, 4.16.1, 4.16, 4.15, 4.14 and 4.13.5.</p>
@@ -63,7 +175,8 @@
<list type="bulleted">
<item>
<p>When the function FilterMod:accept_recv/2
- returned false the SNMP agent stopped collecting messages from UDP.</p>
+ returned false the SNMP agent stopped collecting
+ messages from UDP.</p>
<p>Own Id: OTP-8761</p>
</item>
</list>
diff --git a/lib/snmp/doc/src/ref_man.xml b/lib/snmp/doc/src/ref_man.xml
index 1ae5a8205b..815d21d33e 100644
--- a/lib/snmp/doc/src/ref_man.xml
+++ b/lib/snmp/doc/src/ref_man.xml
@@ -1,4 +1,4 @@
-<?xml version="1.0" encoding="latin1" ?>
+<?xml version="1.0" encoding="iso-8859-1" ?>
<!DOCTYPE application SYSTEM "application.dtd">
<application xmlns:xi="http://www.w3.org/2001/XInclude">
@@ -61,6 +61,7 @@
<xi:include href="snmp_user_based_sm_mib.xml"/>
<xi:include href="snmp_view_based_acm_mib.xml"/>
<xi:include href="snmpc.xml"/>
+ <xi:include href="snmpc_cmd.xml"/>
<xi:include href="snmpm.xml"/>
<xi:include href="snmpm_conf.xml"/>
<xi:include href="snmpm_mpd.xml"/>
diff --git a/lib/snmp/doc/src/snmp_agent_config_files.xml b/lib/snmp/doc/src/snmp_agent_config_files.xml
index 0bab563f87..b62269d506 100644
--- a/lib/snmp/doc/src/snmp_agent_config_files.xml
+++ b/lib/snmp/doc/src/snmp_agent_config_files.xml
@@ -4,7 +4,7 @@
<chapter>
<header>
<copyright>
- <year>1997</year><year>2009</year>
+ <year>1997</year><year>2011</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -178,11 +178,12 @@
<c>community.conf</c>. It must be present if the agent is
configured for SNMPv1 or SNMPv2c.
</p>
+ <p>An SNMP <em>community</em> is a relationship between an SNMP
+ agent and a set of SNMP managers that defines authentication, access
+ control and proxy characteristics. </p>
<p>The corresponding table is <c>snmpCommunityTable</c> in the
- SNMP-COMMUNITY-MIB.
- </p>
- <p>Each entry is a term:
- </p>
+ SNMP-COMMUNITY-MIB. </p>
+ <p>Each entry is a term: </p>
<p><c>{CommunityIndex, CommunityName, SecurityName, ContextName, TransportTag}.</c></p>
<list type="bulleted">
<item><c>CommunityIndex</c> is a non-empty string.
diff --git a/lib/snmp/doc/src/snmp_config.xml b/lib/snmp/doc/src/snmp_config.xml
index 769b908adc..4e41cb5037 100644
--- a/lib/snmp/doc/src/snmp_config.xml
+++ b/lib/snmp/doc/src/snmp_config.xml
@@ -1,4 +1,4 @@
-<?xml version="1.0" encoding="latin1" ?>
+<?xml version="1.0" encoding="iso-8859-1" ?>
<!DOCTYPE chapter SYSTEM "chapter.dtd">
<chapter>
@@ -1004,36 +1004,16 @@ ok
</taglist>
<p>Another usefull way to debug the agent is to pretty-print the content of
- some of the (MIB-) tables handled directly by the agent. This can be done
- for the following tables: </p>
- <taglist>
- <tag><c><![CDATA[snmpCommunityTable]]></c></tag>
- <item>
- <p><c><![CDATA[snmp_community_mib:snmpCommunityTable(print).]]></c></p>
- </item>
-
- <tag><c><![CDATA[snmpNotifyTable]]></c></tag>
- <item>
- <p><c><![CDATA[snmp_notification_mib:snmpNotifyTable(print).]]></c></p>
- </item>
-
- <tag><c><![CDATA[snmpTargetAddrTable]]></c></tag>
- <item>
- <p><c><![CDATA[snmp_target_mib:snmpTargetAddrTable(print).]]></c></p>
- </item>
-
- <tag><c><![CDATA[snmpTargetParamsTable]]></c></tag>
- <item>
- <p><c><![CDATA[snmp_target_mib:snmpTargetParamsTable(print).]]></c></p>
- </item>
-
- <tag><c><![CDATA[usmUserTable]]></c></tag>
- <item>
- <p><c><![CDATA[snmp_user_based_sm_mib:usmUserTable(print).]]></c></p>
- </item>
-
- </taglist>
-
+ all the tables and/or variables handled directly by the agent.
+ This can be done by simply calling: </p>
+ <p><c><![CDATA[snmpa:print_mib_info()]]></c></p>
+ <p>See
+ <seealso marker="snmpa#print_mib_info">print_mib_info/0</seealso>,
+ <seealso marker="snmpa#print_mib_tables">print_mib_tables/0</seealso>
+ or
+ <seealso marker="snmpa#print_mib_variables">print_mib_variables/0</seealso>
+ for more info. </p>
+
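+     <p>The more selective printouts are produced in the same way
+       (a minimal, illustrative example): </p>
+     <pre>
+snmpa:print_mib_tables().     %% pretty-print only the (snmp) tables
+snmpa:print_mib_variables().  %% pretty-print only the (snmp) variables
+     </pre>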
</section>
</chapter>
diff --git a/lib/snmp/doc/src/snmp_view_based_acm_mib.xml b/lib/snmp/doc/src/snmp_view_based_acm_mib.xml
index ffea256608..d595f6b93b 100644
--- a/lib/snmp/doc/src/snmp_view_based_acm_mib.xml
+++ b/lib/snmp/doc/src/snmp_view_based_acm_mib.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>1999</year><year>2009</year>
+ <year>1999</year><year>2010</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -38,7 +38,10 @@
SNMP-VIEW-BASED-ACM-MIB, and functions for configuring the database.
</p>
<p>The configuration files are described in the SNMP User's Manual.</p>
+
+ <marker id="configure"></marker>
</description>
+
<funcs>
<func>
<name>configure(ConfDir) -> void()</name>
@@ -48,27 +51,24 @@
</type>
<desc>
<p>This function is called from the supervisor at system
- start-up.
- </p>
+ start-up. </p>
<p>Inserts all data in the configuration files into the
- database and destroys all old rows with StorageType
- <c>volatile</c>. The rows created from the configuration file
- will have StorageType <c>nonVolatile</c>.
- </p>
- <p>All <c>snmp</c> counters are set to zero.
- </p>
+ database and destroys all old rows with StorageType
+ <c>volatile</c>. The rows created from the configuration file
+ will have StorageType <c>nonVolatile</c>. </p>
+ <p>All <c>snmp</c> counters are set to zero. </p>
<p>If an error is found in the configuration file, it is
- reported using the function <c>config_err/2</c> of the error
- report module, and the function fails with the reason
- <c>configuration_error</c>.
- </p>
+ reported using the function <c>config_err/2</c> of the error
+ report module, and the function fails with the reason
+ <c>configuration_error</c>. </p>
<p><c>ConfDir</c> is a string which points to the directory
- where the configuration files are found.
- </p>
- <p>The configuration file read is: <c>vacm.conf</c>.
- </p>
+ where the configuration files are found. </p>
+ <p>The configuration file read is: <c>vacm.conf</c>. </p>
+
+ <marker id="reconfigure"></marker>
</desc>
</func>
+
<func>
<name>reconfigure(ConfDir) -> void()</name>
<fsummary>Configure the SNMP-VIEW-BASED-ACM-MIB</fsummary>
@@ -88,18 +88,20 @@
<p>All <c>snmp</c> counters are set to zero.
</p>
<p>If an error is found in the configuration file, it is
- reported using the function <c>config_err/2</c> of the error
- report module, and the function fails with the reason
+ reported using the function
+ <seealso marker="snmpa_error#config_err">config_err/2</seealso>
+ of the error report module, and the function fails with the reason
<c>configuration_error</c>.
</p>
<p><c>ConfDir</c> is a string which points to the directory
where the configuration files are found.
</p>
- <p>The configuration file read is: <c>vacm.conf</c>.
- <marker id="add_sec2group"></marker>
-</p>
+ <p>The configuration file read is: <c>vacm.conf</c>. </p>
+
+ <marker id="add_sec2group"></marker>
</desc>
</func>
+
<func>
<name>add_sec2group(SecModel, SecName, GroupName) -> Ret</name>
<fsummary>Add one security to group definition</fsummary>
@@ -113,10 +115,13 @@
</type>
<desc>
<p>Adds a security to group definition to the agent config.
- Equivalent to one vacmSecurityToGroup-line in the <c>vacm.conf</c> file.</p>
+ Equivalent to one vacmSecurityToGroup-line in the
+ <c>vacm.conf</c> file.</p>
+
<marker id="delete_sec2group"></marker>
</desc>
</func>
+
<func>
<name>delete_sec2group(Key) -> Ret</name>
<fsummary>Delete one security to group definition</fsummary>
@@ -127,9 +132,11 @@
</type>
<desc>
<p>Delete a security to group definition from the agent config.</p>
+
<marker id="add_access"></marker>
</desc>
</func>
+
<func>
<name>add_access(GroupName, Prefix, SecModel, SecLevel, Match, RV, WV, NV) -> Ret</name>
<fsummary>Add one access definition</fsummary>
@@ -148,10 +155,12 @@
</type>
<desc>
<p>Adds a access definition to the agent config.
- Equivalent to one vacmAccess-line in the <c>vacm.conf</c> file.</p>
- <marker id="delete_access"></marker>
+ Equivalent to one vacmAccess-line in the <c>vacm.conf</c> file.</p>
+
+ <marker id="delete_access"></marker>
</desc>
</func>
+
<func>
<name>delete_access(Key) -> Ret</name>
<fsummary>Delete one access definition</fsummary>
@@ -161,10 +170,12 @@
<v>Reason = term()</v>
</type>
<desc>
- <p>Delete a access definition from the agent config.</p>
- <marker id="add_view_tree_fam"></marker>
+          <p>Delete an access definition from the agent config.</p>
+
+ <marker id="add_view_tree_fam"></marker>
</desc>
</func>
+
<func>
<name>add_view_tree_fam(ViewIndex, SubTree, Status, Mask) -> Ret</name>
<fsummary>Add one view tree family definition</fsummary>
@@ -178,11 +189,14 @@
<v>Reason = term()</v>
</type>
<desc>
- <p>Adds a view tree family definition to the agent config.
- Equivalent to one vacmViewTreeFamily-line in the <c>vacm.conf</c> file.</p>
- <marker id="delete_view_tree_fam"></marker>
+ <p>Adds a view tree family definition to the agent config.
+ Equivalent to one vacmViewTreeFamily-line in the
+ <c>vacm.conf</c> file.</p>
+
+ <marker id="delete_view_tree_fam"></marker>
</desc>
</func>
+
<func>
<name>delete_view_tree_fam(Key) -> Ret</name>
<fsummary>Delete one view tree family definition</fsummary>
diff --git a/lib/snmp/doc/src/snmpa.xml b/lib/snmp/doc/src/snmpa.xml
index f546724a78..1d680e80f5 100644
--- a/lib/snmp/doc/src/snmpa.xml
+++ b/lib/snmp/doc/src/snmpa.xml
@@ -1,10 +1,10 @@
-<?xml version="1.0" encoding="latin1" ?>
+<?xml version="1.0" encoding="iso-8859-1" ?>
<!DOCTYPE erlref SYSTEM "erlref.dtd">
<erlref>
<header>
<copyright>
- <year>2004</year><year>2010</year>
+ <year>2004</year><year>2011</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -1252,6 +1252,39 @@ snmp_agent:register_subagent(SA1,[1,2,3], SA2).
<p>This is a utility function, that can be useful when
e.g. debugging instrumentation functions.</p>
+ <marker id="print_mib_info"></marker>
+ </desc>
+ </func>
+
+ <func>
+ <name>print_mib_info() -> void()</name>
+ <fsummary>Print mib info</fsummary>
+ <desc>
+ <p>Prints the content of all the (snmp) tables and variables
+ for all mibs handled by the snmp agent. </p>
+
+ <marker id="print_mib_tables"></marker>
+ </desc>
+ </func>
+
+ <func>
+ <name>print_mib_tables() -> void()</name>
+ <fsummary>Print mib tables</fsummary>
+ <desc>
+ <p>Prints the content of all the (snmp) tables
+ for all mibs handled by the snmp agent. </p>
+
+ <marker id="print_mib_variables"></marker>
+ </desc>
+ </func>
+
+ <func>
+ <name>print_mib_variables() -> void()</name>
+ <fsummary>Print mib variables</fsummary>
+ <desc>
+ <p>Prints the content of all the (snmp) variables
+ for all mibs handled by the snmp agent. </p>
+
<marker id="verbosity"></marker>
</desc>
</func>
diff --git a/lib/snmp/doc/src/snmpa_error.xml b/lib/snmp/doc/src/snmpa_error.xml
index a7312e8b24..4dbafdfbb7 100644
--- a/lib/snmp/doc/src/snmpa_error.xml
+++ b/lib/snmp/doc/src/snmpa_error.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>2002</year><year>2009</year>
+ <year>2002</year><year>2010</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -51,6 +51,8 @@
<c>error_report_mod</c>, see
<seealso marker="snmp_config#configuration_params">configuration parameters</seealso>.
</p>
+
+ <marker id="config_err"></marker>
</description>
<funcs>
<func>
@@ -67,8 +69,11 @@
</p>
<p><c>Format</c> and <c>Args</c> are as in
<c>io:format(Format, Args)</c>.</p>
+
+ <marker id="user_err"></marker>
</desc>
</func>
+
<func>
<name>user_err(Format, Args) -> void()</name>
<fsummary>Called if a user related error occurs</fsummary>
diff --git a/lib/snmp/doc/src/snmpc.xml b/lib/snmp/doc/src/snmpc.xml
index fbd0950c69..771629492d 100644
--- a/lib/snmp/doc/src/snmpc.xml
+++ b/lib/snmp/doc/src/snmpc.xml
@@ -1,10 +1,10 @@
-<?xml version="1.0" encoding="latin1" ?>
+<?xml version="1.0" encoding="iso-8859-1" ?>
<!DOCTYPE erlref SYSTEM "erlref.dtd">
<erlref>
<header>
<copyright>
- <year>2004</year><year>2010</year>
+ <year>2004</year><year>2011</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -37,6 +37,7 @@
<p>The module <c>snmpc</c> contains interface functions to the
SNMP toolkit MIB compiler.</p>
+ <marker id="compile"></marker>
</description>
<funcs>
@@ -47,7 +48,7 @@
<type>
<v>File = string()</v>
<v>Options = [opt()]</v>
- <v>opt() = db() | relaxed_row_name_assign_check() | deprecated() | description() | reference() | group_check() | i() | il() | imports() | module() | module_identity() | outdir() | no_defs() | verbosity() | warnings()</v>
+ <v>opt() = db() | relaxed_row_name_assign_check() | deprecated() | description() | reference() | group_check() | i() | il() | imports() | module() | module_identity() | module_compliance() | agent_capabilities() | outdir() | no_defs() | verbosity() | warnings()</v>
<v>db() = {db, volatile|persistent|mnesia}</v>
<v>deprecated() = {deprecated, bool()}</v>
<v>relaxed_row_name_assign_check() = relaxed_row_name_assign_check</v>
@@ -59,6 +60,8 @@
<v>imports() = imports</v>
<v>module() = {module, atom()}</v>
<v>module_identity() = module_identity</v>
+ <v>module_compliance() = module_compliance</v>
+ <v>agent_capabilities() = agent_capabilities</v>
<v>no_defs() = no_defs</v>
<v>outdir() = {outdir, dir()}</v>
<v>verbosity() = {verbosity, silence|warning|info|log|debug|trace}</v>
@@ -77,6 +80,7 @@
be used for the default instrumentation. </p>
<p>Default is <c>volatile</c>. </p>
</item>
+
<item>
<p>The option <c>deprecated</c> specifies if a deprecated
definition should be kept or not. If the option is
@@ -84,6 +88,7 @@
definitions. </p>
<p>Default is <c>true</c>. </p>
</item>
+
<item>
<p>The option <c>relaxed_row_name_assign_check</c>, if present,
specifies that the row name assign check shall not be done
@@ -94,12 +99,14 @@
<p>By default it is not included, but if this option is present
it will be. </p>
</item>
+
<item>
<p>The option <c>description</c> specifies if the text
of the DESCRIPTION field will be included or not. </p>
<p>By default it is not included, but if this option is
present it will be. </p>
</item>
+
<item>
<p>The option <c>reference</c> specifies if the text
of the REFERENCE field, when found in a table definition,
@@ -108,18 +115,21 @@
        it will be. The reference text will be placed in the assocList
field of the mib-entry record (#me{}) for the table. </p>
</item>
+
<item>
<p>The option <c>group_check</c> specifies whether the
mib compiler should check the OBJECT-GROUP macro and
the NOTIFICATION-GROUP macro for correctness or not. </p>
<p>Default is <c>true</c>. </p>
</item>
+
<item>
<p>The option <c>i</c> specifies the path to search for
imported (compiled) MIB files. The directories should be
strings with a trailing directory delimiter. </p>
<p>Default is <c>["./"]</c>. </p>
</item>
+
<item>
<p>The option <c>il</c> (include_lib) also specifies a
list of directories to search for imported MIBs. It
@@ -132,11 +142,13 @@
<c><![CDATA[<snmp-home>/priv/mibs/]]></c>
are always listed last in the include path. </p>
</item>
+
<item>
<p>The option <c>imports</c>, if present, specifies that
the IMPORT statement of the MIB shall be included in the
compiled mib. </p>
</item>
+
<item>
<p>The option <c>module</c>, if present, specifies the
name of a module which implements all instrumentation
@@ -145,11 +157,29 @@
functions must be the same as the corresponding managed
object it implements. </p>
</item>
+
<item>
<p>The option <c>module_identity</c>, if present, specifies
that the info part of the MODULE-IDENTITY statement of the MIB
shall be included in the compiled mib. </p>
</item>
+
+ <item>
+	    <p>The option <c>module_compliance</c>, if present, specifies
+	      that the MODULE-COMPLIANCE statement of the MIB shall be included
+	      (with a mib-entry record) in the compiled mib. The mib-entry record
+	      of the MODULE-COMPLIANCE will store the <c>reference</c> and
+	      <c>module</c> parts of this info in its <c>assocList</c> field. </p>
+ </item>
+
+ <item>
+	    <p>The option <c>agent_capabilities</c>, if present, specifies
+	      that the AGENT-CAPABILITIES statement of the MIB shall be included
+	      (with a mib-entry record) in the compiled mib. The mib-entry record
+	      of the AGENT-CAPABILITIES will store the <c>reference</c> and
+	      <c>modules</c> parts of this info in its <c>assocList</c> field.
+	      See the example below the option list. </p>
+ </item>
+
<item>
<p>The option <c>no_defs</c>, if present, specifies
that if a managed object does not have an instrumentation
@@ -157,6 +187,7 @@
be used, instead this is reported as an error, and the
compilation aborts. </p>
</item>
+
<item>
<p>The option <c>verbosity</c> specifies the verbosity of
the SNMP mib compiler. I.e. if warning, info, log, debug
@@ -166,11 +197,13 @@
option <c>verbosity</c> is <c>silence</c>, warning messages will
still be shown. </p>
</item>
+
<item>
<p>The option <c>warnings</c> specifies whether warning
messages should be shown. </p>
<p>Default is <c>true</c>. </p>
</item>
+
</list>
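+	  <p>As an illustration (the MIB file name and include path are
+	    fictitious), a MIB can be compiled with both of these options,
+	    together with an include path, like this: </p>
+	  <pre>
+snmpc:compile("MY-MIB.mib",
+              [{i, ["./mibs/"]},
+               module_compliance,
+               agent_capabilities]).
+	  </pre>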
<p>The MIB compiler understands both SMIv1 and SMIv2 MIBs. It
uses the <c>MODULE-IDENTITY</c> statement to determine if the MIB is
@@ -185,8 +218,11 @@
have to be specified to <c>erlc</c> using the syntax
<c>+term</c>. See <c>erlc(1)</c> for details.
</p>
+
+ <marker id="is_consistent"></marker>
</desc>
</func>
+
<func>
<name>is_consistent(Mibs) -> ok | {error, Reason}</name>
<fsummary>Check for OID conflicts between MIBs</fsummary>
@@ -198,8 +234,11 @@
<p>Checks for multiple usage of object identifiers and traps
between MIBs.
</p>
+
+ <marker id="mib_to_hrl"></marker>
</desc>
</func>
+
<func>
<name>mib_to_hrl(MibName) -> ok | {error, Reason}</name>
<fsummary>Generate constants for the objects in the MIB</fsummary>
diff --git a/lib/snmp/doc/src/snmpc_cmd.xml b/lib/snmp/doc/src/snmpc_cmd.xml
new file mode 100644
index 0000000000..9358382a10
--- /dev/null
+++ b/lib/snmp/doc/src/snmpc_cmd.xml
@@ -0,0 +1,191 @@
+<?xml version="1.0" encoding="iso-8859-1" ?>
+<!DOCTYPE comref SYSTEM "comref.dtd">
+
+<comref>
+ <header>
+ <copyright>
+ <year>2011</year><year>2011</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>snmpc</title>
+ <prepared></prepared>
+ <responsible></responsible>
+ <docno></docno>
+ <approved></approved>
+ <checked></checked>
+ <date></date>
+ <rev></rev>
+ <file>snmpc_cmd.xml</file>
+ </header>
+ <com>snmpc(command)</com>
+ <comsummary>SNMP MIB compiler frontend</comsummary>
+ <description>
+ <p>The <c><![CDATA[snmpc]]></c> program provides a way to run
+ the SNMP MIB compiler of the Erlang system. </p>
+ </description>
+
+ <funcs>
+ <func>
+ <name>snmpc [options] file.mib | file.bin</name>
+ <fsummary>Compile MIBs</fsummary>
+ <desc>
+      <p><c><![CDATA[snmpc]]></c> compiles an SNMP MIB file;
+        see <seealso marker="snmpc#compile">compile/1,2</seealso> for
+        more info. </p>
+ <p>It can also be used to generate a header file (.hrl)
+ with definitions of Erlang constants for the objects in
+ the MIB, see
+ <seealso marker="snmpc#mib_to_hrl">mib_to_hrl/1</seealso>. </p>
+ </desc>
+ </func>
+ </funcs>
+
+ <section>
+ <title>Compiler options</title>
+ <p>The following options are supported (note that most of these relate
+ to the compilation of the MIB file):</p>
+ <taglist>
+ <tag>--help</tag>
+ <item>
+ <p>Prints help info.</p>
+ </item>
+
+ <tag>--version</tag>
+ <item>
+        <p>Prints the application version and the MIB format version.</p>
+ </item>
+
+ <tag>--verbosity <em>verbosity</em></tag>
+ <item>
+ <p>Print debug info. </p>
+ <p><c>verbosity</c> = <c>trace</c> | <c>debug</c> | <c>log</c> | <c>info</c> | <c>silence</c></p>
+ <p>Defaults to <c>silence</c>.</p>
+ </item>
+
+ <tag>--warnings</tag>
+ <item>
+ <p>Print warning messages. </p>
+ </item>
+
+ <tag>--o <em>directory</em></tag>
+ <item>
+ <p>The directory where the compiler should place the output files.
+ If not specified, output files will be placed in the current working
+ directory.</p>
+ </item>
+
+ <tag>--i <em>Directory</em></tag>
+ <item>
+ <p>Specifies the path to search for imported (compiled) MIB files.
+ By default, the current working directory is always included. </p>
+ <p>This option can be present several times, each time specifying
+ <em>one</em> path. </p>
+ </item>
+
+ <tag>--il <em>Directory</em></tag>
+ <item>
+        <p>This option (include_lib) specifies a list of directories to
+ search for imported MIBs. It assumes that the first element in
+ the directory name corresponds to an OTP application. The compiler
+ will find the current installed version. For example, the value
+ ["snmp/mibs/"] will be replaced by ["snmp-3.1.1/mibs/"] (or what
+ the current version may be in the system). The current directory
+ and the "snmp-home"/priv/mibs/ are always listed last in the
+ include path. </p>
+ </item>
+
+ <tag>--sgc</tag>
+ <item>
+        <p>This option (skip group check), if present, disables the
+          group check of the mib compiler.
+          That is, the OBJECT-GROUP and the NOTIFICATION-GROUP
+          macros will not be checked for correctness. </p>
+ </item>
+
+ <tag>--dep</tag>
+ <item>
+ <p>Keep deprecated definition(s).
+ If not specified the compiler will ignore deprecated definitions. </p>
+ </item>
+
+ <tag>--desc</tag>
+ <item>
+ <p>The DESCRIPTION field will be included. </p>
+ </item>
+
+ <tag>--ref</tag>
+ <item>
+ <p>The REFERENCE field will be included. </p>
+ </item>
+
+ <tag>--imp</tag>
+ <item>
+ <p>The IMPORTS field will be included. </p>
+ </item>
+
+ <tag>--mi</tag>
+ <item>
+ <p>The MODULE-IDENTITY field will be included. </p>
+ </item>
+
+ <tag>--mc</tag>
+ <item>
+ <p>The MODULE-COMPLIANCE field will be included. </p>
+ </item>
+
+ <tag>--ac</tag>
+ <item>
+ <p>The AGENT-CAPABILITIES field will be included. </p>
+ </item>
+
+ <tag>--mod <em>module</em></tag>
+ <item>
+ <p>The module which implements all the instrumentation functions. </p>
+ <p>The name of all instrumentation functions must be the
+ same as the corresponding managed object it implements. </p>
+ </item>
+
+ <tag>--nd</tag>
+ <item>
+        <p>The default instrumentation functions will <em>not</em> be
+          used if a managed object has no instrumentation function.
+          Instead this is reported as an error, and the compilation
+          aborts. </p>
+ </item>
+
+ <tag>--rrnac</tag>
+ <item>
+        <p>This option, if present, specifies that the row name assign check
+          shall not be done strictly according to the SMI (which allows only
+          the value 1). </p>
+        <p>With this option, all values greater than zero are allowed (>= 1).
+          This means that the error will be converted to a warning. </p>
+        <p>By default the strict check is performed; the relaxed check is
+          only used when this option is present. </p>
+ </item>
+
+ </taglist>
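+
+  <p>For example (the file name and directories below are only
+    placeholders), a MIB can be compiled with the MODULE-COMPLIANCE and
+    AGENT-CAPABILITIES statements included like this: </p>
+  <pre>
+snmpc --i ../mibs/ --o ../priv/mibs --mc --ac MY-MIB.mib
+  </pre>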
+ </section>
+
+ <section>
+ <title>SEE ALSO</title>
+ <p><seealso marker="erlc">erlc(1)</seealso>,
+ <seealso marker="compiler:compile">compile(3)</seealso>,
+ <seealso marker="snmp:snmpc">snmpc(3)</seealso></p>
+ </section>
+</comref>
+
diff --git a/lib/snmp/include/snmp_types.hrl b/lib/snmp/include/snmp_types.hrl
index 1fd6d153c9..4adb24361c 100644
--- a/lib/snmp/include/snmp_types.hrl
+++ b/lib/snmp/include/snmp_types.hrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -192,7 +192,7 @@
%%----------------------------------------------------------------------
-record(mib,
{misc = [],
- mib_format_version = "3.1",
+ mib_format_version = "3.2",
name = "",
module_identity, %% Not in SMIv1, and only with +module_identity
mes = [],
diff --git a/lib/snmp/mibs/Makefile.in b/lib/snmp/mibs/Makefile.in
index b85a8b0767..7aefb0ea34 100644
--- a/lib/snmp/mibs/Makefile.in
+++ b/lib/snmp/mibs/Makefile.in
@@ -2,7 +2,7 @@
# %CopyrightBegin%
#
-# Copyright Ericsson AB 1996-2009. All Rights Reserved.
+# Copyright Ericsson AB 1996-2011. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -108,20 +108,28 @@ TARGET_FILES = \
# FLAGS
# ----------------------------------------------------
-SNMP_FLAGS += -pa ../ebin +version
+SNMP_FLAGS += -pa ../ebin +version
ifneq ($(MIBS_VERBOSITY),)
-SNMP_FLAGS += +'{verbosity,$(MIBS_VERBOSITY)}'
+SNMP_FLAGS += +'{verbosity, $(MIBS_VERBOSITY)}'
endif
-ifneq ($(MIBS_REFERENCE),)
+ifeq ($(MIBS_REFERENCE),true)
SNMP_FLAGS += +reference
endif
-ifneq ($(MIBS_OPTIONS),)
+ifeq ($(MIBS_OPTIONS),true)
SNMP_FLAGS += +options
endif
+ifeq ($(MIBS_MC),true)
+SNMP_FLAGS += +module_compliance
+endif
+
+ifeq ($(MIBS_AC),true)
+SNMP_FLAGS += +agent_capabilities
+endif
+
# ----------------------------------------------------
# Targets
@@ -148,6 +156,14 @@ conf:
cd ..; $(MAKE) conf
info:
+ @echo "MIBS_REFERENCE = $(MIBS_REFERENCE)"
+ @echo ""
+ @echo "MIBS_OPTIONS = $(MIBS_OPTIONS)"
+ @echo ""
+ @echo "MIBS_MC = $(MIBS_MC)"
+ @echo ""
+ @echo "MIBS_AC = $(MIBS_AC)"
+ @echo ""
@echo "SNMP_FLAGS = $(SNMP_FLAGS)"
@echo ""
@echo "MIBS = $(MIBS)"
diff --git a/lib/snmp/src/agent/snmp_community_mib.erl b/lib/snmp/src/agent/snmp_community_mib.erl
index 8f0f4cad73..5644a43345 100644
--- a/lib/snmp/src/agent/snmp_community_mib.erl
+++ b/lib/snmp/src/agent/snmp_community_mib.erl
@@ -336,6 +336,8 @@ get_target_addr_ext_mms(TDomain, TAddress, Key) ->
get_target_addr_ext_mms(TDomain, TAddress, NextKey)
end
end.
+
+
%%-----------------------------------------------------------------
%% Instrumentation Functions
%%-----------------------------------------------------------------
@@ -347,7 +349,7 @@ snmpCommunityTable(print) ->
PrintRow =
fun(Prefix, Row) ->
lists:flatten(
- io_lib:format("~sIndex: ~p"
+ io_lib:format("~sIndex: ~p"
"~n~sName: ~p"
"~n~sSecurityName: ~p"
"~n~sContextEngineID: ~p"
diff --git a/lib/snmp/src/agent/snmp_framework_mib.erl b/lib/snmp/src/agent/snmp_framework_mib.erl
index d9bf7e8551..0d7866d94d 100644
--- a/lib/snmp/src/agent/snmp_framework_mib.erl
+++ b/lib/snmp/src/agent/snmp_framework_mib.erl
@@ -373,15 +373,27 @@ intAgentUDPPort(Op) ->
intAgentIpAddress(Op) ->
snmp_generic:variable_func(Op, db(intAgentIpAddress)).
+snmpEngineID(print) ->
+ VarAndValue = [{snmpEngineID, snmpEngineID(get)}],
+ snmpa_mib_lib:print_variables(VarAndValue);
snmpEngineID(Op) ->
snmp_generic:variable_func(Op, db(snmpEngineID)).
+snmpEngineMaxMessageSize(print) ->
+ VarAndValue = [{snmpEngineMaxMessageSize, snmpEngineMaxMessageSize(get)}],
+ snmpa_mib_lib:print_variables(VarAndValue);
snmpEngineMaxMessageSize(Op) ->
snmp_generic:variable_func(Op, db(snmpEngineMaxMessageSize)).
+snmpEngineBoots(print) ->
+ VarAndValue = [{snmpEngineBoots, snmpEngineBoots(get)}],
+ snmpa_mib_lib:print_variables(VarAndValue);
snmpEngineBoots(Op) ->
snmp_generic:variable_func(Op, db(snmpEngineBoots)).
+snmpEngineTime(print) ->
+ VarAndValue = [{snmpEngineTime, snmpEngineTime(get)}],
+ snmpa_mib_lib:print_variables(VarAndValue);
snmpEngineTime(get) ->
{value, get_engine_time()}.
diff --git a/lib/snmp/src/agent/snmp_standard_mib.erl b/lib/snmp/src/agent/snmp_standard_mib.erl
index 639172401d..b6834d278c 100644
--- a/lib/snmp/src/agent/snmp_standard_mib.erl
+++ b/lib/snmp/src/agent/snmp_standard_mib.erl
@@ -40,6 +40,28 @@
sys_object_id/1, sys_object_id/2, sys_or_table/3,
variable_func/1, variable_func/2,
inc/1, inc/2]).
+-export([sysDescr/1, sysContact/1, sysName/1, sysLocation/1,
+ sysServices/1, sysUpTime/1, snmpEnableAuthenTraps/1,
+ sysObjectID/1,
+ snmpInPkts/1, snmpOutPkts/1,
+ snmpInBadVersions/1,
+ snmpInBadCommunityNames/1, snmpInBadCommunityUses/1,
+ snmpInASNParseErrs/1,
+ snmpInTooBigs/1,
+ snmpInNoSuchNames/1, snmpInBadValues/1,
+ snmpInReadOnlys/1, snmpInGenErrs/1,
+ snmpInTotalReqVars/1, snmpInTotalSetVars/1,
+ snmpInGetRequests/1, snmpInSetRequests/1,
+ snmpInGetNexts/1,
+ snmpInGetResponses/1, snmpInTraps/1,
+ snmpOutTooBigs/1,
+ snmpOutNoSuchNames/1,
+ snmpOutBadValues/1,
+ snmpOutGenErrs/1,
+ snmpOutGetRequests/1, snmpOutSetRequests/1,
+ snmpOutGetNexts/1,
+ snmpOutGetResponses/1,
+ snmpOutTraps/1]).
-export([dummy/1, snmp_set_serial_no/1, snmp_set_serial_no/2]).
-export([add_agent_caps/2, del_agent_caps/1, get_agent_caps/0]).
-export([check_standard/1]).
@@ -202,18 +224,257 @@ variable_func(get, Name) ->
inc(Name) -> inc(Name, 1).
inc(Name, N) -> ets:update_counter(snmp_agent_table, Name, N).
+
+sysDescr(print) ->
+ VarAndValue = [{sysDescr, sysDescr(get)}],
+ snmpa_mib_lib:print_variables(VarAndValue);
+
+sysDescr(get) ->
+ VarDB = db(sysDescr),
+ snmp_generic:variable_get(VarDB).
+
+
+sysContact(print) ->
+ VarAndValue = [{sysContact, sysContact(get)}],
+ snmpa_mib_lib:print_variables(VarAndValue);
+
+sysContact(get) ->
+ VarDB = db(sysContact),
+ snmp_generic:variable_get(VarDB).
+
+
+sysName(print) ->
+ VarAndValue = [{sysName, sysName(get)}],
+ snmpa_mib_lib:print_variables(VarAndValue);
+
+sysName(get) ->
+ VarDB = db(sysName),
+ snmp_generic:variable_get(VarDB).
+
+
+sysLocation(print) ->
+ VarAndValue = [{sysLocation, sysLocation(get)}],
+ snmpa_mib_lib:print_variables(VarAndValue);
+
+sysLocation(get) ->
+ VarDB = db(sysLocation),
+ snmp_generic:variable_get(VarDB).
+
+
+sysServices(print) ->
+ VarAndValue = [{sysServices, sysServices(get)}],
+ snmpa_mib_lib:print_variables(VarAndValue);
+
+sysServices(get) ->
+ VarDB = db(sysServices),
+ snmp_generic:variable_get(VarDB).
+
+
+snmpInPkts(print) ->
+ gen_counter(print, snmpInPkts);
+snmpInPkts(get) ->
+ gen_counter(get, snmpInPkts).
+
+
+snmpOutPkts(print) ->
+ gen_counter(print, snmpOutPkts);
+snmpOutPkts(get) ->
+ gen_counter(get, snmpOutPkts).
+
+
+snmpInASNParseErrs(print) ->
+ gen_counter(print, snmpInASNParseErrs);
+snmpInASNParseErrs(get) ->
+ gen_counter(get, snmpInASNParseErrs).
+
+
+snmpInBadCommunityNames(print) ->
+ gen_counter(print, snmpInBadCommunityNames);
+snmpInBadCommunityNames(get) ->
+ gen_counter(get, snmpInBadCommunityNames).
+
+
+snmpInBadCommunityUses(print) ->
+ gen_counter(print, snmpInBadCommunityUses);
+
+snmpInBadCommunityUses(get) ->
+ gen_counter(get, snmpInBadCommunityUses).
+
+
+snmpInBadVersions(print) ->
+ gen_counter(print, snmpInBadVersions);
+snmpInBadVersions(get) ->
+ gen_counter(get, snmpInBadVersions).
+
+
+snmpInTooBigs(print) ->
+ gen_counter(print, snmpInTooBigs);
+snmpInTooBigs(get) ->
+ gen_counter(get, snmpInTooBigs).
+
+
+snmpInNoSuchNames(print) ->
+ gen_counter(print, snmpInNoSuchNames);
+snmpInNoSuchNames(get) ->
+ gen_counter(get, snmpInNoSuchNames).
+
+
+snmpInBadValues(print) ->
+ gen_counter(print, snmpInBadValues);
+snmpInBadValues(get) ->
+ gen_counter(get, snmpInBadValues).
+
+
+snmpInReadOnlys(print) ->
+ gen_counter(print, snmpInReadOnlys);
+snmpInReadOnlys(get) ->
+ gen_counter(get, snmpInReadOnlys).
+
+
+snmpInGenErrs(print) ->
+ gen_counter(print, snmpInGenErrs);
+snmpInGenErrs(get) ->
+ gen_counter(get, snmpInGenErrs).
+
+
+snmpInTotalReqVars(print) ->
+ gen_counter(print, snmpInTotalReqVars);
+snmpInTotalReqVars(get) ->
+ gen_counter(get, snmpInTotalReqVars).
+
+
+snmpInTotalSetVars(print) ->
+ gen_counter(print, snmpInTotalSetVars);
+snmpInTotalSetVars(get) ->
+ gen_counter(get, snmpInTotalSetVars).
+
+
+snmpInGetRequests(print) ->
+ gen_counter(print, snmpInGetRequests);
+snmpInGetRequests(get) ->
+ gen_counter(get, snmpInGetRequests).
+
+
+snmpInSetRequests(print) ->
+ gen_counter(print, snmpInSetRequests);
+snmpInSetRequests(get) ->
+ gen_counter(get, snmpInSetRequests).
+
+
+snmpInGetNexts(print) ->
+ gen_counter(print, snmpInGetNexts);
+snmpInGetNexts(get) ->
+ gen_counter(get, snmpInGetNexts).
+
+
+snmpInGetResponses(print) ->
+ gen_counter(print, snmpInGetResponses);
+snmpInGetResponses(get) ->
+ gen_counter(get, snmpInGetResponses).
+
+
+snmpInTraps(print) ->
+ gen_counter(print, snmpInTraps);
+snmpInTraps(get) ->
+ gen_counter(get, snmpInTraps).
+
+
+snmpOutTooBigs(print) ->
+ gen_counter(print, snmpOutTooBigs);
+snmpOutTooBigs(get) ->
+ gen_counter(get, snmpOutTooBigs).
+
+
+snmpOutNoSuchNames(print) ->
+ gen_counter(print, snmpOutNoSuchNames);
+snmpOutNoSuchNames(get) ->
+ gen_counter(get, snmpOutNoSuchNames).
+
+
+snmpOutBadValues(print) ->
+ gen_counter(print, snmpOutBadValues);
+snmpOutBadValues(get) ->
+ gen_counter(get, snmpOutBadValues).
+
+
+snmpOutGenErrs(print) ->
+ gen_counter(print, snmpOutGenErrs);
+snmpOutGenErrs(get) ->
+ gen_counter(get, snmpOutGenErrs).
+
+
+snmpOutGetRequests(print) ->
+ gen_counter(print, snmpOutGetRequests);
+snmpOutGetRequests(get) ->
+ gen_counter(get, snmpOutGetRequests).
+
+
+snmpOutSetRequests(print) ->
+ gen_counter(print, snmpOutSetRequests);
+snmpOutSetRequests(get) ->
+ gen_counter(get, snmpOutSetRequests).
+
+
+snmpOutGetNexts(print) ->
+ gen_counter(print, snmpOutGetNexts);
+snmpOutGetNexts(get) ->
+ gen_counter(get, snmpOutGetNexts).
+
+
+snmpOutGetResponses(print) ->
+ gen_counter(print, snmpOutGetResponses);
+snmpOutGetResponses(get) ->
+ gen_counter(get, snmpOutGetResponses).
+
+
+snmpOutTraps(print) ->
+ gen_counter(print, snmpOutTraps);
+snmpOutTraps(get) ->
+ gen_counter(get, snmpOutTraps).
+
+
+gen_counter(print, Counter) ->
+ Val = gen_counter(get, Counter),
+ VarAndValue = [{Counter, Val}],
+ snmpa_mib_lib:print_variables(VarAndValue);
+
+gen_counter(get, Counter) ->
+ variable_func(get, Counter).
+
+
%%-----------------------------------------------------------------
%% This is the instrumentation function for sysUpTime.
%%-----------------------------------------------------------------
+sysUpTime(print) ->
+ sys_up_time(print);
+sysUpTime(get) ->
+ sys_up_time(get).
+
sys_up_time() ->
snmpa:sys_up_time().
+sys_up_time(print) ->
+ VarAndValue = [{sysUpTime, sys_up_time(get)}],
+ snmpa_mib_lib:print_variables(VarAndValue);
+
sys_up_time(get) ->
{value, snmpa:sys_up_time()}.
+
%%-----------------------------------------------------------------
%% This is the instrumentation function for snmpEnableAuthenTraps
%%-----------------------------------------------------------------
+
+snmpEnableAuthenTraps(print) ->
+ snmp_enable_authen_traps(print);
+snmpEnableAuthenTraps(get) ->
+ snmp_enable_authen_traps(get).
+
+
+snmp_enable_authen_traps(print) ->
+ VarAndValue = [{snmpEnableAuthenTraps, snmp_enable_authen_traps(get)}],
+ snmpa_mib_lib:print_variables(VarAndValue);
+
snmp_enable_authen_traps(new) ->
snmp_generic:variable_func(new, db(snmpEnableAuthenTraps));
@@ -226,9 +487,19 @@ snmp_enable_authen_traps(get) ->
snmp_enable_authen_traps(set, NewVal) ->
snmp_generic:variable_func(set, NewVal, db(snmpEnableAuthenTraps)).
+
%%-----------------------------------------------------------------
-%% This is the instrumentation function for sysObjectId
+%% This is the instrumentation function for sysObjectID
%%-----------------------------------------------------------------
+sysObjectID(print) ->
+ sys_object_id(print);
+sysObjectID(get) ->
+ sys_object_id(get).
+
+sys_object_id(print) ->
+ VarAndValue = [{sysObjectID, sys_object_id(get)}],
+ snmpa_mib_lib:print_variables(VarAndValue);
+
sys_object_id(new) ->
snmp_generic:variable_func(new, db(sysObjectID));
@@ -241,6 +512,7 @@ sys_object_id(get) ->
sys_object_id(set, NewVal) ->
snmp_generic:variable_func(set, NewVal, db(sysObjectID)).
+
%%-----------------------------------------------------------------
%% This is a dummy instrumentation function for objects like
%% snmpTrapOID, that is accessible-for-notify, with different
@@ -249,6 +521,7 @@ sys_object_id(set, NewVal) ->
%%-----------------------------------------------------------------
dummy(_Op) -> ok.
+
%%-----------------------------------------------------------------
%% This is the instrumentation function for snmpSetSerialNo.
%% It is always volatile.
diff --git a/lib/snmp/src/agent/snmp_target_mib.erl b/lib/snmp/src/agent/snmp_target_mib.erl
index 3c32d1f59f..270a5fd5b6 100644
--- a/lib/snmp/src/agent/snmp_target_mib.erl
+++ b/lib/snmp/src/agent/snmp_target_mib.erl
@@ -511,6 +511,10 @@ set_target_engine_id(TargetAddrName, EngineId) ->
%%-----------------------------------------------------------------
%% Instrumentation Functions
%%-----------------------------------------------------------------
+snmpTargetSpinLock(print) ->
+ VarAndValue = [{snmpTargetSpinLock, snmpTargetSpinLock(get)}],
+ snmpa_mib_lib:print_variables(VarAndValue);
+
snmpTargetSpinLock(new) ->
snmp_generic:variable_func(new, {snmpTargetSpinLock, volatile}),
{A1,A2,A3} = erlang:now(),
@@ -591,12 +595,9 @@ snmpTargetAddrTable(print) ->
?'snmpTargetAddrRowStatus_active' -> active;
_ -> undefined
end,
- Prefix,
- element(?snmpTargetAddrEngineId, Row),
- Prefix,
- element(?snmpTargetAddrTMask, Row),
- Prefix,
- element(?snmpTargetAddrMMS, Row)]))
+ Prefix, element(?snmpTargetAddrEngineId, Row),
+ Prefix, element(?snmpTargetAddrTMask, Row),
+ Prefix, element(?snmpTargetAddrMMS, Row)]))
end,
snmpa_mib_lib:print_table(Table, DB, FOI, PrintRow);
%% Op == new | delete
diff --git a/lib/snmp/src/agent/snmp_user_based_sm_mib.erl b/lib/snmp/src/agent/snmp_user_based_sm_mib.erl
index f40bb1a5b9..69cebc858b 100644
--- a/lib/snmp/src/agent/snmp_user_based_sm_mib.erl
+++ b/lib/snmp/src/agent/snmp_user_based_sm_mib.erl
@@ -26,6 +26,12 @@
table_next/2,
is_engine_id_known/1, get_user/2, get_user_from_security_name/2,
mk_key_change/3, mk_key_change/5, extract_new_key/3, mk_random/1]).
+-export([usmStatsUnsupportedSecLevels/1,
+ usmStatsNotInTimeWindows/1,
+ usmStatsUnknownUserNames/1,
+ usmStatsUnknownEngineIDs/1,
+ usmStatsWrongDigests/1,
+ usmStatsDecryptionErrors/1]).
-export([add_user/1, add_user/13, delete_user/1]).
%% Internal
@@ -303,6 +309,54 @@ gc_tabs() ->
%%-----------------------------------------------------------------
%% Counter functions
%%-----------------------------------------------------------------
+
+usmStatsUnsupportedSecLevels(print) ->
+ VarAndValue = [{usmStatsUnsupportedSecLevels,
+ usmStatsUnsupportedSecLevels(get)}],
+ snmpa_mib_lib:print_variables(VarAndValue);
+usmStatsUnsupportedSecLevels(get) ->
+ get_counter(usmStatsUnsupportedSecLevels).
+
+usmStatsNotInTimeWindows(print) ->
+ VarAndValue = [{usmStatsNotInTimeWindows, usmStatsNotInTimeWindows(get)}],
+ snmpa_mib_lib:print_variables(VarAndValue);
+usmStatsNotInTimeWindows(get) ->
+ get_counter(usmStatsNotInTimeWindows).
+
+usmStatsUnknownUserNames(print) ->
+ VarAndValue = [{usmStatsUnknownUserNames, usmStatsUnknownUserNames(get)}],
+ snmpa_mib_lib:print_variables(VarAndValue);
+usmStatsUnknownUserNames(get) ->
+ get_counter(usmStatsUnknownUserNames).
+
+usmStatsUnknownEngineIDs(print) ->
+ VarAndValue = [{usmStatsUnknownEngineIDs, usmStatsUnknownEngineIDs(get)}],
+ snmpa_mib_lib:print_variables(VarAndValue);
+usmStatsUnknownEngineIDs(get) ->
+ get_counter(usmStatsUnknownEngineIDs).
+
+usmStatsWrongDigests(print) ->
+ VarAndValue = [{usmStatsWrongDigests, usmStatsWrongDigests(get)}],
+ snmpa_mib_lib:print_variables(VarAndValue);
+usmStatsWrongDigests(get) ->
+ get_counter(usmStatsWrongDigests).
+
+usmStatsDecryptionErrors(print) ->
+ VarAndValue = [{usmStatsDecryptionErrors, usmStatsDecryptionErrors(get)}],
+ snmpa_mib_lib:print_variables(VarAndValue);
+usmStatsDecryptionErrors(get) ->
+ get_counter(usmStatsDecryptionErrors).
+
+
+get_counter(Name) ->
+ case (catch ets:lookup(snmp_agent_table, Name)) of
+ [{_, Val}] ->
+ {value, Val};
+ _ ->
+ genErr
+ end.
+
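A small consumer sketch for the usmStats accessors above (assumption: the snmp_agent_table ets table is created by the agent and holds {Name, Value} entries, which is what get_counter/1 relies on; the wrapper name is hypothetical):

    wrong_digest_count() ->
        case snmp_user_based_sm_mib:usmStatsWrongDigests(get) of
            {value, N} ->
                {ok, N};
            genErr ->
                %% Counter (or the table itself) not available yet.
                {error, not_available}
        end.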
+
init_vars() -> lists:map(fun maybe_create_var/1, vars()).
maybe_create_var(Var) ->
@@ -323,6 +377,7 @@ vars() ->
usmStatsDecryptionErrors
].
+
%%-----------------------------------------------------------------
%% API functions
%%-----------------------------------------------------------------
@@ -374,6 +429,11 @@ get_user_from_security_name(EngineID, SecName) ->
%%-----------------------------------------------------------------
%% Instrumentation Functions
%%-----------------------------------------------------------------
+
+usmUserSpinLock(print) ->
+ VarAndValue = [{usmUserSpinLock, usmUserSpinLock(get)}],
+ snmpa_mib_lib:print_variables(VarAndValue);
+
usmUserSpinLock(new) ->
snmp_generic:variable_func(new, {usmUserSpinLock, volatile}),
{A1,A2,A3} = erlang:now(),
diff --git a/lib/snmp/src/agent/snmp_view_based_acm_mib.erl b/lib/snmp/src/agent/snmp_view_based_acm_mib.erl
index 657207b36e..3e5091a555 100644
--- a/lib/snmp/src/agent/snmp_view_based_acm_mib.erl
+++ b/lib/snmp/src/agent/snmp_view_based_acm_mib.erl
@@ -133,7 +133,6 @@ check_vacm({vacmSecurityToGroup, SecModel, SecName, GroupName}) ->
{ok, SecM} = snmp_conf:check_sec_model(SecModel, []),
snmp_conf:check_string(SecName),
snmp_conf:check_string(GroupName),
-
Vacm = {SecM, SecName, GroupName,
?'StorageType_nonVolatile', ?'RowStatus_active'},
{ok, {vacmSecurityToGroup, Vacm}};
@@ -181,15 +180,22 @@ init_tabs(Sec2Group, Access, View) ->
snmpa_local_db:table_delete(db(vacmSecurityToGroupTable)),
snmpa_local_db:table_create(db(vacmSecurityToGroupTable)),
init_sec2group_table(Sec2Group),
+
+ ?vdebug("create vacm access table",[]),
+ snmpa_vacm:cleanup(),
init_access_table(Access),
+
?vdebug("create vacm view-tree-family table",[]),
snmpa_local_db:table_delete(db(vacmViewTreeFamilyTable)),
snmpa_local_db:table_create(db(vacmViewTreeFamilyTable)),
- init_view_table(View).
+ init_view_table(View),
+
+ ?vdebug("table(s) initiated",[]),
+ ok.
init_sec2group_table([Row | T]) ->
-% ?vtrace("init security-to-group table: "
-% "~n Row: ~p",[Row]),
+%% ?vtrace("init security-to-group table: "
+%% "~n Row: ~p",[Row]),
Key1 = element(1, Row),
Key2 = element(2, Row),
Key = [Key1, length(Key2) | Key2],
@@ -198,12 +204,12 @@ init_sec2group_table([Row | T]) ->
init_sec2group_table([]) -> true.
init_access_table([{GN, Prefix, Model, Level, Row} | T]) ->
-% ?vtrace("init access table: "
-% "~n GN: ~p"
-% "~n Prefix: ~p"
-% "~n Model: ~p"
-% "~n Level: ~p"
-% "~n Row: ~p",[GN, Prefix, Model, Level, Row]),
+%% ?vtrace("init access table: "
+%% "~n GN: ~p"
+%% "~n Prefix: ~p"
+%% "~n Model: ~p"
+%% "~n Level: ~p"
+%% "~n Row: ~p",[GN, Prefix, Model, Level, Row]),
Key = [length(GN) | GN] ++ [length(Prefix) | Prefix] ++ [Model, Level],
snmpa_vacm:insert([{Key, Row}], false),
init_access_table(T);
@@ -211,8 +217,8 @@ init_access_table([]) ->
snmpa_vacm:dump_table().
init_view_table([Row | T]) ->
-% ?vtrace("init view table: "
-% "~n Row: ~p",[Row]),
+%% ?vtrace("init view table: "
+%% "~n Row: ~p",[Row]),
Key1 = element(1, Row),
Key2 = element(2, Row),
Key = [length(Key1) | Key1] ++ [length(Key2) | Key2],
@@ -348,6 +354,49 @@ vacmContextTable(Op, Arg1, Arg2) ->
snmp_framework_mib:intContextTable(Op, Arg1, Arg2).
+vacmSecurityToGroupTable(print) ->
+ Table = vacmSecurityToGroupTable,
+ DB = db(Table),
+ FOI = foi(Table),
+ PrintRow =
+ fun(Prefix, Row) ->
+ lists:flatten(
+ io_lib:format("~sSecurityModel: ~p (~w)"
+ "~n~sSecurityName: ~p"
+ "~n~sGroupName: ~p"
+ "~n~sStorageType: ~p (~w)"
+ "~n~sStatus: ~p (~w)",
+ [Prefix, element(?vacmSecurityModel, Row),
+ case element(?vacmSecurityModel, Row) of
+ ?SEC_ANY -> any;
+ ?SEC_V1 -> v1;
+ ?SEC_V2C -> v2c;
+ ?SEC_USM -> usm;
+ _ -> undefined
+ end,
+ Prefix, element(?vacmSecurityName, Row),
+ Prefix, element(?vacmGroupName, Row),
+ Prefix, element(?vacmSecurityToGroupStorageType, Row),
+ case element(?vacmSecurityToGroupStorageType, Row) of
+ ?'vacmSecurityToGroupStorageType_readOnly' -> readOnly;
+ ?'vacmSecurityToGroupStorageType_permanent' -> permanent;
+ ?'vacmSecurityToGroupStorageType_nonVolatile' -> nonVolatile;
+ ?'vacmSecurityToGroupStorageType_volatile' -> volatile;
+ ?'vacmSecurityToGroupStorageType_other' -> other;
+ _ -> undefined
+ end,
+ Prefix, element(?vacmSecurityToGroupStatus, Row),
+ case element(?vacmSecurityToGroupStatus, Row) of
+ ?'vacmSecurityToGroupStatus_destroy' -> destroy;
+ ?'vacmSecurityToGroupStatus_createAndWait' -> createAndWait;
+ ?'vacmSecurityToGroupStatus_createAndGo' -> createAndGo;
+ ?'vacmSecurityToGroupStatus_notReady' -> notReady;
+ ?'vacmSecurityToGroupStatus_notInService' -> notInService;
+ ?'vacmSecurityToGroupStatus_active' -> active;
+ _ -> undefined
+ end]))
+ end,
+ snmpa_mib_lib:print_table(Table, DB, FOI, PrintRow);
vacmSecurityToGroupTable(Op) ->
snmp_generic:table_func(Op, db(vacmSecurityToGroupTable)).
@@ -402,13 +451,13 @@ verify_vacmSecurityToGroupTable_cols([{Col, Val0}|Cols], Acc) ->
verify_vacmSecurityToGroupTable_col(?vacmSecurityModel, Model) ->
case Model of
any -> ?SEC_ANY;
- v1 -> ?SEC_ANY;
- v2c -> ?SEC_ANY;
- usm -> ?SEC_ANY;
+ v1 -> ?SEC_V1;
+ v2c -> ?SEC_V2C;
+ usm -> ?SEC_USM;
?SEC_ANY -> ?SEC_ANY;
- ?SEC_V1 -> ?SEC_ANY;
- ?SEC_V2C -> ?SEC_ANY;
- ?SEC_USM -> ?SEC_ANY;
+ ?SEC_V1 -> ?SEC_V1;
+ ?SEC_V2C -> ?SEC_V2C;
+ ?SEC_USM -> ?SEC_USM;
_ ->
?vlog("verification of vacmSecurityModel(~w) ~p failed",
[?vacmSecurityModel, Model]),
@@ -445,6 +494,49 @@ verify_vacmSecurityToGroupTable_col(_, Val) ->
%% {RowIndex, {Col4, Col5, ..., Col9}}
%%
%%-----------------------------------------------------------------
+vacmAccessTable(print) ->
+ %% Do we have to convert all the entries to {RowIdx, Row}?
+ TableInfo = get_table(vacmAccessTable),
+ PrintRow =
+ fun(Prefix, Row) ->
+ lists:flatten(
+ io_lib:format("~sContextMatch: ~p (~w)"
+ "~n~sReadViewName: ~p"
+ "~n~sWriteViewName: ~p"
+ "~n~sNotifyViewName: ~p"
+ "~n~sStorageType: ~p (~w)"
+ "~n~sStatus: ~p (~w)",
+ [Prefix, element(?vacmAccessContextMatch-3, Row),
+ case element(?vacmAccessContextMatch-3, Row) of
+ ?vacmAccessContextMatch_exact -> exact;
+ ?vacmAccessContextMatch_prefix -> prefix;
+ _ -> undefined
+ end,
+ Prefix, element(?vacmAccessReadViewName-3, Row),
+ Prefix, element(?vacmAccessWriteViewName-3, Row),
+ Prefix, element(?vacmAccessNotifyViewName-3, Row),
+ Prefix, element(?vacmAccessStorageType-3, Row),
+ case element(?vacmAccessStorageType-3, Row) of
+ ?vacmAccessStorageType_other -> other ;
+ ?vacmAccessStorageType_volatile -> volatile;
+ ?vacmAccessStorageType_nonVolatile -> nonVolatile;
+ ?vacmAccessStorageType_permanent -> permanent;
+ ?vacmAccessStorageType_readOnly -> readOnly;
+ _ -> undefined
+ end,
+ Prefix, element(?vacmAccessStatus-3, Row),
+ case element(?vacmAccessStatus-3, Row) of
+ ?vacmAccessStatus_destroy -> destroy;
+ ?vacmAccessStatus_createAndWait -> createAndWait;
+ ?vacmAccessStatus_createAndGo -> createAndGo;
+ ?vacmAccessStatus_notReady -> notReady;
+ ?vacmAccessStatus_notInService -> notInService;
+ ?vacmAccessStatus_active -> active;
+ _ -> undefined
+ end
+ ]))
+ end,
+ snmpa_mib_lib:print_table(vacmAccessTable, {ok, TableInfo}, PrintRow);
vacmAccessTable(_Op) ->
ok.
vacmAccessTable(get, RowIndex, Cols) ->
@@ -540,24 +632,24 @@ verify_vacmAccessTable_col(?vacmAccessContextPrefix, Pref) ->
verify_vacmAccessTable_col(?vacmAccessSecurityModel, Model) ->
case Model of
any -> ?SEC_ANY;
- v1 -> ?SEC_ANY;
- v2c -> ?SEC_ANY;
- usm -> ?SEC_ANY;
+ v1 -> ?SEC_V1;
+ v2c -> ?SEC_V2C;
+ usm -> ?SEC_USM;
?SEC_ANY -> ?SEC_ANY;
- ?SEC_V1 -> ?SEC_ANY;
- ?SEC_V2C -> ?SEC_ANY;
- ?SEC_USM -> ?SEC_ANY;
+ ?SEC_V1 -> ?SEC_V1;
+ ?SEC_V2C -> ?SEC_V2C;
+ ?SEC_USM -> ?SEC_USM;
_ ->
wrongValue(?vacmAccessSecurityModel)
end;
verify_vacmAccessTable_col(?vacmAccessSecurityLevel, Level) ->
case Level of
- noAuthNoPriv -> 1;
- authNoPriv -> 2;
- authPriv -> 3;
- 1 -> 1;
- 2 -> 2;
- 3 -> 3;
+ noAuthNoPriv -> ?vacmAccessSecurityLevel_noAuthNoPriv;
+ authNoPriv -> ?vacmAccessSecurityLevel_authNoPriv;
+ authPriv -> ?vacmAccessSecurityLevel_authPriv;
+ ?vacmAccessSecurityLevel_noAuthNoPriv -> ?vacmAccessSecurityLevel_noAuthNoPriv;
+ ?vacmAccessSecurityLevel_authNoPriv -> ?vacmAccessSecurityLevel_authNoPriv;
+ ?vacmAccessSecurityLevel_authPriv -> ?vacmAccessSecurityLevel_authPriv;
_ -> wrongValue(?vacmAccessSecurityLevel)
end;
verify_vacmAccessTable_col(?vacmAccessContextMatch, Match) ->
@@ -664,6 +756,7 @@ do_get_next(RowIndex, Cols) ->
end
end.
+
%%-----------------------------------------------------------------
%% Functions to manipulate vacmAccessRows.
%%-----------------------------------------------------------------
@@ -696,29 +789,76 @@ split_cols([Col | Cols], PreCols) when Col =< 3 ->
split_cols(Cols, PreCols) ->
{PreCols, Cols}.
+vacmViewSpinLock(print) ->
+ VarAndValue = [{vacmViewSpinLock, vacmViewSpinLock(get)}],
+ snmpa_mib_lib:print_variables(VarAndValue);
+
vacmViewSpinLock(new) ->
- snmp_generic:variable_func(new, {vacmViewSpinLock, volatile}),
+ snmp_generic:variable_func(new, volatile_db(vacmViewSpinLock)),
{A1,A2,A3} = erlang:now(),
random:seed(A1,A2,A3),
Val = random:uniform(2147483648) - 1,
- snmp_generic:variable_func(set, Val, {vacmViewSpinLock, volatile});
+ snmp_generic:variable_func(set, Val, volatile_db(vacmViewSpinLock));
vacmViewSpinLock(delete) ->
ok;
vacmViewSpinLock(get) ->
- snmp_generic:variable_func(get, {vacmViewSpinLock, volatile}).
+ snmp_generic:variable_func(get, volatile_db(vacmViewSpinLock)).
vacmViewSpinLock(is_set_ok, NewVal) ->
- case snmp_generic:variable_func(get, {vacmViewSpinLock, volatile}) of
+ case snmp_generic:variable_func(get, volatile_db(vacmViewSpinLock)) of
{value, NewVal} -> noError;
_ -> inconsistentValue
end;
vacmViewSpinLock(set, NewVal) ->
snmp_generic:variable_func(set, (NewVal + 1) rem 2147483648,
- {vacmViewSpinLock, volatile}).
-
-
+ volatile_db(vacmViewSpinLock)).
+
+
+vacmViewTreeFamilyTable(print) ->
+ Table = vacmViewTreeFamilyTable,
+ DB = db(Table),
+ FOI = foi(Table),
+ PrintRow =
+ fun(Prefix, Row) ->
+ lists:flatten(
+ io_lib:format("~sViewName: ~p"
+ "~n~sSubtree: ~p"
+ "~n~sMask: ~p"
+ "~n~sType: ~p (~w)"
+ "~n~sStorageType: ~p (~w)"
+ "~n~sStatus: ~p (~w)",
+ [Prefix, element(?vacmViewTreeFamilyViewName, Row),
+ Prefix, element(?vacmViewTreeFamilySubtree, Row),
+ Prefix, element(?vacmViewTreeFamilyMask, Row),
+ Prefix, element(?vacmViewTreeFamilyType, Row),
+ case element(?vacmViewTreeFamilyType, Row) of
+ ?vacmViewTreeFamilyType_included -> included;
+ ?vacmViewTreeFamilyType_excluded -> excluded;
+ _ -> undefined
+ end,
+ Prefix, element(?vacmViewTreeFamilyStorageType, Row),
+ case element(?vacmViewTreeFamilyStorageType, Row) of
+ ?vacmViewTreeFamilyStorageType_readOnly -> readOnly;
+ ?vacmViewTreeFamilyStorageType_permanent -> permanent;
+ ?vacmViewTreeFamilyStorageType_nonVolatile -> nonVolatile;
+ ?vacmViewTreeFamilyStorageType_volatile -> volatile;
+ ?vacmViewTreeFamilyStorageType_other -> other;
+ _ -> undefined
+ end,
+ Prefix, element(?vacmViewTreeFamilyStatus, Row),
+ case element(?vacmViewTreeFamilyStatus, Row) of
+ ?vacmViewTreeFamilyStatus_destroy -> destroy;
+ ?vacmViewTreeFamilyStatus_createAndWait -> createAndWait;
+ ?vacmViewTreeFamilyStatus_createAndGo -> createAndGo;
+ ?vacmViewTreeFamilyStatus_notReady -> notReady;
+ ?vacmViewTreeFamilyStatus_notInService -> notInService;
+ ?vacmViewTreeFamilyStatus_active -> active;
+ _ -> undefined
+ end]))
+ end,
+ snmpa_mib_lib:print_table(Table, DB, FOI, PrintRow);
vacmViewTreeFamilyTable(Op) ->
snmp_generic:table_func(Op, db(vacmViewTreeFamilyTable)).
vacmViewTreeFamilyTable(get_next, RowIndex, Cols) ->
@@ -795,7 +935,25 @@ table_next(Name, RestOid) ->
snmp_generic:table_next(db(Name), RestOid).
-db(X) -> snmpa_agent:db(X).
+get_table(vacmAccessTable) ->
+ do_get_vacmAccessTable([], []).
+
+do_get_vacmAccessTable(Key0, Acc) ->
+ case snmpa_vacm:get_next_row(Key0) of
+ {Key, _Row} = Entry ->
+ do_get_vacmAccessTable(Key, [Entry | Acc]);
+ false ->
+ lists:reverse(Acc)
+ end.
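The helper above walks the whole vacmAccessTable through snmpa_vacm:get_next_row/1; a similar, purely illustrative sketch that only counts the rows:

    count_vacm_access_rows() ->
        count_vacm_access_rows([], 0).

    count_vacm_access_rows(Key0, N) ->
        case snmpa_vacm:get_next_row(Key0) of
            {Key, _Row} ->
                count_vacm_access_rows(Key, N + 1);
            false ->
                N
        end.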
+
+
+%%-----------------------------------------------------------------
+%% Wrappers
+%%-----------------------------------------------------------------
+
+db(X) -> snmpa_agent:db(X).
+volatile_db(X) -> {X, volatile}.
+
fa(vacmSecurityToGroupTable) -> ?vacmGroupName;
fa(vacmViewTreeFamilyTable) -> ?vacmViewTreeFamilyMask.
diff --git a/lib/snmp/src/agent/snmpa.erl b/lib/snmp/src/agent/snmpa.erl
index 87b191caed..22fbd33add 100644
--- a/lib/snmp/src/agent/snmpa.erl
+++ b/lib/snmp/src/agent/snmpa.erl
@@ -105,6 +105,8 @@
set_request_limit/1, set_request_limit/2
]).
+-export([print_mib_info/0, print_mib_tables/0, print_mib_variables/0]).
+
-include("snmpa_atl.hrl").
-define(EXTRA_INFO, undefined).
@@ -283,6 +285,186 @@ whereis_mib(Agent, Mib) when is_atom(Mib) ->
%% -
+mibs_info() ->
+ [
+ {snmp_standard_mib,
+ [],
+ [
+ sysDescr,
+ sysObjectID,
+ sysContact,
+ sysName,
+ sysLocation,
+ sysServices,
+ snmpEnableAuthenTraps,
+ sysUpTime,
+ snmpInPkts,
+ snmpOutPkts,
+ snmpInBadVersions,
+ snmpInBadCommunityNames,
+ snmpInBadCommunityUses,
+ snmpInASNParseErrs,
+ snmpInTooBigs,
+ snmpInNoSuchNames,
+ snmpInBadValues,
+ snmpInReadOnlys,
+ snmpInGenErrs,
+ snmpInTotalReqVars,
+ snmpInTotalSetVars,
+ snmpInGetRequests,
+ snmpInSetRequests,
+ snmpInGetNexts,
+ snmpInGetResponses,
+ snmpInTraps,
+ snmpOutTooBigs,
+ snmpOutNoSuchNames,
+ snmpOutBadValues,
+ snmpOutGenErrs,
+ snmpOutGetRequests,
+ snmpOutSetRequests,
+ snmpOutGetNexts,
+ snmpOutGetResponses,
+ snmpOutTraps
+ ]
+ },
+ {snmp_framework_mib,
+ [
+ ],
+ [
+ snmpEngineID,
+ snmpEngineBoots,
+ snmpEngineTime,
+ snmpEngineMaxMessageSize
+ ]
+ },
+ {snmp_view_based_acm_mib,
+ [
+ vacmAccessTable,
+ vacmSecurityToGroupTable,
+ vacmViewTreeFamilyTable
+ ],
+ [
+ vacmViewSpinLock
+ ]
+ },
+ {snmp_target_mib,
+ [
+ snmpTargetAddrTable,
+ snmpTargetParamsTable
+ ],
+ [
+ snmpTargetSpinLock
+ ]
+ },
+ {snmp_community_mib,
+ [
+ snmpCommunityTable
+ ],
+ []
+ },
+ {snmp_notification_mib,
+ [
+ snmpNotifyTable
+ ],
+ []},
+ {snmp_user_based_sm_mib,
+ [
+ usmUserTable
+ ],
+ [
+ usmUserSpinLock,
+ usmStatsUnsupportedSecLevels,
+ usmStatsNotInTimeWindows,
+ usmStatsUnknownUserNames,
+ usmStatsUnknownEngineIDs,
+ usmStatsWrongDigests,
+ usmStatsDecryptionErrors
+ ]
+ }
+ ].
+
+print_mib_info() ->
+ MibsInfo = mibs_info(),
+ print_mib_info(MibsInfo).
+
+print_mib_info([]) ->
+ io:format("~n", []),
+ ok;
+print_mib_info([{Mod, Tables, Variables} | MibsInfo]) ->
+ io:format("~n** ~s ** ~n~n", [make_pretty_mib(Mod)]),
+ print_mib_variables2(Mod, Variables),
+ print_mib_tables2(Mod, Tables),
+ io:format("~n", []),
+ print_mib_info(MibsInfo).
+
+
+print_mib_tables() ->
+ Tables = [{Mod, Tabs} || {Mod, Tabs, _Vars} <- mibs_info()],
+ print_mib_tables(Tables).
+
+print_mib_tables([]) ->
+ ok;
+print_mib_tables([{Mod, Tabs}|MibTabs])
+ when is_atom(Mod) andalso is_list(Tabs) ->
+ print_mib_tables(Mod, Tabs),
+ print_mib_tables(MibTabs);
+print_mib_tables([_|MibTabs]) ->
+ print_mib_tables(MibTabs).
+
+print_mib_tables(_Mod, [] = _Tables) ->
+ ok;
+print_mib_tables(Mod, Tables) ->
+ io:format("~n** ~s ** ~n~n", [make_pretty_mib(Mod)]),
+ print_mib_tables2(Mod, Tables),
+ io:format("~n", []).
+
+print_mib_tables2(Mod, Tables) ->
+ [(catch Mod:Table(print)) || Table <- Tables].
+
+
+print_mib_variables() ->
+ Variables = [{Mod, Vars} || {Mod, _Tabs, Vars} <- mibs_info()],
+ print_mib_variables(Variables).
+
+print_mib_variables([]) ->
+ ok;
+print_mib_variables([{Mod, Vars}|MibVars])
+ when is_atom(Mod) andalso is_list(Vars) ->
+ print_mib_variables(Mod, Vars),
+ print_mib_variables(MibVars);
+print_mib_variables([_|MibVars]) ->
+ print_mib_variables(MibVars).
+
+print_mib_variables(_Mod, [] = _Vars) ->
+ ok;
+print_mib_variables(Mod, Vars) ->
+ io:format("~n** ~s ** ~n~n", [make_pretty_mib(Mod)]),
+ print_mib_variables2(Mod, Vars),
+ io:format("~n", []).
+
+print_mib_variables2(Mod, Variables) ->
+ Vars = [{Var, (catch Mod:Var(get))} || Var <- Variables],
+ snmpa_mib_lib:print_variables(Vars).
+
+
+make_pretty_mib(snmp_view_based_acm_mib) ->
+ "SNMP-VIEW-BASED-ACM-MIB";
+make_pretty_mib(snmp_target_mib) ->
+ "SNMP-TARGET-MIB";
+make_pretty_mib(snmp_community_mib) ->
+ "SNMP-COMMUNITY-MIB";
+make_pretty_mib(snmp_notification_mib) ->
+ "SNMP-NOTIFICATION-MIB";
+make_pretty_mib(snmp_user_based_sm_mib) ->
+ "SNMP-USER-BASED-SM-MIB";
+make_pretty_mib(snmp_framework_mib) ->
+ "SNMP-FRAMEWORK-MIB";
+make_pretty_mib(Mod) ->
+ atom_to_list(Mod).
+
+
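A minimal sketch of the three new snmpa entry points (assuming a started agent; all three print to standard output, and the wrapper name is hypothetical):

    dump_agent_mibs() ->
        %% Variables and tables for every MIB listed by mibs_info/0:
        snmpa:print_mib_info(),
        %% Only the tables:
        snmpa:print_mib_tables(),
        %% Only the variables:
        snmpa:print_mib_variables().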
+%% -
+
mib_of(Oid) ->
snmpa_agent:mib_of(Oid).
diff --git a/lib/snmp/src/agent/snmpa_mib_lib.erl b/lib/snmp/src/agent/snmpa_mib_lib.erl
index 441228b9ee..cb96ff8056 100644
--- a/lib/snmp/src/agent/snmpa_mib_lib.erl
+++ b/lib/snmp/src/agent/snmpa_mib_lib.erl
@@ -19,7 +19,8 @@
-module(snmpa_mib_lib).
-export([table_cre_row/3, table_del_row/2]).
--export([get_table/2, print_table/3, print_table/4, print_tables/1]).
+-export([get_table/2]).
+-export([print_variables/1, print_table/3, print_table/4, print_tables/1]).
-export([gc_tab/3, gc_tab/5]).
-include("SNMPv2-TC.hrl").
@@ -81,31 +82,69 @@ get_table(NameDb, FOI, Oid, Acc) ->
end.
+print_variables(Variables) when is_list(Variables) ->
+ Variables2 = print_variables_prefixify(Variables),
+ lists:foreach(fun({Variable, ValueResult, Prefix}) ->
+ print_variable(Variable, ValueResult, Prefix)
+ end, Variables2),
+ ok.
+
+print_variable(Variable, {value, Val}, Prefix) when is_atom(Variable) ->
+ io:format("~w~s=> ~p~n", [Variable, Prefix, Val]);
+print_variable(Variable, Error, Prefix) when is_atom(Variable) ->
+ io:format("~w~s=> [e] ~p~n", [Variable, Prefix, Error]).
+
+print_variables_prefixify(Variables) ->
+ MaxVarLength = print_variables_maxlength(Variables),
+ print_variables_prefixify(Variables, MaxVarLength, []).
+
+print_variables_prefixify([], _MaxVarLength, Acc) ->
+ lists:reverse(Acc);
+print_variables_prefixify([{Var, Res}|Variables], MaxVarLength, Acc) ->
+ Prefix = make_variable_print_prefix(Var, MaxVarLength),
+ print_variables_prefixify(Variables, MaxVarLength,
+ [{Var, Res, Prefix}|Acc]).
+
+make_variable_print_prefix(Var, MaxVarLength) ->
+ lists:duplicate(MaxVarLength - length(atom_to_list(Var)) + 1, $ ).
+
+print_variables_maxlength(Variables) ->
+ print_variables_maxlength(Variables, 0).
+
+print_variables_maxlength([], MaxLength) ->
+ MaxLength;
+print_variables_maxlength([{Var, _}|Variables], MaxLength) when is_atom(Var) ->
+ VarLen = length(atom_to_list(Var)),
+ if
+ VarLen > MaxLength ->
+ print_variables_maxlength(Variables, VarLen);
+ true ->
+ print_variables_maxlength(Variables, MaxLength)
+ end.
+
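An illustrative call (values are made up) showing the alignment print_variables/1 produces: names are padded to the longest one, and anything other than {value, V} is reported with an [e] marker:

    demo_print_variables() ->
        snmpa_mib_lib:print_variables(
          [{sysUpTime,             {value, 4711}},
           {snmpEnableAuthenTraps, {value, 2}},
           {snmpInPkts,            genErr}]).

    %% Output, roughly:
    %%   sysUpTime             => 4711
    %%   snmpEnableAuthenTraps => 2
    %%   snmpInPkts            => [e] genErr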
+
print_tables(Tables) when is_list(Tables) ->
lists:foreach(fun({Table, DB, FOI, PrintRow}) ->
print_table(Table, DB, FOI, PrintRow)
end, Tables),
ok.
-%% print_table(Table, DB, FOI, PrintRow) ->
-%% TableInfo = get_table(DB(Table), FOI(Table)),
-%% print_table(Table, TableInfo, PrintRow),
-%% ok.
-
print_table(Table, DB, FOI, PrintRow) ->
TableInfo = get_table(DB, FOI),
print_table(Table, TableInfo, PrintRow).
print_table(Table, TableInfo, PrintRow) when is_function(PrintRow, 2) ->
- io:format("~w => ~n", [Table]),
+ io:format("~w =>", [Table]),
do_print_table(TableInfo, PrintRow).
+do_print_table({ok, [] = _TableInfo}, _PrintRow) ->
+ io:format(" -~n", []);
do_print_table({ok, TableInfo}, PrintRow) when is_function(PrintRow, 2) ->
+ io:format("~n", []),
lists:foreach(fun({RowIdx, Row}) ->
io:format(" ~w => ~n~s~n",
[RowIdx, PrintRow(" ", Row)])
- end, TableInfo),
- io:format("~n", []);
+ end, TableInfo);
do_print_table({error, {invalid_rowindex, BadRowIndex, []}}, _PrintRow) ->
io:format("Error: Bad rowindex ~w~n", [BadRowIndex]);
do_print_table({error, {invalid_rowindex, BadRowIndex, TableInfo}}, PrintRow) ->
diff --git a/lib/snmp/src/agent/snmpa_vacm.erl b/lib/snmp/src/agent/snmpa_vacm.erl
index 2eacea4301..892dc265f1 100644
--- a/lib/snmp/src/agent/snmpa_vacm.erl
+++ b/lib/snmp/src/agent/snmpa_vacm.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2010. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -21,7 +21,7 @@
-export([get_mib_view/5]).
-export([init/1, init/2, backup/1]).
-export([delete/1, get_row/1, get_next_row/1, insert/1, insert/2,
- dump_table/0]).
+ cleanup/0, dump_table/0]).
-include("SNMPv2-TC.hrl").
-include("SNMP-VIEW-BASED-ACM-MIB.hrl").
@@ -256,6 +256,11 @@ delete(Key) ->
ets:delete(snmpa_vacm, Key),
dump_table().
+
+cleanup() ->
+ ets:delete_all_objects(snmpa_vacm),
+ dump_table().
+
dump_table(true) ->
dump_table();
dump_table(_) ->
diff --git a/lib/snmp/src/app/snmp.appup.src b/lib/snmp/src/app/snmp.appup.src
index 2375e3df70..de0e5d6e14 100644
--- a/lib/snmp/src/app/snmp.appup.src
+++ b/lib/snmp/src/app/snmp.appup.src
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -22,43 +22,30 @@
%% ----- U p g r a d e -------------------------------------------------------
[
- {"4.17.1",
+ {"4.18",
[
- {load_module, snmp_community_mib, soft_purge, soft_purge, []},
- {load_module, snmp_framework_mib, soft_purge, soft_purge, []},
- {load_module, snmp_generic, soft_purge, soft_purge, []},
- {load_module, snmp_notification_mib, soft_purge, soft_purge, []},
- {load_module, snmp_standard_mib, soft_purge, soft_purge, []},
- {load_module, snmp_target_mib, soft_purge, soft_purge, []},
- {load_module, snmp_user_based_sm_mib, soft_purge, soft_purge, []},
- {load_module, snmp_view_based_acm_mib, soft_purge, soft_purge, []},
- {load_module, snmpa_conf, soft_purge, soft_purge, []},
- {update, snmpa_target_cache, soft, soft_purge, soft_purge, []},
- {load_module, snmpa_usm, soft_purge, soft_purge, []},
- {load_module, snmpm, soft_purge, soft_purge, []},
- {load_module, snmpm_conf, soft_purge, soft_purge, []},
- {update, snmpm_config, soft, soft_purge, soft_purge, []},
- {load_module, snmpm_usm, soft_purge, soft_purge, []}
- ]
- },
- {"4.17",
- [
- {load_module, snmp_community_mib, soft_purge, soft_purge, []},
- {load_module, snmp_framework_mib, soft_purge, soft_purge, []},
- {load_module, snmp_generic, soft_purge, soft_purge, []},
- {load_module, snmp_notification_mib, soft_purge, soft_purge, []},
- {load_module, snmp_standard_mib, soft_purge, soft_purge, []},
- {load_module, snmp_target_mib, soft_purge, soft_purge, []},
- {load_module, snmp_user_based_sm_mib, soft_purge, soft_purge, []},
- {load_module, snmp_view_based_acm_mib, soft_purge, soft_purge, []},
- {load_module, snmpa_conf, soft_purge, soft_purge, []},
- {update, snmpa_target_cache, soft, soft_purge, soft_purge, []},
- {load_module, snmpa_usm, soft_purge, soft_purge, []},
- {load_module, snmpm, soft_purge, soft_purge, []},
- {load_module, snmpm_conf, soft_purge, soft_purge, []},
- {update, snmpm_config, soft, soft_purge, soft_purge, []},
- {load_module, snmpm_usm, soft_purge, soft_purge, []},
- {load_module, snmpa_net_if, soft_purge, soft_purge, []}
+ {load_module, snmp_misc, soft_purge, soft_purge, []},
+ {load_module, snmpa_vacm, soft_purge, soft_purge, []},
+ {load_module, snmpa, soft_purge, soft_purge,
+ [snmp_community_mib,
+ snmp_framework_mib,
+ snmp_standard_mib,
+ snmp_target_mib,
+ snmp_user_based_sm_mib,
+ snmp_view_based_acm_mib]},
+ {load_module, snmp_community_mib, soft_purge, soft_purge,
+ [snmpa_mib_lib]},
+ {load_module, snmp_framework_mib, soft_purge, soft_purge,
+ [snmpa_mib_lib]},
+ {load_module, snmp_standard_mib, soft_purge, soft_purge,
+ [snmpa_mib_lib]},
+ {load_module, snmp_target_mib, soft_purge, soft_purge,
+ [snmpa_mib_lib]},
+ {load_module, snmp_user_based_sm_mib, soft_purge, soft_purge,
+ [snmpa_mib_lib]},
+ {load_module, snmp_view_based_acm_mib, soft_purge, soft_purge,
+ [snmpa_mib_lib, snmpa_vacm]},
+ {load_module, snmpa_mib_lib, soft_purge, soft_purge, []}
]
}
],
@@ -66,43 +53,30 @@
%% ------D o w n g r a d e ---------------------------------------------------
[
- {"4.17.1",
- [
- {load_module, snmp_community_mib, soft_purge, soft_purge, []},
- {load_module, snmp_framework_mib, soft_purge, soft_purge, []},
- {load_module, snmp_generic, soft_purge, soft_purge, []},
- {load_module, snmp_notification_mib, soft_purge, soft_purge, []},
- {load_module, snmp_standard_mib, soft_purge, soft_purge, []},
- {load_module, snmp_target_mib, soft_purge, soft_purge, []},
- {load_module, snmp_user_based_sm_mib, soft_purge, soft_purge, []},
- {load_module, snmp_view_based_acm_mib, soft_purge, soft_purge, []},
- {load_module, snmpa_conf, soft_purge, soft_purge, []},
- {update, snmpa_target_cache, soft, soft_purge, soft_purge, []},
- {load_module, snmpa_usm, soft_purge, soft_purge, []},
- {load_module, snmpm, soft_purge, soft_purge, []},
- {load_module, snmpm_conf, soft_purge, soft_purge, []},
- {update, snmpm_config, soft, soft_purge, soft_purge, []},
- {load_module, snmpm_usm, soft_purge, soft_purge, []}
- ]
- },
- {"4.17",
+ {"4.18",
[
- {load_module, snmp_community_mib, soft_purge, soft_purge, []},
- {load_module, snmp_framework_mib, soft_purge, soft_purge, []},
- {load_module, snmp_generic, soft_purge, soft_purge, []},
- {load_module, snmp_notification_mib, soft_purge, soft_purge, []},
- {load_module, snmp_standard_mib, soft_purge, soft_purge, []},
- {load_module, snmp_target_mib, soft_purge, soft_purge, []},
- {load_module, snmp_user_based_sm_mib, soft_purge, soft_purge, []},
- {load_module, snmp_view_based_acm_mib, soft_purge, soft_purge, []},
- {load_module, snmpa_conf, soft_purge, soft_purge, []},
- {update, snmpa_target_cache, soft, soft_purge, soft_purge, []},
- {load_module, snmpa_usm, soft_purge, soft_purge, []},
- {load_module, snmpm, soft_purge, soft_purge, []},
- {load_module, snmpm_conf, soft_purge, soft_purge, []},
- {update, snmpm_config, soft, soft_purge, soft_purge, []},
- {load_module, snmpm_usm, soft_purge, soft_purge, []},
- {load_module, snmpa_net_if, soft_purge, soft_purge, []}
+ {load_module, snmp_misc, soft_purge, soft_purge, []},
+ {load_module, snmpa_vacm, soft_purge, soft_purge, []},
+ {load_module, snmpa, soft_purge, soft_purge,
+ [snmp_community_mib,
+ snmp_framework_mib,
+ snmp_standard_mib,
+ snmp_target_mib,
+ snmp_user_based_sm_mib,
+ snmp_view_based_acm_mib]},
+ {load_module, snmp_community_mib, soft_purge, soft_purge,
+ [snmpa_mib_lib]},
+ {load_module, snmp_framework_mib, soft_purge, soft_purge,
+ [snmpa_mib_lib]},
+ {load_module, snmp_standard_mib, soft_purge, soft_purge,
+ [snmpa_mib_lib]},
+ {load_module, snmp_target_mib, soft_purge, soft_purge,
+ [snmpa_mib_lib]},
+ {load_module, snmp_user_based_sm_mib, soft_purge, soft_purge,
+ [snmpa_mib_lib]},
+ {load_module, snmp_view_based_acm_mib, soft_purge, soft_purge,
+ [snmpa_mib_lib, snmpa_vacm]},
+ {load_module, snmpa_mib_lib, soft_purge, soft_purge, []}
]
}
]
diff --git a/lib/snmp/src/compile/Makefile b/lib/snmp/src/compile/Makefile
index 4be60e1835..1f1086eae1 100644
--- a/lib/snmp/src/compile/Makefile
+++ b/lib/snmp/src/compile/Makefile
@@ -20,6 +20,7 @@
include $(ERL_TOP)/make/target.mk
EBIN = ../../ebin
+BIN = ../../bin
include $(ERL_TOP)/make/$(TARGET)/otp.mk
@@ -44,9 +45,11 @@ RELSYSDIR = $(RELEASE_PATH)/lib/snmp-$(VSN)
include modules.mk
+ESCRIPT_BIN = $(ESCRIPT_SRC:%.src=$(BIN)/%)
+
ERL_FILES = $(MODULES:%=%.erl)
-TARGET_FILES = $(MODULES:%=$(EBIN)/%.$(EMULATOR))
+TARGET_FILES = $(MODULES:%=$(EBIN)/%.$(EMULATOR)) $(ESCRIPT_BIN)
GENERATED_PARSER = $(PARSER_MODULE:%=%.erl)
@@ -97,8 +100,12 @@ info:
@echo ""
@echo "EBIN: $(EBIN)"
@echo ""
+ @echo "ESCRIPT_SRC: $(ESCRIPT_SRC)"
+ @echo "ESCRIPT_BIN: $(ESCRIPT_BIN)"
+ @echo ""
@echo ""
+
# ----------------------------------------------------
# Special Build Targets
# ----------------------------------------------------
@@ -107,6 +114,7 @@ parser: $(PARSER_TARGET)
$(GENERATED_PARSER): $(PARSER_SRC)
+
# ----------------------------------------------------
# Release Target
# ----------------------------------------------------
@@ -115,9 +123,11 @@ include $(ERL_TOP)/make/otp_release_targets.mk
release_spec: opt
$(INSTALL_DIR) $(RELSYSDIR)/src
$(INSTALL_DIR) $(RELSYSDIR)/src/compiler
- $(INSTALL_DATA) $(PARSER_SRC) $(ERL_FILES) $(INTERNAL_HRL_FILES) $(RELSYSDIR)/src/compiler
+ $(INSTALL_DATA) $(ESCRIPT_SRC) $(PARSER_SRC) $(ERL_FILES) $(INTERNAL_HRL_FILES) $(RELSYSDIR)/src/compiler
$(INSTALL_DIR) $(RELSYSDIR)/ebin
$(INSTALL_DATA) $(TARGET_FILES) $(RELSYSDIR)/ebin
+ $(INSTALL_DIR) $(RELSYSDIR)/bin
+ $(INSTALL_SCRIPT) $(ESCRIPT_BIN) $(RELSYSDIR)/bin
release_docs_spec:
diff --git a/lib/snmp/src/compile/depend.mk b/lib/snmp/src/compile/depend.mk
index 75af1bf293..74eb6e0864 100644
--- a/lib/snmp/src/compile/depend.mk
+++ b/lib/snmp/src/compile/depend.mk
@@ -44,3 +44,6 @@ $(EBIN)/snmpc_mib_gram.$(EMULATOR): \
../../include/snmp_types.hrl \
snmpc_mib_gram.erl
+$(BIN)/snmpc: snmpc.src
+ $(PERL) -p -e 's?%VSN%?$(VSN)? ' < $< > $@
+ chmod 755 $@
diff --git a/lib/snmp/src/compile/modules.mk b/lib/snmp/src/compile/modules.mk
index 6365b0e694..ca78e2e6a9 100644
--- a/lib/snmp/src/compile/modules.mk
+++ b/lib/snmp/src/compile/modules.mk
@@ -21,6 +21,9 @@ PARSER_SRC = snmpc_mib_gram.yrl
PARSER_MODULE = $(PARSER_SRC:%.yrl=%)
+ESCRIPT_SRC = \
+ snmpc.src
+
MODULES = \
$(PARSER_MODULE) \
snmpc \
diff --git a/lib/snmp/src/compile/snmpc.erl b/lib/snmp/src/compile/snmpc.erl
index a7f2cdc2bc..195c238184 100644
--- a/lib/snmp/src/compile/snmpc.erl
+++ b/lib/snmp/src/compile/snmpc.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -112,6 +112,8 @@ compile(FileName) ->
%% description
%% reference
%% imports
+%% agent_capabilities
+%% module_compliance
%% module_identity
%% {module, string()}
%% no_defs
@@ -203,6 +205,10 @@ get_options([imports|Opts], Formats, Args) ->
get_options(Opts, ["~n imports"|Formats], Args);
get_options([module_identity|Opts], Formats, Args) ->
get_options(Opts, ["~n module_identity"|Formats], Args);
+get_options([module_compliance|Opts], Formats, Args) ->
+ get_options(Opts, ["~n module_compliance"|Formats], Args);
+get_options([agent_capabilities|Opts], Formats, Args) ->
+ get_options(Opts, ["~n agent_capabilities"|Formats], Args);
get_options([relaxed_row_name_assign_check|Opts], Formats, Args) ->
get_options(Opts, ["~n relaxed_row_name_assign_check"|Formats], Args);
get_options([_|Opts], Formats, Args) ->
@@ -288,6 +294,10 @@ check_options([imports| T]) ->
check_options(T);
check_options([module_identity| T]) ->
check_options(T);
+check_options([module_compliance| T]) ->
+ check_options(T);
+check_options([agent_capabilities| T]) ->
+ check_options(T);
check_options([relaxed_row_name_assign_check| T]) ->
check_options(T);
check_options([{module, M} | T]) when is_atom(M) ->
@@ -315,6 +325,12 @@ get_description(Options) ->
get_reference(Options) ->
get_bool_option(reference, Options).
+get_agent_capabilities(Options) ->
+ get_bool_option(agent_capabilities, Options).
+
+get_module_compliance(Options) ->
+ get_bool_option(module_compliance, Options).
+
get_relaxed_row_name_assign_check(Options) ->
lists:member(relaxed_row_name_assign_check, Options).
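A hedged sketch of compiling a MIB with the two new options enabled (file name and include path are hypothetical; like description and reference, both options are plain atoms in the option list):

    compile_with_conf_macros() ->
        %% module_compliance keeps MODULE-COMPLIANCE constructs, and
        %% agent_capabilities keeps AGENT-CAPABILITIES constructs, as OID
        %% nodes in the compiled MIB instead of being ignored.
        snmpc:compile("MY-MIB",
                      [{i, ["./mibs"]},
                       {outdir, "./"},
                       module_compliance,
                       agent_capabilities]).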
@@ -387,10 +403,12 @@ get_verbosity(Options) ->
init(From, MibFileName, Options) ->
{A,B,C} = now(),
random:seed(A,B,C),
- put(options, Options),
- put(verbosity, get_verbosity(Options)),
- put(description, get_description(Options)),
- put(reference, get_reference(Options)),
+ put(options, Options),
+ put(verbosity, get_verbosity(Options)),
+ put(description, get_description(Options)),
+ put(reference, get_reference(Options)),
+ put(agent_capabilities, get_agent_capabilities(Options)),
+ put(module_compliance, get_module_compliance(Options)),
File = filename:rootname(MibFileName, ".mib"),
put(filename, filename:basename(File ++ ".mib")),
R = case catch c_impl(File) of
@@ -876,12 +894,12 @@ definitions_loop([{#mc_object_type{name = NameOfEntry,
definitions_loop([{#mc_notification{name = TrapName,
status = deprecated}, Line}|T],
- false) ->
+ #dldata{deprecated = false} = Data) ->
?vinfo2("defloop -> notification ~w is deprecated => ignored",
[TrapName], Line),
update_status(TrapName, deprecated),
ensure_macro_imported('NOTIFICATION-TYPE', Line),
- definitions_loop(T, false);
+ definitions_loop(T, Data);
definitions_loop([{#mc_notification{name = TrapName,
status = obsolete}, Line}|T],
@@ -921,10 +939,96 @@ definitions_loop([{#mc_notification{name = TrapName,
snmpc_lib:add_cdata(#cdata.traps, [Notif]),
definitions_loop(T, Data);
-definitions_loop([{#mc_module_compliance{name = Name},Line}|T], Data) ->
- ?vlog2("defloop -> module_compliance:"
- "~n Name: ~p", [Name], Line),
+definitions_loop([{#mc_agent_capabilities{name = Name,
+ status = Status,
+ description = Desc,
+ reference = Ref,
+ modules = Mods,
+ name_assign = {Parent, SubIdx}},Line}|T], Data) ->
+ ?vlog2("defloop -> agent_capabilities ~p:"
+ "~n Status: ~p"
+ "~n Desc: ~p"
+ "~n Ref: ~p"
+ "~n Mods: ~p"
+ "~n Parent: ~p"
+ "~n SubIndex: ~p",
+ [Name, Status, Desc, Ref, Mods, Parent, SubIdx], Line),
+ ensure_macro_imported('AGENT-CAPABILITIES', Line),
+ case get(agent_capabilities) of
+ true ->
+ update_status(Name, Status),
+ snmpc_lib:register_oid(Line, Name, Parent, SubIdx),
+ NewME = snmpc_lib:makeInternalNode2(false, Name),
+ Description = make_description(Desc),
+ Reference =
+ case Ref of
+ undefined ->
+ [];
+ _ ->
+ [{reference, Ref}]
+ end,
+ Modules =
+ case Mods of
+ undefined ->
+ [];
+ [] ->
+ [];
+ _ ->
+ [{modules, Mods}]
+ end,
+ AssocList = Reference ++ Modules,
+ NewME2 = NewME#me{description = Description,
+ assocList = AssocList},
+ snmpc_lib:add_cdata(#cdata.mes, [NewME2]);
+ _ ->
+ ok
+ end,
+ definitions_loop(T, Data);
+
+definitions_loop([{#mc_module_compliance{name = Name,
+ status = Status,
+ description = Desc,
+ reference = Ref,
+ modules = Mods,
+ name_assign = {Parent, SubIdx}},Line}|T], Data) ->
+ ?vlog2("defloop -> module_compliance: ~p"
+ "~n Status: ~p"
+ "~n Desc: ~p"
+ "~n Ref: ~p"
+ "~n Mods: ~p"
+ "~n Parent: ~p"
+ "~n SubIndex: ~p",
+ [Name, Status, Desc, Ref, Mods, Parent, SubIdx], Line),
ensure_macro_imported('MODULE-COMPLIANCE', Line),
+ case get(module_compliance) of
+ true ->
+ update_status(Name, Status),
+ snmpc_lib:register_oid(Line, Name, Parent, SubIdx),
+ NewME = snmpc_lib:makeInternalNode2(false, Name),
+ Description = make_description(Desc),
+ Reference =
+ case Ref of
+ undefined ->
+ [];
+ _ ->
+ [{reference, Ref}]
+ end,
+ Modules =
+ case Mods of
+ undefined ->
+ [];
+ [] ->
+ [];
+ _ ->
+ [{modules, Mods}]
+ end,
+ AssocList = Reference ++ Modules,
+ NewME2 = NewME#me{description = Description,
+ assocList = AssocList},
+ snmpc_lib:add_cdata(#cdata.mes, [NewME2]);
+ _ ->
+ ok
+ end,
definitions_loop(T, Data);
definitions_loop([{#mc_object_group{name = Name,
@@ -1328,22 +1432,26 @@ save(Filename, MibName, Options) ->
parse(FileName) ->
+%% ?vtrace("parse -> start tokenizer for ~p", [FileName]),
case snmpc_tok:start_link(reserved_words(),
[{file, FileName ++ ".mib"},
{forget_stringdata, true}]) of
{error,ReasonStr} ->
snmpc_lib:error(lists:flatten(ReasonStr),[]);
{ok, TokPid} ->
+%% ?vtrace("parse -> tokenizer start, now get tokens", []),
Toks = snmpc_tok:get_all_tokens(TokPid),
+%% ?vtrace("parse -> tokens: ~p", [Toks]),
set_version(Toks),
- %% io:format("parse -> lexical analysis: ~n~p~n", [Toks]),
- %% t("parse -> lexical analysis: ~n~p", [Toks]),
+ %% ?vtrace("parse -> lexical analysis: ~n~p", [Toks]),
CDataArg =
case lists:keysearch(module, 1, get(options)) of
{value, {module, M}} -> {module, M};
_ -> {file, FileName ++ ".funcs"}
end,
put(cdata,snmpc_lib:make_cdata(CDataArg)),
+%% ?vtrace("parse -> stop tokenizer and then do the actual parse",
+%% []),
snmpc_tok:stop(TokPid),
Res = if
is_list(Toks) ->
@@ -1351,7 +1459,7 @@ parse(FileName) ->
true ->
Toks
end,
- %% t("parse -> parsed: ~n~p", [Res]),
+%% ?vtrace("parse -> parsed result: ~n~p", [Res]),
case Res of
{ok, PData} ->
{ok, PData};
@@ -1443,6 +1551,10 @@ reserved_words() ->
'NOTIFICATION-GROUP',
'NOTIFICATIONS',
'MODULE-COMPLIANCE',
+ 'AGENT-CAPABILITIES',
+ 'PRODUCT-RELEASE',
+ 'SUPPORTS',
+ 'INCLUDES',
'MODULE',
'MANDATORY-GROUPS',
'GROUP',
diff --git a/lib/snmp/src/compile/snmpc.hrl b/lib/snmp/src/compile/snmpc.hrl
index eb896cde6b..1c0808d065 100644
--- a/lib/snmp/src/compile/snmpc.hrl
+++ b/lib/snmp/src/compile/snmpc.hrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -103,16 +103,75 @@
).
+-record(mc_agent_capabilities,
+ {name,
+ product_release,
+ status,
+ description,
+ reference,
+ modules,
+ name_assign
+ }
+ ).
+
+-record(mc_ac_module,
+ {name,
+ groups,
+ variation
+ }
+ ).
+
+-record(mc_ac_object_variation,
+ {name,
+ syntax,
+ write_syntax,
+ access,
+ creation,
+ default_value,
+ description
+ }
+ ).
+
+-record(mc_ac_notification_variation,
+ {name,
+ access,
+ description
+ }
+ ).
+
+
-record(mc_module_compliance,
{name,
status,
description,
reference,
- module,
+ modules,
name_assign
}
).
+-record(mc_mc_compliance_group,
+ {name,
+ description
+ }
+ ).
+
+-record(mc_mc_object,
+ {name,
+ syntax,
+ write_syntax,
+ access,
+ description
+ }
+ ).
+
+-record(mc_mc_module,
+ {name,
+ mandatory,
+ compliance
+ }
+ ).
+
-record(mc_object_group,
{name,
diff --git a/lib/snmp/src/compile/snmpc.src b/lib/snmp/src/compile/snmpc.src
new file mode 100644
index 0000000000..e0734c056e
--- /dev/null
+++ b/lib/snmp/src/compile/snmpc.src
@@ -0,0 +1,381 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2008-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%% SNMP MIB compiler frontend
+%%
+
+-mode(compile).
+
+-include_lib("kernel/include/file.hrl").
+-include_lib("snmp/include/snmp_types.hrl").
+
+
+-record(state,
+ {
+ version = "%VSN%",
+ mfv,
+ file, % .mib or .bin depending on which are compiled
+ outdir = "./",
+ db = volatile,
+ include_dirs = ["./"],
+ include_lib_dirs = [],
+ deprecated = false,
+ group_check = true,
+ description = false,
+ reference = false,
+ imports = false,
+ module_identity = false,
+ module_compliance = false,
+ agent_capabilities = false,
+ module,
+ no_defaults = false,
+ relaxed_row_name_assign_check = false,
+ %% The default verbosity (silence) will be filled in
+ %% during argument processing.
+ verbosity,
+ warnings = false
+ }).
+
+
+%% ------------------------------------------------------------------------
+%% Valid arguments:
+%% --o Dir [defaults to "./"]
+%% --i Dir [defaults to "./"]
+%% --il Dir
+%% --sgc
+%% --db DB [defaults to volatile]
+%% --dep
+%% --desc
+%% --ref
+%% --imp
+%% --mi
+%% --mc
+%% --ac
+%% --mod Mod
+%% --nd
+%% --rrnac
+%% --version
+%% --verbosity V
+%% --warnings
+main(Args) when is_list(Args) ->
+ case (catch process_args(Args)) of
+ ok ->
+ usage();
+ {ok, State} when is_record(State, state) ->
+ compile(State);
+ {ok, Str} when is_list(Str) ->
+ io:format("~s~n~n", [Str]),
+ halt(1);
+ {error, ReasonStr} ->
+ usage(ReasonStr)
+ end;
+main(_) ->
+ usage().
+
+compile(State) ->
+ %% io:format("snmpc: ~p~n", [State]),
+ case mk_file(State) of
+ {mib, File} ->
+ Options = mk_mib_options(State),
+ case mib2bin(File, Options) of
+ {ok, _BinFileName} ->
+ ok;
+ {error, Reason} ->
+ io:format("ERROR: Failed compiling mib: "
+ "~n ~p~n", [Reason]),
+ halt(1)
+ end;
+ {bin, File} ->
+ Options = mk_hrl_options(State),
+ case bin2hrl(File, Options) of
+ ok ->
+ ok;
+ {error, Reason} ->
+ io:format("ERROR: Failed generating hrl from mib: "
+ "~n ~p~n", [Reason]),
+ halt(1)
+ end
+ end.
+
+mib2bin(MibFileName, Options) ->
+ snmpc:compile(MibFileName, Options).
+
+bin2hrl(BinFileName, {OutDir, Verbosity}) ->
+ MibName = filename:basename(BinFileName),
+ BinFile = BinFileName ++ ".bin",
+ HrlFile = filename:join(OutDir, MibName) ++ ".hrl",
+ put(verbosity, Verbosity),
+ snmpc_mib_to_hrl:convert(BinFile, HrlFile, MibName).
+
+
+mk_file(#state{file = MIB}) ->
+ DirName = filename:dirname(MIB),
+ case filename:extension(MIB) of
+ ".mib" ->
+ BaseName = filename:basename(MIB, ".mib"),
+ {mib, filename:join(DirName, BaseName)};
+ ".bin" ->
+ BaseName = filename:basename(MIB, ".bin"),
+ {bin, filename:join(DirName, BaseName)};
+ BadExt ->
+ e(lists:flatten(io_lib:format("Unsupported file type: ~s", [BadExt])))
+ end.
+
+mk_mib_options(#state{outdir = OutDir,
+ db = DB,
+ include_dirs = IDs,
+ include_lib_dirs = ILDs,
+ deprecated = Dep,
+ group_check = GC,
+ description = Desc,
+ reference = Ref,
+ imports = Imp,
+ module_identity = MI,
+ module_compliance = MC,
+ agent_capabilities = AC,
+ module = Mod,
+ no_defaults = ND,
+ relaxed_row_name_assign_check = RRNAC,
+ %% The default verbosity (silence) will be filled in
+ %% during argument processing.
+ verbosity = V,
+ warnings = W}) ->
+ [{outdir, OutDir},
+ {db, DB},
+ {i, IDs},
+ {il, ILDs},
+ {group_check, GC},
+ {verbosity, V},
+ {warnings, W},
+ {deprecated, Dep}] ++
+ if
+ (Mod =/= undefined) ->
+ [{module, Mod}];
+ true ->
+ []
+ end ++
+ maybe_option(ND, no_defs) ++
+ maybe_option(RRNAC, relaxed_row_name_assign_check) ++
+ maybe_option(Desc, description) ++
+ maybe_option(Ref, reference) ++
+ maybe_option(Imp, imports) ++
+ maybe_option(MI, module_identity) ++
+ maybe_option(MC, module_compliance) ++
+ maybe_option(AC, agent_capabilities).
+
+maybe_option(true, Opt) -> [Opt];
+maybe_option(_, _) -> [].
+
+
+mk_hrl_options(#state{outdir = OutDir,
+ verbosity = Verbosity}) ->
+ {OutDir, Verbosity}.
+
+
+process_args([]) ->
+ e("No input file");
+process_args(Args) ->
+ #mib{mib_format_version = MFV} = #mib{},
+ State = #state{},
+ process_args(Args, State#state{mfv = MFV}).
+
+process_args([], #state{verbosity = Verbosity0, file = MIB} = State) ->
+ if
+ (MIB =:= undefined) ->
+ e("No input file");
+ true ->
+ Verbosity =
+ case Verbosity0 of
+ undefined ->
+ silence;
+ _ ->
+ Verbosity0
+ end,
+ IPath = lists:reverse(State#state.include_dirs),
+ IlPath = lists:reverse(State#state.include_lib_dirs),
+ {ok, State#state{verbosity = Verbosity,
+ include_dirs = IPath,
+ include_lib_dirs = IlPath}}
+ end;
+process_args(["--help"|_Args], _State) ->
+ ok;
+process_args(["--version"|_Args], #state{version = Version, mfv = MFV} = _State) ->
+ {ok, lists:flatten(io_lib:format("snmpc ~s (~s)", [Version, MFV]))};
+process_args(["--verbosity", Verbosity0|Args], #state{verbosity = V} = State)
+ when (V =:= undefined) ->
+ Verbosity = list_to_atom(Verbosity0),
+ case lists:member(Verbosity, [trace,debug,log,info,silence]) of
+ true ->
+ process_args(Args, State#state{verbosity = Verbosity});
+ false ->
+ e(lists:flatten(io_lib:format("Unknown verbosity: ~s", [Verbosity0])))
+ end;
+process_args(["--verbosity"|_Args], #state{verbosity = V})
+ when (V =/= undefined) ->
+ e(lists:flatten(io_lib:format("Verbosity already set to ~w", [V])));
+process_args(["--warnings"|Args], State) ->
+ process_args(Args, State#state{warnings = true});
+process_args(["--o", Dir|Args], State) ->
+ case (catch file:read_file_info(Dir)) of
+ {ok, #file_info{type = directory}} ->
+ process_args(Args, State#state{outdir = Dir});
+ {ok, #file_info{type = BadType}} ->
+ e(lists:flatten(io_lib:format("Not a directory: ~p (~w)", [Dir, BadType])));
+ _ ->
+ e(lists:flatten(io_lib:format("Bad directory: ~p", [Dir])))
+ end;
+process_args(["--i", Dir|Args], State) ->
+ case (catch file:read_file_info(Dir)) of
+ {ok, #file_info{type = directory}} ->
+ IPath = [Dir | State#state.include_dirs],
+ process_args(Args, State#state{include_dirs = IPath});
+ {ok, #file_info{type = BadType}} ->
+ e(lists:flatten(io_lib:format("Not a directory: ~p (~w)", [Dir, BadType])));
+ _ ->
+ e(lists:flatten(io_lib:format("Bad directory: ~p", [Dir])))
+ end;
+process_args(["--il", Dir|Args], State) ->
+ case (catch file:read_file_info(Dir)) of
+ {ok, #file_info{type = directory}} ->
+ IlPath = [Dir | State#state.include_lib_dirs],
+ process_args(Args, State#state{include_lib_dirs = IlPath});
+ {ok, #file_info{type = BadType}} ->
+ e(lists:flatten(io_lib:format("Not a directory: ~p (~w)", [Dir, BadType])));
+ _ ->
+ e(lists:flatten(io_lib:format("Bad directory: ~p", [Dir])))
+ end;
+process_args(["--db", DB0|Args], State) ->
+ DB = list_to_atom(DB0),
+ case lists:member(DB, [volatile,persistent,mnesia]) of
+ true ->
+ process_args(Args, State#state{db = DB});
+ false ->
+ e(lists:flatten(io_lib:format("Invalid db: ~s", [DB0])))
+ end;
+process_args(["--dep"|Args], State) ->
+ process_args(Args, State#state{deprecated = true});
+process_args(["--sgc"|Args], State) ->
+ process_args(Args, State#state{group_check = false});
+process_args(["--desc"|Args], State) ->
+ process_args(Args, State#state{description = true});
+process_args(["--ref"|Args], State) ->
+ process_args(Args, State#state{reference = true});
+process_args(["--imp"|Args], State) ->
+ process_args(Args, State#state{imports = true});
+process_args(["--mi"|Args], State) ->
+ process_args(Args, State#state{module_identity = true});
+process_args(["--mod", Module0|Args], #state{module = M} = State)
+ when (M =:= undefined) ->
+ Module = list_to_atom(Module0),
+ process_args(Args, State#state{module = Module});
+process_args(["--mod"|_Args], #state{module = M})
+ when (M =/= undefined) ->
+ e(lists:flatten(io_lib:format("Module already set to ~w", [M])));
+process_args(["--nd"|Args], State) ->
+ process_args(Args, State#state{no_defaults = true});
+process_args(["--rrnac"|Args], State) ->
+ process_args(Args, State#state{relaxed_row_name_assign_check = true});
+process_args([MIB], State) ->
+ Ext = filename:extension(MIB),
+ if
+ ((Ext =:= ".mib") orelse (Ext =:= ".bin")) ->
+ case (catch file:read_file_info(MIB)) of
+ {ok, #file_info{type = regular}} ->
+ process_args([], State#state{file = MIB});
+ {ok, #file_info{type = BadType}} ->
+ e(lists:flatten(io_lib:format("~s not a file: ~w", [MIB, BadType])));
+ {error, enoent} ->
+ e(lists:flatten(io_lib:format("No such file: ~s", [MIB])));
+ _ ->
+ e(lists:flatten(io_lib:format("Bad file: ~s", [MIB])))
+ end;
+ true ->
+ e(lists:flatten(io_lib:format("Unknown option: ~s", [MIB])))
+ end;
+process_args([Arg|Args], _State) when Args =/= [] ->
+ e(lists:flatten(io_lib:format("Unknown option: ~s", [Arg]))).
+
+usage(ReasonStr) ->
+ io:format("ERROR: ~s~n", [ReasonStr]),
+ usage().
+
+usage() ->
+ io:format("Usage: snmpc [options] MIB.mib|MIB.bin"
+ "~nCompile a MIB (.mib -> .bin) or generate an erlang header "
+ "~nfile from a compiled MIB file (.bin -> .hrl)"
+ "~nOptions:"
+ "~n --help - Prints this info."
+ "~n --version - Prints compiler version."
+ "~n --verbosity <verbosity> - Print debug info."
+ "~n verbosity = trace | debug | log | info | silence"
+ "~n Defaults to silence."
+ "~n --warnings - Print warning messages."
+ "~n --o <output dir> - The output dir."
+ "~n Defaults to current working dir."
+ "~n --i <include dir> - Add this dir to the list of dirs that will be"
+ "~n searched for imported (compiled) MIB files."
+ "~n The current workin dir will always be included. "
+ "~n --il <include_lib dir> - Add this dir to the list of dirs that will be"
+ "~n searched for imported (compiled) MIB files."
+ "~n It assumes that the first element in the dir name"
+ "~n correspond to an OTP application. For example snmp/mibs/"
+ "~n The current workin dir and the <snmp-home>/priv/mibs "
+ "~n are always listed last the includ path. "
+ "~n --db <DB> - Database to used for the default instrumentation."
+ "~n Defaults to volatile."
+ "~n --sgc - This option (skip group check), if present, disables "
+ "~n the \"group check\" of the mib compiler. "
+ "~n That is, should the OBJECT-GROUP and the NOTIFICATION-GROUP "
+ "~n macro(s) be checked for correctness or not. "
+ "~n By default the check is done. "
+ "~n --dep - Keep deprecated definition(s)."
+ "~n If not specified the compiler will ignore"
+ "~n deprecated definitions."
+ "~n --desc - The DESCRIPTION field will be included."
+ "~n --ref - The REFERENCE field will be included."
+ "~n --imp - The IMPORTS field will be included."
+ "~n --mi - The MODULE-IDENTITY field will be included."
+ "~n --mc - The MODULE-COMPLIANCE field will be included."
+ "~n --ac - The AGENT-CAPABILITIES field will be included."
+ "~n --mod <module> - The module which implements all the instrumentation"
+ "~n functions. "
+ "~n The name of all instrumentation functions must"
+ "~n be the same as the corresponding managed object"
+ "~n it implements."
+ "~n --nd - The default instrumentation functions will *not* be used"
+ "~n if a managed object have no instrumentation function. "
+ "~n Instead this will be reported as an error, and the "
+ "~n compilation aborts. "
+ "~n --rrnac - This option, if present, specifies that the row name "
+ "~n assign check shall not be done strictly according to"
+ "~n the SMI (which allows only the value 1). "
+ "~n With this option, all values greater than zero is allowed"
+ "~n (>= 1). This means that the error will be converted to "
+ "~n a warning. "
+ "~n By default it is not included, but if this option is "
+ "~n present it will be. "
+ "~n "
+ "~n", []),
+ halt(1).
+
+
+e(Reason) ->
+ throw({error, Reason}).
+
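And a small sketch of the ".bin" branch of the escript above: turning an already compiled MIB into an Erlang header the same way bin2hrl/2 does (paths are hypothetical):

    bin_to_hrl_demo() ->
        %% bin2hrl/2 puts the verbosity in the process dictionary before
        %% converting; silence mirrors the escript's default.
        put(verbosity, silence),
        snmpc_mib_to_hrl:convert("MY-MIB.bin", "./MY-MIB.hrl", "MY-MIB").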
diff --git a/lib/snmp/src/compile/snmpc_lib.erl b/lib/snmp/src/compile/snmpc_lib.erl
index 4490412e84..4f71c47bfa 100644
--- a/lib/snmp/src/compile/snmpc_lib.erl
+++ b/lib/snmp/src/compile/snmpc_lib.erl
@@ -306,7 +306,10 @@ import_mib({{'SNMPv2-TC', ImportsFromMib},Line}) ->
Macros = ['TEXTUAL-CONVENTION'],
import_built_in_loop(ImportsFromMib,Nodes,Types,Macros,'SNMPv2-TC',Line);
import_mib({{'SNMPv2-CONF', ImportsFromMib},Line}) ->
- Macros = ['OBJECT-GROUP','NOTIFICATION-GROUP','MODULE-COMPLIANCE'],
+ Macros = ['OBJECT-GROUP',
+ 'NOTIFICATION-GROUP',
+ 'MODULE-COMPLIANCE',
+ 'AGENT-CAPABILITIES'],
import_built_in_loop(ImportsFromMib,[],[],Macros,'SNMPv2-CONF',Line);
import_mib({{'RFC1155-SMI', ImportsFromMib},Line}) ->
Nodes = [makeInternalNode(internet, [1,3,6,1]),
diff --git a/lib/snmp/src/compile/snmpc_mib_gram.yrl b/lib/snmp/src/compile/snmpc_mib_gram.yrl
index 1957f52936..74b9ddaa25 100644
--- a/lib/snmp/src/compile/snmpc_mib_gram.yrl
+++ b/lib/snmp/src/compile/snmpc_mib_gram.yrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -59,6 +59,7 @@ newtypename
objectidentifier
objectname
objecttypev1
+prodrel
range_num
referpart
size
@@ -79,7 +80,7 @@ revisions
listofdefinitionsv2
mibid
last_updated
-oranization
+organization
contact_info
revision
revision_string
@@ -101,19 +102,31 @@ textualconvention
objectgroup
notificationgroup
modulecompliance
-modulepart
-modules
-module
-modulenamepart
-mandatorypart
-compliancepart
-compliances
-compliance
-compliancegroup
-object
+mc_modulepart
+mc_modules
+mc_module
+mc_modulenamepart
+mc_mandatorypart
+mc_compliancepart
+mc_compliances
+mc_compliance
+mc_compliancegroup
+mc_object
+mc_accesspart
+agentcapabilities
+ac_status
+ac_modulepart
+ac_modules
+ac_module
+ac_modulenamepart
+ac_variationpart
+ac_variations
+ac_variation
+ac_accesspart
+ac_access
+ac_creationpart
syntaxpart
writesyntaxpart
-accesspart
fsyntax
defbitsvalue
defbitsnames
@@ -161,6 +174,12 @@ integer variable atom string quote '{' '}' '::=' ':' '=' ',' '.' '(' ')' ';' '|'
'CONTACT-INFO'
'MODULE-IDENTITY'
'NOTIFICATION-TYPE'
+'PRODUCT-RELEASE'
+'AGENT-CAPABILITIES'
+'INCLUDES'
+'SUPPORTS'
+'VARIATION'
+'CREATION-REQUIRES'
'MODULE-COMPLIANCE'
'OBJECT-GROUP'
'NOTIFICATION-GROUP'
@@ -212,8 +231,8 @@ mib -> mibname 'DEFINITIONS' implies 'BEGIN'
defs = Defs}.
v1orv2 -> moduleidentity listofdefinitionsv2 :
- {v2_mib, ['$1'|lists:reverse('$2')]}.
-v1orv2 -> listofdefinitions : {v1_mib, lists:reverse('$1')}.
+ {v2_mib, ['$1'|lreverse(v1orv2_mod, '$2')]}.
+v1orv2 -> listofdefinitions : {v1_mib, lreverse(v1orv2_list, '$1')}.
definition -> objectidentifier : '$1'.
definition -> objecttypev1 : '$1'.
@@ -231,7 +250,7 @@ imports -> imports_from_one_mib : ['$1'].
imports -> imports_from_one_mib imports : ['$1' | '$2'].
imports_from_one_mib -> listofimports 'FROM' variable :
- {{val('$3'), lists:reverse('$1')}, line_of('$2')}.
+ {{val('$3'), lreverse(imports_from_one_mib, '$1')}, line_of('$2')}.
listofimports -> import_stuff : ['$1'].
listofimports -> listofimports ',' import_stuff : ['$3' | '$1'].
@@ -251,6 +270,8 @@ import_stuff -> 'MODULE-IDENTITY'
: ensure_ver(2,'$1'), {builtin, 'MODULE-IDENTITY'}.
import_stuff -> 'NOTIFICATION-TYPE'
: ensure_ver(2,'$1'), {builtin, 'NOTIFICATION-TYPE'}.
+import_stuff -> 'AGENT-CAPABILITIES'
+ : ensure_ver(2,'$1'), {builtin, 'AGENT-CAPABILITIES'}.
import_stuff -> 'MODULE-COMPLIANCE'
: ensure_ver(2,'$1'), {builtin, 'MODULE-COMPLIANCE'}.
import_stuff -> 'NOTIFICATION-GROUP'
@@ -296,7 +317,7 @@ import_stuff -> 'TAddress'
traptype -> objectname 'TRAP-TYPE' 'ENTERPRISE' objectname varpart
description referpart implies integer :
- Trap = make_trap('$1', '$4', lists:reverse('$5'),
+ Trap = make_trap('$1', '$4', lreverse(traptype, '$5'),
'$6', '$7', val('$9')),
{Trap, line_of('$2')}.
@@ -324,7 +345,7 @@ newtype -> newtypename implies syntax :
{NT, line_of('$2')}.
tableentrydefinition -> newtypename implies 'SEQUENCE' '{' fields '}' :
- Seq = make_sequence('$1', lists:reverse('$5')),
+ Seq = make_sequence('$1', lreverse(tableentrydefinition, '$5')),
{Seq, line_of('$3')}.
% returns: list of {<fieldname>, <asn1_type>}
@@ -408,9 +429,9 @@ variables -> variables ',' objectname : ['$3' | '$1'].
implies -> '::=' : '$1'.
implies -> ':' ':' '=' : w("Sloppy asignment on line ~p", [line_of('$1')]), '$1'.
-descriptionfield -> string : lists:reverse(val('$1')).
+descriptionfield -> string : lreverse(descriptionfield, val('$1')).
descriptionfield -> '$empty' : undefined.
-description -> 'DESCRIPTION' string : lists:reverse(val('$2')).
+description -> 'DESCRIPTION' string : lreverse(description, val('$2')).
description -> '$empty' : undefined.
displaypart -> 'DISPLAY-HINT' string : display_hint('$2') .
@@ -418,7 +439,7 @@ displaypart -> '$empty' : undefined .
% returns: {indexes, undefined}
% | {indexes, IndexList} where IndexList is a list of aliasnames.
-indexpartv1 -> 'INDEX' '{' indextypesv1 '}' : {indexes, lists:reverse('$3')}.
+indexpartv1 -> 'INDEX' '{' indextypesv1 '}' : {indexes, lreverse(indexpartv1, '$3')}.
indexpartv1 -> '$empty' : {indexes, undefined}.
indextypesv1 -> indextypev1 : ['$1'].
@@ -436,14 +457,16 @@ parentintegers -> atom '(' integer ')' parentintegers : [val('$3') | '$5'].
defvalpart -> 'DEFVAL' '{' integer '}' : {defval, val('$3')}.
defvalpart -> 'DEFVAL' '{' atom '}' : {defval, val('$3')}.
defvalpart -> 'DEFVAL' '{' '{' defbitsvalue '}' '}' : {defval, '$4'}.
-defvalpart -> 'DEFVAL' '{' quote atom '}'
- : {defval, make_defval_for_string(line_of('$1'), lists:reverse(val('$3')),
- val('$4'))}.
-defvalpart -> 'DEFVAL' '{' quote variable '}'
- : {defval, make_defval_for_string(line_of('$1'), lists:reverse(val('$3')),
- val('$4'))}.
-defvalpart -> 'DEFVAL' '{' string '}'
- : {defval, lists:reverse(val('$3'))}.
+defvalpart -> 'DEFVAL' '{' quote atom '}' :
+ {defval, make_defval_for_string(line_of('$1'),
+ lreverse(defvalpart_quote_atom, val('$3')),
+ val('$4'))}.
+defvalpart -> 'DEFVAL' '{' quote variable '}' :
+ {defval, make_defval_for_string(line_of('$1'),
+ lreverse(defvalpart_quote_variable, val('$3')),
+ val('$4'))}.
+defvalpart -> 'DEFVAL' '{' string '}' :
+ {defval, lreverse(defvalpart_string, val('$3'))}.
defvalpart -> '$empty' : undefined.
defbitsvalue -> defbitsnames : '$1'.
@@ -461,7 +484,7 @@ accessv1 -> atom: accessv1('$1').
statusv1 -> atom : statusv1('$1').
-referpart -> 'REFERENCE' string : lists:reverse(val('$2')).
+referpart -> 'REFERENCE' string : lreverse(referpart, val('$2')).
referpart -> '$empty' : undefined.
@@ -471,7 +494,7 @@ referpart -> '$empty' : undefined.
%%----------------------------------------------------------------------
moduleidentity -> mibid 'MODULE-IDENTITY'
'LAST-UPDATED' last_updated
- 'ORGANIZATION' oranization
+ 'ORGANIZATION' organization
'CONTACT-INFO' contact_info
'DESCRIPTION' descriptionfield
revisionpart nameassign :
@@ -480,20 +503,20 @@ moduleidentity -> mibid 'MODULE-IDENTITY'
{MI, line_of('$2')}.
mibid -> atom : val('$1').
-last_updated -> string : lists:reverse(val('$1')) .
-oranization -> string : lists:reverse(val('$1')) .
-contact_info -> string : lists:reverse(val('$1')) .
+last_updated -> string : lreverse(last_updated, val('$1')) .
+organization -> string : lreverse(organization, val('$1')) .
+contact_info -> string : lreverse(contact_info, val('$1')) .
revisionpart -> '$empty' : [] .
-revisionpart -> revisions : lists:reverse('$1') .
+revisionpart -> revisions : lreverse(revisionpart, '$1') .
revisions -> revision : ['$1'] .
revisions -> revisions revision : ['$2' | '$1'] .
revision -> 'REVISION' revision_string 'DESCRIPTION' revision_desc :
make_revision('$2', '$4') .
-revision_string -> string : lists:reverse(val('$1')) .
-revision_desc -> string : lists:reverse(val('$1')) .
+revision_string -> string : lreverse(revision_string, val('$1')) .
+revision_desc -> string : lreverse(revision_desc, val('$1')) .
definitionv2 -> objectidentifier : '$1'.
definitionv2 -> objecttypev2 : '$1'.
@@ -505,6 +528,7 @@ definitionv2 -> notification : '$1'.
definitionv2 -> objectgroup : '$1'.
definitionv2 -> notificationgroup : '$1'.
definitionv2 -> modulecompliance : '$1'.
+definitionv2 -> agentcapabilities : '$1'.
listofdefinitionsv2 -> '$empty' : [] .
listofdefinitionsv2 -> listofdefinitionsv2 definitionv2 : ['$2' | '$1'].
@@ -535,46 +559,127 @@ notificationgroup -> objectname 'NOTIFICATION-GROUP' 'NOTIFICATIONS' '{'
{NG, line_of('$2')}.
modulecompliance -> objectname 'MODULE-COMPLIANCE' 'STATUS' statusv2
- description referpart modulepart nameassign :
+ description referpart mc_modulepart nameassign :
+%% io:format("modulecompliance -> "
+%% "~n '$1': ~p"
+%% "~n '$4': ~p"
+%% "~n '$5': ~p"
+%% "~n '$6': ~p"
+%% "~n '$7': ~p"
+%% "~n '$8': ~p"
+%% "~n", ['$1', '$4', '$5', '$6', '$7', '$8']),
MC = make_module_compliance('$1', '$4', '$5', '$6',
'$7', '$8'),
+%% io:format("modulecompliance -> "
+%% "~n MC: ~p"
+%% "~n", [MC]),
{MC, line_of('$2')}.
-modulepart -> '$empty'.
-modulepart -> modules.
-modules -> module.
-modules -> modules module.
+agentcapabilities -> objectname 'AGENT-CAPABILITIES'
+ 'PRODUCT-RELEASE' prodrel
+ 'STATUS' ac_status
+ description referpart ac_modulepart nameassign :
+ AC = make_agent_capabilities('$1', '$4', '$6', '$7',
+ '$8', '$9', '$10'),
+ {AC, line_of('$2')}.
+
+prodrel -> string : lreverse(prodrel, val('$1')).
+
+ac_status -> atom : ac_status('$1').
+
+ac_modulepart -> ac_modules :
+ lreverse(ac_modulepart, '$1').
+ac_modulepart -> '$empty' :
+ [].
+
+ac_modules -> ac_module :
+ ['$1'].
+ac_modules -> ac_module ac_modules :
+ ['$1' | '$2'].
+
+ac_module -> 'SUPPORTS' ac_modulenamepart 'INCLUDES' '{' objects '}' ac_variationpart :
+ make_ac_module('$2', '$5', '$7').
+
+ac_modulenamepart -> mibname : '$1'.
+ac_modulenamepart -> '$empty' : undefined.
+
+ac_variationpart -> '$empty' :
+ [].
+ac_variationpart -> ac_variations :
+ lreverse(ac_variationpart, '$1').
+
+ac_variations -> ac_variation :
+ ['$1'].
+ac_variations -> ac_variation ac_variations :
+ ['$1' | '$2'].
+
+%% ac_variation -> ac_objectvariation.
+%% ac_variation -> ac_notificationvariation.
+
+ac_variation -> 'VARIATION' objectname syntaxpart writesyntaxpart ac_accesspart ac_creationpart defvalpart description :
+ make_ac_variation('$2', '$3', '$4', '$5', '$6', '$7', '$8').
+
+ac_accesspart -> 'ACCESS' ac_access : '$2'.
+ac_accesspart -> '$empty' : undefined.
+
+ac_access -> atom: ac_access('$1').
+
+ac_creationpart -> 'CREATION-REQUIRES' '{' objects '}' :
+ lreverse(ac_creationpart, '$3').
+ac_creationpart -> '$empty' :
+ [].
+
+mc_modulepart -> '$empty' :
+ [].
+mc_modulepart -> mc_modules :
+ lreverse(mc_modulepart, '$1').
+
+mc_modules -> mc_module :
+ ['$1'].
+mc_modules -> mc_module mc_modules :
+ ['$1' | '$2'].
-module -> 'MODULE' modulenamepart mandatorypart compliancepart.
+mc_module -> 'MODULE' mc_modulenamepart mc_mandatorypart mc_compliancepart :
+ make_mc_module('$2', '$3', '$4').
-modulenamepart -> mibname.
-modulenamepart -> '$empty'.
+mc_modulenamepart -> mibname : '$1'.
+mc_modulenamepart -> '$empty' : undefined.
-mandatorypart -> 'MANDATORY-GROUPS' '{' objects '}'.
-mandatorypart -> '$empty'.
+mc_mandatorypart -> 'MANDATORY-GROUPS' '{' objects '}' :
+ lreverse(mc_mandatorypart, '$3').
+mc_mandatorypart -> '$empty' :
+ [].
-compliancepart -> compliances.
-compliancepart -> '$empty'.
+mc_compliancepart -> mc_compliances :
+ lreverse(mc_compliancepart, '$1').
+mc_compliancepart -> '$empty' :
+ [].
-compliances -> compliance.
-compliances -> compliances compliance.
+mc_compliances -> mc_compliance :
+ ['$1'].
+mc_compliances -> mc_compliance mc_compliances :
+ ['$1' | '$2'].
-compliance -> compliancegroup.
-compliance -> object.
+mc_compliance -> mc_compliancegroup :
+ '$1'.
+mc_compliance -> mc_object :
+ '$1'.
-compliancegroup -> 'GROUP' objectname description.
+mc_compliancegroup -> 'GROUP' objectname description :
+ make_mc_compliance_group('$2', '$3').
-object -> 'OBJECT' objectname syntaxpart writesyntaxpart accesspart description.
+mc_object -> 'OBJECT' objectname syntaxpart writesyntaxpart mc_accesspart description :
+ make_mc_object('$2', '$3', '$4', '$5', '$6').
-syntaxpart -> 'SYNTAX' syntax.
-syntaxpart -> '$empty'.
+syntaxpart -> 'SYNTAX' syntax : '$2'.
+syntaxpart -> '$empty' : undefined.
-writesyntaxpart -> 'WRITE-SYNTAX' syntax.
-writesyntaxpart -> '$empty'.
+writesyntaxpart -> 'WRITE-SYNTAX' syntax : '$2'.
+writesyntaxpart -> '$empty' : undefined.
-accesspart -> 'MIN-ACCESS' accessv2.
-accesspart -> '$empty'.
+mc_accesspart -> 'MIN-ACCESS' accessv2 : '$2'.
+mc_accesspart -> '$empty' : undefined.
objecttypev2 -> objectname 'OBJECT-TYPE'
'SYNTAX' syntax
@@ -589,7 +694,7 @@ objecttypev2 -> objectname 'OBJECT-TYPE'
'$11', '$12', Kind, '$15'),
{OT, line_of('$2')}.
-indexpartv2 -> 'INDEX' '{' indextypesv2 '}' : {indexes, lists:reverse('$3')}.
+indexpartv2 -> 'INDEX' '{' indextypesv2 '}' : {indexes, lreverse(indexpartv2, '$3')}.
indexpartv2 -> 'AUGMENTS' '{' entry '}' : {augments, '$3'}.
indexpartv2 -> '$empty' : {indexes, undefined}.
@@ -614,7 +719,7 @@ notification -> objectname 'NOTIFICATION-TYPE' objectspart
Not = make_notification('$1','$3','$5', '$7', '$8', '$9'),
{Not, line_of('$2')}.
-objectspart -> 'OBJECTS' '{' objects '}' : lists:reverse('$3').
+objectspart -> 'OBJECTS' '{' objects '}' : lreverse(objectspart, '$3').
objectspart -> '$empty' : [].
objects -> objectname : ['$1'].
@@ -655,6 +760,14 @@ statusv2(Tok) ->
"syntax error before: " ++ atom_to_list(Else))
end.
+ac_status(Tok) ->
+ case val(Tok) of
+ current -> current;
+ obsolete -> obsolete;
+ Else -> return_error(line_of(Tok),
+ "syntax error before: " ++ atom_to_list(Else))
+ end.
+
accessv1(Tok) ->
case val(Tok) of
'read-only' -> 'read-only';
@@ -676,6 +789,18 @@ accessv2(Tok) ->
"syntax error before: " ++ atom_to_list(Else))
end.
+ac_access(Tok) ->
+ case val(Tok) of
+ 'not-implemented' -> 'not-implemented'; % only for notifications
+ 'accessible-for-notify' -> 'accessible-for-notify';
+ 'read-only' -> 'read-only';
+ 'read-write' -> 'read-write';
+ 'read-create' -> 'read-create';
+ 'write-only' -> 'write-only'; % for backward-compatibility only
+ Else -> return_error(line_of(Tok),
+ "syntax error before: " ++ atom_to_list(Else))
+ end.
+
%% ---------------------------------------------------------------------
%% Various basic record build functions
%% ---------------------------------------------------------------------
@@ -744,14 +869,79 @@ make_notification(Name, Vars, Status, Desc, Ref, NA) ->
reference = Ref,
name_assign = NA}.
-make_module_compliance(Name, Status, Desc, Ref, Mod, NA) ->
+make_agent_capabilities(Name, ProdRel, Status, Desc, Ref, Mods, NA) ->
+ #mc_agent_capabilities{name = Name,
+ product_release = ProdRel,
+ status = Status,
+ description = Desc,
+ reference = Ref,
+ modules = Mods,
+ name_assign = NA}.
+
+make_ac_variation(Name,
+ undefined = _Syntax,
+ undefined = _WriteSyntax,
+ Access,
+ undefined = _Creation,
+ undefined = _DefVal,
+ Desc) ->
+%% io:format("make_ac_variation -> entry with"
+%% "~n Name: ~p"
+%% "~n Access: ~p"
+%% "~n Desc: ~p"
+%% "~n", [Name, Access, Desc]),
+ #mc_ac_notification_variation{name = Name,
+ access = Access,
+ description = Desc};
+
+make_ac_variation(Name, Syntax, WriteSyntax, Access, Creation, DefVal, Desc) ->
+%% io:format("make_ac_variation -> entry with"
+%% "~n Name: ~p"
+%% "~n Syntax: ~p"
+%% "~n WriteSyntax: ~p"
+%% "~n Access: ~p"
+%% "~n Creation: ~p"
+%% "~n DefVal: ~p"
+%% "~n Desc: ~p"
+%% "~n", [Name, Syntax, WriteSyntax, Access, Creation, DefVal, Desc]),
+ #mc_ac_object_variation{name = Name,
+ syntax = Syntax,
+ write_syntax = WriteSyntax,
+ access = Access,
+ creation = Creation,
+ default_value = DefVal,
+ description = Desc}.
+
+make_ac_module(Name, Grps, Var) ->
+ #mc_ac_module{name = Name,
+ groups = Grps,
+ variation = Var}.
+
+
+make_module_compliance(Name, Status, Desc, Ref, Mods, NA) ->
#mc_module_compliance{name = Name,
status = Status,
description = Desc,
reference = Ref,
- module = Mod,
+ modules = Mods,
name_assign = NA}.
+make_mc_module(Name, Mand, Compl) ->
+ #mc_mc_module{name = Name,
+ mandatory = Mand,
+ compliance = Compl}.
+
+make_mc_compliance_group(Name, Desc) ->
+ #mc_mc_compliance_group{name = Name,
+ description = Desc}.
+
+make_mc_object(Name, Syntax, WriteSyntax, Access, Desc) ->
+ #mc_mc_object{name = Name,
+ syntax = Syntax,
+ write_syntax = WriteSyntax,
+ access = Access,
+ description = Desc}.
+
make_object_group(Name, Objs, Status, Desc, Ref, NA) ->
#mc_object_group{name = Name,
objects = Objs,
@@ -968,6 +1158,12 @@ filter_v2imports(_,Type) -> {type, Type}.
w(F, A) ->
?vwarning(F, A).
-%i(F, A) ->
-% io:format("~w:" ++ F ++ "~n", [?MODULE|A]).
+lreverse(_Tag, L) when is_list(L) ->
+ lists:reverse(L);
+lreverse(Tag, X) ->
+ exit({bad_list, Tag, X}).
+
+
+%% i(F, A) ->
+%% io:format("~w:" ++ F ++ "~n", [?MODULE|A]).
diff --git a/lib/snmp/src/compile/snmpc_mib_to_hrl.erl b/lib/snmp/src/compile/snmpc_mib_to_hrl.erl
index 07bd29231b..decc1ce557 100644
--- a/lib/snmp/src/compile/snmpc_mib_to_hrl.erl
+++ b/lib/snmp/src/compile/snmpc_mib_to_hrl.erl
@@ -24,7 +24,8 @@
-include("snmpc_lib.hrl").
%% External exports
--export([convert/1, compile/3]).
+-export([convert/1, convert/3, compile/3]).
+
%%-----------------------------------------------------------------
%% Func: convert/1
diff --git a/lib/snmp/src/compile/snmpc_tok.erl b/lib/snmp/src/compile/snmpc_tok.erl
index 6b99e7ae43..e238b256d0 100644
--- a/lib/snmp/src/compile/snmpc_tok.erl
+++ b/lib/snmp/src/compile/snmpc_tok.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -37,6 +37,8 @@
-export([null_get_line/0, format_error/1, terminate/2, handle_call/3, init/1,
test/0]).
+-include("snmpc_lib.hrl").
+
%%----------------------------------------------------------------------
%% Reserved_words: list of KeyWords. Example: ['IF', 'BEGIN', ..., 'GOTO']
@@ -130,6 +132,10 @@ test() ->
'current','deprecated','not-accessible','obsolete',
'read-create','read-only','read-write', 'IMPORTS', 'FROM',
'MODULE-COMPLIANCE',
+ 'AGENT-CAPABILITIES',
+ 'PRODUCT-RELEASE',
+ 'SUPPORTS',
+ 'INCLUDES',
'DisplayString',
'PhysAddress',
'MacAddress',
@@ -225,6 +231,7 @@ get_all_tokens(Str,Toks) ->
case catch tokenise(Str) of
{error, ErrorInfo} -> {error, ErrorInfo};
{Token, RestChars} when is_tuple(Token) ->
+ %% ?vtrace("get_all_tokens -> Token: ~p", [Token]),
get_all_tokens(RestChars, [Token|Toks])
end.
diff --git a/lib/snmp/test/modules.mk b/lib/snmp/test/modules.mk
index 6a0c3e9481..eacc749b53 100644
--- a/lib/snmp/test/modules.mk
+++ b/lib/snmp/test/modules.mk
@@ -2,7 +2,7 @@
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2004-2010. All Rights Reserved.
+# Copyright Ericsson AB 2004-2011. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -62,6 +62,8 @@ COMPILER_MIB_FILES = \
OTP8574-MIB
MIB_FILES = \
+ AC-TEST-MIB.mib \
+ MC-TEST-MIB.mib \
OLD-SNMPEA-MIB.mib \
OLD-SNMPEA-MIB-v2.mib \
Klas1.mib \
diff --git a/lib/snmp/test/snmp_SUITE.erl b/lib/snmp/test/snmp_SUITE.erl
index 3f6473893b..b6d72da2fa 100644
--- a/lib/snmp/test/snmp_SUITE.erl
+++ b/lib/snmp/test/snmp_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -19,26 +19,14 @@
-module(snmp_SUITE).
--export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, init_per_group/2,end_per_group/2,
+-export([all/0,
+ suite/0,
+ groups/0,
+ init_per_suite/1, end_per_suite/1,
+ init_per_group/2, end_per_group/2,
init_per_testcase/2, end_per_testcase/2
]).
--export([]).
-
--export([
-
-
-
-
-
-
-
-
-
-
-
-
-]).
%%
%% -----
@@ -54,39 +42,43 @@ end_per_testcase(_Case, Config) when is_list(Config) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Top test case
-suite() -> [{ct_hooks,[ts_install_cth]}].
+suite() ->
+ [{ct_hooks, [ts_install_cth]}].
all() ->
-[{group, app}, {group, compiler}, {group, misc},
- {group, agent}, {group, manager}].
+ [{group, app},
+ {group, compiler},
+ {group, misc},
+ {group, agent},
+ {group, manager}].
groups() ->
- [{app, [], [{group, app_test}, {group, appup_test}]},
- {compiler, [], [{group, compiler_test}]},
- {misc, [],
- [{group, conf_test}, {group, pdus_test},
- {group, log_test}, {group, note_store_test}]},
- {agent, [],
- [{group, mibs_test}, {group, nfilter_test},
- {group, agent_test}]},
- {manager, [],
- [{group, manager_config_test},
- {group, manager_user_test}, {group, manager_test}]},
- {app_test, [], [{snmp_app_test, all}]},
- {appup_test, [], [{snmp_appup_test, all}]},
- {compiler_test, [], [{snmp_compiler_test, all}]},
- {conf_test, [], [{snmp_conf_test, all}]},
- {pdus_test, [], [{snmp_pdus_test, all}]},
- {log_test, [], [{snmp_log_test, all}]},
- {note_store_test, [], [{snmp_note_store_test, all}]},
- {mibs_test, [], [{snmp_agent_mibs_test, all}]},
- {nfilter_test, [], [{snmp_agent_nfilter_test, all}]},
- {agent_test, [], [{snmp_agent_test, all}]},
- {manager_config_test, [],
- [{snmp_manager_config_test, all}]},
- {manager_user_test, [],
- [{snmp_manager_user_test, all}]},
- {manager_test, [], [{snmp_manager_test, all}]}].
+ [{app, [], [{group, app_test},
+ {group, appup_test}]},
+ {compiler, [], [{group, compiler_test}]},
+ {misc, [], [{group, conf_test},
+ {group, pdus_test},
+ {group, log_test},
+ {group, note_store_test}]},
+ {agent, [], [{group, mibs_test},
+ {group, nfilter_test},
+ {group, agent_test}]},
+ {manager, [], [{group, manager_config_test},
+ {group, manager_user_test},
+ {group, manager_test}]},
+ {app_test, [], [{snmp_app_test, all}]},
+ {appup_test, [], [{snmp_appup_test, all}]},
+ {compiler_test, [], [{snmp_compiler_test, all}]},
+ {conf_test, [], [{snmp_conf_test, all}]},
+ {pdus_test, [], [{snmp_pdus_test, all}]},
+ {log_test, [], [{snmp_log_test, all}]},
+ {note_store_test, [], [{snmp_note_store_test, all}]},
+ {mibs_test, [], [{snmp_agent_mibs_test, all}]},
+ {nfilter_test, [], [{snmp_agent_nfilter_test, all}]},
+ {agent_test, [], [{snmp_agent_test, all}]},
+ {manager_config_test, [], [{snmp_manager_config_test, all}]},
+ {manager_user_test, [], [{snmp_manager_user_test, all}]},
+ {manager_test, [], [{snmp_manager_test, all}]}].
init_per_suite(Config) ->
Config.
@@ -95,41 +87,8 @@ end_per_suite(_Config) ->
ok.
init_per_group(_GroupName, Config) ->
- Config.
+ Config.
end_per_group(_GroupName, Config) ->
- Config.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+ Config.
diff --git a/lib/snmp/test/snmp_app_test.erl b/lib/snmp/test/snmp_app_test.erl
index 64dd638f83..27a7b4af2e 100644
--- a/lib/snmp/test/snmp_app_test.erl
+++ b/lib/snmp/test/snmp_app_test.erl
@@ -300,6 +300,25 @@ undef_funcs(Config) when is_list(Config) ->
xref:stop(XRef),
analyze_undefined_function_calls(Undefs, Mods, []).
+valid_undef(crypto = CalledMod) ->
+ case (catch CalledMod:version()) of
+ Version when is_list(Version) ->
+ %% The called module was crypto and the version
+ %% function returns a valid value.
+ %% This means that the function is
+ %% actually undefined...
+ true;
+ _ ->
+ %% The called module was crypto but the version
+ %% function does *not* return a valid value.
+ %% This means the crypto was not actually not
+ %% build, which is an case snmp handles.
+ false
+ end;
+valid_undef(_) ->
+ true.
+
+
analyze_undefined_function_calls([], _, []) ->
ok;
analyze_undefined_function_calls([], _, AppUndefs) ->
@@ -312,14 +331,25 @@ analyze_undefined_function_calls([{{Mod, _F, _A}, _C} = AppUndef|Undefs],
{Calling,Called} = AppUndef,
{Mod1,Func1,Ar1} = Calling,
{Mod2,Func2,Ar2} = Called,
- io:format("undefined function call: "
- "~n ~w:~w/~w calls ~w:~w/~w~n",
- [Mod1,Func1,Ar1,Mod2,Func2,Ar2]),
- analyze_undefined_function_calls(Undefs, AppModules,
- [AppUndef|AppUndefs]);
+ %% If the called module is crypto, then we will *not*
+ %% fail if crypto is not built (since crypto is actually
+ %% not built for all platforms)
+ case valid_undef(Mod2) of
+ true ->
+ io:format("undefined function call: "
+ "~n ~w:~w/~w calls ~w:~w/~w~n",
+ [Mod1,Func1,Ar1,Mod2,Func2,Ar2]),
+ analyze_undefined_function_calls(
+ Undefs, AppModules, [AppUndef|AppUndefs]);
+ false ->
+ io:format("skipping ~p (calling ~w:~w/~w)~n",
+ [Mod, Mod2, Func2, Ar2]),
+ analyze_undefined_function_calls(Undefs,
+ AppModules, AppUndefs)
+ end;
false ->
- io:format("dropping ~p~n", [Mod]),
- analyze_undefined_function_calls(Undefs, AppModules, AppUndefs)
+ io:format("dropping ~p~n", [Mod]),
+ analyze_undefined_function_calls(Undefs, AppModules, AppUndefs)
end.
%% This function is used simply to avoid cut-and-paste errors later...
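
The filtering added above hinges on a simple probe: when crypto is actually built, crypto:version() returns a string; when the underlying library is missing, the call fails and the catch turns that failure into a non-list. A minimal standalone sketch of the same probe, using exactly the calls from valid_undef/1 (drop it into any test helper module):

    crypto_is_built() ->
        case catch crypto:version() of
            V when is_list(V) -> true;   % crypto loaded and working
            _                 -> false   % crypto not built on this platform
        end.
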
diff --git a/lib/snmp/test/snmp_appup_test.erl b/lib/snmp/test/snmp_appup_test.erl
index b93f960814..99994a2410 100644
--- a/lib/snmp/test/snmp_appup_test.erl
+++ b/lib/snmp/test/snmp_appup_test.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2003-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2003-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -23,14 +23,17 @@
-module(snmp_appup_test).
-export([
- all/0,groups/0,init_per_group/2,end_per_group/2, init_per_suite/1,
- end_per_suite/1,
+ all/0,
+ groups/0, init_per_group/2, end_per_group/2,
+ init_per_suite/1, end_per_suite/1,
init_per_testcase/2, end_per_testcase/2,
appup_file/1
]).
+-compile({no_auto_import, [error/1]}).
+
-include_lib("common_test/include/ct.hrl").
-include("snmp_test_lib.hrl").
@@ -38,17 +41,20 @@
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
all() ->
-Cases = [appup_file],
- Cases.
+ Cases =
+ [
+ appup_file
+ ],
+ Cases.
groups() ->
[].
init_per_group(_GroupName, Config) ->
- Config.
+ Config.
end_per_group(_GroupName, Config) ->
- Config.
+ Config.
diff --git a/lib/snmp/test/snmp_compiler_test.erl b/lib/snmp/test/snmp_compiler_test.erl
index 98ffaf8bae..2e6020ae7a 100644
--- a/lib/snmp/test/snmp_compiler_test.erl
+++ b/lib/snmp/test/snmp_compiler_test.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2003-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2003-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -37,15 +37,17 @@
%% External exports
%%----------------------------------------------------------------------
-export([
- all/0,groups/0,init_per_group/2,end_per_group/2,
+ all/0,
+ groups/0, init_per_group/2, end_per_group/2,
init_per_testcase/2, end_per_testcase/2,
description/1,
oid_conflicts/1,
imports/1,
module_identity/1,
+ agent_capabilities/1,
+ module_compliance/1,
-
otp_6150/1,
otp_8574/1,
otp_8595/1
@@ -78,7 +80,7 @@ init_per_testcase(_Case, Config) when is_list(Config) ->
MibDir = join(lists:reverse(["snmp_test_data"|RL])),
CompDir = join(Dir, "comp_dir/"),
?line ok = file:make_dir(CompDir),
- [{comp_dir, CompDir},{mib_dir, MibDir}|Config].
+ [{comp_dir, CompDir}, {mib_dir, MibDir} | Config].
end_per_testcase(_Case, Config) when is_list(Config) ->
CompDir = ?config(comp_dir, Config),
@@ -91,17 +93,24 @@ end_per_testcase(_Case, Config) when is_list(Config) ->
%%======================================================================
all() ->
-[description, oid_conflicts, imports, module_identity,
- {group, tickets}].
+ [
+ description,
+ oid_conflicts,
+ imports,
+ module_identity,
+ agent_capabilities,
+ module_compliance,
+ {group, tickets}
+ ].
groups() ->
[{tickets, [], [otp_6150, otp_8574, otp_8595]}].
init_per_group(_GroupName, Config) ->
- Config.
+ Config.
end_per_group(_GroupName, Config) ->
- Config.
+ Config.
@@ -168,6 +177,88 @@ module_identity(Config) when is_list(Config) ->
?SKIP(not_yet_implemented).
+agent_capabilities(suite) ->
+ [];
+agent_capabilities(Config) when is_list(Config) ->
+ put(tname,agent_capabilities),
+ p("starting with Config: ~p~n", [Config]),
+
+ SnmpPrivDir = code:priv_dir(snmp),
+ SnmpMibsDir = join(SnmpPrivDir, "mibs"),
+ OtpMibsPrivDir = code:priv_dir(otp_mibs),
+ OtpMibsMibsDir = join(OtpMibsPrivDir, "mibs"),
+ Dir = ?config(mib_dir, Config),
+ AcMib = join(Dir,"AC-TEST-MIB.mib"),
+ ?line {ok, MibFile1} = snmpc:compile(AcMib, [options,
+ version,
+ {i, [SnmpMibsDir, OtpMibsMibsDir]},
+ {outdir, Dir},
+ {verbosity, trace}]),
+ ?line {ok, Mib1} = snmp_misc:read_mib(MibFile1),
+ ?line {ok, MibFile2} = snmpc:compile(AcMib, [options,
+ version,
+ agent_capabilities,
+ {i, [SnmpMibsDir, OtpMibsMibsDir]},
+ {outdir, Dir},
+ {verbosity, trace}]),
+ ?line {ok, Mib2} = snmp_misc:read_mib(MibFile2),
+ MEDiff = Mib2#mib.mes -- Mib1#mib.mes,
+ %% This is a rather pathetic test, but it is something...
+ io:format("agent_capabilities -> "
+ "~n MEDiff: ~p"
+ "~n Mib1: ~p"
+ "~n Mib2: ~p"
+ "~n", [MEDiff, Mib1, Mib2]),
+ case length(MEDiff) of
+ 2 ->
+ ok;
+ _BadLen ->
+ exit({unexpected_mes, MEDiff})
+ end,
+ ok.
+
+
+module_compliance(suite) ->
+ [];
+module_compliance(Config) when is_list(Config) ->
+ put(tname,module_compliance),
+ p("starting with Config: ~p~n", [Config]),
+
+ SnmpPrivDir = code:priv_dir(snmp),
+ SnmpMibsDir = join(SnmpPrivDir, "mibs"),
+ OtpMibsPrivDir = code:priv_dir(otp_mibs),
+ OtpMibsMibsDir = join(OtpMibsPrivDir, "mibs"),
+ Dir = ?config(mib_dir, Config),
+ AcMib = join(Dir,"MC-TEST-MIB.mib"),
+ ?line {ok, MibFile1} = snmpc:compile(AcMib, [options,
+ version,
+ {i, [SnmpMibsDir, OtpMibsMibsDir]},
+ {outdir, Dir},
+ {verbosity, trace}]),
+ ?line {ok, Mib1} = snmp_misc:read_mib(MibFile1),
+ ?line {ok, MibFile2} = snmpc:compile(AcMib, [options,
+ version,
+ module_compliance,
+ {i, [SnmpMibsDir, OtpMibsMibsDir]},
+ {outdir, Dir},
+ {verbosity, trace}]),
+ ?line {ok, Mib2} = snmp_misc:read_mib(MibFile2),
+ MEDiff = Mib2#mib.mes -- Mib1#mib.mes,
+ %% This is a rather pathetic test, but it is something...
+ io:format("module_compliance -> "
+ "~n MEDiff: ~p"
+ "~n Mib1: ~p"
+ "~n Mib2: ~p"
+ "~n", [MEDiff, Mib1, Mib2]),
+ case length(MEDiff) of
+ 1 ->
+ ok;
+ _BadLen ->
+ exit({unexpected_mes, MEDiff})
+ end,
+ ok.
+
+
otp_6150(suite) ->
[];
otp_6150(Config) when is_list(Config) ->
@@ -256,7 +347,7 @@ LAST-UPDATED \"0005290000Z\"
Ericsson Utvecklings AB
Open System
Box 1505
-SE-125 25 �LVSJ�\"
+SE-125 25 ÄLVSJÖ\"
DESCRIPTION
\" Objects for management \"
diff --git a/lib/snmp/test/snmp_manager_config_test.erl b/lib/snmp/test/snmp_manager_config_test.erl
index a72dd0cc22..e38a99d413 100644
--- a/lib/snmp/test/snmp_manager_config_test.erl
+++ b/lib/snmp/test/snmp_manager_config_test.erl
@@ -169,36 +169,41 @@ all() ->
groups() ->
[{start_and_stop, [],
- [simple_start_and_stop, start_without_mandatory_opts1,
- start_without_mandatory_opts2,
- start_with_all_valid_opts, start_with_unknown_opts,
- start_with_incorrect_opts,
- start_with_invalid_manager_conf_file1,
- start_with_invalid_users_conf_file1,
- start_with_invalid_agents_conf_file1,
- start_with_invalid_usm_conf_file1]},
- {normal_op, [],
- [{group, system}, {group, agents}, {group, users},
- {group, usm_users}, {group, counter},
- {group, stats_counter}]},
- {system, [], [simple_system_op]},
- {users, [],
- [register_user_using_file, register_user_using_function,
- register_user_failed_using_function1]},
- {agents, [],
- [register_agent_using_file,
- register_agent_using_function,
- register_agent_failed_using_function1]},
- {usm_users, [],
- [register_usm_user_using_file,
- register_usm_user_using_function,
- register_usm_user_failed_using_function1,
- update_usm_user_info]},
- {counter, [], [create_and_increment]},
- {stats_counter, [], [stats_create_and_increment]},
- {tickets, [], [otp_7219, {group, otp_8395}]},
- {otp_8395, [],
- [otp_8395_1, otp_8395_2, otp_8395_3, otp_8395_4]}].
+ [simple_start_and_stop,
+ start_without_mandatory_opts1,
+ start_without_mandatory_opts2,
+ start_with_all_valid_opts, start_with_unknown_opts,
+ start_with_incorrect_opts,
+ start_with_invalid_manager_conf_file1,
+ start_with_invalid_users_conf_file1,
+ start_with_invalid_agents_conf_file1,
+ start_with_invalid_usm_conf_file1]},
+ {normal_op, [],
+ [{group, system},
+ {group, agents},
+ {group, users},
+ {group, usm_users},
+ {group, counter},
+ {group, stats_counter}]},
+ {system, [], [simple_system_op]},
+ {users, [],
+ [register_user_using_file,
+ register_user_using_function,
+ register_user_failed_using_function1]},
+ {agents, [],
+ [register_agent_using_file,
+ register_agent_using_function,
+ register_agent_failed_using_function1]},
+ {usm_users, [],
+ [register_usm_user_using_file,
+ register_usm_user_using_function,
+ register_usm_user_failed_using_function1,
+ update_usm_user_info]},
+ {counter, [], [create_and_increment]},
+ {stats_counter, [], [stats_create_and_increment]},
+ {tickets, [], [otp_7219, {group, otp_8395}]},
+ {otp_8395, [],
+ [otp_8395_1, otp_8395_2, otp_8395_3, otp_8395_4]}].
init_per_group(_GroupName, Config) ->
Config.
@@ -816,7 +821,10 @@ start_with_invalid_users_conf_file1(Conf) when is_list(Conf) ->
p("start"),
process_flag(trap_exit, true),
ConfDir = ?config(manager_conf_dir, Conf),
- DbDir = ?config(manager_db_dir, Conf),
+ DbDir = ?config(manager_db_dir, Conf),
+
+ verify_dir_existing(conf, ConfDir),
+ verify_dir_existing(db, DbDir),
Opts = [{versions, [v1]},
{config, [{verbosity, trace}, {dir, ConfDir}, {db_dir, DbDir}]}],
@@ -917,7 +925,10 @@ start_with_invalid_agents_conf_file1(Conf) when is_list(Conf) ->
p("start"),
process_flag(trap_exit, true),
ConfDir = ?config(manager_conf_dir, Conf),
- DbDir = ?config(manager_db_dir, Conf),
+ DbDir = ?config(manager_db_dir, Conf),
+
+ verify_dir_existing(conf, ConfDir),
+ verify_dir_existing(db, DbDir),
Opts = [{versions, [v1]},
{config, [{verbosity, trace}, {dir, ConfDir}, {db_dir, DbDir}]}],
@@ -2022,7 +2033,6 @@ register_usm_user_using_file(Conf) when is_list(Conf) ->
%% --
p("done"),
ok.
-%% ?SKIP(not_yet_implemented).
%%
@@ -2651,9 +2661,21 @@ write_usm_conf2(Dir, Str) ->
write_conf_file(Dir, File, Str) ->
- ?line {ok, Fd} = file:open(filename:join(Dir, File), write),
- ?line ok = io:format(Fd, "~s", [Str]),
- file:close(Fd).
+ case file:open(filename:join(Dir, File), write) of
+ {ok, Fd} ->
+ ?line ok = io:format(Fd, "~s", [Str]),
+ file:close(Fd);
+ {error, Reason} ->
+ Info =
+ [{dir, Dir, case (catch file:read_file_info(Dir)) of
+ {ok, FI} ->
+ FI;
+ _ ->
+ undefined
+ end},
+ {file, File}],
+ exit({failed_writing_conf_file, Info, Reason})
+ end.
maybe_start_crypto() ->
@@ -2679,6 +2701,17 @@ maybe_stop_crypto() ->
%% ------
+verify_dir_existing(DirName, Dir) ->
+ case file:read_file_info(Dir) of
+ {ok, _} ->
+ ok;
+ {error, Reason} ->
+ exit({non_existing_dir, DirName, Dir, Reason})
+ end.
+
+
+%% ------
+
str(X) ->
lists:flatten(io_lib:format("~w", [X])).
diff --git a/lib/snmp/test/snmp_test_data/AC-TEST-MIB.mib b/lib/snmp/test/snmp_test_data/AC-TEST-MIB.mib
new file mode 100644
index 0000000000..58defbe1cf
--- /dev/null
+++ b/lib/snmp/test/snmp_test_data/AC-TEST-MIB.mib
@@ -0,0 +1,131 @@
+--
+-- AC-TEST-MIB.mib
+-- MIB generated by MG-SOFT Visual MIB Builder Version 5.0 Build 250
+-- Tuesday, November 30, 2010 at 23:03:18
+--
+
+ AC-TEST-MIB DEFINITIONS ::= BEGIN
+
+ IMPORTS
+ otpExpr
+ FROM OTP-REG
+ OBJECT-GROUP, AGENT-CAPABILITIES
+ FROM SNMPv2-CONF
+ Integer32, OBJECT-TYPE, MODULE-IDENTITY, OBJECT-IDENTITY
+ FROM SNMPv2-SMI;
+
+
+ acTestModule MODULE-IDENTITY
+ LAST-UPDATED "201011302230Z" -- November 30, 2010 at 22:30 GMT
+ ORGANIZATION
+ "Ac Test Co."
+ CONTACT-INFO
+ DESCRIPTION
+ "Ac Test module."
+ ::= { reg 1 }
+
+
+
+--
+-- Node definitions
+--
+
+ acTest OBJECT-IDENTITY
+ STATUS current
+ DESCRIPTION
+ "Test area."
+ ::= { otpExpr 4321 }
+
+
+ reg OBJECT-IDENTITY
+ STATUS current
+ DESCRIPTION
+ "Registrations."
+ ::= { acTest 1 }
+
+
+ mib OBJECT-IDENTITY
+ STATUS current
+ DESCRIPTION
+ "Objects."
+ ::= { acTest 2 }
+
+
+ someObject OBJECT-TYPE
+ SYNTAX Integer32
+ MAX-ACCESS read-write
+ STATUS current
+ DESCRIPTION
+ "Description."
+ ::= { mib 1 }
+
+
+ oneMore OBJECT-TYPE
+ SYNTAX Integer32
+ MAX-ACCESS read-write
+ STATUS current
+ DESCRIPTION
+ "Description."
+ ::= { mib 2 }
+
+
+ grp OBJECT-IDENTITY
+ STATUS current
+ DESCRIPTION
+ "Groups
+ ."
+ ::= { acTest 3 }
+
+
+ basicGrp OBJECT-GROUP
+ OBJECTS { someObject }
+ STATUS current
+ DESCRIPTION
+ "Basic set of objects."
+ ::= { grp 1 }
+
+
+ allObjects OBJECT-GROUP
+ OBJECTS { someObject, oneMore }
+ STATUS current
+ DESCRIPTION
+ "Complete set."
+ ::= { grp 2 }
+
+
+ cap OBJECT-IDENTITY
+ STATUS current
+ DESCRIPTION
+ "Capabilities."
+ ::= { acTest 5 }
+
+
+ basicAgent AGENT-CAPABILITIES
+ PRODUCT-RELEASE
+ "Product release v1."
+ STATUS current
+ DESCRIPTION
+ "Basic agent."
+ SUPPORTS AC-TEST-MIB
+ INCLUDES { basicGrp }
+ ::= { cap 1 }
+
+
+ fullAgent AGENT-CAPABILITIES
+ PRODUCT-RELEASE
+ "Product release v2."
+ STATUS current
+ DESCRIPTION
+ "Full featured agent."
+ SUPPORTS AC-TEST-MIB
+ INCLUDES { allObjects }
+ ::= { cap 2 }
+
+
+
+ END
+
+--
+-- AC-TEST-MIB.mib
+--
diff --git a/lib/snmp/test/snmp_test_data/MC-TEST-MIB.mib b/lib/snmp/test/snmp_test_data/MC-TEST-MIB.mib
new file mode 100644
index 0000000000..cadaa6f891
--- /dev/null
+++ b/lib/snmp/test/snmp_test_data/MC-TEST-MIB.mib
@@ -0,0 +1,173 @@
+MC-TEST-MIB DEFINITIONS ::= BEGIN
+
+IMPORTS
+ otpExpr
+ FROM OTP-REG
+ MODULE-IDENTITY, OBJECT-TYPE,
+ mib-2, NOTIFICATION-TYPE, OBJECT-IDENTITY
+ FROM SNMPv2-SMI
+ TDomain, TAddress, DisplayString, TEXTUAL-CONVENTION,
+ AutonomousType, RowPointer, TimeStamp,
+ RowStatus, StorageType
+ FROM SNMPv2-TC
+ MODULE-COMPLIANCE, OBJECT-GROUP, NOTIFICATION-GROUP
+ FROM SNMPv2-CONF;
+
+mcTestModule MODULE-IDENTITY
+ LAST-UPDATED "9605160000Z"
+ ORGANIZATION "MC Test Co."
+ CONTACT-INFO
+ DESCRIPTION
+ "MC Test module."
+ ::= { reg 1 }
+
+mcObjects OBJECT IDENTIFIER ::= { mcTestModule 1 }
+
+-- MIB contains one group
+
+mcMisc OBJECT IDENTIFIER ::= { mcObjects 1 }
+mcGeneral OBJECT IDENTIFIER ::= { mcObjects 2 }
+
+
+mcTest OBJECT-IDENTITY
+ STATUS current
+ DESCRIPTION
+ "Test area."
+ ::= { otpExpr 4322 }
+
+
+reg OBJECT-IDENTITY
+ STATUS current
+ DESCRIPTION
+ "Registrations."
+ ::= { mcTest 1 }
+
+
+mcTable OBJECT-TYPE
+ SYNTAX SEQUENCE OF McEntry
+ MAX-ACCESS not-accessible
+ STATUS current
+ DESCRIPTION
+ "This table contains one row per physical entity. There is
+ always at least one row for an 'overall' physical entity."
+ ::= { mcMisc 1 }
+
+mcEntry OBJECT-TYPE
+ SYNTAX McEntry
+ MAX-ACCESS not-accessible
+ STATUS current
+ DESCRIPTION
+ "Table entry..."
+ INDEX { mcIndex }
+ ::= { mcTable 1 }
+
+McEntry ::= SEQUENCE {
+ mcIndex INTEGER,
+ mcName DisplayString,
+ mcStorageType StorageType,
+ mcRowStatus RowStatus
+}
+
+mcIndex OBJECT-TYPE
+ SYNTAX INTEGER
+ MAX-ACCESS not-accessible
+ STATUS current
+ DESCRIPTION
+ "The index for this entry."
+ ::= { mcEntry 1 }
+
+mcName OBJECT-TYPE
+ SYNTAX DisplayString
+ MAX-ACCESS read-only
+ STATUS current
+ DESCRIPTION
+ "Name of... "
+ ::= { mcEntry 2 }
+
+
+mcStorageType OBJECT-TYPE
+ SYNTAX StorageType
+ MAX-ACCESS read-create
+ STATUS current
+ DESCRIPTION
+ "The storage type for this conceptual row."
+ DEFVAL { nonVolatile }
+ ::= { mcEntry 3 }
+
+mcRowStatus OBJECT-TYPE
+ SYNTAX RowStatus
+ MAX-ACCESS read-create
+ STATUS current
+ DESCRIPTION
+ "The status of this conceptual row..."
+ ::= { mcEntry 4 }
+
+
+-- last change time stamp for the whole MIB
+mcTimeStamp OBJECT-TYPE
+ SYNTAX TimeStamp
+ MAX-ACCESS read-only
+ STATUS current
+ DESCRIPTION
+ "The sysUpTime value when of the last time *anything* in the
+ MIB was changed. "
+ ::= { mcGeneral 1 }
+
+-- Entity MIB Trap Definitions
+mcTraps OBJECT IDENTIFIER ::= { mcTestModule 2 }
+mcTrapPrefix OBJECT IDENTIFIER ::= { mcTraps 0 }
+
+mcConfigChange NOTIFICATION-TYPE
+ STATUS current
+ DESCRIPTION
+ "An mcConfigChange trap is sent when the value of
+ entLastChangeTime changes..."
+ ::= { mcTrapPrefix 1 }
+
+-- conformance information
+mcConformance OBJECT IDENTIFIER ::= { mcTestModule 3 }
+
+mcCompliances OBJECT IDENTIFIER ::= { mcConformance 1 }
+mcGroups OBJECT IDENTIFIER ::= { mcConformance 2 }
+
+-- compliance statements
+
+
+mcCompliance MODULE-COMPLIANCE
+ STATUS current
+ DESCRIPTION
+ "The compliance statement for SNMP entities which implement
+ the MC Test MIB."
+ MODULE -- this module
+ MANDATORY-GROUPS { mcGeneralGroup,
+ mcNotificationsGroup }
+ ::= { mcCompliances 1 }
+
+-- MIB groupings
+
+mcGeneralGroup OBJECT-GROUP
+ OBJECTS {
+ mcName,
+ mcStorageType,
+ mcRowStatus,
+ mcTimeStamp
+ }
+ STATUS current
+ DESCRIPTION
+ "The collection of objects which are used to represent
+ general information..."
+ ::= { mcGroups 1 }
+
+mcNotificationsGroup NOTIFICATION-GROUP
+ NOTIFICATIONS { mcConfigChange }
+ STATUS current
+ DESCRIPTION
+ "The collection of notifications..."
+ ::= { mcGroups 2 }
+
+
+END
+
+
+
diff --git a/lib/snmp/test/snmp_test_mgr_misc.erl b/lib/snmp/test/snmp_test_mgr_misc.erl
index ef1ba0b948..fc6dedd96d 100644
--- a/lib/snmp/test/snmp_test_mgr_misc.erl
+++ b/lib/snmp/test/snmp_test_mgr_misc.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -33,6 +33,8 @@
%% internal exports
-export([init_packet/10]).
+-compile({no_auto_import, [error/2]}).
+
-define(SNMP_USE_V3, true).
-include_lib("snmp/include/snmp_types.hrl").
diff --git a/lib/snmp/test/test_config/.gitignore b/lib/snmp/test/test_config/.gitignore
new file mode 100644
index 0000000000..fc2d5dbadf
--- /dev/null
+++ b/lib/snmp/test/test_config/.gitignore
@@ -0,0 +1,19 @@
+# Sys config files (Generated)
+/sys.config
+/sys-agent.config
+/sys-manager.config
+
+# Agent config files (Generated)
+/agent/agent.conf
+/agent/community.conf
+/agent/context.conf
+/agent/notify.conf
+/agent/standard.conf
+/agent/target_addr.conf
+/agent/target_params.conf
+/agent/usm.conf
+/agent/vacm.conf
+
+# Manager config files (Generated)
+/manager/manager.conf
+/manager/usm.conf
diff --git a/lib/snmp/test/test_config/Makefile b/lib/snmp/test/test_config/Makefile
new file mode 100644
index 0000000000..4953de7fe8
--- /dev/null
+++ b/lib/snmp/test/test_config/Makefile
@@ -0,0 +1,199 @@
+#-*-makefile-*- ; force emacs to enter makefile-mode
+
+# %CopyrightBegin%
+#
+# Copyright Ericsson AB 1997-2009. All Rights Reserved.
+#
+# The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved online at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# %CopyrightEnd%
+
+include $(ERL_TOP)/make/target.mk
+include $(ERL_TOP)/make/$(TARGET)/otp.mk
+
+
+# ----------------------------------------------------
+# Application version
+# ----------------------------------------------------
+include ../../vsn.mk
+
+VSN = $(SNMP_VSN)
+
+
+# ----------------------------------------------------
+# Configured variables
+# ----------------------------------------------------
+
+
+# ----------------------------------------------------
+# Target Specs
+# ----------------------------------------------------
+
+include modules.mk
+
+ERL_TARGETS = $(MODULES:%=$(EBIN)/%.$(EMULATOR))
+
+SYS_CONFIG_SRCS = $(SYS_CONFIG_FILES:%=%.src)
+AGENT_CONFIG_SRCS = $(AGENT_CONFIG_FILES:%=%.src)
+MANAGER_CONFIG_SRCS = $(MANAGER_CONFIG_FILES:%=%.src)
+
+CONFIG_FILES = \
+ $(SYS_CONFIG_FILES) \
+ $(AGENT_CONFIG_FILES) \
+ $(MANAGER_CONFIG_FILES)
+
+TARGETS = \
+ $(ERL_TARGETS) \
+ $(CONFIG_FILES)
+
+
+# ----------------------------------------------------
+# Release directory specification
+# ----------------------------------------------------
+ifeq ($(TESTROOT),)
+TESTROOT=/tmp
+endif
+RELSYSDIR = $(TESTROOT)
+
+
+# ----------------------------------------------------
+# FLAGS AND VARIABLES
+# ----------------------------------------------------
+
+EBIN = .
+
+ERL_COMPILE_FLAGS += +'{parse_transform,sys_pre_attributes}' \
+ +'{attribute,insert,app_vsn,$(APP_VSN)}'
+
+ifeq ($(ADDR),)
+ADDR = $(shell erl -noshell -s snmp_test_config ip_address -s init stop)
+endif
+
+ifeq ($(TARGET_NAME_PRE),)
+TARGET_NAME_PRE = $(shell erl -noshell -s snmp_test_config ip_address2 -s init stop)
+endif
+
+ifeq ($(SYS_CONTACT),)
+SYS_CONTACT = [email protected]
+endif
+
+ifeq ($(SYS_LOCATION),)
+SYS_LOCATION = Erlang/OTP
+endif
+
+ifeq ($(SYS_NAME),)
+SYS_NAME = FOO
+endif
+
+ifeq ($(AGENT_ENGINE_ID),)
+AGENT_ENGINE_ID = Agent engine of $(USER)
+endif
+
+ifeq ($(AGENT_USM_ENGINE_ID),)
+AGENT_USM_ENGINE_ID = $(AGENT_ENGINE_ID)
+endif
+
+ifeq ($(MANAGER_ENGINE_ID),)
+MANAGER_ENGINE_ID = Manager engine of $(USER)
+endif
+
+ifeq ($(MANAGER_USM_ENGINE_ID),)
+MANAGER_USM_ENGINE_ID = $(MANAGER_ENGINE_ID)
+endif
+
+
+
+# ----------------------------------------------------
+# Targets
+# ----------------------------------------------------
+
+tests debug opt: $(TARGETS)
+
+clean:
+ rm -f $(CONFIG_FILES)
+ rm -f $(ERL_TARGETS)
+ rm -f core
+
+docs:
+
+%.config: %.config.src
+ @echo "$< -> $@"
+ $(PERL) -p -e 's?%DIR%?$(RELSYSDIR)? ' < $< > $@
+
+agent/%.conf: agent/%.conf.src
+ @echo "$< -> $@"
+ sed -e 's?%ADDR%?$(ADDR)? ' \
+ -e 's?%SYS_CONTACT%?$(SYS_CONTACT)? ' \
+ -e 's?%SYS_LOCATION%?$(SYS_LOCATION)? ' \
+ -e 's?%SYS_NAME%?$(SYS_NAME)? ' \
+ -e 's?%TARGET_NAME_PRE%?$(TARGET_NAME_PRE)? ' \
+ -e 's?%ENGINE_ID%?\"$(AGENT_ENGINE_ID)\"? ' \
+ -e 's?%USM_ENGINE_ID%?\"$(AGENT_USM_ENGINE_ID)\"? ' < $< > $@
+
+manager/%.conf: manager/%.conf.src
+ @echo "$< -> $@"
+ sed -e 's?%ADDR%?$(ADDR)? ' \
+ -e 's?%ENGINE_ID%?\"$(MANAGER_ENGINE_ID)\"? ' \
+ -e 's?%USM_ENGINE_ID%?\"$(MANAGER_USM_ENGINE_ID)\"? ' < $< > $@
+
+
+# ----------------------------------------------------
+# Release Target
+# ----------------------------------------------------
+include $(ERL_TOP)/make/otp_release_targets.mk
+
+release_spec:
+
+release_tests_spec: clean opt
+ $(INSTALL_DIR) $(RELSYSDIR)
+ chmod -f -R u+w $(RELSYSDIR)
+ $(INSTALL_DIR) $(RELSYSDIR)/agent
+ chmod -f -R u+w $(RELSYSDIR)/agent
+ $(INSTALL_DIR) $(RELSYSDIR)/agent/conf
+ chmod -f -R u+w $(RELSYSDIR)/agent/conf
+ $(INSTALL_DIR) $(RELSYSDIR)/agent/db
+ chmod -f -R u+w $(RELSYSDIR)/agent/db
+ $(INSTALL_DIR) $(RELSYSDIR)/agent/log
+ chmod -f -R u+w $(RELSYSDIR)/agent/log
+ $(INSTALL_DIR) $(RELSYSDIR)/manager
+ chmod -f -R u+w $(RELSYSDIR)/manager
+ $(INSTALL_DIR) $(RELSYSDIR)/manager/conf
+ chmod -f -R u+w $(RELSYSDIR)/manager/conf
+ $(INSTALL_DIR) $(RELSYSDIR)/manager/db
+ chmod -f -R u+w $(RELSYSDIR)/manager/db
+ $(INSTALL_DIR) $(RELSYSDIR)/manager/log
+ chmod -f -R u+w $(RELSYSDIR)/manager/log
+ $(INSTALL_DATA) $(SYS_CONFIG_FILES) $(RELSYSDIR)
+ $(INSTALL_DATA) $(AGENT_CONFIG_FILES) $(RELSYSDIR)/agent/conf
+ $(INSTALL_DATA) $(MANAGER_CONFIG_FILES) $(RELSYSDIR)/manager/conf
+
+release_docs_spec:
+
+
+info:
+ @echo ""
+ @echo "RELSYSDIR = $(RELSYSDIR)"
+ @echo ""
+ @echo "SYS_CONFIG_SRCS = $(SYS_CONFIG_SRCS)"
+ @echo "SYS_CONFIG_FILES = $(SYS_CONFIG_FILES)"
+ @echo ""
+ @echo "AGENT_CONFIG_SRCS = $(AGENT_CONFIG_SRCS)"
+ @echo "AGENT_CONFIG_FILES = $(AGENT_CONFIG_FILES)"
+ @echo ""
+ @echo "MANAGER_CONFIG_SRCS = $(MANAGER_CONFIG_SRCS)"
+ @echo "MANAGER_CONFIG_FILES = $(MANAGER_CONFIG_FILES)"
+ @echo ""
+ @echo "ADDR = $(ADDR)"
+ @echo "TARGET_NAME_PRE = $(TARGET_NAME_PRE)"
+ @echo ""
+
+
diff --git a/lib/snmp/test/test_config/agent/agent.conf.src b/lib/snmp/test/test_config/agent/agent.conf.src
new file mode 100644
index 0000000000..1fe95cc72d
--- /dev/null
+++ b/lib/snmp/test/test_config/agent/agent.conf.src
@@ -0,0 +1,19 @@
+%% This file defines the Agent local configuration info
+%% The data is inserted into the snmpEngine* variables defined
+%% in SNMP-FRAMEWORK-MIB, and the intAgent* variables defined
+%% in OTP-SNMPEA-MIB.
+%% Each row is a 2-tuple:
+%% {AgentVariable, Value}.
+%% For example
+%% {intAgentUDPPort, 4000}.
+%% The ip address for the agent is sent as id in traps.
+%% {intAgentIpAddress, [127,42,17,5]}.
+%% {snmpEngineID, "agentEngine"}.
+%% {snmpEngineMaxMessageSize, 484}.
+%%
+
+
+{intAgentUDPPort, 4000}.
+{intAgentIpAddress, %ADDR%}.
+{snmpEngineID, %ENGINE_ID%}.
+{snmpEngineMaxMessageSize, 484}.
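
The Makefile added earlier in this patch generates the concrete config files from these .src templates by sed-substituting the %...% placeholders. With hypothetical values ADDR=[192,168,1,10] and AGENT_ENGINE_ID="agent of tester", the generated agent/agent.conf would come out roughly as:

    {intAgentUDPPort, 4000}.
    {intAgentIpAddress, [192,168,1,10]}.
    {snmpEngineID, "agent of tester"}.
    {snmpEngineMaxMessageSize, 484}.
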
diff --git a/lib/snmp/test/test_config/agent/community.conf.src b/lib/snmp/test/test_config/agent/community.conf.src
new file mode 100644
index 0000000000..8dccb929c9
--- /dev/null
+++ b/lib/snmp/test/test_config/agent/community.conf.src
@@ -0,0 +1,15 @@
+%% This file defines the community info which maps to VACM parameters.
+%% The data is inserted into the snmpCommunityTable defined
+%% in SNMP-COMMUNITY-MIB.
+%% Each row is a 5-tuple:
+%% {CommunityIndex, CommunityName, SecurityName, ContextName, TransportTag}.
+%% For example
+%% {"1", "public", "initial", "", ""}.
+%% {"2", "secret", "secret_name", "", "tag"}.
+%% {"3", "bridge1", "initial", "bridge1", ""}.
+%%
+
+
+{"public", "public", "initial", "", ""}.
+{"all-rights", "all-rights", "all-rights", "", ""}.
+{"standard trap", "standard trap", "initial", "", ""}.
diff --git a/lib/snmp/test/test_config/agent/context.conf.src b/lib/snmp/test/test_config/agent/context.conf.src
new file mode 100644
index 0000000000..ea8b5a97eb
--- /dev/null
+++ b/lib/snmp/test/test_config/agent/context.conf.src
@@ -0,0 +1,14 @@
+%% This file defines the contexts known to the agent.
+%% The data is inserted into the vacmContextTable defined
+%% in SNMP-VIEW-BASED-ACM-MIB.
+%% Each row is a string:
+%% ContextName.
+%%
+%% The empty string is the default context.
+%% For example
+%% "bridge1".
+%% "bridge2".
+%%
+
+
+"".
diff --git a/lib/snmp/test/test_config/agent/notify.conf.src b/lib/snmp/test/test_config/agent/notify.conf.src
new file mode 100644
index 0000000000..164fd25b95
--- /dev/null
+++ b/lib/snmp/test/test_config/agent/notify.conf.src
@@ -0,0 +1,13 @@
+%% This file defines the notification parameters.
+%% The data is inserted into the snmpNotifyTable defined
+%% in SNMP-NOTIFICATION-MIB.
+%% The Name is used as CommunityString for v1 and v2c.
+%% Each row is a 3-tuple:
+%% {Name, Tag, Type}.
+%% For example
+%% {"standard trap", "std_trap", trap}.
+%% {"standard inform", "std_inform", inform}.
+%%
+
+
+{"stadard_trap", "std_trap", trap}.
diff --git a/lib/snmp/test/test_config/agent/standard.conf.src b/lib/snmp/test/test_config/agent/standard.conf.src
new file mode 100644
index 0000000000..31e04e7695
--- /dev/null
+++ b/lib/snmp/test/test_config/agent/standard.conf.src
@@ -0,0 +1,21 @@
+%% This file defines the STANDARD-MIB info.
+%% Each row is a 2-tuple:
+%% {StandardVariable, Value}.
+%% For example
+%% {sysDescr, "Erlang SNMP agent"}.
+%% {sysObjectID, [1,2,3]}.
+%% {sysContact, "[email protected]"}.
+%% {sysName, "test"}.
+%% {sysLocation, "erlang"}.
+%% {sysServices, 72}.
+%% {snmpEnableAuthenTraps, enabled}.
+%%
+
+
+{sysDescr, "Erlang SNMP agent"}.
+{sysObjectID, [1,2,3]}.
+{sysContact, "%SYS_CONTACT%"}.
+{sysLocation, "%SYS_LOCATION%"}.
+{sysServices, 72}.
+{snmpEnableAuthenTraps, disabled}.
+{sysName, "%SYS_NAME%"}.
diff --git a/lib/snmp/test/test_config/agent/target_addr.conf.src b/lib/snmp/test/test_config/agent/target_addr.conf.src
new file mode 100644
index 0000000000..740df74ecf
--- /dev/null
+++ b/lib/snmp/test/test_config/agent/target_addr.conf.src
@@ -0,0 +1,21 @@
+%% This file defines the target address parameters.
+%% The data is inserted into the snmpTargetAddrTable defined
+%% in SNMP-TARGET-MIB, and in the snmpTargetAddrExtTable defined
+%% in SNMP-COMMUNITY-MIB.
+%% Each row is a 10-tuple:
+%% {Name, Ip, Udp, Timeout, RetryCount, TagList, ParamsName, EngineId,
+%% TMask, MaxMessageSize}.
+%% The EngineId value is only used if Inform-Requests are sent to this
+%% target. If Informs are not sent, this value is ignored, and can be
+%% e.g. an empty string. However, if Informs are sent, it is essential
+%% that the value of EngineId matches the value of the target's
+%% actual snmpEngineID.
+%% For example
+%% {"1.2.3.4 v1", [1,2,3,4], 162,
+%% 1500, 3, "std_inform", "otp_v2", "",
+%% [127,0,0,0], 2048}.
+%%
+
+
+{"%TARGET_NAME_PRE% v2", %ADDR%, 5000, 1500, 3, "std_trap", "target_v2", "", [], 2048}.
+{"%TARGET_NAME_PRE% v2.2", %ADDR%, 5000, 1500, 3, "std_inform", "target_v2", "", [], 2048}.
diff --git a/lib/snmp/test/test_config/agent/target_params.conf.src b/lib/snmp/test/test_config/agent/target_params.conf.src
new file mode 100644
index 0000000000..a4a535baa2
--- /dev/null
+++ b/lib/snmp/test/test_config/agent/target_params.conf.src
@@ -0,0 +1,11 @@
+%% This file defines the target parameters.
+%% The data is inserted into the snmpTargetParamsTable defined
+%% in SNMP-TARGET-MIB.
+%% Each row is a 5-tuple:
+%% {Name, MPModel, SecurityModel, SecurityName, SecurityLevel}.
+%% For example
+%% {"target_v3", v3, usm, "", noAuthNoPriv}.
+%%
+
+
+{"target_v2", v2c, v2c, "initial", noAuthNoPriv}.
diff --git a/lib/snmp/test/test_config/agent/usm.conf.src b/lib/snmp/test/test_config/agent/usm.conf.src
new file mode 100644
index 0000000000..0409084048
--- /dev/null
+++ b/lib/snmp/test/test_config/agent/usm.conf.src
@@ -0,0 +1,17 @@
+%% This file defines the security parameters for the user-based
+%% security model.
+%% The data is inserted into the usmUserTable defined
+%% in SNMP-USER-BASED-SM-MIB.
+%% Each row is a 13-tuple:
+%% {EngineID, UserName, SecName, Clone, AuthP, AuthKeyC, OwnAuthKeyC,
+%% PrivP, PrivKeyC, OwnPrivKeyC, Public, AuthKey, PrivKey}.
+%% For example
+%% {"agentEngine", "initial", "initial", zeroDotZero,
+%% usmNoAuthProtocol, "", "", usmNoPrivProtocol, "", "", "",
+%% "", ""}.
+%%
+
+
+{%USM_ENGINE_ID%, "initial", "initial", zeroDotZero, usmHMACMD5AuthProtocol, "", "", usmNoPrivProtocol, "", "", "", [160,66,33,136,178,59,246,214,102,63,131,131,54,14,221,177], ""}.
+{%USM_ENGINE_ID%, "templateMD5", "templateMD5", zeroDotZero, usmHMACMD5AuthProtocol, "", "", usmNoPrivProtocol, "", "", "", [160,66,33,136,178,59,246,214,102,63,131,131,54,14,221,177], ""}.
+{%USM_ENGINE_ID%, "templateSHA", "templateSHA", zeroDotZero, usmHMACSHAAuthProtocol, "", "", usmNoPrivProtocol, "", "", "", [199,94,239,13,229,135,141,77,124,129,65,189,230,240,115,163,239,15,13,242], ""}.
diff --git a/lib/snmp/test/test_config/agent/vacm.conf.src b/lib/snmp/test/test_config/agent/vacm.conf.src
new file mode 100644
index 0000000000..86271443ad
--- /dev/null
+++ b/lib/snmp/test/test_config/agent/vacm.conf.src
@@ -0,0 +1,27 @@
+%% This file defines the Mib Views.
+%% The data is inserted into the vacm* tables defined
+%% in SNMP-VIEW-BASED-ACM-MIB.
+%% Each row is one of 3 tuples; one for each table in the MIB:
+%% {vacmSecurityToGroup, SecModel, SecName, GroupName}.
+%% {vacmAccess, GroupName, Prefix, SecModel, SecLevel, Match, RV, WV, NV}.
+%% {vacmViewTreeFamily, ViewIndex, ViewSubtree, ViewStatus, ViewMask}.
+%% For example
+%% {vacmSecurityToGroup, v2c, "initial", "initial"}.
+%% {vacmSecurityToGroup, usm, "initial", "initial"}.
+%% read/notify access to system
+%% {vacmAccess, "initial", "", any, noAuthNoPriv, exact,
+%% "system", "", "system"}.
+%% {vacmViewTreeFamily, "system", [1,3,6,1,2,1,1], included, null}.
+%% {vacmViewTreeFamily, "exmib", [1,3,6,1,3], included, null}. % for EX1-MIB
+%% {vacmViewTreeFamily, "internet", [1,3,6,1], included, null}.
+%%
+
+
+{vacmSecurityToGroup, v2c, "initial", "initial"}.
+{vacmSecurityToGroup, v2c, "all-rights", "all-rights"}.
+{vacmAccess, "initial", "", any, noAuthNoPriv, exact, "restricted", "", "restricted"}.
+{vacmAccess, "initial", "", usm, authNoPriv, exact, "internet", "internet", "internet"}.
+{vacmAccess, "initial", "", usm, authPriv, exact, "internet", "internet", "internet"}.
+{vacmAccess, "all-rights", "", any, noAuthNoPriv, exact, "internet", "internet", "internet"}.
+{vacmViewTreeFamily, "restricted", [1,3,6,1], included, null}.
+{vacmViewTreeFamily, "internet", [1,3,6,1], included, null}.
diff --git a/lib/snmp/test/test_config/manager/manager.conf.src b/lib/snmp/test/test_config/manager/manager.conf.src
new file mode 100644
index 0000000000..c38a61b13c
--- /dev/null
+++ b/lib/snmp/test/test_config/manager/manager.conf.src
@@ -0,0 +1,16 @@
+%% This file was generated by snmp_config (version-4.9.3) 2007-06-29 13:35:05
+%% This file defines the Manager local configuration info
+%% Each row is a 2-tuple:
+%% {Variable, Value}.
+%% For example
+%% {port, 5000}.
+%% {address, [127,42,17,5]}.
+%% {engine_id, "managerEngine"}.
+%% {max_message_size, 484}.
+%%
+
+
+{port, 5000}.
+{address, %ADDR%}.
+{engine_id, %ENGINE_ID%}.
+{max_message_size, 484}.
diff --git a/lib/snmp/test/test_config/manager/usm.conf.src b/lib/snmp/test/test_config/manager/usm.conf.src
new file mode 100644
index 0000000000..a558c86710
--- /dev/null
+++ b/lib/snmp/test/test_config/manager/usm.conf.src
@@ -0,0 +1,9 @@
+%% This file was generated by snmp_config (version-4.9.3) 2007-06-29 13:35:05
+%% This file defines the usm users the manager handles
+%% Each row is a 6 or 7-tuple:
+%% {EngineID, UserName, AuthP, AuthKey, PrivP, PrivKey}
+%% {EngineID, UserName, SecName, AuthP, AuthKey, PrivP, PrivKey}
+%%
+
+{%USM_ENGINE_ID%, "initial", usmNoAuthProtocol, "", usmNoPrivProtocol, ""}.
+
diff --git a/lib/snmp/test/test_config/modules.mk b/lib/snmp/test/test_config/modules.mk
new file mode 100644
index 0000000000..3d084cef01
--- /dev/null
+++ b/lib/snmp/test/test_config/modules.mk
@@ -0,0 +1,41 @@
+#-*-makefile-*- ; force emacs to enter makefile-mode
+
+# %CopyrightBegin%
+#
+# Copyright Ericsson AB 2004-2010. All Rights Reserved.
+#
+# The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved online at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# %CopyrightEnd%
+
+SYS_CONFIG_FILES = \
+ sys.config \
+ sys-agent.config \
+ sys-manager.config
+
+AGENT_CONFIG_FILES = \
+ agent/agent.conf \
+ agent/community.conf \
+ agent/context.conf \
+ agent/notify.conf \
+ agent/standard.conf \
+ agent/target_addr.conf \
+ agent/target_params.conf \
+ agent/usm.conf \
+ agent/vacm.conf
+
+MANAGER_CONFIG_FILES = \
+ manager/manager.conf \
+ manager/usm.conf
+
+MODULES = \
+ snmp_test_config
diff --git a/lib/snmp/test/test_config/snmp_test_config.erl b/lib/snmp/test/test_config/snmp_test_config.erl
new file mode 100644
index 0000000000..550a276c4c
--- /dev/null
+++ b/lib/snmp/test/test_config/snmp_test_config.erl
@@ -0,0 +1,32 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2002-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(snmp_test_config).
+
+-export([ip_address/0, ip_address2/0]).
+
+ip_address() ->
+ {ok, Hostname} = inet:gethostname(),
+ {ok, Address} = inet:getaddr(Hostname, inet),
+ io:format("~w", [tuple_to_list(Address)]).
+
+ip_address2() ->
+ {ok, Hostname} = inet:gethostname(),
+ {ok, {A1, A2, A3, A4}} = inet:getaddr(Hostname, inet),
+ io:format("~w.~w.~w.~w", [A1, A2, A3, A4]).
diff --git a/lib/snmp/test/test_config/sys-agent.config.src b/lib/snmp/test/test_config/sys-agent.config.src
new file mode 100644
index 0000000000..46a458203d
--- /dev/null
+++ b/lib/snmp/test/test_config/sys-agent.config.src
@@ -0,0 +1,43 @@
+%% This is an example sys config file for starting the snmp application
+%% with only an agent running.
+[{snmp,
+ [
+ {agent,
+ [
+ {priority, normal},
+ {versions, [v1,v2,v3]},
+ {db_dir, "%DIR%/agent/db"},
+ {mib_storage, ets},
+%% {agent_mib_storage, volatile},
+ {agent_mib_storage, persistent},
+ {target_cache, [{verbosity,silence}]},
+ {symbolic_store, [{verbosity,silence}]},
+ {local_db, [{repair,true},{auto_save,5000},{verbosity,silence}]},
+ {error_report_module, snmpa_error_logger},
+ {agent_type, master},
+ {agent_verbosity, trace},
+ {audit_trail_log, [{type, read},
+ {dir, "%DIR%/agent/log"},
+ {size, {10240,10}}]},
+ {config, [{dir, "%DIR%/agent/conf"},
+ {force_load, true},
+ {verbosity, trace}]},
+ {multi_threaded, true},
+ {mib_server, [{mibentry_override, false},
+ {trapentry_override, false},
+ {cache, true},
+ {verbosity, trace}]},
+ {note_store, [{timeout,30000}, {verbosity,silence}]},
+ {supervisor, [{verbosity,silence}]},
+ {net_if, [{module, snmpa_net_if},
+ {verbosity, silence},
+ {options, [{bind_to, true},
+ {no_reuse, false},
+ {req_limit, infinity},
+ {sndbuf, 32000},
+ {recbuf, 32000}]}]}
+ ]
+ }
+ ]
+ }
+].
diff --git a/lib/snmp/test/test_config/sys-manager.config.src b/lib/snmp/test/test_config/sys-manager.config.src
new file mode 100644
index 0000000000..4366263084
--- /dev/null
+++ b/lib/snmp/test/test_config/sys-manager.config.src
@@ -0,0 +1,35 @@
+%% This is an example sys config file for starting the snmp application
+%% with only a manager running.
+[{snmp,
+ [
+ {manager,
+ [
+ {priority, normal},
+ {versions, [v1,v2,v3]},
+ {config, [{dir, "%DIR%/manager/conf"},
+ {verbosity, trace},
+ {db_dir, "%DIR%/manager/db"},
+ {repair, true},
+ {auto_save, 5000}]},
+ {inform_request_behaviour, user},
+ {mibs, []},
+ {server, [{timeout, 30000},
+ {verbosity, trace}]},
+ {note_store, [{timeout,30000},
+ {verbosity,silence}]},
+ {audit_trail_log, [{type, read},
+ {dir, "%DIR%/manager/log"},
+ {size, {10240,10}}]},
+ {net_if, [{module,snmpm_net_if},
+ {verbosity, trace},
+ {options, [{bind_to, true},
+ {no_reuse, false},
+% {sndbuf, 32000},
+ {recbuf, 45000}]}]},
+ {def_user_mod, snmpm_user_default},
+ {def_user_data, undefined}
+ ]
+ }
+ ]
+ }
+].
diff --git a/lib/snmp/test/test_config/sys.config.src b/lib/snmp/test/test_config/sys.config.src
new file mode 100644
index 0000000000..b2cd399883
--- /dev/null
+++ b/lib/snmp/test/test_config/sys.config.src
@@ -0,0 +1,68 @@
+%% This is an example sys config file for starting the snmp application
+%% with both an agent and a manager running.
+[{snmp,
+ [
+ {agent,
+ [
+ {priority, normal},
+ {versions, [v1,v2,v3]},
+ {db_dir, "%DIR%/agent/db"},
+ {mib_storage, ets},
+ {agent_mib_storage, volatile},
+ {target_cache, [{verbosity,silence}]},
+ {symbolic_store, [{verbosity,silence}]},
+ {local_db, [{repair,true},{auto_save,5000},{verbosity,silence}]},
+ {error_report_module, snmpa_error_logger},
+ {agent_type, master},
+ {agent_verbosity, silence},
+ {audit_trail_log, [{type, read},
+ {dir, "%DIR%/agent/log"},
+ {size, {10240,10}}]},
+ {config, [{dir, "%DIR%/agent/conf"},
+ {force_load, true},
+ {verbosity, silence}]},
+ {multi_threaded, false},
+ {mib_server, [{mibentry_override, false},
+ {trapentry_override, false},
+ {verbosity, silence}]},
+ {note_store, [{timeout,30000},{verbosity,silence}]},
+ {net_if, [{module, snmpa_net_if},
+ {verbosity, silence},
+ {options, [{bind_to, true},
+ {no_reuse, false},
+ {req_limit, infinity},
+ {sndbuf, 32000},
+ {recbuf, 32000}]}]}
+ ]
+ },
+ {manager,
+ [
+ {priority, normal},
+ {versions, [v1,v2,v3]},
+ {config, [{dir, "%DIR%/manager/conf"},
+ {verbosity, silence},
+ {db_dir, "%DIR%/manager/db"},
+ {repair, true},
+ {auto_save, 5000}]},
+ {inform_request_behaviour, auto},
+ {mibs, []},
+ {server, [{timeout, 30000},
+ {verbosity, silence}]},
+ {note_store, [{timeout, 30000},
+ {verbosity, silence}]},
+ {audit_trail_log, [{type, read},
+ {dir, "%DIR%/manager/log"},
+ {size, {10240,10}}]},
+ {net_if, [{module,snmpm_net_if},
+ {verbosity, silence},
+ {options, [{bind_to, true},
+ {no_reuse, false},
+ {recbuf, 33000},
+ {sndbuf, 34000}]}]},
+ {def_user_mod, snmpm_user_default},
+ {def_user_data, undefined}
+ ]
+ }
+ ]
+ }
+].
diff --git a/lib/snmp/vsn.mk b/lib/snmp/vsn.mk
index 1229b12ae2..e70c97dcb8 100644
--- a/lib/snmp/vsn.mk
+++ b/lib/snmp/vsn.mk
@@ -1,3 +1,22 @@
-SNMP_VSN = 4.18
+#-*-makefile-*- ; force emacs to enter makefile-mode
+
+# %CopyrightBegin%
+#
+# Copyright Ericsson AB 1997-2011. All Rights Reserved.
+#
+# The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved online at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# %CopyrightEnd%
+
+SNMP_VSN = 4.19
PRE_VSN =
APP_VSN = "snmp-$(SNMP_VSN)$(PRE_VSN)"
diff --git a/lib/ssl/doc/src/ssl.xml b/lib/ssl/doc/src/ssl.xml
index daf7b77527..cd5c9281cd 100644
--- a/lib/ssl/doc/src/ssl.xml
+++ b/lib/ssl/doc/src/ssl.xml
@@ -269,6 +269,13 @@ fun(OtpCert :: #'OTPCertificate'{}, Event :: {bad_cert, Reason :: atom()} |
<p> {bad_cert, cert_expired}, {bad_cert, invalid_issuer}, {bad_cert, invalid_signature}, {bad_cert, unknown_ca}, {bad_cert, name_not_permitted}, {bad_cert, missing_basic_constraint}, {bad_cert, invalid_key_usage}</p>
</item>
+ <tag>{hibernate_after, integer()|undefined}</tag>
+ <item>When an integer value is specified, the <code>ssl_connection</code>
+ process will go into hibernation after the specified number of milliseconds
+ of inactivity, thus reducing its memory footprint. When
+ <code>undefined</code> is specified (the default), the process
+ will never go into hibernation.
+ </item>
</taglist>
</section>
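As a usage illustration of the option documented above, a minimal client-side sketch; the host and port are placeholders and any certificate-related options are omitted:

    %% Sketch: a client connection that hibernates after one second of
    %% inactivity. Host and port are placeholders.
    {ok, Sock} = ssl:connect("server.example.com", 443,
                             [{hibernate_after, 1000}]),
    ok = ssl:send(Sock, <<"ping">>).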
diff --git a/lib/ssl/src/inet_ssl_dist.erl b/lib/ssl/src/inet_ssl_dist.erl
index b10aa76246..6c0fbc0618 100644
--- a/lib/ssl/src/inet_ssl_dist.erl
+++ b/lib/ssl/src/inet_ssl_dist.erl
@@ -1,8 +1,8 @@
-%%<copyright>
-%% <year>2000-2008</year>
-%% <holder>Ericsson AB, All Rights Reserved</holder>
-%%</copyright>
-%%<legalnotice>
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2000-2011. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
@@ -14,8 +14,9 @@
%% the License for the specific language governing rights and limitations
%% under the License.
%%
-%% The Initial Developer of the Original Code is Ericsson AB.
-%%</legalnotice>
+%% %CopyrightEnd%
+%%
+
%%
-module(inet_ssl_dist).
@@ -137,7 +138,7 @@ accept_connection(AcceptPid, Socket, MyNode, Allowed, SetupTime) ->
%% Suppress dialyzer warning, we do not really care about old ssl code
%% as we intend to remove it.
--spec(do_accept/6 :: (_,_,_,_,_,_) -> no_return()).
+-spec(do_accept(_,_,_,_,_,_) -> no_return()).
do_accept(Kernel, AcceptPid, Socket, MyNode, Allowed, SetupTime) ->
process_flag(priority, max),
receive
@@ -170,8 +171,8 @@ do_accept(Kernel, AcceptPid, Socket, MyNode, Allowed, SetupTime) ->
ssl_prim:getll(S)
end,
f_address = fun get_remote_id/2,
- mf_tick = {?MODULE, tick},
- mf_getstat = {?MODULE,getstat}
+ mf_tick = fun ?MODULE:tick/1,
+ mf_getstat = fun ?MODULE:getstat/1
},
dist_util:handshake_other_started(HSData);
{false,IP} ->
@@ -209,7 +210,7 @@ setup(Node, Type, MyNode, LongOrShortNames,SetupTime) ->
%% Suppress dialyzer warning, we do not really care about old ssl code
%% as we intend to remove it.
--spec(do_setup/6 :: (_,_,_,_,_,_) -> no_return()).
+-spec(do_setup(_,_,_,_,_,_) -> no_return()).
do_setup(Kernel, Node, Type, MyNode, LongOrShortNames,SetupTime) ->
process_flag(priority, max),
?trace("~p~n",[{inet_ssl_dist,self(),setup,Node}]),
@@ -264,8 +265,8 @@ do_setup(Kernel, Node, Type, MyNode, LongOrShortNames,SetupTime) ->
protocol = ssl,
family = inet}
end,
- mf_tick = {?MODULE, tick},
- mf_getstat = {?MODULE,getstat},
+ mf_tick = fun ?MODULE:tick/1,
+ mf_getstat = fun ?MODULE:getstat/1,
request_type = Type
},
dist_util:handshake_we_started(HSData);
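The mf_tick and mf_getstat changes above replace the legacy {Module, Function} tuple form with explicit external funs of arity 1. A quick sketch of the difference, independent of the distribution handshake itself:

    %% fun M:F/A yields a real fun of the stated arity, unlike the old
    %% {M, F} tuple form, which relied on deprecated tuple-fun calls.
    Tick = fun inet_ssl_dist:tick/1,
    true = is_function(Tick, 1).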
diff --git a/lib/ssl/src/ssl.appup.src b/lib/ssl/src/ssl.appup.src
index e6a8c557fc..d3e426f254 100644
--- a/lib/ssl/src/ssl.appup.src
+++ b/lib/ssl/src/ssl.appup.src
@@ -1,12 +1,14 @@
%% -*- erlang -*-
{"%VSN%",
[
+ {"4.1.3", [{restart_application, ssl}]},
{"4.1.2", [{restart_application, ssl}]},
{"4.1.1", [{restart_application, ssl}]},
{"4.1", [{restart_application, ssl}]},
{"4.0.1", [{restart_application, ssl}]}
],
[
+ {"4.1.3", [{restart_application, ssl}]},
{"4.1.2", [{restart_application, ssl}]},
{"4.1.1", [{restart_application, ssl}]},
{"4.1", [{restart_application, ssl}]},
diff --git a/lib/ssl/src/ssl.erl b/lib/ssl/src/ssl.erl
index 65b081937f..3512e194bc 100644
--- a/lib/ssl/src/ssl.erl
+++ b/lib/ssl/src/ssl.erl
@@ -60,7 +60,7 @@
{keyfile, path()} | {password, string()} | {cacerts, [der_encoded()]} |
{cacertfile, path()} | {dh, der_encoded()} | {dhfile, path()} |
{ciphers, ciphers()} | {ssl_imp, ssl_imp()} | {reuse_sessions, boolean()} |
- {reuse_session, fun()}.
+ {reuse_session, fun()} | {hibernate_after, integer()|undefined}.
-type verify_type() :: verify_none | verify_peer.
-type path() :: string().
@@ -72,8 +72,8 @@
%%--------------------------------------------------------------------
--spec start() -> ok.
--spec start(permanent | transient | temporary) -> ok.
+-spec start() -> ok | {error, reason()}.
+-spec start(permanent | transient | temporary) -> ok | {error, reason()}.
%%
%% Description: Utility function that starts the ssl,
%% crypto and public_key applications. Default type
@@ -711,7 +711,8 @@ handle_options(Opts0, _Role) ->
reuse_sessions = handle_option(reuse_sessions, Opts, true),
secure_renegotiate = handle_option(secure_renegotiate, Opts, false),
renegotiate_at = handle_option(renegotiate_at, Opts, ?DEFAULT_RENEGOTIATE_AT),
- debug = handle_option(debug, Opts, [])
+ debug = handle_option(debug, Opts, []),
+ hibernate_after = handle_option(hibernate_after, Opts, undefined)
},
CbInfo = proplists:get_value(cb_info, Opts, {gen_tcp, tcp, tcp_closed, tcp_error}),
@@ -720,7 +721,7 @@ handle_options(Opts0, _Role) ->
depth, cert, certfile, key, keyfile,
password, cacerts, cacertfile, dh, dhfile, ciphers,
debug, reuse_session, reuse_sessions, ssl_imp,
- cb_info, renegotiate_at, secure_renegotiate],
+ cb_info, renegotiate_at, secure_renegotiate, hibernate_after],
SockOpts = lists:foldl(fun(Key, PropList) ->
proplists:delete(Key, PropList)
@@ -827,6 +828,10 @@ validate_option(renegotiate_at, Value) when is_integer(Value) ->
validate_option(debug, Value) when is_list(Value); Value == true ->
Value;
+validate_option(hibernate_after, undefined) ->
+ undefined;
+validate_option(hibernate_after, Value) when is_integer(Value), Value >= 0 ->
+ Value;
validate_option(Opt, Value) ->
throw({error, {eoptions, {Opt, Value}}}).
diff --git a/lib/ssl/src/ssl_connection.erl b/lib/ssl/src/ssl_connection.erl
index 489895cf29..574e1e9468 100644
--- a/lib/ssl/src/ssl_connection.erl
+++ b/lib/ssl/src/ssl_connection.erl
@@ -289,9 +289,9 @@ start_link(Role, Host, Port, Socket, Options, User, CbInfo) ->
%% gen_fsm callbacks
%%====================================================================
%%--------------------------------------------------------------------
--spec init(list()) -> {ok, state_name(), #state{}} | {stop, term()}.
+-spec init(list()) -> {ok, state_name(), #state{}, timeout()} | {stop, term()}.
%% Possible return values not used now.
-%% | {ok, state_name(), #state{}, timeout()} |
+%% | {ok, state_name(), #state{}} |
%% ignore
%% Description:Whenever a gen_fsm is started using gen_fsm:start/[3,4] or
%% gen_fsm:start_link/3,4, this function is called by the new process to
@@ -311,7 +311,7 @@ init([Role, Host, Port, Socket, {SSLOpts0, _} = Options,
session_cache = CacheRef,
private_key = Key,
diffie_hellman_params = DHParams},
- {ok, hello, State}
+ {ok, hello, State, get_timeout(State)}
catch
throw:Error ->
{stop, Error}
@@ -412,6 +412,9 @@ hello(Hello = #client_hello{client_version = ClientVersion},
{stop, normal, State}
end;
+hello(timeout, State) ->
+ { next_state, hello, State, hibernate };
+
hello(Msg, State) ->
handle_unexpected_message(Msg, hello, State).
%%--------------------------------------------------------------------
@@ -460,6 +463,9 @@ abbreviated(#finished{verify_data = Data} = Finished,
{stop, normal, State}
end;
+abbreviated(timeout, State) ->
+ { next_state, abbreviated, State, hibernate };
+
abbreviated(Msg, State) ->
handle_unexpected_message(Msg, abbreviated, State).
@@ -582,6 +588,9 @@ certify(#client_key_exchange{exchange_keys = Keys},
{stop, normal, State}
end;
+certify(timeout, State) ->
+ { next_state, certify, State, hibernate };
+
certify(Msg, State) ->
handle_unexpected_message(Msg, certify, State).
@@ -664,6 +673,9 @@ cipher(#finished{verify_data = Data} = Finished,
{stop, normal, State}
end;
+cipher(timeout, State) ->
+ { next_state, cipher, State, hibernate };
+
cipher(Msg, State) ->
handle_unexpected_message(Msg, cipher, State).
@@ -693,6 +705,9 @@ connection(#hello_request{}, #state{host = Host, port = Port,
connection(#client_hello{} = Hello, #state{role = server} = State) ->
hello(Hello, State);
+connection(timeout, State) ->
+ {next_state, connection, State, hibernate};
+
connection(Msg, State) ->
handle_unexpected_message(Msg, connection, State).
%%--------------------------------------------------------------------
@@ -705,7 +720,7 @@ connection(Msg, State) ->
%% the event. Not currently used!
%%--------------------------------------------------------------------
handle_event(_Event, StateName, State) ->
- {next_state, StateName, State}.
+ {next_state, StateName, State, get_timeout(State)}.
%%--------------------------------------------------------------------
-spec handle_sync_event(term(), from(), state_name(), #state{}) ->
@@ -736,7 +751,8 @@ handle_sync_event({application_data, Data0}, From, connection,
{Msgs, [], ConnectionStates} ->
Result = Transport:send(Socket, Msgs),
{reply, Result,
- connection, State#state{connection_states = ConnectionStates}};
+ connection, State#state{connection_states = ConnectionStates},
+ get_timeout(State)};
{Msgs, RestData, ConnectionStates} ->
if
Msgs =/= [] ->
@@ -749,12 +765,14 @@ handle_sync_event({application_data, Data0}, From, connection,
renegotiation = {true, internal}})
end
catch throw:Error ->
- {reply, Error, connection, State}
+ {reply, Error, connection, State, get_timeout(State)}
end;
handle_sync_event({application_data, Data}, From, StateName,
#state{send_queue = Queue} = State) ->
%% In renegotiation priorities handshake, send data when handshake is finished
- {next_state, StateName, State#state{send_queue = queue:in({From, Data}, Queue)}};
+ {next_state, StateName,
+ State#state{send_queue = queue:in({From, Data}, Queue)},
+ get_timeout(State)};
handle_sync_event(start, From, hello, State) ->
hello(start, State#state{from = From});
@@ -768,9 +786,9 @@ handle_sync_event(start, From, hello, State) ->
%% here to make sure it is the users problem and not owers if
%% they upgrade a active socket.
handle_sync_event(start, _, connection, State) ->
- {reply, connected, connection, State};
+ {reply, connected, connection, State, get_timeout(State)};
handle_sync_event(start, From, StateName, State) ->
- {next_state, StateName, State#state{from = From}};
+ {next_state, StateName, State#state{from = From}, get_timeout(State)};
handle_sync_event(close, _, StateName, State) ->
%% Run terminate before returning
@@ -796,7 +814,7 @@ handle_sync_event({shutdown, How0}, _, StateName,
case Transport:shutdown(Socket, How0) of
ok ->
- {reply, ok, StateName, State};
+ {reply, ok, StateName, State, get_timeout(State)};
Error ->
{stop, normal, Error, State}
end;
@@ -807,30 +825,33 @@ handle_sync_event({recv, N}, From, connection = StateName, State0) ->
%% Doing renegotiate wait with handling request until renegotiate is
%% finished. Will be handled by next_state_connection/2.
handle_sync_event({recv, N}, From, StateName, State) ->
- {next_state, StateName, State#state{bytes_to_read = N, from = From,
- recv_during_renegotiation = true}};
+ {next_state, StateName,
+ State#state{bytes_to_read = N, from = From,
+ recv_during_renegotiation = true},
+ get_timeout(State)};
handle_sync_event({new_user, User}, _From, StateName,
State =#state{user_application = {OldMon, _}}) ->
NewMon = erlang:monitor(process, User),
erlang:demonitor(OldMon, [flush]),
- {reply, ok, StateName, State#state{user_application = {NewMon,User}}};
+ {reply, ok, StateName, State#state{user_application = {NewMon,User}},
+ get_timeout(State)};
handle_sync_event({get_opts, OptTags}, _From, StateName,
#state{socket = Socket,
socket_options = SockOpts} = State) ->
OptsReply = get_socket_opts(Socket, OptTags, SockOpts, []),
- {reply, OptsReply, StateName, State};
+ {reply, OptsReply, StateName, State, get_timeout(State)};
handle_sync_event(sockname, _From, StateName,
#state{socket = Socket} = State) ->
SockNameReply = inet:sockname(Socket),
- {reply, SockNameReply, StateName, State};
+ {reply, SockNameReply, StateName, State, get_timeout(State)};
handle_sync_event(peername, _From, StateName,
#state{socket = Socket} = State) ->
PeerNameReply = inet:peername(Socket),
- {reply, PeerNameReply, StateName, State};
+ {reply, PeerNameReply, StateName, State, get_timeout(State)};
handle_sync_event({set_opts, Opts0}, _From, StateName,
#state{socket_options = Opts1,
@@ -840,27 +861,27 @@ handle_sync_event({set_opts, Opts0}, _From, StateName,
State1 = State0#state{socket_options = Opts},
if
Opts#socket_options.active =:= false ->
- {reply, ok, StateName, State1};
+ {reply, ok, StateName, State1, get_timeout(State1)};
Buffer =:= <<>>, Opts1#socket_options.active =:= false ->
%% Need data, set active once
{Record, State2} = next_record_if_active(State1),
case next_state(StateName, Record, State2) of
- {next_state, StateName, State} ->
- {reply, ok, StateName, State};
+ {next_state, StateName, State, Timeout} ->
+ {reply, ok, StateName, State, Timeout};
{stop, Reason, State} ->
{stop, Reason, State}
end;
Buffer =:= <<>> ->
%% Active once already set
- {reply, ok, StateName, State1};
+ {reply, ok, StateName, State1, get_timeout(State1)};
true ->
case application_data(<<>>, State1) of
Stop = {stop,_,_} ->
Stop;
{Record, State2} ->
case next_state(StateName, Record, State2) of
- {next_state, StateName, State} ->
- {reply, ok, StateName, State};
+ {next_state, StateName, State, Timeout} ->
+ {reply, ok, StateName, State, Timeout};
{stop, Reason, State} ->
{stop, Reason, State}
end
@@ -871,7 +892,7 @@ handle_sync_event(renegotiate, From, connection, State) ->
renegotiate(State#state{renegotiation = {true, From}});
handle_sync_event(renegotiate, _, StateName, State) ->
- {reply, {error, already_renegotiating}, StateName, State};
+ {reply, {error, already_renegotiating}, StateName, State, get_timeout(State)};
handle_sync_event(info, _, StateName,
#state{negotiated_version = Version,
@@ -879,19 +900,19 @@ handle_sync_event(info, _, StateName,
AtomVersion = ssl_record:protocol_version(Version),
{reply, {ok, {AtomVersion, ssl_cipher:suite_definition(Suite)}},
- StateName, State};
+ StateName, State, get_timeout(State)};
handle_sync_event(session_info, _, StateName,
#state{session = #session{session_id = Id,
cipher_suite = Suite}} = State) ->
{reply, [{session_id, Id},
{cipher_suite, ssl_cipher:suite_definition(Suite)}],
- StateName, State};
+ StateName, State, get_timeout(State)};
handle_sync_event(peer_certificate, _, StateName,
#state{session = #session{peer_certificate = Cert}}
= State) ->
- {reply, {ok, Cert}, StateName, State}.
+ {reply, {ok, Cert}, StateName, State, get_timeout(State)}.
%%--------------------------------------------------------------------
-spec handle_info(msg(),state_name(), #state{}) ->
@@ -955,7 +976,7 @@ handle_info({'DOWN', MonitorRef, _, _, _}, _,
handle_info(Msg, StateName, State) ->
Report = io_lib:format("SSL: Got unexpected info: ~p ~n", [Msg]),
error_logger:info_report(Report),
- {next_state, StateName, State}.
+ {next_state, StateName, State, get_timeout(State)}.
%%--------------------------------------------------------------------
-spec terminate(reason(), state_name(), #state{}) -> term().
@@ -1086,13 +1107,13 @@ init_private_key({rsa, PrivateKey}, _, _,_) ->
init_private_key({dsa, PrivateKey},_,_,_) ->
public_key:der_decode('DSAPrivateKey', PrivateKey).
--spec(handle_file_error/6 :: (_,_,_,_,_,_) -> no_return()).
+-spec(handle_file_error(_,_,_,_,_,_) -> no_return()).
handle_file_error(Line, Error, {badmatch, Reason}, File, Throw, Stack) ->
file_error(Line, Error, Reason, File, Throw, Stack);
handle_file_error(Line, Error, Reason, File, Throw, Stack) ->
file_error(Line, Error, Reason, File, Throw, Stack).
--spec(file_error/6 :: (_,_,_,_,_,_) -> no_return()).
+-spec(file_error(_,_,_,_,_,_) -> no_return()).
file_error(Line, Error, Reason, File, Throw, Stack) ->
Report = io_lib:format("SSL: ~p: ~p:~p ~s~n ~p~n",
[Line, Error, Reason, File, Stack]),
@@ -1412,8 +1433,6 @@ key_exchange(#state{role = client,
State#state{connection_states = ConnectionStates1,
tls_handshake_hashes = Hashes1}.
--spec(rsa_key_exchange/2 :: (_,_) -> no_return()).
-
rsa_key_exchange(PremasterSecret, PublicKeyInfo = {Algorithm, _, _})
when Algorithm == ?rsaEncryption;
Algorithm == ?md2WithRSAEncryption;
@@ -1780,7 +1799,7 @@ handle_tls_handshake(Handle, StateName, #state{tls_packets = [Packet]} = State)
handle_tls_handshake(Handle, StateName, #state{tls_packets = [Packet | Packets]} = State0) ->
FsmReturn = {next_state, StateName, State0#state{tls_packets = Packets}},
case Handle(Packet, FsmReturn) of
- {next_state, NextStateName, State} ->
+ {next_state, NextStateName, State, _Timeout} ->
handle_tls_handshake(Handle, NextStateName, State);
{stop, _,_} = Stop ->
Stop
@@ -1791,11 +1810,11 @@ next_state(_, #alert{} = Alert, #state{negotiated_version = Version} = State) ->
{stop, normal, State};
next_state(Next, no_record, State) ->
- {next_state, Next, State};
+ {next_state, Next, State, get_timeout(State)};
next_state(Next, #ssl_tls{type = ?ALERT, fragment = EncAlerts}, State) ->
Alerts = decode_alerts(EncAlerts),
- handle_alerts(Alerts, {next_state, Next, State});
+ handle_alerts(Alerts, {next_state, Next, State, get_timeout(State)});
next_state(StateName, #ssl_tls{type = ?HANDSHAKE, fragment = Data},
State0 = #state{tls_handshake_buffer = Buf0, negotiated_version = Version}) ->
@@ -2046,7 +2065,7 @@ handle_alerts([], Result) ->
handle_alerts(_, {stop, _, _} = Stop) ->
%% If it is a fatal alert immediately close
Stop;
-handle_alerts([Alert | Alerts], {next_state, StateName, State}) ->
+handle_alerts([Alert | Alerts], {next_state, StateName, State, _Timeout}) ->
handle_alerts(Alerts, handle_alert(Alert, StateName, State)).
handle_alert(#alert{level = ?FATAL} = Alert, StateName,
@@ -2227,3 +2246,8 @@ linux_workaround_transport_delivery_problems(#alert{level = ?FATAL}, Socket) ->
end;
linux_workaround_transport_delivery_problems(_, _) ->
ok.
+
+get_timeout(#state{ssl_options=#ssl_options{hibernate_after=undefined}}) ->
+ infinity;
+get_timeout(#state{ssl_options=#ssl_options{hibernate_after=HibernateAfter}}) ->
+ HibernateAfter.
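The changes above follow the usual gen_fsm idiom for hibernation: every return value carries a timeout derived from hibernate_after, and a timeout event in any state returns hibernate as the fourth element. A minimal standalone sketch of that pattern, with a hypothetical module name and the hibernate_after value used as the state data:

    -module(hib_fsm_sketch).
    -behaviour(gen_fsm).

    -export([start_link/1, idle/2]).
    -export([init/1, handle_event/3, handle_sync_event/4,
             handle_info/3, terminate/3, code_change/4]).

    start_link(HibernateAfter) ->
        gen_fsm:start_link(?MODULE, HibernateAfter, []).

    init(HibernateAfter) ->
        %% Return a timeout so that inactivity produces a 'timeout' event.
        {ok, idle, HibernateAfter, get_timeout(HibernateAfter)}.

    %% Inactivity longer than HibernateAfter triggers this clause;
    %% returning 'hibernate' shrinks the process until the next event.
    idle(timeout, HibernateAfter) ->
        {next_state, idle, HibernateAfter, hibernate};
    idle(_Event, HibernateAfter) ->
        {next_state, idle, HibernateAfter, get_timeout(HibernateAfter)}.

    handle_event(_Event, StateName, State) ->
        {next_state, StateName, State, get_timeout(State)}.

    handle_sync_event(_Event, _From, StateName, State) ->
        {reply, ok, StateName, State, get_timeout(State)}.

    handle_info(_Info, StateName, State) ->
        {next_state, StateName, State, get_timeout(State)}.

    terminate(_Reason, _StateName, _State) -> ok.
    code_change(_OldVsn, StateName, State, _Extra) -> {ok, StateName, State}.

    get_timeout(undefined) -> infinity;
    get_timeout(HibernateAfter) when is_integer(HibernateAfter) -> HibernateAfter.

After HibernateAfter milliseconds without traffic, process_info(Pid, current_function) would report {erlang, hibernate, 3}, which is exactly what the hibernate test case added to ssl_basic_SUITE later in this diff asserts.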
diff --git a/lib/ssl/src/ssl_internal.hrl b/lib/ssl/src/ssl_internal.hrl
index 715941e3ad..c28daa271e 100644
--- a/lib/ssl/src/ssl_internal.hrl
+++ b/lib/ssl/src/ssl_internal.hrl
@@ -98,7 +98,11 @@
reuse_sessions, % boolean()
renegotiate_at,
secure_renegotiate,
- debug %
+ debug,
+ hibernate_after % undefined if not hibernating,
+ % or number of ms of inactivity
+ % after which ssl_connection will
+ % go into hibernation
}).
-record(socket_options,
diff --git a/lib/ssl/test/Makefile b/lib/ssl/test/Makefile
index 823401c863..fd3b6d06ad 100644
--- a/lib/ssl/test/Makefile
+++ b/lib/ssl/test/Makefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 1999-2010. All Rights Reserved.
+# Copyright Ericsson AB 1999-2011. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -60,6 +60,7 @@ ERL_FILES = $(MODULES:%=%.erl)
HRL_FILES = ssl_test_MACHINE.hrl
HRL_FILES_SRC = \
+ ssl_int.hrl \
ssl_alert.hrl \
ssl_handshake.hrl
diff --git a/lib/ssl/test/old_ssl_dist_SUITE.erl b/lib/ssl/test/old_ssl_dist_SUITE.erl
index 6a072c9d98..4544fb616a 100644
--- a/lib/ssl/test/old_ssl_dist_SUITE.erl
+++ b/lib/ssl/test/old_ssl_dist_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -62,9 +62,15 @@ end_per_group(_GroupName, Config) ->
init_per_suite(Config) ->
- add_ssl_opts_config(Config).
+ try crypto:start() of
+ ok ->
+ add_ssl_opts_config(Config)
+ catch _:_ ->
+ {skip, "Crypto did not start"}
+ end.
end_per_suite(Config) ->
+ application:stop(crypto),
Config.
init_per_testcase(Case, Config) when list(Config) ->
diff --git a/lib/ssl/test/old_transport_accept_SUITE.erl b/lib/ssl/test/old_transport_accept_SUITE.erl
index 21ee0690b1..6f0c8e456b 100644
--- a/lib/ssl/test/old_transport_accept_SUITE.erl
+++ b/lib/ssl/test/old_transport_accept_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -57,9 +57,15 @@ groups() ->
[].
init_per_suite(Config) ->
- Config.
+ try crypto:start() of
+ ok ->
+ Config
+ catch _:_ ->
+ {skip, "Crypto did not start"}
+ end.
end_per_suite(_Config) ->
+ application:stop(crypto),
ok.
init_per_group(_GroupName, Config) ->
diff --git a/lib/ssl/test/ssl_basic_SUITE.erl b/lib/ssl/test/ssl_basic_SUITE.erl
index 87d5fc8d71..4f0907027f 100644
--- a/lib/ssl/test/ssl_basic_SUITE.erl
+++ b/lib/ssl/test/ssl_basic_SUITE.erl
@@ -29,6 +29,7 @@
-include_lib("public_key/include/public_key.hrl").
-include("ssl_alert.hrl").
+-include("ssl_int.hrl").
-define('24H_in_sec', 86400).
-define(TIMEOUT, 60000).
@@ -48,7 +49,7 @@
%%--------------------------------------------------------------------
init_per_suite(Config0) ->
Dog = ssl_test_lib:timetrap(?LONG_TIMEOUT *2),
- case application:start(crypto) of
+ try crypto:start() of
ok ->
application:start(public_key),
ssl:start(),
@@ -61,8 +62,8 @@ init_per_suite(Config0) ->
Config1 = ssl_test_lib:make_dsa_cert(Config0),
Config = ssl_test_lib:cert_options(Config1),
- [{watchdog, Dog} | Config];
- _ ->
+ [{watchdog, Dog} | Config]
+ catch _:_ ->
{skip, "Crypto did not start"}
end.
%%--------------------------------------------------------------------
@@ -250,7 +251,9 @@ all() ->
unknown_server_ca_accept_backwardscompatibilty,
%%different_ca_peer_sign,
no_reuses_session_server_restart_new_cert,
- no_reuses_session_server_restart_new_cert_file, reuseaddr].
+ no_reuses_session_server_restart_new_cert_file, reuseaddr,
+ hibernate
+ ].
groups() ->
[].
@@ -3319,6 +3322,45 @@ reuseaddr(Config) when is_list(Config) ->
ssl_test_lib:close(Client1).
%%--------------------------------------------------------------------
+
+hibernate(doc) ->
+ ["Check that an SSL connection that is started with option "
+ "{hibernate_after, 1000} indeed hibernates after 1000ms of "
+ "inactivity"];
+
+hibernate(suite) ->
+ [];
+
+hibernate(Config) ->
+ ClientOpts = ?config(client_opts, Config),
+ ServerOpts = ?config(server_opts, Config),
+
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {?MODULE, send_recv_result_active, []}},
+ {options, ServerOpts}]),
+ Port = ssl_test_lib:inet_port(Server),
+ {Client, #sslsocket{pid=Pid}} = ssl_test_lib:start_client([return_socket,
+ {node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {mfa, {?MODULE, send_recv_result_active, []}},
+ {options, [{hibernate_after, 1000}|ClientOpts]}]),
+
+ { current_function, { _M, _F, _A } } =
+ process_info(Pid, current_function),
+
+ timer:sleep(1100),
+
+ { current_function, { erlang, hibernate, 3} } =
+ process_info(Pid, current_function),
+
+ ssl_test_lib:close(Server),
+ ssl_test_lib:close(Client).
+
+%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
send_recv_result(Socket) ->
diff --git a/lib/ssl/test/ssl_packet_SUITE.erl b/lib/ssl/test/ssl_packet_SUITE.erl
index 1ecf55d6e8..d22d5d2954 100644
--- a/lib/ssl/test/ssl_packet_SUITE.erl
+++ b/lib/ssl/test/ssl_packet_SUITE.erl
@@ -53,7 +53,7 @@
%% variable, but should NOT alter/remove any existing entries.
%%--------------------------------------------------------------------
init_per_suite(Config) ->
- case application:start(crypto) of
+ try crypto:start() of
ok ->
application:start(public_key),
ssl:start(),
@@ -61,8 +61,8 @@ init_per_suite(Config) ->
(catch make_certs:all(?config(data_dir, Config),
?config(priv_dir, Config))),
test_server:format("Make certs ~p~n", [Result]),
- ssl_test_lib:cert_options(Config);
- _ ->
+ ssl_test_lib:cert_options(Config)
+ catch _:_ ->
{skip, "Crypto did not start"}
end.
%%--------------------------------------------------------------------
diff --git a/lib/ssl/test/ssl_payload_SUITE.erl b/lib/ssl/test/ssl_payload_SUITE.erl
index f57d7fa0e8..24e86b3913 100644
--- a/lib/ssl/test/ssl_payload_SUITE.erl
+++ b/lib/ssl/test/ssl_payload_SUITE.erl
@@ -37,13 +37,13 @@
%% variable, but should NOT alter/remove any existing entries.
%%--------------------------------------------------------------------
init_per_suite(Config) ->
- case application:start(crypto) of
+ try crypto:start() of
ok ->
application:start(public_key),
ssl:start(),
make_certs:all(?config(data_dir, Config), ?config(priv_dir, Config)),
- ssl_test_lib:cert_options(Config);
- _ ->
+ ssl_test_lib:cert_options(Config)
+ catch _:_ ->
{skip, "Crypto did not start"}
end.
%%--------------------------------------------------------------------
diff --git a/lib/ssl/test/ssl_session_cache_SUITE.erl b/lib/ssl/test/ssl_session_cache_SUITE.erl
index b47efe0941..a43b9ab586 100644
--- a/lib/ssl/test/ssl_session_cache_SUITE.erl
+++ b/lib/ssl/test/ssl_session_cache_SUITE.erl
@@ -47,7 +47,7 @@
%%--------------------------------------------------------------------
init_per_suite(Config0) ->
Dog = ssl_test_lib:timetrap(?LONG_TIMEOUT *2),
- case application:start(crypto) of
+ try crypto:start() of
ok ->
application:start(public_key),
ssl:start(),
@@ -60,8 +60,8 @@ init_per_suite(Config0) ->
Config1 = ssl_test_lib:make_dsa_cert(Config0),
Config = ssl_test_lib:cert_options(Config1),
- [{watchdog, Dog} | Config];
- _ ->
+ [{watchdog, Dog} | Config]
+ catch _:_ ->
{skip, "Crypto did not start"}
end.
diff --git a/lib/ssl/test/ssl_test_lib.erl b/lib/ssl/test/ssl_test_lib.erl
index f6ccbe85e3..8c639aa312 100644
--- a/lib/ssl/test/ssl_test_lib.erl
+++ b/lib/ssl/test/ssl_test_lib.erl
@@ -128,12 +128,14 @@ remove_close_msg(ReconnectTimes) ->
remove_close_msg(ReconnectTimes -1)
end.
-
start_client(Args) ->
- Result = spawn_link(?MODULE, run_client, [Args]),
+ Result = spawn_link(?MODULE, run_client, [lists:delete(return_socket, Args)]),
receive
- connected ->
- Result
+ { connected, Socket } ->
+ case lists:member(return_socket, Args) of
+ true -> { Result, Socket };
+ false -> Result
+ end
end.
run_client(Opts) ->
@@ -145,7 +147,7 @@ run_client(Opts) ->
test_server:format("ssl:connect(~p, ~p, ~p)~n", [Host, Port, Options]),
case rpc:call(Node, ssl, connect, [Host, Port, Options]) of
{ok, Socket} ->
- Pid ! connected,
+ Pid ! { connected, Socket },
test_server:format("Client: connected~n", []),
%% In specail cases we want to know the client port, it will
%% be indicated by sending {port, 0} in options list!
diff --git a/lib/ssl/test/ssl_to_openssl_SUITE.erl b/lib/ssl/test/ssl_to_openssl_SUITE.erl
index 4ab8fe3273..64a6a9eaf8 100644
--- a/lib/ssl/test/ssl_to_openssl_SUITE.erl
+++ b/lib/ssl/test/ssl_to_openssl_SUITE.erl
@@ -50,7 +50,7 @@ init_per_suite(Config0) ->
false ->
{skip, "Openssl not found"};
_ ->
- case application:start(crypto) of
+ try crypto:start() of
ok ->
application:start(public_key),
ssl:start(),
@@ -60,8 +60,8 @@ init_per_suite(Config0) ->
test_server:format("Make certs ~p~n", [Result]),
Config1 = ssl_test_lib:make_dsa_cert(Config0),
Config = ssl_test_lib:cert_options(Config1),
- [{watchdog, Dog} | Config];
- _ ->
+ [{watchdog, Dog} | Config]
+ catch _:_ ->
{skip, "Crypto did not start"}
end
end.
diff --git a/lib/ssl/vsn.mk b/lib/ssl/vsn.mk
index a4be7bb889..2f1edfa186 100644
--- a/lib/ssl/vsn.mk
+++ b/lib/ssl/vsn.mk
@@ -1,2 +1 @@
-
-SSL_VSN = 4.1.3
+SSL_VSN = 4.1.4
diff --git a/lib/stdlib/doc/src/calendar.xml b/lib/stdlib/doc/src/calendar.xml
index 36f0c03162..f90d8308b6 100644
--- a/lib/stdlib/doc/src/calendar.xml
+++ b/lib/stdlib/doc/src/calendar.xml
@@ -63,6 +63,14 @@
given as local time, they must be converted to universal time, in
order to get the correct value of the elapsed time between epochs.
Use of the function <c>time_difference/2</c> is discouraged.</p>
+ <p>There exist different definitions for the week of the year.
+ This module contains a week-of-the-year implementation that conforms
+ to the ISO 8601 standard. Since the week number for a given date can
+ belong to the previous, the current, or the next year, it is important
+ to supply the year together with the week number. The functions
+ <c>iso_week_number/0</c> and <c>iso_week_number/1</c> therefore return
+ a tuple of the year and the week number.</p>
</description>
<section>
@@ -154,6 +162,30 @@ time() = {Hour, Minute, Second}
</desc>
</func>
<func>
+ <name>iso_week_number() -> IsoWeekNumber</name>
+ <fsummary>Compute the iso week number for the current date</fsummary>
+ <type>
+ <v>IsoWeekNumber = {int(), int()}</v>
+ </type>
+ <desc>
+ <p>This function returns the tuple {Year, WeekNum} representing
+ the iso week number for the current date. The current date is
+ obtained by calling <c>local_time/0</c>.</p>
+ </desc>
+ </func>
+ <func>
+ <name>iso_week_number(Date) -> IsoWeekNumber</name>
+ <fsummary>Compute the iso week number for the given date</fsummary>
+ <type>
+ <v>Date = date()</v>
+ <v>IsoWeekNumber = {int(), int()}</v>
+ </type>
+ <desc>
+ <p>This function returns the tuple {Year, WeekNum} representing
+ the iso week number for the given date.</p>
+ </desc>
+ </func>
+ <func>
<name>last_day_of_the_month(Year, Month) -> int()</name>
<fsummary>Compute the number of days in a month</fsummary>
<desc>
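Results for the three cases described above, taken from the accompanying calendar_SUITE test further down in this diff:

    %% A date in the last ISO week of the previous year, a date in a week
    %% of the same year, and a date in the first ISO week of the next year.
    {2004, 53} = calendar:iso_week_number({2005, 1, 1}),
    {2007, 1}  = calendar:iso_week_number({2007, 1, 1}),
    {2009, 1}  = calendar:iso_week_number({2008, 12, 29}).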
diff --git a/lib/stdlib/doc/src/dict.xml b/lib/stdlib/doc/src/dict.xml
index ebcd2eed09..1695e9d14f 100644
--- a/lib/stdlib/doc/src/dict.xml
+++ b/lib/stdlib/doc/src/dict.xml
@@ -165,8 +165,8 @@ dictionary()
<v>Dict = dictionary()</v>
</type>
<desc>
- <p>This function converts the key/value list <c>List</c> to a
- dictionary.</p>
+ <p>This function converts the <c>Key</c> - <c>Value</c> list
+ <c>List</c> to a dictionary.</p>
</desc>
</func>
<func>
@@ -270,7 +270,7 @@ merge(Fun, D1, D2) ->
<v>Dict1 = Dict2 = dictionary()</v>
</type>
<desc>
- <p>Update the a value in a dictionary by calling <c>Fun</c> on
+ <p>Update a value in a dictionary by calling <c>Fun</c> on
the value to get a new value. An exception is generated if
<c>Key</c> is not present in the dictionary.</p>
</desc>
@@ -285,7 +285,7 @@ merge(Fun, D1, D2) ->
<v>Dict1 = Dict2 = dictionary()</v>
</type>
<desc>
- <p>Update the a value in a dictionary by calling <c>Fun</c> on
+ <p>Update a value in a dictionary by calling <c>Fun</c> on
the value to get a new value. If <c>Key</c> is not present
in the dictionary then <c>Initial</c> will be stored as
the first value. For example <c>append/3</c> could be defined
diff --git a/lib/stdlib/doc/src/orddict.xml b/lib/stdlib/doc/src/orddict.xml
index 08c808f822..9d036f0725 100644
--- a/lib/stdlib/doc/src/orddict.xml
+++ b/lib/stdlib/doc/src/orddict.xml
@@ -172,8 +172,8 @@ ordered_dictionary()
<v>Orddict = ordered_dictionary()</v>
</type>
<desc>
- <p>This function converts the key/value list <c>List</c> to a
- dictionary.</p>
+ <p>This function converts the <c>Key</c> - <c>Value</c> list
+ <c>List</c> to a dictionary.</p>
</desc>
</func>
<func>
@@ -277,7 +277,7 @@ merge(Fun, D1, D2) ->
<v>Orddict1 = Orddict2 = ordered_dictionary()</v>
</type>
<desc>
- <p>Update the a value in a dictionary by calling <c>Fun</c> on
+ <p>Update a value in a dictionary by calling <c>Fun</c> on
the value to get a new value. An exception is generated if
<c>Key</c> is not present in the dictionary.</p>
</desc>
@@ -292,7 +292,7 @@ merge(Fun, D1, D2) ->
<v>Orddict1 = Orddict2 = ordered_dictionary()</v>
</type>
<desc>
- <p>Update the a value in a dictionary by calling <c>Fun</c> on
+ <p>Update a value in a dictionary by calling <c>Fun</c> on
the value to get a new value. If <c>Key</c> is not present
in the dictionary then <c>Initial</c> will be stored as
the first value. For example <c>append/3</c> could be defined
diff --git a/lib/stdlib/src/calendar.erl b/lib/stdlib/src/calendar.erl
index ddc0666f77..57b7c28dee 100644
--- a/lib/stdlib/src/calendar.erl
+++ b/lib/stdlib/src/calendar.erl
@@ -28,6 +28,8 @@
gregorian_days_to_date/1,
gregorian_seconds_to_datetime/1,
is_leap_year/1,
+ iso_week_number/0,
+ iso_week_number/1,
last_day_of_the_month/2,
local_time/0,
local_time_to_universal_time/1,
@@ -70,6 +72,7 @@
-type second() :: 0..59.
-type daynum() :: 1..7.
-type ldom() :: 28 | 29 | 30 | 31. % last day of month
+-type weeknum() :: 1..53.
-type t_now() :: {non_neg_integer(),non_neg_integer(),non_neg_integer()}.
@@ -77,6 +80,7 @@
-type t_time() :: {hour(),minute(),second()}.
-type t_datetime() :: {t_date(),t_time()}.
-type t_datetime1970() :: {{year1970(),month(),day()},t_time()}.
+-type t_yearweeknum() :: {year(),weeknum()}.
%%----------------------------------------------------------------------
@@ -172,6 +176,42 @@ is_leap_year1(Year) when Year rem 400 =:= 0 ->
is_leap_year1(_) -> false.
+%%
+%% Calculates the iso week number for the current date.
+%%
+-spec iso_week_number() -> t_yearweeknum().
+iso_week_number() ->
+ {Date, _} = local_time(),
+ iso_week_number(Date).
+
+
+%%
+%% Calculates the iso week number for the given date.
+%%
+-spec iso_week_number(t_date()) -> t_yearweeknum().
+iso_week_number({Year, Month, Day}) ->
+ D = date_to_gregorian_days({Year, Month, Day}),
+ W01_1_Year = gregorian_days_of_iso_w01_1(Year),
+ W01_1_NextYear = gregorian_days_of_iso_w01_1(Year + 1),
+ if W01_1_Year =< D andalso D < W01_1_NextYear ->
+ % Current Year Week 01..52(,53)
+ {Year, (D - W01_1_Year) div 7 + 1};
+ D < W01_1_Year ->
+ % Previous Year 52 or 53
+ PWN = case day_of_the_week(Year - 1, 1, 1) of
+ 4 -> 53;
+ _ -> case day_of_the_week(Year - 1, 12, 31) of
+ 4 -> 53;
+ _ -> 52
+ end
+ end,
+ {Year - 1, PWN};
+ W01_1_NextYear =< D ->
+ % Next Year, Week 01
+ {Year + 1, 1}
+ end.
+
+
%% last_day_of_the_month(Year, Month)
%%
%% Returns the number of days in a month.
@@ -377,6 +417,19 @@ dty(Y, D1, D2) when D1 < D2 ->
dty(Y, _D1, D2) ->
{Y, D2}.
+%%
+%% The Gregorian days of the iso week 01 day 1 for a given year.
+%%
+-spec gregorian_days_of_iso_w01_1(year()) -> non_neg_integer().
+gregorian_days_of_iso_w01_1(Year) ->
+ D0101 = date_to_gregorian_days(Year, 1, 1),
+ DOW = day_of_the_week(Year, 1, 1),
+ if DOW =< 4 ->
+ D0101 - DOW + 1;
+ true ->
+ D0101 + 7 - DOW + 1
+ end.
+
%% year_day_to_date(Year, DayOfYear) = {Month, DayOfMonth}
%%
%% Note: 1 is the first day of the month.
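Walking through the previous-year branch with the {2005,1,1} case from the test suite: 2005-01-01 is a Saturday (day 6, which is greater than 4), so gregorian_days_of_iso_w01_1(2005) lands on Monday 2005-01-03; the date precedes it, and since 2004-01-01 is a Thursday the previous year has 53 ISO weeks, giving {2004,53}. The same can be checked in the shell:

    %% Hand-checking the branch that classifies the date into the previous year.
    6 = calendar:day_of_the_week(2005, 1, 1),   % Saturday, DOW > 4
    4 = calendar:day_of_the_week(2004, 1, 1),   % Thursday, so 2004 has 53 weeks
    {2004, 53} = calendar:iso_week_number({2005, 1, 1}).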
diff --git a/lib/stdlib/src/escript.erl b/lib/stdlib/src/escript.erl
index 7cb02afb11..0d2d23180a 100644
--- a/lib/stdlib/src/escript.erl
+++ b/lib/stdlib/src/escript.erl
@@ -31,7 +31,7 @@
%%-----------------------------------------------------------------------
--type mode() :: 'compile' | 'debug' | 'interpret' | 'run'.
+-type mode() :: 'native' | 'compile' | 'debug' | 'interpret' | 'run'.
-type source() :: 'archive' | 'beam' | 'text'.
-record(state, {file :: file:filename(),
@@ -304,7 +304,11 @@ parse_and_run(File, Args, Options) ->
false ->
case lists:member("i", Options) of
true -> interpret;
- false -> Mode
+ false ->
+ case lists:member("n", Options) of
+ true -> native;
+ false -> Mode
+ end
end
end
end,
@@ -321,6 +325,14 @@ parse_and_run(File, Args, Options) ->
_Other ->
fatal("There were compilation errors.")
end;
+ native ->
+ case compile:forms(FormsOrBin, [report,native]) of
+ {ok, Module, BeamBin} ->
+ {module, Module} = code:load_binary(Module, File, BeamBin),
+ run(Module, Args);
+ _Other ->
+ fatal("There were compilation errors.")
+ end;
debug ->
case compile:forms(FormsOrBin, [report, debug_info]) of
{ok,Module,BeamBin} ->
@@ -664,7 +676,7 @@ epp_parse_file2(Epp, S, Forms, Parsed) ->
{attribute,Ln,mode,NewMode} ->
S2 = S#state{mode = NewMode},
if
- NewMode =:= compile; NewMode =:= interpret; NewMode =:= debug ->
+ NewMode =:= compile; NewMode =:= interpret; NewMode =:= debug; NewMode =:= native ->
epp_parse_file(Epp, S2, [Form | Forms]);
true ->
Args = lists:flatten(io_lib:format("illegal mode attribute: ~p", [NewMode])),
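With the mode attribute added above, a script can request native compilation directly. A minimal sketch (the script body is arbitrary, and actually running it natively requires a HiPE-enabled runtime):

    #!/usr/bin/env escript
    %% Sketch: '-mode(native).' makes escript compile the script with the
    %% 'native' option before calling main/1, per the change above.
    -mode(native).

    main(_Args) ->
        io:format("~p~n", [lists:sum(lists:seq(1, 1000000))]).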
diff --git a/lib/stdlib/src/supervisor.erl b/lib/stdlib/src/supervisor.erl
index 7102fb9f6e..3c5800effa 100644
--- a/lib/stdlib/src/supervisor.erl
+++ b/lib/stdlib/src/supervisor.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -40,7 +40,7 @@
%%--------------------------------------------------------------------------
-type child_id() :: pid() | 'undefined'.
--type mfargs() :: {module(), atom(), [term()]}.
+-type mfargs() :: {module(), atom(), [term()] | undefined}.
-type modules() :: [module()] | 'dynamic'.
-type restart() :: 'permanent' | 'transient' | 'temporary'.
-type shutdown() :: 'brutal_kill' | timeout().
@@ -69,7 +69,7 @@
-record(state, {name,
strategy :: strategy(),
children = [] :: [child()],
- dynamics = ?DICT:new() :: ?DICT(),
+ dynamics :: ?DICT() | list(),
intensity :: non_neg_integer(),
period :: pos_integer(),
restarts = [],
@@ -283,16 +283,15 @@ do_start_child_i(M, F, A) ->
-spec handle_call(call(), term(), state()) -> {'reply', term(), state()}.
handle_call({start_child, EArgs}, _From, State) when ?is_simple(State) ->
- #child{mfargs = {M, F, A}} = hd(State#state.children),
+ Child = hd(State#state.children),
+ #child{mfargs = {M, F, A}} = Child,
Args = A ++ EArgs,
case do_start_child_i(M, F, Args) of
{ok, Pid} ->
- NState = State#state{dynamics =
- ?DICT:store(Pid, Args, State#state.dynamics)},
+ NState = save_dynamic_child(Child#child.restart_type, Pid, Args, State),
{reply, {ok, Pid}, NState};
{ok, Pid, Extra} ->
- NState = State#state{dynamics =
- ?DICT:store(Pid, Args, State#state.dynamics)},
+ NState = save_dynamic_child(Child#child.restart_type, Pid, Args, State),
{reply, {ok, Pid, Extra}, NState};
What ->
{reply, What, State}
@@ -351,10 +350,20 @@ handle_call({terminate_child, Name}, _From, State) ->
{reply, {error, not_found}, State}
end;
-handle_call(which_children, _From, State) when ?is_simple(State) ->
- [#child{child_type = CT, modules = Mods}] = State#state.children,
+handle_call(which_children, _From, #state{children = [#child{restart_type = temporary,
+ child_type = CT,
+ modules = Mods}]} =
+ State) when ?is_simple(State) ->
+ Reply = lists:map(fun(Pid) -> {undefined, Pid, CT, Mods} end, dynamics_db(temporary,
+ State#state.dynamics)),
+ {reply, Reply, State};
+
+handle_call(which_children, _From, #state{children = [#child{restart_type = RType,
+ child_type = CT,
+ modules = Mods}]} =
+ State) when ?is_simple(State) ->
Reply = lists:map(fun({Pid, _}) -> {undefined, Pid, CT, Mods} end,
- ?DICT:to_list(State#state.dynamics)),
+ ?DICT:to_list(dynamics_db(RType, State#state.dynamics))),
{reply, Reply, State};
handle_call(which_children, _From, State) ->
@@ -366,13 +375,31 @@ handle_call(which_children, _From, State) ->
State#state.children),
{reply, Resp, State};
-handle_call(count_children, _From, State) when ?is_simple(State) ->
- [#child{child_type = CT}] = State#state.children,
+
+handle_call(count_children, _From, #state{children = [#child{restart_type = temporary,
+ child_type = CT}]} = State)
+ when ?is_simple(State) ->
+ {Active, Count} =
+ lists:foldl(fun(Pid, {Alive, Tot}) ->
+ if is_pid(Pid) -> {Alive+1, Tot +1};
+ true -> {Alive, Tot + 1} end
+ end, {0, 0}, dynamics_db(temporary, State#state.dynamics)),
+ Reply = case CT of
+ supervisor -> [{specs, 1}, {active, Active},
+ {supervisors, Count}, {workers, 0}];
+ worker -> [{specs, 1}, {active, Active},
+ {supervisors, 0}, {workers, Count}]
+ end,
+ {reply, Reply, State};
+
+handle_call(count_children, _From, #state{children = [#child{restart_type = RType,
+ child_type = CT}]} = State)
+ when ?is_simple(State) ->
{Active, Count} =
?DICT:fold(fun(Pid, _Val, {Alive, Tot}) ->
if is_pid(Pid) -> {Alive+1, Tot +1};
true -> {Alive, Tot + 1} end
- end, {0, 0}, State#state.dynamics),
+ end, {0, 0}, dynamics_db(RType, State#state.dynamics)),
Reply = case CT of
supervisor -> [{specs, 1}, {active, Active},
{supervisors, Count}, {workers, 0}];
@@ -535,15 +562,11 @@ handle_start_child(Child, State) ->
false ->
case do_start_child(State#state.name, Child) of
{ok, Pid} ->
- Children = State#state.children,
{{ok, Pid},
- State#state{children =
- [Child#child{pid = Pid}|Children]}};
+ save_child(Child#child{pid = Pid}, State)};
{ok, Pid, Extra} ->
- Children = State#state.children,
{{ok, Pid, Extra},
- State#state{children =
- [Child#child{pid = Pid}|Children]}};
+ save_child(Child#child{pid = Pid}, State)};
{error, What} ->
{{error, {What, Child}}, State}
end;
@@ -558,22 +581,21 @@ handle_start_child(Child, State) ->
%%% Returns: {ok, state()} | {shutdown, state()}
%%% ---------------------------------------------------
-restart_child(Pid, Reason, State) when ?is_simple(State) ->
- case ?DICT:find(Pid, State#state.dynamics) of
+restart_child(Pid, Reason, #state{children = [Child]} = State) when ?is_simple(State) ->
+ RestartType = Child#child.restart_type,
+ case dynamic_child_args(Pid, dynamics_db(RestartType, State#state.dynamics)) of
{ok, Args} ->
- [Child] = State#state.children,
- RestartType = Child#child.restart_type,
{M, F, _} = Child#child.mfargs,
NChild = Child#child{pid = Pid, mfargs = {M, F, Args}},
do_restart(RestartType, Reason, NChild, State);
error ->
- {ok, State}
+ {ok, State}
end;
+
restart_child(Pid, Reason, State) ->
Children = State#state.children,
case lists:keyfind(Pid, #child.pid, Children) of
- #child{} = Child ->
- RestartType = Child#child.restart_type,
+ #child{restart_type = RestartType} = Child ->
do_restart(RestartType, Reason, Child, State);
false ->
{ok, State}
@@ -608,7 +630,8 @@ restart(Child, State) ->
restart(simple_one_for_one, Child, State) ->
#child{mfargs = {M, F, A}} = Child,
- Dynamics = ?DICT:erase(Child#child.pid, State#state.dynamics),
+ Dynamics = ?DICT:erase(Child#child.pid, dynamics_db(Child#child.restart_type,
+ State#state.dynamics)),
case do_start_child_i(M, F, A) of
{ok, Pid} ->
NState = State#state{dynamics = ?DICT:store(Pid, A, Dynamics)},
@@ -755,8 +778,40 @@ monitor_child(Pid) ->
%%-----------------------------------------------------------------
%% Child/State manipulating functions.
%%-----------------------------------------------------------------
-state_del_child(#child{pid = Pid}, State) when ?is_simple(State) ->
- NDynamics = ?DICT:erase(Pid, State#state.dynamics),
+
+%% Note that we do not want to save the parameter list for temporary processes,
+%% as they will not be restarted and hence we do not need this information.
+%% Especially for dynamic children of simple_one_for_one supervisors this
+%% could become very costly, as it is not uncommon to spawn very many such
+%% processes.
+save_child(#child{restart_type = temporary,
+ mfargs = {M, F, _}} = Child, #state{children = Children} = State) ->
+ State#state{children = [Child#child{mfargs = {M, F, undefined}} |Children]};
+save_child(Child, #state{children = Children} = State) ->
+ State#state{children = [Child |Children]}.
+
+save_dynamic_child(temporary, Pid, _, #state{dynamics = Dynamics} = State) ->
+ State#state{dynamics = [Pid | dynamics_db(temporary, Dynamics)]};
+save_dynamic_child(RestartType, Pid, Args, #state{dynamics = Dynamics} = State) ->
+ State#state{dynamics = ?DICT:store(Pid, Args, dynamics_db(RestartType, Dynamics))}.
+
+dynamics_db(temporary, undefined) ->
+ [];
+dynamics_db(_, undefined) ->
+ ?DICT:new();
+dynamics_db(_,Dynamics) ->
+ Dynamics.
+
+dynamic_child_args(_, Dynamics) when is_list(Dynamics)->
+ {ok, undefined};
+dynamic_child_args(Pid, Dynamics) ->
+ ?DICT:find(Pid, Dynamics).
+
+state_del_child(#child{pid = Pid, restart_type = temporary}, State) when ?is_simple(State) ->
+ NDynamics = lists:delete(Pid, dynamics_db(temporary, State#state.dynamics)),
+ State#state{dynamics = NDynamics};
+state_del_child(#child{pid = Pid, restart_type = RType}, State) when ?is_simple(State) ->
+ NDynamics = ?DICT:erase(Pid, dynamics_db(RType, State#state.dynamics)),
State#state{dynamics = NDynamics};
state_del_child(Child, State) ->
NChildren = del_child(Child#child.name, State#state.children),
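The dynamics handling above stores temporary children of a simple_one_for_one supervisor as a plain pid list, since their start arguments are never needed for a restart. A minimal supervisor of that shape; the module and worker names are hypothetical:

    %% Sketch of a simple_one_for_one supervisor whose children are
    %% 'temporary'; tmp_worker is a hypothetical worker module.
    -module(tmp_sup_sketch).
    -behaviour(supervisor).
    -export([start_link/0, start_child/1, init/1]).

    start_link() ->
        supervisor:start_link({local, ?MODULE}, ?MODULE, []).

    start_child(Arg) ->
        %% Extra arguments are appended to the child MFA; with 'temporary'
        %% children they are not kept, as the child is never restarted.
        supervisor:start_child(?MODULE, [Arg]).

    init([]) ->
        {ok, {{simple_one_for_one, 0, 1},
              [{tmp_worker, {tmp_worker, start_link, []},
                temporary, brutal_kill, worker, [tmp_worker]}]}}.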
diff --git a/lib/stdlib/test/calendar_SUITE.erl b/lib/stdlib/test/calendar_SUITE.erl
index 81b0299118..8192d035ca 100644
--- a/lib/stdlib/test/calendar_SUITE.erl
+++ b/lib/stdlib/test/calendar_SUITE.erl
@@ -28,7 +28,8 @@
day_of_the_week_calibrate/1,
leap_years/1,
last_day_of_the_month/1,
- local_time_to_universal_time_dst/1]).
+ local_time_to_universal_time_dst/1,
+ iso_week_number/1]).
-define(START_YEAR, 1947).
-define(END_YEAR, 2012).
@@ -38,7 +39,7 @@ suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
[gregorian_days, gregorian_seconds, day_of_the_week,
day_of_the_week_calibrate, leap_years,
- last_day_of_the_month, local_time_to_universal_time_dst].
+ last_day_of_the_month, local_time_to_universal_time_dst, iso_week_number].
groups() ->
[].
@@ -55,7 +56,6 @@ init_per_group(_GroupName, Config) ->
end_per_group(_GroupName, Config) ->
Config.
-
gregorian_days(doc) ->
"Tests that date_to_gregorian_days and gregorian_days_to_date "
"are each others inverses from ?START_YEAR-01-01 up to ?END_YEAR-01-01. "
@@ -170,6 +170,15 @@ local_time_to_universal_time_dst_x(Config) when is_list(Config) ->
{comment,"Bug in mktime() in this OS"}
end.
+iso_week_number(doc) ->
+ "Test the iso week number calculation for all three possibilities."
+ " When the date falls on the last week of the previous year,"
+ " when the date falls on a week within the given year and finally,"
+ " when the date falls on the first week of the next year.";
+iso_week_number(suite) ->
+ [];
+iso_week_number(Config) when is_list(Config) ->
+ ?line check_iso_week_number().
%%
%% LOCAL FUNCTIONS
@@ -259,7 +268,12 @@ check_last_day_of_the_month({SYr, SMon}, {EYr, EMon}) when SYr < EYr ->
check_last_day_of_the_month(_, _) ->
ok.
-
+%% check_iso_week_number
+%%
+check_iso_week_number() ->
+ ?line {2004, 53} = calendar:iso_week_number({2005, 1, 1}),
+ ?line {2007, 1} = calendar:iso_week_number({2007, 1, 1}),
+ ?line {2009, 1} = calendar:iso_week_number({2008, 12, 29}).
diff --git a/lib/stdlib/test/ets_SUITE.erl b/lib/stdlib/test/ets_SUITE.erl
index dba70d9f3f..9d348b5f1a 100644
--- a/lib/stdlib/test/ets_SUITE.erl
+++ b/lib/stdlib/test/ets_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -93,13 +93,16 @@
misc1_do/1, safe_fixtable_do/1, info_do/1, dups_do/1, heavy_lookup_do/1,
heavy_lookup_element_do/1, member_do/1, otp_5340_do/1, otp_7665_do/1, meta_wb_do/1,
do_heavy_concurrent/1, tab2file2_do/2, exit_large_table_owner_do/2,
- types_do/1, sleeper/0, rpc_externals/0, memory_do/1
+ types_do/1, sleeper/0, rpc_externals/0, memory_do/1,
+ ms_tracee_dummy/1, ms_tracee_dummy/2, ms_tracee_dummy/3, ms_tracee_dummy/4
]).
-export([t_select_reverse/1]).
-include_lib("test_server/include/test_server.hrl").
+-define(m(A,B), ?line assert_eq(A,B)).
+
init_per_testcase(Case, Config) ->
Seed = {S1,S2,S3} = random:seed0(), %now(),
random:seed(S1,S2,S3),
@@ -213,29 +216,180 @@ t_match_spec_run(suite) ->
t_match_spec_run(doc) ->
["Check ets:match_spec_run/2."];
t_match_spec_run(Config) when is_list(Config) ->
+ init_externals(),
?line EtsMem = etsmem(),
- ?line [2,3] = ets:match_spec_run([{1},{2},{3}],
- ets:match_spec_compile(
- [{{'$1'},[{'>','$1',1}],['$1']}])),
+
+ t_match_spec_run_test([{1},{2},{3}],
+ [{{'$1'},[{'>','$1',1}],['$1']}],
+ [2,3]),
+
?line Huge = [{X} || X <- lists:seq(1,2500)],
?line L = lists:seq(2476,2500),
- ?line L = ets:match_spec_run(Huge,
- ets:match_spec_compile(
- [{{'$1'},[{'>','$1',2475}],['$1']}])),
+ t_match_spec_run_test(Huge, [{{'$1'},[{'>','$1',2475}],['$1']}], L),
+
?line L2 = [{X*16#FFFFFFF} || X <- L],
- ?line L2 = ets:match_spec_run(Huge,
- ets:match_spec_compile(
- [{{'$1'},
- [{'>','$1',2475}],
- [{{{'*','$1',16#FFFFFFF}}}]}])),
- ?line [500,1000,1500,2000,2500] =
- ets:match_spec_run(Huge,
- ets:match_spec_compile(
- [{{'$1'},
- [{'=:=',{'rem','$1',500},0}],
- ['$1']}])),
+ t_match_spec_run_test(Huge,
+ [{{'$1'}, [{'>','$1',2475}], [{{{'*','$1',16#FFFFFFF}}}]}],
+ L2),
+
+ t_match_spec_run_test(Huge, [{{'$1'}, [{'=:=',{'rem','$1',500},0}], ['$1']}],
+ [500,1000,1500,2000,2500]),
+
+ %% More matching fun with several match clauses and guards,
+ %% applied to a variety of terms.
+ Fun = fun(Term) ->
+ CTerm = {const, Term},
+
+ N_List = [{Term, "0", "v-element"},
+ {"=hidden_node", "0", Term},
+ {"0", Term, Term},
+ {"something", Term, "something else"},
+ {"guard and res", Term, 872346},
+ {Term, {'and',Term,'again'}, 3.14},
+ {Term, {'and',Term,'again'}, "m&g"},
+ {Term, {'and',Term,'again'}, "m&g&r"},
+ {[{second,Term}, 'and', "tail"], Term, ['and',"tail"]}],
+
+ N_MS = [{{'$1','$2','$3'},
+ [{'=:=','$1',CTerm}, {'=:=','$2',{const,"0"}}],
+ [{{"Guard only for $1",'$3'}}]},
+
+ {{'$3','$1','$4'},
+ [{'=:=','$3',"=hidden_node"}, {'=:=','$1',{const,"0"}}],
+ [{{"Result only for $4",'$4'}}]},
+
+ {{'$2','$1','$1'},
+ [{'=:=','$2',{const,"0"}}],
+ [{{"Match only for $1",'$2'}}]},
+
+ {{'$2',Term,['$3'|'_']},
+ [{is_list,'$2'},{'=:=','$3',$s}],
+ [{{"Matching term",'$2'}}]},
+
+ {{'$1','$2',872346},
+ [{'=:=','$2',CTerm}, {is_list,'$1'}],
+ [{{"Guard and result",'$2'}}]},
+
+ {{'$1', {'and','$1','again'}, '$2'},
+ [{is_float,'$2'}],
+ [{{"Match and result",'$1'}}]},
+
+ {{'$1', {'and','$1','again'}, '$2'},
+ [{'=:=','$1',CTerm}, {'=:=', '$2', "m&g"}],
+ [{{"Match and guard",'$2'}}]},
+
+ {{'$1', {'and','$1','again'}, "m&g&r"},
+ [{'=:=','$1',CTerm}],
+ [{{"Match, guard and result",'$1'}}]},
+
+ {{'$1', '$2', '$3'},
+ [{'=:=','$1',[{{second,'$2'}} | '$3']}],
+ [{{"Building guard"}}]}
+ ],
+
+ N_Result = [{"Guard only for $1", "v-element"},
+ {"Result only for $4", Term},
+ {"Match only for $1", "0"},
+ {"Matching term","something"},
+ {"Guard and result",Term},
+ {"Match and result",Term},
+ {"Match and guard","m&g"},
+ {"Match, guard and result",Term},
+ {"Building guard"}],
+
+ F = fun(N_MS_Perm) ->
+ t_match_spec_run_test(N_List, N_MS_Perm, N_Result)
+ end,
+ repeat_for_permutations(F, N_MS)
+ end,
+
+ test_terms(Fun, skip_refc_check),
+
?line verify_etsmem(EtsMem).
+t_match_spec_run_test(List, MS, Result) ->
+
+ %%io:format("ms = ~p\n",[MS]),
+
+ ?m(Result, ets:match_spec_run(List, ets:match_spec_compile(MS))),
+
+    %% Check that ets:select agrees with match_spec_run
+ Tab = ets:new(xxx, [bag]),
+ ets:insert(Tab, List),
+ SRes = lists:sort(Result),
+ ?m(SRes, lists:sort(ets:select(Tab, MS))),
+ ets:delete(Tab),
+
+    %% Check that call tracing agrees with the ets results
+ Self = self(),
+ {Tracee, MonRef} = spawn_monitor(fun() -> ms_tracee(Self, List) end),
+ receive {Tracee, ready} -> ok end,
+
+ MST = lists:map(fun(Clause) -> ms_clause_ets_to_trace(Clause) end, MS),
+
+ %%io:format("MS = ~p\nMST= ~p\n",[MS,MST]),
+
+ erlang:trace_pattern({?MODULE,ms_tracee_dummy,'_'}, MST , [local]),
+ erlang:trace(Tracee, true, [call]),
+ Tracee ! start,
+ TRes = ms_tracer_collect(Tracee, MonRef, []),
+ %erlang:trace(Tracee, false, [call]),
+ %Tracee ! stop,
+ case TRes of
+ SRes -> ok;
+ _ ->
+ io:format("TRACE MATCH FAILED\n"),
+ io:format("Input = ~p\nMST = ~p\nExpected = ~p\nGot = ~p\n", [List, MST, SRes, TRes]),
+ ?t:fail("TRACE MATCH FAILED")
+ end,
+ ok.
+
+
+
+ms_tracer_collect(Tracee, Ref, Acc) ->
+ receive
+ {trace, Tracee, call, _Args, [Msg]} ->
+ %io:format("trace Args=~p Msg=~p\n", [_Args, Msg]),
+ ms_tracer_collect(Tracee, Ref, [Msg | Acc]);
+
+ {'DOWN', Ref, process, Tracee, _} ->
+ %io:format("monitor DOWN for ~p\n", [Tracee]),
+ TDRef = erlang:trace_delivered(Tracee),
+ ms_tracer_collect(Tracee, TDRef, Acc);
+
+ {trace_delivered, Tracee, Ref} ->
+ %%io:format("trace delivered for ~p\n", [Tracee]),
+ lists:sort(Acc);
+
+ Other ->
+ io:format("Unexpected message = ~p\n", [Other]),
+ ?t:fail("Unexpected tracer msg")
+ end.
+
+
+ms_tracee(Parent, CallArgList) ->
+ %io:format("ms_tracee ~p started with ArgList = ~p\n", [self(), CallArgList]),
+ Parent ! {self(), ready},
+ receive start -> ok end,
+ lists:foreach(fun(Args) ->
+ erlang:apply(?MODULE, ms_tracee_dummy, tuple_to_list(Args))
+ end, CallArgList).
+ %%receive stop -> ok end.
+
+
+
+ms_tracee_dummy(_) -> ok.
+ms_tracee_dummy(_,_) -> ok.
+ms_tracee_dummy(_,_,_) -> ok.
+ms_tracee_dummy(_,_,_,_) -> ok.
+
+ms_clause_ets_to_trace({Head, Guard, Body}) ->
+ {tuple_to_list(Head), Guard, [{message, Body}]}.
+
+assert_eq(A,A) -> ok;
+assert_eq(A,B) ->
+ io:format("FAILED MATCH:\n~p\n =/=\n~p\n",[A,B]),
+ ?t:fail("assert_eq failed").
t_repair_continuation(suite) ->
@@ -1427,8 +1581,7 @@ update_element_opts(Tuple,KeyPos,UpdPos,Opts) ->
ok.
update_element(T,Tuple,KeyPos,UpdPos) ->
- KeyList = [Key || Key <- [17,"seventeen",<<"seventeen">>,{17},list_to_binary(lists:seq(1,100)),
- make_ref(), self()]],
+ KeyList = [17,"seventeen",<<"seventeen">>,{17},list_to_binary(lists:seq(1,100)),make_ref(), self()],
lists:foreach(fun(Key) ->
TupleWithKey = setelement(KeyPos,Tuple,Key),
update_element_do(T,TupleWithKey,Key,UpdPos)
@@ -1442,6 +1595,8 @@ update_element_do(Tab,Tuple,Key,UpdPos) ->
% This will try all combinations of {fromValue,toValue}
%
% IMPORTANT: size(Values) must be a prime number for this to work!!!
+
+ %io:format("update_element_do for key=~p\n",[Key]),
Big32 = 16#12345678,
Big64 = 16#123456789abcdef0,
Values = { 623, -27, 0, Big32, -Big32, Big64, -Big64, Big32*Big32,
@@ -1462,7 +1617,7 @@ update_element_do(Tab,Tuple,Key,UpdPos) ->
(ToIx, [], Pos, _Rand, _MeF) ->
{Pos, element(ToIx+1,Values)} % single {pos,value} arg
end,
-
+
UpdateF = fun(ToIx,Rand) ->
PosValArg = PosValArgF(ToIx,[],UpdPos,Rand,PosValArgF),
%%io:format("update_element(~p)~n",[PosValArg]),
@@ -1589,6 +1744,7 @@ update_counter_for(T) ->
(Obj, Times, Arg3, Myself) ->
?line {NewObj, Ret} = uc_mimic(Obj,Arg3),
ArgHash = erlang:phash2({T,a,Arg3}),
+ %%io:format("update_counter(~p, ~p, ~p) expecting ~p\n",[T,a,Arg3,Ret]),
?line Ret = ets:update_counter(T,a,Arg3),
?line ArgHash = erlang:phash2({T,a,Arg3}),
%%io:format("NewObj=~p~n ",[NewObj]),
@@ -3441,7 +3597,7 @@ firstnext_concurrent(Config) when is_list(Config) ->
[dynamic_go() || _ <- lists:seq(1, 2)],
receive
after 5000 -> ok
- end.
+ end.
ets_init(Tab, N) ->
ets_new(Tab, [named_table,public,ordered_set]),
@@ -4073,7 +4229,7 @@ do_lookup_element(Tab, N, M) ->
end.
-heavy_concurrent(_Config) ->
+heavy_concurrent(Config) when is_list(Config) ->
repeat_for_opts(do_heavy_concurrent).
do_heavy_concurrent(Opts) ->
@@ -5259,7 +5415,7 @@ types_do(Opts) ->
ets:delete_all_objects(T),
?line 0 = ets:info(T,size)
end,
- test_terms(Fun),
+ test_terms(Fun, strict),
ets:delete(T),
?line verify_etsmem(EtsMem).
@@ -5503,6 +5659,20 @@ repeat_while(Fun, Arg0) ->
{false,Ret} -> Ret
end.
+%% Some (but not all) permutations of List
+repeat_for_permutations(Fun, List) ->
+ repeat_for_permutations(Fun, List, length(List)-1).
+repeat_for_permutations(Fun, List, 0) ->
+ Fun(List);
+repeat_for_permutations(Fun, List, N) ->
+ {A,B} = lists:split(N, List),
+ L1 = B++A,
+ L2 = lists:reverse(L1),
+ L3 = B++lists:reverse(A),
+ L4 = lists:reverse(B)++A,
+ Fun(L1), Fun(L2), Fun(L3), Fun(L4),
+ repeat_for_permutations(Fun, List, N-1).
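+%% Example (illustration only): for List = [a,b,c] the rotations and
+%% reversals above produce e.g. [c,a,b], [b,a,c], [c,b,a] and [b,c,a] in
+%% addition to the original order, so several distinct orderings are
+%% exercised without generating all length(List)! permutations.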
+
receive_any() ->
receive M ->
io:format("Process ~p got msg ~p\n", [self(),M]),
@@ -5560,7 +5730,7 @@ only_if_smp(Schedulers, Func) ->
%% Copy-paste from emulator/test/binary_SUITE.erl
-define(heap_binary_size, 64).
-test_terms(Test_Func) ->
+test_terms(Test_Func, Mode) ->
garbage_collect(),
?line Pib0 = process_info(self(),binary),
@@ -5637,7 +5807,10 @@ test_terms(Test_Func) ->
Pib = process_info(self(),binary),
?line Test_Func(Bin3),
garbage_collect(),
- ?line Pib = process_info(self(),binary),
+ case Mode of
+ strict -> ?line Pib = process_info(self(),binary);
+ skip_refc_check -> ok
+ end,
?line Test_Func(make_unaligned_sub_binary(Bin0)),
?line Test_Func(make_unaligned_sub_binary(Bin1)),
@@ -5681,9 +5854,13 @@ test_terms(Test_Func) ->
?line Test_Func(lists:duplicate(32, FF)),
garbage_collect(),
- ?line Pib0 = process_info(self(),binary),
+ case Mode of
+ strict -> ?line Pib0 = process_info(self(),binary);
+ skip_refc_check -> ok
+ end,
ok.
+
id(I) -> I.
very_big_num() ->
@@ -5715,27 +5892,32 @@ make_ext_ref() ->
Ref.
init_externals() ->
- SysDistSz = ets:info(sys_dist,size),
- ?line Pa = filename:dirname(code:which(?MODULE)),
- ?line {ok, Node} = test_server:start_node(plopp, slave, [{args, " -pa " ++ Pa}]),
- ?line Res = case rpc:call(Node, ?MODULE, rpc_externals, []) of
- {badrpc, {'EXIT', E}} ->
- test_server:fail({rpcresult, E});
- R -> R
- end,
- ?line test_server:stop_node(Node),
-
- %% Wait for table 'sys_dist' to stabilize
- repeat_while(fun() ->
- case ets:info(sys_dist,size) of
- SysDistSz -> false;
- Sz ->
- io:format("Waiting for sys_dist to revert size from ~p to size ~p\n",
- [Sz, SysDistSz]),
- receive after 1000 -> true end
- end
- end),
- put(externals, Res).
+ case get(externals) of
+ undefined ->
+ SysDistSz = ets:info(sys_dist,size),
+ ?line Pa = filename:dirname(code:which(?MODULE)),
+ ?line {ok, Node} = test_server:start_node(plopp, slave, [{args, " -pa " ++ Pa}]),
+ ?line Res = case rpc:call(Node, ?MODULE, rpc_externals, []) of
+ {badrpc, {'EXIT', E}} ->
+ test_server:fail({rpcresult, E});
+ R -> R
+ end,
+ ?line test_server:stop_node(Node),
+
+ %% Wait for table 'sys_dist' to stabilize
+ repeat_while(fun() ->
+ case ets:info(sys_dist,size) of
+ SysDistSz -> false;
+ Sz ->
+ io:format("Waiting for sys_dist to revert size from ~p to size ~p\n",
+ [Sz, SysDistSz]),
+ receive after 1000 -> true end
+ end
+ end),
+ put(externals, Res);
+
+ {_,_,_} -> ok
+ end.
rpc_externals() ->
{self(), make_port(), make_ref()}.
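The tracing check added to t_match_spec_run above reuses each ETS match spec by converting it clause by clause with ms_clause_ets_to_trace/1: the tuple head becomes an argument list (ms_tracee_dummy/1..4 is applied to the elements of every test tuple), and the body is wrapped in a {message, ...} action so the selected value actually reaches the tracer. A minimal sketch of that conversion, using the first clause of the test as input (illustration only, not part of the patch):

    EtsClause   = {{'$1'}, [{'>','$1',1}], ['$1']},
    TraceClause = ms_clause_ets_to_trace(EtsClause),
    %% Head becomes an argument list, body becomes a message action:
    {['$1'], [{'>','$1',1}], [{message, ['$1']}]} = TraceClause,
    erlang:trace_pattern({ets_SUITE, ms_tracee_dummy, '_'}, [TraceClause], [local]).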
diff --git a/lib/stdlib/test/stdlib_SUITE.erl b/lib/stdlib/test/stdlib_SUITE.erl
index f46493a2d8..0cca030b3d 100644
--- a/lib/stdlib/test/stdlib_SUITE.erl
+++ b/lib/stdlib/test/stdlib_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -75,7 +75,7 @@ app_test(suite) ->
[];
app_test(doc) ->
["Application consistency test."];
-app_test(Config) when list(Config) ->
+app_test(Config) when is_list(Config) ->
?t:app_test(stdlib),
ok.
diff --git a/lib/stdlib/test/supervisor_SUITE.erl b/lib/stdlib/test/supervisor_SUITE.erl
index 82643e105f..6e927da2ab 100644
--- a/lib/stdlib/test/supervisor_SUITE.erl
+++ b/lib/stdlib/test/supervisor_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -52,10 +52,12 @@
simple_one_for_one_extra/1]).
%% Misc tests
--export([child_unlink/1, tree/1, count_children_memory/1]).
+-export([child_unlink/1, tree/1, count_children_memory/1,
+ do_not_save_start_parameters_for_temporary_children/1]).
%-------------------------------------------------------------------------
+
suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
@@ -67,7 +69,7 @@ all() ->
{group, restart_rest_for_one},
{group, normal_termination},
{group, abnormal_termination}, child_unlink, tree,
- count_children_memory].
+ count_children_memory, do_not_save_start_parameters_for_temporary_children].
groups() ->
[{sup_start, [],
@@ -117,7 +119,6 @@ init_per_testcase(_Case, Config) ->
end_per_testcase(_Case, _Config) ->
ok.
-
start(InitResult) ->
supervisor:start_link({local, sup_test}, ?MODULE, InitResult).
@@ -135,14 +136,8 @@ get_child_counts(Supervisor) ->
proplists:get_value(supervisors, Counts),
proplists:get_value(workers, Counts)].
-
%-------------------------------------------------------------------------
-%
% Test cases starts here.
-%
-%-------------------------------------------------------------------------
-
-
%-------------------------------------------------------------------------
sup_start_normal(doc) ->
["Tests that the supervisor process starts correctly and that it "
@@ -242,8 +237,6 @@ sup_start_fail(Config) when is_list(Config) ->
ok.
%-------------------------------------------------------------------------
-%-------------------------------------------------------------------------
-
sup_stop_infinity(doc) ->
["See sup_stop/1 when Shutdown = infinity, this walue is only allowed "
"for children of type supervisor"];
@@ -594,7 +587,6 @@ child_specs(Config) when is_list(Config) ->
?line ok = supervisor:check_childspecs([C3]),
?line ok = supervisor:check_childspecs([C4]),
ok.
-%-------------------------------------------------------------------------
%-------------------------------------------------------------------------
permanent_normal(doc) ->
@@ -656,7 +648,6 @@ temporary_normal(Config) when is_list(Config) ->
?line [1,0,0,1] = get_child_counts(sup_test),
ok.
-%-------------------------------------------------------------------------
%-------------------------------------------------------------------------
permanent_abnormal(doc) ->
@@ -725,8 +716,6 @@ temporary_abnormal(Config) when is_list(Config) ->
ok.
%-------------------------------------------------------------------------
-
-%-------------------------------------------------------------------------
one_for_one(doc) ->
["Test the one_for_one base case."];
one_for_one(suite) -> [];
@@ -805,8 +794,6 @@ one_for_one_escalation(Config) when is_list(Config) ->
end,
ok.
%-------------------------------------------------------------------------
-
-%-------------------------------------------------------------------------
one_for_all(doc) ->
["Test the one_for_all base case."];
one_for_all(suite) -> [];
@@ -894,8 +881,6 @@ one_for_all_escalation(Config) when is_list(Config) ->
ok.
%-------------------------------------------------------------------------
-
-%-------------------------------------------------------------------------
simple_one_for_one(doc) ->
["Test the simple_one_for_one base case."];
simple_one_for_one(suite) -> [];
@@ -1012,8 +997,6 @@ simple_one_for_one_escalation(Config) when is_list(Config) ->
end,
ok.
%-------------------------------------------------------------------------
-
-%-------------------------------------------------------------------------
rest_for_one(doc) ->
["Test the rest_for_one base case."];
rest_for_one(suite) -> [];
@@ -1287,7 +1270,9 @@ tree(Config) when is_list(Config) ->
ok.
%-------------------------------------------------------------------------
count_children_memory(doc) ->
- ["Test that which_children eats memory, but count_children does not."];
+ ["Test that count_children does not eat memory."];
+count_children_memory(suite) ->
+ [];
count_children_memory(Config) when is_list(Config) ->
process_flag(trap_exit, true),
Child = {child, {supervisor_1, start_child, []}, temporary, 1000,
@@ -1300,7 +1285,7 @@ count_children_memory(Config) when is_list(Config) ->
Children = supervisor:which_children(sup_test),
_Size2 = erlang:memory(processes_used),
ChildCount = get_child_counts(sup_test),
- Size3 = erlang:memory(processes_used),
+ _Size3 = erlang:memory(processes_used),
[supervisor:start_child(sup_test, []) || _Ignore2 <- lists:seq(1,1000)],
@@ -1324,8 +1309,8 @@ count_children_memory(Config) when is_list(Config) ->
?line ChildCount3 = ChildCount2,
%% count_children consumes memory using an accumulator function,
- %% but the space can be reclaimed incrementally, whereas
- %% which_children generates a return list.
+    %% but the space can be reclaimed incrementally, whereas
+    %% which_children may generate garbage that will be reclaimed later.
case (Size5 =< Size4) of
true -> ok;
false ->
@@ -1337,23 +1322,10 @@ count_children_memory(Config) when is_list(Config) ->
?line test_server:fail({count_children, used_more_memory})
end,
- case Size4 > Size3 of
- true -> ok;
- false ->
- ?line test_server:fail({which_children, used_no_memory})
- end,
- case Size6 > Size5 of
- true -> ok;
- false ->
- ?line test_server:fail({which_children, used_no_memory})
- end,
-
[exit(Pid, kill) || {undefined, Pid, worker, _Modules} <- Children3],
test_server:sleep(100),
?line [1,0,0,0] = get_child_counts(sup_test),
-
ok.
-
count_children_allocator_test(MemoryState) ->
Allocators = [temp_alloc, eheap_alloc, binary_alloc, ets_alloc,
driver_alloc, sl_alloc, ll_alloc, fix_alloc, std_alloc,
@@ -1364,3 +1336,84 @@ count_children_allocator_test(MemoryState) ->
AllocStates = [lists:keyfind(e, 1, AllocValue)
|| {_Type, AllocValue} <- AllocTypes],
lists:all(fun(State) -> State == {e, true} end, AllocStates).
+%-------------------------------------------------------------------------
+do_not_save_start_parameters_for_temporary_children(doc) ->
+ ["Temporary children shall not be restarted so they should not "
+ "save start parameters, as it potentially can "
+ "take up a huge amount of memory for no purpose."];
+do_not_save_start_parameters_for_temporary_children(suite) ->
+ [];
+do_not_save_start_parameters_for_temporary_children(Config) when is_list(Config) ->
+ process_flag(trap_exit, true),
+ dont_save_start_parameters_for_temporary_children(one_for_all),
+ dont_save_start_parameters_for_temporary_children(one_for_one),
+ dont_save_start_parameters_for_temporary_children(rest_for_one),
+ dont_save_start_parameters_for_temporary_children(simple_one_for_one).
+
+dont_save_start_parameters_for_temporary_children(simple_one_for_one = Type) ->
+ Permanent = {child, {supervisor_1, start_child, []},
+ permanent, 1000, worker, []},
+ Transient = {child, {supervisor_1, start_child, []},
+ transient, 1000, worker, []},
+ Temporary = {child, {supervisor_1, start_child, []},
+ temporary, 1000, worker, []},
+ {ok, Sup1} = supervisor:start_link(?MODULE, {ok, {{Type, 2, 3600}, [Permanent]}}),
+ {ok, Sup2} = supervisor:start_link(?MODULE, {ok, {{Type, 2, 3600}, [Transient]}}),
+ {ok, Sup3} = supervisor:start_link(?MODULE, {ok, {{Type, 2, 3600}, [Temporary]}}),
+
+ LargeList = lists:duplicate(10, "Potentially large"),
+
+ start_children(Sup1, [LargeList], 100),
+ start_children(Sup2, [LargeList], 100),
+ start_children(Sup3, [LargeList], 100),
+
+ [{memory,Mem1}] = process_info(Sup1, [memory]),
+ [{memory,Mem2}] = process_info(Sup2, [memory]),
+ [{memory,Mem3}] = process_info(Sup3, [memory]),
+
+ true = (Mem3 < Mem1) and (Mem3 < Mem2),
+
+ exit(Sup1, shutdown),
+ exit(Sup2, shutdown),
+ exit(Sup3, shutdown);
+
+dont_save_start_parameters_for_temporary_children(Type) ->
+ {ok, Sup1} = supervisor:start_link(?MODULE, {ok, {{Type, 2, 3600}, []}}),
+ {ok, Sup2} = supervisor:start_link(?MODULE, {ok, {{Type, 2, 3600}, []}}),
+ {ok, Sup3} = supervisor:start_link(?MODULE, {ok, {{Type, 2, 3600}, []}}),
+
+ LargeList = lists:duplicate(10, "Potentially large"),
+
+ Permanent = {child1, {supervisor_1, start_child, [LargeList]},
+ permanent, 1000, worker, []},
+ Transient = {child2, {supervisor_1, start_child, [LargeList]},
+ transient, 1000, worker, []},
+ Temporary = {child3, {supervisor_1, start_child, [LargeList]},
+ temporary, 1000, worker, []},
+
+ start_children(Sup1, Permanent, 100),
+ start_children(Sup2, Transient, 100),
+ start_children(Sup3, Temporary, 100),
+
+ [{memory,Mem1}] = process_info(Sup1, [memory]),
+ [{memory,Mem2}] = process_info(Sup2, [memory]),
+ [{memory,Mem3}] = process_info(Sup3, [memory]),
+
+ true = (Mem3 < Mem1) and (Mem3 < Mem2),
+
+ exit(Sup1, shutdown),
+ exit(Sup2, shutdown),
+ exit(Sup3, shutdown).
+
+start_children(_,_, 0) ->
+ ok;
+start_children(Sup, Args, N) ->
+ Spec = child_spec(Args, N),
+ {ok, _, _} = supervisor:start_child(Sup, Spec),
+ start_children(Sup, Args, N-1).
+
+child_spec([_|_] = SimpleOneForOneArgs, _) ->
+ SimpleOneForOneArgs;
+child_spec({Name, MFA, RestartType, Shutdown, Type, Modules}, N) ->
+ NewName = list_to_atom((atom_to_list(Name) ++ integer_to_list(N))),
+ {NewName, MFA, RestartType, Shutdown, Type, Modules}.
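The do_not_save_start_parameters_for_temporary_children test added above checks that a supervisor discards the start arguments of temporary children once they are running, so a supervisor whose children were started with large argument terms stays small as long as those children are temporary. A minimal sketch of the kind of child spec involved (illustration only; sup_test and my_worker are hypothetical names, not part of the patch):

    LargeArg = lists:duplicate(10, "Potentially large"),
    TempSpec = {my_temp_child, {my_worker, start_link, [LargeArg]},
                temporary, 1000, worker, [my_worker]},
    {ok, _Child} = supervisor:start_child(sup_test, TempSpec),
    %% After this patch the supervisor no longer keeps LargeArg in its state:
    [{memory, _Mem}] = erlang:process_info(whereis(sup_test), [memory]).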
diff --git a/lib/test_server/src/test_server.erl b/lib/test_server/src/test_server.erl
index e0bf50bc43..2ab4e9c28a 100644
--- a/lib/test_server/src/test_server.erl
+++ b/lib/test_server/src/test_server.erl
@@ -470,7 +470,7 @@ cover_analyse(Analyse,Modules) ->
overview ->
fun(_) -> undefined end
end,
- R = lists:map(
+ R = pmap(
fun(M) ->
case cover:analyse(M,module) of
{ok,{M,{Cov,NotCov}}} ->
@@ -486,6 +486,19 @@ cover_analyse(Analyse,Modules) ->
stick_all_sticky(node(),Sticky),
R.
+pmap(Fun,List) ->
+ Collector = self(),
+ Pids = lists:map(fun(E) ->
+ spawn(fun() ->
+ Collector ! {res,self(),Fun(E)}
+ end)
+ end, List),
+ lists:map(fun(Pid) ->
+ receive
+ {res,Pid,Res} ->
+ Res
+ end
+ end, Pids).
unstick_all_sticky(Node) ->
lists:filter(
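With the change above, cover analysis of the modules runs concurrently through the new pmap/2 helper. Because the collecting lists:map performs a selective receive on each worker's Pid, results come back in input order even though the workers finish in any order; a Fun that crashes would leave the collector waiting, which is acceptable for this internal use. A hypothetical usage sketch, assuming pmap/2 were exported from test_server (it is an internal helper):

    Square = fun(X) -> timer:sleep(10), X * X end,
    [1, 4, 9] = test_server:pmap(Square, [1, 2, 3]).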
diff --git a/lib/test_server/src/ts_install_cth.erl b/lib/test_server/src/ts_install_cth.erl
index 3e28ebd529..d1a24525ab 100644
--- a/lib/test_server/src/ts_install_cth.erl
+++ b/lib/test_server/src/ts_install_cth.erl
@@ -68,7 +68,6 @@ id(_Opts) ->
-spec init(Id :: term(), Opts :: proplist()) ->
State :: #state{}.
init(_Id, Opts) ->
-% ct:log("CurrWD: ~p",[file:get_cwd()]),
Nodenames = proplists:get_value(nodenames, Opts, 0),
Nodes = proplists:get_value(nodes, Opts, 0),
TSConfDir = proplists:get_value(ts_conf_dir, Opts),
@@ -93,13 +92,8 @@ pre_init_per_suite(Suite,Config,#state{ ts_conf_dir = undefined} = State) ->
TSConfDir = filename:join([ParentDir, "..","test_server"]),
pre_init_per_suite(Suite, Config, State#state{ ts_conf_dir = TSConfDir });
pre_init_per_suite(_Suite,Config,State) ->
-% ct:log("pre_init_per_suite(~p,~p,~p)",[_Suite,Config,State]),
DataDir = proplists:get_value(data_dir, Config),
try
-% install(State#state.ts_conf_dir,
-% State#state.target_system,
-% State#state.install_opts),
-
{ok,Variables} =
file:consult(filename:join(State#state.ts_conf_dir,"variables")),
@@ -118,7 +112,7 @@ pre_init_per_suite(_Suite,Config,State) ->
catch Error:Reason ->
Stack = erlang:get_stacktrace(),
ct:pal("~p failed! ~p:{~p,~p}",[?MODULE,Error,Reason,Stack]),
- {fail,{?MODULE,{Error,Reason, Stack}}}
+ {{fail,{?MODULE,{Error,Reason, Stack}}},State}
end.
%% @doc Called after init_per_suite.
@@ -221,7 +215,6 @@ on_tc_skip(_TC, _Reason, State) ->
-spec terminate(State :: #state{}) ->
term().
terminate(_State) ->
-% ct:log("Terminate called"),
ok.
%%% ============================================================================
diff --git a/lib/test_server/src/ts_run.erl b/lib/test_server/src/ts_run.erl
index 60e01600e1..d572b1454c 100644
--- a/lib/test_server/src/ts_run.erl
+++ b/lib/test_server/src/ts_run.erl
@@ -28,7 +28,7 @@
-include("ts.hrl").
--import(lists, [map/2,member/2,filter/2,reverse/1]).
+-import(lists, [member/2,filter/2]).
-record(state,
{file, % File given.
@@ -75,19 +75,6 @@ run(File, Args0, Options, Vars0) ->
Error -> Error
end.
-make_loop(Hooks, Vars0, Spec0, St0) ->
- case St0#state.makefiles of
- [Makefile|Rest] ->
- case execute(Hooks, Vars0, Spec0, St0#state{makefile=Makefile}) of
- {error, Reason} ->
- {error, Reason};
- {ok, Vars, Spec, St} ->
- make_loop(Hooks, Vars, Spec, St#state{makefiles=Rest})
- end;
- [] ->
- {ok, Vars0, Spec0, St0}
- end.
-
execute([Hook|Rest], Vars0, Spec0, St0) ->
case Hook(Vars0, Spec0, St0) of
ok ->
@@ -137,101 +124,6 @@ init_state(Vars, [], St0) ->
false ->
{error,{no_test_directory,TestDir}}
end.
-
-%% Read the spec file for the test suite.
-
-read_spec_file(Vars, _, St) ->
- TestDir = St#state.test_dir,
- File = St#state.file,
- {SpecFile,Res} = get_spec_filename(Vars, TestDir, File),
- case Res of
- {ok,Spec} ->
- {ok,Vars,Spec,St};
- {error,Atom} when is_atom(Atom) ->
- {error,{no_spec,SpecFile}};
- {error,Reason} ->
- {error,{bad_spec,lists:flatten(file:format_error(Reason))}}
- end.
-
-get_spec_filename(Vars, TestDir, File) ->
- DynSpec = filename:join(TestDir, File ++ ".dynspec"),
- case filelib:is_file(DynSpec) of
- true ->
- Bs0 = erl_eval:new_bindings(),
- Bs1 = erl_eval:add_binding('Target', ts_lib:var(target, Vars), Bs0),
- Bs2 = erl_eval:add_binding('Os', ts_lib:var(os, Vars), Bs1),
- TCCStr = ts_lib:var(test_c_compiler, Vars),
- TCC = try
- {ok, Toks, _} = erl_scan:string(TCCStr ++ "."),
- {ok, Tcc} = erl_parse:parse_term(Toks),
- Tcc
- catch
- _:_ -> undefined
- end,
- Bs = erl_eval:add_binding('TestCCompiler', TCC, Bs2),
- {DynSpec,file:script(DynSpec, Bs)};
- false ->
- SpecFile = get_spec_filename_1(Vars, TestDir, File),
- {SpecFile,file:consult(SpecFile)}
- end.
-
-get_spec_filename_1(Vars, TestDir, File) ->
- case ts_lib:var(os, Vars) of
- "VxWorks" ->
- check_spec_filename(TestDir, File, ".spec.vxworks");
- "Windows"++_ ->
- check_spec_filename(TestDir, File, ".spec.win");
- _Other ->
- filename:join(TestDir, File ++ ".spec")
- end.
-
-check_spec_filename(TestDir, File, Ext) ->
- Spec = filename:join(TestDir, File ++ Ext),
- case filelib:is_file(Spec) of
- true -> Spec;
- false -> filename:join(TestDir, File ++ ".spec")
- end.
-
-%% Remove the top case from the spec file. We will add our own
-%% top case later.
-
-remove_original_topcase(Vars, Spec, St) ->
- {ok,Vars,filter(fun ({topcase,_}) -> false;
- (_Other) -> true end, Spec),St}.
-
-%% Initialize our new top case. We'll keep in it the state to be
-%% able to add more to it.
-
-init_topcase(Vars, Spec, St) ->
- TestDir = St#state.test_dir,
- TopCase =
- case St#state.mod of
- Mod when is_atom(Mod) ->
- ModStr = atom_to_list(Mod),
- case filelib:is_file(filename:join(TestDir,ModStr++".erl")) of
- true -> [{Mod,all}];
- false ->
- Wc = filename:join(TestDir, ModStr ++ "*_SUITE.erl"),
- [{list_to_atom(filename:basename(M, ".erl")),all} ||
- M <- filelib:wildcard(Wc)]
- end;
- _Other ->
- %% Here we used to return {dir,TestDir}. Now we instead
- %% list all suites in TestDir, so we can add make testcases
- %% around it later (see add_make_testcase) without getting
- %% duplicates of the suite. (test_server_ctrl does no longer
- %% check for duplicates of testcases)
- Wc = filename:join(TestDir, "*_SUITE.erl"),
- [{list_to_atom(filename:basename(M, ".erl")),all} ||
- M <- filelib:wildcard(Wc)]
- end,
- {ok,Vars,Spec,St#state{topcase=TopCase}}.
-
-%% Or if option keep_topcase was given, eh... keep the topcase
-copy_topcase(Vars, Spec, St) ->
- {value,{topcase,Tc}} = lists:keysearch(topcase,1,Spec),
- {ok, Vars, lists:keydelete(topcase,1,Spec),St#state{topcase=Tc}}.
-
%% Run any "Makefile.first" files first.
%% XXX We should fake a failing test case if the make fails.
@@ -260,157 +152,6 @@ run_pre_makefile(Vars, Spec, St) ->
{error,_Reason}=Error -> Error
end.
-%% Search for `Makefile.src' in each *_SUITE_data directory.
-
-find_makefiles(Vars, Spec, St) ->
- Wc = filename:join(St#state.data_wc, "Makefile.src"),
- Makefiles = reverse(del_skipped_suite_data_dir(filelib:wildcard(Wc), Spec)),
- {ok,Vars,Spec,St#state{makefiles=Makefiles}}.
-
-%% Create "Makefile" from "Makefile.src".
-
-make_make(Vars, Spec, State) ->
- Src = State#state.makefile,
- Dest = filename:rootname(Src),
- ts_lib:progress(Vars, 1, "Making ~s...\n", [Dest]),
- case ts_lib:subst_file(Src, Dest, Vars) of
- ok ->
- {ok, Vars, Spec, State#state{makefile=Dest}};
- {error, Reason} ->
- {error, {Src, Reason}}
- end.
-
-%% Add a testcase which will do the making of the stuff in the data directory.
-
-add_make_testcase(Vars, Spec, St) ->
- Makefile = St#state.makefile,
- Dir = filename:dirname(Makefile),
- Shortname = filename:basename(Makefile),
- Suite = filename:basename(Dir, "_data"),
- Config = [{data_dir,Dir},{makefile,Shortname}],
- MakeModule = Suite ++ "_make",
- MakeModuleSrc = filename:join(filename:dirname(Dir),
- MakeModule ++ ".erl"),
- MakeMod = list_to_atom(MakeModule),
- case filelib:is_file(MakeModuleSrc) of
- true -> ok;
- false -> generate_make_module(ts_lib:var(make_command, Vars),
- MakeModuleSrc,
- MakeModule)
- end,
- case Suite of
- "all_SUITE" ->
- {ok,Vars,Spec,St#state{all={MakeMod,Config}}};
- _ ->
- %% Avoid duplicates of testcases. There is no longer
- %% a check for this in test_server_ctrl.
- TestCase = {list_to_atom(Suite),all},
- TopCase0 = case St#state.topcase of
- List when is_list(List) ->
- List -- [TestCase];
- Top ->
- [Top] -- [TestCase]
- end,
- TopCase = [{make,{MakeMod,make,[Config]},
- TestCase,
- {MakeMod,unmake,[Config]}}|TopCase0],
- {ok,Vars,Spec,St#state{topcase=TopCase}}
- end.
-
-generate_make_module(MakeCmd, Name, ModuleString) ->
- {ok,Host} = inet:gethostname(),
- file:write_file(Name,
- ["-module(",ModuleString,").\n",
- "\n",
- "-export([make/1,unmake/1]).\n",
- "\n",
- "make(Config) when is_list(Config) ->\n",
- " Mins = " ++ integer_to_list(?DEFAULT_MAKE_TIMETRAP_MINUTES) ++ ",\n"
- " test_server:format(\"=== Setting timetrap to ~p minutes ===~n\", [Mins]),\n"
- " TimeTrap = test_server:timetrap(test_server:minutes(Mins)),\n"
- " Res = ts_make:make([{make_command, \""++MakeCmd++"\"},{cross_node,\'ts@" ++ Host ++ "\'}|Config]),\n",
- " test_server:timetrap_cancel(TimeTrap),\n"
- " Res.\n"
- "\n",
- "unmake(Config) when is_list(Config) ->\n",
- " Mins = " ++ integer_to_list(?DEFAULT_UNMAKE_TIMETRAP_MINUTES) ++ ",\n"
- " test_server:format(\"=== Setting timetrap to ~p minutes ===~n\", [Mins]),\n"
- " TimeTrap = test_server:timetrap(test_server:minutes(Mins)),\n"
- " Res = ts_make:unmake([{make_command, \""++MakeCmd++"\"}|Config]),\n"
- " test_server:timetrap_cancel(TimeTrap),\n"
- " Res.\n"
- "\n"]).
-
-
-make_test_suite(Vars, _Spec, State) ->
- TestDir = State#state.test_dir,
-
- Erl_flags=[{i, "../test_server"}|ts_lib:var(erl_flags,Vars)],
-
- case code:is_loaded(test_server_line) of
- false -> code:load_file(test_server_line);
- _ -> ok
- end,
-
- {ok, Cwd} = file:get_cwd(),
- ok = file:set_cwd(TestDir),
- Result = (catch make_all(Erl_flags)),
- ok = file:set_cwd(Cwd),
- case Result of
- up_to_date ->
- ok;
- {'EXIT', Reason} ->
- %% If I return an error here, the test will be stopped
- %% and it will not show up in the top index page. Instead
- %% I return ok - the test will run for all existing suites.
- %% It might be that there are old suites that are run, but
- %% at least one suite is missing, and that is reported on the
- %% top index page.
- io:format("~s: {error,{make_crashed,~p}\n",
- [State#state.file,Reason]),
- ok;
- error ->
- %% See comment above
- io:format("~s: {error,make_of_test_suite_failed}\n",
- [State#state.file]),
- ok
- end.
-
-%% Add topcase to spec.
-
-add_topcase_to_spec(Vars, Spec, St) ->
- Tc = case St#state.all of
- {MakeMod,Config} ->
- [{make,{MakeMod,make,[Config]},
- St#state.topcase,
- {MakeMod,unmake,[Config]}}];
- undefined -> St#state.topcase
- end,
- {ok,Vars,Spec++[{topcase,Tc}],St}.
-
-%% Writes the (possibly transformed) spec file.
-
-write_spec_file(Vars, Spec, _State) ->
- F = fun(Term) -> io_lib:format("~p.~n", [Term]) end,
- SpecFile = map(F, Spec),
- Hosts =
- case lists:keysearch(hosts, 1, Vars) of
- false ->
- [];
- {value, {hosts, HostList}} ->
- io_lib:format("{hosts,~p}.~n",[HostList])
- end,
- DiskLess =
- case lists:keysearch(diskless, 1, Vars) of
- false ->
- [];
- {value, {diskless, How}} ->
- io_lib:format("{diskless, ~p}.~n",[How])
- end,
- Conf = consult_config(),
- MoreConfig = io_lib:format("~p.\n", [{config,Conf}]),
- file:write_file("current.spec", [DiskLess,Hosts,MoreConfig,SpecFile]).
-
get_config_files() ->
TSConfig = "ts.config",
[TSConfig | case os:type() of
@@ -420,21 +161,6 @@ get_config_files() ->
_ -> []
end].
-consult_config() ->
- {ok,Conf} = file:consult("ts.config"),
- case os:type() of
- {unix,_} -> consult_config("ts.unix.config", Conf);
- {win32,_} -> consult_config("ts.win32.config", Conf);
- vxworks -> consult_config("ts.vxworks.config", Conf);
- _ -> Conf
- end.
-
-consult_config(File, Conf0) ->
- case file:consult(File) of
- {ok,Conf} -> Conf++Conf0;
- {error,enoent} -> Conf0
- end.
-
%% Makes the command to start up the Erlang node to run the tests.
backslashify([$\\, $" | T]) ->
@@ -647,77 +373,11 @@ make_common_test_args(Args0, Options, _Vars) ->
io_lib:format("~100000p",[Args0++Trace++Cover++Logdir++
ConfigFiles++Options]).
-make_test_server_args(Args0,Options,Vars) ->
- Parameters =
- case ts_lib:var(os, Vars) of
- "VxWorks" ->
- F = write_parameterfile(vxworks,Vars),
- " PARAMETERS " ++ F;
- _ ->
- ""
- end,
- Trace =
- case lists:keysearch(trace,1,Options) of
- {value,{trace,TI}} when is_tuple(TI); is_tuple(hd(TI)) ->
- ok = file:write_file(?tracefile,io_lib:format("~p.~n",[TI])),
- " TRACE " ++ ?tracefile;
- {value,{trace,TIFile}} when is_atom(TIFile) ->
- " TRACE " ++ atom_to_list(TIFile);
- {value,{trace,TIFile}} ->
- " TRACE " ++ TIFile;
- false ->
- ""
- end,
- Cover =
- case lists:keysearch(cover,1,Options) of
- {value,{cover,App,File,Analyse}} ->
- " COVER " ++ to_list(App) ++ " " ++ to_list(File) ++ " " ++
- to_list(Analyse);
- false ->
- ""
- end,
- TCCallback =
- case ts_lib:var(ts_testcase_callback, Vars) of
- "" ->
- "";
- {Mod,Func} ->
- io:format("Function ~w:~w/4 will be called before and "
- "after each test case.\n", [Mod,Func]),
- " TESTCASE_CALLBACK " ++ to_list(Mod) ++ " " ++ to_list(Func);
- ModFunc when is_list(ModFunc) ->
- [Mod,Func]=string:tokens(ModFunc," "),
- io:format("Function ~s:~s/4 will be called before and "
- "after each test case.\n", [Mod,Func]),
- " TESTCASE_CALLBACK " ++ ModFunc;
- _ ->
- ""
- end,
- Args0 ++ Parameters ++ Trace ++ Cover ++ TCCallback.
-
to_list(X) when is_atom(X) ->
atom_to_list(X);
to_list(X) when is_list(X) ->
X.
-write_parameterfile(Type,Vars) ->
- Cross_host = ts_lib:var(target_host, Vars),
- SlaveTargets = case lists:keysearch(slavetargets,1,Vars) of
- {value, ST} ->
- [ST];
- _ ->
- []
- end,
- Master = case lists:keysearch(master,1,Vars) of
- {value,M} -> [M];
- false -> []
- end,
- ToWrite = [{type,Type},
- {target, list_to_atom(Cross_host)}] ++ SlaveTargets ++ Master,
-
- Crossfile = atom_to_list(Type) ++ "parameters" ++ os:getpid(),
- ok = file:write_file(Crossfile,io_lib:format("~p.~n", [ToWrite])),
- Crossfile.
-
%%
%% Paths and spaces handling for w2k and XP
%%
@@ -763,53 +423,3 @@ split_one(Path) ->
split_path(Path) ->
string:tokens(Path,";").
-
-%%
-%% Run make:all/1 if the test suite seems to be designed
-%% to be built/re-built by ts.
-%%
-make_all(Flags) ->
- case filelib:is_regular("Emakefile") of
- false ->
- make_all_no_emakefile(Flags);
- true ->
- make:all(Flags)
- end.
-
-make_all_no_emakefile(Flags) ->
- case filelib:wildcard("*.beam") of
- [] ->
- %% Since there are no *.beam files, we will assume
- %% that this test suite was designed to be built and
- %% re-built by ts. Create an Emakefile so that
- %% make:all/1 will be run the next time too
- %% (in case a test suite is being interactively
- %% developed).
- create_emakefile(Flags, "*.erl");
- [_|_] ->
- %% There is no Emakefile and there already are
- %% some *.beam files here. Assume that this test
- %% suite was not designed to be re-built by ts.
- %% Only create a Emakefile that will compile
- %% generated *_SUITE_make files (if any).
- create_emakefile(Flags, "*_SUITE_make.erl")
- end.
-
-create_emakefile(Flags, Wc) ->
- case filelib:wildcard(Wc) of
- [] ->
- %% There are no files to be built (i.e. not even any
- %% generated *_SUITE_make.erl files). We must handle
- %% this case specially, because make:all/1 will crash
- %% on Emakefile with an empty list of modules.
- io:put_chars("No Emakefile found - not running make:all/1\n"),
- up_to_date;
- [_|_]=Ms0 ->
- io:format("Creating an Emakefile for compiling files matching ~s\n",
- [Wc]),
- Ms = [list_to_atom(filename:rootname(M, ".erl")) || M <- Ms0],
- Make0 = {Ms,Flags},
- Make = io_lib:format("~p. \n", [Make0]),
- ok = file:write_file("Emakefile", Make),
- make:all(Flags)
- end.
diff --git a/lib/test_server/test/Makefile b/lib/test_server/test/Makefile
index 9fe5aee3bb..0648c1f96a 100644
--- a/lib/test_server/test/Makefile
+++ b/lib/test_server/test/Makefile
@@ -27,11 +27,7 @@ include $(ERL_TOP)/make/$(TARGET)/otp.mk
MODULES= \
test_server_SUITE \
test_server_line_SUITE \
- test_server_skip_SUITE \
- test_server_conf01_SUITE \
- test_server_conf02_SUITE \
- test_server_parallel01_SUITE \
- test_server_shuffle01_SUITE
+ test_server_test_lib
ERL_FILES= $(MODULES:%=%.erl)
@@ -52,6 +48,7 @@ RELSYSDIR = $(RELEASE_PATH)/test_server_test
ERL_MAKE_FLAGS += -pa $(ERL_TOP)/lib/test_server/ebin
ERL_COMPILE_FLAGS += -I$(ERL_TOP)/lib/test_server/include
+ERL_COMPILE_FLAGS += -I$(ERL_TOP)/lib/test_server/test
EBIN = .
diff --git a/lib/test_server/test/test_server.spec b/lib/test_server/test/test_server.spec
index 23b0b71963..a3b4d01d08 100644
--- a/lib/test_server/test/test_server.spec
+++ b/lib/test_server/test/test_server.spec
@@ -1,2 +1 @@
-{topcase, {dir, "../test_server_test"}}.
-{skip,{test_server_SUITE,skip_case7,"This case should be noted as `Skipped'"}}.
+{suites, "../test_server_test", all}.
diff --git a/lib/test_server/test/test_server_SUITE.erl b/lib/test_server/test/test_server_SUITE.erl
index 0563e1104f..f4c19eeaf9 100644
--- a/lib/test_server/test/test_server_SUITE.erl
+++ b/lib/test_server/test/test_server_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2010. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -16,539 +16,149 @@
%%
%% %CopyrightEnd%
%%
-
-%%%------------------------------------------------------------------
-%%% Test Server self test.
-%%%------------------------------------------------------------------
+%%%-------------------------------------------------------------------
+%%% @author Lukas Larsson <[email protected]>
+%%% @copyright (C) 2011, Erlang Solutions Ltd.
+%%% @doc
+%%%
+%%% @end
+%%% Created : 15 Feb 2011 by Lukas Larsson <[email protected]>
+%%%-------------------------------------------------------------------
-module(test_server_SUITE).
--include_lib("test_server/include/test_server.hrl").
--include_lib("test_server/include/test_server_line.hrl").
--include_lib("kernel/include/file.hrl").
--export([all/1]).
--export([init_per_suite/1, end_per_suite/1]).
--export([init_per_testcase/2, end_per_testcase/2, fin_per_testcase/2]).
--export([config/1, comment/1, timetrap/1, timetrap_cancel/1, multiply_timetrap/1,
- init_per_s/1, init_per_tc/1, end_per_tc/1,
- timeconv/1, msgs/1, capture/1, timecall/1,
- do_times/1, do_times_mfa/1, do_times_fun/1,
- skip_cases/1, skip_case1/1, skip_case2/1, skip_case3/1,
- skip_case4/1, skip_case5/1, skip_case6/1, skip_case7/1,
- skip_case8/1, skip_case9/1, undefined_functions/1,
- conf_init/1, check_new_conf/1, conf_cleanup/1,
- check_old_conf/1, conf_init_fail/1, start_stop_node/1,
- cleanup_nodes_init/1, check_survive_nodes/1, cleanup_nodes_fin/1,
- commercial/1]).
+%% Note: This directive should only be used in test suites.
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+-include("test_server_test_lib.hrl").
--export([dummy_function/0,dummy_function/1,doer/1]).
+%%--------------------------------------------------------------------
+%% COMMON TEST CALLBACK FUNCTIONS
+%%--------------------------------------------------------------------
-all(doc) -> ["Test Server self test"];
-all(suite) ->
- [config, comment, timetrap, timetrap_cancel, multiply_timetrap,
- init_per_s, init_per_tc, end_per_tc,
- timeconv, msgs, capture, timecall, do_times, skip_cases,
- undefined_functions, commercial,
- {conf, conf_init, [check_new_conf], conf_cleanup},
- check_old_conf,
- {conf, conf_init_fail,[conf_member_skip],conf_cleanup_skip},
- start_stop_node,
- {conf, cleanup_nodes_init,[check_survive_nodes],cleanup_nodes_fin},
- config
- ].
+%% @spec suite() -> Info
+suite() ->
+ [{ct_hooks,[ts_install_cth,test_server_test_lib]}].
+%% @spec init_per_suite(Config0) ->
+%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1}
init_per_suite(Config) ->
- [{init_per_suite_var,ok}|Config].
+ [{path_dirs,[proplists:get_value(data_dir,Config)]} | Config].
+%% @spec end_per_suite(Config) -> _
end_per_suite(_Config) ->
- ok.
-
-init_per_testcase(Func, Config) when is_atom(Func), is_list(Config) ->
- Dog = ?t:timetrap(?t:minutes(2)),
- Config1 = [{watchdog, Dog}|Config],
- case Func of
- init_per_tc ->
- [{strange_var, 1}|Config1];
- skip_case8 ->
- {skipped, "This case should be noted as `Skipped'"};
- skip_case9 ->
- {skip, "This case should be noted as `Skipped'"};
- _ ->
- Config1
- end;
-init_per_testcase(Func, Config) ->
- io:format("Func:~p",[Func]),
- io:format("Config:~p",[Config]),
- ?t:fail("Arguments to init_per_testcase not correct").
-
-end_per_testcase(Func, Config) when is_atom(Func), is_list(Config) ->
- Dog=?config(watchdog, Config),
- ?t:timetrap_cancel(Dog),
- case Func of
- end_per_tc -> io:format("CLEANUP => this test case is ok\n");
- _Other -> ok
- end;
-end_per_testcase(Func, Config) ->
- io:format("Func:~p",[Func]),
- io:format("Config:~p",[Config]),
- ?t:fail("Arguments to end_per_testcase not correct").
-
-fin_per_testcase(Func, Config) ->
- io:format("Func:~p",[Func]),
- io:format("Config:~p",[Config]),
- ?t:fail("fin_per_testcase/2 called, should have called end_per_testcase/2").
+ io:format("TEST_SERVER_FRAMEWORK: ~p",[os:getenv("TEST_SERVER_FRAMEWORK")]),
+ ok.
+
+%% @spec init_per_group(GroupName, Config0) ->
+%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1}
+init_per_group(_GroupName, Config) ->
+ Config.
+
+%% @spec end_per_group(GroupName, Config0) ->
+%% void() | {save_config,Config1}
+end_per_group(_GroupName, _Config) ->
+ ok.
+
+%% @spec init_per_testcase(TestCase, Config0) ->
+%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1}
+init_per_testcase(_TestCase, Config) ->
+ Config.
+
+%% @spec end_per_testcase(TestCase, Config0) ->
+%% void() | {save_config,Config1} | {fail,Reason}
+end_per_testcase(_TestCase, _Config) ->
+ ok.
+
+%% @spec: groups() -> [Group]
+groups() ->
+ [].
+
+%% @spec all() -> GroupsAndTestCases | {skip,Reason}
+all() ->
+ [test_server_SUITE, test_server_parallel01_SUITE,
+ test_server_conf02_SUITE, test_server_conf01_SUITE,
+ test_server_skip_SUITE, test_server_shuffle01_SUITE].
+
+
+%%--------------------------------------------------------------------
+%% TEST CASES
+%%--------------------------------------------------------------------
+%% @spec TestCase(Config0) ->
+%% ok | exit() | {skip,Reason} | {comment,Comment} |
+%% {save_config,Config1} | {skip_and_save,Reason,Config1}
+test_server_SUITE(Config) ->
+% rpc:call(Node,dbg, tracer,[]),
+% rpc:call(Node,dbg, p,[all,c]),
+% rpc:call(Node,dbg, tpl,[test_server_ctrl,x]),
+ run_test_server_tests("test_server_SUITE", 39, 1, 31,
+ 20, 9, 1, 11, 2, 26, Config).
+
+test_server_parallel01_SUITE(Config) ->
+ run_test_server_tests("test_server_parallel01_SUITE", 37, 0, 19,
+ 19, 0, 0, 0, 0, 37, Config).
+
+test_server_shuffle01_SUITE(Config) ->
+ run_test_server_tests("test_server_shuffle01_SUITE", 130, 0, 0,
+ 76, 0, 0, 0, 0, 130, Config).
+
+test_server_skip_SUITE(Config) ->
+ run_test_server_tests("test_server_skip_SUITE", 3, 0, 1,
+ 0, 0, 1, 3, 0, 0, Config).
+
+test_server_conf01_SUITE(Config) ->
+ run_test_server_tests("test_server_conf01_SUITE", 24, 0, 12,
+ 12, 0, 0, 0, 0, 24, Config).
+
+test_server_conf02_SUITE(Config) ->
+ run_test_server_tests("test_server_conf02_SUITE", 26, 0, 12,
+ 12, 0, 0, 0, 0, 26, Config).
+
+
+run_test_server_tests(SuiteName, NCases, NFail, NExpected, NSucc,
+ NUsrSkip, NAutoSkip,
+ NActualSkip, NActualFail, NActualSucc, Config) ->
+ Node = proplists:get_value(node, Config),
+ {ok,_Pid} = rpc:call(Node,test_server_ctrl, start, []),
+ rpc:call(Node,
+ test_server_ctrl,add_dir_with_skip,
+ [SuiteName,
+ [proplists:get_value(data_dir,Config)],SuiteName,
+ [{test_server_SUITE,skip_case7,"SKIPPED!"}]]),
+
+ until(fun() ->
+ rpc:call(Node,test_server_ctrl,jobs,[]) =:= []
+ end),
-
-config(suite) -> [];
-config(doc) -> ["Test that the Config variable is decent, ",
- "and that the std config variables are correct ",
- "(check that data/priv dir exists)."
- "Also check that ?config macro works."];
-config(Config) when is_list(Config) ->
- is_tuplelist(Config),
- {value,{data_dir,Dd}}=lists:keysearch(data_dir,1,Config),
- {value,{priv_dir,Dp}}=lists:keysearch(priv_dir,1,Config),
- true=is_dir(Dd),
- {ok, _Bin}=file:read_file(filename:join(Dd, "dummy_file")),
- true=is_dir(Dp),
-
- Dd = ?config(data_dir,Config),
- Dp = ?config(priv_dir,Config),
- ok;
-config(_Config) ->
- ?t:fail("Config variable is not a list.").
-
-is_tuplelist([]) ->
- true;
-is_tuplelist([{_A,_B}|Rest]) ->
- is_tuplelist(Rest);
-is_tuplelist(_) ->
- false.
-
-is_dir(Dir) ->
- case file:read_file_info(Dir) of
- {ok, #file_info{type=directory}} ->
- true;
- _ ->
- false
- end.
-
-comment(suite) -> [];
-comment(doc) -> ["Print a comment in the HTML log"];
-comment(Config) when is_list(Config) ->
- ?t:comment("This comment should not occur in the HTML log because a later"
- " comment shall overwrite it"),
- ?t:comment("This comment is printed with the comment/1 function."
- " It should occur in the HTML log").
-
-
-
-timetrap(suite) -> [];
-timetrap(doc) -> ["Test that timetrap works."];
-timetrap(Config) when is_list(Config) ->
- TrapAfter = 3000,
- Dog=?t:timetrap(TrapAfter),
- process_flag(trap_exit, true),
- TimeOut = TrapAfter * test_server:timetrap_scale_factor() + 1000,
- receive
- {'EXIT', Dog, {timetrap_timeout, _, _}} ->
- ok;
- {'EXIT', _OtherPid, {timetrap_timeout, _, _}} ->
- ?t:fail("EXIT signal from wrong process")
- after
- TimeOut ->
- ?t:fail("Timetrap is not working.")
- end,
- ?t:timetrap_cancel(Dog),
- ok.
-
-
-timetrap_cancel(suite) -> [];
-timetrap_cancel(doc) -> ["Test that timetrap_cancel works."];
-timetrap_cancel(Config) when is_list(Config) ->
- Dog=?t:timetrap(1000),
- receive
- after
- 500 ->
- ok
- end,
- ?t:timetrap_cancel(Dog),
- receive
- after 1000 ->
- ok
- end,
- ok.
-
-multiply_timetrap(suite) -> [];
-multiply_timetrap(doc) -> ["Test multiply timetrap"];
-multiply_timetrap(Config) when is_list(Config) ->
- %% This simulates the call to test_server_ctrl:multiply_timetraps/1:
- put(test_server_multiply_timetraps,{2,true}),
-
- Dog = ?t:timetrap(500),
- timer:sleep(800),
- ?t:timetrap_cancel(Dog),
-
- %% Reset
- put(test_server_multiply_timetraps,1),
- ok.
-
-
-init_per_s(suite) -> [];
-init_per_s(doc) -> ["Test that a Config that is altered in ",
- "init_per_suite gets through to the testcases."];
-init_per_s(Config) ->
- %% Check that the config var sent from init_per_suite
- %% really exists.
- {value, {init_per_suite_var, ok}} =
- lists:keysearch(init_per_suite_var,1,Config),
-
- %% Check that the other variables still exist.
- {value,{data_dir,_Dd}}=lists:keysearch(data_dir,1,Config),
- {value,{priv_dir,_Dp}}=lists:keysearch(priv_dir,1,Config),
- ok.
-
-init_per_tc(suite) -> [];
-init_per_tc(doc) -> ["Test that a Config that is altered in ",
- "init_per_testcase gets through to the ",
- "actual testcase."];
-init_per_tc(Config) ->
- %% Check that the config var sent from init_per_testcase
- %% really exists.
- {value, {strange_var, 1}} = lists:keysearch(strange_var,1,Config),
-
- %% Check that the other variables still exist.
- {value,{data_dir,_Dd}}=lists:keysearch(data_dir,1,Config),
- {value,{priv_dir,_Dp}}=lists:keysearch(priv_dir,1,Config),
- ok.
-
-end_per_tc(suite) -> [];
-end_per_tc(doc) -> ["Test that end_per_testcase/2 is called even if"
- " test case fails"];
-end_per_tc(Config) when is_list(Config) ->
- ?t:fail("This case should fail! Check that \"CLEANUP\" is"
- " printed in the minor log file.").
-
-
-timeconv(suite) -> [];
-timeconv(doc) -> ["Test that the time unit conversion functions ",
- "works."];
-timeconv(Config) when is_list(Config) ->
- Val=2,
- Secs=Val*1000,
- Mins=Secs*60,
- Hrs=Mins*60,
- Secs=?t:seconds(2),
- Mins=?t:minutes(2),
- Hrs=?t:hours(2),
- ok.
-
-
-msgs(suite) -> [];
-msgs(doc) -> ["Tests the messages_get function."];
-msgs(Config) when is_list(Config) ->
- self() ! {hej, du},
- self() ! {lite, "data"},
- self() ! en_atom,
- [{hej, du}, {lite, "data"}, en_atom] = ?t:messages_get(),
- ok.
-
-capture(suite) -> [];
-capture(doc) -> ["Test that the capture functions work properly."];
-capture(Config) when is_list(Config) ->
- String1="abcedfghjiklmnopqrstuvwxyz",
- String2="0123456789",
- ?t:capture_start(),
- io:format(String1),
- [String1]=?t:capture_get(),
- io:format(String2),
- [String2]=?t:capture_get(),
- ?t:capture_stop(),
- []=?t:capture_get(),
- io:format(String2),
- []=?t:capture_get(),
- ok.
-
-timecall(suite) -> [];
-timecall(doc) -> ["Tests that timed calls work."];
-timecall(Config) when is_list(Config) ->
- {_Time1, liten_apa_e_oxo_farlig} = ?t:timecall(?MODULE, dummy_function, []),
- {Time2, jag_ar_en_gorilla} = ?t:timecall(?MODULE, dummy_function, [gorilla]),
- DTime=round(Time2),
- if
- DTime<1 ->
- ?t:fail("Timecall reported a too low time.");
- DTime==1 ->
+ rpc:call(Node,test_server_ctrl, stop, []),
+ {ok,#suite{ n_cases = NCases,
+ n_cases_failed = NFail,
+ n_cases_expected = NExpected,
+ n_cases_succ = NSucc,
+ n_cases_user_skip = NUsrSkip,
+ n_cases_auto_skip = NAutoSkip,
+ cases = Cases }} = Data =
+ test_server_test_lib:parse_suite(
+ hd(filelib:wildcard(
+ filename:join([proplists:get_value(priv_dir, Config),
+ SuiteName++".logs","run*","suite.log"])))),
+ {NActualSkip,NActualFail,NActualSucc} =
+ lists:foldl(fun(#tc{ result = skip },{S,F,Su}) ->
+ {S+1,F,Su};
+ (#tc{ result = ok },{S,F,Su}) ->
+ {S,F,Su+1};
+ (#tc{ result = failed },{S,F,Su}) ->
+ {S,F+1,Su}
+ end,{0,0,0},Cases),
+ Data.
+
+until(Fun) ->
+ case Fun() of
+ true ->
ok;
- DTime>1 ->
- ?t:fail("Timecall reported a too high time.")
- end,
- ok.
-
-dummy_function() ->
- liten_apa_e_oxo_farlig.
-dummy_function(gorilla) ->
- receive after 1000 -> ok end,
- jag_ar_en_gorilla.
-
-
-do_times(suite) -> [do_times_mfa, do_times_fun];
-do_times(doc) -> ["Test the do_times function."].
-
-do_times_mfa(suite) -> [];
-do_times_mfa(doc) -> ["Test the do_times function with M,F,A given."];
-do_times_mfa(Config) when is_list(Config) ->
- ?t:do_times(100, ?MODULE, doer, [self()]),
- 100=length(?t:messages_get()),
- ok.
-
-do_times_fun(suite) -> [];
-do_times_fun(doc) -> ["Test the do_times function with fun given."];
-do_times_fun(Config) when is_list(Config) ->
- Self = self(),
- ?t:do_times(100, fun() -> doer(Self) end),
- 100=length(?t:messages_get()),
- ok.
-
-doer(From) ->
- From ! a,
- ok.
-
-skip_cases(doc) -> ["Test all possible ways to skip a test case."];
-skip_cases(suite) -> [skip_case1, skip_case2, skip_case3, skip_case4,
- skip_case5, skip_case6, skip_case7, skip_case8,
- skip_case9].
-
-skip_case1(suite) -> [];
-skip_case1(doc) -> ["Test that you can return {skipped, Reason},"
- " and that Reason is in the comment field in the HTML log"];
-skip_case1(Config) when is_list(Config) ->
- %% If this comment shows, the case failed!!
- ?t:comment("ERROR: This case should have been noted as `Skipped'"),
- %% The Reason in {skipped, Reason} should overwrite a 'comment'
- {skipped, "This case should be noted as `Skipped'"}.
-
-skip_case2(suite) -> [];
-skip_case2(doc) -> ["Test that you can return {skipped, Reason},"
- " and that Reason is in the comment field in the HTML log"];
-skip_case2(Config) when is_list(Config) ->
- %% If this comment shows, the case failed!!
- ?t:comment("ERROR: This case should have been noted as `Skipped'"),
- %% The Reason in {skipped, Reason} should overwrite a 'comment'
- exit({skipped, "This case should be noted as `Skipped'"}).
-
-skip_case3(suite) -> [];
-skip_case3(doc) -> ["Test that you can return {skip, Reason},"
- " and that Reason is in the comment field in the HTML log"];
-skip_case3(Config) when is_list(Config) ->
- %% If this comment shows, the case failed!!
- ?t:comment("ERROR: This case should have been noted as `Skipped'"),
- %% The Reason in {skip, Reason} should overwrite a 'comment'
- {skip, "This case should be noted as `Skipped'"}.
-
-skip_case4(suite) -> [];
-skip_case4(doc) -> ["Test that you can return {skip, Reason},"
- " and that Reason is in the comment field in the HTML log"];
-skip_case4(Config) when is_list(Config) ->
- %% If this comment shows, the case failed!!
- ?t:comment("ERROR: This case should have been noted as `Skipped'"),
- %% The Reason in {skip, Reason} should overwrite a 'comment'
- exit({skip, "This case should be noted as `Skipped'"}).
-
-skip_case5(suite) -> {skipped, "This case should be noted as `Skipped'"};
-skip_case5(doc) -> ["Test that you can return {skipped, Reason}"
- " from the specification clause"].
-
-skip_case6(suite) -> {skip, "This case should be noted as `Skipped'"};
-skip_case6(doc) -> ["Test that you can return {skip, Reason}"
- " from the specification clause"].
-
-skip_case7(suite) -> [];
-skip_case7(doc) -> ["Test that skip works from a test specification file"];
-skip_case7(Config) when is_list(Config) ->
- %% This case shall be skipped by adding
- %% {skip, {test_server_SUITE, skip_case7, Reason}}.
- %% to the test specification file.
- ?t:fail("This case should have been Skipped by the .spec file").
-
-skip_case8(suite) -> [];
-skip_case8(doc) -> ["Test that {skipped, Reason} works from"
- " init_per_testcase/2"];
-skip_case8(Config) when is_list(Config) ->
- %% This case shall be skipped by adding a specific clause to
- %% returning {skipped, Reason} from init_per_testcase/2 for this case.
- ?t:fail("This case should have been Skipped by init_per_testcase/2").
-
-skip_case9(suite) -> [];
-skip_case9(doc) -> ["Test that {skip, Reason} works from a init_per_testcase/2"];
-skip_case9(Config) when is_list(Config) ->
- %% This case shall be skipped by adding a specific clause to
- %% returning {skip, Reason} from init_per_testcase/2 for this case.
- ?t:fail("This case should have been Skipped by init_per_testcase/2").
-
-undefined_functions(suite) -> [];
-undefined_functions(doc) -> ["Check for calls to undefined functions in"
- " test_server."
- "Skip if cover is running"];
-undefined_functions(Config) when is_list(Config) ->
- case whereis(cover_server) of
- Pid when is_pid(Pid) ->
- {skip,"Cover is running"};
- undefined ->
- undefined_functions()
- end.
-
-undefined_functions() ->
- TestServerDir = filename:dirname(code:which(test_server)),
- Res = xref:d(TestServerDir),
-
- {value,{unused,Unused}} = lists:keysearch(unused, 1, Res),
- case Unused of
- [] -> ok;
- _ ->
- lists:foreach(fun (MFA) ->
- io:format("~s unused", [format_mfa(MFA)])
- end, Unused)
- end,
-
- {value,{undefined,Undef0}} = lists:keysearch(undefined, 1, Res),
- Undef = [U || U <- Undef0, not unresolved(U)],
- case Undef of
- [] -> ok;
- _ ->
- lists:foreach(fun ({MFA1,MFA2}) ->
- io:format("~s calls undefined ~s",
- [format_mfa(MFA1),format_mfa(MFA2)])
- end, Undef),
- ?t:fail({length(Undef),undefined_functions_in_otp})
- end,
- ok.
-
-unresolved({_,{_,'$F_EXPR',_}}) -> true;
-unresolved(_) -> false.
-
-format_mfa({M,F,A}) ->
- lists:flatten(io_lib:format("~s:~s/~p", [M,F,A])).
-
-conf_init(doc) -> ["Test successful conf case: Change Config parameter"];
-conf_init(Config) when is_list(Config) ->
- [{conf_init_var,1389}|Config].
-
-check_new_conf(suite) -> [];
-check_new_conf(doc) -> ["Check that Config parameter changed by"
- " conf_init is used"];
-check_new_conf(Config) when is_list(Config) ->
- 1389 = ?config(conf_init_var,Config),
- ok.
-
-conf_cleanup(doc) -> ["Test successful conf case: Restore Config parameter"];
-conf_cleanup(Config) when is_list(Config) ->
- lists:keydelete(conf_init_var,1,Config).
-
-check_old_conf(suite) -> [];
-check_old_conf(doc) -> ["Test that the restored Config is used after a"
- " conf cleanup"];
-check_old_conf(Config) when is_list(Config) ->
- undefined = ?config(conf_init_var,Config),
- ok.
-
-conf_init_fail(doc) -> ["Test that config members are skipped if"
- " conf init function fails."];
-conf_init_fail(Config) when is_list(Config) ->
- ?t:fail("This case should fail! Check that conf_member_skip and"
- " conf_cleanup_skip are skipped.").
-
-
-
-start_stop_node(suite) -> [];
-start_stop_node(doc) -> ["Test start and stop of slave and peer nodes"];
-start_stop_node(Config) when is_list(Config) ->
- {ok,Node2} = ?t:start_node(node2,peer,[]),
- {error, _} = ?t:start_node(node2,peer,[{fail_on_error,false}]),
- true = lists:member(Node2,nodes()),
-
- {ok,Node3} = ?t:start_node(node3,slave,[]),
- {error, _} = ?t:start_node(node3,slave,[]),
- true = lists:member(Node3,nodes()),
-
- {ok,Node4} = ?t:start_node(node4,peer,[{wait,false}]),
- case lists:member(Node4,nodes()) of
- true ->
- ?t:comment("WARNING: Node started with {wait,false}"
- " is up faster than expected...");
false ->
- wait_for_node(Node4,0),
- true = lists:member(Node4,nodes())
- end,
-
- true = ?t:stop_node(Node2),
- false = lists:member(Node2,nodes()),
-
- true = ?t:stop_node(Node3),
- false = lists:member(Node3,nodes()),
-
- true = ?t:stop_node(Node4),
- false = lists:member(Node4,nodes()),
- timer:sleep(2000),
- false = ?t:stop_node(Node4),
-
- ok.
-
-
-wait_for_node(Node,Acc) ->
- case net_adm:ping(Node) of
- pang ->
timer:sleep(100),
- wait_for_node(Node,Acc+100);
- pong ->
- Acc
+ until(Fun)
end.
-
-cleanup_nodes_init(doc) -> ["Test that nodes are terminated when test case"
- " is finished unless {cleanup,false} is given."];
-cleanup_nodes_init(Config) when is_list(Config) ->
- {ok,DieSlave} = ?t:start_node(die_slave, slave, []),
- {ok,SurviveSlave} = ?t:start_node(survive_slave, slave, [{cleanup,false}]),
- {ok,DiePeer} = ?t:start_node(die_peer, peer, []),
- {ok,SurvivePeer} = ?t:start_node(survive_peer, peer, [{cleanup,false}]),
- [{die_slave,DieSlave},
- {survive_slave,SurviveSlave},
- {die_peer,DiePeer},
- {survive_peer,SurvivePeer} | Config].
-
-
-
-check_survive_nodes(suite) -> [];
-check_survive_nodes(doc) -> ["Test that nodes with {cleanup,false} survived"];
-check_survive_nodes(Config) when is_list(Config) ->
- timer:sleep(1000),
- false = lists:member(?config(die_slave,Config),nodes()),
- true = lists:member(?config(survive_slave,Config),nodes()),
- false = lists:member(?config(die_peer,Config),nodes()),
- true = lists:member(?config(survive_peer,Config),nodes()),
- ok.
-
-
-cleanup_nodes_fin(doc) -> ["Test that nodes started with {cleanup,false}"
- " can be stopped"];
-cleanup_nodes_fin(Config) when is_list(Config) ->
- Slave = ?config(survive_slave,Config),
- Peer = ?config(survive_peer,Config),
-
- true = ?t:stop_node(Slave),
- false = lists:member(Slave,nodes()),
- true = ?t:stop_node(Peer),
- false = lists:member(Peer,nodes()),
-
- C1 = lists:keydelete(die_slave,1,Config),
- C2 = lists:keydelete(survive_slave,1,C1),
- C3 = lists:keydelete(die_peer,1,C2),
- lists:keydelete(survive_peer,1,C3).
-
-commercial(Config) when is_list(Config) ->
- case ?t:is_commercial() of
- false -> {comment,"Open-source build"};
- true -> {comment,"Commercial build"}
- end.
-
-
+
diff --git a/lib/test_server/test/test_server_SUITE_data/Makefile.src b/lib/test_server/test/test_server_SUITE_data/Makefile.src
new file mode 100644
index 0000000000..d5af919eec
--- /dev/null
+++ b/lib/test_server/test/test_server_SUITE_data/Makefile.src
@@ -0,0 +1,2 @@
+all:
+ erlc *.erl
\ No newline at end of file
diff --git a/lib/test_server/test/test_server_SUITE_data/test_server_SUITE.erl b/lib/test_server/test/test_server_SUITE_data/test_server_SUITE.erl
new file mode 100644
index 0000000000..0563e1104f
--- /dev/null
+++ b/lib/test_server/test/test_server_SUITE_data/test_server_SUITE.erl
@@ -0,0 +1,554 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%%------------------------------------------------------------------
+%%% Test Server self test.
+%%%------------------------------------------------------------------
+-module(test_server_SUITE).
+-include_lib("test_server/include/test_server.hrl").
+-include_lib("test_server/include/test_server_line.hrl").
+-include_lib("kernel/include/file.hrl").
+-export([all/1]).
+
+-export([init_per_suite/1, end_per_suite/1]).
+-export([init_per_testcase/2, end_per_testcase/2, fin_per_testcase/2]).
+-export([config/1, comment/1, timetrap/1, timetrap_cancel/1, multiply_timetrap/1,
+ init_per_s/1, init_per_tc/1, end_per_tc/1,
+ timeconv/1, msgs/1, capture/1, timecall/1,
+ do_times/1, do_times_mfa/1, do_times_fun/1,
+ skip_cases/1, skip_case1/1, skip_case2/1, skip_case3/1,
+ skip_case4/1, skip_case5/1, skip_case6/1, skip_case7/1,
+ skip_case8/1, skip_case9/1, undefined_functions/1,
+ conf_init/1, check_new_conf/1, conf_cleanup/1,
+ check_old_conf/1, conf_init_fail/1, start_stop_node/1,
+ cleanup_nodes_init/1, check_survive_nodes/1, cleanup_nodes_fin/1,
+ commercial/1]).
+
+-export([dummy_function/0,dummy_function/1,doer/1]).
+
+all(doc) -> ["Test Server self test"];
+all(suite) ->
+ [config, comment, timetrap, timetrap_cancel, multiply_timetrap,
+ init_per_s, init_per_tc, end_per_tc,
+ timeconv, msgs, capture, timecall, do_times, skip_cases,
+ undefined_functions, commercial,
+ {conf, conf_init, [check_new_conf], conf_cleanup},
+ check_old_conf,
+ {conf, conf_init_fail,[conf_member_skip],conf_cleanup_skip},
+ start_stop_node,
+ {conf, cleanup_nodes_init,[check_survive_nodes],cleanup_nodes_fin},
+ config
+ ].
+
+
+init_per_suite(Config) ->
+ [{init_per_suite_var,ok}|Config].
+
+end_per_suite(_Config) ->
+ ok.
+
+init_per_testcase(Func, Config) when is_atom(Func), is_list(Config) ->
+ Dog = ?t:timetrap(?t:minutes(2)),
+ Config1 = [{watchdog, Dog}|Config],
+ case Func of
+ init_per_tc ->
+ [{strange_var, 1}|Config1];
+ skip_case8 ->
+ {skipped, "This case should be noted as `Skipped'"};
+ skip_case9 ->
+ {skip, "This case should be noted as `Skipped'"};
+ _ ->
+ Config1
+ end;
+init_per_testcase(Func, Config) ->
+ io:format("Func:~p",[Func]),
+ io:format("Config:~p",[Config]),
+ ?t:fail("Arguments to init_per_testcase not correct").
+
+end_per_testcase(Func, Config) when is_atom(Func), is_list(Config) ->
+ Dog=?config(watchdog, Config),
+ ?t:timetrap_cancel(Dog),
+ case Func of
+ end_per_tc -> io:format("CLEANUP => this test case is ok\n");
+ _Other -> ok
+ end;
+end_per_testcase(Func, Config) ->
+ io:format("Func:~p",[Func]),
+ io:format("Config:~p",[Config]),
+ ?t:fail("Arguments to end_per_testcase not correct").
+
+fin_per_testcase(Func, Config) ->
+ io:format("Func:~p",[Func]),
+ io:format("Config:~p",[Config]),
+ ?t:fail("fin_per_testcase/2 called, should have called end_per_testcase/2").
+
+
+config(suite) -> [];
+config(doc) -> ["Test that the Config variable is decent, ",
+ "and that the std config variables are correct ",
+ "(check that data/priv dir exists)."
+ "Also check that ?config macro works."];
+config(Config) when is_list(Config) ->
+ is_tuplelist(Config),
+ {value,{data_dir,Dd}}=lists:keysearch(data_dir,1,Config),
+ {value,{priv_dir,Dp}}=lists:keysearch(priv_dir,1,Config),
+ true=is_dir(Dd),
+ {ok, _Bin}=file:read_file(filename:join(Dd, "dummy_file")),
+ true=is_dir(Dp),
+
+ Dd = ?config(data_dir,Config),
+ Dp = ?config(priv_dir,Config),
+ ok;
+config(_Config) ->
+ ?t:fail("Config variable is not a list.").
+
+is_tuplelist([]) ->
+ true;
+is_tuplelist([{_A,_B}|Rest]) ->
+ is_tuplelist(Rest);
+is_tuplelist(_) ->
+ false.
+
+is_dir(Dir) ->
+ case file:read_file_info(Dir) of
+ {ok, #file_info{type=directory}} ->
+ true;
+ _ ->
+ false
+ end.
+
+comment(suite) -> [];
+comment(doc) -> ["Print a comment in the HTML log"];
+comment(Config) when is_list(Config) ->
+ ?t:comment("This comment should not occur in the HTML log because a later"
+ " comment shall overwrite it"),
+ ?t:comment("This comment is printed with the comment/1 function."
+ " It should occur in the HTML log").
+
+
+
+timetrap(suite) -> [];
+timetrap(doc) -> ["Test that timetrap works."];
+timetrap(Config) when is_list(Config) ->
+ TrapAfter = 3000,
+ Dog=?t:timetrap(TrapAfter),
+ process_flag(trap_exit, true),
+ TimeOut = TrapAfter * test_server:timetrap_scale_factor() + 1000,
+ receive
+ {'EXIT', Dog, {timetrap_timeout, _, _}} ->
+ ok;
+ {'EXIT', _OtherPid, {timetrap_timeout, _, _}} ->
+ ?t:fail("EXIT signal from wrong process")
+ after
+ TimeOut ->
+ ?t:fail("Timetrap is not working.")
+ end,
+ ?t:timetrap_cancel(Dog),
+ ok.
+
+
+timetrap_cancel(suite) -> [];
+timetrap_cancel(doc) -> ["Test that timetrap_cancel works."];
+timetrap_cancel(Config) when is_list(Config) ->
+ Dog=?t:timetrap(1000),
+ receive
+ after
+ 500 ->
+ ok
+ end,
+ ?t:timetrap_cancel(Dog),
+ receive
+ after 1000 ->
+ ok
+ end,
+ ok.
+
+multiply_timetrap(suite) -> [];
+multiply_timetrap(doc) -> ["Test multiply timetrap"];
+multiply_timetrap(Config) when is_list(Config) ->
+ %% This simulates the call to test_server_ctrl:multiply_timetraps/1:
+ put(test_server_multiply_timetraps,{2,true}),
+
+ Dog = ?t:timetrap(500),
+ timer:sleep(800),
+ ?t:timetrap_cancel(Dog),
+
+ %% Reset
+ put(test_server_multiply_timetraps,1),
+ ok.
+
+
+init_per_s(suite) -> [];
+init_per_s(doc) -> ["Test that a Config that is altered in ",
+ "init_per_suite gets through to the testcases."];
+init_per_s(Config) ->
+ %% Check that the config var sent from init_per_suite
+ %% really exists.
+ {value, {init_per_suite_var, ok}} =
+ lists:keysearch(init_per_suite_var,1,Config),
+
+ %% Check that the other variables still exist.
+ {value,{data_dir,_Dd}}=lists:keysearch(data_dir,1,Config),
+ {value,{priv_dir,_Dp}}=lists:keysearch(priv_dir,1,Config),
+ ok.
+
+init_per_tc(suite) -> [];
+init_per_tc(doc) -> ["Test that a Config that is altered in ",
+ "init_per_testcase gets through to the ",
+ "actual testcase."];
+init_per_tc(Config) ->
+ %% Check that the config var sent from init_per_testcase
+ %% really exists.
+ {value, {strange_var, 1}} = lists:keysearch(strange_var,1,Config),
+
+ %% Check that the other variables still exist.
+ {value,{data_dir,_Dd}}=lists:keysearch(data_dir,1,Config),
+ {value,{priv_dir,_Dp}}=lists:keysearch(priv_dir,1,Config),
+ ok.
+
+end_per_tc(suite) -> [];
+end_per_tc(doc) -> ["Test that end_per_testcase/2 is called even if"
+ " test case fails"];
+end_per_tc(Config) when is_list(Config) ->
+ ?t:fail("This case should fail! Check that \"CLEANUP\" is"
+ " printed in the minor log file.").
+
+
+timeconv(suite) -> [];
+timeconv(doc) -> ["Test that the time unit conversion functions ",
+ "works."];
+timeconv(Config) when is_list(Config) ->
+ Val=2,
+ Secs=Val*1000,
+ Mins=Secs*60,
+ Hrs=Mins*60,
+ Secs=?t:seconds(2),
+ Mins=?t:minutes(2),
+ Hrs=?t:hours(2),
+ ok.
+
+
+msgs(suite) -> [];
+msgs(doc) -> ["Tests the messages_get function."];
+msgs(Config) when is_list(Config) ->
+ self() ! {hej, du},
+ self() ! {lite, "data"},
+ self() ! en_atom,
+ [{hej, du}, {lite, "data"}, en_atom] = ?t:messages_get(),
+ ok.
+
+capture(suite) -> [];
+capture(doc) -> ["Test that the capture functions work properly."];
+capture(Config) when is_list(Config) ->
+ String1="abcedfghjiklmnopqrstuvwxyz",
+ String2="0123456789",
+ ?t:capture_start(),
+ io:format(String1),
+ [String1]=?t:capture_get(),
+ io:format(String2),
+ [String2]=?t:capture_get(),
+ ?t:capture_stop(),
+ []=?t:capture_get(),
+ io:format(String2),
+ []=?t:capture_get(),
+ ok.
+
+timecall(suite) -> [];
+timecall(doc) -> ["Tests that timed calls work."];
+timecall(Config) when is_list(Config) ->
+ {_Time1, liten_apa_e_oxo_farlig} = ?t:timecall(?MODULE, dummy_function, []),
+ {Time2, jag_ar_en_gorilla} = ?t:timecall(?MODULE, dummy_function, [gorilla]),
+ DTime=round(Time2),
+ if
+ DTime<1 ->
+ ?t:fail("Timecall reported a too low time.");
+ DTime==1 ->
+ ok;
+ DTime>1 ->
+ ?t:fail("Timecall reported a too high time.")
+ end,
+ ok.
+
+dummy_function() ->
+ liten_apa_e_oxo_farlig.
+dummy_function(gorilla) ->
+ receive after 1000 -> ok end,
+ jag_ar_en_gorilla.
+
+
+do_times(suite) -> [do_times_mfa, do_times_fun];
+do_times(doc) -> ["Test the do_times function."].
+
+do_times_mfa(suite) -> [];
+do_times_mfa(doc) -> ["Test the do_times function with M,F,A given."];
+do_times_mfa(Config) when is_list(Config) ->
+ ?t:do_times(100, ?MODULE, doer, [self()]),
+ 100=length(?t:messages_get()),
+ ok.
+
+do_times_fun(suite) -> [];
+do_times_fun(doc) -> ["Test the do_times function with fun given."];
+do_times_fun(Config) when is_list(Config) ->
+ Self = self(),
+ ?t:do_times(100, fun() -> doer(Self) end),
+ 100=length(?t:messages_get()),
+ ok.
+
+doer(From) ->
+ From ! a,
+ ok.
+
+skip_cases(doc) -> ["Test all possible ways to skip a test case."];
+skip_cases(suite) -> [skip_case1, skip_case2, skip_case3, skip_case4,
+ skip_case5, skip_case6, skip_case7, skip_case8,
+ skip_case9].
+
+skip_case1(suite) -> [];
+skip_case1(doc) -> ["Test that you can return {skipped, Reason},"
+ " and that Reason is in the comment field in the HTML log"];
+skip_case1(Config) when is_list(Config) ->
+ %% If this comment shows, the case failed!!
+ ?t:comment("ERROR: This case should have been noted as `Skipped'"),
+ %% The Reason in {skipped, Reason} should overwrite a 'comment'
+ {skipped, "This case should be noted as `Skipped'"}.
+
+skip_case2(suite) -> [];
+skip_case2(doc) -> ["Test that you can return {skipped, Reason},"
+ " and that Reason is in the comment field in the HTML log"];
+skip_case2(Config) when is_list(Config) ->
+ %% If this comment shows, the case failed!!
+ ?t:comment("ERROR: This case should have been noted as `Skipped'"),
+ %% The Reason in {skipped, Reason} should overwrite a 'comment'
+ exit({skipped, "This case should be noted as `Skipped'"}).
+
+skip_case3(suite) -> [];
+skip_case3(doc) -> ["Test that you can return {skip, Reason},"
+ " and that Reason is in the comment field in the HTML log"];
+skip_case3(Config) when is_list(Config) ->
+ %% If this comment shows, the case failed!!
+ ?t:comment("ERROR: This case should have been noted as `Skipped'"),
+ %% The Reason in {skip, Reason} should overwrite a 'comment'
+ {skip, "This case should be noted as `Skipped'"}.
+
+skip_case4(suite) -> [];
+skip_case4(doc) -> ["Test that you can return {skip, Reason},"
+ " and that Reason is in the comment field in the HTML log"];
+skip_case4(Config) when is_list(Config) ->
+ %% If this comment shows, the case failed!!
+ ?t:comment("ERROR: This case should have been noted as `Skipped'"),
+ %% The Reason in {skip, Reason} should overwrite a 'comment'
+ exit({skip, "This case should be noted as `Skipped'"}).
+
+skip_case5(suite) -> {skipped, "This case should be noted as `Skipped'"};
+skip_case5(doc) -> ["Test that you can return {skipped, Reason}"
+ " from the specification clause"].
+
+skip_case6(suite) -> {skip, "This case should be noted as `Skipped'"};
+skip_case6(doc) -> ["Test that you can return {skip, Reason}"
+ " from the specification clause"].
+
+skip_case7(suite) -> [];
+skip_case7(doc) -> ["Test that skip works from a test specification file"];
+skip_case7(Config) when is_list(Config) ->
+ %% This case shall be skipped by adding
+ %% {skip, {test_server_SUITE, skip_case7, Reason}}.
+ %% to the test specification file.
+ ?t:fail("This case should have been Skipped by the .spec file").
+
+skip_case8(suite) -> [];
+skip_case8(doc) -> ["Test that {skipped, Reason} works from"
+ " init_per_testcase/2"];
+skip_case8(Config) when is_list(Config) ->
+ %% This case shall be skipped by adding a specific clause to
+ %% returning {skipped, Reason} from init_per_testcase/2 for this case.
+ ?t:fail("This case should have been Skipped by init_per_testcase/2").
+
+skip_case9(suite) -> [];
+skip_case9(doc) -> ["Test that {skip, Reason} works from a init_per_testcase/2"];
+skip_case9(Config) when is_list(Config) ->
+ %% This case shall be skipped by adding a specific clause to
+ %% returning {skip, Reason} from init_per_testcase/2 for this case.
+ ?t:fail("This case should have been Skipped by init_per_testcase/2").
+
+undefined_functions(suite) -> [];
+undefined_functions(doc) -> ["Check for calls to undefined functions in"
+ " test_server."
+ "Skip if cover is running"];
+undefined_functions(Config) when is_list(Config) ->
+ case whereis(cover_server) of
+ Pid when is_pid(Pid) ->
+ {skip,"Cover is running"};
+ undefined ->
+ undefined_functions()
+ end.
+
+undefined_functions() ->
+ TestServerDir = filename:dirname(code:which(test_server)),
+ Res = xref:d(TestServerDir),
+
+ {value,{unused,Unused}} = lists:keysearch(unused, 1, Res),
+ case Unused of
+ [] -> ok;
+ _ ->
+ lists:foreach(fun (MFA) ->
+ io:format("~s unused", [format_mfa(MFA)])
+ end, Unused)
+ end,
+
+ {value,{undefined,Undef0}} = lists:keysearch(undefined, 1, Res),
+ Undef = [U || U <- Undef0, not unresolved(U)],
+ case Undef of
+ [] -> ok;
+ _ ->
+ lists:foreach(fun ({MFA1,MFA2}) ->
+ io:format("~s calls undefined ~s",
+ [format_mfa(MFA1),format_mfa(MFA2)])
+ end, Undef),
+ ?t:fail({length(Undef),undefined_functions_in_otp})
+ end,
+ ok.
+
+unresolved({_,{_,'$F_EXPR',_}}) -> true;
+unresolved(_) -> false.
+
+format_mfa({M,F,A}) ->
+ lists:flatten(io_lib:format("~s:~s/~p", [M,F,A])).
+
+conf_init(doc) -> ["Test successful conf case: Change Config parameter"];
+conf_init(Config) when is_list(Config) ->
+ [{conf_init_var,1389}|Config].
+
+check_new_conf(suite) -> [];
+check_new_conf(doc) -> ["Check that Config parameter changed by"
+ " conf_init is used"];
+check_new_conf(Config) when is_list(Config) ->
+ 1389 = ?config(conf_init_var,Config),
+ ok.
+
+conf_cleanup(doc) -> ["Test successful conf case: Restore Config parameter"];
+conf_cleanup(Config) when is_list(Config) ->
+ lists:keydelete(conf_init_var,1,Config).
+
+check_old_conf(suite) -> [];
+check_old_conf(doc) -> ["Test that the restored Config is used after a"
+ " conf cleanup"];
+check_old_conf(Config) when is_list(Config) ->
+ undefined = ?config(conf_init_var,Config),
+ ok.
+
+conf_init_fail(doc) -> ["Test that config members are skipped if"
+ " conf init function fails."];
+conf_init_fail(Config) when is_list(Config) ->
+ ?t:fail("This case should fail! Check that conf_member_skip and"
+ " conf_cleanup_skip are skipped.").
+
+
+
+start_stop_node(suite) -> [];
+start_stop_node(doc) -> ["Test start and stop of slave and peer nodes"];
+start_stop_node(Config) when is_list(Config) ->
+ {ok,Node2} = ?t:start_node(node2,peer,[]),
+ {error, _} = ?t:start_node(node2,peer,[{fail_on_error,false}]),
+ true = lists:member(Node2,nodes()),
+
+ {ok,Node3} = ?t:start_node(node3,slave,[]),
+ {error, _} = ?t:start_node(node3,slave,[]),
+ true = lists:member(Node3,nodes()),
+
+ {ok,Node4} = ?t:start_node(node4,peer,[{wait,false}]),
+ case lists:member(Node4,nodes()) of
+ true ->
+ ?t:comment("WARNING: Node started with {wait,false}"
+ " is up faster than expected...");
+ false ->
+ wait_for_node(Node4,0),
+ true = lists:member(Node4,nodes())
+ end,
+
+ true = ?t:stop_node(Node2),
+ false = lists:member(Node2,nodes()),
+
+ true = ?t:stop_node(Node3),
+ false = lists:member(Node3,nodes()),
+
+ true = ?t:stop_node(Node4),
+ false = lists:member(Node4,nodes()),
+ timer:sleep(2000),
+ false = ?t:stop_node(Node4),
+
+ ok.
+
+
+wait_for_node(Node,Acc) ->
+ case net_adm:ping(Node) of
+ pang ->
+ timer:sleep(100),
+ wait_for_node(Node,Acc+100);
+ pong ->
+ Acc
+ end.
+
+cleanup_nodes_init(doc) -> ["Test that nodes are terminated when test case"
+ " is finished unless {cleanup,false} is given."];
+cleanup_nodes_init(Config) when is_list(Config) ->
+ {ok,DieSlave} = ?t:start_node(die_slave, slave, []),
+ {ok,SurviveSlave} = ?t:start_node(survive_slave, slave, [{cleanup,false}]),
+ {ok,DiePeer} = ?t:start_node(die_peer, peer, []),
+ {ok,SurvivePeer} = ?t:start_node(survive_peer, peer, [{cleanup,false}]),
+ [{die_slave,DieSlave},
+ {survive_slave,SurviveSlave},
+ {die_peer,DiePeer},
+ {survive_peer,SurvivePeer} | Config].
+
+
+
+check_survive_nodes(suite) -> [];
+check_survive_nodes(doc) -> ["Test that nodes with {cleanup,false} survived"];
+check_survive_nodes(Config) when is_list(Config) ->
+ timer:sleep(1000),
+ false = lists:member(?config(die_slave,Config),nodes()),
+ true = lists:member(?config(survive_slave,Config),nodes()),
+ false = lists:member(?config(die_peer,Config),nodes()),
+ true = lists:member(?config(survive_peer,Config),nodes()),
+ ok.
+
+
+cleanup_nodes_fin(doc) -> ["Test that nodes started with {cleanup,false}"
+ " can be stopped"];
+cleanup_nodes_fin(Config) when is_list(Config) ->
+ Slave = ?config(survive_slave,Config),
+ Peer = ?config(survive_peer,Config),
+
+ true = ?t:stop_node(Slave),
+ false = lists:member(Slave,nodes()),
+ true = ?t:stop_node(Peer),
+ false = lists:member(Peer,nodes()),
+
+ C1 = lists:keydelete(die_slave,1,Config),
+ C2 = lists:keydelete(survive_slave,1,C1),
+ C3 = lists:keydelete(die_peer,1,C2),
+ lists:keydelete(survive_peer,1,C3).
+
+commercial(Config) when is_list(Config) ->
+ case ?t:is_commercial() of
+ false -> {comment,"Open-source build"};
+ true -> {comment,"Commercial build"}
+ end.
+
+
diff --git a/lib/test_server/test/test_server_SUITE_data/dummy_file b/lib/test_server/test/test_server_SUITE_data/test_server_SUITE_data/dummy_file
index 65c88fbd75..65c88fbd75 100644
--- a/lib/test_server/test/test_server_SUITE_data/dummy_file
+++ b/lib/test_server/test/test_server_SUITE_data/test_server_SUITE_data/dummy_file
diff --git a/lib/test_server/test/test_server_conf01_SUITE.erl b/lib/test_server/test/test_server_SUITE_data/test_server_conf01_SUITE.erl
index a6d7dfe851..a6d7dfe851 100644
--- a/lib/test_server/test/test_server_conf01_SUITE.erl
+++ b/lib/test_server/test/test_server_SUITE_data/test_server_conf01_SUITE.erl
diff --git a/lib/test_server/test/test_server_conf02_SUITE.erl b/lib/test_server/test/test_server_SUITE_data/test_server_conf02_SUITE.erl
index deba4660c6..deba4660c6 100644
--- a/lib/test_server/test/test_server_conf02_SUITE.erl
+++ b/lib/test_server/test/test_server_SUITE_data/test_server_conf02_SUITE.erl
diff --git a/lib/test_server/test/test_server_parallel01_SUITE.erl b/lib/test_server/test/test_server_SUITE_data/test_server_parallel01_SUITE.erl
index 0e7f329f89..0e7f329f89 100644
--- a/lib/test_server/test/test_server_parallel01_SUITE.erl
+++ b/lib/test_server/test/test_server_SUITE_data/test_server_parallel01_SUITE.erl
diff --git a/lib/test_server/test/test_server_shuffle01_SUITE.erl b/lib/test_server/test/test_server_SUITE_data/test_server_shuffle01_SUITE.erl
index 7ad269501d..7ad269501d 100644
--- a/lib/test_server/test/test_server_shuffle01_SUITE.erl
+++ b/lib/test_server/test/test_server_SUITE_data/test_server_shuffle01_SUITE.erl
diff --git a/lib/test_server/test/test_server_skip_SUITE.erl b/lib/test_server/test/test_server_SUITE_data/test_server_skip_SUITE.erl
index 4037e1cc0e..4037e1cc0e 100644
--- a/lib/test_server/test/test_server_skip_SUITE.erl
+++ b/lib/test_server/test/test_server_SUITE_data/test_server_skip_SUITE.erl
diff --git a/lib/test_server/test/test_server_line_SUITE.erl b/lib/test_server/test/test_server_line_SUITE.erl
index 02897f164f..aa14862e5a 100644
--- a/lib/test_server/test/test_server_line_SUITE.erl
+++ b/lib/test_server/test/test_server_line_SUITE.erl
@@ -23,20 +23,29 @@
-module(test_server_line_SUITE).
-include_lib("test_server/include/test_server.hrl").
--export([all/1]).
--export([init_per_testcase/2, fin_per_testcase/2]).
+-export([all/0,suite/0]).
+-export([init_per_suite/1,end_per_suite/1,
+ init_per_testcase/2, end_per_testcase/2]).
-export([parse_transform/1, lines/1]).
-all(doc) -> ["Test of parse transform for collection line numbers"];
-all(suite) -> [parse_transform,lines].
+suite() ->
+ [{ct_hooks,[ts_install_cth]},
+ {doc,["Test of parse transform for collection line numbers"]}].
+all() -> [parse_transform,lines].
+
+init_per_suite(Config) ->
+ Config.
+
+end_per_suite(_Config) ->
+ ok.
init_per_testcase(_Case, Config) ->
?line test_server_line:clear(),
Dog = ?t:timetrap(?t:minutes(2)),
[{watchdog, Dog}|Config].
-fin_per_testcase(_Case, Config) ->
+end_per_testcase(_Case, Config) ->
?line test_server_line:clear(),
Dog=?config(watchdog, Config),
?t:timetrap_cancel(Dog),
diff --git a/lib/test_server/test/test_server_test_lib.erl b/lib/test_server/test/test_server_test_lib.erl
new file mode 100644
index 0000000000..66ff06e0ce
--- /dev/null
+++ b/lib/test_server/test/test_server_test_lib.erl
@@ -0,0 +1,191 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2009-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(test_server_test_lib).
+-export([parse_suite/1]).
+-export([init/2, pre_init_per_testcase/3, post_end_per_testcase/4]).
+
+-include("test_server_test_lib.hrl").
+
+%% The CTH hooks all tests
+init(_Id, _Opts) ->
+ [].
+
+pre_init_per_testcase(_TC,Config,State) ->
+ case os:type() of
+ {win32, _} ->
+ %% Extend timeout for windows as starting node
+ %% can take a long time there
+ test_server:timetrap( 120000 * test_server:timetrap_scale_factor());
+ _ ->
+ ok
+ end,
+ {start_slave(Config, 50),State}.
+
+start_slave(Config,_Level) ->
+ [_,Host] = string:tokens(atom_to_list(node()), "@"),
+
+ ct:log("Trying to start ~s~n",
+ ["test_server_tester@"++Host]),
+ case slave:start(Host, test_server_tester, []) of
+ {error,Reason} ->
+ test_server:fail(Reason);
+ {ok,Node} ->
+ ct:log("Node ~p started~n", [Node]),
+ IsCover = test_server:is_cover(),
+ if IsCover ->
+ cover:start(Node);
+ true->
+ ok
+ end,
+ DataDir = proplists:get_value(data_dir, Config),
+ PrivDir = proplists:get_value(priv_dir, Config),
+
+ %% PrivDir as well as directory of Test Server suites
+ %% have to be in code path on Test Server node.
+ [_ | Parts] = lists:reverse(filename:split(DataDir)),
+ TSDir = filename:join(lists:reverse(Parts)),
+ AddPathDirs = case proplists:get_value(path_dirs, Config) of
+ undefined -> [];
+ Ds -> Ds
+ end,
+ PathDirs = [PrivDir,TSDir | AddPathDirs],
+ [true = rpc:call(Node, code, add_patha, [D]) || D <- PathDirs],
+ io:format("Dirs added to code path (on ~w):~n",
+ [Node]),
+ [io:format("~s~n", [D]) || D <- PathDirs],
+
+ true = rpc:call(Node, os, putenv,
+ ["TEST_SERVER_FRAMEWORK", "undefined"]),
+
+ ok = rpc:call(Node, file, set_cwd, [PrivDir]),
+ [{node,Node} | Config]
+ end.
+
+post_end_per_testcase(_TC, Config, Return, State) ->
+ Node = proplists:get_value(node, Config),
+ cover:stop(Node),
+ slave:stop(Node),
+
+ {Return, State}.
+
+%% Parse a suite log file
+parse_suite(FileName) ->
+
+ case file:open(FileName, [read, raw, read_ahead]) of
+ {ok, Fd} ->
+ Data = parse_suite(Fd, #suite{ }),
+ file:close(Fd),
+ {ok, Data};
+ _ ->
+ error
+ end.
+
+fline(Fd) ->
+ case prim_file:read_line(Fd) of
+ eof -> eof;
+ {ok, Line} -> Line
+ end.
+
+parse_suite(Fd, S) ->
+ _Started = fline(Fd),
+ _Starting = fline(Fd),
+ "=cases" ++ NCases = fline(Fd),
+ "=user" ++ _User = fline(Fd),
+ "=host" ++ Host = fline(Fd),
+ "=hosts" ++ _Hosts = fline(Fd),
+ "=emulator_vsn" ++ Evsn = fline(Fd),
+ "=emulator" ++ Emu = fline(Fd),
+ "=otp_release" ++ OtpRel = fline(Fd),
+ "=started" ++ Start = fline(Fd),
+ NewS = parse_cases(Fd, S#suite{
+ n_cases_expected = list_to_int(clean(NCases)),
+ host = list_to_binary(clean(Host)),
+ emulator_vsn = list_to_binary(clean(Evsn)),
+ emulator = list_to_binary(clean(Emu)),
+ otp_release = list_to_binary(clean(OtpRel)),
+ started = list_to_binary(clean(Start))
+ }),
+ "=failed" ++ Failed = fline(Fd),
+ "=successful" ++ Succ = fline(Fd),
+ "=user_skipped" ++ UsrSkip = fline(Fd),
+ "=auto_skipped" ++ AutSkip = fline(Fd),
+ NewS#suite{ n_cases_failed = list_to_int(clean(Failed)),
+ n_cases_succ = list_to_int(clean(Succ)),
+ n_cases_user_skip = list_to_int(clean(UsrSkip)),
+ n_cases_auto_skip = list_to_int(clean(AutSkip)) }.
+
+
+parse_cases(Fd, #suite{ n_cases = N,
+ cases = Cases } = S) ->
+ case parse_case(Fd) of
+ finished -> S#suite{ log_ok = true };
+ {eof, Tc} ->
+ S#suite{ n_cases = N + 1,
+ cases = [Tc#tc{ result = crashed }|Cases]};
+ {ok, Case} ->
+ parse_cases(Fd, S#suite{ n_cases = N + 1,
+ cases = [Case|Cases]})
+ end.
+
+parse_case(Fd) -> parse_case(Fd, #tc{}).
+parse_case(Fd, Tc) -> parse_case(fline(Fd), Fd, Tc).
+
+parse_case(eof, _, Tc) -> {eof, Tc};
+parse_case("=case" ++ Case, Fd, Tc) ->
+ Name = list_to_binary(clean(Case)),
+ parse_case(fline(Fd), Fd, Tc#tc{ name = Name });
+parse_case("=logfile" ++ File, Fd, Tc) ->
+ Log = list_to_binary(clean(File)),
+ parse_case(fline(Fd), Fd, Tc#tc{ logfile = Log });
+parse_case("=elapsed" ++ Elapsed, Fd, Tc) ->
+ {ok, [Time], _} = io_lib:fread("~f", clean(Elapsed)),
+ parse_case(fline(Fd), Fd, Tc#tc{ elapsed = Time });
+parse_case("=result" ++ Result, _, Tc) ->
+ case clean(Result) of
+ "ok" ++ _ ->
+ {ok, Tc#tc{ result = ok } };
+ "failed" ++ _ ->
+ {ok, Tc#tc{ result = failed } };
+ "skipped" ++ _ ->
+ {ok, Tc#tc{ result = skip } }
+ end;
+parse_case("=finished" ++ _ , _Fd, #tc{ name = undefined }) ->
+ finished;
+parse_case(_, Fd, Tc) ->
+ parse_case(fline(Fd), Fd, Tc).
+
+skip([]) -> [];
+skip([$ |Ts]) -> skip(Ts);
+skip(Ts) -> Ts.
+
+%rmnl(L) -> L.
+rmnl([]) -> [];
+rmnl([$\n | Ts]) -> rmnl(Ts);
+rmnl([T|Ts]) -> [T | rmnl(Ts)].
+
+clean(L) ->
+ rmnl(skip(L)).
+
+list_to_int(L) ->
+ try
+ list_to_integer(L)
+ catch
+ _:_ ->
+ 0
+ end.
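A minimal sketch of how the parser above might be driven from a test: the module name suite_log_check and the check_suite_log/1 helper are hypothetical, while parse_suite/1 and the #suite{}/#tc{} records are the ones defined in test_server_test_lib and its .hrl file.

    %% Hypothetical helper: read a test_server suite log and print a summary.
    -module(suite_log_check).
    -include("test_server_test_lib.hrl").
    -export([check_suite_log/1]).

    check_suite_log(LogFile) ->
        {ok, #suite{n_cases = N,
                    n_cases_failed = Failed,
                    cases = Cases}} = test_server_test_lib:parse_suite(LogFile),
        io:format("~p cases parsed, ~p failed~n", [N, Failed]),
        [io:format("~s: ~p~n", [Tc#tc.name, Tc#tc.result]) || Tc <- Cases],
        ok.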
diff --git a/lib/test_server/test/test_server_test_lib.hrl b/lib/test_server/test/test_server_test_lib.hrl
new file mode 100644
index 0000000000..27b7be9618
--- /dev/null
+++ b/lib/test_server/test/test_server_test_lib.hrl
@@ -0,0 +1,23 @@
+-record(tc, {
+ name,
+ result,
+ elapsed,
+ logfile
+ }).
+
+-record(suite, {
+ application,
+ n_cases = 0,
+ n_cases_failed = 0,
+ n_cases_expected = 0,
+ n_cases_succ,
+ n_cases_user_skip,
+ n_cases_auto_skip,
+ cases = [],
+ host,
+ emulator_vsn,
+ emulator,
+ otp_release,
+ started,
+ log_ok = false
+ }).
diff --git a/lib/tools/doc/src/cover.xml b/lib/tools/doc/src/cover.xml
index 323bd0dda8..0a3302bda5 100644
--- a/lib/tools/doc/src/cover.xml
+++ b/lib/tools/doc/src/cover.xml
@@ -270,6 +270,8 @@
defaults to <c>function</c>.</p>
<p>If <c>Module</c> is not Cover compiled, the function returns
<c>{error,{not_cover_compiled,Module}}</c>.</p>
+ <p>HINT: It is possible to issue multiple analyse_to_file commands at
+ the same time. </p>
</desc>
</func>
<func>
@@ -307,6 +309,33 @@
<c>.beam</c> file, or in <c>../src</c> relative to that
directory. If no source code is found,
<c>,{error,no_source_code_found}</c> is returned.</p>
+ <p>HINT: It is possible to issue multiple analyse_to_file commands at
+ the same time. </p>
+ </desc>
+ </func>
+ <func>
+ <name>async_analyse_to_file(Module) -> </name>
+ <name>async_analyse_to_file(Module,Options) -> </name>
+ <name>async_analyse_to_file(Module, OutFile) -> </name>
+ <name>async_analyse_to_file(Module, OutFile, Options) -> pid()</name>
+ <fsummary>Asynchronous call to analyse_to_file.</fsummary>
+ <type>
+ <v>Module = atom()</v>
+ <v>OutFile = string()</v>
+ <v>Options = [Option]</v>
+ <v>Option = html</v>
+ <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | no_source_code_found | not_main_node</v>
+ <v>&nbsp;File = string()</v>
+ <v>&nbsp;Reason = term()</v>
+ </type>
+ <desc>
+ <p>This function works exactly the same way as
+ <seealso marker="#analyse_to_file-1">analyse_to_file</seealso> except
+ that it is asynchronous instead of synchronous. The spawned process
+ will link with the caller when created. If an <c>Error</c> occurs
+ while doing the cover analysis the process will crash with the same
+ error reason as <seealso marker="#analyse_to_file-1">analyse_to_file</seealso>
+ would return.</p>
</desc>
</func>
<func>
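A minimal usage sketch of the asynchronous API documented above; the module name my_mod and the output file name are assumptions. Because the spawned process is linked to the caller, the sketch traps exits so that a failed analysis is observed as a message instead of crashing the caller.

    %% Assumes my_mod has already been cover-compiled.
    process_flag(trap_exit, true),
    Pid = cover:async_analyse_to_file(my_mod, "my_mod.COVER.html", [html]),
    receive
        {'EXIT', Pid, normal} -> ok;              %% analysis finished
        {'EXIT', Pid, Reason} -> {error, Reason}  %% same reason analyse_to_file would return
    end.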
diff --git a/lib/tools/doc/src/cover_chapter.xml b/lib/tools/doc/src/cover_chapter.xml
index b4f7919183..92a790c34e 100644
--- a/lib/tools/doc/src/cover_chapter.xml
+++ b/lib/tools/doc/src/cover_chapter.xml
@@ -403,6 +403,13 @@ ok
database contains information about each executable line in each
Cover compiled module, performance decreases proportionally to
the size and number of the Cover compiled modules.</p>
+ <p>To improve performance when analysing cover results, it is possible
+ to issue multiple calls to <seealso marker="cover#analyse-1">analyse</seealso>
+ and <seealso marker="cover#analyse_to_file-1">analyse_to_file</seealso>
+ at once. You can also use the
+ <seealso marker="cover#async_analyse_to_file-1">async_analyse_to_file</seealso>
+ convenience function.
+ </p>
</section>
<section>
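As a sketch of the pattern described in the added paragraph above, a caller can run several synchronous analyse_to_file calls concurrently by spawning one process per module; the module names are made up.

    Mods = [mod_a, mod_b, mod_c],
    Parent = self(),
    Pids = [spawn(fun() ->
                          Parent ! {self(), cover:analyse_to_file(M, [html])}
                  end) || M <- Mods],
    %% Collect one result per spawned process.
    [receive {Pid, Result} -> Result end || Pid <- Pids].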
diff --git a/lib/tools/src/cover.erl b/lib/tools/src/cover.erl
index c4d1bd1d2f..ada2db45be 100644
--- a/lib/tools/src/cover.erl
+++ b/lib/tools/src/cover.erl
@@ -35,23 +35,37 @@
%% remote_process_loop/1.
%%
%% TABLES
-%% Each nodes has an ets table named 'cover_internal_data_table'
-%% (?COVER_TABLE). This table contains the coverage data and is
-%% continously updated when cover compiled code is executed.
+%% Each node has two tables: cover_internal_data_table (?COVER_TABLE) and
+%% cover_internal_clause_table (?COVER_CLAUSE_TABLE).
+%% ?COVER_TABLE contains the bump data i.e. the data about which lines
+%% have been executed how many times.
+%% ?COVER_CLAUSE_TABLE contains information about the clauses of the modules
+%% that cover is currently collecting statistics for.
%%
-%% The main node owns a table named
-%% 'cover_collected_remote_data_table' (?COLLECTION_TABLE). This table
-%% contains data which is collected from remote nodes (either when a
-%% remote node is stopped with cover:stop/1 or when analysing. When
-%% analysing, data is even moved from the ?COVER_TABLE on the main
-%% node to the ?COLLECTION_TABLE.
+%% The main node owns tables named
+%% 'cover_collected_remote_data_table' (?COLLECTION_TABLE) and
+%% 'cover_collected_remote_clause_table' (?COLLECTION_CLAUSE_TABLE).
+%% These tables contain data which is collected from remote nodes (either when a
+%% remote node is stopped with cover:stop/1 or when analysing). When
+%% analysing, data is even moved from the COVER tables on the main
+%% node to the COLLECTION tables.
%%
%% The main node also has a table named 'cover_binary_code_table'
%% (?BINARY_TABLE). This table contains the binary code for each cover
%% compiled module. This is necessary so that the code can be loaded
%% on remote nodes that are started after the compilation.
%%
-
+%% PARALLELISM
+%% To take advantage of SMP when doing the cover analysis, both the data
+%% collection and the analysis have been parallelized. When collecting data,
+%% one process is spawned for each node, and on each remote node one
+%% process is spawned per module.
+%%
+%% When analyzing data it is possible to issue multiple analyse(_to_file)/X
+%% calls at once. They are, however, all synchronous calls (for backwards
+%% compatibility reasons), so the user of cover will have to spawn several
+%% processes to do the calls (or use async_analyse_to_file).
+%%
%% External exports
-export([start/0, start/1,
@@ -61,6 +75,9 @@
analyse/1, analyse/2, analyse/3, analyze/1, analyze/2, analyze/3,
analyse_to_file/1, analyse_to_file/2, analyse_to_file/3,
analyze_to_file/1, analyze_to_file/2, analyze_to_file/3,
+ async_analyse_to_file/1,async_analyse_to_file/2,
+ async_analyse_to_file/3, async_analyze_to_file/1,
+ async_analyze_to_file/2, async_analyze_to_file/3,
export/1, export/2, import/1,
modules/0, imported/0, imported_modules/0, which_nodes/0, is_compiled/1,
reset/1, reset/0,
@@ -100,8 +117,10 @@
}).
-define(COVER_TABLE, 'cover_internal_data_table').
+-define(COVER_CLAUSE_TABLE, 'cover_internal_clause_table').
-define(BINARY_TABLE, 'cover_binary_code_table').
-define(COLLECTION_TABLE, 'cover_collected_remote_data_table').
+-define(COLLECTION_CLAUSE_TABLE, 'cover_collected_remote_clause_table').
-define(TAG, cover_compiled).
-define(SERVER, cover_server).
@@ -114,6 +133,8 @@
true -> ?BLOCK(Expr)
end).
+-define(SPAWN_DBG(Tag,Value),put(Tag,Value)).
+
-include_lib("stdlib/include/ms_transform.hrl").
%%%----------------------------------------------------------------------
@@ -127,7 +148,10 @@ start() ->
case whereis(?SERVER) of
undefined ->
Starter = self(),
- Pid = spawn(fun() -> init_main(Starter) end),
+ Pid = spawn(fun() ->
+ ?SPAWN_DBG(start,[]),
+ init_main(Starter)
+ end),
Ref = erlang:monitor(process,Pid),
Return =
receive
@@ -382,6 +406,30 @@ analyze_to_file(Module, OptOrOut) -> analyse_to_file(Module, OptOrOut).
analyze_to_file(Module, OutFile, Options) ->
analyse_to_file(Module, OutFile, Options).
+async_analyse_to_file(Module) ->
+ do_spawn(?MODULE, analyse_to_file, [Module]).
+async_analyse_to_file(Module, OutFileOrOpts) ->
+ do_spawn(?MODULE, analyse_to_file, [Module, OutFileOrOpts]).
+async_analyse_to_file(Module, OutFile, Options) ->
+ do_spawn(?MODULE, analyse_to_file, [Module, OutFile, Options]).
+
+do_spawn(M,F,A) ->
+ spawn_link(fun() ->
+ case apply(M,F,A) of
+ {ok, _} ->
+ ok;
+ {error, Reason} ->
+ exit(Reason)
+ end
+ end).
+
+async_analyze_to_file(Module) ->
+ async_analyse_to_file(Module).
+async_analyze_to_file(Module, OutFileOrOpts) ->
+ async_analyse_to_file(Module, OutFileOrOpts).
+async_analyze_to_file(Module, OutFile, Options) ->
+ async_analyse_to_file(Module, OutFile, Options).
+
outfilename(Module,Opts) ->
case lists:member(html,Opts) of
true ->
@@ -500,6 +548,8 @@ remote_call(Node,Request) ->
Return
end.
+remote_reply(Proc,Reply) when is_pid(Proc) ->
+ Proc ! {?SERVER,Reply};
remote_reply(MainNode,Reply) ->
{?SERVER,MainNode} ! {?SERVER,Reply}.
@@ -509,9 +559,15 @@ remote_reply(MainNode,Reply) ->
init_main(Starter) ->
register(?SERVER,self()),
- ets:new(?COVER_TABLE, [set, public, named_table]),
+ %% Having write concurrency here gives a 40% performance boost
+ %% when collect/1 is called.
+ ets:new(?COVER_TABLE, [set, public, named_table
+ ,{write_concurrency, true}
+ ]),
+ ets:new(?COVER_CLAUSE_TABLE, [set, public, named_table]),
ets:new(?BINARY_TABLE, [set, named_table]),
ets:new(?COLLECTION_TABLE, [set, public, named_table]),
+ ets:new(?COLLECTION_CLAUSE_TABLE, [set, public, named_table]),
process_flag(trap_exit,true),
Starter ! {?SERVER,started},
main_process_loop(#main_state{}).
@@ -593,40 +649,10 @@ main_process_loop(State) ->
end;
{From, {export,OutFile,Module}} ->
- case file:open(OutFile,[write,binary,raw]) of
- {ok,Fd} ->
- Reply =
- case Module of
- '_' ->
- export_info(State#main_state.imported),
- collect(State#main_state.nodes),
- do_export_table(State#main_state.compiled,
- State#main_state.imported,
- Fd);
- _ ->
- export_info(Module,State#main_state.imported),
- case is_loaded(Module, State) of
- {loaded, File} ->
- [{Module,Clauses}] =
- ets:lookup(?COVER_TABLE,Module),
- collect(Module, Clauses,
- State#main_state.nodes),
- do_export_table([{Module,File}],[],Fd);
- {imported, File, ImportFiles} ->
- %% don't know if I should allow this -
- %% export a module which is only imported
- Imported = [{Module,File,ImportFiles}],
- do_export_table([],Imported,Fd);
- _NotLoaded ->
- {error,{not_cover_compiled,Module}}
- end
- end,
- file:close(Fd),
- reply(From, Reply);
- {error,Reason} ->
- reply(From, {error, {cant_open_file,OutFile,Reason}})
-
- end,
+ spawn(fun() ->
+ ?SPAWN_DBG(export,{OutFile, Module}),
+ do_export(Module, OutFile, From, State)
+ end),
main_process_loop(State);
{From, {import,File}} ->
@@ -692,107 +718,73 @@ main_process_loop(State) ->
unregister(?SERVER),
reply(From, ok);
- {From, {Request, Module}} ->
- case is_loaded(Module, State) of
- {loaded, File} ->
- {Reply,State1} =
- case Request of
- {analyse, Analysis, Level} ->
- analyse_info(Module,State#main_state.imported),
- [{Module,Clauses}] =
- ets:lookup(?COVER_TABLE,Module),
- collect(Module,Clauses,State#main_state.nodes),
- R = do_analyse(Module, Analysis, Level, Clauses),
- {R,State};
-
- {analyse_to_file, OutFile, Opts} ->
- R = case find_source(File) of
- {beam,_BeamFile} ->
- {error,no_source_code_found};
- ErlFile ->
- Imported = State#main_state.imported,
- analyse_info(Module,Imported),
- [{Module,Clauses}] =
- ets:lookup(?COVER_TABLE,Module),
- collect(Module, Clauses,
- State#main_state.nodes),
- HTML = lists:member(html,Opts),
- do_analyse_to_file(Module,OutFile,
- ErlFile,HTML)
- end,
- {R,State};
-
- is_compiled ->
- {{file, File},State};
-
- reset ->
- R = do_reset_main_node(Module,
- State#main_state.nodes),
- Imported =
- remove_imported(Module,
- State#main_state.imported),
- {R,State#main_state{imported=Imported}}
- end,
- reply(From, Reply),
- main_process_loop(State1);
-
- {imported,File,_ImportFiles} ->
- {Reply,State1} =
- case Request of
- {analyse, Analysis, Level} ->
- analyse_info(Module,State#main_state.imported),
- [{Module,Clauses}] =
- ets:lookup(?COLLECTION_TABLE,Module),
- R = do_analyse(Module, Analysis, Level, Clauses),
- {R,State};
-
- {analyse_to_file, OutFile, Opts} ->
- R = case find_source(File) of
- {beam,_BeamFile} ->
- {error,no_source_code_found};
- ErlFile ->
- Imported = State#main_state.imported,
- analyse_info(Module,Imported),
- HTML = lists:member(html,Opts),
- do_analyse_to_file(Module,OutFile,
- ErlFile,HTML)
- end,
- {R,State};
-
- is_compiled ->
- {false,State};
-
- reset ->
- R = do_reset_collection_table(Module),
- Imported =
- remove_imported(Module,
- State#main_state.imported),
- {R,State#main_state{imported=Imported}}
- end,
- reply(From, Reply),
- main_process_loop(State1);
-
- NotLoaded ->
- Reply =
- case Request of
- is_compiled ->
- false;
- _ ->
- {error, {not_cover_compiled,Module}}
- end,
- Compiled =
- case NotLoaded of
- unloaded ->
- do_clear(Module),
- remote_unload(State#main_state.nodes,[Module]),
- update_compiled([Module],
- State#main_state.compiled);
- false ->
- State#main_state.compiled
+ {From, {{analyse, Analysis, Level}, Module}} ->
+ S = try
+ Loaded = is_loaded(Module, State),
+ spawn(fun() ->
+ ?SPAWN_DBG(analyse,{Module,Analysis, Level}),
+ do_parallel_analysis(
+ Module, Analysis, Level,
+ Loaded, From, State)
+ end),
+ State
+ catch throw:Reason ->
+ reply(From,{error, {not_cover_compiled,Module}}),
+ not_loaded(Module, Reason, State)
+ end,
+ main_process_loop(S);
+
+ {From, {{analyse_to_file, OutFile, Opts},Module}} ->
+ S = try
+ Loaded = is_loaded(Module, State),
+ spawn(fun() ->
+ ?SPAWN_DBG(analyse_to_file,
+ {Module,OutFile, Opts}),
+ do_parallel_analysis_to_file(
+ Module, OutFile, Opts,
+ Loaded, From, State)
+ end),
+ State
+ catch throw:Reason ->
+ reply(From,{error, {not_cover_compiled,Module}}),
+ not_loaded(Module, Reason, State)
+ end,
+ main_process_loop(S);
+
+ {From, {is_compiled, Module}} ->
+ S = try is_loaded(Module, State) of
+ {loaded, File} ->
+ reply(From,{file, File}),
+ State;
+ {imported,_File,_ImportFiles} ->
+ reply(From,false),
+ State
+ catch throw:Reason ->
+ reply(From,false),
+ not_loaded(Module, Reason, State)
+ end,
+ main_process_loop(S);
+
+ {From, {reset, Module}} ->
+ S = try
+ Loaded = is_loaded(Module,State),
+ R = case Loaded of
+ {loaded, _File} ->
+ do_reset_main_node(
+ Module, State#main_state.nodes);
+ {imported, _File, _} ->
+ do_reset_collection_table(Module)
end,
- reply(From, Reply),
- main_process_loop(State#main_state{compiled=Compiled})
- end;
+ Imported =
+ remove_imported(Module,
+ State#main_state.imported),
+ reply(From, R),
+ State#main_state{imported=Imported}
+ catch throw:Reason ->
+ reply(From,{error, {not_cover_compiled,Module}}),
+ not_loaded(Module, Reason, State)
+ end,
+ main_process_loop(S);
{'EXIT',Pid,_Reason} ->
%% Exit is trapped on the main node only, so this will only happen
@@ -807,17 +799,17 @@ main_process_loop(State) ->
main_process_loop(State)
end.
-
-
-
-
%%%----------------------------------------------------------------------
%%% cover_server on remote node
%%%----------------------------------------------------------------------
init_remote(Starter,MainNode) ->
register(?SERVER,self()),
- ets:new(?COVER_TABLE, [set, public, named_table]),
+ ets:new(?COVER_TABLE, [set, public, named_table
+ %% write_concurrency here makes otp_8270 break :(
+ %,{write_concurrency, true}
+ ]),
+ ets:new(?COVER_CLAUSE_TABLE, [set, public, named_table]),
Starter ! {self(),started},
remote_process_loop(#remote_state{main_node=MainNode}).
@@ -843,29 +835,14 @@ remote_process_loop(State) ->
remote_process_loop(State);
{remote,collect,Module,CollectorPid} ->
- MS =
- case Module of
- '_' -> ets:fun2ms(fun({M,C}) when is_atom(M) -> C end);
- _ -> ets:fun2ms(fun({M,C}) when M=:=Module -> C end)
- end,
- AllClauses = lists:flatten(ets:select(?COVER_TABLE,MS)),
-
- %% Sending clause by clause in order to avoid large lists
- lists:foreach(
- fun({M,F,A,C,_L}) ->
- Pattern =
- {#bump{module=M, function=F, arity=A, clause=C}, '_'},
- Bumps = ets:match_object(?COVER_TABLE, Pattern),
- %% Reset
- lists:foreach(fun({Bump,_N}) ->
- ets:insert(?COVER_TABLE, {Bump,0})
- end,
- Bumps),
- CollectorPid ! {chunk,Bumps}
- end,
- AllClauses),
- CollectorPid ! done,
- remote_reply(State#remote_state.main_node, ok),
+ self() ! {remote,collect,Module,CollectorPid, ?SERVER};
+
+ {remote,collect,Module,CollectorPid,From} ->
+ spawn(fun() ->
+ ?SPAWN_DBG(remote_collect,
+ {Module, CollectorPid, From}),
+ do_collect(Module, CollectorPid, From)
+ end),
remote_process_loop(State);
{remote,stop} ->
@@ -894,6 +871,33 @@ remote_process_loop(State) ->
end.
+do_collect(Module, CollectorPid, From) ->
+ AllMods =
+ case Module of
+ '_' -> ets:tab2list(?COVER_CLAUSE_TABLE);
+ _ -> ets:lookup(?COVER_CLAUSE_TABLE, Module)
+ end,
+
+ %% Sending clause by clause in order to avoid large lists
+ pmap(
+ fun({_Mod,Clauses}) ->
+ lists:map(fun(Clause) ->
+ send_collected_data(Clause, CollectorPid)
+ end,Clauses)
+ end,AllMods),
+ CollectorPid ! done,
+ remote_reply(From, ok).
+
+send_collected_data({M,F,A,C,_L}, CollectorPid) ->
+ Pattern =
+ {#bump{module=M, function=F, arity=A, clause=C}, '_'},
+ Bumps = ets:match_object(?COVER_TABLE, Pattern),
+ %% Reset
+ lists:foreach(fun({Bump,_N}) ->
+ ets:insert(?COVER_TABLE, {Bump,0})
+ end,
+ Bumps),
+ CollectorPid ! {chunk,Bumps}.
reload_originals([{Module,_File}|Compiled]) ->
do_reload_original(Module),
@@ -932,6 +936,9 @@ load_compiled([{Module,File,Binary,InitialTable}|Compiled],Acc) ->
load_compiled([],Acc) ->
Acc.
+insert_initial_data([Item|Items]) when is_atom(element(1,Item)) ->
+ ets:insert(?COVER_CLAUSE_TABLE, Item),
+ insert_initial_data(Items);
insert_initial_data([Item|Items]) ->
ets:insert(?COVER_TABLE, Item),
insert_initial_data(Items);
@@ -957,7 +964,10 @@ remote_start(MainNode) ->
case whereis(?SERVER) of
undefined ->
Starter = self(),
- Pid = spawn(fun() -> init_remote(Starter,MainNode) end),
+ Pid = spawn(fun() ->
+ ?SPAWN_DBG(remote_start,{MainNode}),
+ init_remote(Starter,MainNode)
+ end),
Ref = erlang:monitor(process,Pid),
Return =
receive
@@ -972,14 +982,25 @@ remote_start(MainNode) ->
{error,{already_started,Pid}}
end.
-%% Load a set of cover compiled modules on remote nodes
-remote_load_compiled(Nodes,Compiled0) ->
- Compiled = lists:map(fun get_data_for_remote_loading/1,Compiled0),
+%% Load a set of cover compiled modules on remote nodes.
+%% We load at most ?MAX_MODS modules at a time so that we don't
+%% run out of memory on the cover_server node.
+-define(MAX_MODS, 10).
+remote_load_compiled(Nodes,Compiled) ->
+ remote_load_compiled(Nodes, Compiled, [], 0).
+remote_load_compiled(_Nodes, [], [], _ModNum) ->
+ ok;
+remote_load_compiled(Nodes, Compiled, Acc, ModNum)
+ when Compiled == []; ModNum == ?MAX_MODS ->
lists:foreach(
fun(Node) ->
- remote_call(Node,{remote,load_compiled,Compiled})
+ remote_call(Node,{remote,load_compiled,Acc})
end,
- Nodes).
+ Nodes),
+ remote_load_compiled(Nodes, Compiled, [], 0);
+remote_load_compiled(Nodes, [MF | Rest], Acc, ModNum) ->
+ remote_load_compiled(
+ Nodes, Rest, [get_data_for_remote_loading(MF) | Acc], ModNum + 1).
%% Read all data needed for loading a cover compiled module on a remote node
%% Binary is the beam code for the module and InitialTable is the initial
@@ -987,15 +1008,15 @@ remote_load_compiled(Nodes,Compiled0) ->
get_data_for_remote_loading({Module,File}) ->
[{Module,Binary}] = ets:lookup(?BINARY_TABLE,Module),
%%! The InitialTable list will be long if the module is big - what to do??
- InitialTable = ets:select(?COVER_TABLE,ms(Module)),
- {Module,File,Binary,InitialTable}.
+ InitialBumps = ets:select(?COVER_TABLE,ms(Module)),
+ InitialClauses = ets:lookup(?COVER_CLAUSE_TABLE,Module),
+
+ {Module,File,Binary,InitialBumps ++ InitialClauses}.
%% Create a match spec which returns the clause info {Module,InitInfo} and
%% all #bump keys for the given module with 0 number of calls.
ms(Module) ->
- ets:fun2ms(fun({Module,InitInfo}) ->
- {Module,InitInfo};
- ({Key,_}) when is_record(Key,bump),Key#bump.module=:=Module ->
+ ets:fun2ms(fun({Key,_}) when Key#bump.module=:=Module ->
{Key,0}
end).
@@ -1017,27 +1038,30 @@ remote_reset(Module,Nodes) ->
%% Collect data from remote nodes - used for analyse or stop(Node)
remote_collect(Module,Nodes,Stop) ->
- CollectorPid = spawn(fun() -> collector_proc(length(Nodes)) end),
- lists:foreach(
- fun(Node) ->
- remote_call(Node,{remote,collect,Module,CollectorPid}),
- if Stop -> remote_call(Node,{remote,stop});
- true -> ok
- end
- end,
- Nodes).
+ pmap(fun(Node) ->
+ ?SPAWN_DBG(remote_collect,
+ {Module, Nodes, Stop}),
+ do_collection(Node, Module, Stop)
+ end,
+ Nodes).
+
+do_collection(Node, Module, Stop) ->
+ CollectorPid = spawn(fun collector_proc/0),
+ remote_call(Node,{remote,collect,Module,CollectorPid, self()}),
+ if Stop -> remote_call(Node,{remote,stop});
+ true -> ok
+ end.
%% Process which receives chunks of data from remote nodes - either when
%% analysing or when stopping cover on the remote nodes.
-collector_proc(0) ->
- ok;
-collector_proc(N) ->
+collector_proc() ->
+ ?SPAWN_DBG(collector_proc, []),
receive
{chunk,Chunk} ->
insert_in_collection_table(Chunk),
- collector_proc(N);
+ collector_proc();
done ->
- collector_proc(N-1)
+ ok
end.
insert_in_collection_table([{Key,Val}|Chunk]) ->
@@ -1052,7 +1076,13 @@ insert_in_collection_table(Key,Val) ->
ets:update_counter(?COLLECTION_TABLE,
Key,Val);
false ->
- ets:insert(?COLLECTION_TABLE,{Key,Val})
+ %% Make sure that there are no race conditions from ets:member
+ case ets:insert_new(?COLLECTION_TABLE,{Key,Val}) of
+ false ->
+ insert_in_collection_table(Key,Val);
+ _ ->
+ ok
+ end
end.
@@ -1073,14 +1103,15 @@ analyse_info(Module,Imported) ->
export_info(_Module,[]) ->
ok;
-export_info(Module,Imported) ->
- imported_info("Export",Module,Imported).
+export_info(_Module,_Imported) ->
+ %% Do not print that the export includes imported modules
+ ok.
export_info([]) ->
ok;
-export_info(Imported) ->
- AllImportFiles = get_all_importfiles(Imported,[]),
- io:format("Export includes data from imported files\n~p\n",[AllImportFiles]).
+export_info(_Imported) ->
+ %% Do not print that the export includes imported modules
+ ok.
get_all_importfiles([{_M,_F,ImportFiles}|Imported],Acc) ->
NewAcc = do_get_all_importfiles(ImportFiles,Acc),
@@ -1153,14 +1184,14 @@ is_loaded(Module, State) ->
{ok, File} ->
case code:which(Module) of
?TAG -> {loaded, File};
- _ -> unloaded
+ _ -> throw(unloaded)
end;
false ->
case get_file(Module,State#main_state.imported) of
{ok,File,ImportFiles} ->
{imported, File, ImportFiles};
false ->
- false
+ throw(not_loaded)
end
end.
@@ -1259,7 +1290,7 @@ do_compile_beam(Module,Beam) ->
%% Store info about all function clauses in database
InitInfo = reverse(Vars#vars.init_info),
- ets:insert(?COVER_TABLE, {Module, InitInfo}),
+ ets:insert(?COVER_CLAUSE_TABLE, {Module, InitInfo}),
%% Store binary code so it can be loaded on remote nodes
ets:insert(?BINARY_TABLE, {Module, Binary}),
@@ -1793,9 +1824,8 @@ common_elems(L1, L2) ->
%% Collect data for all modules
collect(Nodes) ->
%% local node
- MS = ets:fun2ms(fun({M,C}) when is_atom(M) -> {M,C} end),
- AllClauses = ets:select(?COVER_TABLE,MS),
- move_modules(AllClauses),
+ AllClauses = ets:tab2list(?COVER_CLAUSE_TABLE),
+ pmap(fun move_modules/1,AllClauses),
%% remote nodes
remote_collect('_',Nodes,false).
@@ -1803,7 +1833,7 @@ collect(Nodes) ->
%% Collect data for one module
collect(Module,Clauses,Nodes) ->
%% local node
- move_modules([{Module,Clauses}]),
+ move_modules({Module,Clauses}),
%% remote nodes
remote_collect(Module,Nodes,false).
@@ -1811,12 +1841,9 @@ collect(Module,Clauses,Nodes) ->
%% When analysing, the data from the local ?COVER_TABLE is moved to the
%% ?COLLECTION_TABLE. Resetting data in ?COVER_TABLE
-move_modules([{Module,Clauses}|AllClauses]) ->
- ets:insert(?COLLECTION_TABLE,{Module,Clauses}),
- move_clauses(Clauses),
- move_modules(AllClauses);
-move_modules([]) ->
- ok.
+move_modules({Module,Clauses}) ->
+ ets:insert(?COLLECTION_CLAUSE_TABLE,{Module,Clauses}),
+ move_clauses(Clauses).
move_clauses([{M,F,A,C,_L}|Clauses]) ->
Pattern = {#bump{module=M, function=F, arity=A, clause=C}, '_'},
@@ -1855,6 +1882,22 @@ find_source(File0) ->
end
end.
+do_parallel_analysis(Module, Analysis, Level, Loaded, From, State) ->
+ analyse_info(Module,State#main_state.imported),
+ C = case Loaded of
+ {loaded, _File} ->
+ [{Module,Clauses}] =
+ ets:lookup(?COVER_CLAUSE_TABLE,Module),
+ collect(Module,Clauses,State#main_state.nodes),
+ Clauses;
+ _ ->
+ [{Module,Clauses}] =
+ ets:lookup(?COLLECTION_CLAUSE_TABLE,Module),
+ Clauses
+ end,
+ R = do_analyse(Module, Analysis, Level, C),
+ reply(From, R).
+
%% do_analyse(Module, Analysis, Level, Clauses)-> {ok,Answer} | {error,Error}
%% Clauses = [{Module,Function,Arity,Clause,Lines}]
do_analyse(Module, Analysis, line, _Clauses) ->
@@ -1931,6 +1974,28 @@ merge_functions([{_MFA,R}|Functions], MFun, Result) ->
merge_functions([], _MFun, Result) ->
Result.
+do_parallel_analysis_to_file(Module, OutFile, Opts, Loaded, From, State) ->
+ File = case Loaded of
+ {loaded, File0} ->
+ [{Module,Clauses}] =
+ ets:lookup(?COVER_CLAUSE_TABLE,Module),
+ collect(Module, Clauses,
+ State#main_state.nodes),
+ File0;
+ {imported, File0, _} ->
+ File0
+ end,
+ case find_source(File) of
+ {beam,_BeamFile} ->
+ reply(From, {error,no_source_code_found});
+ ErlFile ->
+ analyse_info(Module,State#main_state.imported),
+ HTML = lists:member(html,Opts),
+ R = do_analyse_to_file(Module,OutFile,
+ ErlFile,HTML),
+ reply(From, R)
+ end.
+
%% do_analyse_to_file(Module,OutFile,ErlFile) -> {ok,OutFile} | {error,Error}
%% Module = atom()
%% OutFile = ErlFile = string()
@@ -2027,6 +2092,42 @@ fill2() -> ".| ".
fill3() -> "| ".
%%%--Export--------------------------------------------------------------
+do_export(Module, OutFile, From, State) ->
+ case file:open(OutFile,[write,binary,raw]) of
+ {ok,Fd} ->
+ Reply =
+ case Module of
+ '_' ->
+ export_info(State#main_state.imported),
+ collect(State#main_state.nodes),
+ do_export_table(State#main_state.compiled,
+ State#main_state.imported,
+ Fd);
+ _ ->
+ export_info(Module,State#main_state.imported),
+ try is_loaded(Module, State) of
+ {loaded, File} ->
+ [{Module,Clauses}] =
+ ets:lookup(?COVER_CLAUSE_TABLE,Module),
+ collect(Module, Clauses,
+ State#main_state.nodes),
+ do_export_table([{Module,File}],[],Fd);
+ {imported, File, ImportFiles} ->
+ %% don't know if I should allow this -
+ %% export a module which is only imported
+ Imported = [{Module,File,ImportFiles}],
+ do_export_table([],Imported,Fd)
+ catch throw:_ ->
+ {error,{not_cover_compiled,Module}}
+ end
+ end,
+ file:close(Fd),
+ reply(From, Reply);
+ {error,Reason} ->
+ reply(From, {error, {cant_open_file,OutFile,Reason}})
+
+ end.
+
do_export_table(Compiled, Imported, Fd) ->
ModList = merge(Imported,Compiled),
write_module_data(ModList,Fd).
@@ -2043,7 +2144,7 @@ merge([],ModuleList) ->
write_module_data([{Module,File}|ModList],Fd) ->
write({file,Module,File},Fd),
- [Clauses] = ets:lookup(?COLLECTION_TABLE,Module),
+ [Clauses] = ets:lookup(?COLLECTION_CLAUSE_TABLE,Module),
write(Clauses,Fd),
ModuleData = ets:match_object(?COLLECTION_TABLE,{#bump{module=Module},'_'}),
do_write_module_data(ModuleData,Fd),
@@ -2093,7 +2194,7 @@ do_import_to_table(Fd,ImportFile,Imported,DontImport) ->
{Module,Clauses} ->
case lists:member(Module,DontImport) of
false ->
- ets:insert(?COLLECTION_TABLE,{Module,Clauses});
+ ets:insert(?COLLECTION_CLAUSE_TABLE,{Module,Clauses});
true ->
ok
end,
@@ -2127,14 +2228,14 @@ do_reset_main_node(Module,Nodes) ->
remote_reset(Module,Nodes).
do_reset_collection_table(Module) ->
- ets:delete(?COLLECTION_TABLE,Module),
+ ets:delete(?COLLECTION_CLAUSE_TABLE,Module),
ets:match_delete(?COLLECTION_TABLE, {#bump{module=Module},'_'}).
%% do_reset(Module) -> ok
%% The reset is done on a per-clause basis to avoid building
%% long lists in the case of very large modules
do_reset(Module) ->
- [{Module,Clauses}] = ets:lookup(?COVER_TABLE, Module),
+ [{Module,Clauses}] = ets:lookup(?COVER_CLAUSE_TABLE, Module),
do_reset2(Clauses).
do_reset2([{M,F,A,C,_L}|Clauses]) ->
@@ -2149,10 +2250,19 @@ do_reset2([]) ->
ok.
do_clear(Module) ->
- ets:match_delete(?COVER_TABLE, {Module,'_'}),
+ ets:match_delete(?COVER_CLAUSE_TABLE, {Module,'_'}),
ets:match_delete(?COVER_TABLE, {#bump{module=Module},'_'}),
ets:match_delete(?COLLECTION_TABLE, {#bump{module=Module},'_'}).
+not_loaded(Module, unloaded, State) ->
+ do_clear(Module),
+ remote_unload(State#main_state.nodes,[Module]),
+ Compiled = update_compiled([Module],
+ State#main_state.compiled),
+ State#main_state{ compiled = Compiled };
+not_loaded(_Module,_Else, State) ->
+ State.
+
%%%--Div-----------------------------------------------------------------
@@ -2180,3 +2290,30 @@ escape_lt_and_gt1([],Acc) ->
lists:reverse(Acc);
escape_lt_and_gt1([H|T],Acc) ->
escape_lt_and_gt1(T,[H|Acc]).
+
+pmap(Fun, List) ->
+ pmap(Fun, List, 20).
+pmap(Fun, List, Limit) ->
+ pmap(Fun, List, [], Limit, 0, []).
+pmap(Fun, [E | Rest], Pids, Limit, Cnt, Acc) when Cnt < Limit ->
+ Collector = self(),
+ Pid = spawn_link(fun() ->
+ ?SPAWN_DBG(pmap,E),
+ Collector ! {res,self(),Fun(E)}
+ end),
+ erlang:monitor(process, Pid),
+ pmap(Fun, Rest, Pids ++ [Pid], Limit, Cnt + 1, Acc);
+pmap(Fun, List, [Pid | Pids], Limit, Cnt, Acc) ->
+ receive
+ {'DOWN', _Ref, process, _, _} ->
+ pmap(Fun, List, [Pid | Pids], Limit, Cnt - 1, Acc);
+ {res, Pid, Res} ->
+ pmap(Fun, List, Pids, Limit, Cnt, [Res | Acc])
+ end;
+pmap(_Fun, [], [], _Limit, 0, Acc) ->
+ lists:reverse(Acc);
+pmap(Fun, [], [], Limit, Cnt, Acc) ->
+ receive
+ {'DOWN', _Ref, process, _, _} ->
+ pmap(Fun, [], [], Limit, Cnt - 1, Acc)
+ end.
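The new insert_in_collection_table/2 above switches from a plain ets:insert to ets:insert_new to close the race window left by checking ets:member first. A stand-alone sketch of that upsert pattern, assuming only a public counter table shared by several writers; bump/3 and its try/catch formulation are illustrative, not cover's actual code.

    %% Increment Key in Tab by Incr, creating the counter on first use.
    %% update_counter exits with badarg if the key is missing; insert_new then
    %% races other writers, and losing that race simply retries the bump.
    bump(Tab, Key, Incr) ->
        try
            ets:update_counter(Tab, Key, Incr)
        catch
            error:badarg ->
                case ets:insert_new(Tab, {Key, Incr}) of
                    true  -> Incr;
                    false -> bump(Tab, Key, Incr)
                end
        end.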
diff --git a/lib/tools/test/cover_SUITE.erl b/lib/tools/test/cover_SUITE.erl
index d9daff7a1f..67197c80cb 100644
--- a/lib/tools/test/cover_SUITE.erl
+++ b/lib/tools/test/cover_SUITE.erl
@@ -18,8 +18,10 @@
%%
-module(cover_SUITE).
--export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
+-export([all/0, init_per_testcase/2, end_per_testcase/2,
+ suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2]).
+
-export([start/1, compile/1, analyse/1, misc/1, stop/1,
distribution/1, export_import/1,
otp_5031/1, eif/1, otp_5305/1, otp_5418/1, otp_6115/1, otp_7095/1,
@@ -68,6 +70,24 @@ init_per_group(_GroupName, Config) ->
end_per_group(_GroupName, Config) ->
Config.
+init_per_testcase(TC, Config) when TC =:= misc;
+ TC =:= compile;
+ TC =:= analyse;
+ TC =:= distribution;
+ TC =:= otp_5031;
+ TC =:= stop ->
+ case code:which(crypto) of
+ Path when is_list(Path) ->
+ init_per_testcase(dummy_tc, Config);
+ _Else ->
+ {skip, "No crypto file to test with"}
+ end;
+init_per_testcase(_TestCase, Config) ->
+ Config.
+
+end_per_testcase(_TestCase, _Config) ->
+ %cover:stop(),
+ ok.
start(suite) -> [];
start(Config) when is_list(Config) ->
@@ -351,6 +371,7 @@ distribution(Config) when is_list(Config) ->
%% Check that stop() unloads on all nodes
?line ok = cover:stop(),
+ ?line timer:sleep(100), %% Give nodes time to unload on slow machines.
?line LocalBeam = code:which(f),
?line N2Beam = rpc:call(N2,code,which,[f]),
?line true = is_unloaded(LocalBeam),
@@ -401,8 +422,8 @@ export_import(Config) when is_list(Config) ->
?line {ok,a} = cover:compile(a),
?line ?t:capture_start(),
?line ok = cover:export("all_exported"),
- ?line [Text2] = ?t:capture_get(),
- ?line "Export includes data from imported files"++_ = lists:flatten(Text2),
+ ?line [] = ?t:capture_get(),
+% ?line "Export includes data from imported files"++_ = lists:flatten(Text2),
?line ?t:capture_stop(),
?line ok = cover:stop(),
?line ok = cover:import("all_exported"),
diff --git a/lib/typer/RELEASE_NOTES b/lib/typer/RELEASE_NOTES
new file mode 100644
index 0000000000..d91a815ee9
--- /dev/null
+++ b/lib/typer/RELEASE_NOTES
@@ -0,0 +1,22 @@
+==============================================================================
+ Major features, additions and changes between Typer versions
+ (in reverse chronological order)
+==============================================================================
+
+Version 0.9 (in Erlang/OTP R14B02)
+----------------------------------
+ - Major rewrite; all code has been cleaned up and placed in one file.
+ The only reason this is not yet version 1.0 is that there is no proper
+ documentation for typer that can be displayed on the www.erlang.org site.
+ - Added ability to receive the set of exported types and report unknown ones.
+ - Better handling of overloaded contracts, especially erroneous ones, on
+ which typer no longer crashes.
+ - Fixed problem that caused typer to hang when given a file whose module name
+ did not correspond to the file name.
+ - Added two undocumented options that may come in very handy when trying to
+ understand why typer reports some particular set of types for the functions
+ in a module. These options are mainly for typer developers at this point,
+ but may become documented in some future version.
+
+Older versions
+--------------
diff --git a/lib/typer/src/Makefile b/lib/typer/src/Makefile
index 9c9ef6156f..3d7827b5b5 100644
--- a/lib/typer/src/Makefile
+++ b/lib/typer/src/Makefile
@@ -45,15 +45,9 @@ DIALYZER_DIR = $(ERL_TOP)/lib/dialyzer
# ----------------------------------------------------
# Target Specs
# ----------------------------------------------------
-MODULES = \
- typer \
- typer_annotator \
- typer_info \
- typer_map \
- typer_options \
- typer_preprocess
-
-HRL_FILES= typer.hrl
+MODULES = typer
+
+HRL_FILES=
ERL_FILES= $(MODULES:%=%.erl)
INSTALL_FILES= $(MODULES:%=$(EBIN)/%.$(EMULATOR)) $(APP_TARGET) $(APPUP_TARGET)
TARGET_FILES= $(INSTALL_FILES)
@@ -87,8 +81,8 @@ clean:
# Special Build Targets
# ----------------------------------------------------
-$(EBIN)/typer_options.$(EMULATOR): typer_options.erl ../vsn.mk Makefile
- erlc -W $(ERL_COMPILE_FLAGS) -DVSN="\"v$(VSN)\"" -o$(EBIN) typer_options.erl
+$(EBIN)/typer.$(EMULATOR): typer.erl ../vsn.mk Makefile
+ erlc -W $(ERL_COMPILE_FLAGS) -DVSN="\"v$(VSN)\"" -o$(EBIN) typer.erl
$(APP_TARGET): $(APP_SRC) ../vsn.mk
sed -e 's;%VSN%;$(VSN);' $< > $@
@@ -97,14 +91,9 @@ $(APPUP_TARGET): $(APPUP_SRC) ../vsn.mk
sed -e 's;%VSN%;$(VSN);' $< > $@
# ---------------------------------------------------------------------
-# dependencies -- I wish they were somehow automatically generated
+# dependencies
# ---------------------------------------------------------------------
-$(EBIN)/typer.beam: typer.hrl
-$(EBIN)/typer_annotator.beam: typer.hrl
-$(EBIN)/typer_info.beam: typer.hrl
-$(EBIN)/typer_options.beam: typer.hrl
-$(EBIN)/typer_preprocess.beam: typer.hrl
# ----------------------------------------------------
# Release Target
diff --git a/lib/typer/src/typer.app.src b/lib/typer/src/typer.app.src
index 3eb0cbf816..850829e1dc 100644
--- a/lib/typer/src/typer.app.src
+++ b/lib/typer/src/typer.app.src
@@ -3,12 +3,7 @@
{application, typer,
[{description, "TYPe annotator for ERlang programs, version %VSN%"},
{vsn, "%VSN%"},
- {modules, [typer,
- typer_annotator,
- typer_info,
- typer_map,
- typer_options,
- typer_preprocess]},
+ {modules, [typer]},
{registered, []},
{applications, [compiler, dialyzer, hipe, kernel, stdlib]},
{env, []}]}.
diff --git a/lib/typer/src/typer.erl b/lib/typer/src/typer.erl
index e19614f911..fc8caa4f21 100644
--- a/lib/typer/src/typer.erl
+++ b/lib/typer/src/typer.erl
@@ -1,65 +1,107 @@
%% -*- erlang-indent-level: 2 -*-
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2010. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2006-2011. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-%%--------------------------------------------------------------------
+%%-----------------------------------------------------------------------
%% File : typer.erl
-%% Author : Bingwen He <[email protected]>
-%% Description : The main driver of the TypEr application
-%%--------------------------------------------------------------------
+%% Author(s) : The first version of typer was written by Bingwen He
+%% with guidance from Kostis Sagonas and Tobias Lindahl.
+%% Since June 2008 typer is maintained by Kostis Sagonas.
+%% Description : An Erlang/OTP application that shows type information
+%% for Erlang modules to the user. Additionally, it can
+%% annotate the code of files with such type information.
+%%-----------------------------------------------------------------------
-module(typer).
-%% Avoid warning for local function error/1 clashing with autoimported BIF.
--compile({no_auto_import,[error/1]}).
-export([start/0]).
--export([error/1, compile_error/1]). % for error reporting
--include("typer.hrl").
+%%-----------------------------------------------------------------------
+
+-define(SHOW, show).
+-define(SHOW_EXPORTED, show_exported).
+-define(ANNOTATE, annotate).
+-define(ANNOTATE_INC_FILES, annotate_inc_files).
+
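+%% The four modes of operation: ?SHOW prints specs for all functions,
+%% ?SHOW_EXPORTED only for exported functions, ?ANNOTATE writes annotated
+%% copies of the analyzed files, and ?ANNOTATE_INC_FILES additionally
+%% annotates the -include()d files.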
+-type mode() :: ?SHOW | ?SHOW_EXPORTED | ?ANNOTATE | ?ANNOTATE_INC_FILES.
+
+%%-----------------------------------------------------------------------
+
+-type files() :: [file:filename()].
+-type callgraph() :: dialyzer_callgraph:callgraph().
+-type codeserver() :: dialyzer_codeserver:codeserver().
+-type plt() :: dialyzer_plt:plt().
+
+-record(analysis,
+ {mode :: mode() | 'undefined',
+ macros = [] :: [{atom(), term()}],
+ includes = [] :: files(),
+ codeserver = dialyzer_codeserver:new():: codeserver(),
+ callgraph = dialyzer_callgraph:new() :: callgraph(),
+ files = [] :: files(), % absolute names
+ plt = none :: 'none' | file:filename(),
+ no_spec = false :: boolean(),
+ show_succ = false :: boolean(),
+ %% For choosing between specs or edoc @spec comments
+ edoc = false :: boolean(),
+ %% Files in 'fms' are compilable with option 'to_pp'; we keep them
+ %% as {FileName, ModuleName} in case the ModuleName is different
+ fms = [] :: [{file:filename(), module()}],
+ ex_func = map__new() :: map(),
+ record = map__new() :: map(),
+ func = map__new() :: map(),
+ inc_func = map__new() :: map(),
+ trust_plt = dialyzer_plt:new() :: plt()}).
+-type analysis() :: #analysis{}.
+
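+%% Command-line arguments: 'files' holds files and directories given as
+%% plain arguments, 'files_r' those given with -r (searched recursively),
+%% and 'trusted' the files given with -T whose specs are trusted.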
+-record(args, {files = [] :: files(),
+ files_r = [] :: files(),
+ trusted = [] :: files()}).
+-type args() :: #args{}.
%%--------------------------------------------------------------------
-spec start() -> no_return().
start() ->
- {Args, Analysis} = typer_options:process(),
+ {Args, Analysis} = process_cl_args(),
%% io:format("Args: ~p\n", [Args]),
%% io:format("Analysis: ~p\n", [Analysis]),
- TrustedFiles = typer_preprocess:get_all_files(Args, trust),
- Analysis1 = Analysis#typer_analysis{t_files = TrustedFiles},
- Analysis2 = extract(Analysis1),
- All_Files = typer_preprocess:get_all_files(Args, analysis),
+ TrustedFiles = filter_fd(Args#args.trusted, [], fun is_erl_file/1),
+ Analysis2 = extract(Analysis, TrustedFiles),
+ All_Files = get_all_files(Args),
%% io:format("All_Files: ~p\n", [All_Files]),
- Analysis3 = Analysis2#typer_analysis{ana_files = All_Files},
- Analysis4 = typer_info:collect(Analysis3),
- %% io:format("Final: ~p\n", [Analysis4#typer_analysis.final_files]),
+ Analysis3 = Analysis2#analysis{files = All_Files},
+ Analysis4 = collect_info(Analysis3),
+ %% io:format("Final: ~p\n", [Analysis4#analysis.fms]),
TypeInfo = get_type_info(Analysis4),
- typer_annotator:annotate(TypeInfo),
+ show_or_annotate(TypeInfo),
%% io:format("\nTyper analysis finished\n"),
erlang:halt(0).
%%--------------------------------------------------------------------
--spec extract(#typer_analysis{}) -> #typer_analysis{}.
+-spec extract(analysis(), files()) -> analysis().
-extract(#typer_analysis{macros = Macros, includes = Includes,
- t_files = TFiles, trust_plt = TrustPLT} = Analysis) ->
+extract(#analysis{macros = Macros,
+ includes = Includes,
+ trust_plt = TrustPLT} = Analysis, TrustedFiles) ->
%% io:format("--- Extracting trusted typer_info... "),
Ds = [{d, Name, Value} || {Name, Value} <- Macros],
CodeServer = dialyzer_codeserver:new(),
@@ -87,7 +129,7 @@ extract(#typer_analysis{macros = Macros, includes = Includes,
{error, Reason} -> compile_error(Reason)
end
end,
- CodeServer1 = lists:foldl(Fun, CodeServer, TFiles),
+ CodeServer1 = lists:foldl(Fun, CodeServer, TrustedFiles),
%% Process remote types
NewCodeServer =
try
@@ -112,30 +154,30 @@ extract(#typer_analysis{macros = Macros, includes = Includes,
dialyzer_plt:insert_contract_list(TmpPlt, SpecList)
end,
NewTrustPLT = lists:foldl(FoldFun, TrustPLT, Modules),
- Analysis#typer_analysis{trust_plt = NewTrustPLT}.
+ Analysis#analysis{trust_plt = NewTrustPLT}.
%%--------------------------------------------------------------------
--spec get_type_info(#typer_analysis{}) -> #typer_analysis{}.
+-spec get_type_info(analysis()) -> analysis().
-get_type_info(#typer_analysis{callgraph = CallGraph,
- trust_plt = TrustPLT,
- code_server = CodeServer} = Analysis) ->
+get_type_info(#analysis{callgraph = CallGraph,
+ trust_plt = TrustPLT,
+ codeserver = CodeServer} = Analysis) ->
StrippedCallGraph = remove_external(CallGraph, TrustPLT),
%% io:format("--- Analyzing callgraph... "),
try
NewPlt = dialyzer_succ_typings:analyze_callgraph(StrippedCallGraph,
TrustPLT, CodeServer),
- Analysis#typer_analysis{callgraph = StrippedCallGraph, trust_plt = NewPlt}
+ Analysis#analysis{callgraph = StrippedCallGraph, trust_plt = NewPlt}
catch
error:What ->
- error(io_lib:format("Analysis failed with message: ~p",
- [{What, erlang:get_stacktrace()}]));
+ fatal_error(io_lib:format("Analysis failed with message: ~p",
+ [{What, erlang:get_stacktrace()}]));
throw:{dialyzer_succ_typing_error, Msg} ->
- error(io_lib:format("Analysis failed with message: ~s", [Msg]))
+ fatal_error(io_lib:format("Analysis failed with message: ~s", [Msg]))
end.
--spec remove_external(dialyzer_callgraph:callgraph(), dialyzer_plt:plt()) -> dialyzer_callgraph:callgraph().
+-spec remove_external(callgraph(), plt()) -> callgraph().
remove_external(CallGraph, PLT) ->
{StrippedCG0, Ext} = dialyzer_callgraph:remove_external(CallGraph),
@@ -143,11 +185,16 @@ remove_external(CallGraph, PLT) ->
case get_external(Ext, PLT) of
[] -> ok;
Externals ->
- msg(io_lib:format(" Unknown functions: ~p\n", [lists:usort(Externals)]))
+ msg(io_lib:format(" Unknown functions: ~p\n", [lists:usort(Externals)])),
+ ExtTypes = rcv_ext_types(),
+ case ExtTypes of
+ [] -> ok;
+ _ -> msg(io_lib:format(" Unknown types: ~p\n", [ExtTypes]))
+ end
end,
StrippedCG.
--spec get_external([{mfa(), mfa()}], dialyzer_plt:plt()) -> [mfa()].
+-spec get_external([{mfa(), mfa()}], plt()) -> [mfa()].
get_external(Exts, Plt) ->
Fun = fun ({_From, To = {M, F, A}}, Acc) ->
@@ -163,31 +210,782 @@ get_external(Exts, Plt) ->
lists:foldl(Fun, [], Exts).
%%--------------------------------------------------------------------
+%% Showing type information or annotating files with such information.
+%%--------------------------------------------------------------------
+
+-define(TYPER_ANN_DIR, "typer_ann").
+
+-type line() :: non_neg_integer().
+-type fa() :: {atom(), arity()}.
+-type func_info() :: {line(), atom(), arity()}.
+
+-record(info, {records = map__new() :: map(),
+ functions = [] :: [func_info()],
+ types = map__new() :: map(),
+ edoc = false :: boolean()}).
+-record(inc, {map = map__new() :: map(), filter = [] :: files()}).
+-type inc() :: #inc{}.
+
+-spec show_or_annotate(analysis()) -> 'ok'.
+
+show_or_annotate(#analysis{mode = Mode, fms = Files} = Analysis) ->
+ case Mode of
+ ?SHOW -> show(Analysis);
+ ?SHOW_EXPORTED -> show(Analysis);
+ ?ANNOTATE ->
+ Fun = fun ({File, Module}) ->
+ Info = get_final_info(File, Module, Analysis),
+ write_typed_file(File, Info)
+ end,
+ lists:foreach(Fun, Files);
+ ?ANNOTATE_INC_FILES ->
+ IncInfo = write_and_collect_inc_info(Analysis),
+ write_inc_files(IncInfo)
+ end.
+
+write_and_collect_inc_info(Analysis) ->
+ Fun = fun ({File, Module}, Inc) ->
+ Info = get_final_info(File, Module, Analysis),
+ write_typed_file(File, Info),
+ IncFuns = get_functions(File, Analysis),
+ collect_imported_functions(IncFuns, Info#info.types, Inc)
+ end,
+ NewInc = lists:foldl(Fun, #inc{}, Analysis#analysis.fms),
+ clean_inc(NewInc).
+
+write_inc_files(Inc) ->
+ Fun =
+ fun (File) ->
+ Val = map__lookup(File, Inc#inc.map),
+ %% Val is a list of functions with their type info,
+ %% in the form [{{Line,F,A},Type}]
+ Functions = [Key || {Key, _} <- Val],
+ Val1 = [{{F,A},Type} || {{_Line,F,A},Type} <- Val],
+ Info = #info{types = map__from_list(Val1),
+ records = map__new(),
+ %% Note we need to sort functions here!
+ functions = lists:keysort(1, Functions)},
+ %% io:format("Types ~p\n", [Info#info.types]),
+ %% io:format("Functions ~p\n", [Info#info.functions]),
+ %% io:format("Records ~p\n", [Info#info.records]),
+ write_typed_file(File, Info)
+ end,
+ lists:foreach(Fun, dict:fetch_keys(Inc#inc.map)).
+
+show(Analysis) ->
+ Fun = fun ({File, Module}) ->
+ Info = get_final_info(File, Module, Analysis),
+ show_type_info(File, Info)
+ end,
+ lists:foreach(Fun, Analysis#analysis.fms).
+
+get_final_info(File, Module, Analysis) ->
+ Records = get_records(File, Analysis),
+ Types = get_types(Module, Analysis, Records),
+ Functions = get_functions(File, Analysis),
+ Edoc = Analysis#analysis.edoc,
+ #info{records = Records, functions = Functions, types = Types, edoc = Edoc}.
+
+collect_imported_functions(Functions, Types, Inc) ->
+ %% Coming from other sources, including:
+ %% FIXME: How to deal with yecc-generated file????
+ %% --.yrl (yecc-generated file)???
+ %% -- yeccpre.hrl (yecc-generated file)???
+ %% -- other cases
+ Fun = fun ({File, _} = Obj, I) ->
+ case is_yecc_gen(File, I) of
+ {true, NewI} -> NewI;
+ {false, NewI} ->
+ check_imported_functions(Obj, NewI, Types)
+ end
+ end,
+ lists:foldl(Fun, Inc, Functions).
+
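+%% Decides whether File stems from yecc: a .yrl source, the .erl file
+%% generated from it, or yeccpre.hrl. When a .yrl file is seen, the name
+%% of its generated .erl file is recorded in the filter so that it is
+%% later removed from the inc map (see clean_inc/1).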
+-spec is_yecc_gen(file:filename(), inc()) -> {boolean(), inc()}.
+
+is_yecc_gen(File, #inc{filter = Fs} = Inc) ->
+ case lists:member(File, Fs) of
+ true -> {true, Inc};
+ false ->
+ case filename:extension(File) of
+ ".yrl" ->
+ Rootname = filename:rootname(File, ".yrl"),
+ Obj = Rootname ++ ".erl",
+ case lists:member(Obj, Fs) of
+ true -> {true, Inc};
+ false ->
+ NewInc = Inc#inc{filter = [Obj|Fs]},
+ {true, NewInc}
+ end;
+ _ ->
+ case filename:basename(File) of
+ "yeccpre.hrl" -> {true, Inc};
+ _ -> {false, Inc}
+ end
+ end
+ end.
+
+check_imported_functions({File, {Line, F, A}}, Inc, Types) ->
+ IncMap = Inc#inc.map,
+ FA = {F, A},
+ Type = get_type_info(FA, Types),
+ case map__lookup(File, IncMap) of
+ none -> %% File is not added. Add it
+ Obj = {File,[{FA, {Line, Type}}]},
+ NewMap = map__insert(Obj, IncMap),
+ Inc#inc{map = NewMap};
+ Val -> %% File is already in. Check.
+ case lists:keyfind(FA, 1, Val) of
+ false ->
+ %% Function is not in; add it
+ Obj = {File, Val ++ [{FA, {Line, Type}}]},
+ NewMap = map__insert(Obj, IncMap),
+ Inc#inc{map = NewMap};
+ Type ->
+ %% Function is in and with same type
+ Inc;
+ _ ->
+ %% Function is in but with diff type
+ inc_warning(FA, File),
+ Elem = lists:keydelete(FA, 1, Val),
+ NewMap = case Elem of
+ [] -> map__remove(File, IncMap);
+ _ -> map__insert({File, Elem}, IncMap)
+ end,
+ Inc#inc{map = NewMap}
+ end
+ end.
+
+inc_warning({F, A}, File) ->
+ io:format(" ***Warning: Skip function ~p/~p ", [F, A]),
+ io:format("in file ~p because of inconsistent type\n", [File]).
+
+clean_inc(Inc) ->
+ Inc1 = remove_yecc_generated_file(Inc),
+ normalize_obj(Inc1).
+
+remove_yecc_generated_file(#inc{filter = Filter} = Inc) ->
+ Fun = fun (Key, #inc{map = Map} = I) ->
+ I#inc{map = map__remove(Key, Map)}
+ end,
+ lists:foldl(Fun, Inc, Filter).
+
+normalize_obj(TmpInc) ->
+ Fun = fun (Key, Val, Inc) ->
+ NewVal = [{{Line,F,A},Type} || {{F,A},{Line,Type}} <- Val],
+ map__insert({Key, NewVal}, Inc)
+ end,
+ TmpInc#inc{map = map__fold(Fun, map__new(), TmpInc#inc.map)}.
+
+get_records(File, Analysis) ->
+ map__lookup(File, Analysis#analysis.record).
+
+get_types(Module, Analysis, Records) ->
+ TypeInfoPlt = Analysis#analysis.trust_plt,
+ TypeInfo =
+ case dialyzer_plt:lookup_module(TypeInfoPlt, Module) of
+ none -> [];
+ {value, List} -> List
+ end,
+ CodeServer = Analysis#analysis.codeserver,
+ TypeInfoList =
+ case Analysis#analysis.show_succ of
+ true ->
+ [convert_type_info(I) || I <- TypeInfo];
+ false ->
+ [get_type(I, CodeServer, Records) || I <- TypeInfo]
+ end,
+ map__from_list(TypeInfoList).
+
+convert_type_info({{_M, F, A}, Range, Arg}) ->
+ {{F, A}, {Range, Arg}}.
+
+get_type({{M, F, A} = MFA, Range, Arg}, CodeServer, Records) ->
+ case dialyzer_codeserver:lookup_mfa_contract(MFA, CodeServer) of
+ error ->
+ {{F, A}, {Range, Arg}};
+ {ok, {_FileLine, Contract}} ->
+ Sig = erl_types:t_fun(Arg, Range),
+ case dialyzer_contracts:check_contract(Contract, Sig) of
+ ok -> {{F, A}, {contract, Contract}};
+ {error, {extra_range, _, _}} ->
+ {{F, A}, {contract, Contract}};
+ {error, {overlapping_contract, []}} ->
+ {{F, A}, {contract, Contract}};
+ {error, invalid_contract} ->
+ CString = dialyzer_contracts:contract_to_string(Contract),
+ SigString = dialyzer_utils:format_sig(Sig, Records),
+ Msg = io_lib:format("Error in contract of function ~w:~w/~w\n"
+ "\t The contract is: " ++ CString ++ "\n" ++
+ "\t but the inferred signature is: ~s",
+ [M, F, A, SigString]),
+ fatal_error(Msg);
+ {error, ErrorStr} when is_list(ErrorStr) -> % ErrorStr is a string()
+ Msg = io_lib:format("Error in contract of function ~w:~w/~w: ~s",
+ [M, F, A, ErrorStr]),
+ fatal_error(Msg)
+ end
+ end.
+
+get_functions(File, Analysis) ->
+ case Analysis#analysis.mode of
+ ?SHOW ->
+ Funcs = map__lookup(File, Analysis#analysis.func),
+ Inc_Funcs = map__lookup(File, Analysis#analysis.inc_func),
+ remove_module_info(Funcs) ++ normalize_incFuncs(Inc_Funcs);
+ ?SHOW_EXPORTED ->
+ Ex_Funcs = map__lookup(File, Analysis#analysis.ex_func),
+ remove_module_info(Ex_Funcs);
+ ?ANNOTATE ->
+ Funcs = map__lookup(File, Analysis#analysis.func),
+ remove_module_info(Funcs);
+ ?ANNOTATE_INC_FILES ->
+ map__lookup(File, Analysis#analysis.inc_func)
+ end.
+
+normalize_incFuncs(Functions) ->
+ [FunInfo || {_FileName, FunInfo} <- Functions].
+
+-spec remove_module_info([func_info()]) -> [func_info()].
+
+remove_module_info(FunInfoList) ->
+ F = fun ({_,module_info,0}) -> false;
+ ({_,module_info,1}) -> false;
+ ({Line,F,A}) when is_integer(Line), is_atom(F), is_integer(A) -> true
+ end,
+ lists:filter(F, FunInfoList).
+
+write_typed_file(File, Info) ->
+ io:format(" Processing file: ~p\n", [File]),
+ Dir = filename:dirname(File),
+ RootName = filename:basename(filename:rootname(File)),
+ Ext = filename:extension(File),
+ TyperAnnDir = filename:join(Dir, ?TYPER_ANN_DIR),
+ TmpNewFilename = lists:concat([RootName, ".ann", Ext]),
+ NewFileName = filename:join(TyperAnnDir, TmpNewFilename),
+ case file:make_dir(TyperAnnDir) of
+ {error, Reason} ->
+ case Reason of
+ eexist -> %% TypEr dir exists; remove old typer files
+ ok = file:delete(NewFileName),
+ write_typed_file(File, Info, NewFileName);
+ enospc ->
+ Msg = io_lib:format("Not enough space in ~p\n", [Dir]),
+ fatal_error(Msg);
+ eacces ->
+ Msg = io_lib:format("No write permission in ~p\n", [Dir]),
+ fatal_error(Msg);
+ _ ->
+ Msg = io_lib:format("Unhandled error ~s when writing ~p\n",
+ [Reason, Dir]),
+ fatal_error(Msg)
+ end;
+ ok -> %% Typer dir does NOT exist
+ write_typed_file(File, Info, NewFileName)
+ end.
+
+write_typed_file(File, Info, NewFileName) ->
+ {ok, Binary} = file:read_file(File),
+ Chars = binary_to_list(Binary),
+ write_typed_file(Chars, NewFileName, Info, 1, []),
+ io:format(" Saved as: ~p\n", [NewFileName]).
+
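+%% Walk through the file contents character by character, keeping track of
+%% the current line number; just before the line on which the next function
+%% starts, flush the accumulated text followed by the generated spec (or
+%% edoc @spec comment) for that function.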
+write_typed_file(Chars, File, #info{functions = []}, _LNo, _Acc) ->
+ ok = file:write_file(File, list_to_binary(Chars), [append]);
+write_typed_file([Ch|Chs] = Chars, File, Info, LineNo, Acc) ->
+ [{Line,F,A}|RestFuncs] = Info#info.functions,
+ case Line of
+ 1 -> %% This will happen only for inc files
+ ok = raw_write(F, A, Info, File, []),
+ NewInfo = Info#info{functions = RestFuncs},
+ NewAcc = [],
+ write_typed_file(Chars, File, NewInfo, Line, NewAcc);
+ _ ->
+ case Ch of
+ 10 ->
+ NewLineNo = LineNo + 1,
+ {NewInfo, NewAcc} =
+ case NewLineNo of
+ Line ->
+ ok = raw_write(F, A, Info, File, [Ch|Acc]),
+ {Info#info{functions = RestFuncs}, []};
+ _ ->
+ {Info, [Ch|Acc]}
+ end,
+ write_typed_file(Chs, File, NewInfo, NewLineNo, NewAcc);
+ _ ->
+ write_typed_file(Chs, File, Info, LineNo, [Ch|Acc])
+ end
+ end.
+
+raw_write(F, A, Info, File, Content) ->
+ TypeInfo = get_type_string(F, A, Info, file),
+ ContentList = lists:reverse(Content) ++ TypeInfo ++ "\n",
+ ContentBin = list_to_binary(ContentList),
+ file:write_file(File, ContentBin, [append]).
+
+get_type_string(F, A, Info, Mode) ->
+ Type = get_type_info({F,A}, Info#info.types),
+ TypeStr =
+ case Type of
+ {contract, C} ->
+ dialyzer_contracts:contract_to_string(C);
+ {RetType, ArgType} ->
+ Sig = erl_types:t_fun(ArgType, RetType),
+ dialyzer_utils:format_sig(Sig, Info#info.records)
+ end,
+ case Info#info.edoc of
+ false ->
+ case {Mode, Type} of
+ {file, {contract, _}} -> "";
+ _ ->
+ Prefix = lists:concat(["-spec ", F]),
+ lists:concat([Prefix, TypeStr, "."])
+ end;
+ true ->
+ Prefix = lists:concat(["%% @spec ", F]),
+ lists:concat([Prefix, TypeStr, "."])
+ end.
+
+show_type_info(File, Info) ->
+ io:format("\n%% File: ~p\n%% ", [File]),
+ OutputString = lists:concat(["~.", length(File)+8, "c~n"]),
+ io:fwrite(OutputString, [$-]),
+ Fun = fun ({_LineNo, F, A}) ->
+ TypeInfo = get_type_string(F, A, Info, show),
+ io:format("~s\n", [TypeInfo])
+ end,
+ lists:foreach(Fun, Info#info.functions).
+
+get_type_info(Func, Types) ->
+ case map__lookup(Func, Types) of
+ none ->
+ %% Note: Typeinfo of any function should exist in
+ %% the result offered by dialyzer, otherwise there
+ %% *must* be something wrong with the analysis
+ Msg = io_lib:format("No type info for function: ~p\n", [Func]),
+ fatal_error(Msg);
+ {contract, _Fun} = C -> C;
+ {_RetType, _ArgType} = RA -> RA
+ end.
+
+%%--------------------------------------------------------------------
+%% Processing of command-line options and arguments.
+%%--------------------------------------------------------------------
+
+-spec process_cl_args() -> {args(), analysis()}.
+
+process_cl_args() ->
+ ArgList = init:get_plain_arguments(),
+ %% io:format("Args is ~p\n", [ArgList]),
+ {Args, Analysis} = analyze_args(ArgList, #args{}, #analysis{}),
+ %% if the mode has not been set, set it to the default mode (show)
+ {Args, case Analysis#analysis.mode of
+ undefined -> Analysis#analysis{mode = ?SHOW};
+ Mode when is_atom(Mode) -> Analysis
+ end}.
+
+analyze_args([], Args, Analysis) ->
+ {Args, Analysis};
+analyze_args(ArgList, Args, Analysis) ->
+ {Result, Rest} = cl(ArgList),
+ {NewArgs, NewAnalysis} = analyze_result(Result, Args, Analysis),
+ analyze_args(Rest, NewArgs, NewAnalysis).
+
+cl(["-h"|_]) -> help_message();
+cl(["--help"|_]) -> help_message();
+cl(["-v"|_]) -> version_message();
+cl(["--version"|_]) -> version_message();
+cl(["--edoc"|Opts]) -> {edoc, Opts};
+cl(["--show"|Opts]) -> {{mode, ?SHOW}, Opts};
+cl(["--show_exported"|Opts]) -> {{mode, ?SHOW_EXPORTED}, Opts};
+cl(["--show-exported"|Opts]) -> {{mode, ?SHOW_EXPORTED}, Opts};
+cl(["--show_success_typings"|Opts]) -> {show_succ, Opts};
+cl(["--show-success-typings"|Opts]) -> {show_succ, Opts};
+cl(["--annotate"|Opts]) -> {{mode, ?ANNOTATE}, Opts};
+cl(["--annotate-inc-files"|Opts]) -> {{mode, ?ANNOTATE_INC_FILES}, Opts};
+cl(["--no_spec"|Opts]) -> {no_spec, Opts};
+cl(["--plt",Plt|Opts]) -> {{plt, Plt}, Opts};
+cl(["-D"++Def|Opts]) ->
+ case Def of
+ "" -> fatal_error("no variable name specified after -D");
+ _ ->
+ DefPair = process_def_list(re:split(Def, "=", [{return, list}])),
+ {{def, DefPair}, Opts}
+ end;
+cl(["-I",Dir|Opts]) -> {{inc, Dir}, Opts};
+cl(["-I"++Dir|Opts]) ->
+ case Dir of
+ "" -> fatal_error("no include directory specified after -I");
+ _ -> {{inc, Dir}, Opts}
+ end;
+cl(["-T"|Opts]) ->
+ {Files, RestOpts} = dialyzer_cl_parse:collect_args(Opts),
+ case Files of
+ [] -> fatal_error("no file or directory specified after -T");
+ [_|_] -> {{trusted, Files}, RestOpts}
+ end;
+cl(["-r"|Opts]) ->
+ {Files, RestOpts} = dialyzer_cl_parse:collect_args(Opts),
+ {{files_r, Files}, RestOpts};
+cl(["-"++H|_]) -> fatal_error("unknown option -"++H);
+cl(Opts) ->
+ {Files, RestOpts} = dialyzer_cl_parse:collect_args(Opts),
+ {{files, Files}, RestOpts}.
+
+process_def_list(L) ->
+ case L of
+ [Name, Value] ->
+ {ok, Tokens, _} = erl_scan:string(Value ++ "."),
+ {ok, ErlValue} = erl_parse:parse_term(Tokens),
+ {list_to_atom(Name), ErlValue};
+ [Name] ->
+ {list_to_atom(Name), true}
+ end.
+
+%% Get information about files that the user trusts and wants to analyze
+analyze_result({files, Val}, Args, Analysis) ->
+ NewVal = Args#args.files ++ Val,
+ {Args#args{files = NewVal}, Analysis};
+analyze_result({files_r, Val}, Args, Analysis) ->
+ NewVal = Args#args.files_r ++ Val,
+ {Args#args{files_r = NewVal}, Analysis};
+analyze_result({trusted, Val}, Args, Analysis) ->
+ NewVal = Args#args.trusted ++ Val,
+ {Args#args{trusted = NewVal}, Analysis};
+analyze_result(edoc, Args, Analysis) ->
+ {Args, Analysis#analysis{edoc = true}};
+%% Get useful information for actual analysis
+analyze_result({mode, Mode}, Args, Analysis) ->
+ case Analysis#analysis.mode of
+ undefined -> {Args, Analysis#analysis{mode = Mode}};
+ OldMode -> mode_error(OldMode, Mode)
+ end;
+analyze_result({def, Val}, Args, Analysis) ->
+ NewVal = Analysis#analysis.macros ++ [Val],
+ {Args, Analysis#analysis{macros = NewVal}};
+analyze_result({inc, Val}, Args, Analysis) ->
+ NewVal = Analysis#analysis.includes ++ [Val],
+ {Args, Analysis#analysis{includes = NewVal}};
+analyze_result({plt, Plt}, Args, Analysis) ->
+ {Args, Analysis#analysis{plt = Plt}};
+analyze_result(show_succ, Args, Analysis) ->
+ {Args, Analysis#analysis{show_succ = true}};
+analyze_result(no_spec, Args, Analysis) ->
+ {Args, Analysis#analysis{no_spec = true}}.
+
+%%--------------------------------------------------------------------
+%% File processing.
+%%--------------------------------------------------------------------
+
+-spec get_all_files(args()) -> [file:filename(),...].
+
+get_all_files(#args{files = Fs, files_r = Ds}) ->
+ case filter_fd(Fs, Ds, fun test_erl_file_exclude_ann/1) of
+ [] -> fatal_error("no file(s) to analyze");
+ AllFiles -> AllFiles
+ end.
--spec error(string()) -> no_return().
+-spec test_erl_file_exclude_ann(file:filename()) -> boolean().
-error(Slogan) ->
+test_erl_file_exclude_ann(File) ->
+ case is_erl_file(File) of
+ true -> %% Exclude files ending with ".ann.erl"
+ case re:run(File, "[\.]ann[\.]erl$") of
+ {match, _} -> false;
+ nomatch -> true
+ end;
+ false -> false
+ end.
+
+-spec is_erl_file(file:filename()) -> boolean().
+
+is_erl_file(File) ->
+ filename:extension(File) =:= ".erl".
+
+-type test_file_fun() :: fun((file:filename()) -> boolean()).
+
+-spec filter_fd(files(), files(), test_file_fun()) -> files().
+
+filter_fd(File_Dir, Dir_R, Fun) ->
+ All_File_1 = process_file_and_dir(File_Dir, Fun),
+ All_File_2 = process_dir_rec(Dir_R, Fun),
+ remove_dup(All_File_1 ++ All_File_2).
+
+-spec process_file_and_dir(files(), test_file_fun()) -> files().
+
+process_file_and_dir(File_Dir, TestFun) ->
+ Fun =
+ fun (Elem, Acc) ->
+ case filelib:is_regular(Elem) of
+ true -> process_file(Elem, TestFun, Acc);
+ false -> check_dir(Elem, false, Acc, TestFun)
+ end
+ end,
+ lists:foldl(Fun, [], File_Dir).
+
+-spec process_dir_rec(files(), test_file_fun()) -> files().
+
+process_dir_rec(Dirs, TestFun) ->
+ Fun = fun (Dir, Acc) -> check_dir(Dir, true, Acc, TestFun) end,
+ lists:foldl(Fun, [], Dirs).
+
+-spec check_dir(file:filename(), boolean(), files(), test_file_fun()) -> files().
+
+check_dir(Dir, Recursive, Acc, Fun) ->
+ case file:list_dir(Dir) of
+ {ok, Files} ->
+ {TmpDirs, TmpFiles} = split_dirs_and_files(Files, Dir),
+ case Recursive of
+ false ->
+ FinalFiles = process_file_and_dir(TmpFiles, Fun),
+ Acc ++ FinalFiles;
+ true ->
+ TmpAcc1 = process_file_and_dir(TmpFiles, Fun),
+ TmpAcc2 = process_dir_rec(TmpDirs, Fun),
+ Acc ++ TmpAcc1 ++ TmpAcc2
+ end;
+ {error, eacces} ->
+ fatal_error("no access permission to dir \""++Dir++"\"");
+ {error, enoent} ->
+ fatal_error("cannot access "++Dir++": No such file or directory");
+ {error, _Reason} ->
+ fatal_error("error involving a use of file:list_dir/1")
+ end.
+
+%% Same order as the input list
+-spec process_file(file:filename(), test_file_fun(), files()) -> files().
+
+process_file(File, TestFun, Acc) ->
+ case TestFun(File) of
+ true -> Acc ++ [File];
+ false -> Acc
+ end.
+
+%% Same order as the input list
+-spec split_dirs_and_files(files(), file:filename()) -> {files(), files()}.
+
+split_dirs_and_files(Elems, Dir) ->
+ Test_Fun =
+ fun (Elem, {DirAcc, FileAcc}) ->
+ File = filename:join(Dir, Elem),
+ case filelib:is_regular(File) of
+ false -> {[File|DirAcc], FileAcc};
+ true -> {DirAcc, [File|FileAcc]}
+ end
+ end,
+ {Dirs, Files} = lists:foldl(Test_Fun, {[], []}, Elems),
+ {lists:reverse(Dirs), lists:reverse(Files)}.
+
+%% Removes duplicate filenames but keeps the order of the input list
+-spec remove_dup(files()) -> files().
+
+remove_dup(Files) ->
+ Test_Dup = fun (File, Acc) ->
+ case lists:member(File, Acc) of
+ true -> Acc;
+ false -> [File|Acc]
+ end
+ end,
+ Reversed_Elems = lists:foldl(Test_Dup, [], Files),
+ lists:reverse(Reversed_Elems).
+
+%%--------------------------------------------------------------------
+%% Collect information.
+%%--------------------------------------------------------------------
+
+-type inc_file_info() :: {file:filename(), func_info()}.
+
+-record(tmpAcc, {file :: file:filename(),
+ module :: atom(),
+ funcAcc = [] :: [func_info()],
+ incFuncAcc = [] :: [inc_file_info()],
+ dialyzerObj = [] :: [{mfa(), {_, _}}]}).
+
+-spec collect_info(analysis()) -> analysis().
+
+collect_info(Analysis) ->
+ NewPlt =
+ try get_dialyzer_plt(Analysis) of
+ DialyzerPlt ->
+ dialyzer_plt:merge_plts([Analysis#analysis.trust_plt, DialyzerPlt])
+ catch
+ throw:{dialyzer_error,_Reason} ->
+ fatal_error("Dialyzer's PLT is missing or is not up-to-date; please (re)create it")
+ end,
+ NewAnalysis = lists:foldl(fun collect_one_file_info/2,
+ Analysis#analysis{trust_plt = NewPlt},
+ Analysis#analysis.files),
+ %% Process Remote Types
+ TmpCServer = NewAnalysis#analysis.codeserver,
+ NewCServer =
+ try
+ NewRecords = dialyzer_codeserver:get_temp_records(TmpCServer),
+ NewExpTypes = dialyzer_codeserver:get_temp_exported_types(TmpCServer),
+ OldRecords = dialyzer_plt:get_types(NewPlt),
+ OldExpTypes = dialyzer_plt:get_exported_types(NewPlt),
+ MergedRecords = dialyzer_utils:merge_records(NewRecords, OldRecords),
+ MergedExpTypes = sets:union(NewExpTypes, OldExpTypes),
+ %% io:format("Merged Records ~p",[MergedRecords]),
+ TmpCServer1 = dialyzer_codeserver:set_temp_records(MergedRecords, TmpCServer),
+ TmpCServer2 =
+ dialyzer_codeserver:insert_temp_exported_types(MergedExpTypes,
+ TmpCServer1),
+ TmpCServer3 = dialyzer_utils:process_record_remote_types(TmpCServer2),
+ dialyzer_contracts:process_contract_remote_types(TmpCServer3)
+ catch
+ throw:{error, ErrorMsg} ->
+ fatal_error(ErrorMsg)
+ end,
+ NewAnalysis#analysis{codeserver = NewCServer}.
+
+collect_one_file_info(File, Analysis) ->
+ Ds = [{d,Name,Val} || {Name,Val} <- Analysis#analysis.macros],
+ %% Current directory should also be included in "Includes".
+ Includes = [filename:dirname(File)|Analysis#analysis.includes],
+ Is = [{i,Dir} || Dir <- Includes],
+ Options = dialyzer_utils:src_compiler_opts() ++ Is ++ Ds,
+ case dialyzer_utils:get_abstract_code_from_src(File, Options) of
+ {error, Reason} ->
+ %% io:format("File=~p\n,Options=~p\n,Error=~p\n", [File,Options,Reason]),
+ compile_error(Reason);
+ {ok, AbstractCode} ->
+ case dialyzer_utils:get_core_from_abstract_code(AbstractCode, Options) of
+ error -> compile_error(["Could not get core erlang for "++File]);
+ {ok, Core} ->
+ case dialyzer_utils:get_record_and_type_info(AbstractCode) of
+ {error, Reason} -> compile_error([Reason]);
+ {ok, Records} ->
+ Mod = cerl:concrete(cerl:module_name(Core)),
+ case dialyzer_utils:get_spec_info(Mod, AbstractCode, Records) of
+ {error, Reason} -> compile_error([Reason]);
+ {ok, SpecInfo} ->
+ ExpTypes = get_exported_types_from_core(Core),
+ analyze_core_tree(Core, Records, SpecInfo, ExpTypes,
+ Analysis, File)
+ end
+ end
+ end
+ end.
+
+analyze_core_tree(Core, Records, SpecInfo, ExpTypes, Analysis, File) ->
+ Module = cerl:concrete(cerl:module_name(Core)),
+ TmpTree = cerl:from_records(Core),
+ CS1 = Analysis#analysis.codeserver,
+ NextLabel = dialyzer_codeserver:get_next_core_label(CS1),
+ {Tree, NewLabel} = cerl_trees:label(TmpTree, NextLabel),
+ CS2 = dialyzer_codeserver:insert(Module, Tree, CS1),
+ CS3 = dialyzer_codeserver:set_next_core_label(NewLabel, CS2),
+ CS4 = dialyzer_codeserver:store_temp_records(Module, Records, CS3),
+ CS5 =
+ case Analysis#analysis.no_spec of
+ true -> CS4;
+ false -> dialyzer_codeserver:store_temp_contracts(Module, SpecInfo, CS4)
+ end,
+ OldExpTypes = dialyzer_codeserver:get_temp_exported_types(CS5),
+ MergedExpTypes = sets:union(ExpTypes, OldExpTypes),
+ CS6 = dialyzer_codeserver:insert_temp_exported_types(MergedExpTypes, CS5),
+ Ex_Funcs = [{0,F,A} || {_,_,{F,A}} <- cerl:module_exports(Tree)],
+ TmpCG = Analysis#analysis.callgraph,
+ CG = dialyzer_callgraph:scan_core_tree(Tree, TmpCG),
+ Fun = fun analyze_one_function/2,
+ All_Defs = cerl:module_defs(Tree),
+ Acc = lists:foldl(Fun, #tmpAcc{file = File, module = Module}, All_Defs),
+ Exported_FuncMap = map__insert({File, Ex_Funcs}, Analysis#analysis.ex_func),
+ %% We must sort all functions that originate from this file by the
+ %% *numerical order* of their line numbers.
+ Sorted_Functions = lists:keysort(1, Acc#tmpAcc.funcAcc),
+ FuncMap = map__insert({File, Sorted_Functions}, Analysis#analysis.func),
+ %% we do not need to sort functions which are imported from included files
+ IncFuncMap = map__insert({File, Acc#tmpAcc.incFuncAcc},
+ Analysis#analysis.inc_func),
+ FMs = Analysis#analysis.fms ++ [{File, Module}],
+ RecordMap = map__insert({File, Records}, Analysis#analysis.record),
+ Analysis#analysis{fms = FMs,
+ callgraph = CG,
+ codeserver = CS6,
+ ex_func = Exported_FuncMap,
+ inc_func = IncFuncMap,
+ record = RecordMap,
+ func = FuncMap}.
+
+analyze_one_function({Var, FunBody} = Function, Acc) ->
+ F = cerl:fname_id(Var),
+ A = cerl:fname_arity(Var),
+ TmpDialyzerObj = {{Acc#tmpAcc.module, F, A}, Function},
+ NewDialyzerObj = Acc#tmpAcc.dialyzerObj ++ [TmpDialyzerObj],
+ [_, LineNo, {file, FileName}] = cerl:get_ann(FunBody),
+ BaseName = filename:basename(FileName),
+ FuncInfo = {LineNo, F, A},
+ OriginalName = Acc#tmpAcc.file,
+ {FuncAcc, IncFuncAcc} =
+ case (FileName =:= OriginalName) orelse (BaseName =:= OriginalName) of
+ true -> %% Coming from original file
+ %% io:format("Added function ~p\n", [{LineNo, F, A}]),
+ {Acc#tmpAcc.funcAcc ++ [FuncInfo], Acc#tmpAcc.incFuncAcc};
+ false ->
+ %% Coming from other sources, including:
+ %% -- .yrl (yecc-generated file)
+ %% -- yeccpre.hrl (yecc-generated file)
+ %% -- other cases
+ {Acc#tmpAcc.funcAcc, Acc#tmpAcc.incFuncAcc ++ [{FileName, FuncInfo}]}
+ end,
+ Acc#tmpAcc{funcAcc = FuncAcc,
+ incFuncAcc = IncFuncAcc,
+ dialyzerObj = NewDialyzerObj}.
+
+-spec get_dialyzer_plt(analysis()) -> plt().
+
+get_dialyzer_plt(#analysis{plt = PltFile0}) ->
+ PltFile =
+ case PltFile0 =:= none of
+ true -> dialyzer_plt:get_default_plt();
+ false -> PltFile0
+ end,
+ dialyzer_plt:from_file(PltFile).
+
+%% Exported Types
+
+get_exported_types_from_core(Core) ->
+ Attrs = cerl:module_attrs(Core),
+ ExpTypes1 = [cerl:concrete(L2) || {L1, L2} <- Attrs,
+ cerl:is_literal(L1),
+ cerl:is_literal(L2),
+ cerl:concrete(L1) =:= 'export_type'],
+ ExpTypes2 = lists:flatten(ExpTypes1),
+ M = cerl:atom_val(cerl:module_name(Core)),
+ sets:from_list([{M, F, A} || {F, A} <- ExpTypes2]).
+
+%%--------------------------------------------------------------------
+%% Utilities for error reporting.
+%%--------------------------------------------------------------------
+
+-spec fatal_error(string()) -> no_return().
+
+fatal_error(Slogan) ->
msg(io_lib:format("typer: ~s\n", [Slogan])),
erlang:halt(1).
-%%--------------------------------------------------------------------
+-spec mode_error(mode(), mode()) -> no_return().
+
+mode_error(OldMode, NewMode) ->
+ Msg = io_lib:format("Mode was previously set to '~s'; "
+ "can not set it to '~s' now",
+ [OldMode, NewMode]),
+ fatal_error(Msg).
-spec compile_error([string()]) -> no_return().
compile_error(Reason) ->
JoinedString = lists:flatten([X ++ "\n" || X <- Reason]),
Msg = "Analysis failed with error report:\n" ++ JoinedString,
- error(Msg).
-
-%%--------------------------------------------------------------------
-%% Outputs a message on 'stderr', if possible.
-%%--------------------------------------------------------------------
+ fatal_error(Msg).
-spec msg(string()) -> 'ok'.
msg(Msg) ->
case os:type() of
- {unix, _} ->
+ {unix, _} -> % Output a message on 'stderr', if possible
P = open_port({fd, 0, 2}, [out]),
port_command(P, Msg),
true = port_close(P),
@@ -197,3 +995,106 @@ msg(Msg) ->
end.
%%--------------------------------------------------------------------
+%% Version and help messages.
+%%--------------------------------------------------------------------
+
+-spec version_message() -> no_return().
+
+version_message() ->
+ io:format("TypEr version "++?VSN++"\n"),
+ erlang:halt(0).
+
+-spec help_message() -> no_return().
+
+help_message() ->
+ S = <<" Usage: typer [--help] [--version] [--plt PLT] [--edoc]
+ [--show | --show-exported | --annotate | --annotate-inc-files]
+ [-Ddefine]* [-I include_dir]* [-T application]* [-r] file*
+
+ Options:
+ -r dir*
+ search directories recursively for .erl files below them
+ --show
+ Prints type specifications for all functions on stdout.
+ (this is the default behaviour; this option is not really needed)
+ --show-exported (or --show_exported)
+ Same as --show, but prints specifications for exported functions only
+ Specs are displayed sorted alphabetically on the function's name
+ --annotate
+ Annotates the specified files with type specifications
+ --annotate-inc-files
+ Same as --annotate but annotates all -include() files as well as
+ all .erl files (use this option with caution - has not been tested much)
+ --edoc
+ Prints type information as Edoc @spec comments, not as type specs
+ --plt PLT
+ Use the specified dialyzer PLT file rather than the default one
+ -T file*
+ The specified file(s) already contain type specifications and these
+ are to be trusted in order to print specs for the rest of the files
+ (Multiple files or dirs, separated by spaces, can be specified.)
+ -Dname (or -Dname=value)
+ pass the defined name(s) to TypEr
+ (The syntax of defines is the same as that used by \"erlc\".)
+ -I include_dir
+ pass the include_dir to TypEr
+ (The syntax of includes is the same as that used by \"erlc\".)
+ --version (or -v)
+ prints the Typer version and exits
+ --help (or -h)
+ prints this message and exits
+
+ Note:
+ * denotes that multiple occurrences of these options are possible.
+">>,
+ io:put_chars(S),
+ erlang:halt(0).
+
+%%--------------------------------------------------------------------
+%% Handling of messages reporting unknown (external) types.
+%%--------------------------------------------------------------------
+
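+%% Collect all 'ext_types' messages currently in this process' mailbox.
+%% A 'done' marker is sent to self() first, so only messages that arrived
+%% before this call are picked up; the result is a sorted list without
+%% duplicates.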
+rcv_ext_types() ->
+ Self = self(),
+ Self ! {Self, done},
+ rcv_ext_types(Self, []).
+
+rcv_ext_types(Self, ExtTypes) ->
+ receive
+ {Self, ext_types, ExtType} ->
+ rcv_ext_types(Self, [ExtType|ExtTypes]);
+ {Self, done} ->
+ lists:usort(ExtTypes)
+ end.
+
+%%--------------------------------------------------------------------
+%% A convenient abstraction of a Key-Value mapping data structure
+%% specialized for the uses in this module
+%%--------------------------------------------------------------------
+
+-type map() :: dict().
+
+-spec map__new() -> map().
+map__new() ->
+ dict:new().
+
+-spec map__insert({term(), term()}, map()) -> map().
+map__insert(Object, Map) ->
+ {Key, Value} = Object,
+ dict:store(Key, Value, Map).
+
+-spec map__lookup(term(), map()) -> term().
+map__lookup(Key, Map) ->
+ try dict:fetch(Key, Map) catch error:_ -> none end.
+
+-spec map__from_list([{fa(), term()}]) -> map().
+map__from_list(List) ->
+ dict:from_list(List).
+
+-spec map__remove(term(), map()) -> map().
+map__remove(Key, Dict) ->
+ dict:erase(Key, Dict).
+
+-spec map__fold(fun((term(), term(), term()) -> map()), map(), map()) -> map().
+map__fold(Fun, Acc0, Dict) ->
+ dict:fold(Fun, Acc0, Dict).
diff --git a/lib/typer/src/typer.hrl b/lib/typer/src/typer.hrl
deleted file mode 100644
index c331dd82db..0000000000
--- a/lib/typer/src/typer.hrl
+++ /dev/null
@@ -1,64 +0,0 @@
-%% -*- erlang-indent-level: 2 -*-
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
-%%
-%% The contents of this file are subject to the Erlang Public License,
-%% Version 1.1, (the "License"); you may not use this file except in
-%% compliance with the License. You should have received a copy of the
-%% Erlang Public License along with this software. If not, it can be
-%% retrieved online at http://www.erlang.org/.
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% %CopyrightEnd%
-%%
-
--define(SHOW, show).
--define(SHOW_EXPORTED, show_exported).
--define(ANNOTATE, annotate).
--define(ANNOTATE_INC_FILES, annotate_inc_files).
-
--type mode() :: ?SHOW | ?SHOW_EXPORTED | ?ANNOTATE | ?ANNOTATE_INC_FILES.
-
--record(typer_analysis,
- {mode :: mode(),
- macros = [] :: [{atom(), _}], % {macro_name, value}
- includes = [] :: [string()],
-
- %% Esp for Dialyzer
- %% ----------------------
- code_server = dialyzer_codeserver:new():: dialyzer_codeserver:codeserver(),
- callgraph = dialyzer_callgraph:new() :: dialyzer_callgraph:callgraph(),
- ana_files = [] :: [string()], % absolute filenames
- plt = none :: 'none' | string(),
-
- %% Esp for TypEr
- %% ----------------------
- t_files = [] :: [string()],
-
- %% For choosing between contracts or comments
- contracts = true :: boolean(),
-
- %% Any file in 'final_files' is compilable.
- %% And we need to keep it as {FileName,ModuleName}
- %% in case filename does NOT match with moduleName
- final_files = [] :: [{string(), atom()}],
-
- ex_func = typer_map:new() :: dict(),
- record = typer_map:new() :: dict(),
-
- %% Functions: the line number of the function
- %% should be kept as well
- func = typer_map:new() :: dict(),
- inc_func = typer_map:new() :: dict(),
- trust_plt = dialyzer_plt:new() :: dialyzer_plt:plt()}).
-
--record(args,
- {analyze = [] :: [string()],
- analyzed_dir_r = [] :: [string()],
- trust = [] :: [string()]}).
diff --git a/lib/typer/src/typer_annotator.erl b/lib/typer/src/typer_annotator.erl
deleted file mode 100644
index 68a8f03a5c..0000000000
--- a/lib/typer/src/typer_annotator.erl
+++ /dev/null
@@ -1,384 +0,0 @@
-%% -*- erlang-indent-level: 2 -*-
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2010. All Rights Reserved.
-%%
-%% The contents of this file are subject to the Erlang Public License,
-%% Version 1.1, (the "License"); you may not use this file except in
-%% compliance with the License. You should have received a copy of the
-%% Erlang Public License along with this software. If not, it can be
-%% retrieved online at http://www.erlang.org/.
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% %CopyrightEnd%
-%%
-%%============================================================================
-%% File : typer_annotator.erl
-%% Author : Bingwen He <[email protected]>
-%% Description :
-%% If file 'FILENAME' has been analyzed, then the output of
-%% command "diff -B FILENAME.erl typer_ann/FILENAME.ann.erl"
-%% should be exactly what TypEr has added, namely type info.
-%%============================================================================
-
--module(typer_annotator).
-
--export([annotate/1]).
-
-%%----------------------------------------------------------------------------
-
--include("typer.hrl").
-
-%%----------------------------------------------------------------------------
-
--define(TYPER_ANN_DIR, "typer_ann").
-
--type func_info() :: {non_neg_integer(), atom(), arity()}.
-
--record(info, {recMap = typer_map:new() :: dict(),
- funcs = [] :: [func_info()],
- typeMap :: dict(),
- contracts :: boolean()}).
--record(inc, {map = typer_map:new() :: dict(),
- filter = [] :: [string()]}).
-
-%%----------------------------------------------------------------------------
-
--spec annotate(#typer_analysis{}) -> 'ok'.
-
-annotate(Analysis) ->
- case Analysis#typer_analysis.mode of
- ?SHOW -> show(Analysis);
- ?SHOW_EXPORTED -> show(Analysis);
- ?ANNOTATE ->
- Fun = fun({File, Module}) ->
- Info = get_final_info(File, Module, Analysis),
- write_typed_file(File, Info)
- end,
- lists:foreach(Fun, Analysis#typer_analysis.final_files);
- ?ANNOTATE_INC_FILES ->
- IncInfo = write_and_collect_inc_info(Analysis),
- write_inc_files(IncInfo)
- end.
-
-write_and_collect_inc_info(Analysis) ->
- Fun = fun({File, Module}, Inc) ->
- Info = get_final_info(File, Module, Analysis),
- write_typed_file(File, Info),
- IncFuns = get_functions(File, Analysis),
- collect_imported_funcs(IncFuns, Info#info.typeMap, Inc)
- end,
- NewInc = lists:foldl(Fun,#inc{}, Analysis#typer_analysis.final_files),
- clean_inc(NewInc).
-
-write_inc_files(Inc) ->
- Fun =
- fun (File) ->
- Val = typer_map:lookup(File,Inc#inc.map),
- %% Val is function with its type info
- %% in form [{{Line,F,A},Type}]
- Functions = [Key || {Key,_} <- Val],
- Val1 = [{{F,A},Type} || {{_Line,F,A},Type} <- Val],
- Info = #info{typeMap = typer_map:from_list(Val1),
- recMap = typer_map:new(),
- %% Note we need to sort functions here!
- funcs = lists:keysort(1, Functions)},
- %% io:format("TypeMap ~p\n", [Info#info.typeMap]),
- %% io:format("Funcs ~p\n", [Info#info.funcs]),
- %% io:format("RecMap ~p\n", [Info#info.recMap]),
- write_typed_file(File, Info)
- end,
- lists:foreach(Fun, dict:fetch_keys(Inc#inc.map)).
-
-show(Analysis) ->
- Fun = fun({File, Module}) ->
- Info = get_final_info(File, Module, Analysis),
- show_type_info_only(File, Info)
- end,
- lists:foreach(Fun, Analysis#typer_analysis.final_files).
-
-get_final_info(File, Module, Analysis) ->
- RecMap = get_recMap(File, Analysis),
- TypeMap = get_typeMap(Module, Analysis,RecMap),
- Functions = get_functions(File, Analysis),
- Contracts = Analysis#typer_analysis.contracts,
- #info{recMap=RecMap, funcs=Functions, typeMap=TypeMap, contracts=Contracts}.
-
-collect_imported_funcs(Funcs, TypeMap, TmpInc) ->
- %% Coming from other sourses, including:
- %% FIXME: How to deal with yecc-generated file????
- %% --.yrl (yecc-generated file)???
- %% -- yeccpre.hrl (yecc-generated file)???
- %% -- other cases
- Fun = fun({File,_} = Obj, Inc) ->
- case is_yecc_file(File, Inc) of
- {yecc_generated, NewInc} -> NewInc;
- {not_yecc, NewInc} ->
- check_imported_funcs(Obj, NewInc, TypeMap)
- end
- end,
- lists:foldl(Fun, TmpInc, Funcs).
-
--spec is_yecc_file(string(), #inc{}) -> {'not_yecc', #inc{}}
- | {'yecc_generated', #inc{}}.
-is_yecc_file(File, Inc) ->
- case lists:member(File, Inc#inc.filter) of
- true -> {yecc_generated, Inc};
- false ->
- case filename:extension(File) of
- ".yrl" ->
- Rootname = filename:rootname(File, ".yrl"),
- Obj = Rootname ++ ".erl",
- case lists:member(Obj, Inc#inc.filter) of
- true -> {yecc_generated, Inc};
- false ->
- NewFilter = [Obj|Inc#inc.filter],
- NewInc = Inc#inc{filter = NewFilter},
- {yecc_generated, NewInc}
- end;
- _ ->
- case filename:basename(File) of
- "yeccpre.hrl" -> {yecc_generated, Inc};
- _ -> {not_yecc, Inc}
- end
- end
- end.
-
-check_imported_funcs({File, {Line, F, A}}, Inc, TypeMap) ->
- IncMap = Inc#inc.map,
- FA = {F, A},
- Type = get_type_info(FA, TypeMap),
- case typer_map:lookup(File, IncMap) of
- none -> %% File is not added. Add it
- Obj = {File,[{FA, {Line, Type}}]},
- NewMap = typer_map:insert(Obj, IncMap),
- Inc#inc{map = NewMap};
- Val -> %% File is already in. Check.
- case lists:keyfind(FA, 1, Val) of
- false ->
- %% Function is not in; add it
- Obj = {File, Val ++ [{FA, {Line, Type}}]},
- NewMap = typer_map:insert(Obj, IncMap),
- Inc#inc{map = NewMap};
- Type ->
- %% Function is in and with same type
- Inc;
- _ ->
- %% Function is in but with diff type
- inc_warning(FA, File),
- Elem = lists:keydelete(FA, 1, Val),
- NewMap = case Elem of
- [] ->
- typer_map:remove(File, IncMap);
- _ ->
- typer_map:insert({File, Elem}, IncMap)
- end,
- Inc#inc{map = NewMap}
- end
- end.
-
-inc_warning({F, A}, File) ->
- io:format(" ***Warning: Skip function ~p/~p ", [F, A]),
- io:format("in file ~p because of inconsistent type\n", [File]).
-
-clean_inc(Inc) ->
- Inc1 = remove_yecc_generated_file(Inc),
- normalize_obj(Inc1).
-
-remove_yecc_generated_file(TmpInc) ->
- Fun = fun(Key, Inc) ->
- NewMap = typer_map:remove(Key, Inc#inc.map),
- Inc#inc{map = NewMap}
- end,
- lists:foldl(Fun, TmpInc, TmpInc#inc.filter).
-
-normalize_obj(TmpInc) ->
- Fun = fun(Key, Val, Inc) ->
- NewVal = [{{Line,F,A},Type} || {{F,A},{Line,Type}} <- Val],
- typer_map:insert({Key,NewVal}, Inc)
- end,
- NewMap = typer_map:fold(Fun, typer_map:new(), TmpInc#inc.map),
- TmpInc#inc{map = NewMap}.
-
-get_recMap(File, Analysis) ->
- typer_map:lookup(File, Analysis#typer_analysis.record).
-
-get_typeMap(Module, Analysis, RecMap) ->
- TypeInfoPlt = Analysis#typer_analysis.trust_plt,
- TypeInfo =
- case dialyzer_plt:lookup_module(TypeInfoPlt, Module) of
- none -> [];
- {value, List} -> List
- end,
- CodeServer = Analysis#typer_analysis.code_server,
- TypeInfoList = [get_type(I, CodeServer, RecMap) || I <- TypeInfo],
- typer_map:from_list(TypeInfoList).
-
-get_type({{M, F, A} = MFA, Range, Arg}, CodeServer, RecMap) ->
- case dialyzer_codeserver:lookup_mfa_contract(MFA, CodeServer) of
- error ->
- {{F, A}, {Range, Arg}};
- {ok, {_FileLine, Contract}} ->
- Sig = erl_types:t_fun(Arg, Range),
- case dialyzer_contracts:check_contract(Contract, Sig) of
- ok -> {{F, A}, {contract, Contract}};
- {error, {extra_range, _, _}} ->
- {{F, A}, {contract, Contract}};
- {error, invalid_contract} ->
- CString = dialyzer_contracts:contract_to_string(Contract),
- SigString = dialyzer_utils:format_sig(Sig, RecMap),
- typer:error(
- io_lib:format("Error in contract of function ~w:~w/~w\n"
- "\t The contract is: " ++ CString ++ "\n" ++
- "\t but the inferred signature is: ~s",
- [M, F, A, SigString]));
- {error, Msg} when is_list(Msg) -> % Msg is a string()
- typer:error(
- io_lib:format("Error in contract of function ~w:~w/~w: ~s",
- [M, F, A, Msg]))
- end
- end.
-
-get_functions(File, Analysis) ->
- case Analysis#typer_analysis.mode of
- ?SHOW ->
- Funcs = typer_map:lookup(File, Analysis#typer_analysis.func),
- Inc_Funcs = typer_map:lookup(File, Analysis#typer_analysis.inc_func),
- remove_module_info(Funcs) ++ normalize_incFuncs(Inc_Funcs);
- ?SHOW_EXPORTED ->
- Ex_Funcs = typer_map:lookup(File, Analysis#typer_analysis.ex_func),
- remove_module_info(Ex_Funcs);
- ?ANNOTATE ->
- Funcs = typer_map:lookup(File, Analysis#typer_analysis.func),
- remove_module_info(Funcs);
- ?ANNOTATE_INC_FILES ->
- typer_map:lookup(File, Analysis#typer_analysis.inc_func)
- end.
-
-normalize_incFuncs(Funcs) ->
- [FuncInfo || {_FileName, FuncInfo} <- Funcs].
-
--spec remove_module_info([func_info()]) -> [func_info()].
-
-remove_module_info(FuncInfoList) ->
- F = fun ({_,module_info,0}) -> false;
- ({_,module_info,1}) -> false;
- ({Line,F,A}) when is_integer(Line), is_atom(F), is_integer(A) -> true
- end,
- lists:filter(F, FuncInfoList).
-
-write_typed_file(File, Info) ->
- io:format(" Processing file: ~p\n", [File]),
- Dir = filename:dirname(File),
- RootName = filename:basename(filename:rootname(File)),
- Ext = filename:extension(File),
- TyperAnnDir = filename:join(Dir, ?TYPER_ANN_DIR),
- TmpNewFilename = lists:concat([RootName,".ann",Ext]),
- NewFileName = filename:join(TyperAnnDir, TmpNewFilename),
- case file:make_dir(TyperAnnDir) of
- {error, Reason} ->
- case Reason of
- eexist -> %% TypEr dir exists; remove old typer files
- ok = file:delete(NewFileName),
- write_typed_file(File, Info, NewFileName);
- enospc ->
- io:format(" Not enough space in ~p\n", [Dir]);
- eacces ->
- io:format(" No write permission in ~p\n", [Dir]);
- _ ->
- io:format("Unknown error when writing ~p\n", [Dir]),
- halt()
- end;
- ok -> %% Typer dir does NOT exist
- write_typed_file(File, Info, NewFileName)
- end.
-
-write_typed_file(File, Info, NewFileName) ->
- {ok, Binary} = file:read_file(File),
- Chars = binary_to_list(Binary),
- write_typed_file(Chars, NewFileName, Info, 1, []),
- io:format(" Saved as: ~p\n", [NewFileName]).
-
-write_typed_file(Chars, File, #info{funcs = []}, _LNo, _Acc) ->
- ok = file:write_file(File, list_to_binary(Chars), [append]);
-write_typed_file([Ch|Chs] = Chars, File, Info, LineNo, Acc) ->
- [{Line,F,A}|RestFuncs] = Info#info.funcs,
- case Line of
- 1 -> %% This will happen only for inc files
- ok = raw_write(F, A, Info, File, []),
- NewInfo = Info#info{funcs = RestFuncs},
- NewAcc = [],
- write_typed_file(Chars, File, NewInfo, Line, NewAcc);
- _ ->
- case Ch of
- 10 ->
- NewLineNo = LineNo + 1,
- {NewInfo, NewAcc} =
- case NewLineNo of
- Line ->
- ok = raw_write(F, A, Info, File, [Ch|Acc]),
- {Info#info{funcs = RestFuncs}, []};
- _ ->
- {Info, [Ch|Acc]}
- end,
- write_typed_file(Chs, File, NewInfo, NewLineNo, NewAcc);
- _ ->
- write_typed_file(Chs, File, Info, LineNo, [Ch|Acc])
- end
- end.
-
-raw_write(F, A, Info, File, Content) ->
- TypeInfo = get_type_string(F, A, Info, file),
- ContentList = lists:reverse(Content) ++ TypeInfo ++ "\n",
- ContentBin = list_to_binary(ContentList),
- file:write_file(File, ContentBin, [append]).
-
-get_type_string(F, A, Info, Mode) ->
- Type = get_type_info({F,A}, Info#info.typeMap),
- TypeStr =
- case Type of
- {contract, C} ->
- dialyzer_contracts:contract_to_string(C);
- {RetType, ArgType} ->
- dialyzer_utils:format_sig(erl_types:t_fun(ArgType, RetType),
- Info#info.recMap)
- end,
- case Info#info.contracts of
- true ->
- case {Mode, Type} of
- {file, {contract, _}} -> "";
- _ ->
- Prefix = lists:concat(["-spec ", F]),
- lists:concat([Prefix, TypeStr, "."])
- end;
- false ->
- Prefix = lists:concat(["%% @spec ", F]),
- lists:concat([Prefix, TypeStr, "."])
- end.
-
-show_type_info_only(File, Info) ->
- io:format("\n%% File: ~p\n%% ", [File]),
- OutputString = lists:concat(["~.", length(File)+8, "c~n"]),
- io:fwrite(OutputString, [$-]),
- Fun = fun ({_LineNo, F, A}) ->
- TypeInfo = get_type_string(F, A, Info, show),
- io:format("~s\n", [TypeInfo])
- end,
- lists:foreach(Fun, Info#info.funcs).
-
-get_type_info(Func, TypeMap) ->
- case typer_map:lookup(Func, TypeMap) of
- none ->
- %% Note: Typeinfo of any function should exist in
- %% the result offered by dialyzer, otherwise there
- %% *must* be something wrong with the analysis
- io:format("No type info for function: ~p\n", [Func]),
- halt();
- {contract, _Fun} = C -> C;
- {_RetType, _ArgType} = RA -> RA
- end.
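For context on the removed annotator code: get_type_string/4 above assembles the printed line with lists:concat/1. A minimal sketch with hypothetical values (the function name foo and the signature string are invented; in the real code TypeStr comes from dialyzer_utils:format_sig/2 or dialyzer_contracts:contract_to_string/1):

    F = foo,
    TypeStr = "(integer()) -> atom()",
    Prefix = lists:concat(["-spec ", F]),                    %% "-spec foo"
    "-spec foo(integer()) -> atom()." = lists:concat([Prefix, TypeStr, "."]).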
diff --git a/lib/typer/src/typer_info.erl b/lib/typer/src/typer_info.erl
deleted file mode 100644
index ea25fa6f68..0000000000
--- a/lib/typer/src/typer_info.erl
+++ /dev/null
@@ -1,162 +0,0 @@
-%% -*- erlang-indent-level: 2 -*-
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
-%%
-%% The contents of this file are subject to the Erlang Public License,
-%% Version 1.1, (the "License"); you may not use this file except in
-%% compliance with the License. You should have received a copy of the
-%% Erlang Public License along with this software. If not, it can be
-%% retrieved online at http://www.erlang.org/.
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% %CopyrightEnd%
-%%
-
--module(typer_info).
-
--export([collect/1]).
-
--type func_info() :: {non_neg_integer(), atom(), arity()}.
--type inc_file_info() :: {string(), func_info()}.
-
--record(tmpAcc, {file :: string(),
- module :: atom(),
- funcAcc=[] :: [func_info()],
- incFuncAcc=[] :: [inc_file_info()],
- dialyzerObj=[] :: [{mfa(), {_, _}}]}).
-
--include("typer.hrl").
-
--spec collect(#typer_analysis{}) -> #typer_analysis{}.
-
-collect(Analysis) ->
- NewPlt =
- try get_dialyzer_plt(Analysis) of
- DialyzerPlt ->
- dialyzer_plt:merge_plts([Analysis#typer_analysis.trust_plt, DialyzerPlt])
- catch
- throw:{dialyzer_error,_Reason} ->
- typer:error("Dialyzer's PLT is missing or is not up-to-date; please (re)create it")
- end,
- NewAnalysis = lists:foldl(fun collect_one_file_info/2,
- Analysis#typer_analysis{trust_plt = NewPlt},
- Analysis#typer_analysis.ana_files),
- %% Process Remote Types
- TmpCServer = NewAnalysis#typer_analysis.code_server,
- NewCServer =
- try
- NewRecords = dialyzer_codeserver:get_temp_records(TmpCServer),
- OldRecords = dialyzer_plt:get_types(NewPlt),
- MergedRecords = dialyzer_utils:merge_records(NewRecords, OldRecords),
- %% io:format("Merged Records ~p",[MergedRecords]),
- TmpCServer1 = dialyzer_codeserver:set_temp_records(MergedRecords, TmpCServer),
- TmpCServer2 = dialyzer_utils:process_record_remote_types(TmpCServer1),
- dialyzer_contracts:process_contract_remote_types(TmpCServer2)
- catch
- throw:{error, ErrorMsg} ->
- typer:error(ErrorMsg)
- end,
- NewAnalysis#typer_analysis{code_server = NewCServer}.
-
-collect_one_file_info(File, Analysis) ->
- Ds = [{d,Name,Val} || {Name,Val} <- Analysis#typer_analysis.macros],
- %% Current directory should also be included in "Includes".
- Includes = [filename:dirname(File)|Analysis#typer_analysis.includes],
- Is = [{i,Dir} || Dir <- Includes],
- Options = dialyzer_utils:src_compiler_opts() ++ Is ++ Ds,
- case dialyzer_utils:get_abstract_code_from_src(File, Options) of
- {error, Reason} ->
- %% io:format("File=~p\n,Options=~p\n,Error=~p\n", [File,Options,Reason]),
- typer:compile_error(Reason);
- {ok, AbstractCode} ->
- case dialyzer_utils:get_core_from_abstract_code(AbstractCode, Options) of
- error -> typer:compile_error(["Could not get core erlang for "++File]);
- {ok, Core} ->
- case dialyzer_utils:get_record_and_type_info(AbstractCode) of
- {error, Reason} -> typer:compile_error([Reason]);
- {ok, Records} ->
- Mod = list_to_atom(filename:basename(File, ".erl")),
- case dialyzer_utils:get_spec_info(Mod, AbstractCode, Records) of
- {error, Reason} -> typer:compile_error([Reason]);
- {ok, SpecInfo} ->
- analyze_core_tree(Core, Records, SpecInfo, Analysis, File)
- end
- end
- end
- end.
-
-analyze_core_tree(Core, Records, SpecInfo, Analysis, File) ->
- Module = list_to_atom(filename:basename(File, ".erl")),
- TmpTree = cerl:from_records(Core),
- CS1 = Analysis#typer_analysis.code_server,
- NextLabel = dialyzer_codeserver:get_next_core_label(CS1),
- {Tree, NewLabel} = cerl_trees:label(TmpTree, NextLabel),
- CS2 = dialyzer_codeserver:insert(Module, Tree, CS1),
- CS3 = dialyzer_codeserver:set_next_core_label(NewLabel, CS2),
- CS4 = dialyzer_codeserver:store_temp_records(Module, Records, CS3),
- CS5 = dialyzer_codeserver:store_temp_contracts(Module, SpecInfo, CS4),
- Ex_Funcs = [{0,F,A} || {_,_,{F,A}} <- cerl:module_exports(Tree)],
- TmpCG = Analysis#typer_analysis.callgraph,
- CG = dialyzer_callgraph:scan_core_tree(Tree, TmpCG),
- Fun = fun analyze_one_function/2,
- All_Defs = cerl:module_defs(Tree),
- Acc = lists:foldl(Fun, #tmpAcc{file=File, module=Module}, All_Defs),
- Exported_FuncMap = typer_map:insert({File, Ex_Funcs},
- Analysis#typer_analysis.ex_func),
- %% NOTE: we must sort all functions in the file which
- %% originate from this file by *numerical order* of lineNo
- Sorted_Functions = lists:keysort(1, Acc#tmpAcc.funcAcc),
- FuncMap = typer_map:insert({File, Sorted_Functions},
- Analysis#typer_analysis.func),
- %% NOTE: However we do not need to sort functions
- %% which are imported from included files.
- IncFuncMap = typer_map:insert({File, Acc#tmpAcc.incFuncAcc},
- Analysis#typer_analysis.inc_func),
- Final_Files = Analysis#typer_analysis.final_files ++ [{File, Module}],
- RecordMap = typer_map:insert({File, Records}, Analysis#typer_analysis.record),
- Analysis#typer_analysis{final_files=Final_Files,
- callgraph=CG,
- code_server=CS5,
- ex_func=Exported_FuncMap,
- inc_func=IncFuncMap,
- record=RecordMap,
- func=FuncMap}.
-
-analyze_one_function({Var, FunBody} = Function, Acc) ->
- F = cerl:fname_id(Var),
- A = cerl:fname_arity(Var),
- TmpDialyzerObj = {{Acc#tmpAcc.module, F, A}, Function},
- NewDialyzerObj = Acc#tmpAcc.dialyzerObj ++ [TmpDialyzerObj],
- [_, LineNo, {file, FileName}] = cerl:get_ann(FunBody),
- BaseName = filename:basename(FileName),
- FuncInfo = {LineNo, F, A},
- OriginalName = Acc#tmpAcc.file,
- {FuncAcc, IncFuncAcc} =
- case (FileName =:= OriginalName) orelse (BaseName =:= OriginalName) of
- true -> %% Coming from original file
- %% io:format("Added function ~p\n", [{LineNo, F, A}]),
- {Acc#tmpAcc.funcAcc ++ [FuncInfo], Acc#tmpAcc.incFuncAcc};
- false ->
- %% Coming from other sourses, including:
- %% -- .yrl (yecc-generated file)
- %% -- yeccpre.hrl (yecc-generated file)
- %% -- other cases
- {Acc#tmpAcc.funcAcc, Acc#tmpAcc.incFuncAcc ++ [{FileName, FuncInfo}]}
- end,
- Acc#tmpAcc{funcAcc = FuncAcc,
- incFuncAcc = IncFuncAcc,
- dialyzerObj = NewDialyzerObj}.
-
-get_dialyzer_plt(#typer_analysis{plt = PltFile0}) ->
- PltFile =
- case PltFile0 =:= none of
- true -> dialyzer_plt:get_default_plt();
- false -> PltFile0
- end,
- dialyzer_plt:from_file(PltFile).
diff --git a/lib/typer/src/typer_map.erl b/lib/typer/src/typer_map.erl
deleted file mode 100644
index bf62dea651..0000000000
--- a/lib/typer/src/typer_map.erl
+++ /dev/null
@@ -1,47 +0,0 @@
-%% -*- erlang-indent-level: 2 -*-
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
-%%
-%% The contents of this file are subject to the Erlang Public License,
-%% Version 1.1, (the "License"); you may not use this file except in
-%% compliance with the License. You should have received a copy of the
-%% Erlang Public License along with this software. If not, it can be
-%% retrieved online at http://www.erlang.org/.
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% %CopyrightEnd%
-%%
--module(typer_map).
-
--export([new/0, insert/2, lookup/2, from_list/1, remove/2, fold/3]).
-
--spec new() -> dict().
-new() ->
- dict:new().
-
--spec insert({term(), term()}, dict()) -> dict().
-insert(Object, Dict) ->
- {Key, Value} = Object,
- dict:store(Key, Value, Dict).
-
--spec lookup(term(), dict()) -> any().
-lookup(Key, Dict) ->
- try dict:fetch(Key, Dict) catch error:_ -> none end.
-
--spec from_list([{term(), term()}]) -> dict().
-from_list(List) ->
- dict:from_list(List).
-
--spec remove(term(), dict()) -> dict().
-remove(Key, Dict) ->
- dict:erase(Key, Dict).
-
--spec fold(fun((term(), term(), term()) -> term()), term(), dict()) -> term().
-fold(Fun, Acc0, Dict) ->
- dict:fold(Fun, Acc0, Dict).
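The removed typer_map module is a thin wrapper around dict. A short usage sketch of the API it exposed (keys and values here are hypothetical):

    M0 = typer_map:new(),
    M1 = typer_map:insert({{foo, 1}, type_info}, M0),    %% insert/2 takes a {Key, Value} pair
    type_info = typer_map:lookup({foo, 1}, M1),
    none = typer_map:lookup({bar, 2}, M1),                %% lookup/2 returns none for missing keys
    M2 = typer_map:remove({foo, 1}, M1),
    0 = typer_map:fold(fun(_K, _V, N) -> N + 1 end, 0, M2).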
diff --git a/lib/typer/src/typer_options.erl b/lib/typer/src/typer_options.erl
deleted file mode 100644
index 1e53b1b305..0000000000
--- a/lib/typer/src/typer_options.erl
+++ /dev/null
@@ -1,191 +0,0 @@
-%% -*- erlang-indent-level: 2 -*-
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
-%%
-%% The contents of this file are subject to the Erlang Public License,
-%% Version 1.1, (the "License"); you may not use this file except in
-%% compliance with the License. You should have received a copy of the
-%% Erlang Public License along with this software. If not, it can be
-%% retrieved online at http://www.erlang.org/.
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% %CopyrightEnd%
-%%
-%%===========================================================================
-%% File : typer_options.erl
-%% Author : Bingwen He <[email protected]>
-%% Description : Handles all command-line options given to TypEr
-%%===========================================================================
-
--module(typer_options).
-
--export([process/0]).
-
-%%---------------------------------------------------------------------------
-
--include("typer.hrl").
-
-%%---------------------------------------------------------------------------
-%% Exported functions
-%%---------------------------------------------------------------------------
-
--spec process() -> {#args{}, #typer_analysis{}}.
-
-process() ->
- ArgList = init:get_plain_arguments(),
- %% io:format("Args is ~p\n",[Args]),
- {Args, Analysis} = analyze_args(ArgList, #args{}, #typer_analysis{}),
- %% if the mode has not been set, set it to the default mode (show)
- {Args, case Analysis#typer_analysis.mode of
- undefined -> Analysis#typer_analysis{mode = ?SHOW};
- Mode when is_atom(Mode) -> Analysis
- end}.
-
-%%---------------------------------------------------------------------------
-%% Internal functions
-%%---------------------------------------------------------------------------
-
-analyze_args([], Args, Analysis) ->
- {Args, Analysis};
-analyze_args(ArgList, Args, Analysis) ->
- {Result, Rest} = cl(ArgList),
- {NewArgs, NewAnalysis} = analyze_result(Result, Args, Analysis),
- analyze_args(Rest, NewArgs, NewAnalysis).
-
-cl(["-h"|_]) -> help_message();
-cl(["--help"|_]) -> help_message();
-cl(["-v"|_]) -> version_message();
-cl(["--version"|_]) -> version_message();
-cl(["--comments"|Opts]) -> {comments, Opts};
-cl(["--show"|Opts]) -> {{mode, ?SHOW}, Opts};
-cl(["--show_exported"|Opts]) -> {{mode, ?SHOW_EXPORTED}, Opts};
-cl(["--show-exported"|Opts]) -> {{mode, ?SHOW_EXPORTED}, Opts};
-cl(["--annotate"|Opts]) -> {{mode, ?ANNOTATE}, Opts};
-cl(["--annotate-inc-files"|Opts]) -> {{mode, ?ANNOTATE_INC_FILES}, Opts};
-cl(["--plt",Plt|Opts]) -> {{plt, Plt}, Opts};
-cl(["-D"++Def|Opts]) ->
- case Def of
- "" -> typer:error("no variable name specified after -D");
- _ ->
- L = re:split(Def, "=", [{return, list}]),
- DefPair = process_def_list(L),
- {{def, DefPair}, Opts}
- end;
-cl(["-I",Dir|Opts]) -> {{inc,Dir}, Opts};
-cl(["-I"++Dir|Opts]) ->
- case Dir of
- "" -> typer:error("no include directory specified after -I");
- _ -> {{inc, Dir}, Opts}
- end;
-cl(["-T"|Opts]) ->
- {Files, RestOpts} = dialyzer_cl_parse:collect_args(Opts),
- case Files of
- [] -> typer:error("no file or directory specified after -T");
- [_|_] -> {{trust, Files}, RestOpts}
- end;
-cl(["-r"|Opts]) ->
- {Files, RestOpts} = dialyzer_cl_parse:collect_args(Opts),
- {{a_dir_r, Files}, RestOpts};
-cl(["-"++H|_]) -> typer:error("unknown option -"++H);
-cl(Opts) ->
- {Args, RestOpts} = dialyzer_cl_parse:collect_args(Opts),
- {{analyze, Args}, RestOpts}.
-
-process_def_list(L) ->
- case L of
- [Name, Value] ->
- {ok, Tokens, _} = erl_scan:string(Value ++ "."),
- {ok, ErlValue} = erl_parse:parse_term(Tokens),
- {list_to_atom(Name), ErlValue};
- [Name] ->
- {list_to_atom(Name), true}
- end.
-
-%% Get information about files that the user trusts and wants to analyze
-analyze_result({analyze, Val}, Args, Analysis) ->
- NewVal = Args#args.analyze ++ Val,
- {Args#args{analyze = NewVal}, Analysis};
-analyze_result({a_dir_r, Val}, Args, Analysis) ->
- NewVal = Args#args.analyzed_dir_r ++ Val,
- {Args#args{analyzed_dir_r = NewVal}, Analysis};
-analyze_result({trust, Val}, Args, Analysis) ->
- NewVal = Args#args.trust ++ Val,
- {Args#args{trust = NewVal}, Analysis};
-analyze_result(comments, Args, Analysis) ->
- {Args, Analysis#typer_analysis{contracts = false}};
-%% Get useful information for actual analysis
-analyze_result({mode, Val}, Args, Analysis) ->
- case Analysis#typer_analysis.mode of
- undefined -> {Args, Analysis#typer_analysis{mode = Val}};
- _ -> mode_error()
- end;
-analyze_result({def, Val}, Args, Analysis) ->
- NewVal = Analysis#typer_analysis.macros ++ [Val],
- {Args, Analysis#typer_analysis{macros = NewVal}};
-analyze_result({inc, Val}, Args, Analysis) ->
- NewVal = Analysis#typer_analysis.includes ++ [Val],
- {Args, Analysis#typer_analysis{includes = NewVal}};
-analyze_result({plt, Plt}, Args, Analysis) ->
- {Args, Analysis#typer_analysis{plt = Plt}}.
-
-%%--------------------------------------------------------------------
-
--spec mode_error() -> no_return().
-mode_error() ->
- typer:error("can not do \"show\", \"show-exported\", \"annotate\", and \"annotate-inc-files\" at the same time").
-
--spec version_message() -> no_return().
-version_message() ->
- io:format("TypEr version "++?VSN++"\n"),
- erlang:halt(0).
-
--spec help_message() -> no_return().
-help_message() ->
- S = " Usage: typer [--help] [--version] [--comments] [--plt PLT]
- [--show | --show-exported | --annotate | --annotate-inc-files]
- [-Ddefine]* [-I include_dir]* [-T application]* [-r] file*
-
- Options:
- -r dir*
- search directories recursively for .erl files below them
- --show
- Prints type specifications for all functions on stdout.
- (this is the default behaviour; this option is not really needed)
- --show-exported (or --show_exported)
- Same as --show, but prints specifications for exported functions only
- Specs are displayed sorted alphabetically on the function's name
- --annotate
- Annotates the specified files with type specifications
- --annotate-inc-files
- Same as --annotate but annotates all -include() files as well as
- all .erl files (use this option with caution - has not been tested much)
- --comments
- Prints type information using Edoc comments, not type specs
- --plt PLT
- Use the specified dialyzer PLT file rather than the default one
- -T file*
- The specified file(s) already contain type specifications and these
- are to be trusted in order to print specs for the rest of the files
- (Multiple files or dirs, separated by spaces, can be specified.)
- -Dname (or -Dname=value)
- pass the defined name(s) to TypEr
- (The syntax of defines is the same as that used by \"erlc\".)
- -I include_dir
- pass the include_dir to TypEr
- (The syntax of includes is the same as that used by \"erlc\".)
- --version (or -v)
- prints the Typer version and exits
- --help (or -h)
- prints this message and exits
-
- Note:
- * denotes that multiple occurrences of these options are possible.
-",
- io:put_chars(S),
- erlang:halt(0).
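The -D handling in cl/1 above splits the argument on "=" and parses the value as an Erlang term. A self-contained sketch of that step (the function name parse_def/1 is illustrative; it merely combines the split done in cl/1 with process_def_list/1):

    parse_def(Def) ->
        case re:split(Def, "=", [{return, list}]) of
            [Name, Value] ->
                {ok, Tokens, _} = erl_scan:string(Value ++ "."),
                {ok, Term} = erl_parse:parse_term(Tokens),
                {list_to_atom(Name), Term};
            [Name] ->
                {list_to_atom(Name), true}
        end.
    %% parse_def("debug=true") yields {debug, true}
    %% parse_def("vsn")        yields {vsn, true}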
diff --git a/lib/typer/src/typer_preprocess.erl b/lib/typer/src/typer_preprocess.erl
deleted file mode 100644
index 7cb0b9932b..0000000000
--- a/lib/typer/src/typer_preprocess.erl
+++ /dev/null
@@ -1,154 +0,0 @@
-%% -*- erlang-indent-level: 2 -*-
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2009. All Rights Reserved.
-%%
-%% The contents of this file are subject to the Erlang Public License,
-%% Version 1.1, (the "License"); you may not use this file except in
-%% compliance with the License. You should have received a copy of the
-%% Erlang Public License along with this software. If not, it can be
-%% retrieved online at http://www.erlang.org/.
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% %CopyrightEnd%
-%%
-
--module(typer_preprocess).
-
--export([get_all_files/2]).
-
--include("typer.hrl").
-
-%%----------------------------------------------------------------------------
-
--spec get_all_files(#args{}, 'analysis' | 'trust') -> [string()].
-
-get_all_files(Args, analysis) ->
- case internal_get_all_files(Args#args.analyze,
- Args#args.analyzed_dir_r,
- fun test_erl_file_exclude_ann/1) of
- [] -> typer:error("no file(s) to analyze");
- AllFiles -> AllFiles
- end;
-get_all_files(Args, trust) ->
- internal_get_all_files(Args#args.trust, [], fun test_erl_file/1).
-
--spec test_erl_file_exclude_ann(string()) -> boolean().
-
-test_erl_file_exclude_ann(File) ->
- case filename:extension(File) of
- ".erl" -> %% Exclude files ending with ".ann.erl"
- case re:run(File, "[\.]ann[\.]erl$") of
- {match, _} -> false;
- nomatch -> true
- end;
- _ -> false
- end.
-
--spec test_erl_file(string()) -> boolean().
-
-test_erl_file(File) ->
- filename:extension(File) =:= ".erl".
-
--spec internal_get_all_files([string()], [string()],
- fun((string()) -> boolean())) -> [string()].
-
-internal_get_all_files(File_Dir, Dir_R, Fun) ->
- All_File_1 = process_file_and_dir(File_Dir, Fun),
- All_File_2 = process_dir_recursively(Dir_R, Fun),
- remove_dup(All_File_1 ++ All_File_2).
-
--spec process_file_and_dir([string()],
- fun((string()) -> boolean())) -> [string()].
-
-process_file_and_dir(File_Dir, TestFun) ->
- Fun =
- fun (Elem, Acc) ->
- case filelib:is_regular(Elem) of
- true -> process_file(Elem, TestFun, Acc);
- false -> check_dir(Elem, non_recursive, Acc, TestFun)
- end
- end,
- lists:foldl(Fun, [], File_Dir).
-
--spec process_dir_recursively([string()],
- fun((string()) -> boolean())) -> [string()].
-
-process_dir_recursively(Dirs, TestFun) ->
- Fun = fun (Dir, Acc) ->
- check_dir(Dir, recursive, Acc, TestFun)
- end,
- lists:foldl(Fun, [], Dirs).
-
--spec check_dir(string(),
- 'non_recursive' | 'recursive',
- [string()],
- fun((string()) -> boolean())) -> [string()].
-
-check_dir(Dir, Mode, Acc, Fun) ->
- case file:list_dir(Dir) of
- {ok, Files} ->
- {TmpDirs, TmpFiles} = split_dirs_and_files(Files, Dir),
- case Mode of
- non_recursive ->
- FinalFiles = process_file_and_dir(TmpFiles, Fun),
- Acc ++ FinalFiles;
- recursive ->
- TmpAcc1 = process_file_and_dir(TmpFiles, Fun),
- TmpAcc2 = process_dir_recursively(TmpDirs, Fun),
- Acc ++ TmpAcc1 ++ TmpAcc2
- end;
- {error, eacces} ->
- typer:error("no access permission to dir \""++Dir++"\"");
- {error, enoent} ->
- typer:error("cannot access "++Dir++": No such file or directory");
- {error, _Reason} ->
- typer:error("error involving a use of file:list_dir/1")
- end.
-
-%% Same order as the input list
--spec process_file(string(), fun((string()) -> boolean()), string()) -> [string()].
-
-process_file(File, TestFun, Acc) ->
- case TestFun(File) of
- true -> Acc ++ [File];
- false -> Acc
- end.
-
-%% Same order as the input list
--spec split_dirs_and_files([string()], string()) -> {[string()], [string()]}.
-
-split_dirs_and_files(Elems, Dir) ->
- Test_Fun =
- fun (Elem, {DirAcc, FileAcc}) ->
- File = filename:join(Dir, Elem),
- case filelib:is_regular(File) of
- false -> {[File|DirAcc], FileAcc};
- true -> {DirAcc, [File|FileAcc]}
- end
- end,
- {Dirs, Files} = lists:foldl(Test_Fun, {[], []}, Elems),
- {lists:reverse(Dirs), lists:reverse(Files)}.
-
-%%-----------------------------------------------------------------------
-%% Utilities
-%%-----------------------------------------------------------------------
-
-%% Removes duplicate filenames but it keeps the order of the input list
-
--spec remove_dup([string()]) -> [string()].
-
-remove_dup(Files) ->
- Test_Dup = fun (File, Acc) ->
- case lists:member(File, Acc) of
- true -> Acc;
- false -> [File|Acc]
- end
- end,
- Reversed_Elems = lists:foldl(Test_Dup, [], Files),
- lists:reverse(Reversed_Elems).
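remove_dup/1 above keeps the first occurrence of each filename and preserves input order (at quadratic cost via lists:member/2). An illustrative call:

    ["a.erl", "b.erl"] = remove_dup(["a.erl", "b.erl", "a.erl", "b.erl"]).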
diff --git a/lib/typer/vsn.mk b/lib/typer/vsn.mk
index 7f4aabb335..51561939ac 100644
--- a/lib/typer/vsn.mk
+++ b/lib/typer/vsn.mk
@@ -1 +1 @@
-TYPER_VSN = 0.1.7.5
+TYPER_VSN = 0.9
diff --git a/lib/wx/c_src/wxe_driver.c b/lib/wx/c_src/wxe_driver.c
index 310325ea26..2404b13cc3 100644
--- a/lib/wx/c_src/wxe_driver.c
+++ b/lib/wx/c_src/wxe_driver.c
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2008-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2008-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -117,8 +117,7 @@ wxe_driver_start(ErlDrvPort port, char *buff)
if(WXE_DRV_PORT == 0) {
for(; *buff != 32; buff++);
buff++;
- erl_wx_privdir = malloc(strlen(buff));
- strcpy(erl_wx_privdir, buff);
+ erl_wx_privdir = strdup(buff);
WXE_DRV_PORT = port;
wxe_master = data;
@@ -146,7 +145,6 @@ static void
wxe_driver_unload(void)
{
// fprintf(stderr, "%s:%d: UNLOAD \r\n", __FILE__,__LINE__);
- meta_command(WXE_SHUTDOWN, wxe_master);
stop_native_gui(wxe_master);
unload_native_gui();
free(wxe_master);
diff --git a/lib/wx/c_src/wxe_impl.cpp b/lib/wx/c_src/wxe_impl.cpp
index 79d1a29519..365fb691a1 100644
--- a/lib/wx/c_src/wxe_impl.cpp
+++ b/lib/wx/c_src/wxe_impl.cpp
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2008-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2008-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -78,6 +78,21 @@ extern void erts_thread_disable_fpe(void);
}
#endif
+#if defined(__APPLE__) && defined(__MACH__) && !defined(__DARWIN__)
+#define __DARWIN__ 1
+#endif
+
+#ifdef __DARWIN__
+extern "C" {
+ int erl_drv_stolen_main_thread_join(ErlDrvTid tid, void **respp);
+ int erl_drv_steal_main_thread(char *name,
+ ErlDrvTid *dtid,
+ void* (*func)(void*),
+ void* arg,
+ ErlDrvThreadOpts *opts);
+}
+#endif
+
void *wxe_main_loop(void * );
/* ************************************************************
@@ -99,8 +114,14 @@ int start_native_gui(wxe_data *sd)
wxe_batch_locker_c = erl_drv_cond_create((char *)"wxe_batch_locker_c");
init_caller = driver_connected(sd->port);
- if((res = erl_drv_thread_create((char *)"wxwidgets",
- &wxe_thread,wxe_main_loop,(void *) sd->pdl,NULL)) == 0) {
+#ifdef __DARWIN__
+ res = erl_drv_steal_main_thread((char *)"wxwidgets",
+ &wxe_thread,wxe_main_loop,(void *) sd->pdl,NULL);
+#else
+ res = erl_drv_thread_create((char *)"wxwidgets",
+ &wxe_thread,wxe_main_loop,(void *) sd->pdl,NULL);
+#endif
+ if(res == 0) {
erl_drv_mutex_lock(wxe_status_m);
for(;wxe_status == WXE_NOT_INITIATED;) {
erl_drv_cond_wait(wxe_status_c, wxe_status_m);
@@ -117,7 +138,14 @@ int start_native_gui(wxe_data *sd)
void stop_native_gui(wxe_data *sd)
{
+ if(wxe_status == WXE_INITIATED) {
+ meta_command(WXE_SHUTDOWN, sd);
+ }
+#ifdef __DARWIN__
+ erl_drv_stolen_main_thread_join(wxe_thread, NULL);
+#else
erl_drv_thread_join(wxe_thread, NULL);
+#endif
erl_drv_mutex_destroy(wxe_status_m);
erl_drv_cond_destroy(wxe_status_c);
erl_drv_mutex_destroy(wxe_batch_locker_m);
@@ -182,8 +210,8 @@ void *wxe_main_loop(void *vpdl)
{
int result;
int argc = 1;
- char * temp = (char *) "Erlang\0";
- char ** argv = &temp;
+ char * temp = (char *) "Erlang";
+ char * argv[] = {temp,NULL};
ErlDrvPDL pdl = (ErlDrvPDL) vpdl;
driver_pdl_inc_refc(pdl);
@@ -202,7 +230,9 @@ void *wxe_main_loop(void *vpdl)
/* We are done try to make a clean exit */
wxe_status = WXE_EXITED;
driver_pdl_dec_refc(pdl);
+#ifndef __DARWIN__
erl_drv_thread_exit(NULL);
+#endif
return NULL;
} else {
erl_drv_mutex_lock(wxe_status_m);
diff --git a/lib/wx/c_src/wxe_ps_init.c b/lib/wx/c_src/wxe_ps_init.c
index e787c214bd..a85f751024 100644
--- a/lib/wx/c_src/wxe_ps_init.c
+++ b/lib/wx/c_src/wxe_ps_init.c
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2008-2009. All Rights Reserved.
+ * Copyright Ericsson AB 2008-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
diff --git a/lib/wx/src/wxe_master.erl b/lib/wx/src/wxe_master.erl
index d8592c133b..9efe59054c 100644
--- a/lib/wx/src/wxe_master.erl
+++ b/lib/wx/src/wxe_master.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -128,19 +128,19 @@ init([]) ->
process_flag(trap_exit, true),
DriverWithArgs = DriverName ++ " " ++ code:priv_dir(wx) ++ [0],
- case catch open_port({spawn, DriverWithArgs},[binary]) of
- {'EXIT', Err} ->
- erlang:error({open_port,Err});
- Port ->
- wx_debug_info = ets:new(wx_debug_info, [named_table]),
- wx_non_consts = ets:new(wx_non_consts, [named_table]),
- true = ets:insert(wx_debug_info, wxdebug_table()),
- spawn_link(fun() -> debug_ping(Port) end),
- receive
- {wx_consts, List} ->
- true = ets:insert(wx_non_consts, List)
- end,
- {ok, #state{cb_port=Port, driver=DriverName, users=gb_sets:empty()}}
+ try
+ Port = open_port({spawn, DriverWithArgs},[binary]),
+ wx_debug_info = ets:new(wx_debug_info, [named_table]),
+ wx_non_consts = ets:new(wx_non_consts, [named_table]),
+ true = ets:insert(wx_debug_info, wxdebug_table()),
+ spawn_link(fun() -> debug_ping(Port) end),
+ receive
+ {wx_consts, List} ->
+ true = ets:insert(wx_non_consts, List)
+ end,
+ {ok, #state{cb_port=Port, driver=DriverName, users=gb_sets:empty()}}
+ catch _:Err ->
+ error({Err, "Could not initiate graphics"})
end.
%%--------------------------------------------------------------------
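The wxe_master change above replaces the old "case catch Expr of {'EXIT', _} -> ..." idiom with try/catch. A minimal side-by-side sketch (function names are hypothetical; the real code raises error({Err, "Could not initiate graphics"}) instead of returning an error tuple):

    %% old idiom: catch returns either the value or {'EXIT', Reason};
    %% a thrown term would land in the success clause unnoticed
    open_old(Cmd) ->
        case catch open_port({spawn, Cmd}, [binary]) of
            {'EXIT', Err} -> {error, Err};
            Port -> {ok, Port}
        end.

    %% new idiom: normal returns and exceptions take clearly separate paths
    open_new(Cmd) ->
        try open_port({spawn, Cmd}, [binary]) of
            Port -> {ok, Port}
        catch
            _:Err -> {error, Err}
        end.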
diff --git a/lib/xmerl/src/xmerl_sax_parser_utf16be.erlsrc b/lib/xmerl/src/xmerl_sax_parser_utf16be.erlsrc
index fae5346e6a..5c995a5a9c 100644
--- a/lib/xmerl/src/xmerl_sax_parser_utf16be.erlsrc
+++ b/lib/xmerl/src/xmerl_sax_parser_utf16be.erlsrc
@@ -2,7 +2,7 @@
%%--------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -35,6 +35,6 @@
%% STRING_REST and STRING_UNBOUND_REST is only different in the list case
-define(STRING_UNBOUND_REST(MatchChar, Rest), <<MatchChar/big-utf16, Rest/binary>>).
--define(BYTE_ORDER_MARK_1, undefined).
+-define(BYTE_ORDER_MARK_1, undefined_bom1).
-define(BYTE_ORDER_MARK_2, <<16#FE>>).
-define(BYTE_ORDER_MARK_REST(Rest), <<16#FE, 16#FF, Rest/binary>>).
diff --git a/lib/xmerl/src/xmerl_sax_parser_utf16le.erlsrc b/lib/xmerl/src/xmerl_sax_parser_utf16le.erlsrc
index 5e1f0a217c..5c6ca0caba 100644
--- a/lib/xmerl/src/xmerl_sax_parser_utf16le.erlsrc
+++ b/lib/xmerl/src/xmerl_sax_parser_utf16le.erlsrc
@@ -2,7 +2,7 @@
%%--------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -35,6 +35,6 @@
%% STRING_REST and STRING_UNBOUND_REST is only different in the list case
-define(STRING_UNBOUND_REST(MatchChar, Rest), <<MatchChar/little-utf16, Rest/binary>>).
--define(BYTE_ORDER_MARK_1, undefined).
+-define(BYTE_ORDER_MARK_1, undefined_bom1).
-define(BYTE_ORDER_MARK_2, <<16#FF>>).
-define(BYTE_ORDER_MARK_REST(Rest), <<16#FF, 16#FE, Rest/binary>>).
diff --git a/lib/xmerl/src/xmerl_uri.erl b/lib/xmerl/src/xmerl_uri.erl
index d8edb2e6e1..a0c6f1c2a7 100644
--- a/lib/xmerl/src/xmerl_uri.erl
+++ b/lib/xmerl/src/xmerl_uri.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2011. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -359,9 +359,9 @@ scan_host(C0) ->
%% Hex4=<?HEX ->
%% {C1,lists:reverse(lists:append(IPv6address))};
{C1,Hostname,[A|_HostF]} ->
- {C1,lists:reverse(lists:append(Hostname))};
- _ ->
- {error,no_host}
+ {C1,lists:reverse(lists:append(Hostname))}
+%% _ ->
+%% {error,no_host}
end.
scan_host2([H|C0],Acc,CurF,Host,HostF) when $0=<H,H=<$9 ->