author    Micael Karlberg <[email protected]>  2011-05-27 14:38:08 +0200
committer Micael Karlberg <[email protected]>  2011-05-27 14:38:08 +0200
commit    1c01cf2fb625e105dd2355279635edbb3dbb3063 (patch)
tree      5dbd5a391b86d99c00a01c456f817f30209f1962 /lib/dialyzer/test/r9c_SUITE_data/src
parent    77fe4062599e4ffe897c21d22ad493cfec4b475c (diff)
parent    4a5a75811e2cd590b5c94f71864a5245fd511ccf (diff)
download  otp-1c01cf2fb625e105dd2355279635edbb3dbb3063.tar.gz
          otp-1c01cf2fb625e105dd2355279635edbb3dbb3063.tar.bz2
          otp-1c01cf2fb625e105dd2355279635edbb3dbb3063.zip
Merge branch 'maint-r14' of super:otp into maint-r14
Diffstat (limited to 'lib/dialyzer/test/r9c_SUITE_data/src')
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/Makefile | 142
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/Restrictions.txt | 55
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1.app.src | 20
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1.appup.src | 162
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1_db.erl | 160
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1_records.hrl | 96
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl | 1904
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl | 5566
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl | 1468
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl | 1357
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_per.erl | 1234
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_gen.erl | 1664
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_gen_ber.erl | 1525
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_gen_ber_bin_v2.erl | 1562
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_gen_per.erl | 1189
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_gen_per_rt2ct.erl | 1811
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_name.erl | 225
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser.yrl | 1162
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl | 2763
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_pretty_format.erl | 197
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_tok.erl | 351
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_value.erl | 330
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt.erl | 69
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl | 2310
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl | 1849
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_check.erl | 333
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_driver_handler.erl | 108
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl | 1593
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl | 2176
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl | 2102
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_v1.erl | 1827
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/notes_history.sgml | 97
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/notes_latest.sgml | 97
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/Makefile | 178
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl | 1582
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl | 260
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/http.hrl | 127
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl | 745
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_handler.erl | 724
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl | 542
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd.erl | 594
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd.hrl | 77
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_acceptor.erl | 174
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_acceptor_sup.erl | 116
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_conf.erl | 688
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_example.erl | 134
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl | 1029
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_misc_sup.erl | 113
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl | 344
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_request_handler.erl | 994
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_response.erl | 437
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_socket.erl | 381
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_sup.erl | 202
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_util.erl | 773
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_verbosity.erl | 93
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_verbosity.hrl | 62
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/inets.app.src | 56
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/inets.appup.src | 133
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/inets.config | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/inets_sup.erl | 158
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl | 138
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_actions.erl | 92
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_alias.erl | 175
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth.erl | 748
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth.hrl | 26
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_dets.erl | 222
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl | 269
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_plain.erl | 338
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_server.erl | 422
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_browser.erl | 213
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_cgi.erl | 692
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_dir.erl | 266
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_disk_log.erl | 404
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl | 481
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_get.erl | 151
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_head.erl | 89
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl | 1136
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_include.erl | 722
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_log.erl | 250
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl | 380
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl | 320
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_security.erl | 307
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_security_server.erl | 727
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_trace.erl | 64
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/uri.erl | 349
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/Makefile | 136
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.app.src | 50
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.appup.src | 6
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl | 2191
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.hrl | 117
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_backup.erl | 194
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl | 1168
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl | 1283
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint_sup.erl | 39
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_controller.erl | 2010
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_dumper.erl | 1092
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_event.erl | 260
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_frag.erl | 1201
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_frag_hash.erl | 118
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_frag_old_hash.erl | 127
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_index.erl | 379
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_kernel_sup.erl | 60
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_late_loader.erl | 95
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_lib.erl | 1276
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl | 805
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl | 1021
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_log.erl | 1019
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl | 776
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_recover.erl | 1174
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_registry.erl | 276
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl | 2898
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_snmp_hook.erl | 271
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_snmp_sup.erl | 39
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_sp.erl | 35
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_subscr.erl | 491
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_sup.erl | 136
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_text.erl | 189
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl | 2173
118 files changed, 80308 insertions, 0 deletions
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/Makefile b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/Makefile
new file mode 100644
index 0000000000..9dba643327
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/Makefile
@@ -0,0 +1,142 @@
+#
+# Copyright (C) 1997, Ericsson Telecommunications
+# Author: Kenneth Lundin
+#
+include $(ERL_TOP)/make/target.mk
+include $(ERL_TOP)/make/$(TARGET)/otp.mk
+
+# ----------------------------------------------------
+# Application version
+# ----------------------------------------------------
+include ../vsn.mk
+VSN=$(ASN1_VSN)
+
+# ----------------------------------------------------
+# Release directory specification
+# ----------------------------------------------------
+RELSYSDIR = $(RELEASE_PATH)/lib/asn1-$(VSN)
+
+
+
+
+#
+# Common Macros
+#
+# PARSER_SRC = \
+# asn1ct_parser.yrl
+
+# PARSER_MODULE=$(PARSER_SRC:%.yrl=%)
+
+EBIN = ../ebin
+CT_MODULES= \
+ asn1ct \
+ asn1ct_check \
+ asn1_db \
+ asn1ct_pretty_format \
+ asn1ct_gen \
+ asn1ct_gen_per \
+ asn1ct_gen_per_rt2ct \
+ asn1ct_name \
+ asn1ct_constructed_per \
+ asn1ct_constructed_ber \
+ asn1ct_gen_ber \
+ asn1ct_constructed_ber_bin_v2 \
+ asn1ct_gen_ber_bin_v2 \
+ asn1ct_value \
+ asn1ct_tok \
+ asn1ct_parser2
+
+RT_MODULES= \
+ asn1rt \
+ asn1rt_per \
+ asn1rt_per_bin \
+ asn1rt_per_v1 \
+ asn1rt_ber_bin \
+ asn1rt_ber_bin_v2 \
+ asn1rt_per_bin_rt2ct \
+ asn1rt_driver_handler \
+ asn1rt_check
+
+# asn1rt_ber_v1 \
+# asn1rt_ber \
+# the rt module to use is defined in asn1_records.hrl
+# and must be updated when an incompatible change is done in the rt modules
+
+
+MODULES= $(CT_MODULES) $(RT_MODULES)
+
+ERL_FILES = $(MODULES:%=%.erl)
+
+TARGET_FILES = $(MODULES:%=$(EBIN)/%.$(EMULATOR))
+
+GENERATED_PARSER = $(PARSER_MODULE:%=%.erl)
+
+# internal hrl file
+HRL_FILES = asn1_records.hrl
+
+APP_FILE = asn1.app
+APPUP_FILE = asn1.appup
+
+APP_SRC = $(APP_FILE).src
+APP_TARGET = $(EBIN)/$(APP_FILE)
+
+APPUP_SRC = $(APPUP_FILE).src
+APPUP_TARGET = $(EBIN)/$(APPUP_FILE)
+
+EXAMPLES = \
+ ../examples/P-Record.asn
+
+# ----------------------------------------------------
+# FLAGS
+# ----------------------------------------------------
+ERL_FLAGS +=
+ERL_COMPILE_FLAGS += \
+ -I$(ERL_TOP)/lib/stdlib \
+ +warn_unused_vars
+YRL_FLAGS =
+# ----------------------------------------------------
+# Targets
+# ----------------------------------------------------
+
+debug opt: $(TARGET_FILES) $(APP_TARGET) $(APPUP_TARGET)
+
+
+clean:
+ rm -f $(TARGET_FILES) $(APP_TARGET) $(APPUP_TARGET) $(GENERATED_PARSER)
+ rm -f core *~
+
+docs:
+
+
+# ----------------------------------------------------
+# Special Build Targets
+# ----------------------------------------------------
+
+$(EBIN)/asn1ct.$(EMULATOR):asn1ct.erl
+ $(ERLC) -b$(EMULATOR) -o$(EBIN) $(ERL_COMPILE_FLAGS) -Dvsn=\"$(VSN)\" $<
+
+$(APP_TARGET): $(APP_SRC) ../vsn.mk
+ sed -e 's;%VSN%;$(VSN);' $< > $@
+
+$(APPUP_TARGET): $(APPUP_SRC) ../vsn.mk
+ sed -e 's;%VSN%;$(VSN);' $< > $@
+
+
+# ----------------------------------------------------
+# Release Target
+# ----------------------------------------------------
+include $(ERL_TOP)/make/otp_release_targets.mk
+
+release_spec: opt
+ $(INSTALL_DIR) $(RELSYSDIR)/ebin
+ $(INSTALL_DATA) $(TARGET_FILES) $(APP_TARGET) $(APPUP_TARGET) $(RELSYSDIR)/ebin
+ $(INSTALL_DIR) $(RELSYSDIR)/src
+ $(INSTALL_DATA) $(PARSER_SRC) $(ERL_FILES) $(HRL_FILES) $(APP_SRC) $(APPUP_SRC) $(RELSYSDIR)/src
+ $(INSTALL_DIR) $(RELSYSDIR)/examples
+ $(INSTALL_DATA) $(EXAMPLES) $(RELSYSDIR)/examples
+
+# there are no include files to be used by the user
+#$(INSTALL_DIR) $(RELSYSDIR)/include
+#$(INSTALL_DATA) $(HRL_FILES) $(RELSYSDIR)/include
+
+release_docs_spec:
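
The $(APP_TARGET) and $(APPUP_TARGET) rules above only substitute the %VSN% placeholder with the ASN1_VSN value picked up from ../vsn.mk. A minimal sketch of the effect on the vsn line of asn1.app.src (shown further down in this diff); the version string 1.4.2 is an assumed example value, not taken from this commit:

    {vsn, "%VSN%"},     %% in asn1.app.src, before the sed rule runs
    {vsn, "1.4.2"},     %% in the generated ../ebin/asn1.app, after substitution
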
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/Restrictions.txt b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/Restrictions.txt
new file mode 100644
index 0000000000..d1d1855dc9
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/Restrictions.txt
@@ -0,0 +1,55 @@
+The following restrictions apply to this implementation of the ASN.1 compiler:
+
+Supported encoding rules are:
+BER
+PER (aligned)
+
+PER (unaligned) IS NOT SUPPORTED
+
+Supported types are:
+
+INTEGER
+BOOLEAN
+ENUMERATION
+SEQUENCE
+SEQUENCE OF
+SET
+SET OF
+CHOICE
+OBJECT IDENTIFIER
+RestrictedCharacterStringTypes
+UnrestrictedCharacterStringTypes
+
+
+NOT SUPPORTED types are:
+ANY (IS NOT IN THE STANDARD ANY MORE)
+ANY DEFINED BY (IS NOT IN THE STANDARD ANY MORE)
+EXTERNAL
+EMBEDDED-PDV
+REAL
+
+The support for value definitions in the ASN.1 notation is very limited.
+
+The support for constraints is limited to:
+SizeConstraint SIZE(X)
+SingleValue (1)
+ValueRange (X..Y)
+PermittedAlphabet FROM
+
+The only supported value-notation for SEQUENCE and SET in Erlang is
+the record variant.
+The list notation with named components used by the old ASN.1 compiler
+was supported in the first versions of this compiler, but it is no longer
+supported.
+
+The decode functions always return a symbolic value if they can.
+
+
+Files with ASN.1 source must have the suffix .asn1. The suffix .py used by the
+old ASN.1 compiler is supported in this version but will not be supported in the future.
+
+Generated files:
+X.asn1db % the intermediate format of a compiled ASN.1 module
+X.hrl % generated Erlang include file for module X
+X.erl % generated Erlang module with encode/decode functions for
+ % ASN.1 module X
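
To make the notes above concrete, a hypothetical ASN.1 module X (module, type and field names here are invented for illustration; the real option handling lives in asn1ct.erl further down) would be compiled and used roughly like this from the Erlang shell:

    1> asn1ct:compile("X", [ber]).      %% writes X.asn1db, X.hrl and X.erl
    ok
    2> rr("X.hrl").                     %% load the generated record definitions
    3> {ok, Bytes} = asn1ct:encode('X', 'SomeType', #'SomeType'{field = 1}).
    4> {ok, Value} = asn1ct:decode('X', 'SomeType', Bytes).

Step 3 uses the record variant of the value notation mentioned above: a SEQUENCE or SET value is given as an Erlang record defined in the generated X.hrl.
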
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1.app.src b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1.app.src
new file mode 100644
index 0000000000..2ec06ff4db
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1.app.src
@@ -0,0 +1,20 @@
+{application, asn1,
+ [{description, "The Erlang ASN1 compiler version %VSN%"},
+ {vsn, "%VSN%"},
+ {modules, [
+ asn1rt,
+ asn1rt_per,
+ asn1rt_per_v1,
+ asn1rt_per_bin,
+ asn1rt_per_bin_rt2ct,
+ asn1rt_ber_bin,
+ asn1rt_ber_bin_v2,
+ asn1rt_check,
+ asn1rt_driver_handler
+ ]},
+ {registered, [
+ asn1_driver_owner
+ ]},
+ {env, []},
+ {applications, [kernel, stdlib]}
+ ]}.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1.appup.src b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1.appup.src
new file mode 100644
index 0000000000..55ef53994a
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1.appup.src
@@ -0,0 +1,162 @@
+{"%VSN%",
+ [
+ {"1.3",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {add_module, asn1rt_per_bin},
+ {add_module, asn1rt_check},
+ {add_module, asn1rt_per_bin_rt2ct},
+ {add_module, asn1rt_ber_bin_v2},
+ {add_module, asn1rt_driver_handler},
+ {remove, {asn1rt_ber_v1, soft_purge, soft_purge}}
+ ]
+ },
+ {"1.3.1",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {add_module, asn1rt_per_bin},
+ {add_module, asn1rt_check},
+ {add_module, asn1rt_per_bin_rt2ct},
+ {add_module, asn1rt_ber_bin_v2},
+ {add_module, asn1rt_driver_handler},
+ {remove, {asn1rt_ber_v1, soft_purge, soft_purge}}
+ ]
+ },
+ {"1.3.1.1",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {add_module, asn1rt_per_bin},
+ {add_module, asn1rt_check},
+ {add_module, asn1rt_per_bin_rt2ct},
+ {add_module, asn1rt_ber_bin_v2},
+ {add_module, asn1rt_driver_handler},
+ {remove, {asn1rt_ber_v1, soft_purge, soft_purge}}
+ ]
+ },
+ {"1.3.2",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {load_module, asn1rt_per_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt_check, soft_purge, soft_purge, []},
+ {add_module, asn1rt_per_bin_rt2ct},
+ {add_module, asn1rt_ber_bin_v2},
+ {add_module, asn1rt_driver_handler},
+ {remove, {asn1rt_ber_v1, soft_purge, soft_purge}}
+ ]
+ },
+ {"1.3.3",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {load_module, asn1rt_per_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt_check, soft_purge, soft_purge, []},
+ {load_module, asn1rt_per_bin_rt2ct, soft_purge, soft_purge, []},
+ {add_module, asn1rt_ber_bin_v2},
+ {add_module, asn1rt_driver_handler},
+ {remove, {asn1rt_ber_v1, soft_purge, soft_purge}}
+ ]
+ },
+ {"1.3.3.1",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {load_module, asn1rt_per_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt_check, soft_purge, soft_purge, []},
+ {load_module, asn1rt_per_bin_rt2ct, soft_purge, soft_purge, []},
+ {add_module, asn1rt_ber_bin_v2},
+ {add_module, asn1rt_driver_handler},
+ {remove, {asn1rt_ber_v1, soft_purge, soft_purge}}
+ ]
+ }
+ ],
+ [
+ {"1.3",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {add_module, asn1rt_ber_v1},
+ {remove, {asn1rt_per_bin, soft_purge, soft_purge}},
+ {remove, {asn1rt_check, soft_purge, soft_purge}},
+ {remove, {asn1rt_per_bin_rt2ct, soft_purge, soft_purge}},
+ {remove, {asn1rt_ber_bin_v2, soft_purge, soft_purge}},
+ {remove, {asn1rt_driver_handler, soft_purge, soft_purge}}
+ ]
+ },
+ {"1.3.1",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {add_module, asn1rt_ber_v1},
+ {remove, {asn1rt_per_bin, soft_purge, soft_purge}},
+ {remove, {asn1rt_check, soft_purge, soft_purge}},
+ {remove, {asn1rt_per_bin_rt2ct, soft_purge, soft_purge}},
+ {remove, {asn1rt_ber_bin_v2, soft_purge, soft_purge}},
+ {remove, {asn1rt_driver_handler, soft_purge, soft_purge}}
+ ]
+ },
+ {"1.3.1.1",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {add_module, asn1rt_ber_v1},
+ {remove, {asn1rt_per_bin, soft_purge, soft_purge}},
+ {remove, {asn1rt_check, soft_purge, soft_purge}},
+ {remove, {asn1rt_per_bin_rt2ct, soft_purge, soft_purge}},
+ {remove, {asn1rt_ber_bin_v2, soft_purge, soft_purge}},
+ {remove, {asn1rt_driver_handler, soft_purge, soft_purge}}
+ ]
+ },
+ {"1.3.2",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {load_module, asn1rt_per_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt_check, soft_purge, soft_purge, []},
+ {add_module, asn1rt_ber_v1},
+ {remove, {asn1rt_per_bin_rt2ct, soft_purge, soft_purge}},
+ {remove, {asn1rt_ber_bin_v2, soft_purge, soft_purge}},
+ {remove, {asn1rt_driver_handler, soft_purge, soft_purge}}
+ ]
+ },
+ {"1.3.3",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {load_module, asn1rt_per_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt_check, soft_purge, soft_purge, []},
+ {load_module, asn1rt_per_bin_rt2ct, soft_purge, soft_purge, []},
+ {add_module, asn1rt_ber_v1},
+ {remove, {asn1rt_ber_bin_v2, soft_purge, soft_purge}},
+ {remove, {asn1rt_driver_handler, soft_purge, soft_purge}}
+ ]
+ },
+ {"1.3.3.1",
+ [
+ {load_module, asn1rt_per_v1, soft_purge, soft_purge, []},
+ {load_module, asn1rt_ber_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt, soft_purge, soft_purge, []},
+ {load_module, asn1rt_per_bin, soft_purge, soft_purge, []},
+ {load_module, asn1rt_check, soft_purge, soft_purge, []},
+ {load_module, asn1rt_per_bin_rt2ct, soft_purge, soft_purge, []},
+ {add_module, asn1rt_ber_v1},
+ {remove, {asn1rt_ber_bin_v2, soft_purge, soft_purge}},
+ {remove, {asn1rt_driver_handler, soft_purge, soft_purge}}
+ ]
+ }
+
+ ]}.
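
The resource above follows the standard OTP appup layout, {Vsn, UpFromInstructions, DownToInstructions}, keyed on the old application versions. A minimal, well-formed sketch of one entry, using the instruction shapes seen in this file (some_mod, some_new_mod and some_old_mod are placeholder module names):

    {"%VSN%",
     [{"1.3.3.1",
       [{load_module, some_mod, soft_purge, soft_purge, []},   %% reload a changed module
        {add_module, some_new_mod},                            %% introduce a new module
        {remove, {some_old_mod, soft_purge, soft_purge}}]}],   %% unload a dropped module
     [{"1.3.3.1",
       [{load_module, some_mod, soft_purge, soft_purge, []},
        {add_module, some_old_mod},
        {remove, {some_new_mod, soft_purge, soft_purge}}]}]}.
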
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1_db.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1_db.erl
new file mode 100644
index 0000000000..d5ddb9582b
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1_db.erl
@@ -0,0 +1,160 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1_db.erl,v 1.1 2008/12/17 09:53:29 mikpe Exp $
+%%
+-module(asn1_db).
+%-compile(export_all).
+-export([dbnew/1,dbsave/2,dbload/1,dbput/3,dbget/2,dbget_all/1]).
+-export([dbget_all_mod/1,dbstop/0,dbclear/0,dberase_module/1,dbstart/1,stop_server/1]).
+%% internal exports
+-export([dbloop0/1,dbloop/2]).
+
+%% Db stuff
+dbstart(Includes) ->
+ start_server(asn1db, asn1_db, dbloop0, [Includes]).
+
+dbloop0(Includes) ->
+ dbloop(Includes, ets:new(asn1, [set,named_table])).
+
+opentab(Tab,Mod,[]) ->
+ opentab(Tab,Mod,["."]);
+opentab(Tab,Mod,Includes) ->
+ Base = lists:concat([Mod,".asn1db"]),
+ opentab2(Tab,Base,Mod,Includes,ok).
+
+opentab2(_Tab,_Base,_Mod,[],Error) ->
+ Error;
+opentab2(Tab,Base,Mod,[Ih|It],_Error) ->
+ File = filename:join(Ih,Base),
+ case ets:file2tab(File) of
+ {ok,Modtab} ->
+ ets:insert(Tab,{Mod, Modtab}),
+ {ok,Modtab};
+ NewErr ->
+ opentab2(Tab,Base,Mod,It,NewErr)
+ end.
+
+
+dbloop(Includes, Tab) ->
+ receive
+ {From,{set, Mod, K2, V}} ->
+ [{_,Modtab}] = ets:lookup(Tab,Mod),
+ ets:insert(Modtab,{K2, V}),
+ From ! {asn1db, ok},
+ dbloop(Includes, Tab);
+ {From, {get, Mod, K2}} ->
+ Result = case ets:lookup(Tab,Mod) of
+ [] ->
+ opentab(Tab,Mod,Includes);
+ [{_,Modtab}] -> {ok,Modtab}
+ end,
+ case Result of
+ {ok,Newtab} ->
+ From ! {asn1db, lookup(Newtab, K2)};
+ _Error ->
+ From ! {asn1db, undefined}
+ end,
+ dbloop(Includes, Tab);
+ {From, {all_mod, Mod}} ->
+ [{_,Modtab}] = ets:lookup(Tab,Mod),
+ From ! {asn1db, ets:tab2list(Modtab)},
+ dbloop(Includes, Tab);
+ {From, {delete_mod, Mod}} ->
+ [{_,Modtab}] = ets:lookup(Tab,Mod),
+ ets:delete(Modtab),
+ ets:delete(Tab,Mod),
+ From ! {asn1db, ok},
+ dbloop(Includes, Tab);
+ {From, {save, OutFile,Mod}} ->
+ [{_,Mtab}] = ets:lookup(Tab,Mod),
+ {From ! {asn1db, ets:tab2file(Mtab,OutFile)}},
+ dbloop(Includes,Tab);
+ {From, {load, Mod}} ->
+ Result = case ets:lookup(Tab,Mod) of
+ [] ->
+ opentab(Tab,Mod,Includes);
+ [{_,Modtab}] -> {ok,Modtab}
+ end,
+	    From ! {asn1db,Result},
+ dbloop(Includes,Tab);
+ {From, {new, Mod}} ->
+ case ets:lookup(Tab,Mod) of
+ [{_,Modtab}] ->
+ ets:delete(Modtab);
+ _ ->
+ true
+ end,
+ Tabname = list_to_atom(lists:concat(["asn1_",Mod])),
+ ets:new(Tabname, [set,named_table]),
+ ets:insert(Tab,{Mod,Tabname}),
+ From ! {asn1db, ok},
+ dbloop(Includes,Tab);
+ {From, stop} ->
+ From ! {asn1db, ok}; %% nothing to store
+ {From, clear} ->
+ ModTabList = [Mt||{_,Mt} <- ets:tab2list(Tab)],
+ lists:foreach(fun(T) -> ets:delete(T) end,ModTabList),
+ ets:delete(Tab),
+ From ! {asn1db, cleared},
+ dbloop(Includes, ets:new(asn1, [set]))
+ end.
+
+
+%%all(Tab, K) ->
+%% pickup(K, ets:match(Tab, {{K, '$1'}, '$2'})).
+%%pickup(K, []) -> [];
+%%pickup(K, [[V1,V2] |T]) ->
+%% [{{K,V1},V2} | pickup(K, T)].
+
+lookup(Tab, K) ->
+ case ets:lookup(Tab, K) of
+ [] -> undefined;
+ [{K,V}] -> V
+ end.
+
+
+dbnew(Module) -> req({new,Module}).
+dbsave(OutFile,Module) -> req({save,OutFile,Module}).
+dbload(Module) -> req({load,Module}).
+
+dbput(Module,K,V) -> req({set, Module, K, V}).
+dbget(Module,K) -> req({get, Module, K}).
+dbget_all(K) -> req({get_all, K}).
+dbget_all_mod(Mod) -> req({all_mod,Mod}).
+dbstop() -> stop_server(asn1db).
+dbclear() -> req(clear).
+dberase_module({module,M})->
+ req({delete_mod, M}).
+
+req(R) ->
+ asn1db ! {self(), R},
+ receive {asn1db, Reply} -> Reply end.
+
+stop_server(Name) ->
+ stop_server(Name, whereis(Name)).
+stop_server(_, undefined) -> stopped;
+stop_server(Name, _Pid) ->
+ Name ! {self(), stop},
+ receive {Name, _} -> stopped end.
+
+
+start_server(Name,Mod,Fun,Args) ->
+ case whereis(Name) of
+ undefined ->
+ register(Name, spawn(Mod,Fun, Args));
+ _Pid ->
+ already_started
+ end.
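
asn1_db wraps a single registered process (asn1db) that owns one ETS table per ASN.1 module and answers the req/1 request/reply protocol above. A minimal usage sketch, with module and key names invented for the example:

    asn1_db:dbstart(["."]),                          %% start the asn1db server; "." is the include path
    asn1_db:dbnew('MyModule'),                       %% create the ETS table for MyModule
    asn1_db:dbput('MyModule', 'SomeKey', {some, value}),
    {some, value} = asn1_db:dbget('MyModule', 'SomeKey'),
    asn1_db:dbsave("MyModule.asn1db", 'MyModule'),   %% ets:tab2file/2 under the hood
    asn1_db:dbstop().
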
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1_records.hrl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1_records.hrl
new file mode 100644
index 0000000000..6ba4877523
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1_records.hrl
@@ -0,0 +1,96 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1_records.hrl,v 1.1 2008/12/17 09:53:29 mikpe Exp $
+%%
+-define('RT_BER',"asn1rt_ber_v1").
+-define('RT_BER_BIN',"asn1rt_ber_bin").
+-define('RT_PER',"asn1rt_per_v1").
+%% change to this when we have this module -define('RT_PER_BIN',"asn1rt_per_bin").
+-define('RT_PER_BIN',"asn1rt_per_bin").
+
+-record(module,{pos,name,defid,tagdefault='EXPLICIT',exports={exports,[]},imports={imports,[]}, extensiondefault=empty,typeorval}).
+
+-record('SEQUENCE',{pname=false,tablecinf=false,components=[]}).
+-record('SET',{pname=false,sorted=false,tablecinf=false,components=[]}).
+-record('ComponentType',{pos,name,typespec,prop,tags}).
+-record('ObjectClassFieldType',{classname,class,fieldname,type}).
+
+-record(typedef,{checked=false,pos,name,typespec}).
+-record(classdef,{checked=false,pos,name,typespec}).
+-record(valuedef,{checked=false,pos,name,type,value}).
+-record(ptypedef,{checked=false,pos,name,args,typespec}).
+-record(pvaluedef,{checked=false,pos,name,args,type,value}).
+-record(pvaluesetdef,{checked=false,pos,name,args,type,valueset}).
+-record(pobjectdef,{checked=false,pos,name,args,class,def}).
+-record(pobjectsetdef,{checked=false,pos,name,args,class,def}).
+
+-record(typereference,{pos,val}).
+-record(identifier,{pos,val}).
+-record(constraint,{c,e}).
+-record('Constraint',{'SingleValue'=no,'SizeConstraint'=no,'ValueRange'=no,'PermittedAlphabet'=no,
+ 'ContainedSubtype'=no, 'TypeConstraint'=no,'InnerSubtyping'=no,e=no,'Other'=no}).
+-record(simpletableattributes,{objectsetname,c_name,c_index,usedclassfield,
+ uniqueclassfield,valueindex}).
+-record(type,{tag=[],def,constraint=[],tablecinf=[],inlined=no}).
+
+-record(objectclass,{fields=[],syntax}).
+-record('Object',{classname,gen=true,def}).
+-record('ObjectSet',{class,gen=true,uniquefname,set}).
+
+-record(tag,{class,number,type,form=32}). % form = ?CONSTRUCTED
+% This record holds information about allowed constraint types per type
+-record(cmap,{single_value=no,contained_subtype=no,value_range=no,
+ size=no,permitted_alphabet=no,type_constraint=no,
+ inner_subtyping=no}).
+
+
+-record('EXTENSIONMARK',{pos,val}).
+
+% each IMPORT contains a list of 'SymbolsFromModule'
+-record('SymbolsFromModule',{symbols,module,objid}).
+
+% Externaltypereference -> modulename '.' typename
+-record('Externaltypereference',{pos,module,type}).
+% Externalvaluereference -> modulename '.' typename
+-record('Externalvaluereference',{pos,module,value}).
+
+-record(state,{module,mname,type,tname,value,vname,erule,parameters=[],
+ inputmodules,abscomppath=[],recordtopname=[],options}).
+
+%% state record used by backend at partial decode
+%% active is set to 'yes' when a partial decode function is generated.
+%% prefix is set to 'dec-inc-' or 'dec-partial-' for
+%% incomplete partial decode or partial decode respectively.
+%% inc_tag_pattern holds the tags of the significant types/components
+%% for incomplete partial decode.
+%% tag_pattern holds the tags for partial decode.
+%% inc_type_pattern and type_pattern hold the names of the
+%% significant types/components.
+%% func_name holds the name of the function for the toptype.
+%% namelist holds the list of names of types/components that still
+%% haven't been generated.
+%% tobe_refed_funcs is a list of tuples {function names
+%% (Types),namelist of incomplete decode spec}, with function names
+%% that are referenced within other generated partial incomplete
+%% decode functions. They shall be generated as partial incomplete
+%% decode functions.
+
+%% gen_refed_funcs is a list of function names. Unlike
+%% tobe_refed_funcs these have been generated.
+-record(gen_state,{active=false,prefix,inc_tag_pattern,
+ tag_pattern,inc_type_pattern,
+ type_pattern,func_name,namelist,
+ tobe_refed_funcs=[],gen_refed_funcs=[]}).
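
The default form=32 in the #tag{} record above is the ?CONSTRUCTED bit (2#00100000) that asn1ct.erl defines further down. As a purely illustrative example (the concrete values are chosen for the example, not taken from this diff), the universal tag of a constructed SEQUENCE would be written roughly as:

    #tag{class = 'UNIVERSAL', number = 16, type = 'IMPLICIT', form = 32}
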
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl
new file mode 100644
index 0000000000..fd36f1657e
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl
@@ -0,0 +1,1904 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct.erl,v 1.1 2008/12/17 09:53:29 mikpe Exp $
+%%
+-module(asn1ct).
+
+%% Compile-time functions for ASN.1 (e.g. the ASN.1 compiler).
+
+%%-compile(export_all).
+%% Public exports
+-export([compile/1, compile/2]).
+-export([start/0, start/1, stop/0]).
+-export([encode/2, encode/3, decode/3]).
+-export([test/1, test/2, test/3, value/2]).
+%% Application internal exports
+-export([compile_asn/3,compile_asn1/3,compile_py/3,compile/3,value/1,vsn/0,
+ create_ets_table/2,get_name_of_def/1,get_pos_of_def/1]).
+-export([read_config_data/1,get_gen_state_field/1,get_gen_state/0,
+ partial_inc_dec_toptype/1,save_gen_state/1,update_gen_state/2,
+ get_tobe_refed_func/1,reset_gen_state/0,is_function_generated/1,
+ generated_refed_func/1,next_refed_func/0,pop_namelist/0,
+ next_namelist_el/0,update_namelist/1,step_in_constructed/0,
+ add_tobe_refed_func/1,add_generated_refed_func/1]).
+
+-include("asn1_records.hrl").
+-include_lib("stdlib/include/erl_compile.hrl").
+
+-import(asn1ct_gen_ber_bin_v2,[encode_tag_val/3,decode_class/1]).
+
+-define(unique_names,0).
+-define(dupl_uniquedefs,1).
+-define(dupl_equaldefs,2).
+-define(dupl_eqdefs_uniquedefs,?dupl_equaldefs bor ?dupl_uniquedefs).
+
+-define(CONSTRUCTED, 2#00100000).
+
+%% macros used for partial decode commands
+-define(CHOOSEN,choosen).
+-define(SKIP,skip).
+-define(SKIP_OPTIONAL,skip_optional).
+
+%% macros used for partial incomplete decode commands
+-define(MANDATORY,mandatory).
+-define(DEFAULT,default).
+-define(OPTIONAL,opt).
+-define(PARTS,parts).
+-define(UNDECODED,undec).
+-define(ALTERNATIVE,alt).
+-define(ALTERNATIVE_UNDECODED,alt_undec).
+-define(ALTERNATIVE_PARTS,alt_parts).
+%-define(BINARY,bin).
+
+%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% This is the interface to the compiler
+%%
+%%
+
+
+compile(File) ->
+ compile(File,[]).
+
+compile(File,Options) when list(Options) ->
+ Options1 =
+ case {lists:member(optimize,Options),lists:member(ber_bin,Options)} of
+ {true,true} ->
+ [ber_bin_v2|Options--[ber_bin]];
+ _ -> Options
+ end,
+ case (catch input_file_type(File)) of
+ {single_file,PrefixedFile} ->
+ (catch compile1(PrefixedFile,Options1));
+ {multiple_files_file,SetBase,FileName} ->
+ FileList = get_file_list(FileName),
+ (catch compile_set(SetBase,filename:dirname(FileName),
+ FileList,Options1));
+ Err = {input_file_error,_Reason} ->
+ {error,Err}
+ end.
+
+
+compile1(File,Options) when list(Options) ->
+ io:format("Erlang ASN.1 version ~p compiling ~p ~n",[?vsn,File]),
+ io:format("Compiler Options: ~p~n",[Options]),
+ Ext = filename:extension(File),
+ Base = filename:basename(File,Ext),
+ OutFile = outfile(Base,"",Options),
+ DbFile = outfile(Base,"asn1db",Options),
+ Includes = [I || {i,I} <- Options],
+ EncodingRule = get_rule(Options),
+ create_ets_table(asn1_functab,[named_table]),
+ Continue1 = scan({true,true},File,Options),
+ Continue2 = parse(Continue1,File,Options),
+ Continue3 = check(Continue2,File,OutFile,Includes,EncodingRule,
+ DbFile,Options,[]),
+ Continue4 = generate(Continue3,OutFile,EncodingRule,Options),
+ delete_tables([asn1_functab]),
+ compile_erl(Continue4,OutFile,Options).
+
+%%****************************************************************************%%
+%% functions dealing with compiling of several input files to one output file %%
+%%****************************************************************************%%
+compile_set(SetBase,DirName,Files,Options) when list(hd(Files)),list(Options) ->
+ %% case when there are several input files in a list
+ io:format("Erlang ASN.1 version ~p compiling ~p ~n",[?vsn,Files]),
+ io:format("Compiler Options: ~p~n",[Options]),
+ OutFile = outfile(SetBase,"",Options),
+ DbFile = outfile(SetBase,"asn1db",Options),
+ Includes = [I || {i,I} <- Options],
+ EncodingRule = get_rule(Options),
+ create_ets_table(asn1_functab,[named_table]),
+ ScanRes = scan_set(DirName,Files,Options),
+ ParseRes = parse_set(ScanRes,Options),
+ Result =
+ case [X||X <- ParseRes,element(1,X)==true] of
+ [] -> %% all were false, time to quit
+ lists:map(fun(X)->element(2,X) end,ParseRes);
+ ParseRes -> %% all were true, continue with check
+ InputModules =
+ lists:map(
+ fun(F)->
+ E = filename:extension(F),
+ B = filename:basename(F,E),
+ if
+ list(B) -> list_to_atom(B);
+ true -> B
+ end
+ end,
+ Files),
+ check_set(ParseRes,SetBase,OutFile,Includes,
+ EncodingRule,DbFile,Options,InputModules);
+ Other ->
+ {error,{'unexpected error in scan/parse phase',
+ lists:map(fun(X)->element(3,X) end,Other)}}
+ end,
+ delete_tables([asn1_functab]),
+ Result.
+
+check_set(ParseRes,SetBase,OutFile,Includes,EncRule,DbFile,
+ Options,InputModules) ->
+ lists:foreach(fun({_T,M,File})->
+ cmp(M#module.name,File)
+ end,
+ ParseRes),
+ MergedModule = merge_modules(ParseRes,SetBase),
+ SetM = MergedModule#module{name=SetBase},
+ Continue1 = check({true,SetM},SetBase,OutFile,Includes,EncRule,DbFile,
+ Options,InputModules),
+ Continue2 = generate(Continue1,OutFile,EncRule,Options),
+
+ delete_tables([renamed_defs,original_imports,automatic_tags]),
+
+ compile_erl(Continue2,OutFile,Options).
+
+%% merge_modules/2 -> returns a module record where the typeorval lists are merged,
+%% the exports lists are merged, the imports lists are merged when the
+%% elements come from other modules than the merge set, the tagdefault
+%% field gets the shared value if all modules have same tagging scheme,
+%% otherwise a tagging_error exception is thrown,
+%% the extensiondefault ...(not handled yet).
+merge_modules(ParseRes,CommonName) ->
+ ModuleList = lists:map(fun(X)->element(2,X) end,ParseRes),
+ NewModuleList = remove_name_collisions(ModuleList),
+ case ets:info(renamed_defs,size) of
+ 0 -> ets:delete(renamed_defs);
+ _ -> ok
+ end,
+ save_imports(NewModuleList),
+% io:format("~p~n~p~n~p~n~n",[ets:lookup(original_imports,'M1'),ets:lookup(original_imports,'M2'),ets:tab2list(original_imports)]),
+ TypeOrVal = lists:append(lists:map(fun(X)->X#module.typeorval end,
+ NewModuleList)),
+ InputMNameList = lists:map(fun(X)->X#module.name end,
+ NewModuleList),
+ CExports = common_exports(NewModuleList),
+
+ ImportsModuleNameList = lists:map(fun(X)->
+ {X#module.imports,
+ X#module.name} end,
+ NewModuleList),
+ %% ImportsModuleNameList: [{Imports,ModuleName},...]
+ %% Imports is a tuple {imports,[#'SymbolsFromModule'{},...]}
+ CImports = common_imports(ImportsModuleNameList,InputMNameList),
+ TagDefault = check_tagdefault(NewModuleList),
+ #module{name=CommonName,tagdefault=TagDefault,exports=CExports,
+ imports=CImports,typeorval=TypeOrVal}.
+
+%% causes an exit if duplicate definition names exist in a module
+remove_name_collisions(Modules) ->
+ create_ets_table(renamed_defs,[named_table]),
+ %% Name duplicates in the same module is not allowed.
+ lists:foreach(fun exit_if_nameduplicate/1,Modules),
+ %% Then remove duplicates in different modules and return the
+ %% new list of modules.
+ remove_name_collisions2(Modules,[]).
+
+%% For each definition in the first module in the module list, find
+%% all definitions with the same name and rename both definitions in
+%% the first module and in the rest of the modules.
+remove_name_collisions2([M|Ms],Acc) ->
+ TypeOrVal = M#module.typeorval,
+ MName = M#module.name,
+ %% Test each name in TypeOrVal on all modules in Ms
+ {NewM,NewMs} = remove_name_collisions2(MName,TypeOrVal,Ms,[]),
+ remove_name_collisions2(NewMs,[M#module{typeorval=NewM}|Acc]);
+remove_name_collisions2([],Acc) ->
+ finished_warn_prints(),
+ Acc.
+
+%% For each definition in the list of defs, find definitions in the (rest of)
+%% modules that have the same name. If a duplicate is found, rename the def.
+%% Test each name in [T|Ts] on all modules in Ms
+remove_name_collisions2(ModName,[T|Ts],Ms,Acc) ->
+ Name = get_name_of_def(T),
+ case discover_dupl_in_mods(Name,T,Ms,[],?unique_names) of
+ {_,?unique_names} -> % there was no name collision
+ remove_name_collisions2(ModName,Ts,Ms,[T|Acc]);
+ {NewMs,?dupl_uniquedefs} -> % renamed defs in NewMs
+ %% rename T
+ NewT = set_name_of_def(ModName,Name,T), %rename def
+ warn_renamed_def(ModName,get_name_of_def(NewT),Name),
+ ets:insert(renamed_defs,{get_name_of_def(NewT),Name,ModName}),
+ remove_name_collisions2(ModName,Ts,NewMs,[NewT|Acc]);
+ {NewMs,?dupl_equaldefs} -> % name duplicates, but identical defs
+ %% keep name of T
+ warn_kept_def(ModName,Name),
+ remove_name_collisions2(ModName,Ts,NewMs,[T|Acc]);
+ {NewMs,?dupl_eqdefs_uniquedefs} ->
+ %% keep name of T, renamed defs in NewMs
+ warn_kept_def(ModName,Name),
+ remove_name_collisions2(ModName,Ts,NewMs,[T|Acc])
+ end;
+remove_name_collisions2(_,[],Ms,Acc) ->
+ {Acc,Ms}.
+
+%% Name is the name of a definition. If a definition with the same name
+%% is found in the modules Ms the definition will be renamed and returned.
+discover_dupl_in_mods(Name,Def,[M=#module{name=N,typeorval=TorV}|Ms],
+ Acc,AnyRenamed) ->
+ Fun = fun(T,RenamedOrDupl)->
+ case {get_name_of_def(T),compare_defs(Def,T)} of
+ {Name,not_equal} ->
+ %% rename def
+ NewT=set_name_of_def(N,Name,T),
+ warn_renamed_def(N,get_name_of_def(NewT),Name),
+ ets:insert(renamed_defs,{get_name_of_def(NewT),
+ Name,N}),
+ {NewT,?dupl_uniquedefs bor RenamedOrDupl};
+ {Name,equal} ->
+ %% delete def
+ warn_deleted_def(N,Name),
+ {[],?dupl_equaldefs bor RenamedOrDupl};
+ _ ->
+ {T,RenamedOrDupl}
+ end
+ end,
+ {NewTorV,NewAnyRenamed} = lists:mapfoldl(Fun,AnyRenamed,TorV),
+ %% have to flatten the NewTorV to remove any empty list elements
+ discover_dupl_in_mods(Name,Def,Ms,
+ [M#module{typeorval=lists:flatten(NewTorV)}|Acc],
+ NewAnyRenamed);
+discover_dupl_in_mods(_,_,[],Acc,AnyRenamed) ->
+ {Acc,AnyRenamed}.
+
+warn_renamed_def(ModName,NewName,OldName) ->
+ maybe_first_warn_print(),
+ io:format("NOTICE: The ASN.1 definition in module ~p with name ~p has been renamed in generated module. New name is ~p.~n",[ModName,OldName,NewName]).
+
+warn_deleted_def(ModName,DefName) ->
+ maybe_first_warn_print(),
+ io:format("NOTICE: The ASN.1 definition in module ~p with name ~p has been deleted in generated module.~n",[ModName,DefName]).
+
+warn_kept_def(ModName,DefName) ->
+ maybe_first_warn_print(),
+ io:format("NOTICE: The ASN.1 definition in module ~p with name ~p has kept its name due to equal definition as duplicate.~n",[ModName,DefName]).
+
+maybe_first_warn_print() ->
+ case get(warn_duplicate_defs) of
+ undefined ->
+ put(warn_duplicate_defs,true),
+ io:format("~nDue to multiple occurrences of a definition name in "
+ "multi-file compiled files:~n");
+ _ ->
+ ok
+ end.
+finished_warn_prints() ->
+ put(warn_duplicate_defs,undefined).
+
+
+exit_if_nameduplicate(#module{typeorval=TorV}) ->
+ exit_if_nameduplicate(TorV);
+exit_if_nameduplicate([]) ->
+ ok;
+exit_if_nameduplicate([Def|Rest]) ->
+ Name=get_name_of_def(Def),
+ exit_if_nameduplicate2(Name,Rest),
+ exit_if_nameduplicate(Rest).
+
+exit_if_nameduplicate2(Name,Rest) ->
+ Pred=fun(Def)->
+ case get_name_of_def(Def) of
+ Name -> true;
+ _ -> false
+ end
+ end,
+ case lists:any(Pred,Rest) of
+ true ->
+ throw({error,{"more than one definition with same name",Name}});
+ _ ->
+ ok
+ end.
+
+compare_defs(D1,D2) ->
+ compare_defs2(unset_pos(D1),unset_pos(D2)).
+compare_defs2(D,D) ->
+ equal;
+compare_defs2(_,_) ->
+ not_equal.
+
+unset_pos(Def) when record(Def,typedef) ->
+ Def#typedef{pos=undefined};
+unset_pos(Def) when record(Def,classdef) ->
+ Def#classdef{pos=undefined};
+unset_pos(Def) when record(Def,valuedef) ->
+ Def#valuedef{pos=undefined};
+unset_pos(Def) when record(Def,ptypedef) ->
+ Def#ptypedef{pos=undefined};
+unset_pos(Def) when record(Def,pvaluedef) ->
+ Def#pvaluedef{pos=undefined};
+unset_pos(Def) when record(Def,pvaluesetdef) ->
+ Def#pvaluesetdef{pos=undefined};
+unset_pos(Def) when record(Def,pobjectdef) ->
+ Def#pobjectdef{pos=undefined};
+unset_pos(Def) when record(Def,pobjectsetdef) ->
+ Def#pobjectsetdef{pos=undefined}.
+
+get_pos_of_def(#typedef{pos=Pos}) ->
+ Pos;
+get_pos_of_def(#classdef{pos=Pos}) ->
+ Pos;
+get_pos_of_def(#valuedef{pos=Pos}) ->
+ Pos;
+get_pos_of_def(#ptypedef{pos=Pos}) ->
+ Pos;
+get_pos_of_def(#pvaluedef{pos=Pos}) ->
+ Pos;
+get_pos_of_def(#pvaluesetdef{pos=Pos}) ->
+ Pos;
+get_pos_of_def(#pobjectdef{pos=Pos}) ->
+ Pos;
+get_pos_of_def(#pobjectsetdef{pos=Pos}) ->
+ Pos.
+
+
+get_name_of_def(#typedef{name=Name}) ->
+ Name;
+get_name_of_def(#classdef{name=Name}) ->
+ Name;
+get_name_of_def(#valuedef{name=Name}) ->
+ Name;
+get_name_of_def(#ptypedef{name=Name}) ->
+ Name;
+get_name_of_def(#pvaluedef{name=Name}) ->
+ Name;
+get_name_of_def(#pvaluesetdef{name=Name}) ->
+ Name;
+get_name_of_def(#pobjectdef{name=Name}) ->
+ Name;
+get_name_of_def(#pobjectsetdef{name=Name}) ->
+ Name.
+
+set_name_of_def(ModName,Name,OldDef) ->
+ NewName = list_to_atom(lists:concat([Name,ModName])),
+ case OldDef of
+ #typedef{} -> OldDef#typedef{name=NewName};
+ #classdef{} -> OldDef#classdef{name=NewName};
+ #valuedef{} -> OldDef#valuedef{name=NewName};
+ #ptypedef{} -> OldDef#ptypedef{name=NewName};
+ #pvaluedef{} -> OldDef#pvaluedef{name=NewName};
+ #pvaluesetdef{} -> OldDef#pvaluesetdef{name=NewName};
+ #pobjectdef{} -> OldDef#pobjectdef{name=NewName};
+ #pobjectsetdef{} -> OldDef#pobjectsetdef{name=NewName}
+ end.
+
+save_imports(ModuleList)->
+ Fun = fun(M) ->
+ case M#module.imports of
+ {_,[]} -> [];
+ {_,I} ->
+ {M#module.name,I}
+ end
+ end,
+ ImportsList = lists:map(Fun,ModuleList),
+ case lists:flatten(ImportsList) of
+ [] ->
+ ok;
+ ImportsList2 ->
+ create_ets_table(original_imports,[named_table]),
+ ets:insert(original_imports,ImportsList2)
+ end.
+
+
+common_exports(ModuleList) ->
+ %% if all modules exports 'all' then export 'all',
+ %% otherwise export each typeorval name
+ case lists:filter(fun(X)->
+ element(2,X#module.exports) /= all
+ end,
+ ModuleList) of
+ []->
+ {exports,all};
+ ModsWithExpList ->
+ CExports1 =
+ lists:append(lists:map(fun(X)->element(2,X#module.exports) end,
+ ModsWithExpList)),
+ CExports2 = export_all(lists:subtract(ModuleList,ModsWithExpList)),
+ {exports,CExports1++CExports2}
+ end.
+
+export_all([])->[];
+export_all(ModuleList) ->
+ ExpList =
+ lists:map(
+ fun(M)->
+ TorVL=M#module.typeorval,
+ MName = M#module.name,
+ lists:map(
+ fun(Def)->
+ case Def of
+ T when record(T,typedef)->
+ #'Externaltypereference'{pos=0,
+ module=MName,
+ type=T#typedef.name};
+ V when record(V,valuedef) ->
+ #'Externalvaluereference'{pos=0,
+ module=MName,
+ value=V#valuedef.name};
+ C when record(C,classdef) ->
+ #'Externaltypereference'{pos=0,
+ module=MName,
+ type=C#classdef.name};
+ P when record(P,ptypedef) ->
+ #'Externaltypereference'{pos=0,
+ module=MName,
+ type=P#ptypedef.name};
+ PV when record(PV,pvaluesetdef) ->
+ #'Externaltypereference'{pos=0,
+ module=MName,
+ type=PV#pvaluesetdef.name};
+ PO when record(PO,pobjectdef) ->
+ #'Externalvaluereference'{pos=0,
+ module=MName,
+ value=PO#pobjectdef.name}
+ end
+ end,
+ TorVL)
+ end,
+ ModuleList),
+ lists:append(ExpList).
+
+%% common_imports/2
+%% IList is a list of tuples, {Imports,MName}, where Imports is the imports of
+%% the module with name MName.
+%% InputMNameL holds the names of all merged modules.
+%% Returns an import tuple with a list of imports that are external the merged
+%% set of modules.
+common_imports(IList,InputMNameL) ->
+ SetExternalImportsList = remove_in_set_imports(IList,InputMNameL,[]),
+ {imports,remove_import_doubles(SetExternalImportsList)}.
+
+check_tagdefault(ModList) ->
+ case have_same_tagdefault(ModList) of
+ {true,TagDefault} -> TagDefault;
+ {false,TagDefault} ->
+ create_ets_table(automatic_tags,[named_table]),
+ save_automatic_tagged_types(ModList),
+ TagDefault
+ end.
+
+have_same_tagdefault([#module{tagdefault=T}|Ms]) ->
+ have_same_tagdefault(Ms,{true,T}).
+
+have_same_tagdefault([],TagDefault) ->
+ TagDefault;
+have_same_tagdefault([#module{tagdefault=T}|Ms],TDefault={_,T}) ->
+ have_same_tagdefault(Ms,TDefault);
+have_same_tagdefault([#module{tagdefault=T1}|Ms],{_,T2}) ->
+ have_same_tagdefault(Ms,{false,rank_tagdef([T1,T2])}).
+
+rank_tagdef(L) ->
+ case lists:member('EXPLICIT',L) of
+ true -> 'EXPLICIT';
+ _ -> 'IMPLICIT'
+ end.
+
+save_automatic_tagged_types([])->
+ done;
+save_automatic_tagged_types([#module{tagdefault='AUTOMATIC',
+ typeorval=TorV}|Ms]) ->
+ Fun =
+ fun(T) ->
+ ets:insert(automatic_tags,{get_name_of_def(T)})
+ end,
+ lists:foreach(Fun,TorV),
+ save_automatic_tagged_types(Ms);
+save_automatic_tagged_types([_M|Ms]) ->
+ save_automatic_tagged_types(Ms).
+
+%% remove_in_set_imports/3 :
+%% input: list with tuples of each module's imports and module name
+%% respectively.
+%% output: one list with the same format, but every import from a
+%% module in the input set (InputMNameL) is removed.
+remove_in_set_imports([{{imports,ImpL},_ModName}|Rest],InputMNameL,Acc) ->
+ NewImpL = remove_in_set_imports1(ImpL,InputMNameL,[]),
+ remove_in_set_imports(Rest,InputMNameL,NewImpL++Acc);
+remove_in_set_imports([],_,Acc) ->
+ lists:reverse(Acc).
+
+remove_in_set_imports1([I|Is],InputMNameL,Acc) ->
+ case I#'SymbolsFromModule'.module of
+ #'Externaltypereference'{type=MName} ->
+ case lists:member(MName,InputMNameL) of
+ true ->
+ remove_in_set_imports1(Is,InputMNameL,Acc);
+ false ->
+ remove_in_set_imports1(Is,InputMNameL,[I|Acc])
+ end;
+ _ ->
+ remove_in_set_imports1(Is,InputMNameL,[I|Acc])
+ end;
+remove_in_set_imports1([],_,Acc) ->
+ lists:reverse(Acc).
+
+remove_import_doubles([]) ->
+ [];
+%% If several modules in the merge set imports symbols from
+%% the same external module it might be doubled.
+%% ImportList has #'SymbolsFromModule' elements
+remove_import_doubles(ImportList) ->
+ MergedImportList =
+ merge_symbols_from_module(ImportList,[]),
+%% io:format("MergedImportList: ~p~n",[MergedImportList]),
+ delete_double_of_symbol(MergedImportList,[]).
+
+merge_symbols_from_module([Imp|Imps],Acc) ->
+ #'Externaltypereference'{type=ModName} = Imp#'SymbolsFromModule'.module,
+ IfromModName =
+ lists:filter(
+ fun(I)->
+ case I#'SymbolsFromModule'.module of
+ #'Externaltypereference'{type=ModName} ->
+ true;
+ #'Externalvaluereference'{value=ModName} ->
+ true;
+ _ -> false
+ end
+ end,
+ Imps),
+ NewImps = lists:subtract(Imps,IfromModName),
+%% io:format("Imp: ~p~nIfromModName: ~p~n",[Imp,IfromModName]),
+ NewImp =
+ Imp#'SymbolsFromModule'{
+ symbols = lists:append(
+ lists:map(fun(SL)->
+ SL#'SymbolsFromModule'.symbols
+ end,[Imp|IfromModName]))},
+ merge_symbols_from_module(NewImps,[NewImp|Acc]);
+merge_symbols_from_module([],Acc) ->
+ lists:reverse(Acc).
+
+delete_double_of_symbol([I|Is],Acc) ->
+ SymL=I#'SymbolsFromModule'.symbols,
+ NewSymL = delete_double_of_symbol1(SymL,[]),
+ delete_double_of_symbol(Is,[I#'SymbolsFromModule'{symbols=NewSymL}|Acc]);
+delete_double_of_symbol([],Acc) ->
+ Acc.
+
+delete_double_of_symbol1([TRef=#'Externaltypereference'{type=TrefName}|Rest],Acc)->
+ NewRest =
+ lists:filter(fun(S)->
+ case S of
+ #'Externaltypereference'{type=TrefName}->
+ false;
+ _ -> true
+ end
+ end,
+ Rest),
+ delete_double_of_symbol1(NewRest,[TRef|Acc]);
+delete_double_of_symbol1([VRef=#'Externalvaluereference'{value=VName}|Rest],Acc) ->
+ NewRest =
+ lists:filter(fun(S)->
+ case S of
+ #'Externalvaluereference'{value=VName}->
+ false;
+ _ -> true
+ end
+ end,
+ Rest),
+ delete_double_of_symbol1(NewRest,[VRef|Acc]);
+delete_double_of_symbol1([TRef={#'Externaltypereference'{type=MRef},
+ #'Externaltypereference'{type=TRef}}|Rest],
+ Acc)->
+ NewRest =
+ lists:filter(
+ fun(S)->
+ case S of
+ {#'Externaltypereference'{type=MRef},
+ #'Externaltypereference'{type=TRef}}->
+ false;
+ _ -> true
+ end
+ end,
+ Rest),
+ delete_double_of_symbol1(NewRest,[TRef|Acc]);
+delete_double_of_symbol1([],Acc) ->
+ Acc.
+
+
+scan_set(DirName,Files,Options) ->
+ lists:map(
+ fun(F)->
+ case scan({true,true},filename:join([DirName,F]),Options) of
+ {false,{error,Reason}} ->
+ throw({error,{'scan error in file:',F,Reason}});
+ {TrueOrFalse,Res} ->
+ {TrueOrFalse,Res,F}
+ end
+ end,
+ Files).
+
+parse_set(ScanRes,Options) ->
+ lists:map(
+ fun({TorF,Toks,F})->
+ case parse({TorF,Toks},F,Options) of
+ {false,{error,Reason}} ->
+ throw({error,{'parse error in file:',F,Reason}});
+ {TrueOrFalse,Res} ->
+ {TrueOrFalse,Res,F}
+ end
+ end,
+ ScanRes).
+
+
+%%***********************************
+
+
+scan({true,_}, File,Options) ->
+ case asn1ct_tok:file(File) of
+ {error,Reason} ->
+ io:format("~p~n",[Reason]),
+ {false,{error,Reason}};
+ Tokens ->
+ case lists:member(ss,Options) of
+ true -> % we terminate after scan
+ {false,Tokens};
+ false -> % continue with next pass
+ {true,Tokens}
+ end
+ end;
+scan({false,Result},_,_) ->
+ Result.
+
+
+parse({true,Tokens},File,Options) ->
+ %Presult = asn1ct_parser2:parse(Tokens),
+ %%case lists:member(p1,Options) of
+ %% true ->
+ %% asn1ct_parser:parse(Tokens);
+ %% _ ->
+ %% asn1ct_parser2:parse(Tokens)
+ %% end,
+ case catch asn1ct_parser2:parse(Tokens) of
+ {error,{{Line,_Mod,Message},_TokTup}} ->
+ if
+ integer(Line) ->
+ BaseName = filename:basename(File),
+ io:format("syntax error at line ~p in module ~s:~n",
+ [Line,BaseName]);
+ true ->
+ io:format("syntax error in module ~p:~n",[File])
+ end,
+ print_error_message(Message),
+ {false,{error,Message}};
+ {error,{Line,_Mod,[Message,Token]}} ->
+ io:format("syntax error: ~p ~p at line ~p~n",
+ [Message,Token,Line]),
+ {false,{error,{Line,[Message,Token]}}};
+ {ok,M} ->
+ case lists:member(sp,Options) of
+ true -> % terminate after parse
+ {false,M};
+ false -> % continue with next pass
+ {true,M}
+ end;
+ OtherError ->
+ io:format("~p~n",[OtherError])
+ end;
+parse({false,Tokens},_,_) ->
+ {false,Tokens}.
+
+check({true,M},File,OutFile,Includes,EncodingRule,DbFile,Options,InputMods) ->
+ cmp(M#module.name,File),
+ start(["."|Includes]),
+ case asn1ct_check:storeindb(M) of
+ ok ->
+ Module = asn1_db:dbget(M#module.name,'MODULE'),
+ State = #state{mname=Module#module.name,
+ module=Module#module{typeorval=[]},
+ erule=EncodingRule,
+ inputmodules=InputMods,
+ options=Options},
+ Check = asn1ct_check:check(State,Module#module.typeorval),
+ case {Check,lists:member(abs,Options)} of
+ {{error,Reason},_} ->
+ {false,{error,Reason}};
+ {{ok,NewTypeOrVal,_},true} ->
+ NewM = Module#module{typeorval=NewTypeOrVal},
+ asn1_db:dbput(NewM#module.name,'MODULE',NewM),
+ pretty2(M#module.name,lists:concat([OutFile,".abs"])),
+ {false,ok};
+ {{ok,NewTypeOrVal,GenTypeOrVal},_} ->
+ NewM = Module#module{typeorval=NewTypeOrVal},
+ asn1_db:dbput(NewM#module.name,'MODULE',NewM),
+ asn1_db:dbsave(DbFile,M#module.name),
+ io:format("--~p--~n",[{generated,DbFile}]),
+ {true,{M,NewM,GenTypeOrVal}}
+ end
+ end;
+check({false,M},_,_,_,_,_,_,_) ->
+ {false,M}.
+
+generate({true,{M,_Module,GenTOrV}},OutFile,EncodingRule,Options) ->
+ debug_on(Options),
+ case lists:member(compact_bit_string,Options) of
+ true -> put(compact_bit_string,true);
+ _ -> ok
+ end,
+ put(encoding_options,Options),
+ create_ets_table(check_functions,[named_table]),
+
+ %% create decoding function names and taglists for partial decode
+ %% For the time being leave errors unnoticed !!!!!!!!!
+% io:format("Options: ~p~n",[Options]),
+ case catch specialized_decode_prepare(EncodingRule,M,GenTOrV,Options) of
+ {error, enoent} -> ok;
+	{error, Reason} -> io:format("WARNING: Error in configuration "
+ "file: ~n~p~n",[Reason]);
+ {'EXIT',Reason} -> io:format("WARNING: Internal error when "
+				      "analyzing configuration "
+ "file: ~n~p~n",[Reason]);
+ _ -> ok
+ end,
+
+ asn1ct_gen:pgen(OutFile,EncodingRule,M#module.name,GenTOrV),
+ debug_off(Options),
+ put(compact_bit_string,false),
+ erase(encoding_options),
+ erase(tlv_format), % used in ber_bin, optimize
+ erase(class_default_type),% used in ber_bin, optimize
+ ets:delete(check_functions),
+ case lists:member(sg,Options) of
+ true -> % terminate here , with .erl file generated
+ {false,true};
+ false ->
+ {true,true}
+ end;
+generate({false,M},_,_,_) ->
+ {false,M}.
+
+compile_erl({true,_},OutFile,Options) ->
+ erl_compile(OutFile,Options);
+compile_erl({false,true},_,_) ->
+ ok;
+compile_erl({false,Result},_,_) ->
+ Result.
+
+input_file_type([]) ->
+ {empty_name,[]};
+input_file_type(File) ->
+ case filename:extension(File) of
+ [] ->
+ case file:read_file_info(lists:concat([File,".asn1"])) of
+ {ok,_FileInfo} ->
+ {single_file, lists:concat([File,".asn1"])};
+ _Error ->
+ case file:read_file_info(lists:concat([File,".asn"])) of
+ {ok,_FileInfo} ->
+ {single_file, lists:concat([File,".asn"])};
+ _Error ->
+ {single_file, lists:concat([File,".py"])}
+ end
+ end;
+ ".asn1config" ->
+ case read_config_file(File,asn1_module) of
+ {ok,Asn1Module} ->
+ put(asn1_config_file,File),
+ input_file_type(Asn1Module);
+ Error ->
+ Error
+ end;
+ Asn1PFix ->
+ Base = filename:basename(File,Asn1PFix),
+ case filename:extension(Base) of
+ [] ->
+ {single_file,File};
+ SetPFix when (SetPFix == ".set") ->
+ {multiple_files_file,
+ filename:basename(Base,SetPFix),
+ File};
+ _Error ->
+ throw({input_file_error,{'Bad input file',File}})
+ end
+ end.
+
+get_file_list(File) ->
+ case file:open(File, [read]) of
+ {error,Reason} ->
+ {error,{File,file:format_error(Reason)}};
+ {ok,Stream} ->
+ get_file_list1(Stream,[])
+ end.
+
+get_file_list1(Stream,Acc) ->
+ Ret = io:get_line(Stream,''),
+ case Ret of
+ eof ->
+ file:close(Stream),
+ lists:reverse(Acc);
+ FileName ->
+ PrefixedNameList =
+ case (catch input_file_type(lists:delete($\n,FileName))) of
+ {empty_name,[]} -> [];
+ {single_file,Name} -> [Name];
+ {multiple_files_file,Name} ->
+ get_file_list(Name);
+ Err = {input_file_error,_Reason} ->
+ throw(Err)
+ end,
+ get_file_list1(Stream,PrefixedNameList++Acc)
+ end.
+
+get_rule(Options) ->
+ case [Rule ||Rule <-[per,ber,ber_bin,ber_bin_v2,per_bin],
+ Opt <- Options,
+ Rule==Opt] of
+ [Rule] ->
+ Rule;
+ [Rule|_] ->
+ Rule;
+ [] ->
+ ber
+ end.
+
+erl_compile(OutFile,Options) ->
+% io:format("Options:~n~p~n",[Options]),
+ case lists:member(noobj,Options) of
+ true ->
+ ok;
+ _ ->
+ ErlOptions = remove_asn_flags(Options),
+ case c:c(OutFile,ErlOptions) of
+ {ok,_Module} ->
+ ok;
+ _ ->
+ {error,'no_compilation'}
+ end
+ end.
+
+remove_asn_flags(Options) ->
+ [X || X <- Options,
+ X /= get_rule(Options),
+ X /= optimize,
+ X /= compact_bit_string,
+ X /= debug,
+ X /= keyed_list].
+
+debug_on(Options) ->
+ case lists:member(debug,Options) of
+ true ->
+ put(asndebug,true);
+ _ ->
+ true
+ end,
+ case lists:member(keyed_list,Options) of
+ true ->
+ put(asn_keyed_list,true);
+ _ ->
+ true
+ end.
+
+
+debug_off(_Options) ->
+ erase(asndebug),
+ erase(asn_keyed_list).
+
+
+outfile(Base, Ext, Opts) when atom(Ext) ->
+ outfile(Base, atom_to_list(Ext), Opts);
+outfile(Base, Ext, Opts) ->
+ Obase = case lists:keysearch(outdir, 1, Opts) of
+ {value, {outdir, Odir}} -> filename:join(Odir, Base);
+ _NotFound -> Base % Not found or bad format
+ end,
+ case Ext of
+ [] ->
+ Obase;
+ _ ->
+ Obase++"."++Ext
+ end.
+
+%% compile(AbsFileName, Options)
+%% Compile entry point for erl_compile.
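+%%
+%% Illustrative only (the module name "MyModule" below is a
+%% placeholder): erl_compile calls one of the wrappers that follow,
+%% which append the proper extension and convert the generic #options{}
+%% record before handing over to compile/2, roughly
+%%
+%%   compile_asn1("MyModule", "MyModule", Opts)
+%%     -> compile("MyModule.asn1", "MyModule", Opts)
+%%     -> compile("MyModule.asn1", make_erl_options(Opts))
+%%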
+
+compile_asn(File,OutFile,Options) ->
+ compile(lists:concat([File,".asn"]),OutFile,Options).
+
+compile_asn1(File,OutFile,Options) ->
+ compile(lists:concat([File,".asn1"]),OutFile,Options).
+
+compile_py(File,OutFile,Options) ->
+ compile(lists:concat([File,".py"]),OutFile,Options).
+
+compile(File, _OutFile, Options) ->
+ case catch compile(File, make_erl_options(Options)) of
+ Exit = {'EXIT',_Reason} ->
+ io:format("~p~n~s~n",[Exit,"error"]),
+ error;
+ {error,_Reason} ->
+	    %% this case occurs due to an error in asn1ct_parser2 or asn1ct_check
+%% io:format("~p~n",[_Reason]),
+%% io:format("~p~n~s~n",[_Reason,"error"]),
+ error;
+ ok ->
+ io:format("ok~n"),
+ ok;
+ ParseRes when tuple(ParseRes) ->
+ io:format("~p~n",[ParseRes]),
+ ok;
+ ScanRes when list(ScanRes) ->
+ io:format("~p~n",[ScanRes]),
+ ok;
+ Unknown ->
+ io:format("~p~n~s~n",[Unknown,"error"]),
+ error
+ end.
+
+%% Converts generic compiler options to specific options.
+
+make_erl_options(Opts) ->
+
+ %% This way of extracting will work even if the record passed
+ %% has more fields than known during compilation.
+
+ Includes = Opts#options.includes,
+ Defines = Opts#options.defines,
+ Outdir = Opts#options.outdir,
+%% Warning = Opts#options.warning,
+ Verbose = Opts#options.verbose,
+ Specific = Opts#options.specific,
+ Optimize = Opts#options.optimize,
+ OutputType = Opts#options.output_type,
+ Cwd = Opts#options.cwd,
+
+ Options =
+ case Verbose of
+ true -> [verbose];
+ false -> []
+ end ++
+%%% case Warning of
+%%% 0 -> [];
+%%% _ -> [report_warnings]
+%%% end ++
+ [] ++
+ case Optimize of
+ 1 -> [optimize];
+ 999 -> [];
+ _ -> [{optimize,Optimize}]
+ end ++
+ lists:map(
+ fun ({Name, Value}) ->
+ {d, Name, Value};
+ (Name) ->
+ {d, Name}
+ end,
+ Defines) ++
+ case OutputType of
+ undefined -> [ber]; % temporary default (ber when it's ready)
+ ber -> [ber];
+ ber_bin -> [ber_bin];
+ ber_bin_v2 -> [ber_bin_v2];
+ per -> [per];
+ per_bin -> [per_bin]
+ end,
+
+ Options++[report_errors, {cwd, Cwd}, {outdir, Outdir}|
+ lists:map(fun(Dir) -> {i, Dir} end, Includes)]++Specific.
+
+pretty2(Module,AbsFile) ->
+ start(),
+ {ok,F} = file:open(AbsFile, [write]),
+ M = asn1_db:dbget(Module,'MODULE'),
+ io:format(F,"%%%%%%%%%%%%%%%%%%% ~p %%%%%%%%%%%%%%%%%%%~n",[Module]),
+ io:format(F,"~s\n",[asn1ct_pretty_format:term(M#module.defid)]),
+ io:format(F,"~s\n",[asn1ct_pretty_format:term(M#module.tagdefault)]),
+ io:format(F,"~s\n",[asn1ct_pretty_format:term(M#module.exports)]),
+ io:format(F,"~s\n",[asn1ct_pretty_format:term(M#module.imports)]),
+ io:format(F,"~s\n\n",[asn1ct_pretty_format:term(M#module.extensiondefault)]),
+
+ {Types,Values,ParameterizedTypes,Classes,Objects,ObjectSets} = M#module.typeorval,
+ io:format(F,"%%%%%%%%%%%%%%%%%%% TYPES in ~p %%%%%%%%%%%%%%%%%%%~n",[Module]),
+ lists:foreach(fun(T)-> io:format(F,"~s\n",
+ [asn1ct_pretty_format:term(asn1_db:dbget(Module,T))])
+ end,Types),
+ io:format(F,"%%%%%%%%%%%%%%%%%%% VALUES in ~p %%%%%%%%%%%%%%%%%%%~n",[Module]),
+ lists:foreach(fun(T)-> io:format(F,"~s\n",
+ [asn1ct_pretty_format:term(asn1_db:dbget(Module,T))])
+ end,Values),
+ io:format(F,"%%%%%%%%%%%%%%%%%%% Parameterized Types in ~p %%%%%%%%%%%%%%%%%%%~n",[Module]),
+ lists:foreach(fun(T)-> io:format(F,"~s\n",
+ [asn1ct_pretty_format:term(asn1_db:dbget(Module,T))])
+ end,ParameterizedTypes),
+ io:format(F,"%%%%%%%%%%%%%%%%%%% Classes in ~p %%%%%%%%%%%%%%%%%%%~n",[Module]),
+ lists:foreach(fun(T)-> io:format(F,"~s\n",
+ [asn1ct_pretty_format:term(asn1_db:dbget(Module,T))])
+ end,Classes),
+ io:format(F,"%%%%%%%%%%%%%%%%%%% Objects in ~p %%%%%%%%%%%%%%%%%%%~n",[Module]),
+ lists:foreach(fun(T)-> io:format(F,"~s\n",
+ [asn1ct_pretty_format:term(asn1_db:dbget(Module,T))])
+ end,Objects),
+ io:format(F,"%%%%%%%%%%%%%%%%%%% Object Sets in ~p %%%%%%%%%%%%%%%%%%%~n",[Module]),
+ lists:foreach(fun(T)-> io:format(F,"~s\n",
+ [asn1ct_pretty_format:term(asn1_db:dbget(Module,T))])
+ end,ObjectSets).
+start() ->
+ Includes = ["."],
+ start(Includes).
+
+
+start(Includes) when list(Includes) ->
+ asn1_db:dbstart(Includes).
+
+stop() ->
+ save(),
+ asn1_db:stop_server(ns),
+ asn1_db:stop_server(rand),
+ stopped.
+
+save() ->
+ asn1_db:dbstop().
+
+%%clear() ->
+%% asn1_db:dbclear().
+
+encode(Module,Term) ->
+ asn1rt:encode(Module,Term).
+
+encode(Module,Type,Term) when list(Module) ->
+ asn1rt:encode(list_to_atom(Module),Type,Term);
+encode(Module,Type,Term) ->
+ asn1rt:encode(Module,Type,Term).
+
+decode(Module,Type,Bytes) when list(Module) ->
+ asn1rt:decode(list_to_atom(Module),Type,Bytes);
+decode(Module,Type,Bytes) ->
+ asn1rt:decode(Module,Type,Bytes).
+
+
+test(Module) ->
+ start(),
+ M = asn1_db:dbget(Module,'MODULE'),
+ {Types,_Values,_Ptypes,_Classes,_Objects,_ObjectSets} = M#module.typeorval,
+ test_each(Module,Types).
+
+test_each(Module,[Type | Rest]) ->
+ case test(Module,Type) of
+ {ok,_Result} ->
+ test_each(Module,Rest);
+ Error ->
+ Error
+ end;
+test_each(_,[]) ->
+ ok.
+
+test(Module,Type) ->
+ io:format("~p:~p~n",[Module,Type]),
+ case (catch value(Module,Type)) of
+ {ok,Val} ->
+ %% io:format("asn1ct:test/2: ~w~n",[Val]),
+ test(Module,Type,Val);
+ {'EXIT',Reason} ->
+ {error,{asn1,{value,Reason}}}
+ end.
+
+
+test(Module,Type,Value) ->
+ case catch encode(Module,Type,Value) of
+ {ok,Bytes} ->
+ %% io:format("test 1: ~p~n",[{Bytes}]),
+ M = if
+ list(Module) ->
+ list_to_atom(Module);
+ true ->
+ Module
+ end,
+ NewBytes =
+ case M:encoding_rule() of
+ ber ->
+ lists:flatten(Bytes);
+ ber_bin when binary(Bytes) ->
+ Bytes;
+ ber_bin ->
+ list_to_binary(Bytes);
+ ber_bin_v2 when binary(Bytes) ->
+ Bytes;
+ ber_bin_v2 ->
+ list_to_binary(Bytes);
+ per ->
+ lists:flatten(Bytes);
+ per_bin when binary(Bytes) ->
+ Bytes;
+ per_bin ->
+ list_to_binary(Bytes)
+ end,
+ case decode(Module,Type,NewBytes) of
+ {ok,Value} ->
+ {ok,{Module,Type,Value}};
+ {ok,Res} ->
+ {error,{asn1,{encode_decode_mismatch,
+ {{Module,Type,Value},Res}}}};
+ Error ->
+ {error,{asn1,{{decode,
+ {Module,Type,Value},Error}}}}
+ end;
+ Error ->
+ {error,{asn1,{encode,{{Module,Type,Value},Error}}}}
+ end.
+
+value(Module) ->
+ start(),
+ M = asn1_db:dbget(Module,'MODULE'),
+ {Types,_Values,_Ptypes,_Classes,_Objects,_ObjectSets} = M#module.typeorval,
+ lists:map(fun(A) ->value(Module,A) end,Types).
+
+value(Module,Type) ->
+ start(),
+ case catch asn1ct_value:get_type(Module,Type,no) of
+ {error,Reason} ->
+ {error,Reason};
+ {'EXIT',Reason} ->
+ {error,Reason};
+ Result ->
+ {ok,Result}
+ end.
+
+cmp(Module,InFile) ->
+ Base = filename:basename(InFile),
+ Dir = filename:dirname(InFile),
+ Ext = filename:extension(Base),
+ Finfo = file:read_file_info(InFile),
+ Minfo = file:read_file_info(filename:join(Dir,lists:concat([Module,Ext]))),
+ case Finfo of
+ Minfo ->
+ ok;
+ _ ->
+ io:format("asn1error: Modulename and filename must be equal~n",[]),
+ throw(error)
+ end.
+
+vsn() ->
+ ?vsn.
+
+print_error_message([got,H|T]) when list(H) ->
+ io:format(" got:"),
+ print_listing(H,"and"),
+ print_error_message(T);
+print_error_message([expected,H|T]) when list(H) ->
+ io:format(" expected one of:"),
+ print_listing(H,"or"),
+ print_error_message(T);
+print_error_message([H|T]) ->
+ io:format(" ~p",[H]),
+ print_error_message(T);
+print_error_message([]) ->
+ io:format("~n").
+
+print_listing([H1,H2|[]],AndOr) ->
+ io:format(" ~p ~s ~p",[H1,AndOr,H2]);
+print_listing([H1,H2|T],AndOr) ->
+ io:format(" ~p,",[H1]),
+ print_listing([H2|T],AndOr);
+print_listing([H],_AndOr) ->
+ io:format(" ~p",[H]);
+print_listing([],_) ->
+ ok.
+
+
+%% functions to administer ets tables
+
+%% Always creates a new table
+create_ets_table(Name,Options) when atom(Name) ->
+ case ets:info(Name) of
+ undefined ->
+ ets:new(Name,Options);
+ _ ->
+ ets:delete(Name),
+ ets:new(Name,Options)
+ end.
+
+%% Creates a new ets table only if no table exists
+create_if_no_table(Name,Options) ->
+ case ets:info(Name) of
+ undefined ->
+ %% create a new table
+ create_ets_table(Name,Options);
+ _ -> ok
+ end.
+
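+%% To make the difference concrete (illustrative only): calling
+%% create_ets_table(Tab,[named_table]) twice always yields an empty
+%% table, since any existing table is deleted and recreated, whereas
+%% create_if_no_table/2 leaves an existing table and its contents alone.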
+
+delete_tables([Table|Ts]) ->
+ case ets:info(Table) of
+ undefined -> ok;
+ _ -> ets:delete(Table)
+ end,
+ delete_tables(Ts);
+delete_tables([]) ->
+ ok.
+
+
+specialized_decode_prepare(Erule,M,TsAndVs,Options) ->
+% Asn1confMember =
+% fun([{asn1config,File}|_],_) ->
+% {true,File};
+% ([],_) -> false;
+% ([_H|T],Fun) ->
+% Fun(T,Fun)
+% end,
+% case Asn1confMember(Options,Asn1confMember) of
+% {true,File} ->
+ case lists:member(asn1config,Options) of
+ true ->
+ partial_decode_prepare(Erule,M,TsAndVs,Options);
+ _ ->
+ ok
+ end.
+%% Reads the configuration file if it exists and stores information
+%% about partial decode and incomplete decode
+partial_decode_prepare(ber_bin_v2,M,TsAndVs,Options) when tuple(TsAndVs) ->
+    %% read the configuration file
+% Types = element(1,TsAndVs),
+ CfgList = read_config_file(M#module.name),
+ SelectedDecode = get_config_info(CfgList,partial_decode),
+ ExclusiveDecode = get_config_info(CfgList,exclusive_decode),
+ CommandList =
+ create_partial_decode_gen_info(M#module.name,SelectedDecode),
+% io:format("partial_decode = ~p~n",[CommandList]),
+
+ save_config(partial_decode,CommandList),
+ CommandList2 =
+ create_partial_inc_decode_gen_info(M#module.name,ExclusiveDecode),
+% io:format("partial_incomplete_decode = ~p~n",[CommandList2]),
+ Part_inc_tlv_tags = tag_format(ber_bin_v2,Options,CommandList2),
+% io:format("partial_incomplete_decode: tlv_tags = ~p~n",[Part_inc_tlv_tags]),
+ save_config(partial_incomplete_decode,Part_inc_tlv_tags),
+ save_gen_state(ExclusiveDecode,Part_inc_tlv_tags);
+partial_decode_prepare(_,_,_,_) ->
+ ok.
+
+
+
+%% create_partial_inc_decode_gen_info/2
+%%
+%% Creates a list of tags out of the information in TypeNameList that
+%% tells which value will be incompletely decoded, i.e. each end
+%% component/type in TypeNameList. The significant types/components in
+%% the path from the top type must be specified in the
+%% TypeNameList. Significant elements are all constructed types that
+%% branch the path to the leaf, and the leaf itself.
+%%
+%% Returns a list of elements, where an element may be one of
+%% mandatory|[opt,Tag]|[bin,Tag]. mandatory corresponds to a mandatory
+%% element that shall be decoded as usual. [opt,Tag] matches an
+%% OPTIONAL or DEFAULT element that shall be decoded as
+%% usual. [bin,Tag] corresponds to an element, mandatory, OPTIONAL or
+%% DEFAULT, that shall be left encoded (incompletely decoded).
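+%%
+%% As a rough sketch (component names and tags are hypothetical), a
+%% three-component SEQUENCE where the second component is OPTIONAL and
+%% the third is to be left encoded could yield the command list
+%%
+%%   [mandatory, [opt,Tag2], [bin,Tag3]]
+%%
+%% where Tag2 and Tag3 are the BER-encoded tags of those components.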
+create_partial_inc_decode_gen_info(ModName,{Mod,[{Name,L}|Ls]}) when list(L) ->
+ TopTypeName = partial_inc_dec_toptype(L),
+ [{Name,TopTypeName,
+ create_partial_inc_decode_gen_info1(ModName,TopTypeName,{Mod,L})}|
+ create_partial_inc_decode_gen_info(ModName,{Mod,Ls})];
+create_partial_inc_decode_gen_info(_,{_,[]}) ->
+ [];
+create_partial_inc_decode_gen_info(_,[]) ->
+ [].
+
+create_partial_inc_decode_gen_info1(ModName,TopTypeName,{ModName,
+ [_TopType|Rest]}) ->
+ case asn1_db:dbget(ModName,TopTypeName) of
+ #typedef{typespec=TS} ->
+ TagCommand = get_tag_command(TS,?MANDATORY,mandatory),
+ create_pdec_inc_command(ModName,get_components(TS#type.def),
+ Rest,[TagCommand]);
+ _ ->
+ throw({error,{"wrong type list in asn1 config file",
+ TopTypeName}})
+ end;
+create_partial_inc_decode_gen_info1(M1,_,{M2,_}) when M1 /= M2 ->
+ throw({error,{"wrong module name in asn1 config file",
+ M2}});
+create_partial_inc_decode_gen_info1(_,_,TNL) ->
+ throw({error,{"wrong type list in asn1 config file",
+ TNL}}).
+
+%%
+%% Only when there is a 'ComponentType' may the config data C1 be a
+%% list, i.e. where the incomplete decode branches. Thus, C1 may be a
+%% list, a "binary tuple", a "parts tuple" or an atom. The second
+%% element of a binary tuple and of a parts tuple is an atom.
+create_pdec_inc_command(_ModName,_,[],Acc) ->
+ lists:reverse(Acc);
+create_pdec_inc_command(ModName,{Comps1,Comps2},TNL,Acc)
+ when list(Comps1),list(Comps2) ->
+ create_pdec_inc_command(ModName,Comps1 ++ Comps2,TNL,Acc);
+create_pdec_inc_command(ModN,Clist,[CL|_Rest],Acc) when list(CL) ->
+ create_pdec_inc_command(ModN,Clist,CL,Acc);
+create_pdec_inc_command(ModName,
+ CList=[#'ComponentType'{name=Name,typespec=TS,
+ prop=Prop}|Comps],
+ TNL=[C1|Cs],Acc) ->
+ case C1 of
+% Name ->
+% %% In this case C1 is an atom
+% TagCommand = get_tag_command(TS,?MANDATORY,Prop),
+% create_pdec_inc_command(ModName,get_components(TS#type.def),Cs,[TagCommand|Acc]);
+ {Name,undecoded} ->
+ TagCommand = get_tag_command(TS,?UNDECODED,Prop),
+ create_pdec_inc_command(ModName,Comps,Cs,[TagCommand|Acc]);
+ {Name,parts} ->
+ TagCommand = get_tag_command(TS,?PARTS,Prop),
+ create_pdec_inc_command(ModName,Comps,Cs,[TagCommand|Acc]);
+ L when list(L) ->
+ %% This case is only possible as the first element after
+	    %% the top type element, when the top type is a SEQUENCE or SET.
+ %% Follow each element in L. Must note every tag on the
+ %% way until the last command is reached, but it ought to
+ %% be enough to have a "complete" or "complete optional"
+ %% command for each component that is not specified in the
+ %% config file. Then in the TLV decode the components with
+ %% a "complete" command will be decoded by an ordinary TLV
+ %% decode.
+ create_pdec_inc_command(ModName,CList,L,Acc);
+ {Name,RestPartsList} when list(RestPartsList) ->
+ %% Same as previous, but this may occur at any place in
+ %% the structure. The previous is only possible as the
+ %% second element.
+ case get_tag_command(TS,?MANDATORY,Prop) of
+ ?MANDATORY ->
+ InnerDirectives=
+ create_pdec_inc_command(ModName,TS#type.def,
+ RestPartsList,[]),
+ create_pdec_inc_command(ModName,Comps,Cs,
+ [[?MANDATORY,InnerDirectives]|Acc]);
+% create_pdec_inc_command(ModName,Comps,Cs,
+% [InnerDirectives,?MANDATORY|Acc]);
+ [Opt,EncTag] ->
+ InnerDirectives =
+ create_pdec_inc_command(ModName,TS#type.def,
+ RestPartsList,[]),
+ create_pdec_inc_command(ModName,Comps,Cs,
+ [[Opt,EncTag,InnerDirectives]|Acc])
+ end;
+% create_pdec_inc_command(ModName,CList,RestPartsList,Acc);
+%% create_pdec_inc_command(ModName,TS#type.def,RestPartsList,Acc);
+ _ -> %% this component may not be in the config list
+ TagCommand = get_tag_command(TS,?MANDATORY,Prop),
+ create_pdec_inc_command(ModName,Comps,TNL,[TagCommand|Acc])
+ end;
+create_pdec_inc_command(ModName,
+ {'CHOICE',[#'ComponentType'{name=C1,
+ typespec=TS,
+ prop=Prop}|Comps]},
+ [{C1,Directive}|Rest],Acc) ->
+ case Directive of
+ List when list(List) ->
+ [Command,Tag] = get_tag_command(TS,?ALTERNATIVE,Prop),
+ CompAcc = create_pdec_inc_command(ModName,TS#type.def,List,[]),
+ create_pdec_inc_command(ModName,{'CHOICE',Comps},Rest,
+ [[Command,Tag,CompAcc]|Acc]);
+ undecoded ->
+ TagCommand = get_tag_command(TS,?ALTERNATIVE_UNDECODED,Prop),
+ create_pdec_inc_command(ModName,{'CHOICE',Comps},Rest,
+ [TagCommand|Acc]);
+ parts ->
+ TagCommand = get_tag_command(TS,?ALTERNATIVE_PARTS,Prop),
+ create_pdec_inc_command(ModName,{'CHOICE',Comps},Rest,
+ [TagCommand|Acc])
+ end;
+create_pdec_inc_command(ModName,
+ {'CHOICE',[#'ComponentType'{typespec=TS,
+ prop=Prop}|Comps]},
+ TNL,Acc) ->
+ TagCommand = get_tag_command(TS,?ALTERNATIVE,Prop),
+ create_pdec_inc_command(ModName,{'CHOICE',Comps},TNL,[TagCommand|Acc]);
+create_pdec_inc_command(M,{'CHOICE',{Cs1,Cs2}},TNL,Acc)
+ when list(Cs1),list(Cs2) ->
+ create_pdec_inc_command(M,{'CHOICE',Cs1 ++ Cs2},TNL,Acc);
+create_pdec_inc_command(ModName,#'Externaltypereference'{module=M,type=Name},
+ TNL,Acc) ->
+ #type{def=Def} = get_referenced_type(M,Name),
+ create_pdec_inc_command(ModName,get_components(Def),TNL,Acc);
+create_pdec_inc_command(_,_,TNL,_) ->
+ throw({error,{"unexpected error when creating partial "
+ "decode command",TNL}}).
+
+partial_inc_dec_toptype([T|_]) when atom(T) ->
+ T;
+partial_inc_dec_toptype([{T,_}|_]) when atom(T) ->
+ T;
+partial_inc_dec_toptype([L|_]) when list(L) ->
+ partial_inc_dec_toptype(L);
+partial_inc_dec_toptype(_) ->
+ throw({error,{"no top type found for partial incomplete decode"}}).
+
+
+%% Creates a list of tags out of the information in TypeList and Types
+%% that tells which value will be decoded. Each constructed type that
+%% is in the TypeList will get a "choosen" command. Only the last
+%% type/component in the TypeList may be a primitive type. Components
+%% "on the way" to the final element may get the "skip" or the
+%% "skip_optional" command.
+%% CommandList = [Elements]
+%% Elements = {choosen,Tag}|{skip_optional,Tag}|skip
+%% Tag is a binary with the tag BER encoded.
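+%%
+%% A hypothetical illustration: selecting one component two levels down,
+%% past one mandatory and one optional sibling, could give
+%%
+%%   [{choosen,TagTop}, skip, {skip_optional,TagSib}, {choosen,TagSel}]
+%%
+%% with TagTop, TagSib and TagSel being the BER-encoded tags involved.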
+create_partial_decode_gen_info(ModName,{{_,ModName},TypeList}) ->
+ case TypeList of
+ [TopType|Rest] ->
+ case asn1_db:dbget(ModName,TopType) of
+ #typedef{typespec=TS} ->
+ TagCommand = get_tag_command(TS,?CHOOSEN),
+ create_pdec_command(ModName,get_components(TS#type.def),
+ Rest,[TagCommand]);
+ _ ->
+ throw({error,{"wrong type list in asn1 config file",
+ TypeList}})
+ end;
+ _ ->
+ []
+ end;
+create_partial_decode_gen_info(_,[]) ->
+ [];
+create_partial_decode_gen_info(_M1,{{_,M2},_}) ->
+ throw({error,{"wrong module name in asn1 config file",
+ M2}}).
+
+%% create_pdec_command/4: for each name (type or component) in the
+%% third argument, TypeNameList, a command is created. The command
+%% states whether the component/type shall be skipped, looked into or
+%% returned. The list of commands is returned.
+create_pdec_command(_ModName,_,[],Acc) ->
+ lists:reverse(Acc);
+create_pdec_command(ModName,[#'ComponentType'{name=C1,typespec=TS}|_Comps],
+ [C1|Cs],Acc) ->
+    %% this component is a constructed type or the last in the
+    %% TypeNameList; otherwise the config spec is wrong
+ TagCommand = get_tag_command(TS,?CHOOSEN),
+ create_pdec_command(ModName,get_components(TS#type.def),
+ Cs,[TagCommand|Acc]);
+create_pdec_command(ModName,[#'ComponentType'{typespec=TS,
+ prop=Prop}|Comps],
+ [C2|Cs],Acc) ->
+ TagCommand =
+ case Prop of
+ mandatory ->
+ get_tag_command(TS,?SKIP);
+ _ ->
+ get_tag_command(TS,?SKIP_OPTIONAL)
+ end,
+ create_pdec_command(ModName,Comps,[C2|Cs],[TagCommand|Acc]);
+create_pdec_command(ModName,{'CHOICE',[Comp=#'ComponentType'{name=C1}|_]},TNL=[C1|_Cs],Acc) ->
+ create_pdec_command(ModName,[Comp],TNL,Acc);
+create_pdec_command(ModName,{'CHOICE',[#'ComponentType'{}|Comps]},TNL,Acc) ->
+ create_pdec_command(ModName,{'CHOICE',Comps},TNL,Acc);
+create_pdec_command(ModName,#'Externaltypereference'{module=M,type=C1},
+ TypeNameList,Acc) ->
+ case get_referenced_type(M,C1) of
+ #type{def=Def} ->
+ create_pdec_command(ModName,get_components(Def),TypeNameList,
+ Acc);
+ Err ->
+ throw({error,{"unexpected result when fetching "
+ "referenced element",Err}})
+ end;
+create_pdec_command(ModName,TS=#type{def=Def},[C1|Cs],Acc) ->
+    %% This case applies when we got the "components" of a SEQUENCE/SET OF
+ case C1 of
+ [1] ->
+	    %% A list with an integer is the only valid option in an 'S
+	    %% OF'; the alternative would be an empty TypeNameList,
+	    %% saying that the entire 'S OF' will be decoded.
+ TagCommand = get_tag_command(TS,?CHOOSEN),
+ create_pdec_command(ModName,Def,Cs,[TagCommand|Acc]);
+ [N] when integer(N) ->
+ TagCommand = get_tag_command(TS,?SKIP),
+ create_pdec_command(ModName,Def,[[N-1]|Cs],[TagCommand|Acc]);
+ Err ->
+ throw({error,{"unexpected error when creating partial "
+ "decode command",Err}})
+ end;
+create_pdec_command(_,_,TNL,_) ->
+ throw({error,{"unexpected error when creating partial "
+ "decode command",TNL}}).
+
+% get_components({'CHOICE',Components}) ->
+% Components;
+get_components(#'SEQUENCE'{components=Components}) ->
+ Components;
+get_components(#'SET'{components=Components}) ->
+ Components;
+get_components({'SEQUENCE OF',Components}) ->
+ Components;
+get_components({'SET OF',Components}) ->
+ Components;
+get_components(Def) ->
+ Def.
+
+%% get_tag_command(Type,Command)
+
+%% Type is the type that has information about the tag. Command tells
+%% what to do with the encoded value with the tag of Type when
+%% decoding.
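+%%
+%% Reading the clauses below: for an untagged type the result is [],
+%% for ?SKIP the bare ?SKIP command is returned, and otherwise the
+%% command is paired with the BER-encoded tag, e.g. (schematically)
+%%
+%%   get_tag_command(#type{tag=[]},    ?CHOOSEN) -> []
+%%   get_tag_command(#type{tag=[Tag]}, ?SKIP)    -> ?SKIP
+%%   get_tag_command(#type{tag=[Tag]}, ?CHOOSEN) -> [?CHOOSEN, EncodedTag]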
+get_tag_command(#type{tag=[]},_) ->
+ [];
+get_tag_command(#type{tag=[_Tag]},?SKIP) ->
+ ?SKIP;
+get_tag_command(#type{tag=[Tag]},Command) ->
+ %% encode the tag according to BER
+ [Command,encode_tag_val(decode_class(Tag#tag.class),Tag#tag.form,
+ Tag#tag.number)];
+get_tag_command(T=#type{tag=[Tag|Tags]},Command) ->
+ [get_tag_command(T#type{tag=Tag},Command)|
+ get_tag_command(T#type{tag=Tags},Command)].
+
+%% get_tag_command/3 used by create_pdec_inc_command
+get_tag_command(#type{tag=[]},_,_) ->
+ [];
+get_tag_command(#type{tag=[Tag]},?MANDATORY,Prop) ->
+ case Prop of
+ mandatory ->
+ ?MANDATORY;
+ {'DEFAULT',_} ->
+ [?DEFAULT,encode_tag_val(decode_class(Tag#tag.class),
+ Tag#tag.form,Tag#tag.number)];
+ _ -> [?OPTIONAL,encode_tag_val(decode_class(Tag#tag.class),
+ Tag#tag.form,Tag#tag.number)]
+ end;
+get_tag_command(#type{tag=[Tag]},Command,_) ->
+ [Command,encode_tag_val(decode_class(Tag#tag.class),Tag#tag.form,
+ Tag#tag.number)].
+
+
+get_referenced_type(M,Name) ->
+ case asn1_db:dbget(M,Name) of
+ #typedef{typespec=TS} ->
+ case TS of
+ #type{def=#'Externaltypereference'{module=M2,type=Name2}} ->
+ %% The tags have already been taken care of in the
+ %% first reference where they were gathered in a
+ %% list of tags.
+ get_referenced_type(M2,Name2);
+ #type{} -> TS;
+ _ ->
+ throw({error,{"unexpected element when"
+ " fetching referenced type",TS}})
+ end;
+ T ->
+ throw({error,{"unexpected element when fetching "
+ "referenced type",T}})
+ end.
+
+tag_format(EncRule,_Options,CommandList) ->
+ case EncRule of
+ ber_bin_v2 ->
+ tlv_tags(CommandList);
+ _ ->
+ CommandList
+ end.
+
+tlv_tags([]) ->
+ [];
+tlv_tags([mandatory|Rest]) ->
+ [mandatory|tlv_tags(Rest)];
+tlv_tags([[Command,Tag]|Rest]) when atom(Command),binary(Tag) ->
+ [[Command,tlv_tag(Tag)]|tlv_tags(Rest)];
+tlv_tags([[Command,Directives]|Rest]) when atom(Command),list(Directives) ->
+ [[Command,tlv_tags(Directives)]|tlv_tags(Rest)];
+%% remove all empty lists
+tlv_tags([[]|Rest]) ->
+ tlv_tags(Rest);
+tlv_tags([{Name,TopType,L1}|Rest]) when list(L1),atom(TopType) ->
+ [{Name,TopType,tlv_tags(L1)}|tlv_tags(Rest)];
+tlv_tags([[Command,Tag,L1]|Rest]) when list(L1),binary(Tag) ->
+ [[Command,tlv_tag(Tag),tlv_tags(L1)]|tlv_tags(Rest)];
+tlv_tags([L=[L1|_]|Rest]) when list(L1) ->
+ [tlv_tags(L)|tlv_tags(Rest)].
+
+tlv_tag(<<Cl:2,_:1,TagNo:5>>) when TagNo < 31 ->
+ (Cl bsl 16) + TagNo;
+tlv_tag(<<Cl:2,_:1,31:5,0:1,TagNo:7>>) ->
+ (Cl bsl 16) + TagNo;
+tlv_tag(<<Cl:2,_:1,31:5,Buffer/binary>>) ->
+ TagNo = tlv_tag1(Buffer,0),
+ (Cl bsl 16) + TagNo.
+tlv_tag1(<<0:1,PartialTag:7>>,Acc) ->
+ (Acc bsl 7) bor PartialTag;
+tlv_tag1(<<1:1,PartialTag:7,Buffer/binary>>,Acc) ->
+ tlv_tag1(Buffer,(Acc bsl 7) bor PartialTag).
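+
+%% A worked example of the integer form produced above (purely
+%% illustrative): for a context-specific tag (class bits 2#10) with tag
+%% number 3, i.e. the single tag octet 2#10000011 = 16#83,
+%%
+%%   tlv_tag(<<16#83>>) =:= (2 bsl 16) + 3 =:= 131075,
+%%
+%% so the class occupies bits 16-17 and the tag number the low bits.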
+
+%% Reads the content from the configuration file and returns the
+%% part chosen by InfoType. Assumes that the config file
+%% content is given as Erlang terms.
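+%%
+%% As a rough, hypothetical illustration of the expected shape, such a
+%% file holds terms keyed on the info type, for instance
+%%
+%%   {exclusive_decode,{'MyModule',[{dec_MyTop,['MyTop','Comp1']}]}}.
+%%
+%% (module, function and type names above are placeholders only).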
+read_config_file(ModuleName,InfoType) when atom(InfoType) ->
+ CfgList = read_config_file(ModuleName),
+ get_config_info(CfgList,InfoType).
+
+
+read_config_file(ModuleName) ->
+ case file:consult(lists:concat([ModuleName,'.asn1config'])) of
+% case file:consult(ModuleName) of
+ {ok,CfgList} ->
+ CfgList;
+ {error,enoent} ->
+ Options = get(encoding_options),
+ Includes = [I || {i,I} <- Options],
+ read_config_file1(ModuleName,Includes);
+ {error,Reason} ->
+ file:format_error(Reason),
+ throw({error,{"error reading asn1 config file",Reason}})
+ end.
+read_config_file1(ModuleName,[]) ->
+ case filename:extension(ModuleName) of
+ ".asn1config" ->
+ throw({error,enoent});
+ _ ->
+ read_config_file(lists:concat([ModuleName,".asn1config"]))
+ end;
+read_config_file1(ModuleName,[H|T]) ->
+% File = filename:join([H,lists:concat([ModuleName,'.asn1config'])]),
+ File = filename:join([H,ModuleName]),
+ case file:consult(File) of
+ {ok,CfgList} ->
+ CfgList;
+ {error,enoent} ->
+ read_config_file1(ModuleName,T);
+ {error,Reason} ->
+ file:format_error(Reason),
+ throw({error,{"error reading asn1 config file",Reason}})
+ end.
+
+get_config_info(CfgList,InfoType) ->
+ case InfoType of
+ all ->
+ CfgList;
+ _ ->
+ case lists:keysearch(InfoType,1,CfgList) of
+ {value,{InfoType,Value}} ->
+ Value;
+ false ->
+ []
+ end
+ end.
+
+%% save_config/2 saves Info under the key Key.
+%% Before saving anything, make sure that the table exists.
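+%%
+%% For example (with CmdList standing for any saved term):
+%%
+%%   save_config(partial_decode, CmdList)
+%%
+%% inserts {{asn1_config,partial_decode},CmdList} into the asn1_general
+%% table, and read_config_data(partial_decode) later returns CmdList.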
+save_config(Key,Info) ->
+ create_if_no_table(asn1_general,[named_table]),
+ ets:insert(asn1_general,{{asn1_config,Key},Info}).
+
+read_config_data(Key) ->
+ case ets:info(asn1_general) of
+ undefined -> undefined;
+ _ ->
+ case ets:lookup(asn1_general,{asn1_config,Key}) of
+ [{_,Data}] -> Data;
+ Err ->
+ io:format("strange data from config file ~w~n",[Err]),
+ Err
+ end
+ end.
+
+
+%%
+%% Functions to manipulate the gen_state record saved in the
+%% asn1_general ets table.
+%%
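+%% A minimal sketch of the intended round trip (the "dec-inc-" value
+%% below is only an example):
+%%
+%%   reset_gen_state(),                        %% store a fresh state
+%%   update_gen_state(prefix, "dec-inc-"),     %% modify one field
+%%   "dec-inc-" = get_gen_state_field(prefix)  %% read it back
+%%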
+
+%% saves input data in a new gen_state record
+save_gen_state({_,ConfList},PartIncTlvTagList) ->
+ %ConfList=[{FunctionName,PatternList}|Rest]
+ StateRec = #gen_state{inc_tag_pattern=PartIncTlvTagList,
+ inc_type_pattern=ConfList},
+ save_config(gen_state,StateRec);
+save_gen_state(_,_) ->
+%% ok.
+ save_config(gen_state,#gen_state{}).
+
+save_gen_state(GenState) when record(GenState,gen_state) ->
+ save_config(gen_state,GenState).
+
+
+%% get_gen_state_field returns undefined if no gen_state exists or if
+%% Field is undefined; otherwise it returns the data stored in that field.
+get_gen_state_field(Field) ->
+ case read_config_data(gen_state) of
+ undefined ->
+ undefined;
+ GenState ->
+ get_gen_state_field(GenState,Field)
+ end.
+get_gen_state_field(#gen_state{active=Active},active) ->
+ Active;
+get_gen_state_field(_,active) ->
+ false;
+get_gen_state_field(GS,prefix) ->
+ GS#gen_state.prefix;
+get_gen_state_field(GS,inc_tag_pattern) ->
+ GS#gen_state.inc_tag_pattern;
+get_gen_state_field(GS,tag_pattern) ->
+ GS#gen_state.tag_pattern;
+get_gen_state_field(GS,inc_type_pattern) ->
+ GS#gen_state.inc_type_pattern;
+get_gen_state_field(GS,type_pattern) ->
+ GS#gen_state.type_pattern;
+get_gen_state_field(GS,func_name) ->
+ GS#gen_state.func_name;
+get_gen_state_field(GS,namelist) ->
+ GS#gen_state.namelist;
+get_gen_state_field(GS,tobe_refed_funcs) ->
+ GS#gen_state.tobe_refed_funcs;
+get_gen_state_field(GS,gen_refed_funcs) ->
+ GS#gen_state.gen_refed_funcs.
+
+
+get_gen_state() ->
+ read_config_data(gen_state).
+
+
+update_gen_state(Field,Data) ->
+ case get_gen_state() of
+ State when record(State,gen_state) ->
+ update_gen_state(Field,State,Data);
+ _ ->
+ exit({error,{asn1,{internal,
+ "tried to update nonexistent gen_state",Field,Data}}})
+ end.
+update_gen_state(active,State,Data) ->
+ save_gen_state(State#gen_state{active=Data});
+update_gen_state(prefix,State,Data) ->
+ save_gen_state(State#gen_state{prefix=Data});
+update_gen_state(inc_tag_pattern,State,Data) ->
+ save_gen_state(State#gen_state{inc_tag_pattern=Data});
+update_gen_state(tag_pattern,State,Data) ->
+ save_gen_state(State#gen_state{tag_pattern=Data});
+update_gen_state(inc_type_pattern,State,Data) ->
+ save_gen_state(State#gen_state{inc_type_pattern=Data});
+update_gen_state(type_pattern,State,Data) ->
+ save_gen_state(State#gen_state{type_pattern=Data});
+update_gen_state(func_name,State,Data) ->
+ save_gen_state(State#gen_state{func_name=Data});
+update_gen_state(namelist,State,Data) ->
+% SData =
+% case Data of
+% [D] when list(D) -> D;
+% _ -> Data
+% end,
+ save_gen_state(State#gen_state{namelist=Data});
+update_gen_state(tobe_refed_funcs,State,Data) ->
+ save_gen_state(State#gen_state{tobe_refed_funcs=Data});
+update_gen_state(gen_refed_funcs,State,Data) ->
+ save_gen_state(State#gen_state{gen_refed_funcs=Data}).
+
+update_namelist(Name) ->
+ case get_gen_state_field(namelist) of
+ [Name,Rest] -> update_gen_state(namelist,Rest);
+ [Name|Rest] -> update_gen_state(namelist,Rest);
+ [{Name,List}] when list(List) -> update_gen_state(namelist,List);
+ [{Name,Atom}|Rest] when atom(Atom) -> update_gen_state(namelist,Rest);
+ Other -> Other
+ end.
+
+pop_namelist() ->
+ DeepTail = %% removes next element in order
+ fun([[{_,A}]|T],_Fun) when atom(A) -> T;
+ ([{_N,L}|T],_Fun) when list(L) -> [L|T];
+ ([[]|T],Fun) -> Fun(T,Fun);
+ ([L1|L2],Fun) when list(L1) ->
+ case lists:flatten(L1) of
+ [] -> Fun([L2],Fun);
+ _ -> [Fun(L1,Fun)|L2]
+ end;
+ ([_H|T],_Fun) -> T
+ end,
+ {Pop,NewNL} =
+ case get_gen_state_field(namelist) of
+ [] -> {[],[]};
+ L ->
+ {next_namelist_el(L),
+ DeepTail(L,DeepTail)}
+ end,
+ update_gen_state(namelist,NewNL),
+ Pop.
+
+%% next_namelist_el fetches the next type/component name in turn in
+%% the namelist, without changing the namelist.
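+%% For instance, with a stored namelist [[typeA],typeB] (hypothetical
+%% names),
+%%
+%%   next_namelist_el()  %% returns typeA, namelist unchanged
+%%   pop_namelist()      %% returns typeA and removes it from the namelist
+%%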
+next_namelist_el() ->
+ case get_gen_state_field(namelist) of
+ undefined -> undefined;
+ L when list(L) -> next_namelist_el(L)
+ end.
+
+next_namelist_el([]) ->
+ [];
+next_namelist_el([L]) when list(L) ->
+ next_namelist_el(L);
+next_namelist_el([H|_]) when atom(H) ->
+ H;
+next_namelist_el([L|T]) when list(L) ->
+ case next_namelist_el(L) of
+ [] ->
+ next_namelist_el([T]);
+ R ->
+ R
+ end;
+next_namelist_el([H={_,A}|_]) when atom(A) ->
+ H.
+
+%% removes a bracket from the namelist
+step_in_constructed() ->
+ case get_gen_state_field(namelist) of
+ [L] when list(L) ->
+ update_gen_state(namelist,L);
+ _ -> ok
+ end.
+
+is_function_generated(Name) ->
+ case get_gen_state_field(gen_refed_funcs) of
+ L when list(L) ->
+ lists:member(Name,L);
+ _ ->
+ false
+ end.
+
+get_tobe_refed_func(Name) ->
+ case get_gen_state_field(tobe_refed_funcs) of
+ L when list(L) ->
+ case lists:keysearch(Name,1,L) of
+ {_,Element} ->
+ Element;
+ _ ->
+ undefined
+ end;
+ _ ->
+ undefined
+ end.
+
+add_tobe_refed_func(Data) ->
+ L = get_gen_state_field(tobe_refed_funcs),
+ update_gen_state(tobe_refed_funcs,[Data|L]).
+
+%% moves Name from the to-be-referenced list to the generated list.
+generated_refed_func(Name) ->
+ L = get_gen_state_field(tobe_refed_funcs),
+ NewL = lists:keydelete(Name,1,L),
+ update_gen_state(tobe_refed_funcs,NewL),
+ L2 = get_gen_state_field(gen_refed_funcs),
+ update_gen_state(gen_refed_funcs,[Name|L2]).
+
+add_generated_refed_func(Data) ->
+ L = get_gen_state_field(gen_refed_funcs),
+ update_gen_state(gen_refed_funcs,[Data|L]).
+
+
+next_refed_func() ->
+ case get_gen_state_field(tobe_refed_funcs) of
+ [] ->
+ [];
+ [H|T] ->
+ update_gen_state(tobe_refed_funcs,T),
+ H
+ end.
+
+reset_gen_state() ->
+ save_gen_state(#gen_state{}).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl
new file mode 100644
index 0000000000..2f0ada122e
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl
@@ -0,0 +1,5566 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_check.erl,v 1.1 2008/12/17 09:53:29 mikpe Exp $
+%%
+-module(asn1ct_check).
+
+%% Main Module for ASN.1 compile time functions
+
+%-compile(export_all).
+-export([check/2,storeindb/1]).
+-include("asn1_records.hrl").
+%%% The tag-number for universal types
+-define(N_BOOLEAN, 1).
+-define(N_INTEGER, 2).
+-define(N_BIT_STRING, 3).
+-define(N_OCTET_STRING, 4).
+-define(N_NULL, 5).
+-define(N_OBJECT_IDENTIFIER, 6).
+-define(N_OBJECT_DESCRIPTOR, 7).
+-define(N_EXTERNAL, 8). % constructed
+-define(N_INSTANCE_OF,8).
+-define(N_REAL, 9).
+-define(N_ENUMERATED, 10).
+-define(N_EMBEDDED_PDV, 11). % constructed
+-define(N_SEQUENCE, 16).
+-define(N_SET, 17).
+-define(N_NumericString, 18).
+-define(N_PrintableString, 19).
+-define(N_TeletexString, 20).
+-define(N_VideotexString, 21).
+-define(N_IA5String, 22).
+-define(N_UTCTime, 23).
+-define(N_GeneralizedTime, 24).
+-define(N_GraphicString, 25).
+-define(N_VisibleString, 26).
+-define(N_GeneralString, 27).
+-define(N_UniversalString, 28).
+-define(N_CHARACTER_STRING, 29). % constructed
+-define(N_BMPString, 30).
+
+-define(TAG_PRIMITIVE(Num),
+ case S#state.erule of
+ ber_bin_v2 ->
+ #tag{class='UNIVERSAL',number=Num,type='IMPLICIT',form=0};
+ _ -> []
+ end).
+-define(TAG_CONSTRUCTED(Num),
+ case S#state.erule of
+ ber_bin_v2 ->
+ #tag{class='UNIVERSAL',number=Num,type='IMPLICIT',form=32};
+ _ -> []
+ end).
+
+-record(newt,{type=unchanged,tag=unchanged,constraint=unchanged,inlined=no}). % used in check_type to update type and tag
+-record(newv,{type=unchanged,value=unchanged}). % used in check_value to update type and value
+
+check(S,{Types,Values,ParameterizedTypes,Classes,Objects,ObjectSets}) ->
+    %% Predicates used to filter errors
+ TupleIs = fun({T,_},T) -> true;
+ (_,_) -> false
+ end,
+ IsClass = fun(X) -> TupleIs(X,asn1_class) end,
+ IsObjSet = fun(X) -> TupleIs(X,objectsetdef) end,
+ IsPObjSet = fun(X) -> TupleIs(X,pobjectsetdef) end,
+ IsObject = fun(X) -> TupleIs(X,objectdef) end,
+ IsValueSet = fun(X) -> TupleIs(X,valueset) end,
+ Element2 = fun(X) -> element(2,X) end,
+
+ _Perror = checkp(S,ParameterizedTypes,[]), % must do this before the templates are used
+ Terror = checkt(S,Types,[]),
+
+ %% get parameterized object sets sent to checkt/3
+ %% and update Terror
+
+ {PObjSetNames1,Terror2} = filter_errors(IsPObjSet,Terror),
+
+ Verror = checkv(S,Values ++ ObjectSets,[]), %value sets may be parsed as object sets
+
+ %% get information object classes wrongly sent to checkt/3
+ %% and update Terror2
+
+ {AddClasses,Terror3} = filter_errors(IsClass,Terror2),
+
+ NewClasses = Classes++AddClasses,
+
+ Cerror = checkc(S,NewClasses,[]),
+
+ %% get object sets incorrectly sent to checkv/3
+ %% and update Verror
+
+ {ObjSetNames,Verror2} = filter_errors(IsObjSet,Verror),
+
+ %% get parameterized object sets incorrectly sent to checkv/3
+ %% and update Verror2
+
+ {PObjSetNames,Verror3} = filter_errors(IsPObjSet,Verror2),
+
+ %% get objects incorrectly sent to checkv/3
+ %% and update Verror3
+
+ {ObjectNames,Verror4} = filter_errors(IsObject,Verror3),
+
+ NewObjects = Objects++ObjectNames,
+ NewObjectSets = ObjSetNames ++ PObjSetNames ++ PObjSetNames1,
+
+ %% get value sets
+ %% and update Verror4
+
+ {ValueSetNames,Verror5} = filter_errors(IsValueSet,Verror4),
+
+ asn1ct:create_ets_table(inlined_objects,[named_table]),
+ {Oerror,ExclO,ExclOS} = checko(S,NewObjects ++
+ NewObjectSets,
+ [],[],[]),
+ InlinedObjTuples = ets:tab2list(inlined_objects),
+ InlinedObjects = lists:map(Element2,InlinedObjTuples),
+ ets:delete(inlined_objects),
+
+ Exporterror = check_exports(S,S#state.module),
+ case {Terror3,Verror5,Cerror,Oerror,Exporterror} of
+ {[],[],[],[],[]} ->
+ ContextSwitchTs = context_switch_in_spec(),
+ InstanceOf = instance_of_in_spec(),
+ NewTypes = lists:subtract(Types,AddClasses) ++ ContextSwitchTs
+ ++ InstanceOf,
+ NewValues = lists:subtract(Values,PObjSetNames++ObjectNames++
+ ValueSetNames),
+ {ok,
+ {NewTypes,NewValues,ParameterizedTypes,
+ NewClasses,NewObjects,NewObjectSets},
+ {NewTypes,NewValues,ParameterizedTypes,NewClasses,
+ lists:subtract(NewObjects,ExclO)++InlinedObjects,
+ lists:subtract(NewObjectSets,ExclOS)}};
+ _ ->{error,{asn1,lists:flatten([Terror3,Verror5,Cerror,
+ Oerror,Exporterror])}}
+ end.
+
+context_switch_in_spec() ->
+ L = [{external,'EXTERNAL'},
+ {embedded_pdv,'EMBEDDED PDV'},
+ {character_string,'CHARACTER STRING'}],
+ F = fun({T,TName},Acc) ->
+ case get(T) of
+ generate -> erase(T),
+ [TName|Acc];
+ _ -> Acc
+ end
+ end,
+ lists:foldl(F,[],L).
+
+instance_of_in_spec() ->
+ case get(instance_of) of
+ generate ->
+ erase(instance_of),
+ ['INSTANCE OF'];
+ _ ->
+ []
+ end.
+
+filter_errors(Pred,ErrorList) ->
+ Element2 = fun(X) -> element(2,X) end,
+ RemovedTupleElements = lists:filter(Pred,ErrorList),
+ RemovedNames = lists:map(Element2,RemovedTupleElements),
+ %% remove value set name tuples from Verror
+ RestErrors = lists:subtract(ErrorList,RemovedTupleElements),
+ {RemovedNames,RestErrors}.
+
+
+check_exports(S,Module = #module{}) ->
+ case Module#module.exports of
+ {exports,[]} ->
+ [];
+ {exports,all} ->
+ [];
+ {exports,ExportList} when list(ExportList) ->
+ IsNotDefined =
+ fun(X) ->
+ case catch get_referenced_type(S,X) of
+ {error,{asn1,_}} ->
+ true;
+ _ -> false
+ end
+ end,
+ case lists:filter(IsNotDefined,ExportList) of
+ [] ->
+ [];
+ NoDefExp ->
+ GetName =
+ fun(T = #'Externaltypereference'{type=N})->
+ %%{exported,undefined,entity,N}
+ NewS=S#state{type=T,tname=N},
+ error({export,"exported undefined entity",NewS})
+ end,
+ lists:map(GetName,NoDefExp)
+ end
+ end.
+
+checkt(S,[Name|T],Acc) ->
+ %%io:format("check_typedef:~p~n",[Name]),
+ Result =
+ case asn1_db:dbget(S#state.mname,Name) of
+ undefined ->
+ error({type,{internal_error,'???'},S});
+ Type when record(Type,typedef) ->
+ NewS = S#state{type=Type,tname=Name},
+ case catch(check_type(NewS,Type,Type#typedef.typespec)) of
+ {error,Reason} ->
+ error({type,Reason,NewS});
+ {'EXIT',Reason} ->
+ error({type,{internal_error,Reason},NewS});
+ {asn1_class,_ClassDef} ->
+ {asn1_class,Name};
+ pobjectsetdef ->
+ {pobjectsetdef,Name};
+ pvalueset ->
+ {pvalueset,Name};
+ Ts ->
+ case Type#typedef.checked of
+ true -> % already checked and updated
+ ok;
+ _ ->
+ NewTypeDef = Type#typedef{checked=true,typespec = Ts},
+ %io:format("checkt:dbput:~p, ~p~n",[S#state.mname,NewTypeDef#typedef.name]),
+ asn1_db:dbput(NewS#state.mname,Name,NewTypeDef), % update the type
+ ok
+ end
+ end
+ end,
+ case Result of
+ ok ->
+ checkt(S,T,Acc);
+ _ ->
+ checkt(S,T,[Result|Acc])
+ end;
+checkt(S,[],Acc) ->
+ case check_contextswitchingtypes(S,[]) of
+ [] ->
+ lists:reverse(Acc);
+ L ->
+ checkt(S,L,Acc)
+ end.
+
+check_contextswitchingtypes(S,Acc) ->
+ CSTList=[{external,'EXTERNAL'},
+ {embedded_pdv,'EMBEDDED PDV'},
+ {character_string,'CHARACTER STRING'}],
+ check_contextswitchingtypes(S,CSTList,Acc).
+
+check_contextswitchingtypes(S,[{T,TName}|Ts],Acc) ->
+ case get(T) of
+ unchecked ->
+ put(T,generate),
+ check_contextswitchingtypes(S,Ts,[TName|Acc]);
+ _ ->
+ check_contextswitchingtypes(S,Ts,Acc)
+ end;
+check_contextswitchingtypes(_,[],Acc) ->
+ Acc.
+
+checkv(S,[Name|T],Acc) ->
+ %%io:format("check_valuedef:~p~n",[Name]),
+ Result = case asn1_db:dbget(S#state.mname,Name) of
+ undefined -> error({value,{internal_error,'???'},S});
+ Value when record(Value,valuedef);
+ record(Value,typedef); %Value set may be parsed as object set.
+ record(Value,pvaluedef);
+ record(Value,pvaluesetdef) ->
+ NewS = S#state{value=Value},
+ case catch(check_value(NewS,Value)) of
+ {error,Reason} ->
+ error({value,Reason,NewS});
+ {'EXIT',Reason} ->
+ error({value,{internal_error,Reason},NewS});
+ {pobjectsetdef} ->
+ {pobjectsetdef,Name};
+ {objectsetdef} ->
+ {objectsetdef,Name};
+ {objectdef} ->
+ %% this is an object, save as typedef
+ #valuedef{checked=C,pos=Pos,name=N,type=Type,
+ value=Def}=Value,
+% Currmod = S#state.mname,
+% #type{def=
+% #'Externaltypereference'{module=Mod,
+% type=CName}} = Type,
+ ClassName =
+ Type#type.def,
+% case Mod of
+% Currmod ->
+% {objectclassname,CName};
+% _ ->
+% {objectclassname,Mod,CName}
+% end,
+ NewSpec = #'Object'{classname=ClassName,
+ def=Def},
+ NewDef = #typedef{checked=C,pos=Pos,name=N,
+ typespec=NewSpec},
+ asn1_db:dbput(NewS#state.mname,Name,NewDef),
+ {objectdef,Name};
+ {valueset,VSet} ->
+ Pos = asn1ct:get_pos_of_def(Value),
+ CheckedVSDef = #typedef{checked=true,pos=Pos,
+ name=Name,typespec=VSet},
+ asn1_db:dbput(NewS#state.mname,Name,CheckedVSDef),
+ {valueset,Name};
+ V ->
+ %% update the valuedef
+ asn1_db:dbput(NewS#state.mname,Name,V),
+ ok
+ end
+ end,
+ case Result of
+ ok ->
+ checkv(S,T,Acc);
+ _ ->
+ checkv(S,T,[Result|Acc])
+ end;
+checkv(_S,[],Acc) ->
+ lists:reverse(Acc).
+
+
+checkp(S,[Name|T],Acc) ->
+ %io:format("check_ptypedef:~p~n",[Name]),
+ Result = case asn1_db:dbget(S#state.mname,Name) of
+ undefined ->
+ error({type,{internal_error,'???'},S});
+ Type when record(Type,ptypedef) ->
+ NewS = S#state{type=Type,tname=Name},
+ case catch(check_ptype(NewS,Type,Type#ptypedef.typespec)) of
+ {error,Reason} ->
+ error({type,Reason,NewS});
+ {'EXIT',Reason} ->
+ error({type,{internal_error,Reason},NewS});
+ {asn1_class,_ClassDef} ->
+ {asn1_class,Name};
+ Ts ->
+ NewType = Type#ptypedef{checked=true,typespec = Ts},
+ asn1_db:dbput(NewS#state.mname,Name,NewType), % update the type
+ ok
+ end
+ end,
+ case Result of
+ ok ->
+ checkp(S,T,Acc);
+ _ ->
+ checkp(S,T,[Result|Acc])
+ end;
+checkp(_S,[],Acc) ->
+ lists:reverse(Acc).
+
+
+
+
+checkc(S,[Name|Cs],Acc) ->
+ Result =
+ case asn1_db:dbget(S#state.mname,Name) of
+ undefined ->
+ error({class,{internal_error,'???'},S});
+ Class ->
+ ClassSpec = if
+ record(Class,classdef) ->
+ Class#classdef.typespec;
+ record(Class,typedef) ->
+ Class#typedef.typespec
+ end,
+ NewS = S#state{type=Class,tname=Name},
+ case catch(check_class(NewS,ClassSpec)) of
+ {error,Reason} ->
+ error({class,Reason,NewS});
+ {'EXIT',Reason} ->
+ error({class,{internal_error,Reason},NewS});
+ C ->
+ %% update the classdef
+ NewClass =
+ if
+ record(Class,classdef) ->
+ Class#classdef{checked=true,typespec=C};
+ record(Class,typedef) ->
+ #classdef{checked=true,name=Name,typespec=C}
+ end,
+ asn1_db:dbput(NewS#state.mname,Name,NewClass),
+ ok
+ end
+ end,
+ case Result of
+ ok ->
+ checkc(S,Cs,Acc);
+ _ ->
+ checkc(S,Cs,[Result|Acc])
+ end;
+checkc(_S,[],Acc) ->
+%% include_default_class(S#state.mname),
+ lists:reverse(Acc).
+
+checko(S,[Name|Os],Acc,ExclO,ExclOS) ->
+ Result =
+ case asn1_db:dbget(S#state.mname,Name) of
+ undefined ->
+ error({type,{internal_error,'???'},S});
+ Object when record(Object,typedef) ->
+ NewS = S#state{type=Object,tname=Name},
+ case catch(check_object(NewS,Object,Object#typedef.typespec)) of
+ {error,Reason} ->
+ error({type,Reason,NewS});
+ {'EXIT',Reason} ->
+ error({type,{internal_error,Reason},NewS});
+ {asn1,Reason} ->
+ error({type,Reason,NewS});
+ O ->
+ NewObj = Object#typedef{checked=true,typespec=O},
+ asn1_db:dbput(NewS#state.mname,Name,NewObj),
+ if
+ record(O,'Object') ->
+ case O#'Object'.gen of
+ true ->
+ {ok,ExclO,ExclOS};
+ false ->
+ {ok,[Name|ExclO],ExclOS}
+ end;
+ record(O,'ObjectSet') ->
+ case O#'ObjectSet'.gen of
+ true ->
+ {ok,ExclO,ExclOS};
+ false ->
+ {ok,ExclO,[Name|ExclOS]}
+ end
+ end
+ end;
+ PObject when record(PObject,pobjectdef) ->
+ NewS = S#state{type=PObject,tname=Name},
+ case (catch check_pobject(NewS,PObject)) of
+ {error,Reason} ->
+ error({type,Reason,NewS});
+ {'EXIT',Reason} ->
+ error({type,{internal_error,Reason},NewS});
+ {asn1,Reason} ->
+ error({type,Reason,NewS});
+ PO ->
+ NewPObj = PObject#pobjectdef{def=PO},
+ asn1_db:dbput(NewS#state.mname,Name,NewPObj),
+ {ok,[Name|ExclO],ExclOS}
+ end;
+ PObjSet when record(PObjSet,pvaluesetdef) ->
+		%% this is a parameterized object set; it might also be a
+		%% parameterized value set.
+ NewS = S#state{type=PObjSet,tname=Name},
+ case (catch check_pobjectset(NewS,PObjSet)) of
+ {error,Reason} ->
+ error({type,Reason,NewS});
+ {'EXIT',Reason} ->
+ error({type,{internal_error,Reason},NewS});
+ {asn1,Reason} ->
+ error({type,Reason,NewS});
+ POS ->
+ %%NewPObjSet = PObjSet#pvaluesetdef{valueset=POS},
+ asn1_db:dbput(NewS#state.mname,Name,POS),
+ {ok,ExclO,[Name|ExclOS]}
+ end
+ end,
+ case Result of
+ {ok,NewExclO,NewExclOS} ->
+ checko(S,Os,Acc,NewExclO,NewExclOS);
+ _ ->
+ checko(S,Os,[Result|Acc],ExclO,ExclOS)
+ end;
+checko(_S,[],Acc,ExclO,ExclOS) ->
+ {lists:reverse(Acc),lists:reverse(ExclO),lists:reverse(ExclOS)}.
+
+check_class(S,CDef=#classdef{checked=Ch,name=Name,typespec=TS}) ->
+ case Ch of
+ true -> TS;
+ idle -> TS;
+ _ ->
+ NewCDef = CDef#classdef{checked=idle},
+ asn1_db:dbput(S#state.mname,Name,NewCDef),
+ CheckedTS = check_class(S,TS),
+ asn1_db:dbput(S#state.mname,Name,
+ NewCDef#classdef{checked=true,
+ typespec=CheckedTS}),
+ CheckedTS
+ end;
+check_class(S = #state{mname=M,tname=T},ClassSpec)
+ when record(ClassSpec,type) ->
+ Def = ClassSpec#type.def,
+ case Def of
+ #'Externaltypereference'{module=M,type=T} ->
+ #objectclass{fields=Def}; % in case of recursive definitions
+ Tref when record(Tref,'Externaltypereference') ->
+ {_,RefType} = get_referenced_type(S,Tref),
+% case RefType of
+% RefClass when record(RefClass,classdef) ->
+% check_class(S,RefClass#classdef.typespec)
+% end
+ case is_class(S,RefType) of
+ true ->
+ check_class(S,get_class_def(S,RefType));
+ _ ->
+ error({class,{internal_error,RefType},S})
+ end
+ end;
+% check_class(S,{objectclassname,ModuleName,ClassName}) when atom(ModuleName),atom(ClassName) ->
+% 'fix this';
+check_class(S,C) when record(C,objectclass) ->
+ NewFieldSpec = check_class_fields(S,C#objectclass.fields),
+ C#objectclass{fields=NewFieldSpec};
+%check_class(S,{objectclassname,ClassName}) ->
+check_class(S,ClassName) ->
+ {_,Def} = get_referenced_type(S,ClassName),
+ case Def of
+ ClassDef when record(ClassDef,classdef) ->
+ case ClassDef#classdef.checked of
+ true ->
+ ClassDef#classdef.typespec;
+ idle ->
+ ClassDef#classdef.typespec;
+ false ->
+ check_class(S,ClassDef#classdef.typespec)
+ end;
+ TypeDef when record(TypeDef,typedef) ->
+ %% this case may occur when a definition is a reference
+ %% to a class definition.
+ case TypeDef#typedef.typespec of
+ #type{def=Ext} when record(Ext,'Externaltypereference') ->
+ check_class(S,Ext)
+ end
+ end;
+check_class(_S,{poc,_ObjSet,_Params}) ->
+ 'fix this later'.
+
+check_class_fields(S,Fields) ->
+ check_class_fields(S,Fields,[]).
+
+check_class_fields(S,[F|Fields],Acc) ->
+ NewField =
+ case element(1,F) of
+ fixedtypevaluefield ->
+ {_,Name,Type,Unique,OSpec} = F,
+ RefType = check_type(S,#typedef{typespec=Type},Type),
+ {fixedtypevaluefield,Name,RefType,Unique,OSpec};
+ object_or_fixedtypevalue_field ->
+ {_,Name,Type,Unique,OSpec} = F,
+ Cat =
+ case asn1ct_gen:type(asn1ct_gen:get_inner(Type#type.def)) of
+ Def when record(Def,typereference);
+ record(Def,'Externaltypereference') ->
+ {_,D} = get_referenced_type(S,Def),
+ D;
+ {undefined,user} ->
+ %% neither of {primitive,bif} or {constructed,bif}
+%% {_,D} = get_referenced_type(S,#typereference{val=Type#type.def}),
+ {_,D} = get_referenced_type(S,#'Externaltypereference'{module=S#state.mname,type=Type#type.def}),
+ D;
+ _ ->
+ Type
+ end,
+ case Cat of
+ Class when record(Class,classdef) ->
+ {objectfield,Name,Type,Unique,OSpec};
+ _ ->
+ RefType = check_type(S,#typedef{typespec=Type},Type),
+ {fixedtypevaluefield,Name,RefType,Unique,OSpec}
+ end;
+ objectset_or_fixedtypevalueset_field ->
+ {_,Name,Type,OSpec} = F,
+%% RefType = check_type(S,#typedef{typespec=Type},Type),
+ RefType =
+ case (catch check_type(S,#typedef{typespec=Type},Type)) of
+ {asn1_class,_ClassDef} ->
+ case if_current_checked_type(S,Type) of
+ true ->
+ Type#type.def;
+ _ ->
+ check_class(S,Type)
+ end;
+ CheckedType when record(CheckedType,type) ->
+ CheckedType;
+ _ ->
+ error({class,"internal error, check_class_fields",S})
+ end,
+ if
+ record(RefType,'Externaltypereference') ->
+ {objectsetfield,Name,Type,OSpec};
+ record(RefType,classdef) ->
+ {objectsetfield,Name,Type,OSpec};
+ record(RefType,objectclass) ->
+ {objectsetfield,Name,Type,OSpec};
+ true ->
+ {fixedtypevaluesetfield,Name,RefType,OSpec}
+ end;
+ typefield ->
+ case F of
+ {TF,Name,{'DEFAULT',Type}} ->
+ {TF,Name,{'DEFAULT',check_type(S,#typedef{typespec=Type},Type)}};
+ _ -> F
+ end;
+ _ -> F
+ end,
+ check_class_fields(S,Fields,[NewField|Acc]);
+check_class_fields(_S,[],Acc) ->
+ lists:reverse(Acc).
+
+if_current_checked_type(S,#type{def=Def}) ->
+ CurrentCheckedName = S#state.tname,
+ MergedModules = S#state.inputmodules,
+ % CurrentCheckedModule = S#state.mname,
+ case Def of
+ #'Externaltypereference'{module=CurrentCheckedName,
+ type=CurrentCheckedName} ->
+ true;
+ #'Externaltypereference'{module=ModuleName,
+ type=CurrentCheckedName} ->
+ case MergedModules of
+ undefined ->
+ false;
+ _ ->
+ lists:member(ModuleName,MergedModules)
+ end;
+ _ ->
+ false
+ end.
+
+
+
+check_pobject(_S,PObject) when record(PObject,pobjectdef) ->
+ Def = PObject#pobjectdef.def,
+ Def.
+
+
+check_pobjectset(S,PObjSet) ->
+ #pvaluesetdef{pos=Pos,name=Name,args=Args,type=Type,
+ valueset=ValueSet}=PObjSet,
+ {Mod,Def} = get_referenced_type(S,Type#type.def),
+ case Def of
+ #classdef{} ->
+ ClassName = #'Externaltypereference'{module=Mod,
+ type=Def#classdef.name},
+ {valueset,Set} = ValueSet,
+% ObjectSet = #'ObjectSet'{class={objectclassname,ClassName},
+ ObjectSet = #'ObjectSet'{class=ClassName,
+ set=Set},
+ #pobjectsetdef{pos=Pos,name=Name,args=Args,class=Type#type.def,
+ def=ObjectSet};
+ _ ->
+ PObjSet
+ end.
+
+check_object(_S,ObjDef,ObjSpec) when (ObjDef#typedef.checked == true) ->
+ ObjSpec;
+check_object(S,_ObjDef,#'Object'{classname=ClassRef,def=ObjectDef}) ->
+ {_,_ClassDef} = get_referenced_type(S,ClassRef),
+ NewClassRef = check_externaltypereference(S,ClassRef),
+ ClassDef =
+ case _ClassDef#classdef.checked of
+ false ->
+ #classdef{checked=true,
+ typespec=check_class(S,_ClassDef#classdef.typespec)};
+ _ ->
+ _ClassDef
+ end,
+ NewObj =
+ case ObjectDef of
+ Def when tuple(Def), (element(1,Def)==object) ->
+ NewSettingList = check_objectdefn(S,Def,ClassDef),
+ #'Object'{def=NewSettingList};
+% Def when tuple(Def), (element(1,Def)=='ObjectFromObject') ->
+% fixa;
+ {po,{object,DefObj},ArgsList} ->
+ {_,Object} = get_referenced_type(S,DefObj),%DefObj is a
+ %%#'Externalvaluereference' or a #'Externaltypereference'
+	    %% Maybe this call should be caught and, in case of an exception,
+	    %% a non-allocated parameterized object should be returned.
+ instantiate_po(S,ClassDef,Object,ArgsList);
+ #'Externalvaluereference'{} ->
+ {_,Object} = get_referenced_type(S,ObjectDef),
+ check_object(S,Object,Object#typedef.typespec);
+ _ ->
+ exit({error,{no_object,ObjectDef},S})
+ end,
+ Gen = gen_incl(S,NewObj#'Object'.def,
+ (ClassDef#classdef.typespec)#objectclass.fields),
+ NewObj#'Object'{classname=NewClassRef,gen=Gen};
+
+%%check_object(S,ObjSetDef,ObjSet=#type{def={pt,ObjSetRef,Args}}) ->
+ %% A parameterized
+
+check_object(S,
+ _ObjSetDef,
+ ObjSet=#'ObjectSet'{class=ClassRef}) ->
+ {_,ClassDef} = get_referenced_type(S,ClassRef),
+ NewClassRef = check_externaltypereference(S,ClassRef),
+ UniqueFieldName =
+ case (catch get_unique_fieldname(ClassDef)) of
+ {error,'__undefined_'} -> {unique,undefined};
+ {asn1,Msg,_} -> error({class,Msg,S});
+ Other -> Other
+ end,
+ NewObjSet=
+ case ObjSet#'ObjectSet'.set of
+ {'SingleValue',Set} when list(Set) ->
+ CheckedSet = check_object_list(S,NewClassRef,Set),
+ NewSet = get_unique_valuelist(S,CheckedSet,UniqueFieldName),
+ ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
+ set=NewSet};
+ {'SingleValue',{definedvalue,ObjName}} ->
+ {_,ObjDef} = get_referenced_type(S,#identifier{val=ObjName}),
+ #'Object'{def=CheckedObj} =
+ check_object(S,ObjDef,ObjDef#typedef.typespec),
+ NewSet = get_unique_valuelist(S,[{ObjDef#typedef.name,
+ CheckedObj}],
+ UniqueFieldName),
+ ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
+ set=NewSet};
+ {'SingleValue',#'Externalvaluereference'{value=ObjName}} ->
+ {_,ObjDef} = get_referenced_type(S,#identifier{val=ObjName}),
+ #'Object'{def=CheckedObj} =
+ check_object(S,ObjDef,ObjDef#typedef.typespec),
+ NewSet = get_unique_valuelist(S,[{ObjDef#typedef.name,
+ CheckedObj}],
+ UniqueFieldName),
+ ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
+ set=NewSet};
+ ['EXTENSIONMARK'] ->
+ ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
+ set=['EXTENSIONMARK']};
+ Set when list(Set) ->
+ CheckedSet = check_object_list(S,NewClassRef,Set),
+ NewSet = get_unique_valuelist(S,CheckedSet,UniqueFieldName),
+ ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
+ set=NewSet};
+ {Set,Ext} when list(Set) ->
+ CheckedSet = check_object_list(S,NewClassRef,Set++Ext),
+ NewSet = get_unique_valuelist(S,CheckedSet,UniqueFieldName),
+ ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
+ set=NewSet++['EXTENSIONMARK']};
+ {{'SingleValue',Set},Ext} ->
+ CheckedSet = check_object_list(S,NewClassRef,
+ merge_sets(Set,Ext)),
+ NewSet = get_unique_valuelist(S,CheckedSet,UniqueFieldName),
+ ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
+ set=NewSet++['EXTENSIONMARK']};
+ {Type,{'EXCEPT',Exclusion}} when record(Type,type) ->
+ {_,TDef} = get_referenced_type(S,Type#type.def),
+ OS = TDef#typedef.typespec,
+ NewSet = reduce_objectset(OS#'ObjectSet'.set,Exclusion),
+ NewOS = OS#'ObjectSet'{set=NewSet},
+ check_object(S,TDef#typedef{typespec=NewOS},
+ NewOS);
+ #type{def={pt,DefinedObjSet,ParamList}} ->
+ {_,PObjSetDef} = get_referenced_type(S,DefinedObjSet),
+ instantiate_pos(S,ClassDef,PObjSetDef,ParamList);
+ {ObjDef={object,definedsyntax,_ObjFields},_Ext} ->
+ CheckedSet = check_object_list(S,NewClassRef,[ObjDef]),
+ NewSet = get_unique_valuelist(S,CheckedSet,UniqueFieldName),
+ ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
+ set=NewSet++['EXTENSIONMARK']}
+ end,
+ Gen = gen_incl_set(S,NewObjSet#'ObjectSet'.set,
+ ClassDef),
+ NewObjSet#'ObjectSet'{class=NewClassRef,gen=Gen}.
+
+
+merge_sets(Set,Ext) when list(Set),list(Ext) ->
+ Set ++ Ext;
+merge_sets(Set,Ext) when list(Ext) ->
+ [Set|Ext];
+merge_sets(Set,{'SingleValue',Ext}) when list(Set) ->
+ Set ++ [Ext];
+merge_sets(Set,{'SingleValue',Ext}) ->
+ [Set] ++ [Ext].
+
+reduce_objectset(ObjectSet,Exclusion) ->
+ case Exclusion of
+ {'SingleValue',#'Externalvaluereference'{value=Name}} ->
+ case lists:keysearch(Name,1,ObjectSet) of
+ {value,El} ->
+ lists:subtract(ObjectSet,[El]);
+ _ ->
+ ObjectSet
+ end
+ end.
+
+%% Checks a list of objects or object sets and returns a list of selected
+%% information for the code generation.
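+%% The returned list (roughly) pairs each object's name with its checked
+%% definition, e.g. [{object1,Def1},{no_name,Def2},...], where no_name is
+%% used for objects given inline in the set.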
+check_object_list(S,ClassRef,ObjectList) ->
+ check_object_list(S,ClassRef,ObjectList,[]).
+
+check_object_list(S,ClassRef,[ObjOrSet|Objs],Acc) ->
+ case ObjOrSet of
+ ObjDef when tuple(ObjDef),(element(1,ObjDef)==object) ->
+ Def =
+ check_object(S,#typedef{typespec=ObjDef},
+% #'Object'{classname={objectclassname,ClassRef},
+ #'Object'{classname=ClassRef,
+ def=ObjDef}),
+ check_object_list(S,ClassRef,Objs,[{no_name,Def#'Object'.def}|Acc]);
+ {'SingleValue',{definedvalue,ObjName}} ->
+ {_,ObjectDef} = get_referenced_type(S,#identifier{val=ObjName}),
+ #'Object'{def=Def} = check_object(S,ObjectDef,ObjectDef#typedef.typespec),
+ check_object_list(S,ClassRef,Objs,[{ObjectDef#typedef.name,Def}|Acc]);
+ {'SingleValue',Ref = #'Externalvaluereference'{}} ->
+ {_,ObjectDef} = get_referenced_type(S,Ref),
+ #'Object'{def=Def} = check_object(S,ObjectDef,ObjectDef#typedef.typespec),
+ check_object_list(S,ClassRef,Objs,[{ObjectDef#typedef.name,Def}|Acc]);
+ ObjRef when record(ObjRef,'Externalvaluereference') ->
+ {_,ObjectDef} = get_referenced_type(S,ObjRef),
+ #'Object'{def=Def} = check_object(S,ObjectDef,ObjectDef#typedef.typespec),
+ check_object_list(S,ClassRef,Objs,
+%% [{ObjRef#'Externalvaluereference'.value,Def}|Acc]);
+ [{ObjectDef#typedef.name,Def}|Acc]);
+ {'ValueFromObject',{_,Object},FieldName} ->
+ {_,Def} = get_referenced_type(S,Object),
+%% TypeOrVal = get_fieldname_element(S,Def,FieldName);%% this must result in an object set
+ TypeDef = get_fieldname_element(S,Def,FieldName),
+ (TypeDef#typedef.typespec)#'ObjectSet'.set;
+ ObjSet when record(ObjSet,type) ->
+ ObjSetDef =
+ case ObjSet#type.def of
+ Ref when record(Ref,typereference);
+ record(Ref,'Externaltypereference') ->
+ {_,D} = get_referenced_type(S,ObjSet#type.def),
+ D;
+ Other ->
+ throw({asn1_error,{'unknown objectset',Other,S}})
+ end,
+ #'ObjectSet'{set=ObjectsInSet} =
+ check_object(S,ObjSetDef,ObjSetDef#typedef.typespec),
+ AccList = transform_set_to_object_list(ObjectsInSet,[]),
+ check_object_list(S,ClassRef,Objs,AccList++Acc);
+ union ->
+ check_object_list(S,ClassRef,Objs,Acc);
+ Other ->
+ exit({error,{'unknown object',Other},S})
+ end;
+%% Finally, reverse the accumulated list and, if there are any extension
+%% marks in the object set, put a single indicator of that at the end of
+%% the list.
+check_object_list(_,_,[],Acc) ->
+ lists:reverse(Acc).
+%% case lists:member('EXTENSIONMARK',RevAcc) of
+%% true ->
+%% ExclRevAcc = lists:filter(fun(X)->X /= 'EXTENSIONMARK' end,
+%% RevAcc),
+%% ExclRevAcc ++ ['EXTENSIONMARK'];
+%% false ->
+%% RevAcc
+%% end.
+
+
+%% get_fieldname_element/3
+%% gets the type/value/object/... of the referenced element in FieldName
+%% FieldName is a list and may have more than one element.
+%% Each element in FieldName can be either {typefieldreference,AnyFieldName}
+%% or {valuefieldreference,AnyFieldName}
+%% Def is the def of the first object referenced by FieldName
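+%% Illustrative sketch (hypothetical names): a reference written as
+%%   myObject.&MyType  arrives here as [{typefieldreference,'MyType'}] and the
+%% matching #typedef{} among the object's fields is returned, whereas
+%%   myObject.&myValue arrives as [{valuefieldreference,myValue}] and the
+%% corresponding value is checked and returned.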
+get_fieldname_element(S,Def,[{_RefType,FieldName}]) when record(Def,typedef) ->
+ {_,_,ObjComps} = (Def#typedef.typespec)#'Object'.def,
+ case lists:keysearch(FieldName,1,ObjComps) of
+ {value,{_,TDef}} when record(TDef,typedef) ->
+ %% ORec = TDef#typedef.typespec, %% XXX This must be made general
+% case TDef#typedef.typespec of
+% ObjSetRec when record(ObjSetRec,'ObjectSet') ->
+% ObjSet = ObjSetRec#'ObjectSet'.set;
+% ObjRec when record(ObjRec,'Object') ->
+% %% now get the field in ObjRec that RestFName points out
+% %ObjRec
+% TDef
+% end;
+ TDef;
+ {value,{_,VDef}} when record(VDef,valuedef) ->
+ check_value(S,VDef);
+ _ ->
+ throw({assigned_object_error,"not_assigned_object",S})
+ end;
+get_fieldname_element(_S,Def,[{_RefType,_FieldName}|_RestFName])
+ when record(Def,typedef) ->
+ ok.
+
+transform_set_to_object_list([{Name,_UVal,Fields}|Objs],Acc) ->
+ transform_set_to_object_list(Objs,[{Name,{object,generatesyntax,Fields}}|Acc]);
+transform_set_to_object_list(['EXTENSIONMARK'|Objs],Acc) ->
+%% transform_set_to_object_list(Objs,['EXTENSIONMARK'|Acc]);
+ transform_set_to_object_list(Objs,Acc);
+transform_set_to_object_list([],Acc) ->
+ Acc.
+
+get_unique_valuelist(_S,ObjSet,{unique,undefined}) -> % no unique field in object
+ lists:map(fun({N,{_,_,F}})->{N,F};
+ (V={_,_,_}) ->V end, ObjSet);
+get_unique_valuelist(S,ObjSet,UFN) ->
+ get_unique_vlist(S,ObjSet,UFN,[]).
+
+get_unique_vlist(S,[],_,Acc) ->
+ case catch check_uniqueness(Acc) of
+ {asn1_error,_} ->
+% exit({error,Reason,S});
+ error({'ObjectSet',"not unique objects in object set",S});
+ true ->
+ lists:reverse(Acc)
+ end;
+get_unique_vlist(S,[{ObjName,Obj}|Rest],UniqueFieldName,Acc) ->
+ {_,_,Fields} = Obj,
+ VDef = get_unique_value(S,Fields,UniqueFieldName),
+ get_unique_vlist(S,Rest,UniqueFieldName,
+ [{ObjName,VDef#valuedef.value,Fields}|Acc]);
+get_unique_vlist(S,[V={_,_,_}|Rest],UniqueFieldName,Acc) ->
+ get_unique_vlist(S,Rest,UniqueFieldName,[V|Acc]).
+
+get_unique_value(S,Fields,UniqueFieldName) ->
+ Module = S#state.mname,
+ case lists:keysearch(UniqueFieldName,1,Fields) of
+ {value,Field} ->
+ case element(2,Field) of
+ VDef when record(VDef,valuedef) ->
+ VDef;
+ {definedvalue,ValName} ->
+ ValueDef = asn1_db:dbget(Module,ValName),
+ case ValueDef of
+ VDef when record(VDef,valuedef) ->
+ ValueDef;
+ undefined ->
+ #valuedef{value=ValName}
+ end;
+ {'ValueFromObject',Object,Name} ->
+ case Object of
+ {object,Ext} when record(Ext,'Externaltypereference') ->
+ OtherModule = Ext#'Externaltypereference'.module,
+ ExtObjName = Ext#'Externaltypereference'.type,
+ ObjDef = asn1_db:dbget(OtherModule,ExtObjName),
+ ObjSpec = ObjDef#typedef.typespec,
+ get_unique_value(OtherModule,element(3,ObjSpec),Name);
+ {object,{_,_,ObjName}} ->
+ ObjDef = asn1_db:dbget(Module,ObjName),
+ ObjSpec = ObjDef#typedef.typespec,
+ get_unique_value(Module,element(3,ObjSpec),Name);
+ {po,Object,_Params} ->
+ exit({error,{'parameterized object not implemented yet',
+ Object},S})
+ end;
+ Value when atom(Value);number(Value) ->
+ #valuedef{value=Value};
+ {'CHOICE',{_,Value}} when atom(Value);number(Value) ->
+ #valuedef{value=Value}
+ end;
+ false ->
+ exit({error,{'no unique value',Fields,UniqueFieldName},S})
+%% io:format("WARNING: no unique value in object"),
+%% exit(uniqueFieldName)
+ end.
+
+check_uniqueness(NameValueList) ->
+ check_uniqueness1(lists:keysort(2,NameValueList)).
+
+check_uniqueness1([]) ->
+ true;
+check_uniqueness1([_]) ->
+ true;
+check_uniqueness1([{_,N,_},{_,N,_}|_Rest]) ->
+ throw({asn1_error,{'objects in set must have unique values in UNIQUE fields',N}});
+check_uniqueness1([_|Rest]) ->
+ check_uniqueness1(Rest).
+
+%% instantiate_po/4
+%% ClassDef is the class of Object,
+%% Object is the Parameterized object, which is referenced,
+%% ArgsList is the list of actual parameters
+%% returns an #'Object' record.
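+%% Illustrative sketch (hypothetical names): if pObj is a parameterized object
+%% with formal parameter Type, a reference such as pObj{INTEGER} binds Type to
+%% INTEGER via match_args/3 before the object body is checked as an ordinary
+%% #'Object'{}.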
+instantiate_po(S,_ClassDef,Object,ArgsList) when record(Object,pobjectdef) ->
+ FormalParams = get_pt_args(Object),
+ MatchedArgs = match_args(FormalParams,ArgsList,[]),
+ NewS = S#state{type=Object,parameters=MatchedArgs},
+ check_object(NewS,Object,#'Object'{classname=Object#pobjectdef.class,
+ def=Object#pobjectdef.def}).
+
+%% instantiate_pos/4
+%% ClassDef is the class of ObjectSetDef,
+%% ObjectSetDef is the Parameterized object set, which is referenced
+%% on the right side of the assignment,
+%% ArgsList is the list of actual parameters, i.e. real objects
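+%% Illustrative sketch (hypothetical names): if ParamSet is a parameterized
+%% object set with a formal object parameter, a reference such as
+%% ParamSet{obj-a} binds the parameter to obj-a and the resulting set is then
+%% checked as an ordinary #'ObjectSet'{}.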
+instantiate_pos(S,ClassDef,ObjectSetDef,ArgsList) ->
+ ClassName = ClassDef#classdef.name,
+ FormalParams = get_pt_args(ObjectSetDef),
+ Set = case get_pt_spec(ObjectSetDef) of
+ {valueset,_Set} -> _Set;
+ _Set -> _Set
+ end,
+ MatchedArgs = match_args(FormalParams,ArgsList,[]),
+ NewS = S#state{type=ObjectSetDef,parameters=MatchedArgs},
+ check_object(NewS,ObjectSetDef,
+ #'ObjectSet'{class=name2Extref(S#state.mname,ClassName),
+ set=Set}).
+
+
+%% gen_incl -> boolean()
+%% If object with Fields has any of the corresponding class' typefields
+%% then return value is true otherwise it is false.
+%% If an object lacks a typefield but the class has a type field that
+%% is OPTIONAL then we want gen to be true
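+%% Illustrative sketch (hypothetical class): for a class defined as
+%%   CLASS-X ::= CLASS { &id INTEGER UNIQUE, &Type }
+%% gen_incl/3 returns true for any of its objects, because the class has a
+%% typefield (&Type); for a class with only value fields it returns false.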
+gen_incl(S,{_,_,Fields},CFields)->
+ gen_incl1(S,Fields,CFields).
+
+gen_incl1(_,_,[]) ->
+ false;
+gen_incl1(S,Fields,[C|CFields]) ->
+ case element(1,C) of
+ typefield ->
+% case lists:keymember(element(2,C),1,Fields) of
+% true ->
+% true;
+% false ->
+% gen_incl1(S,Fields,CFields)
+% end;
+ true; %% should check that the field is OPTIONAL or DEFAULT if
+ %% the object lacks this field
+ objectfield ->
+ case lists:keysearch(element(2,C),1,Fields) of
+ {value,Field} ->
+ Type = element(3,C),
+ {_,ClassDef} = get_referenced_type(S,Type#type.def),
+% {_,ClassFields,_} = ClassDef#classdef.typespec,
+ #objectclass{fields=ClassFields} =
+ ClassDef#classdef.typespec,
+ ObjTDef = element(2,Field),
+ case gen_incl(S,(ObjTDef#typedef.typespec)#'Object'.def,
+ ClassFields) of
+ true ->
+ true;
+ _ ->
+ gen_incl1(S,Fields,CFields)
+ end;
+ _ ->
+ gen_incl1(S,Fields,CFields)
+ end;
+ _ ->
+ gen_incl1(S,Fields,CFields)
+ end.
+
+%% First, if there is no unique field in the class, return false (don't generate code).
+gen_incl_set(S,Fields,ClassDef) ->
+ case catch get_unique_fieldname(ClassDef) of
+ Tuple when tuple(Tuple) ->
+ false;
+ _ ->
+ gen_incl_set1(S,Fields,
+ (ClassDef#classdef.typespec)#objectclass.fields)
+ end.
+
+%% if any of the existing or potentially existing objects has a typefield
+%% then return true.
+gen_incl_set1(_,[],_CFields)->
+ false;
+gen_incl_set1(_,['EXTENSIONMARK'],_) ->
+ true;
+%% Fields are the fields of an object in the object set.
+%% CFields are the fields of the class of the object set.
+gen_incl_set1(S,[Object|Rest],CFields)->
+ Fields = element(size(Object),Object),
+ case gen_incl1(S,Fields,CFields) of
+ true ->
+ true;
+ false ->
+ gen_incl_set1(S,Rest,CFields)
+ end.
+
+check_objectdefn(S,Def,CDef) when record(CDef,classdef) ->
+ WithSyntax = (CDef#classdef.typespec)#objectclass.syntax,
+ ClassFields = (CDef#classdef.typespec)#objectclass.fields,
+ case Def of
+ {object,defaultsyntax,Fields} ->
+ check_defaultfields(S,Fields,ClassFields);
+ {object,definedsyntax,Fields} ->
+ {_,WSSpec} = WithSyntax,
+ NewFields =
+ case catch( convert_definedsyntax(S,Fields,WSSpec,
+ ClassFields,[])) of
+ {asn1,{_ErrorType,ObjToken,ClassToken}} ->
+ throw({asn1,{'match error in object',ObjToken,
+ 'found in object',ClassToken,'found in class'}});
+ Err={asn1,_} -> throw(Err);
+ Err={'EXIT',_} -> throw(Err);
+ DefaultFields when list(DefaultFields) ->
+ DefaultFields
+ end,
+ {object,defaultsyntax,NewFields};
+ {object,_ObjectId} -> % This is a DefinedObject
+ fixa;
+ Other ->
+ exit({error,{objectdefn,Other}})
+ end.
+
+check_defaultfields(S,Fields,ClassFields) ->
+ check_defaultfields(S,Fields,ClassFields,[]).
+
+check_defaultfields(_S,[],_ClassFields,Acc) ->
+ {object,defaultsyntax,lists:reverse(Acc)};
+check_defaultfields(S,[{FName,Spec}|Fields],ClassFields,Acc) ->
+ case lists:keysearch(FName,2,ClassFields) of
+ {value,CField} ->
+ NewField = convert_to_defaultfield(S,FName,Spec,CField),
+ check_defaultfields(S,Fields,ClassFields,[NewField|Acc]);
+ _ ->
+ throw({error,{asn1,{'invalid field in object',FName}}})
+ end.
+%% {object,defaultsyntax,Fields}.
+
+convert_definedsyntax(_S,[],[],_ClassFields,Acc) ->
+ lists:reverse(Acc);
+convert_definedsyntax(S,Fields,WithSyntax,ClassFields,Acc) ->
+ case match_field(S,Fields,WithSyntax,ClassFields) of
+ {MatchedField,RestFields,RestWS} ->
+ if
+ list(MatchedField) ->
+ convert_definedsyntax(S,RestFields,RestWS,ClassFields,
+ lists:append(MatchedField,Acc));
+ true ->
+ convert_definedsyntax(S,RestFields,RestWS,ClassFields,
+ [MatchedField|Acc])
+ end
+%% throw({error,{asn1,{'unvalid syntax in object',WorS}}})
+ end.
+
+match_field(S,Fields,WithSyntax,ClassFields) ->
+ match_field(S,Fields,WithSyntax,ClassFields,[]).
+
+match_field(S,Fields,[W|Ws],ClassFields,Acc) when list(W) ->
+ case catch(match_optional_field(S,Fields,W,ClassFields,[])) of
+ {'EXIT',_} ->
+ match_field(S,Fields,Ws,ClassFields,Acc);
+%% {[Result],RestFields} ->
+%% {Result,RestFields,Ws};
+ {Result,RestFields} when list(Result) ->
+ {Result,RestFields,Ws};
+ _ ->
+ match_field(S,Fields,Ws,ClassFields,Acc)
+ end;
+match_field(S,Fields,WithSyntax,ClassFields,_Acc) ->
+ match_mandatory_field(S,Fields,WithSyntax,ClassFields,[]).
+
+match_optional_field(_S,RestFields,[],_,Ret) ->
+ {Ret,RestFields};
+%% An additional optional field within an optional field
+match_optional_field(S,Fields,[W|Ws],ClassFields,Ret) when list(W) ->
+ case catch match_optional_field(S,Fields,W,ClassFields,[]) of
+ {'EXIT',_} ->
+ {Ret,Fields};
+ {asn1,{optional_matcherror,_,_}} ->
+ {Ret,Fields};
+ {OptionalField,RestFields} ->
+ match_optional_field(S,RestFields,Ws,ClassFields,
+ lists:append(OptionalField,Ret))
+ end;
+%% identify and skip word
+%match_optional_field(S,[#'Externaltypereference'{type=WorS}|Rest],
+match_optional_field(S,[{_,_,WorS}|Rest],
+ [WorS|Ws],ClassFields,Ret) ->
+ match_optional_field(S,Rest,Ws,ClassFields,Ret);
+match_optional_field(S,[],_,ClassFields,Ret) ->
+ match_optional_field(S,[],[],ClassFields,Ret);
+%% identify and skip comma
+match_optional_field(S,[{WorS,_}|Rest],[{WorS,_}|Ws],ClassFields,Ret) ->
+ match_optional_field(S,Rest,Ws,ClassFields,Ret);
+%% identify and save field data
+match_optional_field(S,[Setting|Rest],[{_,W}|Ws],ClassFields,Ret) ->
+ WorS =
+ case Setting of
+ Type when record(Type,type) -> Type;
+%% #'Externalvaluereference'{value=WordOrSetting} -> WordOrSetting;
+ {'ValueFromObject',_,_} -> Setting;
+ {object,_,_} -> Setting;
+ {_,_,WordOrSetting} -> WordOrSetting;
+%% Atom when atom(Atom) -> Atom
+ Other -> Other
+ end,
+ case lists:keysearch(W,2,ClassFields) of
+ false ->
+ throw({asn1,{optional_matcherror,WorS,W}});
+ {value,CField} ->
+ NewField = convert_to_defaultfield(S,W,WorS,CField),
+ match_optional_field(S,Rest,Ws,ClassFields,[NewField|Ret])
+ end;
+match_optional_field(_S,[WorS|_Rest],[W|_Ws],_ClassFields,_Ret) ->
+ throw({asn1,{optional_matcherror,WorS,W}}).
+
+match_mandatory_field(_S,[],[],_,[Acc]) ->
+ {Acc,[],[]};
+match_mandatory_field(_S,[],[],_,Acc) ->
+ {Acc,[],[]};
+match_mandatory_field(S,[],[H|T],CF,Acc) when list(H) ->
+ match_mandatory_field(S,[],T,CF,Acc);
+match_mandatory_field(_S,[],WithSyntax,_,_Acc) ->
+ throw({asn1,{mandatory_matcherror,[],WithSyntax}});
+%match_mandatory_field(_S,Fields,WithSyntax=[W|_Ws],_ClassFields,[Acc]) when list(W) ->
+match_mandatory_field(_S,Fields,WithSyntax=[W|_Ws],_ClassFields,Acc) when list(W), length(Acc) >= 1 ->
+ {Acc,Fields,WithSyntax};
+%% identify and skip word
+match_mandatory_field(S,[{_,_,WorS}|Rest],
+ [WorS|Ws],ClassFields,Acc) ->
+ match_mandatory_field(S,Rest,Ws,ClassFields,Acc);
+%% identify and skip comma
+match_mandatory_field(S,[{WorS,_}|Rest],[{WorS,_}|Ws],ClassFields,Ret) ->
+ match_mandatory_field(S,Rest,Ws,ClassFields,Ret);
+%% identify and save field data
+match_mandatory_field(S,[Setting|Rest],[{_,W}|Ws],ClassFields,Acc) ->
+ WorS =
+ case Setting of
+%% Atom when atom(Atom) -> Atom;
+%% #'Externalvaluereference'{value=WordOrSetting} -> WordOrSetting;
+ {object,_,_} -> Setting;
+ {_,_,WordOrSetting} -> WordOrSetting;
+ Type when record(Type,type) -> Type;
+ Other -> Other
+ end,
+ case lists:keysearch(W,2,ClassFields) of
+ false ->
+ throw({asn1,{mandatory_matcherror,WorS,W}});
+ {value,CField} ->
+ NewField = convert_to_defaultfield(S,W,WorS,CField),
+ match_mandatory_field(S,Rest,Ws,ClassFields,[NewField|Acc])
+ end;
+
+match_mandatory_field(_S,[WorS|_Rest],[W|_Ws],_ClassFields,_Acc) ->
+ throw({asn1,{mandatory_matcherror,WorS,W}}).
+
+%% Converts a field of an object from defined syntax to default syntax
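+%% Illustrative sketch (hypothetical fields): for a typefield 'Type' a setting
+%% of INTEGER is returned as {'Type',#typedef{}}, and for a fixed type value
+%% field id a setting of 1 is returned as {id,#valuedef{}} after checking.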
+convert_to_defaultfield(S,ObjFieldName,ObjFieldSetting,CField)->
+ CurrMod = S#state.mname,
+ case element(1,CField) of
+ typefield ->
+ TypeDef=
+ case ObjFieldSetting of
+ TypeRec when record(TypeRec,type) -> TypeRec#type.def;
+ TDef when record(TDef,typedef) ->
+ TDef#typedef{typespec=check_type(S,TDef,
+ TDef#typedef.typespec)};
+ _ -> ObjFieldSetting
+ end,
+ Type =
+ if
+ record(TypeDef,typedef) -> TypeDef;
+ true ->
+ case asn1ct_gen:type(asn1ct_gen:get_inner(TypeDef)) of
+ ERef = #'Externaltypereference'{module=CurrMod} ->
+ {_,T} = get_referenced_type(S,ERef),
+ T#typedef{checked=true,
+ typespec=check_type(S,T,
+ T#typedef.typespec)};
+ ERef = #'Externaltypereference'{module=ExtMod} ->
+ {_,T} = get_referenced_type(S,ERef),
+ #typedef{name=Name} = T,
+ check_type(S,T,T#typedef.typespec),
+ #typedef{checked=true,
+ name={ExtMod,Name},
+ typespec=ERef};
+ Bif when Bif=={primitive,bif};Bif=={constructed,bif} ->
+ T = check_type(S,#typedef{typespec=ObjFieldSetting},
+ ObjFieldSetting),
+ #typedef{checked=true,name=Bif,typespec=T};
+ _ ->
+ {Mod,T} =
+ %% get_referenced_type(S,#typereference{val=ObjFieldSetting}),
+ get_referenced_type(S,#'Externaltypereference'{module=S#state.mname,type=ObjFieldSetting}),
+ case Mod of
+ CurrMod ->
+ T;
+ ExtMod ->
+ #typedef{name=Name} = T,
+ T#typedef{name={ExtMod,Name}}
+ end
+ end
+ end,
+ {ObjFieldName,Type};
+ fixedtypevaluefield ->
+ case ObjFieldName of
+ Val when atom(Val) ->
+ %% ObjFieldSetting can be a value, an objectidentifiervalue,
+ %% an element in an enumeration or namednumberlist etc.
+ ValRef =
+ case ObjFieldSetting of
+ #'Externalvaluereference'{} -> ObjFieldSetting;
+ {'ValueFromObject',{_,ObjRef},FieldName} ->
+ {_,Object} = get_referenced_type(S,ObjRef),
+ ChObject = check_object(S,Object,
+ Object#typedef.typespec),
+ get_fieldname_element(S,Object#typedef{typespec=ChObject},
+ FieldName);
+ #valuedef{} ->
+ ObjFieldSetting;
+ _ ->
+ #identifier{val=ObjFieldSetting}
+ end,
+ case ValRef of
+ #valuedef{} ->
+ {ObjFieldName,check_value(S,ValRef)};
+ _ ->
+ ValDef =
+ case catch get_referenced_type(S,ValRef) of
+ {error,_} ->
+ check_value(S,#valuedef{name=Val,
+ type=element(3,CField),
+ value=ObjFieldSetting});
+ {_,VDef} when record(VDef,valuedef) ->
+ check_value(S,VDef);%% XXX
+ {_,VDef} ->
+ check_value(S,#valuedef{name=Val,
+ type=element(3,CField),
+ value=VDef})
+ end,
+ {ObjFieldName,ValDef}
+ end;
+ Val ->
+ {ObjFieldName,Val}
+ end;
+ fixedtypevaluesetfield ->
+ {ObjFieldName,ObjFieldSetting};
+ objectfield ->
+ ObjectSpec =
+ case ObjFieldSetting of
+ Ref when record(Ref,typereference);record(Ref,identifier);
+ record(Ref,'Externaltypereference');
+ record(Ref,'Externalvaluereference') ->
+ {_,R} = get_referenced_type(S,ObjFieldSetting),
+ R;
+ {'ValueFromObject',{_,ObjRef},FieldName} ->
+ %% This is an ObjectFromObject
+ {_,Object} = get_referenced_type(S,ObjRef),
+ ChObject = check_object(S,Object,
+ Object#typedef.typespec),
+ _ObjFromObj=
+ get_fieldname_element(S,Object#typedef{
+ typespec=ChObject},
+ FieldName);
+ %%ClassName = ObjFromObj#'Object'.classname,
+ %%#typedef{name=,
+ %% typespec=
+ %% ObjFromObj#'Object'{classname=
+ %% {objectclassname,ClassName}}};
+ {object,_,_} ->
+ %% An object defined inline in another object
+ #type{def=Ref} = element(3,CField),
+% CRef = case Ref of
+% #'Externaltypereference'{module=CurrMod,
+% type=CName} ->
+% CName;
+% #'Externaltypereference'{module=ExtMod,
+% type=CName} ->
+% {ExtMod,CName}
+% end,
+ InlinedObjName=
+ list_to_atom(lists:concat([S#state.tname]++
+ ['_',ObjFieldName])),
+% ObjSpec = #'Object'{classname={objectclassname,CRef},
+ ObjSpec = #'Object'{classname=Ref,
+ def=ObjFieldSetting},
+ CheckedObj=
+ check_object(S,#typedef{typespec=ObjSpec},ObjSpec),
+ InlObj = #typedef{checked=true,name=InlinedObjName,
+ typespec=CheckedObj},
+ asn1ct_gen:insert_once(inlined_objects,{InlinedObjName,
+ InlinedObjName}),
+ asn1_db:dbput(S#state.mname,InlinedObjName,InlObj),
+ InlObj;
+ #type{def=Eref} when record(Eref,'Externaltypereference') ->
+ {_,R} = get_referenced_type(S,Eref),
+ R;
+ _ ->
+%% {_,R} = get_referenced_type(S,#typereference{val=ObjFieldSetting}),
+ {_,R} = get_referenced_type(S,#'Externaltypereference'{module=S#state.mname,type=ObjFieldSetting}),
+ R
+ end,
+ {ObjFieldName,
+ ObjectSpec#typedef{checked=true,
+ typespec=check_object(S,ObjectSpec,
+ ObjectSpec#typedef.typespec)}};
+ variabletypevaluefield ->
+ {ObjFieldName,ObjFieldSetting};
+ variabletypevaluesetfield ->
+ {ObjFieldName,ObjFieldSetting};
+ objectsetfield ->
+ {_,ObjSetSpec} =
+ case ObjFieldSetting of
+ Ref when record(Ref,'Externaltypereference');
+ record(Ref,'Externalvaluereference') ->
+ get_referenced_type(S,ObjFieldSetting);
+ ObjectList when list(ObjectList) ->
+ %% an object set defined in the object, though it may be
+ %% parsed as a SequenceOfValue
+ %% The ObjectList may be a list of references to
+ %% objects, or a ValueFromObject
+ {_,_,Type,_} = CField,
+ ClassDef = Type#type.def,
+ case ClassDef#'Externaltypereference'.module of
+ CurrMod ->
+ ClassDef#'Externaltypereference'.type;
+ ExtMod ->
+ {ExtMod,
+ ClassDef#'Externaltypereference'.type}
+ end,
+ {no_name,
+ #typedef{typespec=
+ #'ObjectSet'{class=
+% {objectclassname,ClassRef},
+ ClassDef,
+ set=ObjectList}}};
+ ObjectSet={'SingleValue',_} ->
+ %% a Union of defined objects
+ {_,_,Type,_} = CField,
+ ClassDef = Type#type.def,
+% ClassRef =
+% case ClassDef#'Externaltypereference'.module of
+% CurrMod ->
+% ClassDef#'Externaltypereference'.type;
+% ExtMod ->
+% {ExtMod,
+% ClassDef#'Externaltypereference'.type}
+% end,
+ {no_name,
+% #typedef{typespec=#'ObjectSet'{class={objectclassname,ClassRef},
+ #typedef{typespec=#'ObjectSet'{class=ClassDef,
+ set=ObjectSet}}};
+ {object,_,[#type{def={'TypeFromObject',
+ {object,RefedObj},
+ FieldName}}]} ->
+ %% This case occurs when an ObjectSetFromObjects
+ %% production is used
+ {M,Def} = get_referenced_type(S,RefedObj),
+ {M,get_fieldname_element(S,Def,FieldName)};
+ #type{def=Eref} when
+ record(Eref,'Externaltypereference') ->
+ get_referenced_type(S,Eref);
+ _ ->
+%% get_referenced_type(S,#typereference{val=ObjFieldSetting})
+ get_referenced_type(S,#'Externaltypereference'{module=S#state.mname,type=ObjFieldSetting})
+ end,
+ {ObjFieldName,
+ ObjSetSpec#typedef{checked=true,
+ typespec=check_object(S,ObjSetSpec,
+ ObjSetSpec#typedef.typespec)}}
+ end.
+
+check_value(OldS,V) when record(V,pvaluesetdef) ->
+ #pvaluesetdef{checked=Checked,type=Type} = V,
+ case Checked of
+ true -> V;
+ {error,_} -> V;
+ false ->
+ case get_referenced_type(OldS,Type#type.def) of
+ {_,Class} when record(Class,classdef) ->
+ throw({pobjectsetdef});
+ _ -> continue
+ end
+ end;
+check_value(_OldS,V) when record(V,pvaluedef) ->
+ %% Fix this case later
+ V;
+check_value(OldS,V) when record(V,typedef) ->
+ %% This case occurs when a value set has been parsed as an object set.
+ %% It may still be a value set.
+ #typedef{typespec=TS} = V,
+ case TS of
+ #'ObjectSet'{class=ClassRef} ->
+ {_,TSDef} = get_referenced_type(OldS,ClassRef),
+ %%IsObjectSet(TSDef);
+ case TSDef of
+ #classdef{} -> throw({objectsetdef});
+ #typedef{typespec=#type{def=Eref}} when
+ record(Eref,'Externaltypereference') ->
+ %% This case applies if the class reference is a defined
+ %% reference to a class
+ check_value(OldS,V#typedef{typespec=TS#'ObjectSet'{class=Eref}});
+ #typedef{} ->
+ % an ordinary value set with a type in #typedef.typespec
+ ValueSet = TS#'ObjectSet'.set,
+ Type=check_type(OldS,TSDef,TSDef#typedef.typespec),
+ Value = check_value(OldS,#valuedef{type=Type,
+ value=ValueSet}),
+ {valueset,Type#type{constraint=Value#valuedef.value}}
+ end;
+ _ ->
+ throw({objectsetdef})
+ end;
+check_value(S,#valuedef{pos=Pos,name=Name,type=Type,
+ value={valueset,Constr}}) ->
+ NewType = Type#type{constraint=[Constr]},
+ {valueset,
+ check_type(S,#typedef{pos=Pos,name=Name,typespec=NewType},NewType)};
+check_value(OldS=#state{recordtopname=TopName},V) when record(V,valuedef) ->
+ #valuedef{name=Name,checked=Checked,type=Vtype,value=Value} = V,
+ case Checked of
+ true ->
+ V;
+ {error,_} ->
+ V;
+ false ->
+ Def = Vtype#type.def,
+ Constr = Vtype#type.constraint,
+ S = OldS#state{type=Vtype,tname=Def,value=V,vname=Name},
+ NewDef =
+ case Def of
+ Ext when record(Ext,'Externaltypereference') ->
+ RecName = Ext#'Externaltypereference'.type,
+ {_,Type} = get_referenced_type(S,Ext),
+ %% If V isn't a value but an object, Type is a #classdef{}
+ case Type of
+ #classdef{} ->
+ throw({objectdef});
+ #typedef{} ->
+ case is_contextswitchtype(Type) of
+ true ->
+ #valuedef{value=CheckedVal}=
+ check_value(S,V#valuedef{type=Type#typedef.typespec}),
+ #newv{value=CheckedVal};
+ _ ->
+ #valuedef{value=CheckedVal}=
+ check_value(S#state{recordtopname=[RecName|TopName]},
+ V#valuedef{type=Type#typedef.typespec}),
+ #newv{value=CheckedVal}
+ end
+ end;
+ 'ANY' ->
+ throw({error,{asn1,{'cant check value of type',Def}}});
+ 'INTEGER' ->
+ validate_integer(S,Value,[],Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ {'INTEGER',NamedNumberList} ->
+ validate_integer(S,Value,NamedNumberList,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ {'BIT STRING',NamedNumberList} ->
+ validate_bitstring(S,Value,NamedNumberList,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'NULL' ->
+ validate_null(S,Value,Constr),
+ #newv{};
+ 'OBJECT IDENTIFIER' ->
+ validate_objectidentifier(S,Value,Constr),
+ #newv{value = normalize_value(S,Vtype,Value,[])};
+ 'ObjectDescriptor' ->
+ validate_objectdescriptor(S,Value,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ {'ENUMERATED',NamedNumberList} ->
+ validate_enumerated(S,Value,NamedNumberList,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'BOOLEAN'->
+ validate_boolean(S,Value,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'OCTET STRING' ->
+ validate_octetstring(S,Value,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'NumericString' ->
+ validate_restrictedstring(S,Value,Def,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'TeletexString' ->
+ validate_restrictedstring(S,Value,Def,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'VideotexString' ->
+ validate_restrictedstring(S,Value,Def,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'UTCTime' ->
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+% exit({'cant check value of type' ,Def});
+ 'GeneralizedTime' ->
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+% exit({'cant check value of type' ,Def});
+ 'GraphicString' ->
+ validate_restrictedstring(S,Value,Def,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'VisibleString' ->
+ validate_restrictedstring(S,Value,Def,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'GeneralString' ->
+ validate_restrictedstring(S,Value,Def,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'PrintableString' ->
+ validate_restrictedstring(S,Value,Def,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'IA5String' ->
+ validate_restrictedstring(S,Value,Def,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+ 'BMPString' ->
+ validate_restrictedstring(S,Value,Def,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,[])};
+%% 'UniversalString' -> %added 6/12 -00
+%% #newv{value=validate_restrictedstring(S,Value,Def,Constr)};
+ Seq when record(Seq,'SEQUENCE') ->
+ SeqVal = validate_sequence(S,Value,
+ Seq#'SEQUENCE'.components,
+ Constr),
+ #newv{value=normalize_value(S,Vtype,SeqVal,TopName)};
+ {'SEQUENCE OF',Components} ->
+ validate_sequenceof(S,Value,Components,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,TopName)};
+ {'CHOICE',Components} ->
+ validate_choice(S,Value,Components,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,TopName)};
+ Set when record(Set,'SET') ->
+ validate_set(S,Value,Set#'SET'.components,
+ Constr),
+ #newv{value=normalize_value(S,Vtype,Value,TopName)};
+ {'SET OF',Components} ->
+ validate_setof(S,Value,Components,Constr),
+ #newv{value=normalize_value(S,Vtype,Value,TopName)};
+ Other ->
+ exit({'cant check value of type' ,Other})
+ end,
+ case NewDef#newv.value of
+ unchanged ->
+ V#valuedef{checked=true,value=Value};
+ ok ->
+ V#valuedef{checked=true,value=Value};
+ {error,Reason} ->
+ V#valuedef{checked={error,Reason},value=Value};
+ _V ->
+ V#valuedef{checked=true,value=_V}
+ end
+ end.
+
+is_contextswitchtype(#typedef{name='EXTERNAL'})->
+ true;
+is_contextswitchtype(#typedef{name='EMBEDDED PDV'}) ->
+ true;
+is_contextswitchtype(#typedef{name='CHARACTER STRING'}) ->
+ true;
+is_contextswitchtype(_) ->
+ false.
+
+% validate_integer(S,{identifier,Pos,Id},NamedNumberList,Constr) ->
+% case lists:keysearch(Id,1,NamedNumberList) of
+% {value,_} -> ok;
+% false -> error({value,"unknown NamedNumber",S})
+% end;
+%% This case occurs when there is a valuereference
+validate_integer(S=#state{mname=M},
+ #'Externalvaluereference'{module=M,value=Id},
+ NamedNumberList,_Constr) ->
+ case lists:keysearch(Id,1,NamedNumberList) of
+ {value,_} -> ok;
+ false -> error({value,"unknown NamedNumber",S})
+ end;
+validate_integer(S,Id,NamedNumberList,_Constr) when atom(Id) ->
+ case lists:keysearch(Id,1,NamedNumberList) of
+ {value,_} -> ok;
+ false -> error({value,"unknown NamedNumber",S})
+ end;
+validate_integer(_S,Value,_NamedNumberList,Constr) when integer(Value) ->
+ check_integer_range(Value,Constr).
+
+check_integer_range(Int,Constr) when list(Constr) ->
+ NewConstr = [X || #constraint{c=X} <- Constr],
+ check_constr(Int,NewConstr);
+
+check_integer_range(_Int,_Constr) ->
+ %%io:format("~p~n",[Constr]),
+ ok.
+
+check_constr(Int,[{'ValueRange',Lb,Ub}|T]) when Int >= Lb, Int =< Ub ->
+ check_constr(Int,T);
+check_constr(_Int,[]) ->
+ ok.
+
+validate_bitstring(_S,_Value,_NamedNumberList,_Constr) ->
+ ok.
+
+validate_null(_S,'NULL',_Constr) ->
+ ok.
+
+%%------------
+%% This can be removed when the old parser is removed
+%% The function removes 'space' atoms from the list
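+%% Illustrative example: is_space_list([a,space,b,space,c],[]) returns [a,b,c].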
+
+is_space_list([H],Acc) ->
+ lists:reverse([H|Acc]);
+is_space_list([H,space|T],Acc) ->
+ is_space_list(T,[H|Acc]);
+is_space_list([],Acc) ->
+ lists:reverse(Acc);
+is_space_list([H|T],Acc) ->
+ is_space_list(T,[H|Acc]).
+
+validate_objectidentifier(S,L,_) ->
+ case is_space_list(L,[]) of
+ NewL when list(NewL) ->
+ case validate_objectidentifier1(S,NewL) of
+ NewL2 when list(NewL2) ->
+ list_to_tuple(NewL2);
+ Other -> Other
+ end;
+ {error,_} ->
+ error({value, "illegal OBJECT IDENTIFIER", S})
+ end.
+
+validate_objectidentifier1(S, [Id|T]) when record(Id,'Externalvaluereference') ->
+ case catch get_referenced_type(S,Id) of
+ {_,V} when record(V,valuedef) ->
+ case check_value(S,V) of
+ #valuedef{type=#type{def='OBJECT IDENTIFIER'},
+ checked=true,value=Value} when tuple(Value) ->
+ validate_objectid(S, T, lists:reverse(tuple_to_list(Value)));
+ _ ->
+ error({value, "illegal OBJECT IDENTIFIER", S})
+ end;
+ _ ->
+ validate_objectid(S, [Id|T], [])
+ end;
+validate_objectidentifier1(S,V) ->
+ validate_objectid(S,V,[]).
+
+validate_objectid(_, [], Acc) ->
+ lists:reverse(Acc);
+validate_objectid(S, [Value|Vrest], Acc) when integer(Value) ->
+ validate_objectid(S, Vrest, [Value|Acc]);
+validate_objectid(S, [{'NamedNumber',_Name,Value}|Vrest], Acc)
+ when integer(Value) ->
+ validate_objectid(S, Vrest, [Value|Acc]);
+validate_objectid(S, [Id|Vrest], Acc)
+ when record(Id,'Externalvaluereference') ->
+ case catch get_referenced_type(S, Id) of
+ {_,V} when record(V,valuedef) ->
+ case check_value(S, V) of
+ #valuedef{checked=true,value=Value} when integer(Value) ->
+ validate_objectid(S, Vrest, [Value|Acc]);
+ _ ->
+ error({value, "illegal OBJECT IDENTIFIER", S})
+ end;
+ _ ->
+ case reserved_objectid(Id#'Externalvaluereference'.value, Acc) of
+ Value when integer(Value) ->
+ validate_objectid(S, Vrest, [Value|Acc]);
+ false ->
+ error({value, "illegal OBJECT IDENTIFIER", S})
+ end
+ end;
+validate_objectid(S, [{Atom,Value}],[]) when atom(Atom),integer(Value) ->
+ %% this case occurs when an OBJECT IDENTIFIER value has been parsed as a
+ %% SEQUENCE value
+ Rec = #'Externalvaluereference'{module=S#state.mname,
+ value=Atom},
+ validate_objectidentifier1(S,[Rec,Value]);
+validate_objectid(S, [{Atom,EVRef}],[])
+ when atom(Atom),record(EVRef,'Externalvaluereference') ->
+ %% this case occurs when an OBJECT IDENTIFIER value has been parsed as a
+ %% SEQUENCE value OTP-4354
+ Rec = #'Externalvaluereference'{module=S#state.mname,
+ value=Atom},
+ validate_objectidentifier1(S,[Rec,EVRef]);
+validate_objectid(S, _V, _Acc) ->
+ error({value, "illegal OBJECT IDENTIFIER",S}).
+
+
+%% ITU-T Rec. X.680 Annex B - D
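+%% Illustrative example: for the value {itu-t recommendation x 680} the
+%% reserved identifiers resolve to 0, 0 and 24, giving the OID {0,0,24,680}.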
+reserved_objectid('itu-t',[]) -> 0;
+reserved_objectid('ccitt',[]) -> 0;
+%% arcs below "itu-t"
+reserved_objectid('recommendation',[0]) -> 0;
+reserved_objectid('question',[0]) -> 1;
+reserved_objectid('administration',[0]) -> 2;
+reserved_objectid('network-operator',[0]) -> 3;
+reserved_objectid('identified-organization',[0]) -> 4;
+%% arcs below "recommendation"
+reserved_objectid('a',[0,0]) -> 1;
+reserved_objectid('b',[0,0]) -> 2;
+reserved_objectid('c',[0,0]) -> 3;
+reserved_objectid('d',[0,0]) -> 4;
+reserved_objectid('e',[0,0]) -> 5;
+reserved_objectid('f',[0,0]) -> 6;
+reserved_objectid('g',[0,0]) -> 7;
+reserved_objectid('h',[0,0]) -> 8;
+reserved_objectid('i',[0,0]) -> 9;
+reserved_objectid('j',[0,0]) -> 10;
+reserved_objectid('k',[0,0]) -> 11;
+reserved_objectid('l',[0,0]) -> 12;
+reserved_objectid('m',[0,0]) -> 13;
+reserved_objectid('n',[0,0]) -> 14;
+reserved_objectid('o',[0,0]) -> 15;
+reserved_objectid('p',[0,0]) -> 16;
+reserved_objectid('q',[0,0]) -> 17;
+reserved_objectid('r',[0,0]) -> 18;
+reserved_objectid('s',[0,0]) -> 19;
+reserved_objectid('t',[0,0]) -> 20;
+reserved_objectid('u',[0,0]) -> 21;
+reserved_objectid('v',[0,0]) -> 22;
+reserved_objectid('w',[0,0]) -> 23;
+reserved_objectid('x',[0,0]) -> 24;
+reserved_objectid('y',[0,0]) -> 25;
+reserved_objectid('z',[0,0]) -> 26;
+
+
+reserved_objectid(iso,[]) -> 1;
+%% arcs below "iso", note that number 1 is not used
+reserved_objectid('standard',[1]) -> 0;
+reserved_objectid('member-body',[1]) -> 2;
+reserved_objectid('identified-organization',[1]) -> 3;
+
+reserved_objectid('joint-iso-itu-t',[]) -> 2;
+reserved_objectid('joint-iso-ccitt',[]) -> 2;
+
+reserved_objectid(_,_) -> false.
+
+
+
+
+
+validate_objectdescriptor(_S,_Value,_Constr) ->
+ ok.
+
+validate_enumerated(S,Id,NamedNumberList,_Constr) when atom(Id) ->
+ case lists:keysearch(Id,1,NamedNumberList) of
+ {value,_} -> ok;
+ false -> error({value,"unknown ENUMERATED",S})
+ end;
+validate_enumerated(S,{identifier,_Pos,Id},NamedNumberList,_Constr) ->
+ case lists:keysearch(Id,1,NamedNumberList) of
+ {value,_} -> ok;
+ false -> error({value,"unknown ENUMERATED",S})
+ end;
+validate_enumerated(S,#'Externalvaluereference'{value=Id},
+ NamedNumberList,_Constr) ->
+ case lists:keysearch(Id,1,NamedNumberList) of
+ {value,_} -> ok;
+ false -> error({value,"unknown ENUMERATED",S})
+ end.
+
+validate_boolean(_S,_Value,_Constr) ->
+ ok.
+
+validate_octetstring(_S,_Value,_Constr) ->
+ ok.
+
+validate_restrictedstring(_S,_Value,_Def,_Constr) ->
+ ok.
+
+validate_sequence(S=#state{type=Vtype},Value,_Components,_Constr) ->
+ case Vtype of
+ #type{tag=[{tag,'UNIVERSAL',8,'IMPLICIT',32}]} ->
+ %% this is an 'EXTERNAL' (or INSTANCE OF)
+ case Value of
+ [{identification,_}|_RestVal] ->
+ to_EXTERNAL1990(S,Value);
+ _ ->
+ Value
+ end;
+ _ ->
+ Value
+ end.
+
+validate_sequenceof(_S,_Value,_Components,_Constr) ->
+ ok.
+
+validate_choice(_S,_Value,_Components,_Constr) ->
+ ok.
+
+validate_set(_S,_Value,_Components,_Constr) ->
+ ok.
+
+validate_setof(_S,_Value,_Components,_Constr) ->
+ ok.
+
+to_EXTERNAL1990(S,[{identification,{'CHOICE',{syntax,Stx}}}|Rest]) ->
+ to_EXTERNAL1990(S,Rest,[{'direct-reference',Stx}]);
+to_EXTERNAL1990(S,[{identification,{'CHOICE',{'presentation-context-id',I}}}|Rest]) ->
+ to_EXTERNAL1990(S,Rest,[{'indirect-reference',I}]);
+to_EXTERNAL1990(S,[{identification,{'CHOICE',{'context-negotiation',[{_,PCid},{_,TrStx}]}}}|Rest]) ->
+ to_EXTERNAL1990(S,Rest,[{'indirect-reference',PCid},{'direct-reference',TrStx}]);
+to_EXTERNAL1990(S,_) ->
+ error({value,"illegal value in EXTERNAL type",S}).
+
+to_EXTERNAL1990(S,[V={'data-value-descriptor',_}|Rest],Acc) ->
+ to_EXTERNAL1990(S,Rest,[V|Acc]);
+to_EXTERNAL1990(_S,[{'data-value',Val}],Acc) ->
+ Encoding = {encoding,{'CHOICE',{'octet-aligned',Val}}},
+ lists:reverse([Encoding|Acc]);
+to_EXTERNAL1990(S,_,_) ->
+ error({value,"illegal value in EXTERNAL type",S}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Functions to normalize the default values of SEQUENCE
+%% and SET components into valid Erlang format
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
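+%% Illustrative sketch (hypothetical component): for a component such as
+%%   flag BOOLEAN DEFAULT TRUE
+%% the default is expected here as {'DEFAULT',true} and normalizes to the
+%% Erlang term true, while mandatory and 'OPTIONAL' pass through unchanged.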
+normalize_value(_,_,mandatory,_) ->
+ mandatory;
+normalize_value(_,_,'OPTIONAL',_) ->
+ 'OPTIONAL';
+normalize_value(S,Type,{'DEFAULT',Value},NameList) ->
+ case catch get_canonic_type(S,Type,NameList) of
+ {'BOOLEAN',CType,_} ->
+ normalize_boolean(S,Value,CType);
+ {'INTEGER',CType,_} ->
+ normalize_integer(S,Value,CType);
+ {'BIT STRING',CType,_} ->
+ normalize_bitstring(S,Value,CType);
+ {'OCTET STRING',CType,_} ->
+ normalize_octetstring(S,Value,CType);
+ {'NULL',_CType,_} ->
+ %%normalize_null(Value);
+ 'NULL';
+ {'OBJECT IDENTIFIER',_,_} ->
+ normalize_objectidentifier(S,Value);
+ {'ObjectDescriptor',_,_} ->
+ normalize_objectdescriptor(Value);
+ {'REAL',_,_} ->
+ normalize_real(Value);
+ {'ENUMERATED',CType,_} ->
+ normalize_enumerated(Value,CType);
+ {'CHOICE',CType,NewNameList} ->
+ normalize_choice(S,Value,CType,NewNameList);
+ {'SEQUENCE',CType,NewNameList} ->
+ normalize_sequence(S,Value,CType,NewNameList);
+ {'SEQUENCE OF',CType,NewNameList} ->
+ normalize_seqof(S,Value,CType,NewNameList);
+ {'SET',CType,NewNameList} ->
+ normalize_set(S,Value,CType,NewNameList);
+ {'SET OF',CType,NewNameList} ->
+ normalize_setof(S,Value,CType,NewNameList);
+ {restrictedstring,CType,_} ->
+ normalize_restrictedstring(S,Value,CType);
+ _ ->
+ io:format("WARNING: could not check default value ~p~n",[Value]),
+ Value
+ end;
+normalize_value(S,Type,Val,NameList) ->
+ normalize_value(S,Type,{'DEFAULT',Val},NameList).
+
+normalize_boolean(S,{Name,Bool},CType) when atom(Name) ->
+ normalize_boolean(S,Bool,CType);
+normalize_boolean(_,true,_) ->
+ true;
+normalize_boolean(_,false,_) ->
+ false;
+normalize_boolean(S,Bool=#'Externalvaluereference'{},CType) ->
+ get_normalized_value(S,Bool,CType,fun normalize_boolean/3,[]);
+normalize_boolean(_,Other,_) ->
+ throw({error,{asn1,{'invalid default value',Other}}}).
+
+normalize_integer(_S,Int,_) when integer(Int) ->
+ Int;
+normalize_integer(_S,{Name,Int},_) when atom(Name),integer(Int) ->
+ Int;
+normalize_integer(S,{Name,Int=#'Externalvaluereference'{}},
+ Type) when atom(Name) ->
+ normalize_integer(S,Int,Type);
+normalize_integer(S,Int=#'Externalvaluereference'{value=Name},Type) ->
+ case Type of
+ NNL when list(NNL) ->
+ case lists:keysearch(Name,1,NNL) of
+ {value,{Name,Val}} ->
+ Val;
+ false ->
+ get_normalized_value(S,Int,Type,
+ fun normalize_integer/3,[])
+ end;
+ _ ->
+ get_normalized_value(S,Int,Type,fun normalize_integer/3,[])
+ end;
+normalize_integer(_,Int,_) ->
+ exit({'Unknown INTEGER value',Int}).
+
+normalize_bitstring(S,Value,Type)->
+ %% There are four different Erlang formats of BIT STRING:
+ %% 1 - a list of ones and zeros.
+ %% 2 - a list of atoms.
+ %% 3 - as an integer, for instance in hexadecimal form.
+ %% 4 - as a tuple {Unused, Binary} where Unused is an integer
+ %% and tells how many bits of Binary are unused.
+ %%
+ %% Value can be in one of the following formats:
+ %% A - {hstring, String}, where String is a hexadecimal string.
+ %% B - {bstring, String}, where String is a string in bit format.
+ %% C - #'Externalvaluereference'{value=V}, where V is a defined value.
+ %% D - a list of #'Externalvaluereference', where each value component
+ %%     is an identifier corresponding to a NamedBit in Type.
+ %%
+ %% normalize_bitstring/3 transforms Value according to:
+ %% A to 3, B to 1, C to 1 or 3, D to 2.
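+ %% Illustrative examples (hypothetical values):
+ %%   {hstring,"0A"}  -> 10
+ %%   {bstring,"101"} -> [1,0,1]
+ %%   [#'Externalvaluereference'{value=version1}] with NamedBits
+ %%   [{version1,0}] in Type -> [version1]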
+ case Value of
+ {hstring,String} when list(String) ->
+ hstring_to_int(String);
+ {bstring,String} when list(String) ->
+ bstring_to_bitlist(String);
+ Rec when record(Rec,'Externalvaluereference') ->
+ get_normalized_value(S,Value,Type,
+ fun normalize_bitstring/3,[]);
+ RecList when list(RecList) ->
+ case Type of
+ NBL when list(NBL) ->
+ F = fun(#'Externalvaluereference'{value=Name}) ->
+ case lists:keysearch(Name,1,NBL) of
+ {value,{Name,_}} ->
+ Name;
+ Other ->
+ throw({error,Other})
+ end;
+ (Other) ->
+ throw({error,Other})
+ end,
+ case catch lists:map(F,RecList) of
+ {error,Reason} ->
+ io:format("WARNING: default value not "
+ "compatible with type definition ~p~n",
+ [Reason]),
+ Value;
+ NewList ->
+ NewList
+ end;
+ _ ->
+ io:format("WARNING: default value not "
+ "compatible with type definition ~p~n",
+ [RecList]),
+ Value
+ end;
+ {Name,String} when atom(Name) ->
+ normalize_bitstring(S,String,Type);
+ Other ->
+ io:format("WARNING: illegal default value ~p~n",[Other]),
+ Value
+ end.
+
+hstring_to_int(L) when list(L) ->
+ hstring_to_int(L,0).
+hstring_to_int([H|T],Acc) when H >= $A, H =< $F ->
+ hstring_to_int(T,(Acc bsl 4) + (H - $A + 10) ) ;
+hstring_to_int([H|T],Acc) when H >= $0, H =< $9 ->
+ hstring_to_int(T,(Acc bsl 4) + (H - $0));
+hstring_to_int([],Acc) ->
+ Acc.
+
+bstring_to_bitlist([H|T]) when H == $0; H == $1 ->
+ [H - $0 | bstring_to_bitlist(T)];
+bstring_to_bitlist([]) ->
+ [].
+
+%% normalize_octetstring/3 changes the representation of the input Value to a
+%% list of octets.
+%% Format of Value is one of:
+%% {bstring,String} each element in String corresponds to one bit of an octet
+%% {hstring,String} each element in String corresponds to half an octet (one hexadecimal digit)
+%% #'Externalvaluereference'
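+%% Illustrative examples (hypothetical values):
+%%   {hstring,"0FA0"}     -> [15,160]
+%%   {bstring,"00001111"} -> [15]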
+normalize_octetstring(S,Value,CType) ->
+ case Value of
+ {bstring,String} ->
+ bstring_to_octetlist(String);
+ {hstring,String} ->
+ hstring_to_octetlist(String);
+ Rec when record(Rec,'Externalvaluereference') ->
+ get_normalized_value(S,Value,CType,
+ fun normalize_octetstring/3,[]);
+ {Name,String} when atom(Name) ->
+ normalize_octetstring(S,String,CType);
+ List when list(List) ->
+ %% check if list elements are valid octet values
+ lists:map(fun([])-> ok;
+ (H)when H > 255->
+ io:format("WARNING: not legal octet value ~p in OCTET STRING, ~p~n",[H,List]);
+ (_)-> ok
+ end, List),
+ List;
+ Other ->
+ io:format("WARNING: unknown default value ~p~n",[Other]),
+ Value
+ end.
+
+
+bstring_to_octetlist([]) ->
+ [];
+bstring_to_octetlist([H|T]) when H == $0 ; H == $1 ->
+ bstring_to_octetlist(T,6,[(H - $0) bsl 7]).
+bstring_to_octetlist([H|T],0,[Hacc|Tacc]) when H == $0; H == $1 ->
+ bstring_to_octetlist(T, 7, [0,Hacc + (H -$0)| Tacc]);
+bstring_to_octetlist([H|T],BSL,[Hacc|Tacc]) when H == $0; H == $1 ->
+ bstring_to_octetlist(T, BSL-1, [Hacc + ((H - $0) bsl BSL)| Tacc]);
+bstring_to_octetlist([],7,[0|Acc]) ->
+ lists:reverse(Acc);
+bstring_to_octetlist([],_,Acc) ->
+ lists:reverse(Acc).
+
+hstring_to_octetlist([]) ->
+ [];
+hstring_to_octetlist(L) ->
+ hstring_to_octetlist(L,4,[]).
+hstring_to_octetlist([H|T],0,[Hacc|Tacc]) when H >= $A, H =< $F ->
+ hstring_to_octetlist(T,4,[Hacc + (H - $A + 10)|Tacc]);
+hstring_to_octetlist([H|T],BSL,Acc) when H >= $A, H =< $F ->
+ hstring_to_octetlist(T,0,[(H - $A + 10) bsl BSL|Acc]);
+hstring_to_octetlist([H|T],0,[Hacc|Tacc]) when H >= $0, H =< $9 ->
+ hstring_to_octetlist(T,4,[Hacc + (H - $0)|Tacc]);
+hstring_to_octetlist([H|T],BSL,Acc) when H >= $0, H =< $9 ->
+ hstring_to_octetlist(T,0,[(H - $0) bsl BSL|Acc]);
+hstring_to_octetlist([],_,Acc) ->
+ lists:reverse(Acc).
+
+normalize_objectidentifier(S,Value) ->
+ validate_objectidentifier(S,Value,[]).
+
+normalize_objectdescriptor(Value) ->
+ Value.
+
+normalize_real(Value) ->
+ Value.
+
+normalize_enumerated(#'Externalvaluereference'{value=V},CType)
+ when list(CType) ->
+ normalize_enumerated2(V,CType);
+normalize_enumerated(Value,CType) when atom(Value),list(CType) ->
+ normalize_enumerated2(Value,CType);
+normalize_enumerated({Name,EnumV},CType) when atom(Name) ->
+ normalize_enumerated(EnumV,CType);
+normalize_enumerated(Value,{CType1,CType2}) when list(CType1), list(CType2)->
+ normalize_enumerated(Value,CType1++CType2);
+normalize_enumerated(V,CType) ->
+ io:format("WARNING: Enumerated unknown type ~p~n",[CType]),
+ V.
+normalize_enumerated2(V,Enum) ->
+ case lists:keysearch(V,1,Enum) of
+ {value,{Val,_}} -> Val;
+ _ ->
+ io:format("WARNING: Enumerated value is not correct ~p~n",[V]),
+ V
+ end.
+
+normalize_choice(S,{'CHOICE',{C,V}},CType,NameList) when atom(C) ->
+ Value =
+ case V of
+ Rec when record(Rec,'Externalvaluereference') ->
+ get_normalized_value(S,V,CType,
+ fun normalize_choice/4,
+ [NameList]);
+ _ -> V
+ end,
+ case catch lists:keysearch(C,#'ComponentType'.name,CType) of
+ {value,#'ComponentType'{typespec=CT,name=Name}} ->
+ {C,normalize_value(S,CT,{'DEFAULT',Value},
+ [Name|NameList])};
+ Other ->
+ io:format("WARNING: Wrong format of type/value ~p/~p~n",
+ [Other,Value]),
+ {C,Value}
+ end;
+normalize_choice(S,{'DEFAULT',ValueList},CType,NameList) ->
+ lists:map(fun(X)-> normalize_choice(S,X,CType,NameList) end, ValueList);
+normalize_choice(S,Val=#'Externalvaluereference'{},CType,NameList) ->
+ {_,#valuedef{value=V}}=get_referenced_type(S,Val),
+ normalize_choice(S,{'CHOICE',V},CType,NameList);
+% get_normalized_value(S,Val,CType,fun normalize_choice/4,[NameList]);
+normalize_choice(S,{Name,ChoiceVal},CType,NameList)
+ when atom(Name) ->
+ normalize_choice(S,ChoiceVal,CType,NameList).
+
+normalize_sequence(S,{Name,Value},Components,NameList)
+ when atom(Name),list(Value) ->
+ normalize_sequence(S,Value,Components,NameList);
+normalize_sequence(S,Value,Components,NameList) ->
+ normalized_record('SEQUENCE',S,Value,Components,NameList).
+
+normalize_set(S,{Name,Value},Components,NameList)
+ when atom(Name),list(Value) ->
+ normalized_record('SET',S,Value,Components,NameList);
+normalize_set(S,Value,Components,NameList) ->
+ normalized_record('SET',S,Value,Components,NameList).
+
+normalized_record(SorS,S,Value,Components,NameList) ->
+ NewName = list_to_atom(asn1ct_gen:list2name(NameList)),
+ NoComps = length(Components),
+ case normalize_seq_or_set(SorS,S,Value,Components,NameList,[]) of
+ ListOfVals when length(ListOfVals) == NoComps ->
+ list_to_tuple([NewName|ListOfVals]);
+ _ ->
+ error({type,{illegal,default,value,Value},S})
+ end.
+
+normalize_seq_or_set(SorS,S,[{Cname,V}|Vs],
+ [#'ComponentType'{name=Cname,typespec=TS}|Cs],
+ NameList,Acc) ->
+ NewNameList =
+ case TS#type.def of
+ #'Externaltypereference'{type=TName} ->
+ [TName];
+ _ -> [Cname|NameList]
+ end,
+ NVal = normalize_value(S,TS,{'DEFAULT',V},NewNameList),
+ normalize_seq_or_set(SorS,S,Vs,Cs,NameList,[NVal|Acc]);
+normalize_seq_or_set(SorS,S,Values=[{_Cname1,_V}|_Vs],
+ [#'ComponentType'{prop='OPTIONAL'}|Cs],
+ NameList,Acc) ->
+ normalize_seq_or_set(SorS,S,Values,Cs,NameList,[asn1_NOVALUE|Acc]);
+normalize_seq_or_set(SorS,S,Values=[{_Cname1,_V}|_Vs],
+ [#'ComponentType'{name=Cname2,typespec=TS,
+ prop={'DEFAULT',Value}}|Cs],
+ NameList,Acc) ->
+ NewNameList =
+ case TS#type.def of
+ #'Externaltypereference'{type=TName} ->
+ [TName];
+ _ -> [Cname2|NameList]
+ end,
+ NVal = normalize_value(S,TS,{'DEFAULT',Value},NewNameList),
+ normalize_seq_or_set(SorS,S,Values,Cs,NameList,[NVal|Acc]);
+normalize_seq_or_set(_SorS,_S,[],[],_,Acc) ->
+ lists:reverse(Acc);
+%% If the default value is {}, the ComponentTypes in the SEQUENCE are marked
+%% DEFAULT or OPTIONAL (or the type is defined as SEQUENCE{}, which is handled
+%% by the previous case).
+normalize_seq_or_set(SorS,S,[],
+ [#'ComponentType'{name=Name,typespec=TS,
+ prop={'DEFAULT',Value}}|Cs],
+ NameList,Acc) ->
+ NewNameList =
+ case TS#type.def of
+ #'Externaltypereference'{type=TName} ->
+ [TName];
+ _ -> [Name|NameList]
+ end,
+ NVal = normalize_value(S,TS,{'DEFAULT',Value},NewNameList),
+ normalize_seq_or_set(SorS,S,[],Cs,NameList,[NVal|Acc]);
+normalize_seq_or_set(SorS,S,[],[#'ComponentType'{prop='OPTIONAL'}|Cs],
+ NameList,Acc) ->
+ normalize_seq_or_set(SorS,S,[],Cs,NameList,[asn1_NOVALUE|Acc]);
+normalize_seq_or_set(SorS,S,Value=#'Externalvaluereference'{},
+ Cs,NameList,Acc) ->
+ get_normalized_value(S,Value,Cs,fun normalize_seq_or_set/6,
+ [SorS,NameList,Acc]);
+normalize_seq_or_set(_SorS,S,V,_,_,_) ->
+ error({type,{illegal,default,value,V},S}).
+
+normalize_seqof(S,Value,Type,NameList) ->
+ normalize_s_of('SEQUENCE OF',S,Value,Type,NameList).
+
+normalize_setof(S,Value,Type,NameList) ->
+ normalize_s_of('SET OF',S,Value,Type,NameList).
+
+normalize_s_of(SorS,S,Value,Type,NameList) when list(Value) ->
+ DefValueList = lists:map(fun(X) -> {'DEFAULT',X} end,Value),
+ Suffix = asn1ct_gen:constructed_suffix(SorS,Type),
+ Def = Type#type.def,
+ InnerType = asn1ct_gen:get_inner(Def),
+ WhatKind = asn1ct_gen:type(InnerType),
+ NewNameList =
+ case WhatKind of
+ {constructed,bif} ->
+ [Suffix|NameList];
+ #'Externaltypereference'{type=Name} ->
+ [Name];
+ _ -> []
+ end,
+ NormFun = fun (X) -> normalize_value(S,Type,X,
+ NewNameList) end,
+ case catch lists:map(NormFun, DefValueList) of
+ List when list(List) ->
+ List;
+ _ ->
+ io:format("WARNING: ~p could not handle value ~p~n",
+ [SorS,Value]),
+ Value
+ end;
+normalize_s_of(SorS,S,Value,Type,NameList)
+ when record(Value,'Externalvaluereference') ->
+ get_normalized_value(S,Value,Type,fun normalize_s_of/5,
+ [SorS,NameList]).
+% case catch get_referenced_type(S,Value) of
+% {_,#valuedef{value=V}} ->
+% normalize_s_of(SorS,S,V,Type);
+% {error,Reason} ->
+% io:format("WARNING: ~p could not handle value ~p~n",
+% [SorS,Value]),
+% Value;
+% {_,NewVal} ->
+% normalize_s_of(SorS,S,NewVal,Type);
+% _ ->
+% io:format("WARNING: ~p could not handle value ~p~n",
+% [SorS,Value]),
+% Value
+% end.
+
+
+%% normalize_restrictedstring handles all formats of restricted strings.
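+%% Illustrative examples (hypothetical values): "abc" is returned unchanged,
+%% the quadruple [0,0,0,65] becomes {0,0,0,65}, and {name,"abc"} is unwrapped
+%% and normalized to "abc".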
+%% tuple case
+normalize_restrictedstring(_S,[Int1,Int2],_) when integer(Int1),integer(Int2) ->
+ {Int1,Int2};
+%% quadruple case
+normalize_restrictedstring(_S,[Int1,Int2,Int3,Int4],_) when integer(Int1),
+ integer(Int2),
+ integer(Int3),
+ integer(Int4) ->
+ {Int1,Int2,Int3,Int4};
+%% character string list case
+normalize_restrictedstring(S,[H|T],CType) when list(H);tuple(H) ->
+ [normalize_restrictedstring(S,H,CType)|normalize_restrictedstring(S,T,CType)];
+%% character string case
+normalize_restrictedstring(_S,CString,_) when list(CString) ->
+ Fun =
+ fun(X) ->
+ if
+ X =< 255, X >= 0 ->
+ ok;
+ true ->
+ io:format("WARNING: illegal character in string"
+ " ~p~n",[X])
+ end
+ end,
+ lists:foreach(Fun,CString),
+ CString;
+%% definedvalue case or argument in a parameterized type
+normalize_restrictedstring(S,ERef,CType) when record(ERef,'Externalvaluereference') ->
+ get_normalized_value(S,ERef,CType,
+ fun normalize_restrictedstring/3,[]);
+%%
+normalize_restrictedstring(S,{Name,Val},CType) when atom(Name) ->
+ normalize_restrictedstring(S,Val,CType).
+
+
+get_normalized_value(S,Val,Type,Func,AddArg) ->
+ case catch get_referenced_type(S,Val) of
+ {_,#valuedef{type=_T,value=V}} ->
+ %% should check that Type and T are equal
+ call_Func(S,V,Type,Func,AddArg);
+ {error,_} ->
+ io:format("WARNING: default value not "
+ "comparable ~p~n",[Val]),
+ Val;
+ {_,NewVal} ->
+ call_Func(S,NewVal,Type,Func,AddArg);
+ _ ->
+ io:format("WARNING: default value not "
+ "comparable ~p~n",[Val]),
+ Val
+ end.
+
+call_Func(S,Val,Type,Func,ArgList) ->
+ case ArgList of
+ [] ->
+ Func(S,Val,Type);
+ [LastArg] ->
+ Func(S,Val,Type,LastArg);
+ [Arg1,LastArg1] ->
+ Func(Arg1,S,Val,Type,LastArg1);
+ [Arg1,LastArg1,LastArg2] ->
+ Func(Arg1,S,Val,Type,LastArg1,LastArg2)
+ end.
+
+
+get_canonic_type(S,Type,NameList) ->
+ {InnerType,NewType,NewNameList} =
+ case Type#type.def of
+ Name when atom(Name) ->
+ {Name,Type,NameList};
+ Ref when record(Ref,'Externaltypereference') ->
+ {_,#typedef{name=Name,typespec=RefedType}} =
+ get_referenced_type(S,Ref),
+ get_canonic_type(S,RefedType,[Name]);
+ {Name,T} when atom(Name) ->
+ {Name,T,NameList};
+ Seq when record(Seq,'SEQUENCE') ->
+ {'SEQUENCE',Seq#'SEQUENCE'.components,NameList};
+ Set when record(Set,'SET') ->
+ {'SET',Set#'SET'.components,NameList}
+ end,
+ {asn1ct_gen:unify_if_string(InnerType),NewType,NewNameList}.
+
+
+
+check_ptype(_S,Type,Ts) when record(Ts,type) ->
+ %Tag = Ts#type.tag,
+ %Constr = Ts#type.constraint,
+ Def = Ts#type.def,
+ NewDef=
+ case Def of
+ Seq when record(Seq,'SEQUENCE') ->
+ #newt{type=Seq#'SEQUENCE'{pname=Type#ptypedef.name}};
+ Set when record(Set,'SET') ->
+ #newt{type=Set#'SET'{pname=Type#ptypedef.name}};
+ _Other ->
+ #newt{}
+ end,
+ Ts2 = case NewDef of
+ #newt{type=unchanged} ->
+ Ts;
+ #newt{type=TDef}->
+ Ts#type{def=TDef}
+ end,
+ Ts2.
+
+
+% check_type(S,Type,ObjSpec={{objectclassname,_},_}) ->
+% check_class(S,ObjSpec);
+check_type(_S,Type,Ts) when record(Type,typedef),
+ (Type#typedef.checked==true) ->
+ Ts;
+check_type(_S,Type,Ts) when record(Type,typedef),
+ (Type#typedef.checked==idle) -> % the check is going on
+ Ts;
+check_type(S=#state{recordtopname=TopName},Type,Ts) when record(Ts,type) ->
+ {Def,Tag,Constr} =
+ case match_parameters(Ts#type.def,S#state.parameters) of
+ #type{constraint=_Ctmp,def=Dtmp} ->
+ {Dtmp,Ts#type.tag,Ts#type.constraint};
+ Dtmp ->
+ {Dtmp,Ts#type.tag,Ts#type.constraint}
+ end,
+ TempNewDef = #newt{type=Def,tag=Tag,constraint=Constr},
+ TestFun =
+ fun(Tref) ->
+ {_,MaybeChoice} = get_referenced_type(S,Tref),
+ case catch((MaybeChoice#typedef.typespec)#type.def) of
+ {'CHOICE',_} ->
+ maybe_illicit_implicit_tag(choice,Tag);
+ 'ANY' ->
+ maybe_illicit_implicit_tag(open_type,Tag);
+ 'ANY DEFINED BY' ->
+ maybe_illicit_implicit_tag(open_type,Tag);
+ 'ASN1_OPEN_TYPE' ->
+ maybe_illicit_implicit_tag(open_type,Tag);
+ _ ->
+ Tag
+ end
+ end,
+ NewDef=
+ case Def of
+ Ext when record(Ext,'Externaltypereference') ->
+ {_,RefTypeDef} = get_referenced_type(S,Ext),
+% case RefTypeDef of
+% Class when record(Class,classdef) ->
+% throw({asn1_class,Class});
+% _ -> ok
+% end,
+ case is_class(S,RefTypeDef) of
+ true -> throw({asn1_class,RefTypeDef});
+ _ -> ok
+ end,
+ Ct = TestFun(Ext),
+ RefType =
+%case S#state.erule of
+% ber_bin_v2 ->
+ case RefTypeDef#typedef.checked of
+ true ->
+ RefTypeDef#typedef.typespec;
+ _ ->
+ NewRefTypeDef1 = RefTypeDef#typedef{checked=idle},
+ asn1_db:dbput(S#state.mname,
+ NewRefTypeDef1#typedef.name,NewRefTypeDef1),
+ RefType1 =
+ check_type(S,RefTypeDef,RefTypeDef#typedef.typespec),
+ NewRefTypeDef2 =
+ RefTypeDef#typedef{checked=true,typespec = RefType1},
+ asn1_db:dbput(S#state.mname,
+ NewRefTypeDef2#typedef.name,NewRefTypeDef2),
+ %% update the type and mark as checked
+ RefType1
+ end,
+% _ -> RefTypeDef#typedef.typespec
+% end,
+
+ case asn1ct_gen:prim_bif(asn1ct_gen:get_inner(RefType#type.def)) of
+ true ->
+ %% Here we expand to a built-in type and inline it
+ TempNewDef#newt{
+ type=
+ RefType#type.def,
+ tag=
+ merge_tags(Ct,RefType#type.tag),
+ constraint=
+ merge_constraints(check_constraints(S,Constr),
+ RefType#type.constraint)};
+ _ ->
+ %% Here we only expand the tags and keep the ext ref
+
+ TempNewDef#newt{
+ type=
+ check_externaltypereference(S,Ext),
+ tag =
+ case S#state.erule of
+ ber_bin_v2 ->
+ merge_tags(Ct,RefType#type.tag);
+ _ ->
+ Ct
+ end
+ }
+ end;
+ 'ANY' ->
+ Ct=maybe_illicit_implicit_tag(open_type,Tag),
+ TempNewDef#newt{type='ASN1_OPEN_TYPE',tag=Ct};
+ {'ANY_DEFINED_BY',_} ->
+ Ct=maybe_illicit_implicit_tag(open_type,Tag),
+ TempNewDef#newt{type='ASN1_OPEN_TYPE',tag=Ct};
+ 'INTEGER' ->
+ check_integer(S,[],Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_INTEGER))};
+
+ {'INTEGER',NamedNumberList} ->
+ TempNewDef#newt{type={'INTEGER',check_integer(S,NamedNumberList,Constr)},
+ tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_INTEGER))};
+ {'BIT STRING',NamedNumberList} ->
+ NewL = check_bitstring(S,NamedNumberList,Constr),
+%% erlang:display({asn1ct_check,NamedNumberList,NewL}),
+ TempNewDef#newt{type={'BIT STRING',NewL},
+ tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_BIT_STRING))};
+ 'NULL' ->
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_NULL))};
+ 'OBJECT IDENTIFIER' ->
+ check_objectidentifier(S,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_OBJECT_IDENTIFIER))};
+ 'ObjectDescriptor' ->
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_OBJECT_DESCRIPTOR))};
+ 'EXTERNAL' ->
+%% AssociatedType = asn1_db:dbget(S#state.mname,'EXTERNAL'),
+%% #newt{type=check_type(S,Type,AssociatedType)};
+ put(external,unchecked),
+ TempNewDef#newt{type=
+ #'Externaltypereference'{module=S#state.mname,
+ type='EXTERNAL'},
+ tag=
+ merge_tags(Tag,?TAG_CONSTRUCTED(?N_EXTERNAL))};
+ {'INSTANCE OF',DefinedObjectClass,Constraint} ->
+ %% check that DefinedObjectClass is of TYPE-IDENTIFIER class
+ %% If Constraint is empty make it the general INSTANCE OF type
+ %% If Constraint is not empty make an inlined type
+ %% convert INSTANCE OF to the associated type
+ IOFDef=check_instance_of(S,DefinedObjectClass,Constraint),
+ TempNewDef#newt{type=IOFDef,
+ tag=merge_tags(Tag,?TAG_CONSTRUCTED(?N_INSTANCE_OF))};
+ {'ENUMERATED',NamedNumberList} ->
+ TempNewDef#newt{type=
+ {'ENUMERATED',
+ check_enumerated(S,NamedNumberList,Constr)},
+ tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_ENUMERATED))};
+ 'EMBEDDED PDV' ->
+% AssociatedType = asn1_db:dbget(S#state.mname,'EMBEDDED PDV'),
+% CheckedType = check_type(S,Type,
+% AssociatedType#typedef.typespec),
+ put(embedded_pdv,unchecked),
+ TempNewDef#newt{type=
+ #'Externaltypereference'{module=S#state.mname,
+ type='EMBEDDED PDV'},
+ tag=
+ merge_tags(Tag,?TAG_CONSTRUCTED(?N_EMBEDDED_PDV))};
+ 'BOOLEAN'->
+ check_boolean(S,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_BOOLEAN))};
+ 'OCTET STRING' ->
+ check_octetstring(S,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_OCTET_STRING))};
+ 'NumericString' ->
+ check_restrictedstring(S,Def,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_NumericString))};
+ 'TeletexString' ->
+ check_restrictedstring(S,Def,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_TeletexString))};
+ 'VideotexString' ->
+ check_restrictedstring(S,Def,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_VideotexString))};
+ 'UTCTime' ->
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_UTCTime))};
+ 'GeneralizedTime' ->
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_GeneralizedTime))};
+ 'GraphicString' ->
+ check_restrictedstring(S,Def,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_GraphicString))};
+ 'VisibleString' ->
+ check_restrictedstring(S,Def,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_VisibleString))};
+ 'GeneralString' ->
+ check_restrictedstring(S,Def,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_GeneralString))};
+ 'PrintableString' ->
+ check_restrictedstring(S,Def,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_PrintableString))};
+ 'IA5String' ->
+ check_restrictedstring(S,Def,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_IA5String))};
+ 'BMPString' ->
+ check_restrictedstring(S,Def,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_BMPString))};
+ 'UniversalString' ->
+ check_restrictedstring(S,Def,Constr),
+ TempNewDef#newt{tag=
+ merge_tags(Tag,?TAG_PRIMITIVE(?N_UniversalString))};
+ 'CHARACTER STRING' ->
+% AssociatedType = asn1_db:dbget(S#state.mname,
+% 'CHARACTER STRING'),
+% CheckedType = check_type(S,Type,
+% AssociatedType#typedef.typespec),
+ put(character_string,unchecked),
+ TempNewDef#newt{type=
+ #'Externaltypereference'{module=S#state.mname,
+ type='CHARACTER STRING'},
+ tag=
+ merge_tags(Tag,?TAG_CONSTRUCTED(?N_CHARACTER_STRING))};
+ Seq when record(Seq,'SEQUENCE') ->
+ RecordName =
+ case TopName of
+ [] ->
+ [Type#typedef.name];
+ _ ->
+ TopName
+ end,
+ {TableCInf,Components} =
+ check_sequence(S#state{recordtopname=
+ RecordName},
+ Type,Seq#'SEQUENCE'.components),
+ TempNewDef#newt{type=Seq#'SEQUENCE'{tablecinf=TableCInf,
+ components=Components},
+ tag=
+ merge_tags(Tag,?TAG_CONSTRUCTED(?N_SEQUENCE))};
+ {'SEQUENCE OF',Components} ->
+ TempNewDef#newt{type={'SEQUENCE OF',check_sequenceof(S,Type,Components)},
+ tag=
+ merge_tags(Tag,?TAG_CONSTRUCTED(?N_SEQUENCE))};
+ {'CHOICE',Components} ->
+ Ct = maybe_illicit_implicit_tag(choice,Tag),
+ TempNewDef#newt{type={'CHOICE',check_choice(S,Type,Components)},tag=Ct};
+ Set when record(Set,'SET') ->
+ RecordName=
+ case TopName of
+ [] ->
+ [Type#typedef.name];
+ _ ->
+ TopName
+ end,
+ {Sorted,TableCInf,Components} =
+ check_set(S#state{recordtopname=RecordName},
+ Type,Set#'SET'.components),
+ TempNewDef#newt{type=Set#'SET'{sorted=Sorted,
+ tablecinf=TableCInf,
+ components=Components},
+ tag=
+ merge_tags(Tag,?TAG_CONSTRUCTED(?N_SET))};
+ {'SET OF',Components} ->
+ TempNewDef#newt{type={'SET OF',check_setof(S,Type,Components)},
+ tag=
+ merge_tags(Tag,?TAG_CONSTRUCTED(?N_SET))};
+ %% This is a temporary hack until the full Information Obj Spec
+ %% in X.681 is supported
+ {{typereference,_,'TYPE-IDENTIFIER'},[{typefieldreference,_,'Type'}]} ->
+ Ct=maybe_illicit_implicit_tag(open_type,Tag),
+ TempNewDef#newt{type='ASN1_OPEN_TYPE',tag=Ct};
+
+ {#'Externaltypereference'{type='TYPE-IDENTIFIER'},
+ [{typefieldreference,_,'Type'}]} ->
+ Ct=maybe_illicit_implicit_tag(open_type,Tag),
+ TempNewDef#newt{type='ASN1_OPEN_TYPE',tag=Ct};
+
+ {pt,Ptype,ParaList} ->
+	    %% Ptype might be a parameterized type, object set or
+	    %% value set. If it isn't a parameterized type, notify the
+	    %% calling function.
+ {_,Ptypedef} = get_referenced_type(S,Ptype),
+ notify_if_not_ptype(S,Ptypedef),
+ NewParaList = [match_parameters(TmpParam,S#state.parameters)||
+ TmpParam <- ParaList],
+ Instance = instantiate_ptype(S,Ptypedef,NewParaList),
+ TempNewDef#newt{type=Instance#type.def,
+ tag=merge_tags(Tag,Instance#type.tag),
+ constraint=Instance#type.constraint,
+ inlined=yes};
+
+% {ClRef,FieldRefList} when record(ClRef,'Externaltypereference') ->
+ OCFT=#'ObjectClassFieldType'{class=ClRef} ->
+	    %% this case occurs in a SEQUENCE when
+	    %% the type of the component is an ObjectClassFieldType
+ ClassSpec = check_class(S,ClRef),
+ NewTypeDef = maybe_open_type(S,ClassSpec,OCFT,Constr),
+ InnerTag = get_innertag(S,NewTypeDef),
+ MergedTag = merge_tags(Tag,InnerTag),
+ Ct =
+ case is_open_type(NewTypeDef) of
+ true ->
+ maybe_illicit_implicit_tag(open_type,MergedTag);
+ _ ->
+ MergedTag
+ end,
+ TempNewDef#newt{type=NewTypeDef,tag=Ct};
+ {valueset,Vtype} ->
+ TempNewDef#newt{type={valueset,check_type(S,Type,Vtype)}};
+ Other ->
+ exit({'cant check' ,Other})
+ end,
+ Ts2 = case NewDef of
+ #newt{type=unchanged} ->
+ Ts#type{def=Def};
+ #newt{type=TDef}->
+ Ts#type{def=TDef}
+ end,
+ NewTag = case NewDef of
+ #newt{tag=unchanged} ->
+ Tag;
+ #newt{tag=TT} ->
+ TT
+ end,
+ T3 = Ts2#type{tag = lists:map(fun(TempTag = #tag{type={default,TTx}}) ->
+ TempTag#tag{type=TTx};
+ (Else) -> Else end, NewTag)},
+ T4 = case NewDef of
+ #newt{constraint=unchanged} ->
+ T3#type{constraint=Constr};
+ #newt{constraint=NewConstr} ->
+ T3#type{constraint=NewConstr}
+ end,
+ T5 = T4#type{inlined=NewDef#newt.inlined},
+ T5#type{constraint=check_constraints(S,T5#type.constraint)}.
+
+
+get_innertag(_S,#'ObjectClassFieldType'{type=Type}) ->
+ case Type of
+ #type{tag=Tag} -> Tag;
+ {fixedtypevaluefield,_,#type{tag=Tag}} -> Tag;
+ {TypeFieldName,_} when atom(TypeFieldName) -> [];
+ _ -> []
+ end;
+get_innertag(_S,_) ->
+ [].
+
+is_class(_S,#classdef{}) ->
+ true;
+is_class(S,#typedef{typespec=#type{def=Eref}})
+ when record(Eref,'Externaltypereference')->
+ {_,NextDef} = get_referenced_type(S,Eref),
+ is_class(S,NextDef);
+is_class(_,_) ->
+ false.
+
+get_class_def(_S,CD=#classdef{}) ->
+ CD;
+get_class_def(S,#typedef{typespec=#type{def=Eref}})
+ when record(Eref,'Externaltypereference') ->
+ {_,NextDef} = get_referenced_type(S,Eref),
+ get_class_def(S,NextDef).
+
+maybe_illicit_implicit_tag(Kind,Tag) ->
+ case Tag of
+ [#tag{type='IMPLICIT'}|_T] ->
+ throw({error,{asn1,{implicit_tag_before,Kind}}});
+ [ChTag = #tag{type={default,_}}|T] ->
+ case Kind of
+ open_type ->
+ [ChTag#tag{type='EXPLICIT',form=32}|T]; %X.680 30.6c, X.690 8.14.2
+ choice ->
+ [ChTag#tag{type='EXPLICIT',form=32}|T] % X.680 28.6 c, 30.6c
+ end;
+ _ ->
+ Tag % unchanged
+ end.
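+
+%% Illustration (not part of the original module; a hedged sketch of how
+%% maybe_illicit_implicit_tag/2 behaves, based on the clauses above):
+%%   maybe_illicit_implicit_tag(choice,[#tag{type='IMPLICIT'}])
+%%     throws {error,{asn1,{implicit_tag_before,choice}}}
+%%   maybe_illicit_implicit_tag(open_type,[#tag{type={default,'IMPLICIT'}}])
+%%     returns [#tag{type='EXPLICIT',form=32}]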
+
+%% maybe_open_type/4 -> #'ObjectClassFieldType'{}
+%% If the FieldRefList points out a typefield and the class doesn't have
+%% any UNIQUE field, so that a component relation constraint cannot specify
+%% the type of the typefield, the returned record gets type
+%% 'ASN1_OPEN_TYPE'; otherwise it keeps the referenced field type.
+maybe_open_type(S,ClassSpec=#objectclass{fields=Fs},
+ OCFT=#'ObjectClassFieldType'{fieldname=FieldRefList},
+ Constr) ->
+ Type = get_ObjectClassFieldType(S,Fs,FieldRefList),
+ FieldNames=get_referenced_fieldname(FieldRefList),
+ case lists:last(FieldRefList) of
+ {valuefieldreference,_} ->
+ OCFT#'ObjectClassFieldType'{class=ClassSpec,
+ fieldname=FieldNames,
+ type=Type};
+ {typefieldreference,_} ->
+ case {catch get_unique_fieldname(#classdef{typespec=ClassSpec}),
+ asn1ct_gen:get_constraint(Constr,componentrelation)}of
+ {Tuple,_} when tuple(Tuple) ->
+ OCFT#'ObjectClassFieldType'{class=ClassSpec,
+ fieldname=FieldNames,
+ type='ASN1_OPEN_TYPE'};
+ {_,no} ->
+ OCFT#'ObjectClassFieldType'{class=ClassSpec,
+ fieldname=FieldNames,
+ type='ASN1_OPEN_TYPE'};
+ _ ->
+ OCFT#'ObjectClassFieldType'{class=ClassSpec,
+ fieldname=FieldNames,
+ type=Type}
+ end
+ end.
+
+is_open_type(#'ObjectClassFieldType'{type='ASN1_OPEN_TYPE'}) ->
+ true;
+is_open_type(#'ObjectClassFieldType'{}) ->
+ false.
+
+
+notify_if_not_ptype(S,#pvaluesetdef{type=Type}) ->
+ case Type#type.def of
+ Ref when record(Ref,'Externaltypereference') ->
+ case get_referenced_type(S,Ref) of
+ {_,#classdef{}} ->
+ throw(pobjectsetdef);
+ {_,#typedef{}} ->
+ throw(pvalueset)
+ end;
+ T when record(T,type) -> % this must be a value set
+ throw(pvalueset)
+ end;
+notify_if_not_ptype(_S,#ptypedef{}) ->
+ ok.
+
+% fix me
+instantiate_ptype(S,Ptypedef,ParaList) ->
+ #ptypedef{args=Args,typespec=Type} = Ptypedef,
+% Args = get_pt_args(Ptypedef),
+% Type = get_pt_spec(Ptypedef),
+ MatchedArgs = match_args(Args, ParaList, []),
+ NewS = S#state{type=Type,parameters=MatchedArgs,abscomppath=[]},
+    %The abscomppath must be empty since a table constraint in a
+    %parameterized type can only refer to components within the type
+ check_type(NewS, Ptypedef, Type).
+
+get_pt_args(#ptypedef{args=Args}) ->
+ Args;
+get_pt_args(#pvaluesetdef{args=Args}) ->
+ Args;
+get_pt_args(#pvaluedef{args=Args}) ->
+ Args;
+get_pt_args(#pobjectdef{args=Args}) ->
+ Args;
+get_pt_args(#pobjectsetdef{args=Args}) ->
+ Args.
+
+get_pt_spec(#ptypedef{typespec=Type}) ->
+ Type;
+get_pt_spec(#pvaluedef{value=Value}) ->
+ Value;
+get_pt_spec(#pvaluesetdef{valueset=VS}) ->
+ VS;
+get_pt_spec(#pobjectdef{def=Def}) ->
+ Def;
+get_pt_spec(#pobjectsetdef{def=Def}) ->
+ Def.
+
+
+
+match_args([FormArg|Ft], [ActArg|At], Acc) ->
+ match_args(Ft, At, [{FormArg,ActArg}|Acc]);
+match_args([], [], Acc) ->
+ lists:reverse(Acc);
+match_args(_, _, _) ->
+ throw({error,{asn1,{wrong_number_of_arguments}}}).
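+
+%% Example (hedged, not in the original source): pairing formal with actual
+%% parameters, where 'T' and 'V' are hypothetical formal parameter names:
+%%   match_args(['T','V'],[SomeType,5],[]) -> [{'T',SomeType},{'V',5}]
+%% A length mismatch throws {error,{asn1,{wrong_number_of_arguments}}}.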
+
+check_constraints(S,C) when list(C) ->
+ check_constraints(S, C, []);
+check_constraints(S,C) when record(C,constraint) ->
+ check_constraints(S, C#constraint.c, []).
+
+
+resolv_tuple_or_list(S,List) when list(List) ->
+ lists:map(fun(X)->resolv_value(S,X) end, List);
+resolv_tuple_or_list(S,{Lb,Ub}) ->
+ {resolv_value(S,Lb),resolv_value(S,Ub)}.
+
+%%%-----------------------------------------
+%% If the constraint value is a defined value, the value name
+%% is replaced by the actual value
+%%
+resolv_value(S,Val) ->
+ case match_parameters(Val, S#state.parameters) of
+ Id -> % unchanged
+ resolv_value1(S,Id);
+ Other ->
+ resolv_value(S,Other)
+ end.
+
+resolv_value1(S = #state{mname=M,inputmodules=InpMods},
+ V=#'Externalvaluereference'{pos=Pos,module=ExtM,value=Name}) ->
+ case ExtM of
+ M -> resolv_value2(S,M,Name,Pos);
+ _ ->
+ case lists:member(ExtM,InpMods) of
+ true ->
+ resolv_value2(S,M,Name,Pos);
+ false ->
+ V
+ end
+ end;
+resolv_value1(S,{gt,V}) ->
+ case V of
+ Int when integer(Int) ->
+ V + 1;
+ #valuedef{value=Int} ->
+ 1 + resolv_value(S,Int);
+ Other ->
+ throw({error,{asn1,{undefined_type_or_value,Other}}})
+ end;
+resolv_value1(S,{lt,V}) ->
+ case V of
+ Int when integer(Int) ->
+ V - 1;
+ #valuedef{value=Int} ->
+ resolv_value(S,Int) - 1;
+ Other ->
+ throw({error,{asn1,{undefined_type_or_value,Other}}})
+ end;
+resolv_value1(S,{'ValueFromObject',{object,Object},[{valuefieldreference,
+ FieldName}]}) ->
+ %% FieldName can hold either a fixed-type value or a variable-type value
+ %% Object is a DefinedObject, i.e. a #'Externaltypereference'
+ {_,ObjTDef} = get_referenced_type(S,Object),
+ TS = check_object(S,ObjTDef,ObjTDef#typedef.typespec),
+ {_,_,Components} = TS#'Object'.def,
+ case lists:keysearch(FieldName,1,Components) of
+ {value,{_,#valuedef{value=Val}}} ->
+ Val;
+ _ ->
+ error({value,"illegal value in constraint",S})
+ end;
+% resolv_value1(S,{'ValueFromObject',{po,Object,Params},FieldName}) ->
+% %% FieldName can hold either a fixed-type value or a variable-type value
+% %% Object is a ParameterizedObject
+resolv_value1(_,V) ->
+ V.
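+
+%% Hedged examples (not in the original source) of the {gt,..}/{lt,..}
+%% clauses above, for plain integer bounds:
+%%   resolv_value1(S,{gt,3}) -> 4
+%%   resolv_value1(S,{lt,3}) -> 2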
+
+resolv_value2(S,ModuleName,Name,Pos) ->
+ case asn1_db:dbget(ModuleName,Name) of
+ undefined ->
+ case imported(S,Name) of
+ {ok,Imodule} ->
+ {_,V2} = get_referenced(S,Imodule,Name,Pos),
+ V2#valuedef.value;
+ _ ->
+ throw({error,{asn1,{undefined_type_or_value,Name}}})
+ end;
+ Val ->
+ Val#valuedef.value
+ end.
+
+check_constraints(S,[{'ContainedSubtype',Type} | Rest], Acc) ->
+ {_,CTDef} = get_referenced_type(S,Type#type.def),
+ CType = check_type(S,S#state.tname,CTDef#typedef.typespec),
+ check_constraints(S,Rest,CType#type.constraint ++ Acc);
+check_constraints(S,[C | Rest], Acc) ->
+ check_constraints(S,Rest,[check_constraint(S,C) | Acc]);
+check_constraints(S,[],Acc) ->
+% io:format("Acc: ~p~n",[Acc]),
+ C = constraint_merge(S,lists:reverse(Acc)),
+% io:format("C: ~p~n",[C]),
+ lists:flatten(C).
+
+
+range_check(F={FixV,FixV}) ->
+% FixV;
+ F;
+range_check(VR={Lb,Ub}) when Lb < Ub ->
+ VR;
+range_check(Err={_,_}) ->
+ throw({error,{asn1,{illegal_size_constraint,Err}}});
+range_check(Value) ->
+ Value.
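+
+%% Hedged examples (not in the original source) of range_check/1:
+%%   range_check({4,4})  -> {4,4}
+%%   range_check({1,7})  -> {1,7}
+%%   range_check({7,1})  throws {error,{asn1,{illegal_size_constraint,{7,1}}}}
+%%   range_check(5)      -> 5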
+
+check_constraint(S,Ext) when record(Ext,'Externaltypereference') ->
+ check_externaltypereference(S,Ext);
+
+
+check_constraint(S,{'SizeConstraint',{Lb,Ub}})
+ when list(Lb);tuple(Lb),size(Lb)==2 ->
+ case Lb of
+ #'Externalvaluereference'{} ->
+ check_constraint(S,{'SizeConstraint',{resolv_value(S,Lb),Ub}});
+ _ ->
+ NewLb = range_check(resolv_tuple_or_list(S,Lb)),
+ NewUb = range_check(resolv_tuple_or_list(S,Ub)),
+ {'SizeConstraint',{NewLb,NewUb}}
+ end;
+check_constraint(S,{'SizeConstraint',{Lb,Ub}}) ->
+ case {resolv_value(S,Lb),resolv_value(S,Ub)} of
+ {FixV,FixV} ->
+ {'SizeConstraint',FixV};
+ {Low,High} when Low < High ->
+ {'SizeConstraint',{Low,High}};
+ Err ->
+ throw({error,{asn1,{illegal_size_constraint,Err}}})
+ end;
+check_constraint(S,{'SizeConstraint',Lb}) ->
+ {'SizeConstraint',resolv_value(S,Lb)};
+
+check_constraint(S,{'SingleValue', L}) when list(L) ->
+ F = fun(A) -> resolv_value(S,A) end,
+ {'SingleValue',lists:map(F,L)};
+
+check_constraint(S,{'SingleValue', V}) when integer(V) ->
+ Val = resolv_value(S,V),
+%% [{'SingleValue',Val},{'ValueRange',{Val,Val}}]; % Why adding value range?
+ {'SingleValue',Val};
+check_constraint(S,{'SingleValue', V}) ->
+ {'SingleValue',resolv_value(S,V)};
+
+check_constraint(S,{'ValueRange', {Lb, Ub}}) ->
+ {'ValueRange',{resolv_value(S,Lb),resolv_value(S,Ub)}};
+
+%%check_constraint(S,{'ContainedSubtype',Type}) ->
+%% #typedef{typespec=TSpec} =
+%% check_type(S,S#state.tname,get_referenced_type(S,Type#type.def)),
+%% [C] = TSpec#type.constraint,
+%% C;
+
+check_constraint(S,{valueset,Type}) ->
+ {valueset,check_type(S,S#state.tname,Type)};
+
+check_constraint(S,{simpletable,Type}) ->
+ OSName = (Type#type.def)#'Externaltypereference'.type,
+ C = match_parameters(Type#type.def,S#state.parameters),
+ case C of
+ #'Externaltypereference'{} ->
+ Type#type{def=check_externaltypereference(S,C)},
+ {simpletable,OSName};
+ _ ->
+ check_type(S,S#state.tname,Type),
+ {simpletable,OSName}
+ end;
+
+check_constraint(S,{componentrelation,{objectset,Opos,Objset},Id}) ->
+ %% Objset is an 'Externaltypereference' record, since Objset is
+ %% a DefinedObjectSet.
+ RealObjset = match_parameters(Objset,S#state.parameters),
+ Ext = check_externaltypereference(S,RealObjset),
+ {componentrelation,{objectset,Opos,Ext},Id};
+
+check_constraint(S,Type) when record(Type,type) ->
+ #type{def=Def} = check_type(S,S#state.tname,Type),
+ Def;
+
+check_constraint(S,C) when list(C) ->
+ lists:map(fun(X)->check_constraint(S,X) end,C);
+% else keep the constraint unchanged
+check_constraint(_S,Any) ->
+% io:format("Constraint = ~p~n",[Any]),
+ Any.
+
+%% constraint_merge/2
+%% Compute the intersection of the outermost level of the constraint list.
+%% See Dubuisson, second paragraph and footnote on page 285.
+%% If constraints with extension are included in the combined constraints,
+%% the resulting combination will have the extension of the last constraint.
+%% Thus, there will be no extension if the last constraint is without
+%% extension. The root sets of all constraints are considered in the
+%% "outermost intersection". See section 13.1.2 in Dubuisson.
+constraint_merge(_S,C=[H])when tuple(H) ->
+ C;
+constraint_merge(_S,[]) ->
+ [];
+constraint_merge(S,C) ->
+ %% skip all extension but the last
+ C1 = filter_extensions(C),
+ %% perform all internal level intersections, intersections first
+ %% since they have precedence over unions
+ C2 = lists:map(fun(X)when list(X)->constraint_intersection(S,X);
+ (X) -> X end,
+ C1),
+ %% perform all internal level unions
+ C3 = lists:map(fun(X)when list(X)->constraint_union(S,X);
+ (X) -> X end,
+ C2),
+
+ %% now get intersection of the outermost level
+ %% get the least common single value constraint
+ SVs = get_constraints(C3,'SingleValue'),
+ CombSV = intersection_of_sv(S,SVs),
+ %% get the least common value range constraint
+ VRs = get_constraints(C3,'ValueRange'),
+ CombVR = intersection_of_vr(S,VRs),
+ %% get the least common size constraint
+ SZs = get_constraints(C3,'SizeConstraint'),
+ CombSZ = intersection_of_size(S,SZs),
+ CminusSVs=ordsets:subtract(ordsets:from_list(C3),ordsets:from_list(SVs)),
+ % CminusSVsVRs = ordsets:subtract(ordsets:from_list(CminusSVs),
+% ordsets:from_list(VRs)),
+ RestC = ordsets:subtract(ordsets:from_list(CminusSVs),
+ ordsets:from_list(SZs)),
+    %% get the least common combined constraint. That is the union of each
+    %% deep constraint and the merge of single value and value range constraints
+ combine_constraints(S,CombSV,CombVR,CombSZ++RestC).
+
+%% constraint_union(S,C) takes a list of constraints as input and
+%% merges them into a union. Unions are performed when two
+%% constraints are found with the atom union between them.
+%% The list may be nested. Fix that later !!!
+constraint_union(_S,[]) ->
+ [];
+constraint_union(_S,C=[_E]) ->
+ C;
+constraint_union(S,C) when list(C) ->
+ case lists:member(union,C) of
+ true ->
+ constraint_union1(S,C,[]);
+ _ ->
+ C
+ end;
+% SV = get_constraints(C,'SingleValue'),
+% SV1 = constraint_union_sv(S,SV),
+% VR = get_constraints(C,'ValueRange'),
+% VR1 = constraint_union_vr(VR),
+% RestC = ordsets:filter(fun({'SingleValue',_})->false;
+% ({'ValueRange',_})->false;
+% (_) -> true end,ordsets:from_list(C)),
+% SV1++VR1++RestC;
+constraint_union(_S,C) ->
+ [C].
+
+constraint_union1(S,[A={'ValueRange',_},union,B={'ValueRange',_}|Rest],Acc) ->
+ AunionB = constraint_union_vr([A,B]),
+ constraint_union1(S,Rest,AunionB++Acc);
+constraint_union1(S,[A={'SingleValue',_},union,B={'SingleValue',_}|Rest],Acc) ->
+ AunionB = constraint_union_sv(S,[A,B]),
+ constraint_union1(S,Rest,AunionB++Acc);
+constraint_union1(S,[A={'SingleValue',_},union,B={'ValueRange',_}|Rest],Acc) ->
+ AunionB = union_sv_vr(S,A,B),
+ constraint_union1(S,Rest,AunionB++Acc);
+constraint_union1(S,[A={'ValueRange',_},union,B={'SingleValue',_}|Rest],Acc) ->
+ AunionB = union_sv_vr(S,B,A),
+ constraint_union1(S,Rest,AunionB++Acc);
+constraint_union1(S,[union|Rest],Acc) -> %skip when unsupported constraints
+ constraint_union1(S,Rest,Acc);
+constraint_union1(S,[A|Rest],Acc) ->
+ constraint_union1(S,Rest,[A|Acc]);
+constraint_union1(_S,[],Acc) ->
+ lists:reverse(Acc).
+
+constraint_union_sv(_S,SV) ->
+ Values=lists:map(fun({_,V})->V end,SV),
+ case ordsets:from_list(Values) of
+ [] -> [];
+ [N] -> [{'SingleValue',N}];
+ L -> [{'SingleValue',L}]
+ end.
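+
+%% Hedged example (not in the original source): duplicate single values are
+%% collapsed via ordsets:from_list/1, e.g.
+%%   constraint_union_sv(S,[{'SingleValue',3},{'SingleValue',1},
+%%                          {'SingleValue',3}]) -> [{'SingleValue',[1,3]}]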
+
+%% REMOVE????
+%%constraint_union(S,VR,'ValueRange') ->
+%% constraint_union_vr(VR).
+
+%% constraint_union_vr(VR)
+%% VR = [{'ValueRange',{Lb,Ub}},...]
+%% Lb = 'MIN' | integer()
+%% Ub = 'MAX' | integer()
+%% Returns if possible only one ValueRange tuple with a range that
+%% is a union of all ranges in VR.
+constraint_union_vr(VR) ->
+ %% Sort VR by Lb in first hand and by Ub in second hand
+ Fun=fun({_,{'MIN',_B1}},{_,{A2,_B2}}) when integer(A2)->true;
+ ({_,{A1,_B1}},{_,{'MAX',_B2}}) when integer(A1) -> true;
+ ({_,{A1,_B1}},{_,{A2,_B2}}) when integer(A1),integer(A2),A1<A2 -> true;
+ ({_,{A,B1}},{_,{A,B2}}) when B1=<B2->true;
+ (_,_)->false end,
+ constraint_union_vr(lists:usort(Fun,VR),[]).
+
+constraint_union_vr([],Acc) ->
+ lists:reverse(Acc);
+constraint_union_vr([C|Rest],[]) ->
+ constraint_union_vr(Rest,[C]);
+constraint_union_vr([{_,{Lb,Ub2}}|Rest],[{_,{Lb,_Ub1}}|Acc]) -> %Ub2 > Ub1
+ constraint_union_vr(Rest,[{'ValueRange',{Lb,Ub2}}|Acc]);
+constraint_union_vr([{_,{_,Ub}}|Rest],A=[{_,{_,Ub}}|_Acc]) ->
+ constraint_union_vr(Rest,A);
+constraint_union_vr([{_,{Lb2,Ub2}}|Rest],[{_,{Lb1,Ub1}}|Acc]) when Lb2=<Ub1,
+ Ub2>Ub1->
+ constraint_union_vr(Rest,[{'ValueRange',{Lb1,Ub2}}|Acc]);
+constraint_union_vr([{_,{_,Ub2}}|Rest],A=[{_,{_,Ub1}}|_Acc]) when Ub2=<Ub1->
+ constraint_union_vr(Rest,A);
+constraint_union_vr([VR|Rest],Acc) ->
+ constraint_union_vr(Rest,[VR|Acc]).
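+
+%% Hedged example (not in the original source): two overlapping ranges are
+%% merged into one, e.g.
+%%   constraint_union_vr([{'ValueRange',{1,4}},{'ValueRange',{3,9}}])
+%%     -> [{'ValueRange',{1,9}}]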
+
+union_sv_vr(_S,[],B) ->
+ [B];
+union_sv_vr(_S,A,[]) ->
+ [A];
+union_sv_vr(_S,C1={'SingleValue',SV},C2={'ValueRange',VR={Lb,Ub}})
+ when integer(SV) ->
+ case is_int_in_vr(SV,C2) of
+ true -> [C2];
+ _ ->
+ case VR of
+ {'MIN',Ub} when SV==Ub+1 -> [{'ValueRange',{'MIN',SV}}];
+ {Lb,'MAX'} when SV==Lb-1 -> [{'ValueRange',{SV,'MAX'}}];
+ {Lb,Ub} when SV==Ub+1 -> [{'ValueRange',{Lb,SV}}];
+ {Lb,Ub} when SV==Lb-1 -> [{'ValueRange',{SV,Ub}}];
+ _ ->
+ [C1,C2]
+ end
+ end;
+union_sv_vr(_S,C1={'SingleValue',SV},C2={'ValueRange',{_Lb,_Ub}})
+ when list(SV) ->
+ case lists:filter(fun(X)->is_int_in_vr(X,C2) end,SV) of
+ [] -> [C2];
+ L ->
+ case expand_vr(L,C2) of
+ {[],C3} -> [C3];
+ {L,C2} -> [C1,C2];
+ {[Val],C3} -> [{'SingleValue',Val},C3];
+ {L2,C3} -> [{'SingleValue',L2},C3]
+ end
+ end.
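+
+%% Hedged examples (not in the original source) of union_sv_vr/3:
+%%   union_sv_vr(S,{'SingleValue',3},{'ValueRange',{1,4}})
+%%     -> [{'ValueRange',{1,4}}]        % 3 is already inside the range
+%%   union_sv_vr(S,{'SingleValue',5},{'ValueRange',{1,4}})
+%%     -> [{'ValueRange',{1,5}}]        % 5 extends the range by one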
+
+expand_vr(L,VR={_,{Lb,Ub}}) ->
+ case lower_Lb(L,Lb) of
+ false ->
+ case higher_Ub(L,Ub) of
+ false ->
+ {L,VR};
+ {L1,UbNew} ->
+ expand_vr(L1,{'ValueRange',{Lb,UbNew}})
+ end;
+ {L1,LbNew} ->
+ expand_vr(L1,{'ValueRange',{LbNew,Ub}})
+ end.
+
+lower_Lb(_,'MIN') ->
+ false;
+lower_Lb(L,Lb) ->
+ remove_val_from_list(Lb - 1,L).
+
+higher_Ub(_,'MAX') ->
+ false;
+higher_Ub(L,Ub) ->
+ remove_val_from_list(Ub + 1,L).
+
+remove_val_from_list(Val,List) ->
+ case lists:member(Val,List) of
+ true ->
+ {lists:delete(Val,List),Val};
+ false ->
+ false
+ end.
+
+%% get_constraints/2
+%% Arguments are a list of constraints, each of the format {key,value},
+%% and a constraint type.
+%% Returns the list of constraints of the requested type; the list is
+%% empty if no such constraints were found.
+get_constraints(L=[{CType,_}],CType) ->
+ L;
+get_constraints(C,CType) ->
+ keysearch_allwithkey(CType,1,C).
+
+%% keysearch_allwithkey(Key,Ix,L)
+%% Types:
+%% Key = atom()
+%% Ix = integer()
+%% L = [Tuple]
+%% Tuple = {atom(),term()} | ...
+%% Returns a list that contains all
+%% elements of L that have Key as element Ix
+keysearch_allwithkey(Key,Ix,L) ->
+ lists:filter(fun(X) when tuple(X) ->
+ case element(Ix,X) of
+ Key -> true;
+ _ -> false
+ end;
+ (_) -> false
+ end, L).
+
+
+%% filter_extensions(C)
+%% takes a list of constraints as input and
+%% returns a list with the intersection of all extension roots;
+%% only the extension of the last constraint is kept, and only if
+%% that constraint has an extension
+filter_extensions([]) ->
+ [];
+filter_extensions(C=[_H]) ->
+ C;
+filter_extensions(C) when list(C) ->
+ filter_extensions(C,[]).
+
+filter_extensions([C],Acc) ->
+ lists:reverse([C|Acc]);
+filter_extensions([{C,_E},H2|T],Acc) when tuple(C) ->
+ filter_extensions([H2|T],[C|Acc]);
+filter_extensions([{'SizeConstraint',{A,_B}},H2|T],Acc)
+ when list(A);tuple(A) ->
+ filter_extensions([H2|T],[{'SizeConstraint',A}|Acc]);
+filter_extensions([H1,H2|T],Acc) ->
+ filter_extensions([H2|T],[H1|Acc]).
+
+%% constraint_intersection(S,C) takes a list of constraints as input and
+%% performs intersections. Intersections are performed when the
+%% atom intersection is found between two constraints.
+%% The list may be nested. Fix that later !!!
+constraint_intersection(_S,[]) ->
+ [];
+constraint_intersection(_S,C=[_E]) ->
+ C;
+constraint_intersection(S,C) when list(C) ->
+% io:format("constraint_intersection: ~p~n",[C]),
+ case lists:member(intersection,C) of
+ true ->
+ constraint_intersection1(S,C,[]);
+ _ ->
+ C
+ end;
+constraint_intersection(_S,C) ->
+ [C].
+
+constraint_intersection1(S,[A,intersection,B|Rest],Acc) ->
+ AisecB = c_intersect(S,A,B),
+ constraint_intersection1(S,Rest,AisecB++Acc);
+constraint_intersection1(S,[A|Rest],Acc) ->
+ constraint_intersection1(S,Rest,[A|Acc]);
+constraint_intersection1(_,[],Acc) ->
+ lists:reverse(Acc).
+
+c_intersect(S,C1={'SingleValue',_},C2={'SingleValue',_}) ->
+ intersection_of_sv(S,[C1,C2]);
+c_intersect(S,C1={'ValueRange',_},C2={'ValueRange',_}) ->
+ intersection_of_vr(S,[C1,C2]);
+c_intersect(S,C1={'ValueRange',_},C2={'SingleValue',_}) ->
+ intersection_sv_vr(S,[C2],[C1]);
+c_intersect(S,C1={'SingleValue',_},C2={'ValueRange',_}) ->
+ intersection_sv_vr(S,[C1],[C2]);
+c_intersect(_S,C1,C2) ->
+ [C1,C2].
+
+%% combine_constraints(S,SV,VR,CComb)
+%% Types:
+%% S = record(state,S)
+%% SV = [] | [SVC]
+%% VR = [] | [VRC]
+%% CComb = [] | [Lists]
+%% SVC = {'SingleValue',integer()} | {'SingleValue',[integer(),...]}
+%% VRC = {'ValueRange',{Lb,Ub}}
+%% Lists = List of lists containing any constraint combination
+%% Lb = 'MIN' | integer()
+%% Ub = 'MAX' | integer()
+%% Returns a combination of the least common constraint among SV,VR and all
+%% elements in CComb
+combine_constraints(_S,[],VR,CComb) ->
+ VR ++ CComb;
+% combine_combined_cnstr(S,VR,CComb);
+combine_constraints(_S,SV,[],CComb) ->
+ SV ++ CComb;
+% combine_combined_cnstr(S,SV,CComb);
+combine_constraints(S,SV,VR,CComb) ->
+ C=intersection_sv_vr(S,SV,VR),
+ C ++ CComb.
+% combine_combined_cnstr(S,C,CComb).
+
+intersection_sv_vr(_,[],_VR) ->
+ [];
+intersection_sv_vr(_,_SV,[]) ->
+ [];
+intersection_sv_vr(_S,[C1={'SingleValue',SV}],[C2={'ValueRange',{_Lb,_Ub}}])
+ when integer(SV) ->
+ case is_int_in_vr(SV,C2) of
+ true -> [C1];
+ _ -> %%error({type,{"asn1 illegal constraint",C1,C2},S})
+ throw({error,{"asn1 illegal constraint",C1,C2}})
+ end;
+intersection_sv_vr(_S,[C1={'SingleValue',SV}],[C2])
+ when list(SV) ->
+ case lists:filter(fun(X)->is_int_in_vr(X,C2) end,SV) of
+ [] ->
+ %%error({type,{"asn1 illegal constraint",C1,C2},S});
+ throw({error,{"asn1 illegal constraint",C1,C2}});
+ [V] -> [{'SingleValue',V}];
+ L -> [{'SingleValue',L}]
+ end.
+
+
+
+intersection_of_size(_,[]) ->
+ [];
+intersection_of_size(_,C=[_SZ]) ->
+ C;
+intersection_of_size(S,[SZ,SZ|Rest]) ->
+ intersection_of_size(S,[SZ|Rest]);
+intersection_of_size(S,C=[C1={_,Int},{_,Range}|Rest])
+ when integer(Int),tuple(Range) ->
+ case Range of
+ {Lb,Ub} when Int >= Lb,
+ Int =< Ub ->
+ intersection_of_size(S,[C1|Rest]);
+ _ ->
+ throw({error,{asn1,{illegal_size_constraint,C}}})
+ end;
+intersection_of_size(S,[C1={_,Range},C2={_,Int}|Rest])
+ when integer(Int),tuple(Range) ->
+ intersection_of_size(S,[C2,C1|Rest]);
+intersection_of_size(S,[{_,{Lb1,Ub1}},{_,{Lb2,Ub2}}|Rest]) ->
+ Lb=greatest_LB(ordsets:from_list([Lb1,Lb2])),
+ Ub=smallest_UB(ordsets:from_list([Ub1,Ub2])),
+ intersection_of_size(S,[{'SizeConstraint',{Lb,Ub}}|Rest]);
+intersection_of_size(_,SZ) ->
+ throw({error,{asn1,{illegal_size_constraint,SZ}}}).
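+
+%% Hedged example (not in the original source): two size ranges are reduced
+%% to their intersection, e.g.
+%%   intersection_of_size(S,[{'SizeConstraint',{1,10}},
+%%                           {'SizeConstraint',{5,20}}])
+%%     -> [{'SizeConstraint',{5,10}}]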
+
+intersection_of_vr(_,[]) ->
+ [];
+intersection_of_vr(_,VR=[_C]) ->
+ VR;
+intersection_of_vr(S,[{_,{Lb1,Ub1}},{_,{Lb2,Ub2}}|Rest]) ->
+ Lb=greatest_LB(ordsets:from_list([Lb1,Lb2])),
+ Ub=smallest_UB(ordsets:from_list([Ub1,Ub2])),
+ intersection_of_vr(S,[{'ValueRange',{Lb,Ub}}|Rest]);
+intersection_of_vr(_S,VR) ->
+ %%error({type,{asn1,{illegal_value_range_constraint,VR}},S});
+ throw({error,{asn1,{illegal_value_range_constraint,VR}}}).
+
+intersection_of_sv(_,[]) ->
+ [];
+intersection_of_sv(_,SV=[_C]) ->
+ SV;
+intersection_of_sv(S,[SV,SV|Rest]) ->
+ intersection_of_sv(S,[SV|Rest]);
+intersection_of_sv(S,[{_,Int},{_,SV}|Rest]) when integer(Int),
+ list(SV) ->
+ SV2=intersection_of_sv1(S,Int,SV),
+ intersection_of_sv(S,[SV2|Rest]);
+intersection_of_sv(S,[{_,SV},{_,Int}|Rest]) when integer(Int),
+ list(SV) ->
+ SV2=intersection_of_sv1(S,Int,SV),
+ intersection_of_sv(S,[SV2|Rest]);
+intersection_of_sv(S,[{_,SV1},{_,SV2}|Rest]) when list(SV1),
+ list(SV2) ->
+ SV3=common_set(SV1,SV2),
+ intersection_of_sv(S,[SV3|Rest]);
+intersection_of_sv(_S,SV) ->
+ %%error({type,{asn1,{illegal_single_value_constraint,SV}},S}).
+ throw({error,{asn1,{illegal_single_value_constraint,SV}}}).
+
+intersection_of_sv1(_S,Int,SV) when integer(Int),list(SV) ->
+ case lists:member(Int,SV) of
+ true -> {'SingleValue',Int};
+ _ ->
+ %%error({type,{asn1,{illegal_single_value_constraint,Int,SV}},S})
+ throw({error,{asn1,{illegal_single_value_constraint,Int,SV}}})
+ end;
+intersection_of_sv1(_S,SV1,SV2) ->
+ %%error({type,{asn1,{illegal_single_value_constraint,SV1,SV2}},S}).
+ throw({error,{asn1,{illegal_single_value_constraint,SV1,SV2}}}).
+
+greatest_LB([H]) ->
+ H;
+greatest_LB(L) ->
+ greatest_LB1(lists:reverse(L)).
+greatest_LB1(['MIN',H2|_T])->
+ H2;
+greatest_LB1([H|_T]) ->
+ H.
+smallest_UB(L) ->
+ hd(L).
+
+common_set(SV1,SV2) ->
+ lists:filter(fun(X)->lists:member(X,SV1) end,SV2).
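+
+%% Hedged example (not in the original source):
+%%   common_set([1,2,3],[2,3,4]) -> [2,3]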
+
+is_int_in_vr(Int,{_,{'MIN','MAX'}}) when integer(Int) ->
+ true;
+is_int_in_vr(Int,{_,{'MIN',Ub}}) when integer(Int),Int =< Ub ->
+ true;
+is_int_in_vr(Int,{_,{Lb,'MAX'}}) when integer(Int),Int >= Lb ->
+ true;
+is_int_in_vr(Int,{_,{Lb,Ub}}) when integer(Int),Int >= Lb,Int =< Ub ->
+ true;
+is_int_in_vr(_,_) ->
+ false.
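+
+%% Hedged examples (not in the original source) of is_int_in_vr/2:
+%%   is_int_in_vr(5,{'ValueRange',{1,10}})     -> true
+%%   is_int_in_vr(0,{'ValueRange',{1,10}})     -> false
+%%   is_int_in_vr(5,{'ValueRange',{'MIN',10}}) -> true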
+
+
+
+check_imported(_S,Imodule,Name) ->
+ case asn1_db:dbget(Imodule,'MODULE') of
+ undefined ->
+ io:format("~s.asn1db not found~n",[Imodule]),
+ io:format("Type ~s imported from non existing module ~s~n",[Name,Imodule]);
+ Im when record(Im,module) ->
+ case is_exported(Im,Name) of
+ false ->
+ io:format("Imported type ~s not exported from module ~s~n",[Name,Imodule]);
+ _ ->
+ ok
+ end
+ end,
+ ok.
+
+is_exported(Module,Name) when record(Module,module) ->
+ {exports,Exports} = Module#module.exports,
+ case Exports of
+ all ->
+ true;
+ [] ->
+ false;
+ L when list(L) ->
+ case lists:keysearch(Name,#'Externaltypereference'.type,Exports) of
+ false -> false;
+ _ -> true
+ end
+ end.
+
+
+
+check_externaltypereference(S,Etref=#'Externaltypereference'{module=Emod})->
+ Currmod = S#state.mname,
+ MergedMods = S#state.inputmodules,
+ case Emod of
+ Currmod ->
+ %% reference to current module or to imported reference
+ check_reference(S,Etref);
+ _ ->
+ %% io:format("Type ~s IMPORTED FROM ~s~n",[Etype,Emod]),
+ case lists:member(Emod,MergedMods) of
+ true ->
+ check_reference(S,Etref);
+ false ->
+ Etref
+ end
+ end.
+
+check_reference(S,#'Externaltypereference'{pos=Pos,module=Emod,type=Name}) ->
+ ModName = S#state.mname,
+ case asn1_db:dbget(ModName,Name) of
+ undefined ->
+ case imported(S,Name) of
+ {ok,Imodule} ->
+ check_imported(S,Imodule,Name),
+ #'Externaltypereference'{module=Imodule,type=Name};
+ _ ->
+		%may be a renamed type in multi-file compilation!
+ {_,T}=renamed_reference(S,Name,Emod),
+ NewName = asn1ct:get_name_of_def(T),
+ NewPos = asn1ct:get_pos_of_def(T),
+ #'Externaltypereference'{pos=NewPos,
+ module=ModName,
+ type=NewName}
+ end;
+ _ ->
+ %% cannot do check_type here due to recursive definitions, like
+ %% S ::= SEQUENCE {a INTEGER, b S}. This implies that references
+ %% that appear before the definition will be an
+ %% Externaltypereference in the abstract syntax tree
+ #'Externaltypereference'{pos=Pos,module=ModName,type=Name}
+ end.
+
+
+name2Extref(_Mod,Name) when record(Name,'Externaltypereference') ->
+ Name;
+name2Extref(Mod,Name) ->
+ #'Externaltypereference'{module=Mod,type=Name}.
+
+get_referenced_type(S,Ext) when record(Ext,'Externaltypereference') ->
+ case match_parameters(Ext, S#state.parameters) of
+ Ext ->
+ #'Externaltypereference'{pos=Pos,module=Emod,type=Etype} = Ext,
+ case S#state.mname of
+ Emod -> % a local reference in this module
+ get_referenced1(S,Emod,Etype,Pos);
+ _ ->% always when multi file compiling
+ case lists:member(Emod,S#state.inputmodules) of
+ true ->
+ get_referenced1(S,Emod,Etype,Pos);
+ false ->
+ get_referenced(S,Emod,Etype,Pos)
+ end
+ end;
+ Other ->
+ {undefined,Other}
+ end;
+get_referenced_type(S=#state{mname=Emod},
+ ERef=#'Externalvaluereference'{pos=P,module=Emod,
+ value=Eval}) ->
+ case match_parameters(ERef,S#state.parameters) of
+ ERef ->
+ get_referenced1(S,Emod,Eval,P);
+ OtherERef when record(OtherERef,'Externalvaluereference') ->
+ get_referenced_type(S,OtherERef);
+ Value ->
+ {Emod,Value}
+ end;
+get_referenced_type(S,ERef=#'Externalvaluereference'{pos=Pos,module=Emod,
+ value=Eval}) ->
+ case match_parameters(ERef,S#state.parameters) of
+ ERef ->
+ case lists:member(Emod,S#state.inputmodules) of
+ true ->
+ get_referenced1(S,Emod,Eval,Pos);
+ false ->
+ get_referenced(S,Emod,Eval,Pos)
+ end;
+ OtherERef ->
+ get_referenced_type(S,OtherERef)
+ end;
+get_referenced_type(S,#identifier{val=Name,pos=Pos}) ->
+ get_referenced1(S,undefined,Name,Pos);
+get_referenced_type(_S,Type) ->
+ {undefined,Type}.
+
+%% get_referenced/4
+%% In the case of an imported parameterized type, the referenced entity
+%% Ename may reference imported entities in the other module, which implies
+%% that asn1_db:dbget will fail even though the referenced entity exists.
+%% Thus Emod may be the module that imports the entity Ename and may not
+%% hold the data about Ename.
+get_referenced(S,Emod,Ename,Pos) ->
+ case asn1_db:dbget(Emod,Ename) of
+ undefined ->
+ %% May be an imported entity in module Emod
+% throw({error,{asn1,{undefined_type_or_value,{Emod,Ename}}}});
+ NewS = S#state{module=asn1_db:dbget(Emod,'MODULE')},
+ get_imported(NewS,Ename,Emod,Pos);
+ T when record(T,typedef) ->
+ Spec = T#typedef.typespec,
+ case Spec#type.def of
+ Tref when record(Tref,typereference) ->
+ Def = #'Externaltypereference'{module=Emod,
+ type=Tref#typereference.val,
+ pos=Tref#typereference.pos},
+
+
+ {Emod,T#typedef{typespec=Spec#type{def=Def}}};
+ _ ->
+ {Emod,T} % should add check that T is exported here
+ end;
+ V -> {Emod,V}
+ end.
+
+get_referenced1(S,ModuleName,Name,Pos) ->
+ case asn1_db:dbget(S#state.mname,Name) of
+ undefined ->
+ %% ModuleName may be other than S#state.mname when
+ %% multi file compiling is used.
+ get_imported(S,Name,ModuleName,Pos);
+ T ->
+ {S#state.mname,T}
+ end.
+
+get_imported(S,Name,Module,Pos) ->
+ case imported(S,Name) of
+ {ok,Imodule} ->
+ case asn1_db:dbget(Imodule,'MODULE') of
+ undefined ->
+ throw({error,{asn1,{module_not_found,Imodule}}});
+ Im when record(Im,module) ->
+ case is_exported(Im,Name) of
+ false ->
+ throw({error,
+ {asn1,{not_exported,{Im,Name}}}});
+ _ ->
+ get_referenced_type(S,
+ #'Externaltypereference'
+ {module=Imodule,
+ type=Name,pos=Pos})
+ end
+ end;
+ _ ->
+ renamed_reference(S,Name,Module)
+ end.
+
+renamed_reference(S,Name,Module) ->
+ %% first check if there is a renamed type in this module
+ %% second check if any type was imported with this name
+ case ets:info(renamed_defs) of
+ undefined -> throw({error,{asn1,{undefined_type,Name}}});
+ _ ->
+ case ets:match(renamed_defs,{'$1',Name,Module}) of
+ [] ->
+ case ets:info(original_imports) of
+ undefined ->
+ throw({error,{asn1,{undefined_type,Name}}});
+ _ ->
+ case ets:match(original_imports,{Module,'$1'}) of
+ [] ->
+ throw({error,{asn1,{undefined_type,Name}}});
+ [[ImportsList]] ->
+ case get_importmoduleoftype(ImportsList,Name) of
+ undefined ->
+ throw({error,{asn1,{undefined_type,Name}}});
+ NextMod ->
+ renamed_reference(S,Name,NextMod)
+ end
+ end
+ end;
+ [[NewTypeName]] ->
+ get_referenced1(S,Module,NewTypeName,undefined)
+ end
+ end.
+
+get_importmoduleoftype([I|Is],Name) ->
+ Index = #'Externaltypereference'.type,
+ case lists:keysearch(Name,Index,I#'SymbolsFromModule'.symbols) of
+ {value,_Ref} ->
+ (I#'SymbolsFromModule'.module)#'Externaltypereference'.type;
+ _ ->
+ get_importmoduleoftype(Is,Name)
+ end;
+get_importmoduleoftype([],_) ->
+ undefined.
+
+
+match_parameters(Name,[]) ->
+ Name;
+
+match_parameters(#'Externaltypereference'{type=Name},[{#'Externaltypereference'{type=Name},NewName}|_T]) ->
+ NewName;
+match_parameters(#'Externaltypereference'{type=Name},[{{_,#'Externaltypereference'{type=Name}},NewName}|_T]) ->
+ NewName;
+% match_parameters(#'Externaltypereference'{type=Name},[{#typereference{val=Name},NewName}|T]) ->
+% NewName;
+% match_parameters(#'Externaltypereference'{type=Name},[{{_,#typereference{val=Name}},NewName}|T]) ->
+% NewName;
+%match_parameters(#typereference{val=Name},[{#typereference{val=Name},NewName}|T]) ->
+% NewName;
+match_parameters(#'Externalvaluereference'{value=Name},[{#'Externalvaluereference'{value=Name},NewName}|_T]) ->
+ NewName;
+match_parameters(#'Externalvaluereference'{value=Name},[{{_,#'Externalvaluereference'{value=Name}},NewName}|_T]) ->
+ NewName;
+% match_parameters(#identifier{val=Name},[{#identifier{val=Name},NewName}|T]) ->
+% NewName;
+% match_parameters(#identifier{val=Name},[{{_,#identifier{val=Name}},NewName}|T]) ->
+% NewName;
+match_parameters({valueset,#type{def=#'Externaltypereference'{type=Name}}},
+ [{{_,#'Externaltypereference'{type=Name}},{valueset,#type{def=NewName}}}|_T]) ->
+ NewName;
+match_parameters({valueset,#type{def=#'Externaltypereference'{type=Name}}},
+ [{{_,#'Externaltypereference'{type=Name}},NewName}|_T]) ->
+ NewName;
+% match_parameters({valueset,#type{def=#'Externaltypereference'{type=Name}}},
+% [{{_,#typereference{val=Name}},{valueset,#type{def=NewName}}}|T]) ->
+% NewName;
+% match_parameters({valueset,#type{def=#'Externaltypereference'{type=Name}}},
+% [{{_,#typereference{val=Name}},NewName}|T]) ->
+% NewName;
+
+match_parameters(Name, [_H|T]) ->
+ %%io:format("match_parameters(~p,~p)~n",[Name,[H|T]]),
+ match_parameters(Name,T).
+
+imported(S,Name) ->
+ {imports,Ilist} = (S#state.module)#module.imports,
+ imported1(Name,Ilist).
+
+imported1(Name,
+ [#'SymbolsFromModule'{symbols=Symlist,
+ module=#'Externaltypereference'{type=ModuleName}}|T]) ->
+ case lists:keysearch(Name,#'Externaltypereference'.type,Symlist) of
+ {value,_V} ->
+ {ok,ModuleName};
+ _ ->
+ imported1(Name,T)
+ end;
+imported1(_Name,[]) ->
+ false.
+
+
+check_integer(_S,[],_C) ->
+ ok;
+check_integer(S,NamedNumberList,_C) ->
+ case check_unique(NamedNumberList,2) of
+ [] ->
+ check_int(S,NamedNumberList,[]);
+ L when list(L) ->
+ error({type,{duplicates,L},S}),
+ unchanged
+
+ end.
+
+check_int(S,[{'NamedNumber',Id,Num}|T],Acc) when integer(Num) ->
+ check_int(S,T,[{Id,Num}|Acc]);
+check_int(S,[{'NamedNumber',Id,{identifier,_,Name}}|T],Acc) ->
+ Val = dbget_ex(S,S#state.mname,Name),
+ check_int(S,[{'NamedNumber',Id,Val#valuedef.value}|T],Acc);
+check_int(_S,[],Acc) ->
+ lists:keysort(2,Acc).
+
+
+
+check_bitstring(_S,[],_Constr) ->
+ [];
+check_bitstring(S,NamedNumberList,_Constr) ->
+ case check_unique(NamedNumberList,2) of
+ [] ->
+ check_bitstr(S,NamedNumberList,[]);
+ L when list(L) ->
+ error({type,{duplicates,L},S}),
+ unchanged
+ end.
+
+check_bitstr(S,[{'NamedNumber',Id,Num}|T],Acc)when integer(Num) ->
+ check_bitstr(S,T,[{Id,Num}|Acc]);
+check_bitstr(S,[{'NamedNumber',Id,Name}|T],Acc) when atom(Name) ->
+%%check_bitstr(S,[{'NamedNumber',Id,{identifier,_,Name}}|T],Acc) ->
+%% io:format("asn1ct_check:check_bitstr/3 hej hop ~w~n",[Name]),
+ Val = dbget_ex(S,S#state.mname,Name),
+%% io:format("asn1ct_check:check_bitstr/3: ~w~n",[Val]),
+ check_bitstr(S,[{'NamedNumber',Id,Val#valuedef.value}|T],Acc);
+check_bitstr(S,[],Acc) ->
+ case check_unique(Acc,2) of
+ [] ->
+ lists:keysort(2,Acc);
+ L when list(L) ->
+ error({type,{duplicate_values,L},S}),
+ unchanged
+ end.
+
+%%check_bitstring(S,NamedNumberList,Constr) ->
+%% NamedNumberList.
+
+%% Check INSTANCE OF
+%% check that DefinedObjectClass is of TYPE-IDENTIFIER class
+%% If Constraint is empty make it the general INSTANCE OF type
+%% If Constraint is not empty make an inlined type
+%% convert INSTANCE OF to the associated type
+check_instance_of(S,DefinedObjectClass,Constraint) ->
+ check_type_identifier(S,DefinedObjectClass),
+ iof_associated_type(S,Constraint).
+
+
+check_type_identifier(_S,'TYPE-IDENTIFIER') ->
+ ok;
+check_type_identifier(S,Eref=#'Externaltypereference'{}) ->
+ case get_referenced_type(S,Eref) of
+ {_,#classdef{name='TYPE-IDENTIFIER'}} -> ok;
+ {_,TD=#typedef{typespec=#type{def=#'Externaltypereference'{}}}} ->
+ check_type_identifier(S,(TD#typedef.typespec)#type.def);
+ _ ->
+ error({type,{"object set in type INSTANCE OF "
+ "not of class TYPE-IDENTIFIER",Eref},S})
+ end.
+
+iof_associated_type(S,[]) ->
+ %% in this case encode/decode functions for INSTANCE OF must be
+ %% generated
+ case get(instance_of) of
+ undefined ->
+ AssociateSeq = iof_associated_type1(S,[]),
+ Tag =
+ case S#state.erule of
+ ber_bin_v2 ->
+ [?TAG_CONSTRUCTED(?N_INSTANCE_OF)];
+ _ -> []
+ end,
+ TypeDef=#typedef{checked=true,
+ name='INSTANCE OF',
+ typespec=#type{tag=Tag,
+ def=AssociateSeq}},
+ asn1_db:dbput(S#state.mname,'INSTANCE OF',TypeDef),
+ put(instance_of,generate);
+ _ ->
+ ok
+ end,
+ #'Externaltypereference'{module=S#state.mname,type='INSTANCE OF'};
+iof_associated_type(S,C) ->
+ iof_associated_type1(S,C).
+
+iof_associated_type1(S,C) ->
+ {TableCInf,Comp1Cnstr,Comp2Cnstr,Comp2tablecinf}=
+ instance_of_constraints(S,C),
+
+ ModuleName = S#state.mname,
+ Typefield_type=
+ case C of
+ [] -> 'ASN1_OPEN_TYPE';
+ _ -> {typefield,'Type'}
+ end,
+ {ObjIdTag,C1TypeTag}=
+ case S#state.erule of
+ ber_bin_v2 ->
+ {[{'UNIVERSAL',8}],
+ [#tag{class='UNIVERSAL',
+ number=6,
+ type='IMPLICIT',
+ form=0}]};
+ _ -> {[{'UNIVERSAL','INTEGER'}],[]}
+ end,
+ TypeIdentifierRef=#'Externaltypereference'{module=ModuleName,
+ type='TYPE-IDENTIFIER'},
+ ObjectIdentifier =
+ #'ObjectClassFieldType'{classname=TypeIdentifierRef,
+ class=[],
+ fieldname={id,[]},
+ type={fixedtypevaluefield,id,
+ #type{def='OBJECT IDENTIFIER'}}},
+ Typefield =
+ #'ObjectClassFieldType'{classname=TypeIdentifierRef,
+ class=[],
+ fieldname={'Type',[]},
+ type=Typefield_type},
+ IOFComponents =
+ [#'ComponentType'{name='type-id',
+ typespec=#type{tag=C1TypeTag,
+ def=ObjectIdentifier,
+ constraint=Comp1Cnstr},
+ prop=mandatory,
+ tags=ObjIdTag},
+ #'ComponentType'{name=value,
+ typespec=#type{tag=[#tag{class='CONTEXT',
+ number=0,
+ type='EXPLICIT',
+ form=32}],
+ def=Typefield,
+ constraint=Comp2Cnstr,
+ tablecinf=Comp2tablecinf},
+ prop=mandatory,
+ tags=[{'CONTEXT',0}]}],
+ #'SEQUENCE'{tablecinf=TableCInf,
+ components=IOFComponents}.
+
+
+%% returns the leading attribute, the constraint of the components and
+%% the tablecinf value for the second component.
+instance_of_constraints(_,[]) ->
+ {false,[],[],[]};
+instance_of_constraints(S,#constraint{c={simpletable,Type}}) ->
+ #type{def=#'Externaltypereference'{type=Name}} = Type,
+ ModuleName = S#state.mname,
+ ObjectSetRef=#'Externaltypereference'{module=ModuleName,
+ type=Name},
+ CRel=[{componentrelation,{objectset,
+ undefined, %% pos
+ ObjectSetRef},
+ [{innermost,
+ [#'Externalvaluereference'{module=ModuleName,
+ value=type}]}]}],
+ TableCInf=#simpletableattributes{objectsetname=Name,
+ c_name='type-id',
+ c_index=1,
+ usedclassfield=id,
+ uniqueclassfield=id,
+ valueindex=[]},
+ {TableCInf,[{simpletable,Name}],CRel,[{objfun,ObjectSetRef}]}.
+
+%% Check ENUMERATED
+%% ****************************************
+%% Check that all values are unique
+%% assign values to un-numbered identifiers
+%% check that the constraints are allowed and correct
+%% put the updated info back into database
+check_enumerated(_S,[{Name,Number}|Rest],_Constr) when atom(Name), integer(Number)->
+ %% already checked , just return the same list
+ [{Name,Number}|Rest];
+check_enumerated(S,NamedNumberList,_Constr) ->
+ check_enum(S,NamedNumberList,[],[]).
+
+%% identifiers are put in Acc2
+%% returns either [{Name,Number}] or {[{Name,Number}],[{ExtName,ExtNumber}]}
+%% the latter is returned if the ENUMERATION contains EXTENSIONMARK
+check_enum(S,[{'NamedNumber',Id,Num}|T],Acc1,Acc2) when integer(Num) ->
+ check_enum(S,T,[{Id,Num}|Acc1],Acc2);
+check_enum(S,[{'NamedNumber',Id,{identifier,_,Name}}|T],Acc1,Acc2) ->
+ Val = dbget_ex(S,S#state.mname,Name),
+ check_enum(S,[{'NamedNumber',Id,Val#valuedef.value}|T],Acc1,Acc2);
+check_enum(S,['EXTENSIONMARK'|T],Acc1,Acc2) ->
+ NewAcc2 = lists:keysort(2,Acc1),
+ NewList = enum_number(lists:reverse(Acc2),NewAcc2,0,[]),
+ { NewList, check_enum(S,T,[],[])};
+check_enum(S,[Id|T],Acc1,Acc2) when atom(Id) ->
+ check_enum(S,T,Acc1,[Id|Acc2]);
+check_enum(_S,[],Acc1,Acc2) ->
+ NewAcc2 = lists:keysort(2,Acc1),
+ enum_number(lists:reverse(Acc2),NewAcc2,0,[]).
+
+
+% Assign numbers to identifiers, counting from 0, 1, 2, ..., but a number
+% must not be the same as one already assigned to a NamedNumber
+enum_number([H|T],[{Id,Num}|T2],Cnt,Acc) when Num > Cnt ->
+ enum_number(T,[{Id,Num}|T2],Cnt+1,[{H,Cnt}|Acc]);
+enum_number([H|T],[{Id,Num}|T2],Cnt,Acc) when Num < Cnt -> % negative Num
+ enum_number(T,T2,Cnt+1,[{H,Cnt},{Id,Num}|Acc]);
+enum_number([],L2,_Cnt,Acc) ->
+ lists:concat([lists:reverse(Acc),L2]);
+enum_number(L,[{Id,Num}|T2],Cnt,Acc) -> % Num == Cnt
+ enum_number(L,T2,Cnt+1,[{Id,Num}|Acc]);
+enum_number([H|T],[],Cnt,Acc) ->
+ enum_number(T,[],Cnt+1,[{H,Cnt}|Acc]).
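+
+%% Hedged example (not in the original source): two bare identifiers a and c
+%% together with the named number {b,1} are numbered around it:
+%%   enum_number([a,c],[{b,1}],0,[]) -> [{a,0},{b,1},{c,2}]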
+
+
+check_boolean(_S,_Constr) ->
+ ok.
+
+check_octetstring(_S,_Constr) ->
+ ok.
+
+% check all aspects of a SEQUENCE
+% - that all component names are unique
+% - that all TAGS are ok (when TAG default is applied)
+% - that each component is of a valid type
+% - that the extension marks are valid
+
+check_sequence(S,Type,Comps) ->
+ Components = expand_components(S,Comps),
+ case check_unique([C||C <- Components ,record(C,'ComponentType')]
+ ,#'ComponentType'.name) of
+ [] ->
+ %% sort_canonical(Components),
+ Components2 = maybe_automatic_tags(S,Components),
+ %% check the table constraints from here. The outermost type
+ %% is Type, the innermost is Comps (the list of components)
+ NewComps =
+ case check_each_component(S,Type,Components2) of
+ NewComponents when list(NewComponents) ->
+ check_unique_sequence_tags(S,NewComponents),
+ NewComponents;
+ Ret = {NewComponents,NewEcomps} ->
+ TagComps = NewComponents ++
+ [Comp#'ComponentType'{prop='OPTIONAL'}|| Comp <- NewEcomps],
+ %% extension components are like optionals when it comes to tagging
+ check_unique_sequence_tags(S,TagComps),
+ Ret
+ end,
+		    %% CRelInf is the "leading attribute" information
+		    %% necessary for generating the lookup in the
+		    %% object set table,
+		    %% i.e. getenc_ObjectSet/getdec_ObjectSet.
+		    %% An {objfun,ERef} tuple is added in NewComps2, in the
+		    %% tablecinf field of the type record of the component
+		    %% relation constrained type
+% io:format("NewComps: ~p~n",[NewComps]),
+ {CRelInf,NewComps2} = componentrelation_leadingattr(S,NewComps),
+% io:format("CRelInf: ~p~n",[CRelInf]),
+% io:format("NewComps2: ~p~n",[NewComps2]),
+		    %% CompListWithTblInf has had a lot of unnecessary info
+		    %% about the involved class removed, such as the class
+		    %% of the object set.
+ CompListWithTblInf = get_tableconstraint_info(S,Type,NewComps2),
+% io:format("CompListWithTblInf: ~p~n",[CompListWithTblInf]),
+ {CRelInf,CompListWithTblInf};
+ Dupl ->
+ throw({error,{asn1,{duplicate_components,Dupl}}})
+ end.
+
+expand_components(S, [{'COMPONENTS OF',Type}|T]) ->
+ CompList =
+ case get_referenced_type(S,Type#type.def) of
+ {_,#typedef{typespec=#type{def=Seq}}} when record(Seq,'SEQUENCE') ->
+ case Seq#'SEQUENCE'.components of
+ {Root,_Ext} -> Root;
+ Root -> Root
+ end;
+ Err -> throw({error,{asn1,{illegal_COMPONENTS_OF,Err}}})
+ end,
+ expand_components(S,CompList) ++ expand_components(S,T);
+expand_components(S,[H|T]) ->
+ [H|expand_components(S,T)];
+expand_components(_,[]) ->
+ [].
+
+check_unique_sequence_tags(S,[#'ComponentType'{prop=mandatory}|Rest]) ->
+ check_unique_sequence_tags(S,Rest);
+check_unique_sequence_tags(S,[C|Rest]) when record(C,'ComponentType') ->
+ check_unique_sequence_tags1(S,Rest,[C]);% optional or default
+check_unique_sequence_tags(S,[_ExtensionMarker|Rest]) ->
+ check_unique_sequence_tags(S,Rest);
+check_unique_sequence_tags(_S,[]) ->
+ true.
+
+check_unique_sequence_tags1(S,[C|Rest],Acc) when record(C,'ComponentType') ->
+ case C#'ComponentType'.prop of
+ mandatory ->
+ check_unique_tags(S,lists:reverse([C|Acc])),
+ check_unique_sequence_tags(S,Rest);
+ _ ->
+ check_unique_sequence_tags1(S,Rest,[C|Acc]) % default or optional
+ end;
+check_unique_sequence_tags1(S,[H|Rest],Acc) ->
+ check_unique_sequence_tags1(S,Rest,[H|Acc]);
+check_unique_sequence_tags1(S,[],Acc) ->
+ check_unique_tags(S,lists:reverse(Acc)).
+
+check_sequenceof(S,Type,Component) when record(Component,type) ->
+ check_type(S,Type,Component).
+
+check_set(S,Type,Components) ->
+ {TableCInf,NewComponents} = check_sequence(S,Type,Components),
+ case lists:member(der,S#state.options) of
+ true when S#state.erule == ber;
+ S#state.erule == ber_bin ->
+ {Sorted,SortedComponents} =
+ sort_components(S#state.tname,
+ (S#state.module)#module.tagdefault,
+ NewComponents),
+ {Sorted,TableCInf,SortedComponents};
+ _ ->
+ {false,TableCInf,NewComponents}
+ end.
+
+sort_components(_TypeName,'AUTOMATIC',Components) ->
+ {true,Components};
+sort_components(TypeName,_TagDefault,Components) ->
+ case untagged_choice(Components) of
+ false ->
+ {true,sort_components1(TypeName,Components,[],[],[],[])};
+ true ->
+ {dynamic,Components} % sort in run-time
+ end.
+
+sort_components1(TypeName,[C=#'ComponentType'{tags=[{'UNIVERSAL',_}|_R]}|Cs],
+ UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
+ sort_components1(TypeName,Cs,[C|UnivAcc],ApplAcc,ContAcc,PrivAcc);
+sort_components1(TypeName,[C=#'ComponentType'{tags=[{'APPLICATION',_}|_R]}|Cs],
+ UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
+ sort_components1(TypeName,Cs,UnivAcc,[C|ApplAcc],ContAcc,PrivAcc);
+sort_components1(TypeName,[C=#'ComponentType'{tags=[{'CONTEXT',_}|_R]}|Cs],
+ UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
+ sort_components1(TypeName,Cs,UnivAcc,ApplAcc,[C|ContAcc],PrivAcc);
+sort_components1(TypeName,[C=#'ComponentType'{tags=[{'PRIVATE',_}|_R]}|Cs],
+ UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
+ sort_components1(TypeName,Cs,UnivAcc,ApplAcc,ContAcc,[C|PrivAcc]);
+sort_components1(TypeName,[],UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
+ I = #'ComponentType'.tags,
+ ascending_order_check(TypeName,sort_universal_type(UnivAcc)) ++
+ ascending_order_check(TypeName,lists:keysort(I,ApplAcc)) ++
+ ascending_order_check(TypeName,lists:keysort(I,ContAcc)) ++
+ ascending_order_check(TypeName,lists:keysort(I,PrivAcc)).
+
+ascending_order_check(TypeName,Components) ->
+ ascending_order_check1(TypeName,Components),
+ Components.
+
+ascending_order_check1(TypeName,
+ [C1 = #'ComponentType'{tags=[{_,T}|_]},
+ C2 = #'ComponentType'{tags=[{_,T}|_]}|Rest]) ->
+ io:format("WARNING: Indistinct tag ~p in SET ~p, components ~p and ~p~n",
+ [T,TypeName,C1#'ComponentType'.name,C2#'ComponentType'.name]),
+ ascending_order_check1(TypeName,[C2|Rest]);
+ascending_order_check1(TypeName,
+ [C1 = #'ComponentType'{tags=[{'UNIVERSAL',T1}|_]},
+ C2 = #'ComponentType'{tags=[{'UNIVERSAL',T2}|_]}|Rest]) ->
+ case (asn1ct_gen_ber:decode_type(T1) == asn1ct_gen_ber:decode_type(T2)) of
+ true ->
+ io:format("WARNING: Indistinct tags ~p and ~p in"
+ " SET ~p, components ~p and ~p~n",
+ [T1,T2,TypeName,C1#'ComponentType'.name,
+ C2#'ComponentType'.name]),
+ ascending_order_check1(TypeName,[C2|Rest]);
+ _ ->
+ ascending_order_check1(TypeName,[C2|Rest])
+ end;
+ascending_order_check1(N,[_|Rest]) ->
+ ascending_order_check1(N,Rest);
+ascending_order_check1(_,[_]) ->
+ ok;
+ascending_order_check1(_,[]) ->
+ ok.
+
+sort_universal_type(Components) ->
+ List = lists:map(fun(C) ->
+ #'ComponentType'{tags=[{_,T}|_]} = C,
+ {asn1ct_gen_ber:decode_type(T),C}
+ end,
+ Components),
+ SortedList = lists:keysort(1,List),
+ lists:map(fun(X)->element(2,X) end,SortedList).
+
+untagged_choice([#'ComponentType'{typespec=#type{tag=[],def={'CHOICE',_}}}|_Rest]) ->
+ true;
+untagged_choice([_|Rest]) ->
+ untagged_choice(Rest);
+untagged_choice([]) ->
+ false.
+
+check_setof(S,Type,Component) when record(Component,type) ->
+ check_type(S,Type,Component).
+
+check_restrictedstring(_S,_Def,_Constr) ->
+ ok.
+
+check_objectidentifier(_S,_Constr) ->
+ ok.
+
+% check all aspects of a CHOICE
+% - that all alternative names are unique
+% - that all TAGS are ok (when TAG default is applied)
+% - that each alternative is of a valid type
+% - that the extension marks are valid
+check_choice(S,Type,Components) when list(Components) ->
+ case check_unique([C||C <- Components,
+ record(C,'ComponentType')],#'ComponentType'.name) of
+ [] ->
+ %% sort_canonical(Components),
+ Components2 = maybe_automatic_tags(S,Components),
+ %NewComps =
+ case check_each_alternative(S,Type,Components2) of
+ {NewComponents,NewEcomps} ->
+ check_unique_tags(S,NewComponents ++ NewEcomps),
+ {NewComponents,NewEcomps};
+ NewComponents ->
+ check_unique_tags(S,NewComponents),
+ NewComponents
+ end;
+%% CompListWithTblInf = get_tableconstraint_info(S,Type,NewComps);
+ Dupl ->
+ throw({error,{asn1,{duplicate_choice_alternatives,Dupl}}})
+ end;
+check_choice(_S,_,[]) ->
+ [].
+
+%% probably dead code that should be removed
+%%maybe_automatic_tags(S,{Rc,Ec}) ->
+%% {maybe_automatic_tags1(S,Rc,0),maybe_automatic_tags1(S,Ec,length(Rc))};
+maybe_automatic_tags(#state{erule=per},C) ->
+ C;
+maybe_automatic_tags(#state{erule=per_bin},C) ->
+ C;
+maybe_automatic_tags(S,C) ->
+ maybe_automatic_tags1(S,C,0).
+
+maybe_automatic_tags1(S,C,TagNo) ->
+ case (S#state.module)#module.tagdefault of
+ 'AUTOMATIC' ->
+ generate_automatic_tags(S,C,TagNo);
+ _ ->
+	    %% The module may be a multi-file module where only some of
+	    %% the modules have default tag AUTOMATIC TAGS; in that case
+	    %% the names of those types are saved in the table automatic_tags
+ Name= S#state.tname,
+ case is_automatic_tagged_in_multi_file(Name) of
+ true ->
+ generate_automatic_tags(S,C,TagNo);
+ false ->
+ C
+ end
+ end.
+
+is_automatic_tagged_in_multi_file(Name) ->
+ case ets:info(automatic_tags) of
+ undefined ->
+	    %% this is the case when there is no multi-file compilation
+ false;
+ _ ->
+ case ets:member(automatic_tags,Name) of
+ true ->
+ true;
+ _ ->
+ false
+ end
+ end.
+
+generate_automatic_tags(_S,C,TagNo) ->
+ case any_manual_tag(C) of
+ true ->
+ C;
+ false ->
+ generate_automatic_tags1(C,TagNo)
+ end.
+
+generate_automatic_tags1([H|T],TagNo) when record(H,'ComponentType') ->
+ #'ComponentType'{typespec=Ts} = H,
+ NewTs = Ts#type{tag=[#tag{class='CONTEXT',
+ number=TagNo,
+ type={default,'IMPLICIT'},
+ form= 0 }]}, % PRIMITIVE
+ [H#'ComponentType'{typespec=NewTs}|generate_automatic_tags1(T,TagNo+1)];
+generate_automatic_tags1([ExtMark|T],TagNo) -> % EXTENSIONMARK
+ [ExtMark | generate_automatic_tags1(T,TagNo)];
+generate_automatic_tags1([],_) ->
+ [].
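+
+%% Hedged note (not in the original source): each component above receives a
+%% CONTEXT tag numbered 0,1,2,... in order, with type {default,'IMPLICIT'},
+%% while extension marks are passed through without consuming a tag number.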
+
+any_manual_tag([#'ComponentType'{typespec=#type{tag=[]}}|Rest]) ->
+ any_manual_tag(Rest);
+any_manual_tag([{'EXTENSIONMARK',_,_}|Rest]) ->
+ any_manual_tag(Rest);
+any_manual_tag([_|_Rest]) ->
+ true;
+any_manual_tag([]) ->
+ false.
+
+
+check_unique_tags(S,C) ->
+ case (S#state.module)#module.tagdefault of
+ 'AUTOMATIC' ->
+ case any_manual_tag(C) of
+ false -> true;
+ _ -> collect_and_sort_tags(C,[])
+ end;
+ _ ->
+ collect_and_sort_tags(C,[])
+ end.
+
+collect_and_sort_tags([C|Rest],Acc) when record(C,'ComponentType') ->
+ collect_and_sort_tags(Rest,C#'ComponentType'.tags ++ Acc);
+collect_and_sort_tags([_|Rest],Acc) ->
+ collect_and_sort_tags(Rest,Acc);
+collect_and_sort_tags([],Acc) ->
+ {Dupl,_}= lists:mapfoldl(fun(El,El)->{{dup,El},El};(El,_Prev)-> {El,El} end,notag,lists:sort(Acc)),
+ Dupl2 = [Dup|| {dup,Dup} <- Dupl],
+ if
+ length(Dupl2) > 0 ->
+ throw({error,{asn1,{duplicates_of_the_tags,Dupl2}}});
+ true ->
+ true
+ end.
+
+check_unique(L,Pos) ->
+ Slist = lists:keysort(Pos,L),
+ check_unique2(Slist,Pos,[]).
+
+check_unique2([A,B|T],Pos,Acc) when element(Pos,A) == element(Pos,B) ->
+ check_unique2([B|T],Pos,[element(Pos,B)|Acc]);
+check_unique2([_|T],Pos,Acc) ->
+ check_unique2(T,Pos,Acc);
+check_unique2([],_,Acc) ->
+ lists:reverse(Acc).
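+
+%% Hedged example (not in the original source): check_unique/2 returns the
+%% duplicated key values, e.g.
+%%   check_unique([{a,1},{b,1},{c,2}],2) -> [1]
+%%   check_unique([{a,1},{b,2}],2)       -> []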
+
+check_each_component(S,Type,{Rlist,ExtList}) ->
+ {check_each_component(S,Type,Rlist),
+ check_each_component(S,Type,ExtList)};
+check_each_component(S,Type,Components) ->
+ check_each_component(S,Type,Components,[],[],noext).
+
+check_each_component(S = #state{abscomppath=Path,recordtopname=TopName},Type,
+ [C|Ct],Acc,Extacc,Ext) when record(C,'ComponentType') ->
+ #'ComponentType'{name=Cname,typespec=Ts,prop=Prop} = C,
+ NewAbsCPath =
+ case Ts#type.def of
+ #'Externaltypereference'{} -> [];
+ _ -> [Cname|Path]
+ end,
+ CheckedTs = check_type(S#state{abscomppath=NewAbsCPath,
+ recordtopname=[Cname|TopName]},Type,Ts),
+ NewTags = get_taglist(S,CheckedTs),
+
+ NewProp =
+% case lists:member(der,S#state.options) of
+% true ->
+% True ->
+ case normalize_value(S,CheckedTs,Prop,[Cname|TopName]) of
+ mandatory -> mandatory;
+ 'OPTIONAL' -> 'OPTIONAL';
+ DefaultValue -> {'DEFAULT',DefaultValue}
+ end,
+% _ ->
+% Prop
+% end,
+ NewC = C#'ComponentType'{typespec=CheckedTs,prop=NewProp,tags=NewTags},
+ case Ext of
+ noext ->
+ check_each_component(S,Type,Ct,[NewC|Acc],Extacc,Ext);
+ ext ->
+ check_each_component(S,Type,Ct,Acc,[NewC|Extacc],Ext)
+ end;
+check_each_component(S,Type,[_|Ct],Acc,Extacc,noext) -> % skip 'EXTENSIONMARK'
+ check_each_component(S,Type,Ct,Acc,Extacc,ext);
+check_each_component(_S,_,[_C|_Ct],_,_,ext) -> % skip 'EXTENSIONMARK'
+ throw({error,{asn1,{too_many_extension_marks}}});
+check_each_component(_S,_,[],Acc,Extacc,ext) ->
+ {lists:reverse(Acc),lists:reverse(Extacc)};
+check_each_component(_S,_,[],Acc,_,noext) ->
+ lists:reverse(Acc).
+
+check_each_alternative(S,Type,{Rlist,ExtList}) ->
+ {check_each_alternative(S,Type,Rlist),
+ check_each_alternative(S,Type,ExtList)};
+check_each_alternative(S,Type,[C|Ct]) ->
+ check_each_alternative(S,Type,[C|Ct],[],[],noext).
+
+check_each_alternative(S=#state{abscomppath=Path,recordtopname=TopName},Type,[C|Ct],
+ Acc,Extacc,Ext) when record(C,'ComponentType') ->
+ #'ComponentType'{name=Cname,typespec=Ts,prop=_Prop} = C,
+ NewAbsCPath =
+ case Ts#type.def of
+ #'Externaltypereference'{} -> [];
+ _ -> [Cname|Path]
+ end,
+ NewState =
+ S#state{abscomppath=NewAbsCPath,recordtopname=[Cname|TopName]},
+ CheckedTs = check_type(NewState,Type,Ts),
+ NewTags = get_taglist(S,CheckedTs),
+ NewC = C#'ComponentType'{typespec=CheckedTs,tags=NewTags},
+ case Ext of
+ noext ->
+ check_each_alternative(S,Type,Ct,[NewC|Acc],Extacc,Ext);
+ ext ->
+ check_each_alternative(S,Type,Ct,Acc,[NewC|Extacc],Ext)
+ end;
+
+check_each_alternative(S,Type,[_|Ct],Acc,Extacc,noext) -> % skip 'EXTENSIONMARK'
+ check_each_alternative(S,Type,Ct,Acc,Extacc,ext);
+check_each_alternative(_S,_,[_C|_Ct],_,_,ext) -> % skip 'EXTENSIONMARK'
+ throw({error,{asn1,{too_many_extension_marks}}});
+check_each_alternative(_S,_,[],Acc,Extacc,ext) ->
+ {lists:reverse(Acc),lists:reverse(Extacc)};
+check_each_alternative(_S,_,[],Acc,_,noext) ->
+ lists:reverse(Acc).
+
+%% componentrelation_leadingattr/2 searches the structure for table
+%% constraints; if any are found, componentrelation_leadingattr/6 is
+%% called.
+componentrelation_leadingattr(S,CompList) ->
+% {Cs1,Cs2} =
+ Cs =
+ case CompList of
+ {Components,EComponents} when list(Components) ->
+% {Components,Components};
+ Components ++ EComponents;
+ CompList when list(CompList) ->
+% {CompList,CompList}
+ CompList
+ end,
+% case any_simple_table(S,Cs1,[]) of
+
+ %% get_simple_table_if_used/2 should find out whether there are any
+ %% component relation constraints in the entire tree of Cs1 that
+ %% relate to this level. It returns the information about the simple
+ %% table constraint that is necessary for the call to
+ %% componentrelation_leadingattr/6, which is the step where the leading
+ %% attribute is identified and the syntax tree is modified to support
+ %% the code generation.
+ case get_simple_table_if_used(S,Cs) of
+ [] -> {false,CompList};
+ STList ->
+% componentrelation_leadingattr(S,Cs1,Cs2,STList,[],[])
+ componentrelation_leadingattr(S,Cs,Cs,STList,[],[])
+ end.
+
+%% componentrelation_leadingattr/6: when all components have been
+%% searched, the new modified components are returned together with the
+%% "leading attribute" information, which is later stored in the
+%% tablecinf field of the SEQUENCE/SET record. The "leading attribute"
+%% information is used to generate the lookup in the object set
+%% table. The other information gathered in the #type.tablecinf field
+%% is also used in the code generation phase, to recognize the proper
+%% components for "open type" encoding and to propagate the result of
+%% the object set lookup when needed.
+componentrelation_leadingattr(_,[],_CompList,_,[],NewCompList) ->
+ {false,lists:reverse(NewCompList)};
+componentrelation_leadingattr(_,[],_CompList,_,LeadingAttr,NewCompList) ->
+ {lists:last(LeadingAttr),lists:reverse(NewCompList)}; %send all info in Ts later
+componentrelation_leadingattr(S,[C|Cs],CompList,STList,Acc,CompAcc) ->
+ {LAAcc,NewC} =
+ case catch componentrelation1(S,C#'ComponentType'.typespec,
+ [C#'ComponentType'.name]) of
+ {'EXIT',_} ->
+ {[],C};
+ {CRI=[{_A1,_B1,_C1,_D1}|_Rest],NewTSpec} ->
+ %% {ObjectSet,AtPath,ClassDef,Path}
+ %% _A1 is a reference to the object set of the
+ %% component relation constraint.
+ %% _B1 is the path of names in the at-list of the
+ %% component relation constraint.
+ %% _C1 is the class definition of the
+ %% ObjectClassFieldType.
+ %% _D1 is the path of components that was traversed to
+ %% find this constraint.
+ case leading_attr_index(S,CompList,CRI,
+ lists:reverse(S#state.abscomppath),[]) of
+ [] ->
+ {[],C};
+ [{ObjSet,Attr,N,ClassDef,_Path,ValueIndex}|_NewRest] ->
+ OS = object_set_mod_name(S,ObjSet),
+ UniqueFieldName =
+ case (catch get_unique_fieldname(#classdef{typespec=ClassDef})) of
+ {error,'__undefined_'} ->
+ no_unique;
+ {asn1,Msg,_} ->
+ error({type,Msg,S});
+ Other -> Other
+ end,
+% UsedFieldName = get_used_fieldname(S,Attr,STList),
+ %% Res should be done differently: even though
+ %% a unique field name exists it is not
+ %% certain that the ObjectClassFieldType of
+ %% the simple table constraint picks that
+ %% class field.
+ Res = #simpletableattributes{objectsetname=OS,
+%% c_name=asn1ct_gen:un_hyphen_var(Attr),
+ c_name=Attr,
+ c_index=N,
+ usedclassfield=UniqueFieldName,
+ uniqueclassfield=UniqueFieldName,
+ valueindex=ValueIndex},
+ {[Res],C#'ComponentType'{typespec=NewTSpec}}
+ end;
+ _ ->
+ %% no constraint was found
+ {[],C}
+ end,
+ componentrelation_leadingattr(S,Cs,CompList,STList,LAAcc++Acc,
+ [NewC|CompAcc]).
+
+object_set_mod_name(_S,ObjSet) when atom(ObjSet) ->
+ ObjSet;
+object_set_mod_name(#state{mname=M},
+ #'Externaltypereference'{module=M,type=T}) ->
+ T;
+object_set_mod_name(S,#'Externaltypereference'{module=M,type=T}) ->
+ case lists:member(M,S#state.inputmodules) of
+ true ->
+ T;
+ false ->
+ {M,T}
+ end.
+
+%% get_used_fieldname gets the used field of the class referenced by
+%% the ObjectClassFieldType construct in the simple table constraint
+%% corresponding to the component relation constraint that depends on
+%% it.
+% get_used_fieldname(_S,CName,[{[CName|_Rest],_,ClFieldName}|_RestSimpleT]) ->
+% ClFieldName;
+% get_used_fieldname(S,CName,[_SimpleTC|Rest]) ->
+% get_used_fieldname(S,CName,Rest);
+% get_used_fieldname(S,_,[]) ->
+% error({type,"Error in Simple table constraint",S}).
+
+%% any_simple_table/3 checks if any of the components on this level is
+%% constrained by a simple table constraint. It returns a list of
+%% three-element tuples: a name path to the place in the type structure
+%% where the constraint is, the name of the object set, and the
+%% referenced field in the class.
+% any_simple_table(S = #state{mname=M,abscomppath=Path},
+% [#'ComponentType'{name=Name,typespec=Type}|Cs],Acc) ->
+% Constraint = Type#type.constraint,
+% case lists:keysearch(simpletable,1,Constraint) of
+% {value,{_,#type{def=Ref}}} ->
+% %% This ObjectClassFieldType, which has a simple table
+% %% constraint, must pick a fixed type value, mustn't it ?
+% {ClassDef,[{_,ClassFieldName}]} = Type#type.def,
+% ST =
+% case Ref of
+% #'Externaltypereference'{module=M,type=ObjSetName} ->
+% {[Name|Path],ObjSetName,ClassFieldName};
+% _ ->
+% {[Name|Path],Ref,ClassFieldName}
+% end,
+% any_simple_table(S,Cs,[ST|Acc]);
+% false ->
+% any_simple_table(S,Cs,Acc)
+% end;
+% any_simple_table(_,[],Acc) ->
+% lists:reverse(Acc);
+% any_simple_table(S,[_|Cs],Acc) ->
+% any_simple_table(S,Cs,Acc).
+
+%% get_simple_table_if_used/2 searches the structure of Cs for any
+%% component relation constraints that apply to the present level of
+%% the structure. If there are any, the information necessary for code
+%% generation of the lookup functionality in the object set table is
+%% returned.
+get_simple_table_if_used(S,Cs) ->
+ CNames = lists:map(fun(#'ComponentType'{name=Name}) -> Name;
+ (_) -> [] %% in case of extension marks
+ end,
+ Cs),
+ RefedSimpleTable=any_component_relation(S,Cs,CNames,[],[]),
+ get_simple_table_info(S,Cs,remove_doubles(RefedSimpleTable)).
+
+remove_doubles(L) ->
+ remove_doubles(L,[]).
+remove_doubles([H|T],Acc) ->
+ NewT = remove_doubles1(H,T),
+ remove_doubles(NewT,[H|Acc]);
+remove_doubles([],Acc) ->
+ Acc.
+
+remove_doubles1(El,L) ->
+ case lists:delete(El,L) of
+ L -> L;
+ NewL -> remove_doubles1(El,NewL)
+ end.
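+
+%% Sketch with made-up data: remove_doubles/1 keeps one copy of every
+%% element, e.g. remove_doubles([a,b,a,c,a]) gives [c,b,a] (note that the
+%% result comes out in reverse order of first occurrence).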
+
+%% get_simple_table_info searches the components Cs along the path from
+%% an at-list (third argument), following into sub-components when
+%% necessary, to get information needed for code generation.
+%%
+%% Returns a list of three-element tuples: a list of atoms that is the
+%% path, the name of the class field that is referred to in the
+%% ObjectClassFieldType, and the name of the unique field of the class
+%% of the ObjectClassFieldType.
+%%
+% %% The level information outermost/innermost must be kept. There are
+% %% at least two possibilities to cover here for an outermost case: 1)
+% %% Both the simple table and the component relation have a common path
+% %% at least one step below the outermost level, i.e. the leading
+% %% information shall be on a sub level. 2) They don't have any common
+% %% path.
+get_simple_table_info(S,Cs,[AtList|Rest]) ->
+%% [get_simple_table_info1(S,Cs,AtList,S#state.abscomppath)|get_simple_table_info(S,Cs,Rest)];
+ [get_simple_table_info1(S,Cs,AtList,[])|get_simple_table_info(S,Cs,Rest)];
+get_simple_table_info(_,_,[]) ->
+ [].
+get_simple_table_info1(S,Cs,[Cname|Cnames],Path) when list(Cs) ->
+ case lists:keysearch(Cname,#'ComponentType'.name,Cs) of
+ {value,C} ->
+ get_simple_table_info1(S,C,Cnames,[Cname|Path]);
+ _ ->
+ error({type,"Missing expected simple table constraint",S})
+ end;
+get_simple_table_info1(S,#'ComponentType'{typespec=TS},[],Path) ->
+ %% In this component there must be a simple table constraint
+ %% otherwise the ASN.1 code is wrong.
+ #type{def=OCFT,constraint=Cnstr} = TS,
+ case Cnstr of
+ [{simpletable,_OSRef}] ->
+ #'ObjectClassFieldType'{classname=ClRef,
+ class=ObjectClass,
+ fieldname=FieldName} = OCFT,
+% #'ObjectClassFieldType'{ObjectClass,FieldType} = ObjectClassFieldType,
+ ObjectClassFieldName =
+ case FieldName of
+ {LastFieldName,[]} -> LastFieldName;
+ {_FirstFieldName,FieldNames} ->
+ lists:last(FieldNames)
+ end,
+ %% ObjectClassFieldName is the last element in the dotted
+ %% list of the ObjectClassFieldType. The last element may
+ %% belong to another class, which is referenced from the class
+ %% of the ObjectClassFieldType.
+ ClassDef =
+ case ObjectClass of
+ [] ->
+ {_,CDef}=get_referenced_type(S,ClRef),
+ CDef;
+ _ -> #classdef{typespec=ObjectClass}
+ end,
+ UniqueName =
+ case (catch get_unique_fieldname(ClassDef)) of
+ {error,'__undefined_'} -> no_unique;
+ {asn1,Msg,_} ->
+ error({type,Msg,S});
+ Other -> Other
+ end,
+ {lists:reverse(Path),ObjectClassFieldName,UniqueName};
+ _ ->
+ error({type,{asn1,"missing expected simple table constraint",
+ Cnstr},S})
+ end;
+get_simple_table_info1(S,#'ComponentType'{typespec=TS},Cnames,Path) ->
+ Components = get_atlist_components(TS#type.def),
+ get_simple_table_info1(S,Components,Cnames,Path).
+
+%% any_component_relation searches for all component relation
+%% constraints that refer to the actual level and returns a list of
+%% the "name paths" in the at-lists of those component relation
+%% constraints, each of which must refer to a simple table constraint.
+%% The list is empty if no component relation constraints were found.
+%%
+%% NamePath has the names of all components that are followed from the
+%% beginning of the search. CNames holds the names of all components
+%% of the start level; this info is used, if an outermost at-notation
+%% is found, to check the validity of the at-list.
+any_component_relation(S,[C|Cs],CNames,NamePath,Acc) ->
+ CName = C#'ComponentType'.name,
+ Type = C#'ComponentType'.typespec,
+ CRelPath =
+ case Type#type.constraint of
+ [{componentrelation,_,AtNotation}] ->
+ %% Found component relation constraint, now check
+ %% whether this constraint is relevant for the level
+ %% where the search started
+ AtNot = extract_at_notation(AtNotation),
+ %% evaluate_atpath returns the relative path to the
+ %% simple table constraint from where the component
+ %% relation is found.
+ evaluate_atpath(S#state.abscomppath,NamePath,CNames,AtNot);
+ _ ->
+ []
+ end,
+ InnerAcc =
+ case {Type#type.inlined,
+ asn1ct_gen:type(asn1ct_gen:get_inner(Type#type.def))} of
+ {no,{constructed,bif}} ->
+ InnerCs =
+ case get_components(Type#type.def) of
+ {IC1,_IC2} -> IC1 ++ IC1;
+ IC -> IC
+ end,
+ %% here we are interested in components of an
+ %% SEQUENCE/SET OF as well as SEQUENCE, SET and CHOICE
+ any_component_relation(S,InnerCs,CNames,[CName|NamePath],[]);
+ _ ->
+ []
+ end,
+ any_component_relation(S,Cs,CNames,NamePath,InnerAcc++CRelPath++Acc);
+any_component_relation(_,[],_,_,Acc) ->
+ Acc.
+
+%% evaluate_atpath/4 finds out whether the at-notation refers to the
+%% search level. The list of referenced names in the AtNot list shall
+%% begin with a name that exists on the level it refers to. If the
+%% found AtPath refers to the same sub-branch as the simple table
+%% constraint does, then there shall not be any leading attribute info
+%% on this level.
+evaluate_atpath(_,[],Cnames,{innermost,AtPath=[Ref|_Refs]}) ->
+ %% any innermost constraint found deeper in the structure is
+ %% ignored.
+ case lists:member(Ref,Cnames) of
+ true -> [AtPath];
+ false -> []
+ end;
+%% In this case we must check that the AtPath does not follow any step
+%% of the NamePath; if it does, the constraint will be handled at an
+%% inner level.
+evaluate_atpath(TopPath,NamePath,Cnames,{outermost,AtPath=[_Ref|_Refs]}) ->
+ AtPathBelowTop =
+ case TopPath of
+ [] -> AtPath;
+ _ ->
+ case lists:prefix(TopPath,AtPath) of
+ true ->
+ lists:subtract(AtPath,TopPath);
+ _ -> []
+ end
+ end,
+ case {NamePath,AtPathBelowTop} of
+ {[H|_T1],[H|_T2]} -> []; % this must be handled in lower level
+ {_,[]} -> [];% this must be handled in an above level
+ {_,[H|_T]} ->
+ case lists:member(H,Cnames) of
+ true -> [AtPathBelowTop];
+ _ -> error({type,{asn1,"failed to analyze at-path",AtPath}})
+ end
+ end;
+evaluate_atpath(_,_,_,_) ->
+ [].
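+
+%% Sketch of evaluate_atpath/4 above (names are made up): at the level
+%% where the search started (NamePath =:= []) an innermost at-list such
+%% as {innermost,[id]} gives [[id]] when id is one of the component
+%% names on this level. An outermost at-list is first made relative to
+%% the absolute component path (TopPath); it is then left to an inner or
+%% outer level when appropriate, accepted as [RelativePath] when its
+%% first name is a component on this level, and reported as an error
+%% otherwise.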
+
+%% Type may be any of SEQUENCE, SET, CHOICE, SEQUENCE OF, SET OF but
+%% only the first three have valid components.
+get_atlist_components(Def) ->
+ get_components(atlist,Def).
+
+get_components(Def) ->
+ get_components(any,Def).
+
+get_components(_,#'SEQUENCE'{components=Cs}) ->
+ Cs;
+get_components(_,#'SET'{components=Cs}) ->
+ Cs;
+get_components(_,{'CHOICE',Cs}) ->
+ Cs;
+get_components(any,{'SEQUENCE OF',#type{def=Def}}) ->
+ get_components(any,Def);
+get_components(any,{'SET OF',#type{def=Def}}) ->
+ get_components(any,Def);
+get_components(_,_) ->
+ [].
+
+
+extract_at_notation([{Level,[#'Externalvaluereference'{value=Name}|Rest]}]) ->
+ {Level,[Name|extract_at_notation1(Rest)]};
+extract_at_notation(At) ->
+ exit({error,{asn1,{at_notation,At}}}).
+extract_at_notation1([#'Externalvaluereference'{value=Name}|Rest]) ->
+ [Name|extract_at_notation1(Rest)];
+extract_at_notation1([]) ->
+ [].
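+
+%% Sketch with made-up values: an at-notation such as
+%%   [{outermost,[#'Externalvaluereference'{value=id},
+%%                #'Externalvaluereference'{value=type}]}]
+%% is reduced to {outermost,[id,type]}; any other shape makes
+%% extract_at_notation/1 exit with an at_notation error.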
+
+%% componentrelation1/3 identifies all componentrelation constraints
+%% that exist in C or in the substructure of C. Info about the found
+%% constraints is returned in a list of tuples
+%% {ObjectSet,AttrPath,ClassDef,Path}: ObjectSet is the reference to
+%% the object set, AttrPath the name atoms extracted from the at-list
+%% in the component relation constraint, ClassDef the objectclass
+%% record of the class of the ObjectClassFieldType, and Path the
+%% component name "path" from the searched level down to this constraint.
+%%
+%% The function is called with one component of the type in turn and
+%% with the component name in Path at the first call. When called from
+%% within, the name of the inner component is added to Path.
+componentrelation1(S,C = #type{def=Def,constraint=Constraint,tablecinf=TCI},
+ Path) ->
+ Ret =
+ case Constraint of
+ [{componentrelation,{_,_,ObjectSet},AtList}|_Rest] ->
+ [{_,AL=[#'Externalvaluereference'{}|_R1]}|_R2] = AtList,
+ %% Note: if Path is longer than one, i.e. it is within
+ %% an inner type of the actual level, then the only
+ %% relevant at-list is of "outermost" type.
+%% #'ObjectClassFieldType'{class=ClassDef} = Def,
+ ClassDef = get_ObjectClassFieldType_classdef(S,Def),
+ AtPath =
+ lists:map(fun(#'Externalvaluereference'{value=V})->V end,
+ AL),
+ {[{ObjectSet,AtPath,ClassDef,Path}],Def};
+ _Other ->
+ %% check the inner type of component
+ innertype_comprel(S,Def,Path)
+ end,
+ case Ret of
+ nofunobj ->
+ nofunobj; %% ignored by caller
+ {CRelI=[{ObjSet,_,_,_}],NewDef} -> %%
+ TCItmp = lists:subtract(TCI,[{objfun,ObjSet}]),
+ {CRelI,C#type{tablecinf=[{objfun,ObjSet}|TCItmp],def=NewDef}};
+ {CompRelInf,NewDef} -> %% more than one tuple in CompRelInf
+ TCItmp = lists:subtract(TCI,[{objfun,anyset}]),
+ {CompRelInf,C#type{tablecinf=[{objfun,anyset}|TCItmp],def=NewDef}}
+ end.
+
+innertype_comprel(S,{'SEQUENCE OF',Type},Path) ->
+ case innertype_comprel1(S,Type,Path) of
+ nofunobj ->
+ nofunobj;
+ {CompRelInf,NewType} ->
+ {CompRelInf,{'SEQUENCE OF',NewType}}
+ end;
+innertype_comprel(S,{'SET OF',Type},Path) ->
+ case innertype_comprel1(S,Type,Path) of
+ nofunobj ->
+ nofunobj;
+ {CompRelInf,NewType} ->
+ {CompRelInf,{'SET OF',NewType}}
+ end;
+innertype_comprel(S,{'CHOICE',CTypeList},Path) ->
+ case componentlist_comprel(S,CTypeList,[],Path,[]) of
+ nofunobj ->
+ nofunobj;
+ {CompRelInf,NewCs} ->
+ {CompRelInf,{'CHOICE',NewCs}}
+ end;
+innertype_comprel(S,Seq = #'SEQUENCE'{components=Cs},Path) ->
+ case componentlist_comprel(S,Cs,[],Path,[]) of
+ nofunobj ->
+ nofunobj;
+ {CompRelInf,NewCs} ->
+ {CompRelInf,Seq#'SEQUENCE'{components=NewCs}}
+ end;
+innertype_comprel(S,Set = #'SET'{components=Cs},Path) ->
+ case componentlist_comprel(S,Cs,[],Path,[]) of
+ nofunobj ->
+ nofunobj;
+ {CompRelInf,NewCs} ->
+ {CompRelInf,Set#'SET'{components=NewCs}}
+ end;
+innertype_comprel(_,_,_) ->
+ nofunobj.
+
+componentlist_comprel(S,[C = #'ComponentType'{name=Name,typespec=Type}|Cs],
+ Acc,Path,NewCL) ->
+ case catch componentrelation1(S,Type,Path++[Name]) of
+ {'EXIT',_} ->
+ componentlist_comprel(S,Cs,Acc,Path,[C|NewCL]);
+ nofunobj ->
+ componentlist_comprel(S,Cs,Acc,Path,[C|NewCL]);
+ {CRelInf,NewType} ->
+ componentlist_comprel(S,Cs,CRelInf++Acc,Path,
+ [C#'ComponentType'{typespec=NewType}|NewCL])
+ end;
+componentlist_comprel(_,[],Acc,_,NewCL) ->
+ case Acc of
+ [] ->
+ nofunobj;
+ _ ->
+ {Acc,lists:reverse(NewCL)}
+ end.
+
+innertype_comprel1(S,T = #type{def=Def,constraint=Cons,tablecinf=TCI},Path) ->
+ Ret =
+ case Cons of
+ [{componentrelation,{_,_,ObjectSet},AtList}|_Rest] ->
+ %% This AtList must have an "outermost" at sign to be
+ %% relevant here.
+ [{_,AL=[#'Externalvaluereference'{value=_Attr}|_R1]}|_R2]
+ = AtList,
+%% #'ObjectClassFieldType'{class=ClassDef} = Def,
+ ClassDef = get_ObjectClassFieldType_classdef(S,Def),
+ AtPath =
+ lists:map(fun(#'Externalvaluereference'{value=V})->V end,
+ AL),
+ [{ObjectSet,AtPath,ClassDef,Path}];
+ _ ->
+ innertype_comprel(S,Def,Path)
+ end,
+ case Ret of
+ nofunobj -> nofunobj;
+ L = [{ObjSet,_,_,_}] ->
+ TCItmp = lists:subtract(TCI,[{objfun,ObjSet}]),
+ {L,T#type{tablecinf=[{objfun,ObjSet}|TCItmp]}};
+ {CRelInf,NewDef} ->
+ TCItmp = lists:subtract(TCI,[{objfun,anyset}]),
+ {CRelInf,T#type{def=NewDef,tablecinf=[{objfun,anyset}|TCItmp]}}
+ end.
+
+
+%% leading_attr_index counts the index and picks the name of the
+%% component that is at the actual level in the at-list of the
+%% component relation constraint (AttrP). AbsP is the path of
+%% component names from the top type level to the actual level. AttrP
+%% is a list with the atoms from the at-list.
+leading_attr_index(S,Cs,[H={_,AttrP,_,_}|T],AbsP,Acc) ->
+ AttrInfo =
+ case lists:prefix(AbsP,AttrP) of
+ %% Why this? It is necessary when, as in the TConstrChoice
+ %% situation, there is an inner structure with an
+ %% outermost at-list and the "leading attribute" code gen
+ %% may be at a level some steps below the outermost level.
+ true ->
+ RelativAttrP = lists:subtract(AttrP,AbsP),
+ %% The head is used to calculate the index of the
+ %% component and to give the fun, received from the
+ %% object set lookup, a unique name. The tail is
+ %% used to match the proper value input to the fun.
+ {hd(RelativAttrP),tl(RelativAttrP)};
+ false ->
+ {hd(AttrP),tl(AttrP)}
+ end,
+ case leading_attr_index1(S,Cs,H,AttrInfo,1) of
+ 0 ->
+ leading_attr_index(S,Cs,T,AbsP,Acc);
+ Res ->
+ leading_attr_index(S,Cs,T,AbsP,[Res|Acc])
+ end;
+leading_attr_index(_,_Cs,[],_,Acc) ->
+ lists:reverse(Acc).
+
+leading_attr_index1(_,[],_,_,_) ->
+ 0;
+leading_attr_index1(S,[C|Cs],Arg={ObjectSet,_,CDef,P},
+ AttrInfo={Attr,SubAttr},N) ->
+ case C#'ComponentType'.name of
+ Attr ->
+ ValueMatch = value_match(S,C,Attr,SubAttr),
+ {ObjectSet,Attr,N,CDef,P,ValueMatch};
+ _ ->
+ leading_attr_index1(S,Cs,Arg,AttrInfo,N+1)
+ end.
+
+%% value_match gathers information for a proper value match in the
+%% generated encode function. For a SEQUENCE or a SET the index of the
+%% component is counted. For a CHOICE the index is always 2.
+value_match(S,C,Name,SubAttr) ->
+ value_match(S,C,Name,SubAttr,[]). % C has name Name
+value_match(_S,#'ComponentType'{},_Name,[],Acc) ->
+ Acc;% do not reverse, indexes in reverse order
+value_match(S,#'ComponentType'{typespec=Type},Name,[At|Ats],Acc) ->
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ Components =
+ case get_atlist_components(Type#type.def) of
+ [] -> error({type,{asn1,"element in at list must be a "
+ "SEQUENCE, SET or CHOICE.",Name},S});
+ Comps -> Comps
+ end,
+ {Index,ValueIndex} = component_value_index(S,InnerType,At,Components),
+ value_match(S,lists:nth(Index,Components),At,Ats,[ValueIndex|Acc]).
+
+component_value_index(S,'CHOICE',At,Components) ->
+ {component_index(S,At,Components),2};
+component_value_index(S,_,At,Components) ->
+ %% SEQUENCE or SET
+ Index = component_index(S,At,Components),
+ {Index,{Index+1,At}}.
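+
+%% Sketch of the indices chosen above: a checked SEQUENCE/SET value is
+%% represented as an Erlang record, i.e. a tuple with the record name at
+%% element 1, so component number Index is matched as element Index+1;
+%% a CHOICE value is assumed to be a {AlternativeName,Value} pair, which
+%% is why the value index for a CHOICE is always 2.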
+
+component_index(S,Name,Components) ->
+ component_index1(S,Name,Components,1).
+component_index1(_S,Name,[#'ComponentType'{name=Name}|_Cs],N) ->
+ N;
+component_index1(S,Name,[_C|Cs],N) ->
+ component_index1(S,Name,Cs,N+1);
+component_index1(S,Name,[],_) ->
+ error({type,{asn1,"component of at-list was not"
+ " found in substructure",Name},S}).
+
+get_unique_fieldname(ClassDef) ->
+%% {_,Fields,_} = ClassDef#classdef.typespec,
+ Fields = (ClassDef#classdef.typespec)#objectclass.fields,
+ get_unique_fieldname(Fields,[]).
+
+get_unique_fieldname([],[]) ->
+ throw({error,'__undefined_'});
+get_unique_fieldname([],[Name]) ->
+ Name;
+get_unique_fieldname([],Acc) ->
+ throw({asn1,'only one UNIQUE field is allowed in CLASS',Acc});
+get_unique_fieldname([{fixedtypevaluefield,Name,_,'UNIQUE',_}|Rest],Acc) ->
+ get_unique_fieldname(Rest,[Name|Acc]);
+get_unique_fieldname([_H|T],Acc) ->
+ get_unique_fieldname(T,Acc).
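+
+%% Sketch: for a class whose fields contain exactly one
+%% {fixedtypevaluefield,Name,_,'UNIQUE',_} entry, such as the id field of
+%% 'TYPE-IDENTIFIER' in default_class_list/0 below, get_unique_fieldname/1
+%% returns that Name; with no UNIQUE field it throws {error,'__undefined_'},
+%% and more than one UNIQUE field is rejected as an error.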
+
+get_tableconstraint_info(S,Type,{CheckedTs,EComps}) ->
+ {get_tableconstraint_info(S,Type,CheckedTs,[]),
+ get_tableconstraint_info(S,Type,EComps,[])};
+get_tableconstraint_info(S,Type,CheckedTs) ->
+ get_tableconstraint_info(S,Type,CheckedTs,[]).
+
+get_tableconstraint_info(_S,_Type,[],Acc) ->
+ lists:reverse(Acc);
+get_tableconstraint_info(S,Type,[C|Cs],Acc) ->
+ CheckedTs = C#'ComponentType'.typespec,
+ AccComp =
+ case CheckedTs#type.def of
+ %% ObjectClassFieldType
+ OCFT=#'ObjectClassFieldType'{class=#objectclass{},
+ type=_AType} ->
+% AType = get_ObjectClassFieldType(S,Fields,FieldRef),
+% RefedFieldName =
+% get_referencedclassfield(CheckedTs#type.def),%is probably obsolete
+ NewOCFT =
+ OCFT#'ObjectClassFieldType'{class=[]},
+ C#'ComponentType'{typespec=
+ CheckedTs#type{
+% def=AType,
+ def=NewOCFT
+ }};
+% constraint=[{tableconstraint_info,
+% FieldRef}]}};
+ {'SEQUENCE OF',SOType} when record(SOType,type),
+ (element(1,SOType#type.def)=='CHOICE') ->
+ CTypeList = element(2,SOType#type.def),
+ NewInnerCList =
+ get_tableconstraint_info(S,Type,CTypeList,[]),
+ C#'ComponentType'{typespec=
+ CheckedTs#type{
+ def={'SEQUENCE OF',
+ SOType#type{def={'CHOICE',
+ NewInnerCList}}}}};
+ {'SET OF',SOType} when record(SOType,type),
+ (element(1,SOType#type.def)=='CHOICE') ->
+ CTypeList = element(2,SOType#type.def),
+ NewInnerCList =
+ get_tableconstraint_info(S,Type,CTypeList,[]),
+ C#'ComponentType'{typespec=
+ CheckedTs#type{
+ def={'SET OF',
+ SOType#type{def={'CHOICE',
+ NewInnerCList}}}}};
+ _ ->
+ C
+ end,
+ get_tableconstraint_info(S,Type,Cs,[AccComp|Acc]).
+
+get_referenced_fieldname([{_,FirstFieldname}]) ->
+ {FirstFieldname,[]};
+get_referenced_fieldname([{_,FirstFieldname}|Rest]) ->
+ {FirstFieldname,lists:map(fun(X)->element(2,X) end,Rest)};
+get_referenced_fieldname(Def) ->
+ {no_type,Def}.
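+
+%% Sketch with made-up field names: get_referenced_fieldname/1 turns a
+%% dotted field-name list such as
+%%   [{valuefieldreference,a},{valuefieldreference,b}]
+%% into {a,[b]}, i.e. the first name together with the remaining names;
+%% any other argument is returned as {no_type,Def}.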
+
+%% get_ObjectClassFieldType extracts the type from the chain of
+%% objects that leads to a final type.
+get_ObjectClassFieldType(S,ERef,PrimFieldNameList) when
+ record(ERef,'Externaltypereference') ->
+ {_,Type} = get_referenced_type(S,ERef),
+ ClassSpec = check_class(S,Type),
+ Fields = ClassSpec#objectclass.fields,
+ get_ObjectClassFieldType(S,Fields,PrimFieldNameList);
+get_ObjectClassFieldType(S,Fields,L=[_PrimFieldName1|_Rest]) ->
+ check_PrimitiveFieldNames(S,Fields,L),
+ get_OCFType(S,Fields,L).
+
+check_PrimitiveFieldNames(_S,_Fields,_) ->
+ ok.
+
+%% get_ObjectClassFieldType_classdef gets the def of the class of the
+%% ObjectClassFieldType, i.e. the objectclass record. If the type has
+%% been checked (it may be a field type of an internal SEQUENCE) the
+%% class field is [], and the classdef then has to be fetched with the
+%% help of the class reference in the classname field.
+get_ObjectClassFieldType_classdef(S,#'ObjectClassFieldType'{classname=Name,
+ class=[]}) ->
+ {_,#classdef{typespec=TS}} = get_referenced_type(S,Name),
+ TS;
+get_ObjectClassFieldType_classdef(_,#'ObjectClassFieldType'{class=Cl}) ->
+ Cl.
+
+get_OCFType(S,Fields,[{_FieldType,PrimFieldName}|Rest]) ->
+ case lists:keysearch(PrimFieldName,2,Fields) of
+ {value,{fixedtypevaluefield,_,Type,_Unique,_OptSpec}} ->
+ {fixedtypevaluefield,PrimFieldName,Type};
+ {value,{objectfield,_,Type,_Unique,_OptSpec}} ->
+ {_,ClassDef} = get_referenced_type(S,Type#type.def),
+ CheckedCDef = check_class(S#state{type=ClassDef,
+ tname=ClassDef#classdef.name},
+ ClassDef#classdef.typespec),
+ get_OCFType(S,CheckedCDef#objectclass.fields,Rest);
+ {value,{objectsetfield,_,Type,_OptSpec}} ->
+ {_,ClassDef} = get_referenced_type(S,Type#type.def),
+ CheckedCDef = check_class(S#state{type=ClassDef,
+ tname=ClassDef#classdef.name},
+ ClassDef#classdef.typespec),
+ get_OCFType(S,CheckedCDef#objectclass.fields,Rest);
+
+ {value,Other} ->
+ {element(1,Other),PrimFieldName};
+ _ ->
+ error({type,"undefined FieldName in ObjectClassFieldType",S})
+ end.
+
+get_taglist(#state{erule=per},_) ->
+ [];
+get_taglist(#state{erule=per_bin},_) ->
+ [];
+get_taglist(S,Ext) when record(Ext,'Externaltypereference') ->
+ {_,T} = get_referenced_type(S,Ext),
+ get_taglist(S,T#typedef.typespec);
+get_taglist(S,Tref) when record(Tref,typereference) ->
+ {_,T} = get_referenced_type(S,Tref),
+ get_taglist(S,T#typedef.typespec);
+get_taglist(S,Type) when record(Type,type) ->
+ case Type#type.tag of
+ [] ->
+ get_taglist(S,Type#type.def);
+ [Tag|_] ->
+% case lists:member(S#state.erule,[ber,ber_bin]) of
+% true ->
+% lists:map(fun(Tx) -> asn1ct_gen:def_to_tag(Tx) end,Type#type.tag);
+% _ ->
+ [asn1ct_gen:def_to_tag(Tag)]
+% end
+ end;
+get_taglist(S,{'CHOICE',{Rc,Ec}}) ->
+ get_taglist(S,{'CHOICE',Rc ++ Ec});
+get_taglist(S,{'CHOICE',Components}) ->
+ get_taglist1(S,Components);
+%% ObjectClassFieldType OTP-4390
+get_taglist(_S,#'ObjectClassFieldType'{type={typefield,_}}) ->
+ [];
+get_taglist(S,#'ObjectClassFieldType'{type={fixedtypevaluefield,_,Type}}) ->
+ get_taglist(S,Type);
+get_taglist(S,{ERef=#'Externaltypereference'{},FieldNameList})
+ when list(FieldNameList) ->
+ case get_ObjectClassFieldType(S,ERef,FieldNameList) of
+ Type when record(Type,type) ->
+ get_taglist(S,Type);
+ {fixedtypevaluefield,_,Type} -> get_taglist(S,Type);
+ {TypeFieldName,_} when atom(TypeFieldName) -> []%should check if allowed
+ end;
+get_taglist(S,{ObjCl,FieldNameList}) when record(ObjCl,objectclass),
+ list(FieldNameList) ->
+ case get_ObjectClassFieldType(S,ObjCl#objectclass.fields,FieldNameList) of
+ Type when record(Type,type) ->
+ get_taglist(S,Type);
+ {fixedtypevaluefield,_,Type} -> get_taglist(S,Type);
+ {TypeFieldName,_} when atom(TypeFieldName) -> []%should check if allowed
+ end;
+get_taglist(S,Def) ->
+ case lists:member(S#state.erule,[ber_bin_v2]) of
+ false ->
+ case Def of
+ 'ASN1_OPEN_TYPE' -> % open_type has no UNIVERSAL tag as such
+ [];
+ _ ->
+ [asn1ct_gen:def_to_tag(Def)]
+ end;
+ _ ->
+ []
+ end.
+
+get_taglist1(S,[#'ComponentType'{name=_Cname,tags=TagL}|Rest]) when list(TagL) ->
+ %% the tag list has already been computed; just return TagL and continue with the next alternative
+ TagL ++ get_taglist1(S,Rest);
+get_taglist1(S,[#'ComponentType'{typespec=Ts,tags=undefined}|Rest]) ->
+ get_taglist(S,Ts) ++ get_taglist1(S,Rest);
+get_taglist1(S,[_H|Rest]) -> % skip EXTENSIONMARK
+ get_taglist1(S,Rest);
+get_taglist1(_S,[]) ->
+ [].
+
+dbget_ex(_S,Module,Key) ->
+ case asn1_db:dbget(Module,Key) of
+ undefined ->
+
+ throw({error,{asn1,{undefined,{Module,Key}}}}); % this is caught at the top-level type or value
+ T -> T
+ end.
+
+merge_tags(T1, T2) when list(T2) ->
+ merge_tags2(T1 ++ T2, []);
+merge_tags(T1, T2) ->
+ merge_tags2(T1 ++ [T2], []).
+
+merge_tags2([T1= #tag{type='IMPLICIT'}, T2 |Rest], Acc) ->
+ merge_tags2([T1#tag{type=T2#tag.type, form=T2#tag.form}|Rest],Acc);
+merge_tags2([T1= #tag{type={default,'IMPLICIT'}}, T2 |Rest], Acc) ->
+ merge_tags2([T1#tag{type=T2#tag.type, form=T2#tag.form}|Rest],Acc);
+merge_tags2([H|T],Acc) ->
+ merge_tags2(T, [H|Acc]);
+merge_tags2([], Acc) ->
+ lists:reverse(Acc).
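+
+%% Sketch of the tag merging above with made-up values: merge_tags/2
+%% appends the new tag(s) to T1 and then collapses IMPLICIT steps, e.g.
+%% an outer #tag{class='CONTEXT',number=5,type='IMPLICIT'} followed by
+%% the UNIVERSAL tag of the base type is reduced to a single CONTEXT 5
+%% tag that takes over the inner tag's type and form, whereas an
+%% EXPLICIT outer tag is kept and both tags remain in the result.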
+
+merge_constraints(C1, []) ->
+ C1;
+merge_constraints([], C2) ->
+ C2;
+merge_constraints(C1, C2) ->
+ {SList,VList,PAList,Rest} = splitlist(C1++C2,[],[],[],[]),
+ SizeC = merge_constraints(SList),
+ ValueC = merge_constraints(VList),
+ PermAlphaC = merge_constraints(PAList),
+ case Rest of
+ [] ->
+ SizeC ++ ValueC ++ PermAlphaC;
+ _ ->
+ throw({error,{asn1,{not_implemented,{merge_constraints,Rest}}}})
+ end.
+
+merge_constraints([]) -> [];
+merge_constraints([C1 = {_,{Low1,High1}},{_,{Low2,High2}}|Rest]) when Low1 >= Low2,
+ High1 =< High2 ->
+ merge_constraints([C1|Rest]);
+merge_constraints([C1={'PermittedAlphabet',_},C2|Rest]) ->
+ [C1|merge_constraints([C2|Rest])];
+merge_constraints([C1 = {_,{_Low1,_High1}},C2 = {_,{_Low2,_High2}}|_Rest]) ->
+ throw({error,asn1,{conflicting_constraints,{C1,C2}}});
+merge_constraints([C]) ->
+ [C].
+
+splitlist([C={'SizeConstraint',_}|Rest],Sacc,Vacc,PAacc,Restacc) ->
+ splitlist(Rest,[C|Sacc],Vacc,PAacc,Restacc);
+splitlist([C={'ValueRange',_}|Rest],Sacc,Vacc,PAacc,Restacc) ->
+ splitlist(Rest,Sacc,[C|Vacc],PAacc,Restacc);
+splitlist([C={'PermittedAlphabet',_}|Rest],Sacc,Vacc,PAacc,Restacc) ->
+ splitlist(Rest,Sacc,Vacc,[C|PAacc],Restacc);
+splitlist([C|Rest],Sacc,Vacc,PAacc,Restacc) ->
+ splitlist(Rest,Sacc,Vacc,PAacc,[C|Restacc]);
+splitlist([],Sacc,Vacc,PAacc,Restacc) ->
+ {lists:reverse(Sacc),
+ lists:reverse(Vacc),
+ lists:reverse(PAacc),
+ lists:reverse(Restacc)}.
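+
+%% Sketch with made-up constraints: merging [{'SizeConstraint',{1,10}}]
+%% with [{'SizeConstraint',{0,20}}] keeps the narrower
+%% {'SizeConstraint',{1,10}}; two ranges of the same kind that are not
+%% nested in this way cause a conflicting_constraints error, and any
+%% constraint kind other than size, value range and permitted alphabet
+%% ends up in Rest and is reported as not_implemented.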
+
+
+
+storeindb(M) when record(M,module) ->
+ TVlist = M#module.typeorval,
+ NewM = M#module{typeorval=findtypes_and_values(TVlist)},
+ asn1_db:dbnew(NewM#module.name),
+ asn1_db:dbput(NewM#module.name,'MODULE', NewM),
+ Res = storeindb(NewM#module.name,TVlist,[]),
+ include_default_class(NewM#module.name),
+ include_default_type(NewM#module.name),
+ Res.
+
+storeindb(Module,[H|T],ErrAcc) when record(H,typedef) ->
+ storeindb(Module,H#typedef.name,H,T,ErrAcc);
+storeindb(Module,[H|T],ErrAcc) when record(H,valuedef) ->
+ storeindb(Module,H#valuedef.name,H,T,ErrAcc);
+storeindb(Module,[H|T],ErrAcc) when record(H,ptypedef) ->
+ storeindb(Module,H#ptypedef.name,H,T,ErrAcc);
+storeindb(Module,[H|T],ErrAcc) when record(H,classdef) ->
+ storeindb(Module,H#classdef.name,H,T,ErrAcc);
+storeindb(Module,[H|T],ErrAcc) when record(H,pvaluesetdef) ->
+ storeindb(Module,H#pvaluesetdef.name,H,T,ErrAcc);
+storeindb(Module,[H|T],ErrAcc) when record(H,pobjectdef) ->
+ storeindb(Module,H#pobjectdef.name,H,T,ErrAcc);
+storeindb(Module,[H|T],ErrAcc) when record(H,pvaluedef) ->
+ storeindb(Module,H#pvaluedef.name,H,T,ErrAcc);
+storeindb(_,[],[]) -> ok;
+storeindb(_,[],ErrAcc) ->
+ {error,ErrAcc}.
+
+storeindb(Module,Name,H,T,ErrAcc) ->
+ case asn1_db:dbget(Module,Name) of
+ undefined ->
+ asn1_db:dbput(Module,Name,H),
+ storeindb(Module,T,ErrAcc);
+ _ ->
+ case H of
+ _Type when record(H,typedef) ->
+ error({type,"already defined",
+ #state{mname=Module,type=H,tname=Name}});
+ _Type when record(H,valuedef) ->
+ error({value,"already defined",
+ #state{mname=Module,value=H,vname=Name}});
+ _Type when record(H,ptypedef) ->
+ error({ptype,"already defined",
+ #state{mname=Module,type=H,tname=Name}});
+ _Type when record(H,pobjectdef) ->
+ error({ptype,"already defined",
+ #state{mname=Module,type=H,tname=Name}});
+ _Type when record(H,pvaluesetdef) ->
+ error({ptype,"already defined",
+ #state{mname=Module,type=H,tname=Name}});
+ _Type when record(H,pvaluedef) ->
+ error({ptype,"already defined",
+ #state{mname=Module,type=H,tname=Name}});
+ _Type when record(H,classdef) ->
+ error({class,"already defined",
+ #state{mname=Module,value=H,vname=Name}})
+ end,
+ storeindb(Module,T,[H|ErrAcc])
+ end.
+
+findtypes_and_values(TVList) ->
+ findtypes_and_values(TVList,[],[],[],[],[],[]).%% Types,Values,
+%% Parameterizedtypes,Classes,Objects and ObjectSets
+
+findtypes_and_values([H|T],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc)
+ when record(H,typedef),record(H#typedef.typespec,'Object') ->
+ findtypes_and_values(T,Tacc,Vacc,Pacc,Cacc,[H#typedef.name|Oacc],OSacc);
+findtypes_and_values([H|T],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc)
+ when record(H,typedef),record(H#typedef.typespec,'ObjectSet') ->
+ findtypes_and_values(T,Tacc,Vacc,Pacc,Cacc,Oacc,[H#typedef.name|OSacc]);
+findtypes_and_values([H|T],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc)
+ when record(H,typedef) ->
+ findtypes_and_values(T,[H#typedef.name|Tacc],Vacc,Pacc,Cacc,Oacc,OSacc);
+findtypes_and_values([H|T],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc)
+ when record(H,valuedef) ->
+ findtypes_and_values(T,Tacc,[H#valuedef.name|Vacc],Pacc,Cacc,Oacc,OSacc);
+findtypes_and_values([H|T],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc)
+ when record(H,ptypedef) ->
+ findtypes_and_values(T,Tacc,Vacc,[H#ptypedef.name|Pacc],Cacc,Oacc,OSacc);
+findtypes_and_values([H|T],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc)
+ when record(H,classdef) ->
+ findtypes_and_values(T,Tacc,Vacc,Pacc,[H#classdef.name|Cacc],Oacc,OSacc);
+findtypes_and_values([H|T],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc)
+ when record(H,pvaluedef) ->
+ findtypes_and_values(T,Tacc,[H#pvaluedef.name|Vacc],Pacc,Cacc,Oacc,OSacc);
+findtypes_and_values([H|T],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc)
+ when record(H,pvaluesetdef) ->
+ findtypes_and_values(T,Tacc,[H#pvaluesetdef.name|Vacc],Pacc,Cacc,Oacc,OSacc);
+findtypes_and_values([H|T],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc)
+ when record(H,pobjectdef) ->
+ findtypes_and_values(T,Tacc,Vacc,Pacc,Cacc,[H#pobjectdef.name|Oacc],OSacc);
+findtypes_and_values([H|T],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc)
+ when record(H,pobjectsetdef) ->
+ findtypes_and_values(T,Tacc,Vacc,Pacc,Cacc,Oacc,[H#pobjectsetdef.name|OSacc]);
+findtypes_and_values([],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc) ->
+ {lists:reverse(Tacc),lists:reverse(Vacc),lists:reverse(Pacc),
+ lists:reverse(Cacc),lists:reverse(Oacc),lists:reverse(OSacc)}.
+
+
+
+error({export,Msg,#state{mname=Mname,type=Ref,tname=Typename}}) ->
+ Pos = Ref#'Externaltypereference'.pos,
+ io:format("asn1error:~p:~p:~p ~p~n",[Pos,Mname,Typename,Msg]),
+ {error,{export,Pos,Mname,Typename,Msg}};
+error({type,Msg,#state{mname=Mname,type=Type,tname=Typename}})
+ when record(Type,typedef) ->
+ io:format("asn1error:~p:~p:~p ~p~n",
+ [Type#typedef.pos,Mname,Typename,Msg]),
+ {error,{type,Type#typedef.pos,Mname,Typename,Msg}};
+error({type,Msg,#state{mname=Mname,type=Type,tname=Typename}})
+ when record(Type,ptypedef) ->
+ io:format("asn1error:~p:~p:~p ~p~n",
+ [Type#ptypedef.pos,Mname,Typename,Msg]),
+ {error,{type,Type#ptypedef.pos,Mname,Typename,Msg}};
+error({type,Msg,#state{mname=Mname,value=Value,vname=Valuename}})
+ when record(Value,valuedef) ->
+ io:format("asn1error:~p:~p:~p ~p~n",[Value#valuedef.pos,Mname,Valuename,Msg]),
+ {error,{type,Value#valuedef.pos,Mname,Valuename,Msg}};
+error({type,Msg,#state{mname=Mname,type=Type,tname=Typename}})
+ when record(Type,pobjectdef) ->
+ io:format("asn1error:~p:~p:~p ~p~n",
+ [Type#pobjectdef.pos,Mname,Typename,Msg]),
+ {error,{type,Type#pobjectdef.pos,Mname,Typename,Msg}};
+error({value,Msg,#state{mname=Mname,value=Value,vname=Valuename}}) ->
+ io:format("asn1error:~p:~p:~p ~p~n",[Value#valuedef.pos,Mname,Valuename,Msg]),
+ {error,{value,Value#valuedef.pos,Mname,Valuename,Msg}};
+error({Other,Msg,#state{mname=Mname,value=#valuedef{pos=Pos},vname=Valuename}}) ->
+ io:format("asn1error:~p:~p:~p ~p~n",[Pos,Mname,Valuename,Msg]),
+ {error,{Other,Pos,Mname,Valuename,Msg}};
+error({Other,Msg,#state{mname=Mname,type=#typedef{pos=Pos},tname=Typename}}) ->
+ io:format("asn1error:~p:~p:~p ~p~n",[Pos,Mname,Typename,Msg]),
+ {error,{Other,Pos,Mname,Typename,Msg}};
+error({Other,Msg,#state{mname=Mname,type=#classdef{pos=Pos},tname=Typename}}) ->
+ io:format("asn1error:~p:~p:~p ~p~n",[Pos,Mname,Typename,Msg]),
+ {error,{Other,Pos,Mname,Typename,Msg}}.
+
+include_default_type(Module) ->
+ NameAbsList = default_type_list(),
+ include_default_type1(Module,NameAbsList).
+
+include_default_type1(_,[]) ->
+ ok;
+include_default_type1(Module,[{Name,TS}|Rest]) ->
+ case asn1_db:dbget(Module,Name) of
+ undefined ->
+ T = #typedef{name=Name,
+ typespec=TS},
+ asn1_db:dbput(Module,Name,T);
+ _ -> ok
+ end,
+ include_default_type1(Module,Rest).
+
+default_type_list() ->
+ %% The EXTERNAL type is represented, according to ASN.1 1997,
+ %% as a SEQUENCE with components: identification, data-value-descriptor
+ %% and data-value.
+ Syntax =
+ #'ComponentType'{name=syntax,
+ typespec=#type{def='OBJECT IDENTIFIER'},
+ prop=mandatory},
+ Presentation_Cid =
+ #'ComponentType'{name='presentation-context-id',
+ typespec=#type{def='INTEGER'},
+ prop=mandatory},
+ Transfer_syntax =
+ #'ComponentType'{name='transfer-syntax',
+ typespec=#type{def='OBJECT IDENTIFIER'},
+ prop=mandatory},
+ Negotiation_items =
+ #type{def=
+ #'SEQUENCE'{components=
+ [Presentation_Cid,
+ Transfer_syntax#'ComponentType'{prop=mandatory}]}},
+ Context_negot =
+ #'ComponentType'{name='context-negotiation',
+ typespec=Negotiation_items,
+ prop=mandatory},
+
+ Data_value_descriptor =
+ #'ComponentType'{name='data-value-descriptor',
+ typespec=#type{def='ObjectDescriptor'},
+ prop='OPTIONAL'},
+ Data_value =
+ #'ComponentType'{name='data-value',
+ typespec=#type{def='OCTET STRING'},
+ prop=mandatory},
+
+ %% The EXTERNAL type is represented, according to ASN.1 1990,
+ %% as a SEQUENCE with components: direct-reference, indirect-reference,
+ %% data-value-descriptor and encoding.
+
+ Direct_reference =
+ #'ComponentType'{name='direct-reference',
+ typespec=#type{def='OBJECT IDENTIFIER'},
+ prop='OPTIONAL'},
+
+ Indirect_reference =
+ #'ComponentType'{name='indirect-reference',
+ typespec=#type{def='INTEGER'},
+ prop='OPTIONAL'},
+
+ Single_ASN1_type =
+ #'ComponentType'{name='single-ASN1-type',
+ typespec=#type{tag=[{tag,'CONTEXT',0,
+ 'EXPLICIT',32}],
+ def='ANY'},
+ prop=mandatory},
+
+ Octet_aligned =
+ #'ComponentType'{name='octet-aligned',
+ typespec=#type{tag=[{tag,'CONTEXT',1,
+ 'IMPLICIT',32}],
+ def='OCTET STRING'},
+ prop=mandatory},
+
+ Arbitrary =
+ #'ComponentType'{name=arbitrary,
+ typespec=#type{tag=[{tag,'CONTEXT',2,
+ 'IMPLICIT',32}],
+ def={'BIT STRING',[]}},
+ prop=mandatory},
+
+ Encoding =
+ #'ComponentType'{name=encoding,
+ typespec=#type{def={'CHOICE',
+ [Single_ASN1_type,Octet_aligned,
+ Arbitrary]}},
+ prop=mandatory},
+
+ EXTERNAL_components1990 =
+ [Direct_reference,Indirect_reference,Data_value_descriptor,Encoding],
+
+ %% The EMBEDDED PDV type is represented by a SEQUENCE type
+ %% with components: identification and data-value
+ Abstract =
+ #'ComponentType'{name=abstract,
+ typespec=#type{def='OBJECT IDENTIFIER'},
+ prop=mandatory},
+ Transfer =
+ #'ComponentType'{name=transfer,
+ typespec=#type{def='OBJECT IDENTIFIER'},
+ prop=mandatory},
+ AbstractTrSeq =
+ #'SEQUENCE'{components=[Abstract,Transfer]},
+ Syntaxes =
+ #'ComponentType'{name=syntaxes,
+ typespec=#type{def=AbstractTrSeq},
+ prop=mandatory},
+ Fixed = #'ComponentType'{name=fixed,
+ typespec=#type{def='NULL'},
+ prop=mandatory},
+ Negotiations =
+ [Syntaxes,Syntax,Presentation_Cid,Context_negot,
+ Transfer_syntax,Fixed],
+ Identification2 =
+ #'ComponentType'{name=identification,
+ typespec=#type{def={'CHOICE',Negotiations}},
+ prop=mandatory},
+ EmbeddedPdv_components =
+ [Identification2,Data_value],
+
+ %% The CHARACTER STRING type is represented by a SEQUENCE type
+ %% with components: identification and string-value
+ String_value =
+ #'ComponentType'{name='string-value',
+ typespec=#type{def='OCTET STRING'},
+ prop=mandatory},
+ CharacterString_components =
+ [Identification2,String_value],
+
+ [{'EXTERNAL',
+ #type{tag=[#tag{class='UNIVERSAL',
+ number=8,
+ type='IMPLICIT',
+ form=32}],
+ def=#'SEQUENCE'{components=
+ EXTERNAL_components1990}}},
+ {'EMBEDDED PDV',
+ #type{tag=[#tag{class='UNIVERSAL',
+ number=11,
+ type='IMPLICIT',
+ form=32}],
+ def=#'SEQUENCE'{components=EmbeddedPdv_components}}},
+ {'CHARACTER STRING',
+ #type{tag=[#tag{class='UNIVERSAL',
+ number=29,
+ type='IMPLICIT',
+ form=32}],
+ def=#'SEQUENCE'{components=CharacterString_components}}}
+ ].
+
+
+include_default_class(Module) ->
+ NameAbsList = default_class_list(),
+ include_default_class1(Module,NameAbsList).
+
+include_default_class1(_,[]) ->
+ ok;
+include_default_class1(Module,[{Name,TS}|_Rest]) ->
+ case asn1_db:dbget(Module,Name) of
+ undefined ->
+ C = #classdef{checked=true,name=Name,
+ typespec=TS},
+ asn1_db:dbput(Module,Name,C);
+ _ -> ok
+ end.
+
+default_class_list() ->
+ [{'TYPE-IDENTIFIER',
+ {objectclass,
+ [{fixedtypevaluefield,
+ id,
+ {type,[],'OBJECT IDENTIFIER',[]},
+ 'UNIQUE',
+ 'MANDATORY'},
+ {typefield,'Type','MANDATORY'}],
+ {'WITH SYNTAX',
+ [{typefieldreference,'Type'},
+ 'IDENTIFIED',
+ 'BY',
+ {valuefieldreference,id}]}}},
+ {'ABSTRACT-SYNTAX',
+ {objectclass,
+ [{fixedtypevaluefield,
+ id,
+ {type,[],'OBJECT IDENTIFIER',[]},
+ 'UNIQUE',
+ 'MANDATORY'},
+ {typefield,'Type','MANDATORY'},
+ {fixedtypevaluefield,
+ property,
+ {type,
+ [],
+ {'BIT STRING',[]},
+ []},
+ undefined,
+ {'DEFAULT',
+ [0,1,0]}}],
+ {'WITH SYNTAX',
+ [{typefieldreference,'Type'},
+ 'IDENTIFIED',
+ 'BY',
+ {valuefieldreference,id},
+ ['HAS',
+ 'PROPERTY',
+ {valuefieldreference,property}]]}}}].
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl
new file mode 100644
index 0000000000..695f648924
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl
@@ -0,0 +1,1468 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_constructed_ber.erl,v 1.1 2008/12/17 09:53:29 mikpe Exp $
+%%
+-module(asn1ct_constructed_ber).
+
+-export([gen_encode_sequence/3]).
+-export([gen_decode_sequence/3]).
+-export([gen_encode_set/3]).
+-export([gen_decode_set/3]).
+-export([gen_encode_sof/4]).
+-export([gen_decode_sof/4]).
+-export([gen_encode_choice/3]).
+-export([gen_decode_choice/3]).
+
+%%%% Application internal exports
+-export([match_tag/2]).
+
+-include("asn1_records.hrl").
+
+-import(asn1ct_gen, [emit/1,demit/1]).
+
+% the encoding of the tag class, bits 8 and 7
+-define(UNIVERSAL, 0).
+-define(APPLICATION, 16#40).
+-define(CONTEXT, 16#80).
+-define(PRIVATE, 16#C0).
+
+% primitive or constructed encoding % bit 6
+-define(PRIMITIVE, 0).
+-define(CONSTRUCTED, 2#00100000).
+
+
+
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Encode/decode SEQUENCE
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+gen_encode_sequence(Erules,Typename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ asn1ct_name:new(term),
+ asn1ct_name:new(bytes),
+
+ %% if the type is EXTERNAL the input value must be transformed to
+ %% the ASN.1 1990 format
+ case Typename of
+ ['EXTERNAL'] ->
+ emit([" NewVal = asn1rt_check:transform_to_EXTERNAL1990(Val),",
+ nl]);
+ _ ->
+ ok
+ end,
+
+ {SeqOrSet,TableConsInfo,CompList} =
+ case D#type.def of
+ #'SEQUENCE'{tablecinf=TCI,components=CL} ->
+ {'SEQUENCE',TCI,CL};
+ #'SET'{tablecinf=TCI,components=CL} ->
+ {'SET',TCI,CL}
+ end,
+ Ext = extensible(CompList),
+ CompList1 = case CompList of
+ {Rl,El} -> Rl ++ El;
+ _ -> CompList
+ end,
+ EncObj =
+ case TableConsInfo of
+ #simpletableattributes{usedclassfield=Used,
+ uniqueclassfield=Unique} when Used /= Unique ->
+ false;
+ %% ObjectSet, name of the object set in constraints
+ %%
+ %%{ObjectSet,AttrN,N,UniqueFieldName}
+ #simpletableattributes{objectsetname=ObjectSet,
+ c_name=AttrN,
+ c_index=N,
+ usedclassfield=UniqueFieldName,
+ uniqueclassfield=UniqueFieldName,
+ valueindex=ValueIndex
+ } ->
+ OSDef =
+ case ObjectSet of
+ {Module,OSName} ->
+ asn1_db:dbget(Module,OSName);
+ OSName ->
+ asn1_db:dbget(get(currmod),OSName)
+ end,
+% io:format("currmod: ~p~nOSName: ~p~nAttrN: ~p~nN: ~p~nUniqueFieldName: ~p~n",
+% [get(currmod),OSName,AttrN,N,UniqueFieldName]),
+ case (OSDef#typedef.typespec)#'ObjectSet'.gen of
+ true ->
+% Val = lists:concat(["?RT_BER:cindex(",
+% N+1,",Val,"]),
+ ObjectEncode =
+ asn1ct_gen:un_hyphen_var(lists:concat(['Obj',
+ AttrN])),
+ emit({ObjectEncode," = ",nl}),
+ emit({" 'getenc_",ObjectSet,"'(",{asis,UniqueFieldName},
+ ", ",nl}),
+% emit({indent(35),"?RT_BER:cindex(",N+1,", Val,",
+% {asis,AttrN},")),",nl}),
+ emit([indent(10+length(atom_to_list(ObjectSet))),
+ "value_match(",{asis,ValueIndex},",",
+ "?RT_BER:cindex(",N+1,",Val,",
+ {asis,AttrN},"))),",nl]),
+ notice_value_match(),
+ {AttrN,ObjectEncode};
+ _ ->
+ false
+ end;
+ _ ->
+ case D#type.tablecinf of
+ [{objfun,_}|_] ->
+ %% when the simpletableattributes was at an
+ %% outer level and the objfun has been passed
+ %% through the function call
+ {"got objfun through args","ObjFun"};
+ _ ->
+ false
+ end
+ end,
+
+ gen_enc_sequence_call(Erules,Typename,CompList1,1,Ext,EncObj),
+
+ MyTag = [X#tag{class=asn1ct_gen_ber:decode_class(X#tag.class)}|| X <- D#type.tag]
+ ++
+ [#tag{class = asn1ct_gen_ber:decode_class('UNIVERSAL'),
+ number = asn1ct_gen_ber:decode_type(SeqOrSet),
+ form = ?CONSTRUCTED,
+ type = 'IMPLICIT'}],
+ emit([nl," BytesSoFar = "]),
+ case SeqOrSet of
+ 'SET' when (D#type.def)#'SET'.sorted == dynamic ->
+ emit("?RT_BER:dynamicsort_SET_components(["),
+ mkvlist(asn1ct_name:all(encBytes)),
+ emit(["]),",nl]);
+ _ ->
+ emit("["),
+ mkvlist(asn1ct_name:all(encBytes)),
+ emit(["],",nl])
+ end,
+ emit(" LenSoFar = "),
+ case asn1ct_name:all(encLen) of
+ [] -> emit("0");
+ AllLengths ->
+ mkvplus(AllLengths)
+ end,
+ emit([",",nl]),
+% emit(["{TagBytes,Len} = ?RT_BER:encode_tags(TagIn ++ ",
+ emit([" ?RT_BER:encode_tags(TagIn ++ ",
+ {asis,MyTag},", BytesSoFar, LenSoFar).",nl]).
+
+
+gen_decode_sequence(Erules,Typename,D) when record(D,type) ->
+ asn1ct_name:start(),
+% asn1ct_name:new(term),
+ asn1ct_name:new(tag),
+ #'SEQUENCE'{tablecinf=TableConsInfo,components=CList} = D#type.def,
+ Ext = extensible(CList),
+ CompList = case CList of
+ {Rl,El} -> Rl ++ El;
+ _ -> CList
+ end,
+
+ emit({" %%-------------------------------------------------",nl}),
+ emit({" %% decode tag and length ",nl}),
+ emit({" %%-------------------------------------------------",nl}),
+
+ asn1ct_name:new(rb),
+ MyTag = [X#tag{class=asn1ct_gen_ber:decode_class(X#tag.class)}|| X <- D#type.tag]
+ ++
+ [#tag{class = asn1ct_gen_ber:decode_class('UNIVERSAL'),
+ number = asn1ct_gen_ber:decode_type('SEQUENCE'),
+ form = ?CONSTRUCTED,
+ type = 'IMPLICIT'}],
+ emit([" {{_,",asn1ct_gen_ber:unused_var("Len",D#type.def),"},",{next,bytes},",",{curr,rb},
+ "} = ?RT_BER:check_tags(TagIn ++ ",{asis,MyTag},", ",
+ {curr,bytes},", OptOrMand), ",nl]),
+ asn1ct_name:new(bytes),
+ asn1ct_name:new(len),
+
+ case CompList of
+ [] -> true;
+ _ ->
+ emit({"{",{next,bytes},
+ ",RemBytes} = ?RT_BER:split_list(",
+ {curr,bytes},
+ ",", {prev,len},"),",nl}),
+ asn1ct_name:new(bytes)
+ end,
+
+ {DecObjInf,UniqueFName,ValueIndex} =
+ case TableConsInfo of
+ #simpletableattributes{objectsetname=ObjectSet,
+ c_name=AttrN,
+ usedclassfield=UniqueFieldName,
+ uniqueclassfield=UniqueFieldName,
+ valueindex=ValIndex
+ } ->
+ F = fun(#'ComponentType'{typespec=CT})->
+ case {CT#type.constraint,CT#type.tablecinf} of
+ {[],[{objfun,_}|_R]} -> true;
+ _ -> false
+ end
+ end,
+ case lists:any(F,CompList) of
+ %%AttributeName = asn1ct_gen:un_hyphen_var(AttrN),
+ true -> % when a component relation constraint establishes a
+ %% relation from a component to a subtype component
+ %% of another component
+ {{AttrN,{deep,ObjectSet,UniqueFieldName,
+ ValIndex}},
+ UniqueFieldName,ValIndex};
+ false ->
+ {{AttrN,ObjectSet},UniqueFieldName,ValIndex}
+ end;
+ _ ->
+ {false,false,false}
+ end,
+ case gen_dec_sequence_call(Erules,Typename,CompList,Ext,DecObjInf) of
+ no_terms -> % an empty sequence
+ emit([nl,nl]),
+ demit({"Result = "}), %dbg
+ %% return value as record
+ asn1ct_name:new(rb),
+ emit([" {{'",asn1ct_gen:list2rname(Typename),"'}, ",{curr,bytes},",",nl," "]),
+ asn1ct_gen_ber:add_removed_bytes(),
+ emit(["}.",nl]);
+ {LeadingAttrTerm,PostponedDecArgs} ->
+ emit([com,nl,nl]),
+ case {LeadingAttrTerm,PostponedDecArgs} of
+ {[],[]} ->
+ ok;
+ {_,[]} ->
+ ok;
+ {[{ObjSet,LeadingAttr,Term}],PostponedDecArgs} ->
+ DecObj = asn1ct_gen:un_hyphen_var(lists:concat(['DecObj',LeadingAttr,Term])),
+ ValueMatch = value_match(ValueIndex,Term),
+ emit([DecObj," =",nl," 'getdec_",ObjSet,"'(",
+% {asis,UniqueFName},", ",Term,"),",nl}),
+ {asis,UniqueFName},", ",ValueMatch,"),",nl]),
+ gen_dec_postponed_decs(DecObj,PostponedDecArgs)
+ end,
+ demit({"Result = "}), %dbg
+ %% return value as record
+ asn1ct_name:new(rb),
+ asn1ct_name:new(bytes),
+ ExtStatus = case Ext of
+ {ext,_,_} -> ext;
+ noext -> noext
+ end,
+ emit([" {",{next,bytes},",",{curr,rb},"} = ?RT_BER:restbytes2(RemBytes, ",
+ {curr,bytes},",",ExtStatus,"),",nl]),
+ asn1ct_name:new(rb),
+ case Typename of
+ ['EXTERNAL'] ->
+ emit([" OldFormat={'",asn1ct_gen:list2rname(Typename),
+ "', "]),
+ mkvlist(asn1ct_name:all(term)),
+ emit(["},",nl]),
+ emit([" ASN11994Format =",nl,
+ " asn1rt_check:transform_to_EXTERNAL1994",
+ "(OldFormat),",nl]),
+ emit([" {ASN11994Format,",{next,bytes},", "]);
+ _ ->
+ emit([" {{'",asn1ct_gen:list2rname(Typename),"', "]),
+ mkvlist(asn1ct_name:all(term)),
+ emit(["}, ",{next,bytes},", "])
+ end,
+ asn1ct_gen_ber:add_removed_bytes(),
+ emit(["}.",nl])
+ end.
+
+gen_dec_postponed_decs(_,[]) ->
+ emit(nl);
+gen_dec_postponed_decs(DecObj,[{_Cname,{FirstPFN,PFNList},Term,TmpTerm,_Tag,OptOrMand}|Rest]) ->
+% asn1ct_name:new(term),
+ asn1ct_name:new(tmpterm),
+ asn1ct_name:new(reason),
+
+ emit({"{",Term,", _, _} = ",nl}),
+ N = case OptOrMand of
+ mandatory -> 0;
+ 'OPTIONAL' ->
+ emit_opt_or_mand_check(asn1_NOVALUE,TmpTerm),
+ 6;
+ {'DEFAULT',Val} ->
+ emit_opt_or_mand_check(Val,TmpTerm),
+ 6
+ end,
+ emit({indent(N+3),"case (catch ",DecObj,"(",{asis,FirstPFN},
+% ", ",TmpTerm,", ", {asis,Tag},", ",{asis,PFNList},")) of",nl}),
+ ", ",TmpTerm,", [], ",{asis,PFNList},")) of",nl}),
+ emit({indent(N+6),"{'EXIT', ",{curr,reason},"} ->",nl}),
+ emit({indent(N+9),"exit({'Type not compatible with table constraint',",
+ {curr,reason},"});",nl}),
+ emit({indent(N+6),{curr,tmpterm}," ->",nl}),
+ emit({indent(N+9),{curr,tmpterm},nl}),
+
+ case OptOrMand of
+ mandatory -> emit([indent(N+3),"end,",nl]);
+ _ ->
+ emit([indent(N+3),"end",nl,
+ indent(3),"end,",nl])
+ end,
+% emit({indent(3),"end,",nl}),
+ gen_dec_postponed_decs(DecObj,Rest).
+
+
+emit_opt_or_mand_check(Value,TmpTerm) ->
+ emit([indent(3),"case ",TmpTerm," of",nl,
+ indent(6),{asis,Value}," -> {",{asis,Value},",[],[]};",nl,
+ indent(6),"_ ->",nl]).
+
+%%============================================================================
+%% Encode/decode SET
+%%
+%%============================================================================
+
+gen_encode_set(Erules,Typename,D) when record(D,type) ->
+ gen_encode_sequence(Erules,Typename,D).
+
+gen_decode_set(Erules,Typename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ asn1ct_name:new(term),
+ asn1ct_name:new(tag),
+ #'SET'{components=TCompList} = D#type.def,
+ Ext = extensible(TCompList),
+ CompList = case TCompList of
+ {Rl,El} -> Rl ++ El;
+ _ -> TCompList
+ end,
+
+ emit([" %%-------------------------------------------------",nl]),
+ emit([" %% decode tag and length ",nl]),
+ emit([" %%-------------------------------------------------",nl]),
+
+ asn1ct_name:new(rb),
+ MyTag = [X#tag{class=asn1ct_gen_ber:decode_class(X#tag.class)}|| X <- D#type.tag]
+ ++
+ [#tag{class = asn1ct_gen_ber:decode_class('UNIVERSAL'),
+ number = asn1ct_gen_ber:decode_type('SET'),
+ form = ?CONSTRUCTED,
+ type = 'IMPLICIT'}],
+ emit([" {{_,Len},",{next,bytes},",",{curr,rb},
+ "} = ?RT_BER:check_tags(TagIn ++ ",{asis,MyTag},", ",
+ {curr,bytes},", OptOrMand), ",nl]),
+ asn1ct_name:new(bytes),
+ asn1ct_name:new(len),
+ asn1ct_name:new(rb),
+
+ emit([" {SetTerm, SetBytes, ",{curr,rb},"} = ?RT_BER:decode_set(0, Len, ",
+ {curr,bytes},", OptOrMand, ",
+ "fun 'dec_",asn1ct_gen:list2name(Typename),"_fun'/2, []),",nl]),
+
+ asn1ct_name:new(rb),
+ emit([" 'dec_",asn1ct_gen:list2name(Typename),"_result'(lists:sort(SetTerm), SetBytes, "]),
+ asn1ct_gen_ber:add_removed_bytes(),
+ emit([").",nl,nl,nl]),
+
+ emit({"%%-------------------------------------------------",nl}),
+ emit({"%% Set loop fun for ",asn1ct_gen:list2name(Typename),nl}),
+ emit({"%%-------------------------------------------------",nl}),
+
+ asn1ct_name:clear(),
+ asn1ct_name:new(term),
+ emit(["'dec_",asn1ct_gen:list2name(Typename),"_fun'(",{curr,bytes},
+ ", OptOrMand) ->",nl]),
+
+ asn1ct_name:new(bytes),
+ gen_dec_set(Erules,Typename,CompList,1,Ext),
+
+ emit([" %% tag not found, if extensionmark we should skip bytes here",nl]),
+ emit([indent(6),"_ -> {[], Bytes,0}",nl]),
+ emit([indent(3),"end.",nl,nl,nl]),
+
+
+ emit({"%%-------------------------------------------------",nl}),
+ emit({"%% Result ",asn1ct_gen:list2name(Typename),nl}),
+ emit({"%%-------------------------------------------------",nl}),
+
+ asn1ct_name:clear(),
+ emit({"'dec_",asn1ct_gen:list2name(Typename),"_result'(",
+ asn1ct_gen_ber:unused_var("TermList",D#type.def),", Bytes, Rb) ->",nl}),
+
+ case gen_dec_set_result(Erules,Typename,CompList) of
+ no_terms ->
+ %% return value as record
+ asn1ct_name:new(rb),
+ emit({" {{'",asn1ct_gen:list2rname(Typename),"'}, Bytes, Rb}.",nl});
+ _ ->
+ emit({nl," case ",{curr,termList}," of",nl}),
+ emit({" [] -> {{'",asn1ct_gen:list2rname(Typename),"', "}),
+ mkvlist(asn1ct_name:all(term)),
+ emit({"}, Bytes, Rb};",nl}),
+ emit({" ExtraAtt -> exit({error,{asn1,{too_many_attributes, ExtraAtt}}})",nl}),
+ emit({" end.",nl}),
+ emit({nl,nl,nl})
+ end.
+
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Encode/decode SEQUENCE OF and SET OF
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+gen_encode_sof(Erules,Typename,_InnerTypename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ {SeqOrSetOf, Cont} = D#type.def,
+
+ Objfun = case D#type.tablecinf of
+ [{objfun,_}|_R] ->
+ ", ObjFun";
+ _ ->
+ ""
+ end,
+
+ emit({" {EncBytes,EncLen} = 'enc_",asn1ct_gen:list2name(Typename),
+ "_components'(Val",Objfun,",[],0),",nl}),
+
+ MyTag = [X#tag{class=asn1ct_gen_ber:decode_class(X#tag.class)}|| X <- D#type.tag]
+ ++
+ [#tag{class = asn1ct_gen_ber:decode_class('UNIVERSAL'),
+ number = asn1ct_gen_ber:decode_type(SeqOrSetOf),
+ form = ?CONSTRUCTED,
+ type = 'IMPLICIT'}],
+% gen_encode_tags(Erules,MyTag,"EncLen","EncBytes"),
+ emit([" ?RT_BER:encode_tags(TagIn ++ ",
+ {asis,MyTag},", EncBytes, EncLen).",nl,nl]),
+
+ gen_encode_sof_components(Erules,Typename,SeqOrSetOf,Cont).
+% gen_enc_line(Erules,Typename,TypeNameSuffix,Cont,"H",0,
+% mandatory,"{EncBytes,EncLen} = "),
+
+
+gen_decode_sof(Erules,Typename,_InnerTypename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ {SeqOrSetOf, TypeTag, Cont} =
+ case D#type.def of
+ {'SET OF',_Cont} -> {'SET OF','SET',_Cont};
+ {'SEQUENCE OF',_Cont} -> {'SEQUENCE OF','SEQUENCE',_Cont}
+ end,
+ TypeNameSuffix = asn1ct_gen:constructed_suffix(SeqOrSetOf,Cont#type.def),
+
+ emit({" %%-------------------------------------------------",nl}),
+ emit({" %% decode tag and length ",nl}),
+ emit({" %%-------------------------------------------------",nl}),
+
+ asn1ct_name:new(rb),
+ MyTag = [X#tag{class=asn1ct_gen_ber:decode_class(X#tag.class)}|| X <- D#type.tag]
+ ++
+ [#tag{class = asn1ct_gen_ber:decode_class('UNIVERSAL'),
+ number = asn1ct_gen_ber:decode_type(TypeTag),
+ form = ?CONSTRUCTED,
+ type = 'IMPLICIT'}],
+ emit([" {{_,Len},",{next,bytes},",",{curr,rb},
+ "} = ?RT_BER:check_tags(TagIn ++ ",{asis,MyTag},", ",
+ {curr,bytes},", OptOrMand), ",nl]),
+
+ emit([" ?RT_BER:decode_components(",{curr,rb}]),
+ InnerType = asn1ct_gen:get_inner(Cont#type.def),
+ ContName = case asn1ct_gen:type(InnerType) of
+ Atom when atom(Atom) -> Atom;
+ _ -> TypeNameSuffix
+ end,
+ emit([", Len, ",{next,bytes},", "]),
+% NewCont =
+% case Cont#type.def of
+% {'ENUMERATED',_,Components}->
+% Cont#type{def={'ENUMERATED',Components}};
+% _ -> Cont
+% end,
+ ObjFun =
+ case D#type.tablecinf of
+ [{objfun,_}|_R] ->
+ ", ObjFun";
+ _ ->
+ []
+ end,
+ gen_dec_line_sof(Erules,Typename,ContName,Cont,ObjFun),
+ emit([", []).",nl,nl,nl]).
+
+
+gen_encode_sof_components(Erules,Typename,SeqOrSetOf,Cont)
+ when record(Cont,type)->
+
+ {Objfun,ObjFun_novar,EncObj} =
+ case Cont#type.tablecinf of
+ [{objfun,_}|_R] ->
+ {", ObjFun",", _",{no_attr,"ObjFun"}};
+ _ ->
+ {"","",false}
+ end,
+ emit(["'enc_",asn1ct_gen:list2name(Typename),
+ "_components'([]",ObjFun_novar,", AccBytes, AccLen) -> ",nl]),
+
+ case catch lists:member(der,get(encoding_options)) of
+ true ->
+ emit([indent(3),
+ "{?RT_BER:dynamicsort_SETOF(AccBytes),AccLen};",nl,nl]);
+ _ ->
+ emit([indent(3),"{lists:reverse(AccBytes),AccLen};",nl,nl])
+ end,
+ emit(["'enc_",asn1ct_gen:list2name(Typename),
+ "_components'([H|T]",Objfun,",AccBytes, AccLen) ->",nl]),
+ TypeNameSuffix = asn1ct_gen:constructed_suffix(SeqOrSetOf,Cont#type.def),
+ gen_enc_line(Erules,Typename,TypeNameSuffix,Cont,"H",3,
+ mandatory,"{EncBytes,EncLen} = ",EncObj),
+ emit([",",nl]),
+ emit([indent(3),"'enc_",asn1ct_gen:list2name(Typename),
+ "_components'(T",Objfun,","]),
+ emit(["[EncBytes|AccBytes], AccLen + EncLen).",nl,nl]).
+
+%%============================================================================
+%% Encode/decode CHOICE
+%%
+%%============================================================================
+
+gen_encode_choice(Erules,Typename,D) when record(D,type) ->
+ ChoiceTag = D#type.tag,
+ {'CHOICE',CompList} = D#type.def,
+ Ext = extensible(CompList),
+ CompList1 = case CompList of
+ {Rl,El} -> Rl ++ El;
+ _ -> CompList
+ end,
+ gen_enc_choice(Erules,Typename,ChoiceTag,CompList1,Ext),
+ emit({nl,nl}).
+
+gen_decode_choice(Erules,Typename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ asn1ct_name:new(bytes),
+ ChoiceTag = D#type.tag,
+ {'CHOICE',CompList} = D#type.def,
+ Ext = extensible(CompList),
+ CompList1 = case CompList of
+ {Rl,El} -> Rl ++ El;
+ _ -> CompList
+ end,
+ gen_dec_choice(Erules,Typename,ChoiceTag,CompList1,Ext),
+ emit({".",nl}).
+
+
+%%============================================================================
+%% Encode SEQUENCE
+%%
+%%============================================================================
+
+gen_enc_sequence_call(Erules,TopType,[#'ComponentType'{name=Cname,typespec=Type,prop=Prop}|Rest],Pos,Ext,EncObj) ->
+ asn1ct_name:new(encBytes),
+ asn1ct_name:new(encLen),
+ Element =
+ case TopType of
+ ['EXTERNAL'] ->
+ io_lib:format("?RT_BER:cindex(~w,NewVal,~w)",[Pos+1,Cname]);
+ _ ->
+ io_lib:format("?RT_BER:cindex(~w,Val,~w)",[Pos+1,Cname])
+ end,
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ print_attribute_comment(InnerType,Pos,Prop),
+ gen_enc_line(Erules,TopType,Cname,Type,Element,3,Prop,EncObj),
+ case Rest of
+ [] ->
+ emit({com,nl});
+ _ ->
+ emit({com,nl}),
+ gen_enc_sequence_call(Erules,TopType,Rest,Pos+1,Ext,EncObj)
+ end;
+
+gen_enc_sequence_call(_Erules,_TopType,[],_Num,_,_) ->
+ true.
+
+%%============================================================================
+%% Decode SEQUENCE
+%%
+%%============================================================================
+
+gen_dec_sequence_call(Erules,TopType,CompList,Ext,DecObjInf) ->
+ gen_dec_sequence_call1(Erules,TopType, CompList, 1, Ext,DecObjInf,[],[]).
+
+
+gen_dec_sequence_call1(Erules,TopType,[#'ComponentType'{name=Cname,typespec=Type,prop=Prop,tags=Tags}|Rest],Num,Ext,DecObjInf,LeadingAttrAcc,ArgsAcc) ->
+ {LA,PostponedDec} =
+ gen_dec_component(Erules,TopType,Cname,Tags,Type,Num,Prop,
+ Ext,DecObjInf),
+ case Rest of
+ [] ->
+ {LA ++ LeadingAttrAcc,PostponedDec ++ ArgsAcc};
+ _ ->
+ emit({com,nl}),
+% asn1ct_name:new(term),
+ asn1ct_name:new(bytes),
+ gen_dec_sequence_call1(Erules,TopType,Rest,Num+1,Ext,DecObjInf,
+ LA++LeadingAttrAcc,PostponedDec++ArgsAcc)
+ end;
+
+gen_dec_sequence_call1(_Erules,_TopType,[],1,_,_,_,_) ->
+ no_terms.
+%%gen_dec_sequence_call1(Erules,_TopType,[],Num,_) ->
+%% true.
+
+
+
+%%----------------------------
+%%SEQUENCE mandatory
+%%----------------------------
+
+gen_dec_component(Erules,TopType,Cname,CTags,Type,Pos,Prop,Ext,DecObjInf) ->
+ InnerType =
+ case Type#type.def of
+ #'ObjectClassFieldType'{type=OCFTType} -> OCFTType;
+ _ -> asn1ct_gen:get_inner(Type#type.def)
+ end,
+% case asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info) of
+% no ->
+% asn1ct_gen:get_inner(Type#type.def);
+% _ ->
+% Type#type.def
+% end,
+ Prop1 = case {Prop,Ext} of
+ {mandatory,{ext,Epos,_}} when Pos >= Epos ->
+ 'OPTIONAL';
+ _ ->
+ Prop
+ end,
+ print_attribute_comment(InnerType,Pos,Prop1),
+ emit(" "),
+
+ case {InnerType,DecObjInf} of
+ {{typefield,_},NotFalse} when NotFalse /= false ->
+ asn1ct_name:new(term),
+ asn1ct_name:new(tmpterm),
+ emit({"{",{curr,tmpterm},", ",{next,bytes},",",{next,rb},"} = "});
+ {{objectfield,_,_},_} ->
+ asn1ct_name:new(term),
+ asn1ct_name:new(tmpterm),
+ emit({"{",{curr,tmpterm},", ",{next,bytes},",",{next,rb},"} = "});
+ _ ->
+ asn1ct_name:new(term),
+ emit({"{",{curr,term},",",{next,bytes},",",{next,rb},"} = "})
+ end,
+ asn1ct_name:new(rb),
+ PostponedDec =
+ gen_dec_line(Erules,TopType,Cname,CTags,Type,Prop1,DecObjInf),
+ asn1ct_name:new(form),
+ PostponedDec.
+
+
+%%-------------------------------------
+%% Decode SET
+%%-------------------------------------
+
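+%% gen_dec_set/5 emits the body of the generated 'dec_<Type>_fun' loop above:
+%% it binds the list of every component's tags, lets ?RT_BER:check_if_valid_tag
+%% decide which component the tag at hand belongs to, and generates one case
+%% branch per component (extension markers are skipped).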
+gen_dec_set(Erules,TopType,CompList,Pos,_Ext) ->
+ TagList = get_all_choice_tags(CompList),
+ emit({indent(3),
+ {curr,tagList}," = ",{asis,TagList},",",nl}),
+ emit({indent(3),
+ "case ?RT_BER:check_if_valid_tag(Bytes, ",
+ {curr,tagList},", OptOrMand) of",nl}),
+ asn1ct_name:new(tagList),
+ asn1ct_name:new(rbCho),
+ asn1ct_name:new(choTags),
+ gen_dec_set_cases(Erules,TopType,CompList,TagList,Pos),
+ asn1ct_name:new(tag),
+ asn1ct_name:new(bytes).
+
+
+
+gen_dec_set_cases(_,_,[],_,_) ->
+ ok;
+gen_dec_set_cases(Erules,TopType,[H|T],List,Pos) ->
+ case H of
+ {'EXTENSIONMARK', _, _} ->
+ gen_dec_set_cases(Erules,TopType,T,List,Pos);
+ _ ->
+ Name = H#'ComponentType'.name,
+ Type = H#'ComponentType'.typespec,
+
+ emit({indent(6),"'",Name,"' ->",nl}),
+ case Type#type.def of
+ {'CHOICE',_NewCompList} ->
+ gen_dec_set_cases_choice(Erules,TopType,H,Pos);
+ _ ->
+ gen_dec_set_cases_type(Erules,TopType,H,Pos)
+ end,
+ gen_dec_set_cases(Erules,TopType,T,List,Pos+1)
+ end.
+
+
+
+
+gen_dec_set_cases_choice(_Erules,TopType,H,Pos) ->
+ Cname = H#'ComponentType'.name,
+ Tag = [X#tag{class=asn1ct_gen_ber:decode_class(X#tag.class)}
+ || X <- (H#'ComponentType'.typespec)#type.tag],
+ asn1ct_name:new(rbCho),
+ emit({indent(9),"{Dec, Rest, ",{curr,rbCho},"} = "}),
+ emit({"'dec_",asn1ct_gen:list2name([Cname|TopType]),
+ "'(Bytes,OptOrMand,",{asis,Tag},"),",nl}),
+ emit([" {{",Pos,",Dec}, Rest, ",{curr,rbCho},"}"]),
+ emit([";",nl,nl]).
+
+
+gen_dec_set_cases_type(Erules,TopType,H,Pos) ->
+ Cname = H#'ComponentType'.name,
+ Type = H#'ComponentType'.typespec,
+    %% always use Prop = mandatory here, never Prop = H#'ComponentType'.prop
+
+ asn1ct_name:new(rbCho),
+ emit({indent(9),"{Dec, Rest, ",{curr,rbCho},"} = "}),
+ asn1ct_name:delete(bytes),
+    %% we have already seen the tag, so now we must find the value;
+    %% that is why we always use 'mandatory' here
+ gen_dec_line(Erules,TopType,Cname,[],Type,mandatory,decObjInf),
+ asn1ct_name:new(bytes),
+
+ emit([",",nl]),
+ emit(["{{",Pos,",Dec}, Rest, ",{curr,rbCho},"}"]),
+ emit([";",nl,nl]).
+
+
+%%---------------------------------
+%% Decode SET result
+%%---------------------------------
+
+gen_dec_set_result(Erules,TopType,{CompList,_ExtList}) ->
+ gen_dec_set_result1(Erules,TopType, CompList, 1);
+gen_dec_set_result(Erules,TopType,CompList) ->
+ gen_dec_set_result1(Erules,TopType, CompList, 1).
+
+gen_dec_set_result1(Erules,TopType,
+ [#'ComponentType'{name=Cname,
+ typespec=Type,
+ prop=Prop}|Rest],Num) ->
+ gen_dec_set_component(Erules,TopType,Cname,Type,Num,Prop),
+ case Rest of
+ [] ->
+ true;
+ _ ->
+ gen_dec_set_result1(Erules,TopType,Rest,Num+1)
+ end;
+
+gen_dec_set_result1(_Erules,_TopType,[],1) ->
+ no_terms;
+gen_dec_set_result1(_Erules,_TopType,[],_Num) ->
+ true.
+
+
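+%% gen_dec_set_component/6 emits code that picks component number Pos out of
+%% the sorted {Position,Term} list: a match on the head yields the value;
+%% otherwise OPTIONAL becomes asn1_NOVALUE, DEFAULT its default value, and a
+%% missing mandatory attribute exits with mandatory_attribute_no.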
+gen_dec_set_component(_Erules,_TopType,_Cname,Type,Pos,Prop) ->
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ print_attribute_comment(InnerType,Pos,Prop),
+ emit({" {",{next,term},com,{next,termList},"} =",nl}),
+ emit({" case ",{curr,termList}," of",nl}),
+ emit({" [{",Pos,com,{curr,termTmp},"}|",
+ {curr,rest},"] -> "}),
+ emit({"{",{curr,termTmp},com,
+ {curr,rest},"};",nl}),
+ case Prop of
+ 'OPTIONAL' ->
+ emit([indent(10),"_ -> {asn1_NOVALUE, ",{curr,termList},"}",nl]);
+ {'DEFAULT', DefVal} ->
+ emit([indent(10),
+ "_ -> {",{asis,DefVal},", ",{curr,termList},"}",nl]);
+ mandatory ->
+ emit([indent(10),
+ "_ -> exit({error,{asn1,{mandatory_attribute_no, ",
+ Pos,", missing}}})",nl])
+ end,
+ emit([indent(6),"end,",nl]),
+ asn1ct_name:new(rest),
+ asn1ct_name:new(term),
+ asn1ct_name:new(termList),
+ asn1ct_name:new(termTmp).
+
+
+%%---------------------------------------------
+%% Encode CHOICE
+%%---------------------------------------------
+%% for BER we currently do care (a little) whether the choice has an EXTENSIONMARKER
+
+
+gen_enc_choice(Erules,TopType,Tag,CompList,_Ext) ->
+ gen_enc_choice1(Erules,TopType,Tag,CompList,_Ext).
+
+gen_enc_choice1(Erules,TopType,Tag,CompList,_Ext) ->
+ asn1ct_name:clear(),
+ emit({" {EncBytes,EncLen} = case element(1,Val) of",nl}),
+ gen_enc_choice2(Erules,TopType,CompList),
+ emit([nl," end,",nl,nl]),
+ NewTag = [X#tag{class=asn1ct_gen_ber:decode_class(X#tag.class)}|| X <- Tag],
+% gen_encode_tags(Erules,NewTag,"EncLen","EncBytes").
+ emit(["?RT_BER:encode_tags(TagIn ++",{asis,NewTag},", EncBytes, EncLen).",nl]).
+
+
+
+gen_enc_choice2(Erules,TopType,[H1|T]) when record(H1,'ComponentType') ->
+ Cname = H1#'ComponentType'.name,
+ Type = H1#'ComponentType'.typespec,
+ emit({" ",{asis,Cname}," ->",nl}),
+ {Encobj,Assign} =
+% case asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info) of
+ case {Type#type.def,asn1ct_gen:get_constraint(Type#type.constraint,
+ componentrelation)} of
+ {#'ObjectClassFieldType'{},{componentrelation,_,_}} ->
+ asn1ct_name:new(tmpBytes),
+ asn1ct_name:new(encBytes),
+ asn1ct_name:new(encLen),
+ Emit = ["{",{curr,tmpBytes},", _} = "],
+ {{no_attr,"ObjFun"},Emit};
+ _ ->
+ {false,[]}
+ end,
+ gen_enc_line(Erules,TopType,Cname,Type,"element(2,Val)",9,
+ mandatory,Assign,Encobj),
+ case Encobj of
+ false -> ok;
+ _ ->
+ emit({",",nl,indent(9),"{",{curr,encBytes},", ",
+ {curr,encLen},"}"})
+ end,
+ emit({";",nl}),
+ case T of
+ [] ->
+ emit([indent(6), "Else -> ",nl,
+ indent(9),"exit({error,{asn1,{invalid_choice_type,Else}}})"]);
+ _ ->
+ true
+ end,
+ gen_enc_choice2(Erules,TopType,T);
+
+gen_enc_choice2(_,_,[]) ->
+ true.
+
+
+
+
+%%--------------------------------------------
+%% Decode CHOICE
+%%--------------------------------------------
+
+gen_dec_choice(Erules,TopType, ChTag, CompList, Ext) ->
+ asn1ct_name:delete(bytes),
+ Tags = [X#tag{class=asn1ct_gen_ber:decode_class(X#tag.class)}|| X <- ChTag],
+
+ emit([" {{_,Len},",{next,bytes},
+ ", RbExp} = ?RT_BER:check_tags(TagIn++",
+ {asis,Tags},", ",
+ {curr,bytes},", OptOrMand),",nl]),
+ asn1ct_name:new(bytes),
+ asn1ct_name:new(len),
+ gen_dec_choice_indef_funs(Erules),
+ case Erules of
+ ber_bin ->
+ emit([indent(3),"case ",{curr,bytes}," of",nl]);
+ ber ->
+ emit([indent(3),
+ "case (catch ?RT_BER:peek_tag(",{curr,bytes},")) of",nl])
+ end,
+ asn1ct_name:new(tagList),
+ asn1ct_name:new(choTags),
+ gen_dec_choice_cases(Erules,TopType,CompList),
+ case Ext of
+ noext ->
+ emit([indent(6), {curr,else}," -> ",nl]),
+ emit([indent(9),"case OptOrMand of",nl,
+ indent(12),"mandatory ->","exit({error,{asn1,",
+ "{invalid_choice_tag,",{curr,else},"}}});",nl,
+ indent(12),"_ ->","exit({error,{asn1,{no_optional_tag,",
+ {curr,else},"}}})",nl,
+ indent(9),"end",nl]);
+ _ ->
+ emit([indent(6),"_ -> ",nl]),
+ emit([indent(9),"{{asn1_ExtAlt,",{curr,bytes},"},",
+ empty_lb(Erules),", RbExp}",nl])
+ end,
+ emit([indent(3),"end"]),
+ asn1ct_name:new(tag),
+ asn1ct_name:new(else).
+
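+%% gen_dec_choice_indef_funs/1 emits two helper funs into the generated code:
+%% IndefEndBytes strips the two terminating zero octets from the remaining
+%% bytes when the length was indefinite, and IndefEndRb adds the matching 2
+%% to the byte count; both leave definite-length input untouched.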
+gen_dec_choice_indef_funs(Erules) ->
+ emit({indent(3),"IndefEndBytes = fun(indefinite,",indefend_match(Erules,used_var),
+ ")-> R; (_,B)-> B end,",nl}),
+ emit({indent(3),"IndefEndRb = fun(indefinite,",indefend_match(Erules,unused_var),
+ ")-> 2; (_,_)-> 0 end,",nl}).
+
+
+gen_dec_choice_cases(_,_, []) ->
+ ok;
+gen_dec_choice_cases(Erules,TopType, [H|T]) ->
+ asn1ct_name:push(rbCho),
+ Name = H#'ComponentType'.name,
+ emit([nl,"%% '",Name,"'",nl]),
+ Fcases = fun([T1,T2|Tail],Fun) ->
+ emit([indent(6),match_tag(Erules,T1)," ->",nl]),
+ gen_dec_choice_cases_type(Erules,TopType, H),
+ Fun([T2|Tail],Fun);
+ ([T1],_) ->
+ emit([indent(6),match_tag(Erules,T1)," ->",nl]),
+ gen_dec_choice_cases_type(Erules,TopType, H)
+ end,
+ Fcases(H#'ComponentType'.tags,Fcases),
+ asn1ct_name:pop(rbCho),
+ gen_dec_choice_cases(Erules,TopType, T).
+
+
+
+gen_dec_choice_cases_type(Erules,TopType,H) ->
+ Cname = H#'ComponentType'.name,
+ Type = H#'ComponentType'.typespec,
+ Prop = H#'ComponentType'.prop,
+ emit({indent(9),"{Dec, Rest, ",{curr,rbCho},"} = "}),
+ gen_dec_line(Erules,TopType,Cname,[],Type,Prop,false),
+ emit([",",nl,indent(9),"{{",{asis,Cname},
+ ", Dec}, IndefEndBytes(Len,Rest), RbExp + ",
+ {curr,rbCho}," + IndefEndRb(Len,Rest)};",nl,nl]).
+
+encode_tag_val(Erules,{Class,TagNo}) when integer(TagNo) ->
+ Rtmod = rtmod(Erules),
+ Rtmod:encode_tag_val({asn1ct_gen_ber:decode_class(Class),
+ 0,TagNo});
+encode_tag_val(Erules,{Class,TypeName}) ->
+ Rtmod = rtmod(Erules),
+ Rtmod:encode_tag_val({asn1ct_gen_ber:decode_class(Class),
+ 0,asn1ct_gen_ber:decode_type(TypeName)}).
+
+
+match_tag(ber_bin,Arg) ->
+ match_tag_with_bitsyntax(Arg);
+match_tag(Erules,Arg) ->
+ io_lib:format("~p",[encode_tag_val(Erules,Arg)]).
+
+match_tag_with_bitsyntax({Class,TagNo}) when integer(TagNo) ->
+ match_tag_with_bitsyntax1({asn1ct_gen_ber:decode_class(Class),
+ 0,TagNo});
+match_tag_with_bitsyntax({Class,TypeName}) ->
+ match_tag_with_bitsyntax1({asn1ct_gen_ber:decode_class(Class),
+ 0,asn1ct_gen_ber:decode_type(TypeName)}).
+
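+%% match_tag_with_bitsyntax1/1 returns the text of a binary pattern matching
+%% the identifier octet(s) of a BER tag; e.g. class value 16#80 (context) and
+%% tag number 5 give "<<2:2,_:1,5:5,_/binary>>" (class bits, form bit
+%% wildcarded, tag number). Numbers above 30 use the 31 escape followed by
+%% the base-128 octets produced by mk_object_val/1.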
+match_tag_with_bitsyntax1({Class, _Form, TagNo}) when (TagNo =< 30) ->
+ io_lib:format("<<~p:2,_:1,~p:5,_/binary>>",[Class bsr 6,TagNo]);
+
+match_tag_with_bitsyntax1({Class, _Form, TagNo}) ->
+ {Octets,Len} = mk_object_val(TagNo),
+ OctForm = case Len of
+ 1 -> "~p";
+ 2 -> "~p,~p";
+ 3 -> "~p,~p,~p";
+ 4 -> "~p,~p,~p,~p"
+ end,
+ io_lib:format("<<~p:2,_:1,31:5," ++ OctForm ++ ",_/binary>>",
+ [Class bsr 6] ++ Octets).
+
+%%%%%%%%%%%
+%% mk_object_val(Value) -> {OctetList, Len}
+%% returns Val as a list of octets; the 8th (high) bit is always set to one,
+%% except in the last octet, where it is 0
+%%
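+%% e.g. mk_object_val(5) -> {[5],1} and mk_object_val(201) -> {[129,73],2}
+%% (129 = 2#10000001 has the continuation bit set, 73 = 2#01001001 ends it)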
+
+
+mk_object_val(Val) when Val =< 127 ->
+ {[255 band Val], 1};
+mk_object_val(Val) ->
+ mk_object_val(Val bsr 7, [Val band 127], 1).
+mk_object_val(0, Ack, Len) ->
+ {Ack, Len};
+mk_object_val(Val, Ack, Len) ->
+ mk_object_val(Val bsr 7, [((Val band 127) bor 128) | Ack], Len + 1).
+
+
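+%% get_all_choice_tags/1 pairs every component name with its tag list,
+%% returning [{ComponentName,Tags}] in the original component order.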
+get_all_choice_tags(ComponentTypeList) ->
+ get_all_choice_tags(ComponentTypeList,[]).
+
+get_all_choice_tags([],TagList) ->
+ TagList;
+get_all_choice_tags([H|T],TagList) ->
+ Tags = H#'ComponentType'.tags,
+ get_all_choice_tags(T, TagList ++ [{H#'ComponentType'.name, Tags}]).
+
+
+
+%%---------------------------------------
+%% Generate the encode/decode code
+%%---------------------------------------
+
+gen_enc_line(Erules,TopType,Cname,
+ Type=#type{constraint=[{componentrelation,_,_}],
+ def=#'ObjectClassFieldType'{type={typefield,_}}},
+ Element,Indent,OptOrMand=mandatory,EncObj)
+ when list(Element) ->
+ asn1ct_name:new(tmpBytes),
+ gen_enc_line(Erules,TopType,Cname,Type,Element,Indent,OptOrMand,
+ ["{",{curr,tmpBytes},",_} = "],EncObj);
+gen_enc_line(Erules,TopType,Cname,Type,Element,Indent,OptOrMand,EncObj)
+ when list(Element) ->
+ gen_enc_line(Erules,TopType,Cname,Type,Element,Indent,OptOrMand,
+ ["{",{curr,encBytes},",",{curr,encLen},"} = "],EncObj).
+
+gen_enc_line(Erules,TopType,Cname,Type,Element,Indent,OptOrMand,Assign,EncObj)
+ when list(Element) ->
+ IndDeep = indent(Indent),
+
+ Tag = [X#tag{class=asn1ct_gen_ber:decode_class(X#tag.class)}
+ || X <- Type#type.tag],
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ WhatKind = asn1ct_gen:type(InnerType),
+ emit(IndDeep),
+ emit(Assign),
+ gen_optormand_case(OptOrMand,Erules,TopType,Cname,Type,InnerType,WhatKind,
+ Element),
+ case {Type,asn1ct_gen:get_constraint(Type#type.constraint,
+ componentrelation)} of
+% #type{constraint=[{tableconstraint_info,RefedFieldName}],
+% def={typefield,_}} ->
+ {#type{def=#'ObjectClassFieldType'{type={typefield,_},
+ fieldname=RefedFieldName}},
+ {componentrelation,_,_}} ->
+ {_LeadingAttrName,Fun} = EncObj,
+ case RefedFieldName of
+ {notype,T} ->
+ throw({error,{notype,type_from_object,T}});
+ {Name,RestFieldNames} when atom(Name) ->
+ case OptOrMand of
+ mandatory -> ok;
+ _ ->
+% emit(["{",{curr,tmpBytes},",",{curr,tmpLen},
+ emit(["{",{curr,tmpBytes},", _} = "])
+%% asn1ct_name:new(tmpBytes),
+%% asn1ct_name:new(tmpLen)
+ end,
+ emit({Fun,"(",{asis,Name},", ",Element,", [], ",
+ {asis,RestFieldNames},"),",nl}),
+ emit(IndDeep),
+ case OptOrMand of
+ mandatory ->
+ emit({"{",{curr,encBytes},", ",{curr,encLen},"} = "}),
+ emit({"?RT_BER:encode_open_type(",{curr,tmpBytes},
+ ",",{asis,Tag},")"});
+ _ ->
+% emit({"{",{next,tmpBytes},", _} = "}),
+ emit({"{",{next,tmpBytes},", ",{curr,tmpLen},
+ "} = "}),
+ emit({"?RT_BER:encode_open_type(",{curr,tmpBytes},
+ ",",{asis,Tag},"),",nl}),
+ emit(IndDeep),
+ emit({"{",{next,tmpBytes},", ",{curr,tmpLen},"}"})
+ end;
+ _ ->
+ throw({asn1,{'internal error'}})
+ end;
+% #type{constraint=[{tableconstraint_info,_}],
+% def={objectfield,PrimFieldName1,PFNList}} ->
+ {{#'ObjectClassFieldType'{type={objectfield,PrimFieldName1,
+ PFNList}},_},
+ {componentrelation,_,_}} ->
+ %% this is when the dotted list in the FieldName has more
+ %% than one element
+ {_LeadingAttrName,Fun} = EncObj,
+ emit({"?RT_BER:encode_open_type(",Fun,"(",{asis,PrimFieldName1},
+ ", ",Element,", ",{asis,PFNList},"),",{asis,Tag},")"});
+ _ ->
+ case WhatKind of
+ {primitive,bif} ->
+ EncType =
+ case Type#type.def of
+ #'ObjectClassFieldType'{
+ type={fixedtypevaluefield,
+ _,Btype}} ->
+ Btype;
+ _ ->
+ Type
+ end,
+ asn1ct_gen_ber:gen_encode_prim(ber,EncType,{asis,Tag},
+ Element);
+ {notype,_} ->
+ emit({"'enc_",InnerType,"'(",Element,", ",{asis,Tag},")"});
+ 'ASN1_OPEN_TYPE' ->
+ asn1ct_gen_ber:gen_encode_prim(ber,Type#type{def='ASN1_OPEN_TYPE'},{asis,Tag},Element);
+ _ ->
+ {EncFunName, _, _} =
+ mkfuncname(TopType,Cname,WhatKind,enc),
+ case {WhatKind,Type#type.tablecinf,EncObj} of
+ {{constructed,bif},[{objfun,_}|_R],{_,Fun}} ->
+ emit([EncFunName,"(",Element,", ",{asis,Tag},
+ ", ",Fun,")"]);
+ _ ->
+ emit([EncFunName,"(",Element,", ",{asis,Tag},")"])
+ end
+ end
+ end,
+ case OptOrMand of
+ mandatory -> true;
+ _ ->
+ emit({nl,indent(7),"end"})
+ end.
+
+
+
+gen_optormand_case(mandatory,_,_,_,_,_,_, _) ->
+ ok;
+gen_optormand_case('OPTIONAL',Erules,_,_,_,_,_,Element) ->
+ emit({" case ",Element," of",nl}),
+ emit({indent(9),"asn1_NOVALUE -> {",
+ empty_lb(Erules),",0};",nl}),
+ emit({indent(9),"_ ->",nl,indent(12)});
+gen_optormand_case({'DEFAULT',DefaultValue},Erules,TopType,Cname,Type,
+ InnerType,WhatKind,Element) ->
+ CurrMod = get(currmod),
+ case catch lists:member(der,get(encoding_options)) of
+ true ->
+ emit(" case catch "),
+ asn1ct_gen:gen_check_call(TopType,Cname,Type,InnerType,
+ WhatKind,{asis,DefaultValue},
+ Element),
+ emit({" of",nl}),
+ emit({indent(12),"true -> {[],0};",nl});
+ _ ->
+ emit({" case ",Element," of",nl}),
+ emit({indent(9),"asn1_DEFAULT -> {",
+ empty_lb(Erules),
+ ",0};",nl}),
+ case DefaultValue of
+ #'Externalvaluereference'{module=CurrMod,
+ value=V} ->
+ emit({indent(9),"?",{asis,V}," -> {",
+ empty_lb(Erules),",0};",nl});
+ _ ->
+ emit({indent(9),{asis,
+ DefaultValue}," -> {",
+ empty_lb(Erules),",0};",nl})
+ end
+ end,
+ emit({indent(9),"_ ->",nl,indent(12)}).
+
+
+
+
+gen_dec_line_sof(_Erules,TopType,Cname,Type,ObjFun) ->
+
+ Tag = [X#tag{class=asn1ct_gen_ber:decode_class(X#tag.class)}
+ || X <- Type#type.tag],
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ WhatKind = asn1ct_gen:type(InnerType),
+ case WhatKind of
+ {primitive,bif} ->
+ asn1ct_name:delete(len),
+
+ asn1ct_name:new(len),
+ emit(["fun(FBytes,_,_)->",nl]),
+ EncType = case Type#type.def of
+ #'ObjectClassFieldType'{
+ type={fixedtypevaluefield,
+ _,Btype}} ->
+ Btype;
+ _ ->
+ Type
+ end,
+ asn1ct_gen_ber:gen_dec_prim(ber,EncType,"FBytes",Tag,
+ [],no_length,?PRIMITIVE,
+ mandatory),
+ emit([nl,"end, []"]);
+ _ ->
+ case ObjFun of
+ [] ->
+ {DecFunName, _, _} =
+ mkfunname(TopType,Cname,WhatKind,dec,3),
+ emit([DecFunName,", ",{asis,Tag}]);
+ _ ->
+ {DecFunName, _, _} =
+ mkfunname(TopType,Cname,WhatKind,dec,4),
+ emit([DecFunName,", ",{asis,Tag},", ObjFun"])
+ end
+ end.
+
+
+gen_dec_line(Erules,TopType,Cname,CTags,Type,OptOrMand,DecObjInf) ->
+ BytesVar = asn1ct_gen:mk_var(asn1ct_name:curr(bytes)),
+ Tag = [X#tag{class=asn1ct_gen_ber:decode_class(X#tag.class)}
+ || X <- Type#type.tag],
+ InnerType =
+ case Type#type.def of
+ #'ObjectClassFieldType'{type=OCFTType} ->
+ OCFTType;
+ _ ->
+ asn1ct_gen:get_inner(Type#type.def)
+ end,
+ PostpDec =
+ case OptOrMand of
+ mandatory ->
+ gen_dec_call(InnerType,Erules,TopType,Cname,Type,
+ BytesVar,Tag,mandatory,", mandatory, ",
+ DecObjInf,OptOrMand);
+ _ -> %optional or default
+ case {CTags,Erules} of
+ {[CTag],ber_bin} ->
+ emit(["case ",{curr,bytes}," of",nl]),
+ emit([match_tag(Erules,CTag)," ->",nl]),
+ PostponedDec =
+ gen_dec_call(InnerType,Erules,TopType,Cname,Type,
+ BytesVar,Tag,mandatory,
+ ", opt_or_default, ",DecObjInf,
+ OptOrMand),
+ emit([";",nl]),
+ emit(["_ ->",nl]),
+ case OptOrMand of
+ {'DEFAULT', Def} ->
+ emit(["{",{asis,Def},",",
+ BytesVar,", 0 }",nl]);
+ 'OPTIONAL' ->
+ emit(["{ asn1_NOVALUE, ",
+ BytesVar,", 0 }",nl])
+ end,
+ emit("end"),
+ PostponedDec;
+ _ ->
+ emit("case (catch "),
+ PostponedDec =
+ gen_dec_call(InnerType,Erules,TopType,Cname,Type,
+ BytesVar,Tag,OptOrMand,
+ ", opt_or_default, ",DecObjInf,
+ OptOrMand),
+ emit([") of",nl]),
+ case OptOrMand of
+ {'DEFAULT', Def} ->
+ emit(["{'EXIT',{error,{asn1,{no_optional_tag,_}}}}",
+ " -> {",{asis,Def},",",
+ BytesVar,", 0 };",nl]);
+ 'OPTIONAL' ->
+ emit(["{'EXIT',{error,{asn1,{no_optional_tag,_}}}}",
+ " -> { asn1_NOVALUE, ",
+ BytesVar,", 0 };",nl])
+ end,
+ asn1ct_name:new(casetmp),
+ emit([{curr,casetmp},"-> ",{curr,casetmp},nl,"end"]),
+ PostponedDec
+ end
+ end,
+ case DecObjInf of
+	{Cname,ObjSet} -> % this must be the component where an object is
+	    %% chosen from the object set according to the table
+	    %% constraint.
+ {[{ObjSet,Cname,asn1ct_gen:mk_var(asn1ct_name:curr(term))}],
+ PostpDec};
+ _ -> {[],PostpDec}
+ end.
+
+
+gen_dec_call({typefield,_},Erules,_,_,Type,_,Tag,_,_,false,_) ->
+    %% this is the case of a choice with typefield components
+ asn1ct_name:new(reason),
+ {FirstPFName,RestPFName} =
+% asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info),
+ (Type#type.def)#'ObjectClassFieldType'.fieldname,
+ emit([nl,indent(6),"begin",nl]),
+ emit([indent(9),"{OpenDec,TmpRest,TmpRbCho} =",nl,indent(12),
+ "?RT_BER:decode_open_type(",Erules,",",{curr,bytes},",",
+ {asis,Tag},"),",nl]),
+ emit([indent(9),"case (catch ObjFun(",{asis,FirstPFName},
+ ", OpenDec, [], ",{asis,RestPFName},
+ ")) of", nl]),%% ??? What about Tag
+ emit([indent(12),"{'EXIT',",{curr,reason},"} ->",nl]),
+%% emit({indent(15),"throw({runtime_error,{'Type not ",
+%% "compatible with tableconstraint', OpenDec}});",nl}),
+ emit([indent(15),"exit({'Type not ",
+ "compatible with table constraint', ",{curr,reason},"});",nl]),
+ emit([indent(12),"{TmpDec,_ ,_} ->",nl]),
+ emit([indent(15),"{TmpDec, TmpRest, TmpRbCho}",nl]),
+ emit([indent(9),"end",nl,indent(6),"end",nl]),
+ [];
+gen_dec_call({typefield,_},_Erules,_,Cname,Type,_BytesVar,Tag,_,_,
+ _DecObjInf,OptOrMandComp) ->
+ emit(["?RT_BER:decode_open_type(",{curr,bytes},",",{asis,Tag},")"]),
+ RefedFieldName =
+ (Type#type.def)#'ObjectClassFieldType'.fieldname,
+% asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info),
+ [{Cname,RefedFieldName,
+ asn1ct_gen:mk_var(asn1ct_name:curr(term)),
+% asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),[],OptOrMandComp}];
+ asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),Tag,OptOrMandComp}];
+gen_dec_call({objectfield,PrimFieldName,PFNList},_Erules,_,Cname,_,_,Tag,_,_,_,
+ OptOrMandComp) ->
+ emit(["?RT_BER:decode_open_type(",{curr,bytes},",",{asis,Tag},")"]),
+ [{Cname,{PrimFieldName,PFNList},
+ asn1ct_gen:mk_var(asn1ct_name:curr(term)),
+% asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),[],OptOrMandComp}];
+ asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),Tag,OptOrMandComp}];
+gen_dec_call(InnerType,Erules,TopType,Cname,Type,BytesVar,Tag,PrimOptOrMand,
+ OptOrMand,DecObjInf,_) ->
+ WhatKind = asn1ct_gen:type(InnerType),
+ gen_dec_call1(WhatKind,InnerType,Erules,TopType,Cname,Type,BytesVar,Tag,
+ PrimOptOrMand,OptOrMand),
+ case DecObjInf of
+ {Cname,{_,OSet,UniqueFName,ValIndex}} ->
+ Term = asn1ct_gen:mk_var(asn1ct_name:curr(term)),
+ ValueMatch = value_match(ValIndex,Term),
+ emit({",",nl,"ObjFun = 'getdec_",OSet,"'(",
+% {asis,UniqueFName},", ",{curr,term},")"});
+ {asis,UniqueFName},", ",ValueMatch,")"});
+ _ ->
+ ok
+ end,
+ [].
+gen_dec_call1({primitive,bif},InnerType,Erules,_,_,Type,BytesVar,
+ Tag,OptOrMand,_) ->
+ case InnerType of
+ {fixedtypevaluefield,_,Btype} ->
+ asn1ct_gen_ber:gen_dec_prim(Erules,Btype,BytesVar,Tag,[],no_length,
+ ?PRIMITIVE,OptOrMand);
+ _ ->
+ asn1ct_gen_ber:gen_dec_prim(Erules,Type,BytesVar,Tag,[],no_length,
+ ?PRIMITIVE,OptOrMand)
+ end;
+gen_dec_call1('ASN1_OPEN_TYPE',_InnerType,Erules,_,_,Type,BytesVar,
+ Tag,OptOrMand,_) ->
+ asn1ct_gen_ber:gen_dec_prim(Erules,Type#type{def='ASN1_OPEN_TYPE'},
+ BytesVar,Tag,[],no_length,
+ ?PRIMITIVE,OptOrMand);
+gen_dec_call1(WhatKind,_,_Erules,TopType,Cname,Type,_,Tag,_,OptOrMand) ->
+ {DecFunName,_,_} =
+ mkfuncname(TopType,Cname,WhatKind,dec),
+ case {WhatKind,Type#type.tablecinf} of
+ {{constructed,bif},[{objfun,_}|_R]} ->
+ emit({DecFunName,"(",{curr,bytes},OptOrMand,{asis,Tag},", ObjFun)"});
+ _ ->
+ emit({DecFunName,"(",{curr,bytes},OptOrMand,{asis,Tag},")"})
+ end.
+
+
+%%------------------------------------------------------
+%% General and special help functions (not exported)
+%%------------------------------------------------------
+
+
+indent(N) ->
+ lists:duplicate(N,32). % 32 = space
+
+
+mkvlist([H,T1|T], Sep) -> % Sep is a string, e.g. ", " or " + "
+ emit([{var,H},Sep]),
+ mkvlist([T1|T], Sep);
+mkvlist([H|T], Sep) ->
+ emit([{var,H}]),
+ mkvlist(T, Sep);
+mkvlist([], _) ->
+ true.
+
+mkvlist(L) ->
+ mkvlist(L,", ").
+
+mkvplus(L) ->
+ mkvlist(L," + ").
+
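+%% extensible/1 returns noext for a plain component list and {ext,Pos,NumExt}
+%% for a {Root,ExtensionAdditions} pair, where Pos is the position of the
+%% first extension addition and NumExt the number of additions.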
+extensible(CompList) when list(CompList) ->
+ noext;
+extensible({RootList,ExtList}) ->
+ {ext,length(RootList)+1,length(ExtList)}.
+
+
+print_attribute_comment(InnerType,Pos,Prop) ->
+ CommentLine = "%%-------------------------------------------------",
+ emit([nl,CommentLine]),
+ case InnerType of
+ {typereference,_,Name} ->
+ emit([nl,"%% attribute number ",Pos," with type ",Name]);
+ {'Externaltypereference',_,XModule,Name} ->
+ emit([nl,"%% attribute number ",Pos," External ",XModule,":",Name]);
+ _ ->
+ emit([nl,"%% attribute number ",Pos," with type ",InnerType])
+ end,
+ case Prop of
+ mandatory ->
+ continue;
+ {'DEFAULT', Def} ->
+ emit([" DEFAULT = ",{asis,Def}]);
+ 'OPTIONAL' ->
+ emit([" OPTIONAL"])
+ end,
+ emit([nl,CommentLine,nl]).
+
+
+mkfuncname(TopType,Cname,WhatKind,DecOrEnc) ->
+ CurrMod = get(currmod),
+ case WhatKind of
+ #'Externaltypereference'{module=CurrMod,type=EType} ->
+ F = lists:concat(["'",DecOrEnc,"_",EType,"'"]),
+ {F, "?MODULE", F};
+ #'Externaltypereference'{module=Mod,type=EType} ->
+ {lists:concat(["'",Mod,"':'",DecOrEnc,"_",EType,"'"]),Mod,
+ lists:concat(["'",DecOrEnc,"_",EType,"'"])};
+ {constructed,bif} ->
+ F = lists:concat(["'",DecOrEnc,"_",asn1ct_gen:list2name([Cname|TopType]),"'"]),
+ {F, "?MODULE", F}
+ end.
+
+mkfunname(TopType,Cname,WhatKind,DecOrEnc,Arity) ->
+ CurrMod = get(currmod),
+ case WhatKind of
+ #'Externaltypereference'{module=CurrMod,type=EType} ->
+ F = lists:concat(["fun '",DecOrEnc,"_",EType,"'/",Arity]),
+ {F, "?MODULE", F};
+ #'Externaltypereference'{module=Mod,type=EType} ->
+ {lists:concat(["{'",Mod,"','",DecOrEnc,"_",EType,"'}"]),Mod,
+ lists:concat(["'",DecOrEnc,"_",EType,"'"])};
+ {constructed,bif} ->
+ F =
+ lists:concat(["fun '",DecOrEnc,"_",
+ asn1ct_gen:list2name([Cname|TopType]),"'/",
+ Arity]),
+ {F, "?MODULE", F}
+ end.
+
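+%% empty_lb/1 is the literal used for an empty byte container in the
+%% generated code: "[]" for the ber back-end, "<<>>" for ber_bin.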
+empty_lb(ber) ->
+ "[]";
+empty_lb(ber_bin) ->
+ "<<>>".
+
+rtmod(ber) ->
+ list_to_atom(?RT_BER);
+rtmod(ber_bin) ->
+ list_to_atom(?RT_BER_BIN).
+
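+%% indefend_match/2 is the pattern text that recognises the two zero octets
+%% terminating an indefinite-length encoding; the used_var/unused_var
+%% variants differ only in whether the bound tail (R or _R) is used later.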
+indefend_match(ber,used_var) ->
+ "[0,0|R]";
+indefend_match(ber,unused_var) ->
+ "[0,0|_R]";
+indefend_match(ber_bin,used_var) ->
+ "<<0,0,R/binary>>";
+indefend_match(ber_bin,unused_var) ->
+ "<<0,0,_R/binary>>".
+
+notice_value_match() ->
+ Module = get(currmod),
+ put(value_match,{true,Module}).
+
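+%% value_match/2 wraps Value in nested element/2 calls according to the
+%% value index list, e.g. value_match([{2,a},{3,b}],"Val") yields
+%% "element(2,element(3,Val))".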
+value_match(Index,Value) when atom(Value) ->
+ value_match(Index,atom_to_list(Value));
+value_match([],Value) ->
+ Value;
+value_match([{VI,_Cname}|VIs],Value) ->
+ value_match1(Value,VIs,lists:concat(["element(",VI,","]),1).
+value_match1(Value,[],Acc,Depth) ->
+ Acc ++ Value ++ lists:concat(lists:duplicate(Depth,")"));
+value_match1(Value,[{VI,_Cname}|VIs],Acc,Depth) ->
+ value_match1(Value,VIs,Acc++lists:concat(["element(",VI,","]),Depth+1).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl
new file mode 100644
index 0000000000..991240731e
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl
@@ -0,0 +1,1357 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_constructed_ber_bin_v2.erl,v 1.1 2008/12/17 09:53:29 mikpe Exp $
+%%
+-module(asn1ct_constructed_ber_bin_v2).
+
+-export([gen_encode_sequence/3]).
+-export([gen_decode_sequence/3]).
+-export([gen_encode_set/3]).
+-export([gen_decode_set/3]).
+-export([gen_encode_sof/4]).
+-export([gen_decode_sof/4]).
+-export([gen_encode_choice/3]).
+-export([gen_decode_choice/3]).
+
+
+-include("asn1_records.hrl").
+
+-import(asn1ct_gen, [emit/1,demit/1]).
+-import(asn1ct_constructed_ber,[match_tag/2]).
+
+-define(ASN1CT_GEN_BER,asn1ct_gen_ber_bin_v2).
+
+% the encoding of the tag class in bits 8 and 7
+-define(UNIVERSAL, 0).
+-define(APPLICATION, 16#40).
+-define(CONTEXT, 16#80).
+-define(PRIVATE, 16#C0).
+
+% primitive or constructed encoding % bit 6
+-define(PRIMITIVE, 0).
+-define(CONSTRUCTED, 2#00100000).
+
+
+
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Encode/decode SEQUENCE (and SET)
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+gen_encode_sequence(Erules,Typename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ asn1ct_name:new(term),
+ asn1ct_name:new(bytes),
+
+ %% if EXTERNAL type the input value must be transformed to
+ %% ASN1 1990 format
+ ValName =
+ case Typename of
+ ['EXTERNAL'] ->
+ emit([indent(4),
+ "NewVal = asn1rt_check:transform_to_EXTERNAL1990(Val),",
+ nl]),
+ "NewVal";
+ _ ->
+ "Val"
+ end,
+
+ {SeqOrSet,TableConsInfo,CompList} =
+ case D#type.def of
+ #'SEQUENCE'{tablecinf=TCI,components=CL} ->
+ {'SEQUENCE',TCI,CL};
+ #'SET'{tablecinf=TCI,components=CL} ->
+ {'SET',TCI,CL}
+ end,
+ Ext = extensible(CompList),
+ CompList1 = case CompList of
+ {Rl,El} -> Rl ++ El;
+ _ -> CompList
+ end,
+
+%% don't match the record name for now, for compatibility reasons
+%% emit(["{'",asn1ct_gen:list2rname(Typename),"'"]),
+ emit(["{_"]),
+ case length(CompList1) of
+ 0 ->
+ true;
+ CompListLen ->
+ emit([","]),
+ mkcindexlist([Tc || Tc <- lists:seq(1,CompListLen)])
+ end,
+ emit(["} = ",ValName,",",nl]),
+ EncObj =
+ case TableConsInfo of
+ #simpletableattributes{usedclassfield=Used,
+ uniqueclassfield=Unique} when Used /= Unique ->
+ false;
+ %% ObjectSet, name of the object set in constraints
+ %%
+ #simpletableattributes{objectsetname=ObjectSet,
+ c_name=AttrN,
+ c_index=N,
+ usedclassfield=UniqueFieldName,
+ uniqueclassfield=UniqueFieldName,
+ valueindex=ValueIndex} -> %% N is index of attribute that determines constraint
+ OSDef =
+ case ObjectSet of
+ {Module,OSName} ->
+ asn1_db:dbget(Module,OSName);
+ OSName ->
+ asn1_db:dbget(get(currmod),OSName)
+ end,
+% io:format("currmod: ~p~nOSName: ~p~nAttrN: ~p~nN: ~p~nUniqueFieldName: ~p~n",
+% [get(currmod),OSName,AttrN,N,UniqueFieldName]),
+ case (OSDef#typedef.typespec)#'ObjectSet'.gen of
+ true ->
+ ObjectEncode =
+ asn1ct_gen:un_hyphen_var(lists:concat(['Obj',
+ AttrN])),
+ emit([ObjectEncode," = ",nl]),
+ emit([" 'getenc_",ObjectSet,"'(",{asis,UniqueFieldName},
+ ", ",nl]),
+ ValueMatch = value_match(ValueIndex,
+ lists:concat(["Cindex",N])),
+ emit([indent(35),ValueMatch,"),",nl]),
+ {AttrN,ObjectEncode};
+ _ ->
+ false
+ end;
+ _ ->
+ case D#type.tablecinf of
+ [{objfun,_}|_] ->
+ %% when the simpletableattributes was at an outer
+ %% level and the objfun has been passed through the
+ %% function call
+ {"got objfun through args","ObjFun"};
+ _ ->
+ false
+ end
+ end,
+
+ gen_enc_sequence_call(Erules,Typename,CompList1,1,Ext,EncObj),
+
+ emit([nl," BytesSoFar = "]),
+ case SeqOrSet of
+ 'SET' when (D#type.def)#'SET'.sorted == dynamic ->
+ emit("?RT_BER:dynamicsort_SET_components(["),
+ mkvlist(asn1ct_name:all(encBytes)),
+ emit(["]),",nl]);
+ _ ->
+ emit("["),
+ mkvlist(asn1ct_name:all(encBytes)),
+ emit(["],",nl])
+ end,
+ emit("LenSoFar = "),
+ case asn1ct_name:all(encLen) of
+ [] -> emit("0");
+ AllLengths ->
+ mkvplus(AllLengths)
+ end,
+ emit([",",nl]),
+ emit(["?RT_BER:encode_tags(TagIn, BytesSoFar, LenSoFar)."
+ ,nl]).
+
+gen_decode_sequence(Erules,Typename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ asn1ct_name:new(tag),
+ #'SEQUENCE'{tablecinf=TableConsInfo,components=CList} = D#type.def,
+ Ext = extensible(CList),
+ CompList = case CList of
+ {Rl,El} -> Rl ++ El;
+ _ -> CList
+ end,
+
+ emit([" %%-------------------------------------------------",nl]),
+ emit([" %% decode tag and length ",nl]),
+ emit([" %%-------------------------------------------------",nl]),
+
+ asn1ct_name:new(tlv),
+ case CompList of
+ EmptyCL when EmptyCL == [];EmptyCL == {[],[]}-> % empty sequence
+ true;
+ _ ->
+ emit([{curr,tlv}," = "])
+ end,
+ emit(["?RT_BER:match_tags(",{prev,tlv},",TagIn), ",nl]),
+ asn1ct_name:new(tlv),
+ asn1ct_name:new(v),
+
+ {DecObjInf,UniqueFName,ValueIndex} =
+ case TableConsInfo of
+ #simpletableattributes{objectsetname=ObjectSet,
+ c_name=AttrN,
+ usedclassfield=UniqueFieldName,
+ uniqueclassfield=UniqueFieldName,
+ valueindex=ValIndex} ->
+% {ObjectSet,AttrN,_N,UniqueFieldName} ->%% N is index of attribute that determines constraint
+ F = fun(#'ComponentType'{typespec=CT})->
+ case {CT#type.constraint,CT#type.tablecinf} of
+ {[],[{objfun,_}|_]} -> true;
+ _ -> false
+ end
+ end,
+ case lists:any(F,CompList) of
+		true -> % when a component relation constraint establishes a
+		    %% relation from one component to another component's
+		    %% subtype component
+ {{AttrN,{deep,ObjectSet,UniqueFieldName,ValIndex}},
+ UniqueFieldName,ValIndex};
+ false ->
+ {{AttrN,ObjectSet},UniqueFieldName,ValIndex}
+ end;
+ _ ->
+% case D#type.tablecinf of
+% [{objfun,_}|_] ->
+% {{"got objfun through args","ObjFun"},false,false};
+% _ ->
+ {false,false,false}
+% end
+ end,
+ case gen_dec_sequence_call(Erules,Typename,CompList,Ext,DecObjInf) of
+ no_terms -> % an empty sequence
+ emit([nl,nl]),
+ demit(["Result = "]), %dbg
+ %% return value as record
+ asn1ct_name:new(rb),
+ emit([" {'",asn1ct_gen:list2rname(Typename),"'}.",nl,nl]);
+ {LeadingAttrTerm,PostponedDecArgs} ->
+ emit([com,nl,nl]),
+ case {LeadingAttrTerm,PostponedDecArgs} of
+ {[],[]} ->
+ ok;
+ {_,[]} ->
+ ok;
+ {[{ObjSet,LeadingAttr,Term}],PostponedDecArgs} ->
+ DecObj = asn1ct_gen:un_hyphen_var(lists:concat(['DecObj',LeadingAttr,Term])),
+ ValueMatch = value_match(ValueIndex,Term),
+ emit([DecObj," =",nl," 'getdec_",ObjSet,"'(",
+ {asis,UniqueFName},", ",ValueMatch,"),",nl]),
+ gen_dec_postponed_decs(DecObj,PostponedDecArgs)
+ end,
+ demit(["Result = "]), %dbg
+ %% return value as record
+ case Ext of
+ {ext,_,_} ->
+ emit(["case ",{prev,tlv}," of [] -> true; _ -> true end, % ... extra fields skipped",nl]);
+ noext ->
+ emit(["case ",{prev,tlv}," of",nl,
+ "[] -> true;",
+ "_ -> exit({error,{asn1, {unexpected,",{prev,tlv},
+ "}}}) % extra fields not allowed",nl,
+ "end,",nl])
+ end,
+ asn1ct_name:new(rb),
+ case Typename of
+ ['EXTERNAL'] ->
+ emit([" OldFormat={'",asn1ct_gen:list2rname(Typename),
+ "', "]),
+ mkvlist(asn1ct_name:all(term)),
+ emit(["},",nl]),
+ emit([" asn1rt_check:transform_to_EXTERNAL1994",
+ "(OldFormat).",nl]);
+ _ ->
+ emit([" {'",asn1ct_gen:list2rname(Typename),"', "]),
+ mkvlist(asn1ct_name:all(term)),
+ emit(["}.",nl,nl])
+ end
+ end.
+
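+%% gen_dec_postponed_decs/2 emits, for each component whose open type is
+%% governed by a table constraint, the code that decodes the temporarily
+%% stored value with the object's decode fun once the leading attribute is
+%% known; a failure exits with 'Type not compatible with table constraint'.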
+gen_dec_postponed_decs(_,[]) ->
+ emit(nl);
+gen_dec_postponed_decs(DecObj,[{_Cname,{FirstPFN,PFNList},Term,
+ TmpTerm,_Tag,OptOrMand}|Rest]) ->
+
+ asn1ct_name:new(tmpterm),
+ asn1ct_name:new(reason),
+ asn1ct_name:new(tmptlv),
+
+ emit([Term," = ",nl]),
+ N = case OptOrMand of
+ mandatory -> 0;
+ 'OPTIONAL' ->
+ emit_opt_or_mand_check(asn1_NOVALUE,TmpTerm),
+ 6;
+ {'DEFAULT',Val} ->
+ emit_opt_or_mand_check(Val,TmpTerm),
+ 6
+ end,
+ emit([indent(N+3),"case (catch ",DecObj,"(",{asis,FirstPFN},
+ ", ",TmpTerm,", ",{asis,PFNList},")) of",nl]),
+ emit([indent(N+6),"{'EXIT', ",{curr,reason},"} ->",nl]),
+ emit([indent(N+9),"exit({'Type not compatible with table constraint',",
+ {curr,reason},"});",nl]),
+ emit([indent(N+6),{curr,tmpterm}," ->",nl]),
+ emit([indent(N+9),{curr,tmpterm},nl]),
+
+ case OptOrMand of
+ mandatory -> emit([indent(N+3),"end,",nl]);
+ _ ->
+ emit([indent(N+3),"end",nl,
+ indent(3),"end,",nl])
+ end,
+ gen_dec_postponed_decs(DecObj,Rest).
+
+emit_opt_or_mand_check(Value,TmpTerm) ->
+ emit([indent(3),"case ",TmpTerm," of",nl,
+ indent(6),{asis,Value}," ->",{asis,Value},";",nl,
+ indent(6),"_ ->",nl]).
+
+%%============================================================================
+%% Encode/decode SET
+%%
+%%============================================================================
+
+gen_encode_set(Erules,Typename,D) when record(D,type) ->
+ gen_encode_sequence(Erules,Typename,D).
+
+gen_decode_set(Erules,Typename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ asn1ct_name:new(term),
+ asn1ct_name:new(tag),
+ #'SET'{tablecinf=TableConsInfo,components=TCompList} = D#type.def,
+ Ext = extensible(TCompList),
+ CompList = case TCompList of
+ {Rl,El} -> Rl ++ El;
+ _ -> TCompList
+ end,
+
+ asn1ct_name:clear(),
+ asn1ct_name:new(tlv),
+ case CompList of
+	EmptyCL when EmptyCL == [];EmptyCL == {[],[]}-> % empty set
+ true;
+ _ ->
+ emit([{curr,tlv}," = "])
+ end,
+ emit(["?RT_BER:match_tags(",{prev,tlv},",TagIn), ",nl]),
+ asn1ct_name:new(v),
+
+
+ {DecObjInf,UniqueFName} =
+ case TableConsInfo of
+ {ObjectSet,AttrN,_N,UniqueFieldName} ->%% N is index of attribute that determines constraint
+ F = fun(#'ComponentType'{typespec=CT})->
+ case {CT#type.constraint,CT#type.tablecinf} of
+ {[],[{objfun,_}|_]} -> true;
+ _ -> false
+ end
+ end,
+ case lists:any(F,CompList) of
+		true -> % when a component relation constraint establishes a
+		    %% relation from one component to another component's
+		    %% subtype component
+ {{AttrN,{deep,ObjectSet,UniqueFieldName}},
+ UniqueFieldName};
+ false ->
+ {{AttrN,ObjectSet},UniqueFieldName}
+ end;
+ _ ->
+ {false,false}
+ end,
+
+ case CompList of
+ [] -> % empty set
+ true;
+ _ ->
+ emit(["SetFun = fun(FunTlv) ->", nl]),
+ emit(["case FunTlv of ",nl]),
+ NextNum = gen_dec_set_cases(Erules,Typename,CompList,1),
+ emit([indent(6), {curr,else}," -> ",nl,
+ indent(9),"{",NextNum,", ",{curr,else},"}",nl]),
+ emit([indent(3),"end",nl]),
+ emit([indent(3),"end,",nl]),
+
+ emit(["PositionList = [SetFun(TempTlv)|| TempTlv <- ",{curr,tlv},"],",nl]),
+ asn1ct_name:new(tlv),
+ emit([{curr,tlv}," = [Stlv || {_,Stlv} <- lists:sort(PositionList)],",nl]),
+ asn1ct_name:new(tlv)
+
+ end,
+ case gen_dec_sequence_call(Erules,Typename,CompList,Ext,DecObjInf) of
+	no_terms -> % an empty set
+ emit([nl,nl]),
+ demit(["Result = "]), %dbg
+ %% return value as record
+ emit([" {'",asn1ct_gen:list2rname(Typename),"'}.",nl]);
+ {LeadingAttrTerm,PostponedDecArgs} ->
+ emit([com,nl,nl]),
+ case {LeadingAttrTerm,PostponedDecArgs} of
+ {[],[]} ->
+ ok;
+ {_,[]} ->
+ ok;
+ {[{ObjSet,LeadingAttr,Term}],PostponedDecArgs} ->
+ DecObj = lists:concat(['DecObj',LeadingAttr,Term]),
+ emit([DecObj," =",nl," 'getdec_",ObjSet,"'(",
+ {asis,UniqueFName},", ",Term,"),",nl]),
+ gen_dec_postponed_decs(DecObj,PostponedDecArgs)
+ end,
+ demit(["Result = "]), %dbg
+ %% return value as record
+ case Ext of
+ {ext,_,_} ->
+ emit(["case ",{prev,tlv}," of [] -> true; _ -> true end, % ... extra fields skipped",nl]);
+ noext ->
+ emit(["case ",{prev,tlv}," of",nl,
+ "[] -> true;",
+ "_ -> exit({error,{asn1, {unexpected,",{prev,tlv},
+ "}}}) % extra fields not allowed",nl,
+ "end,",nl])
+ end,
+ emit([" {'",asn1ct_gen:list2rname(Typename),"', "]),
+ mkvlist(asn1ct_name:all(term)),
+ emit(["}.",nl])
+ end.
+
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Encode/decode SEQUENCE OF and SET OF
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+gen_encode_sof(Erules,Typename,_InnerTypename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ {SeqOrSetOf, Cont} = D#type.def,
+
+ Objfun = case D#type.tablecinf of
+ [{objfun,_}|_R] ->
+ ", ObjFun";
+ _ ->
+ ""
+ end,
+
+ emit([" {EncBytes,EncLen} = 'enc_",asn1ct_gen:list2name(Typename),
+ "_components'(Val",Objfun,",[],0),",nl]),
+
+ emit([" ?RT_BER:encode_tags(TagIn, EncBytes, EncLen).",nl,nl]),
+
+ gen_encode_sof_components(Erules,Typename,SeqOrSetOf,Cont).
+
+
+gen_decode_sof(Erules,TypeName,_InnerTypeName,D) when record(D,type) ->
+ asn1ct_name:start(),
+ {SeqOrSetOf, _TypeTag, Cont} =
+ case D#type.def of
+ {'SET OF',_Cont} -> {'SET OF','SET',_Cont};
+ {'SEQUENCE OF',_Cont} -> {'SEQUENCE OF','SEQUENCE',_Cont}
+ end,
+ TypeNameSuffix = asn1ct_gen:constructed_suffix(SeqOrSetOf,Cont#type.def),
+
+ emit([" %%-------------------------------------------------",nl]),
+ emit([" %% decode tag and length ",nl]),
+ emit([" %%-------------------------------------------------",nl]),
+
+ asn1ct_name:new(tlv),
+ emit([{curr,tlv},
+ " = ?RT_BER:match_tags(",{prev,tlv},",TagIn), ",nl]),
+ asn1ct_name:new(v),
+
+ emit(["["]),
+
+ InnerType = asn1ct_gen:get_inner(Cont#type.def),
+ ContName = case asn1ct_gen:type(InnerType) of
+ Atom when atom(Atom) -> Atom;
+ _ -> TypeNameSuffix
+ end,
+%% fix me
+ ObjFun =
+ case D#type.tablecinf of
+ [{objfun,_}|_R] ->
+ ", ObjFun";
+ _ ->
+ []
+ end,
+ gen_dec_line(Erules,TypeName,ContName,[],Cont,mandatory,ObjFun),
+ %% gen_dec_line_sof(Erules,Typename,ContName,Cont,ObjFun),
+ emit([" || ",{curr,v}," <- ",{curr,tlv},"].",nl,nl,nl]).
+
+
+gen_encode_sof_components(Erules,Typename,SeqOrSetOf,Cont)
+ when record(Cont,type)->
+
+ {Objfun,Objfun_novar,EncObj} =
+ case Cont#type.tablecinf of
+ [{objfun,_}|_R] ->
+ {", ObjFun",", _",{no_attr,"ObjFun"}};
+ _ ->
+ {"","",false}
+ end,
+ emit(["'enc_",asn1ct_gen:list2name(Typename),
+ "_components'([]",Objfun_novar,", AccBytes, AccLen) -> ",nl]),
+
+ case catch lists:member(der,get(encoding_options)) of
+ true ->
+ emit([indent(3),
+ "{?RT_BER:dynamicsort_SETOF(AccBytes),AccLen};",nl,nl]);
+ _ ->
+ emit([indent(3),"{lists:reverse(AccBytes),AccLen};",nl,nl])
+ end,
+ emit(["'enc_",asn1ct_gen:list2name(Typename),
+ "_components'([H|T]",Objfun,",AccBytes, AccLen) ->",nl]),
+ TypeNameSuffix = asn1ct_gen:constructed_suffix(SeqOrSetOf,Cont#type.def),
+ gen_enc_line(Erules,Typename,TypeNameSuffix,Cont,"H",3,
+ mandatory,"{EncBytes,EncLen} = ",EncObj),
+ emit([",",nl]),
+ emit([indent(3),"'enc_",asn1ct_gen:list2name(Typename),
+ "_components'(T",Objfun,","]),
+ emit(["[EncBytes|AccBytes], AccLen + EncLen).",nl,nl]).
+
+%%============================================================================
+%% Encode/decode CHOICE
+%%
+%%============================================================================
+
+gen_encode_choice(Erules,Typename,D) when record(D,type) ->
+ ChoiceTag = D#type.tag,
+ {'CHOICE',CompList} = D#type.def,
+ Ext = extensible(CompList),
+ CompList1 = case CompList of
+ {Rl,El} -> Rl ++ El;
+ _ -> CompList
+ end,
+ gen_enc_choice(Erules,Typename,ChoiceTag,CompList1,Ext),
+ emit([nl,nl]).
+
+gen_decode_choice(Erules,Typename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ asn1ct_name:new(bytes),
+ ChoiceTag = D#type.tag,
+ {'CHOICE',CompList} = D#type.def,
+ Ext = extensible(CompList),
+ CompList1 = case CompList of
+ {Rl,El} -> Rl ++ El;
+ _ -> CompList
+ end,
+ gen_dec_choice(Erules,Typename,ChoiceTag,CompList1,Ext),
+ emit([".",nl]).
+
+
+%%============================================================================
+%% Encode SEQUENCE
+%%
+%%============================================================================
+
+gen_enc_sequence_call(Erules,TopType,[#'ComponentType'{name=Cname,typespec=Type,prop=Prop}|Rest],Pos,Ext,EncObj) ->
+ asn1ct_name:new(encBytes),
+ asn1ct_name:new(encLen),
+ Element =
+ case TopType of
+ ['EXTERNAL'] ->
+ io_lib:format("Cindex~w",[Pos]);
+ _ ->
+ io_lib:format("Cindex~w",[Pos])
+ end,
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ print_attribute_comment(InnerType,Pos,Cname,Prop),
+ gen_enc_line(Erules,TopType,Cname,Type,Element,3,Prop,EncObj),
+ emit([com,nl]),
+ gen_enc_sequence_call(Erules,TopType,Rest,Pos+1,Ext,EncObj);
+
+gen_enc_sequence_call(_Erules,_TopType,[],_Num,_,_) ->
+ true.
+
+%%============================================================================
+%% Decode SEQUENCE
+%%
+%%============================================================================
+
+gen_dec_sequence_call(Erules,TopType,CompList,Ext,DecObjInf) ->
+ gen_dec_sequence_call1(Erules,TopType, CompList, 1, Ext,DecObjInf,[],[]).
+
+
+gen_dec_sequence_call1(Erules,TopType,[#'ComponentType'{name=Cname,typespec=Type,prop=Prop,tags=Tags}|Rest],Num,Ext,DecObjInf,LeadingAttrAcc,ArgsAcc) ->
+ {LA,PostponedDec} =
+ gen_dec_component(Erules,TopType,Cname,Tags,Type,Num,Prop,
+ Ext,DecObjInf),
+ case Rest of
+ [] ->
+ {LA ++ LeadingAttrAcc,PostponedDec ++ ArgsAcc};
+ _ ->
+ emit([com,nl]),
+ asn1ct_name:new(bytes),
+ gen_dec_sequence_call1(Erules,TopType,Rest,Num+1,Ext,DecObjInf,
+ LA++LeadingAttrAcc,PostponedDec++ArgsAcc)
+ end;
+
+gen_dec_sequence_call1(_Erules,_TopType,[],1,_,_,_,_) ->
+ no_terms.
+
+
+%%----------------------------
+%%SEQUENCE mandatory
+%%----------------------------
+
+gen_dec_component(Erules,TopType,Cname,CTags,Type,Pos,Prop,Ext,DecObjInf) ->
+ InnerType =
+ case Type#type.def of
+ #'ObjectClassFieldType'{type=OCFTType} -> OCFTType;
+ _ -> asn1ct_gen:get_inner(Type#type.def)
+ end,
+% case asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info) of
+% no ->
+% asn1ct_gen:get_inner(Type#type.def);
+% _ ->
+% Type#type.def
+% end,
+ Prop1 = case {Prop,Ext} of
+ {mandatory,{ext,Epos,_}} when Pos >= Epos ->
+ 'OPTIONAL';
+ _ ->
+ Prop
+ end,
+ print_attribute_comment(InnerType,Pos,Cname,Prop1),
+ asn1ct_name:new(term),
+ emit_term_tlv(Prop1,InnerType,DecObjInf),
+ asn1ct_name:new(rb),
+ PostponedDec =
+ gen_dec_line(Erules,TopType,Cname,CTags,Type,Prop1,DecObjInf),
+ asn1ct_name:new(v),
+ asn1ct_name:new(tlv),
+ asn1ct_name:new(form),
+ PostponedDec.
+
+
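+%% emit_term_tlv/3 emits the left-hand side of the binding for the next
+%% component: OPTIONAL/DEFAULT components get a {Term,Tlv} result from their
+%% decode call, mandatory ones match the head of the TLV list, and type or
+%% object fields bind a temporary term that the postponed decode resolves.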
+emit_term_tlv({'DEFAULT',_},InnerType,DecObjInf) ->
+ emit_term_tlv(opt_or_def,InnerType,DecObjInf);
+emit_term_tlv('OPTIONAL',InnerType,DecObjInf) ->
+ emit_term_tlv(opt_or_def,InnerType,DecObjInf);
+emit_term_tlv(Prop,{typefield,_},DecObjInf) ->
+ emit_term_tlv(Prop,type_or_object_field,DecObjInf);
+emit_term_tlv(Prop,{objectfield,_,_},DecObjInf) ->
+ emit_term_tlv(Prop,type_or_object_field,DecObjInf);
+emit_term_tlv(opt_or_def,type_or_object_field,_) ->
+ asn1ct_name:new(tmpterm),
+ emit(["{",{curr,tmpterm},",",{curr,tlv},"} = "]);
+emit_term_tlv(opt_or_def,_,_) ->
+ emit(["{",{curr,term},",",{curr,tlv},"} = "]);
+emit_term_tlv(_,type_or_object_field,false) ->
+ emit(["[",{curr,v},"|",{curr,tlv},"] = ",{prev,tlv},", ",nl,
+ {curr,term}," = "]);
+emit_term_tlv(_,type_or_object_field,_) ->
+ asn1ct_name:new(tmpterm),
+ emit(["[",{curr,v},"|",{curr,tlv},"] = ",{prev,tlv},", ",nl]),
+ emit([nl," ",{curr,tmpterm}," = "]);
+emit_term_tlv(mandatory,_,_) ->
+ emit(["[",{curr,v},"|",{curr,tlv},"] = ",{prev,tlv},", ",nl,
+ {curr,term}," = "]).
+
+
+gen_dec_set_cases(_Erules,_TopType,[],Pos) ->
+ Pos;
+gen_dec_set_cases(Erules,TopType,[Comp|RestComps],Pos) ->
+ Name = Comp#'ComponentType'.name,
+ Type = Comp#'ComponentType'.typespec,
+ CTags = Comp#'ComponentType'.tags,
+
+ emit([indent(6),"%",Name,nl]),
+ Tags = case Type#type.tag of
+ [] -> % this is a choice without explicit tag
+ [(?ASN1CT_GEN_BER:decode_class(T1class) bsl 10) + T1number||
+ {T1class,T1number} <- CTags];
+ [FirstTag|_] ->
+ [(?ASN1CT_GEN_BER:decode_class(FirstTag#tag.class) bsl 10) + FirstTag#tag.number]
+ end,
+% emit([indent(6),"%Tags: ",Tags,nl]),
+% emit([indent(6),"%Type#type.tag: ",Type#type.tag,nl]),
+ CaseFun = fun(TagList=[H|T],Fun,N) ->
+ Semicolon = case TagList of
+ [_Tag1,_|_] -> [";",nl];
+ _ -> ""
+ end,
+ emit(["TTlv = {",H,",_} ->",nl]),
+ emit([indent(4),"{",Pos,", TTlv}",Semicolon]),
+ Fun(T,Fun,N+1);
+ ([],_,0) ->
+ true;
+ ([],_,_) ->
+ emit([";",nl])
+ end,
+ CaseFun(Tags,CaseFun,0),
+%% emit([";",nl]),
+ gen_dec_set_cases(Erules,TopType,RestComps,Pos+1).
+
+
+
+%%---------------------------------------------
+%% Encode CHOICE
+%%---------------------------------------------
+%% for BER we currently do care (a little) whether the choice has an EXTENSIONMARKER
+
+
+gen_enc_choice(Erules,TopType,Tag,CompList,_Ext) ->
+ gen_enc_choice1(Erules,TopType,Tag,CompList,_Ext).
+
+gen_enc_choice1(Erules,TopType,_Tag,CompList,_Ext) ->
+ asn1ct_name:clear(),
+ emit([" {EncBytes,EncLen} = case element(1,Val) of",nl]),
+ gen_enc_choice2(Erules,TopType,CompList),
+ emit([nl," end,",nl,nl]),
+
+ emit(["?RT_BER:encode_tags(TagIn, EncBytes, EncLen).",nl]).
+
+
+gen_enc_choice2(Erules,TopType,[H1|T]) when record(H1,'ComponentType') ->
+ Cname = H1#'ComponentType'.name,
+ Type = H1#'ComponentType'.typespec,
+ emit([" ",{asis,Cname}," ->",nl]),
+ {Encobj,Assign} =
+ case {Type#type.def,asn1ct_gen:get_constraint(Type#type.constraint,
+ componentrelation)} of
+ {#'ObjectClassFieldType'{},{componentrelation,_,_}} ->
+ asn1ct_name:new(tmpBytes),
+ asn1ct_name:new(encBytes),
+ asn1ct_name:new(encLen),
+ Emit = ["{",{curr,tmpBytes},", _} = "],
+ {{no_attr,"ObjFun"},Emit};
+ _ ->
+ {false,[]}
+ end,
+% case asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info) of
+% no ->
+% {false,[]};
+% _ ->
+% asn1ct_name:new(tmpBytes),
+% asn1ct_name:new(encBytes),
+% asn1ct_name:new(encLen),
+% Emit = ["{",{curr,tmpBytes},", _} = "],
+% {{no_attr,"ObjFun"},Emit}
+% end,
+ gen_enc_line(Erules,TopType,Cname,Type,"element(2,Val)",9,
+ mandatory,Assign,Encobj),
+ case Encobj of
+ false -> ok;
+ _ ->
+ emit([",",nl,indent(9),"{",{curr,encBytes},", ",
+ {curr,encLen},"}"])
+ end,
+ emit([";",nl]),
+ case T of
+ [] ->
+ emit([indent(6), "Else -> ",nl,
+ indent(9),"exit({error,{asn1,{invalid_choice_type,Else}}})"]);
+ _ ->
+ true
+ end,
+ gen_enc_choice2(Erules,TopType,T);
+
+gen_enc_choice2(_Erules,_TopType,[]) ->
+ true.
+
+
+
+
+%%--------------------------------------------
+%% Decode CHOICE
+%%--------------------------------------------
+
+gen_dec_choice(Erules,TopType, _ChTag, CompList, Ext) ->
+ asn1ct_name:clear(),
+ asn1ct_name:new(tlv),
+ emit([{curr,tlv},
+ " = ?RT_BER:match_tags(",{prev,tlv},",TagIn), ",nl]),
+ asn1ct_name:new(tlv),
+ asn1ct_name:new(v),
+ emit(["case (case ",{prev,tlv},
+ " of [Ctemp",{prev,tlv},"] -> Ctemp",{prev,tlv},
+ "; _ -> ",{prev,tlv}," end)"," of",nl]),
+ asn1ct_name:new(tagList),
+ asn1ct_name:new(choTags),
+ asn1ct_name:new(res),
+ gen_dec_choice_cases(Erules,TopType,CompList),
+ emit([indent(6), {curr,else}," -> ",nl]),
+ case Ext of
+ noext ->
+ emit([indent(9),"exit({error,{asn1,{invalid_choice_tag,",
+ {curr,else},"}}})",nl]);
+ _ ->
+ emit([indent(9),"{asn1_ExtAlt, ?RT_BER:encode(",{curr,else},")}",nl])
+ end,
+ emit([indent(3),"end",nl]),
+ asn1ct_name:new(tag),
+ asn1ct_name:new(else).
+
+
+gen_dec_choice_cases(_Erules,_TopType, []) ->
+ ok;
+gen_dec_choice_cases(Erules,TopType, [H|T]) ->
+ Cname = H#'ComponentType'.name,
+ Type = H#'ComponentType'.typespec,
+ Prop = H#'ComponentType'.prop,
+ Tags = Type#type.tag,
+ Fcases = fun([{T1class,T1number}|Tail],Fun) ->
+ emit([indent(4),{curr,v}," = {",
+ (?ASN1CT_GEN_BER:decode_class(T1class) bsl 10) +
+ T1number,",_} -> ",nl]),
+ emit([indent(8),"{",{asis,Cname},", "]),
+ gen_dec_line(Erules,TopType,Cname,[],Type,Prop,false),
+ emit(["};",nl,nl]),
+ Fun(Tail,Fun);
+ ([],_) ->
+ ok
+ end,
+ emit([nl,"%% '",Cname,"'",nl]),
+ case {Tags,asn1ct:get_gen_state_field(namelist)} of
+ {[],_} -> % choice without explicit tags
+ Fcases(H#'ComponentType'.tags,Fcases);
+ {[FirstT|_RestT],[{Cname,undecoded}|Names]} ->
+ DecTag=(?ASN1CT_GEN_BER:decode_class(FirstT#tag.class) bsl 10) +
+ FirstT#tag.number,
+ asn1ct:add_generated_refed_func({[Cname|TopType],undecoded,
+ [DecTag],Type}),
+ asn1ct:update_gen_state(namelist,Names),
+ emit([indent(4),{curr,res}," = ",
+ match_tag(ber_bin,{FirstT#tag.class,FirstT#tag.number}),
+ " -> ",nl]),
+ emit([indent(8),"{",{asis,Cname},", {'",
+ asn1ct_gen:list2name([Cname|TopType]),"',",
+ {curr,res},"}};",nl,nl]);
+ {[FirstT|RestT],_} ->
+ emit([indent(4),"{",
+ (?ASN1CT_GEN_BER:decode_class(FirstT#tag.class) bsl 10) +
+ FirstT#tag.number,", ",{curr,v},"} -> ",nl]),
+ emit([indent(8),"{",{asis,Cname},", "]),
+ gen_dec_line(Erules,TopType,Cname,[],Type#type{tag=RestT},Prop,false),
+ emit(["};",nl,nl])
+ end,
+ gen_dec_choice_cases(Erules,TopType, T).
+
+
+
+%%---------------------------------------
+%% Generate the encode/decode code
+%%---------------------------------------
+
+gen_enc_line(Erules,TopType,Cname,
+ Type=#type{constraint=[{componentrelation,_,_}],
+ def=#'ObjectClassFieldType'{type={typefield,_}}},
+ Element,Indent,OptOrMand=mandatory,EncObj)
+ when list(Element) ->
+ asn1ct_name:new(tmpBytes),
+ gen_enc_line(Erules,TopType,Cname,Type,Element,Indent,OptOrMand,
+ ["{",{curr,tmpBytes},",_} = "],EncObj);
+gen_enc_line(Erules,TopType,Cname,Type,Element,Indent,OptOrMand,EncObj)
+ when list(Element) ->
+ gen_enc_line(Erules,TopType,Cname,Type,Element,Indent,OptOrMand,
+ ["{",{curr,encBytes},",",{curr,encLen},"} = "],EncObj).
+
+gen_enc_line(Erules,TopType,Cname,Type,Element,Indent,OptOrMand,Assign,EncObj)
+ when list(Element) ->
+ IndDeep = indent(Indent),
+ Tag = lists:reverse([?ASN1CT_GEN_BER:encode_tag_val(
+ ?ASN1CT_GEN_BER:decode_class(X#tag.class),
+ X#tag.form,
+ X#tag.number)
+ || X <- Type#type.tag]),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ WhatKind = asn1ct_gen:type(InnerType),
+ emit(IndDeep),
+ emit(Assign),
+ gen_optormand_case(OptOrMand,Erules,TopType,Cname,Type,InnerType,WhatKind,
+ Element),
+ case {Type,asn1ct_gen:get_constraint(Type#type.constraint,
+ componentrelation)} of
+% #type{constraint=[{tableconstraint_info,RefedFieldName}],
+% def={typefield,_}} ->
+ {#type{def=#'ObjectClassFieldType'{type={typefield,_},
+ fieldname=RefedFieldName}},
+ {componentrelation,_,_}} ->
+ {_LeadingAttrName,Fun} = EncObj,
+ case RefedFieldName of
+ {notype,T} ->
+ throw({error,{notype,type_from_object,T}});
+ {Name,RestFieldNames} when atom(Name) ->
+ case OptOrMand of
+ mandatory -> ok;
+ _ ->
+% emit(["{",{curr,tmpBytes},",",{curr,tmpLen},
+ emit(["{",{curr,tmpBytes},",_ } = "])
+% "} = "])
+ end,
+ emit([Fun,"(",{asis,Name},", ",Element,", ",
+ {asis,RestFieldNames},"),",nl]),
+ emit(IndDeep),
+ case OptOrMand of
+ mandatory ->
+ emit(["{",{curr,encBytes},",",{curr,encLen},
+ "} = "]),
+ emit(["?RT_BER:encode_open_type(",{curr,tmpBytes},
+ ",",{asis,Tag},")"]);
+ _ ->
+% emit(["{",{next,tmpBytes},", _} = "]),
+ emit(["{",{next,tmpBytes},",",{curr,tmpLen},
+ "} = "]),
+ emit(["?RT_BER:encode_open_type(",{curr,tmpBytes},
+ ",",{asis,Tag},"),",nl]),
+ emit(IndDeep),
+ emit(["{",{next,tmpBytes},", ",{curr,tmpLen},"}"])
+ end;
+ _ ->
+ throw({asn1,{'internal error'}})
+ end;
+ {{#'ObjectClassFieldType'{type={objectfield,PrimFieldName1,
+ PFNList}},_},
+ {componentrelation,_,_}} ->
+ %% this is when the dotted list in the FieldName has more
+ %% than one element
+ {_LeadingAttrName,Fun} = EncObj,
+ emit(["?RT_BER:encode_open_type(",Fun,"(",{asis,PrimFieldName1},
+ ", ",Element,", ",{asis,PFNList},"))"]);
+ _ ->
+ case WhatKind of
+ {primitive,bif} ->
+ EncType =
+ case Type#type.def of
+ #'ObjectClassFieldType'{type={fixedtypevaluefield,_,Btype}} ->
+ Btype;
+ _ ->
+ Type
+ end,
+ ?ASN1CT_GEN_BER:gen_encode_prim(ber,EncType,{asis,Tag},
+ Element);
+ {notype,_} ->
+ emit(["'enc_",InnerType,"'(",Element,", ",{asis,Tag},")"]);
+ 'ASN1_OPEN_TYPE' ->
+ case Type#type.def of
+ #'ObjectClassFieldType'{} -> %Open Type
+ ?ASN1CT_GEN_BER:gen_encode_prim(ber,#type{def='ASN1_OPEN_TYPE'},{asis,Tag},Element);
+ _ ->
+ ?ASN1CT_GEN_BER:gen_encode_prim(ber,Type,
+ {asis,Tag},
+ Element)
+ end;
+ _ ->
+ {EncFunName, _EncMod, _EncFun} =
+ mkfuncname(TopType,Cname,WhatKind,"enc_"),
+ case {WhatKind,Type#type.tablecinf,EncObj} of
+ {{constructed,bif},[{objfun,_}|_R],{_,Fun}} ->
+ emit([EncFunName,"(",Element,", ",{asis,Tag},
+ ", ",Fun,")"]);
+ _ ->
+ emit([EncFunName,"(",Element,", ",{asis,Tag},")"])
+ end
+ end
+ end,
+ case OptOrMand of
+ mandatory -> true;
+ _ ->
+ emit([nl,indent(7),"end"])
+ end.
+
+gen_optormand_case(mandatory,_Erules,_TopType,_Cname,_Type,_InnerType,_WhatKind,
+ _Element) ->
+ ok;
+gen_optormand_case('OPTIONAL',Erules,_TopType,_Cname,_Type,_InnerType,_WhatKind,
+ Element) ->
+ emit([" case ",Element," of",nl]),
+ emit([indent(9),"asn1_NOVALUE -> {",
+ empty_lb(Erules),",0};",nl]),
+ emit([indent(9),"_ ->",nl,indent(12)]);
+gen_optormand_case({'DEFAULT',DefaultValue},Erules,TopType,Cname,Type,
+ InnerType,WhatKind,Element) ->
+ CurrMod = get(currmod),
+ case catch lists:member(der,get(encoding_options)) of
+ true ->
+ emit(" case catch "),
+ asn1ct_gen:gen_check_call(TopType,Cname,Type,InnerType,
+ WhatKind,{asis,DefaultValue},
+ Element),
+ emit([" of",nl]),
+ emit([indent(12),"true -> {[],0};",nl]);
+ _ ->
+ emit([" case ",Element," of",nl]),
+ emit([indent(9),"asn1_DEFAULT -> {",
+ empty_lb(Erules),
+ ",0};",nl]),
+ case DefaultValue of
+ #'Externalvaluereference'{module=CurrMod,
+ value=V} ->
+ emit([indent(9),"?",{asis,V}," -> {",
+ empty_lb(Erules),",0};",nl]);
+ _ ->
+ emit([indent(9),{asis,
+ DefaultValue}," -> {",
+ empty_lb(Erules),",0};",nl])
+ end
+ end,
+ emit([indent(9),"_ ->",nl,indent(12)]).
+
+
+
+gen_dec_line(Erules,TopType,Cname,CTags,Type,OptOrMand,DecObjInf) ->
+ BytesVar = asn1ct_gen:mk_var(asn1ct_name:curr(v)),
+ Tag =
+ [(?ASN1CT_GEN_BER:decode_class(X#tag.class) bsl 10) + X#tag.number ||
+ X <- Type#type.tag],
+ ChoiceTags =
+ [(?ASN1CT_GEN_BER:decode_class(Class) bsl 10) + Number||
+ {Class,Number} <- CTags],
+ InnerType =
+ case Type#type.def of
+ #'ObjectClassFieldType'{type=OCFTType} ->
+ OCFTType;
+ _ ->
+ asn1ct_gen:get_inner(Type#type.def)
+ end,
+ PostpDec =
+ case OptOrMand of
+ mandatory ->
+ gen_dec_call(InnerType,Erules,TopType,Cname,Type,
+ BytesVar,Tag,
+ mandatory,", mandatory, ",DecObjInf,OptOrMand);
+ _ -> %optional or default or a mandatory component after an extensionmark
+ {FirstTag,RestTag} =
+ case Tag of
+ [] ->
+ {ChoiceTags,[]};
+ [Ft|Rt] ->
+ {Ft,Rt}
+ end,
+ emit(["case ",{prev,tlv}," of",nl]),
+ PostponedDec =
+ case Tag of
+ [] when length(ChoiceTags) > 0 -> % a choice without explicit tag
+ Fcases =
+ fun(FirstTag1) ->
+ emit(["[",{curr,v}," = {",{asis,FirstTag1},
+ ",_}|Temp",
+ {curr,tlv},
+ "] ->",nl]),
+ emit([indent(4),"{"]),
+ Pdec=
+ gen_dec_call(InnerType,Erules,
+ TopType,Cname,Type,
+ BytesVar,RestTag,
+ mandatory,
+ ", mandatory, ",
+ DecObjInf,OptOrMand),
+
+ emit([", Temp",{curr,tlv},"}"]),
+ emit([";",nl]),
+ Pdec
+ end,
+ hd([Fcases(TmpTag)|| TmpTag <- FirstTag]);
+
+ [] -> % an open type without explicit tag
+ emit(["[",{curr,v},"|Temp",{curr,tlv},"] ->",nl]),
+ emit([indent(4),"{"]),
+ Pdec=
+ gen_dec_call(InnerType,Erules,TopType,Cname,
+ Type,BytesVar,RestTag,mandatory,
+ ", mandatory, ",DecObjInf,
+ OptOrMand),
+
+ emit([", Temp",{curr,tlv},"}"]),
+ emit([";",nl]),
+ Pdec;
+
+ _ ->
+ emit(["[{",{asis,FirstTag},
+ ",",{curr,v},"}|Temp",
+ {curr,tlv},
+ "] ->",nl]),
+ emit([indent(4),"{"]),
+ Pdec=
+ gen_dec_call(InnerType,Erules,TopType,Cname,
+ Type,BytesVar,RestTag,mandatory,
+ ", mandatory, ",DecObjInf,
+ OptOrMand),
+
+ emit([", Temp",{curr,tlv},"}"]),
+ emit([";",nl]),
+ Pdec
+ end,
+
+ emit([indent(4),"_ ->",nl]),
+ case OptOrMand of
+ {'DEFAULT', Def} ->
+ emit([indent(8),"{",{asis,Def},",",{prev,tlv},"}",nl]);
+ 'OPTIONAL' ->
+ emit([indent(8),"{ asn1_NOVALUE, ",{prev,tlv},"}",nl])
+ end,
+ emit(["end"]),
+ PostponedDec
+ end,
+ case DecObjInf of
+	{Cname,ObjSet} -> % this must be the component where an object is
+			  %% chosen from the object set according to the table
+			  %% constraint.
+ {[{ObjSet,Cname,asn1ct_gen:mk_var(asn1ct_name:curr(term))}],
+ PostpDec};
+ _ -> {[],PostpDec}
+ end.
+
+gen_dec_call({typefield,_},_,_,_Cname,Type,BytesVar,Tag,_,_,false,_) ->
+    %% this is the case of a choice with typefield components
+ asn1ct_name:new(reason),
+ asn1ct_name:new(opendec),
+ asn1ct_name:new(tmpterm),
+ asn1ct_name:new(tmptlv),
+
+ {FirstPFName,RestPFName} =
+% asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info),
+ (Type#type.def)#'ObjectClassFieldType'.fieldname,
+ emit([nl,indent(6),"begin",nl]),
+% emit([indent(9),{curr,opendec}," = ?RT_BER:decode_open_type(",
+ emit([indent(9),{curr,tmptlv}," = ?RT_BER:decode_open_type(",
+ BytesVar,",",{asis,Tag},"),",nl]),
+% emit([indent(9),"{",{curr,tmptlv},",_} = ?RT_BER:decode(",
+% {curr,opendec},"),",nl]),
+
+ emit([indent(9),"case (catch ObjFun(",{asis,FirstPFName},
+ ", ",{curr,tmptlv},", ",{asis,RestPFName},
+ ")) of", nl]),%% ??? What about Tag
+ emit([indent(12),"{'EXIT',",{curr,reason},"} ->",nl]),
+ emit([indent(15),"exit({'Type not ",
+ "compatible with table constraint', ",{curr,reason},"});",nl]),
+ emit([indent(12),{curr,tmpterm}," ->",nl]),
+ emit([indent(15),{curr,tmpterm},nl]),
+ emit([indent(9),"end",nl,indent(6),"end",nl]),
+ [];
+gen_dec_call({typefield,_},_,_,Cname,Type,BytesVar,Tag,_,_,_DecObjInf,OptOrMandComp) ->
+ emit(["?RT_BER:decode_open_type(",BytesVar,",",{asis,Tag},")"]),
+ RefedFieldName =
+% asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info),
+ (Type#type.def)#'ObjectClassFieldType'.fieldname,
+ [{Cname,RefedFieldName,asn1ct_gen:mk_var(asn1ct_name:curr(term)),
+ asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),Tag,OptOrMandComp}];
+gen_dec_call({objectfield,PrimFieldName,PFNList},_,_,Cname,_,BytesVar,Tag,_,_,_,OptOrMandComp) ->
+ emit(["?RT_BER:decode_open_type(",BytesVar,",",{asis,Tag},")"]),
+ [{Cname,{PrimFieldName,PFNList},asn1ct_gen:mk_var(asn1ct_name:curr(term)),
+ asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),Tag,OptOrMandComp}];
+gen_dec_call(InnerType,Erules,TopType,Cname,Type,BytesVar,Tag,PrimOptOrMand,
+ OptOrMand,DecObjInf,_) ->
+ WhatKind = asn1ct_gen:type(InnerType),
+ gen_dec_call1(WhatKind,InnerType,Erules,TopType,Cname,Type,BytesVar,Tag,
+ PrimOptOrMand,OptOrMand),
+ case DecObjInf of
+ {Cname,{_,OSet,UniqueFName,ValIndex}} ->
+ Term = asn1ct_gen:mk_var(asn1ct_name:curr(term)),
+ ValueMatch = value_match(ValIndex,Term),
+ emit([",",nl,"ObjFun = 'getdec_",OSet,"'(",
+% {asis,UniqueFName},", ",{curr,term},")"]);
+ {asis,UniqueFName},", ",ValueMatch,")"]);
+ _ ->
+ ok
+ end,
+ [].
+gen_dec_call1({primitive,bif},InnerType,Erules,TopType,Cname,Type,BytesVar,
+ Tag,OptOrMand,_) ->
+ case {asn1ct:get_gen_state_field(namelist),InnerType} of
+ {[{Cname,undecoded}|Rest],_} ->
+ asn1ct:add_generated_refed_func({[Cname|TopType],undecoded,
+ Tag,Type}),
+ asn1ct:update_gen_state(namelist,Rest),
+% emit(["?RT_BER:match_tags(",BytesVar,",",{asis,Tag},")"]);
+ emit(["{'",asn1ct_gen:list2name([Cname|TopType]),"',",
+ BytesVar,"}"]);
+ {_,{fixedtypevaluefield,_,Btype}} ->
+ ?ASN1CT_GEN_BER:gen_dec_prim(Erules,Btype,BytesVar,Tag,[],
+ ?PRIMITIVE,OptOrMand);
+ _ ->
+ ?ASN1CT_GEN_BER:gen_dec_prim(Erules,Type,BytesVar,Tag,[],
+ ?PRIMITIVE,OptOrMand)
+ end;
+gen_dec_call1('ASN1_OPEN_TYPE',_InnerType,Erules,TopType,Cname,Type,BytesVar,
+ Tag,OptOrMand,_) ->
+ case {asn1ct:get_gen_state_field(namelist),Type#type.def} of
+ {[{Cname,undecoded}|Rest],_} ->
+ asn1ct:add_generated_refed_func({[Cname|TopType],undecoded,
+ Tag,Type}),
+ asn1ct:update_gen_state(namelist,Rest),
+ emit(["{'",asn1ct_gen:list2name([Cname|TopType]),"',",
+ BytesVar,"}"]);
+% emit(["?RT_BER:match_tags(",BytesVar,",",{asis,Tag},")"]);
+ {_,#'ObjectClassFieldType'{type=OpenType}} ->
+ ?ASN1CT_GEN_BER:gen_dec_prim(Erules,#type{def=OpenType},
+ BytesVar,Tag,[],
+ ?PRIMITIVE,OptOrMand);
+ _ ->
+ ?ASN1CT_GEN_BER:gen_dec_prim(Erules,Type,BytesVar,Tag,[],
+ ?PRIMITIVE,OptOrMand)
+ end;
+gen_dec_call1(WhatKind,_,_Erules,TopType,Cname,Type,BytesVar,
+ Tag,_,_OptOrMand) ->
+ case asn1ct:get_gen_state_field(namelist) of
+ [{Cname,undecoded}|Rest] ->
+ asn1ct:add_generated_refed_func({[Cname|TopType],undecoded,
+ Tag,Type}),
+ asn1ct:update_gen_state(namelist,Rest),
+ emit(["{'",asn1ct_gen:list2name([Cname|TopType]),"',",
+ BytesVar,"}"]);
+ _ ->
+% {DecFunName, _DecMod, _DecFun} =
+% case {asn1ct:get_gen_state_field(namelist),WhatKind} of
+ EmitDecFunCall =
+ fun(FuncName) ->
+ case {WhatKind,Type#type.tablecinf} of
+ {{constructed,bif},[{objfun,_}|_Rest]} ->
+ emit([FuncName,"(",BytesVar,", ",{asis,Tag},
+ ", ObjFun)"]);
+ _ ->
+ emit([FuncName,"(",BytesVar,", ",{asis,Tag},")"])
+ end
+ end,
+ case asn1ct:get_gen_state_field(namelist) of
+ [{Cname,List}|Rest] when list(List) ->
+ case WhatKind of
+ #'Externaltypereference'{} ->
+ %%io:format("gen_dec_call1 1:~n~p~n~n",[WhatKind]),
+ asn1ct:add_tobe_refed_func({WhatKind,List});
+ _ ->
+ %%io:format("gen_dec_call1 2:~n~p~n~n",[[Cname|TopType]]),
+ asn1ct:add_tobe_refed_func({[Cname|TopType],
+ List})
+ end,
+ asn1ct:update_gen_state(namelist,Rest),
+ Prefix=asn1ct:get_gen_state_field(prefix),
+ {DecFunName,_,_}=
+ mkfuncname(TopType,Cname,WhatKind,Prefix),
+ EmitDecFunCall(DecFunName);
+ [{Cname,parts}|Rest] ->
+ asn1ct:update_gen_state(namelist,Rest),
+ asn1ct:get_gen_state_field(prefix),
+ %% This is to prepare SEQUENCE OF value in
+ %% partial incomplete decode for a later
+			  %% part-decode, i.e. skip the tag.
+ asn1ct:add_generated_refed_func({[Cname|TopType],
+ parts,
+ [],Type}),
+ emit(["{'",asn1ct_gen:list2name([Cname|TopType]),"',"]),
+ EmitDecFunCall("?RT_BER:match_tags"),
+ emit("}");
+ _ ->
+ {DecFunName,_,_}=
+ mkfuncname(TopType,Cname,WhatKind,"dec_"),
+ EmitDecFunCall(DecFunName)
+ end
+% case {WhatKind,Type#type.tablecinf} of
+% {{constructed,bif},[{objfun,_}|_Rest]} ->
+% emit([DecFunName,"(",BytesVar,", ",{asis,Tag},
+% ", ObjFun)"]);
+% _ ->
+% emit([DecFunName,"(",BytesVar,", ",{asis,Tag},")"])
+% end
+ end.
+
+
+%%------------------------------------------------------
+%% General and special help functions (not exported)
+%%------------------------------------------------------
+
+
+indent(N) ->
+ lists:duplicate(N,32). % 32 = space
+
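+%% mkcindexlist emits each element N of the list as "CindexN", separated by
+%% Sep, e.g. (illustrative) mkcindexlist([2,3,4]) emits Cindex2, Cindex3, Cindex4.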
+mkcindexlist([H,T1|T], Sep) -> % Sep is a string e.g ", " or "+ "
+ emit(["Cindex",H,Sep]),
+ mkcindexlist([T1|T], Sep);
+mkcindexlist([H|T], Sep) ->
+ emit(["Cindex",H]),
+ mkcindexlist(T, Sep);
+mkcindexlist([], _) ->
+ true.
+
+mkcindexlist(L) ->
+ mkcindexlist(L,", ").
+
+
+mkvlist([H,T1|T], Sep) -> % Sep is a string e.g ", " or "+ "
+ emit([{var,H},Sep]),
+ mkvlist([T1|T], Sep);
+mkvlist([H|T], Sep) ->
+ emit([{var,H}]),
+ mkvlist(T, Sep);
+mkvlist([], _) ->
+ true.
+
+mkvlist(L) ->
+ mkvlist(L,", ").
+
+mkvplus(L) ->
+ mkvlist(L," + ").
+
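+%% extensible/1: a plain component list means no extension marker (noext);
+%% {RootList,ExtList} yields {ext,Pos,NumExt} where Pos = length(RootList)+1 is
+%% the position of the first extension addition and NumExt is the number of
+%% extension additions.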
+extensible(CompList) when list(CompList) ->
+ noext;
+extensible({RootList,ExtList}) ->
+ {ext,length(RootList)+1,length(ExtList)}.
+
+
+print_attribute_comment(InnerType,Pos,Cname,Prop) ->
+ CommentLine = "%%-------------------------------------------------",
+ emit([nl,CommentLine]),
+ case InnerType of
+ {typereference,_,Name} ->
+ emit([nl,"%% attribute ",Cname,"(",Pos,") with type ",Name]);
+ {'Externaltypereference',_,XModule,Name} ->
+ emit([nl,"%% attribute ",Cname,"(",Pos,") External ",XModule,":",Name]);
+ _ ->
+ emit([nl,"%% attribute ",Cname,"(",Pos,") with type ",InnerType])
+ end,
+ case Prop of
+ mandatory ->
+ continue;
+ {'DEFAULT', Def} ->
+ emit([" DEFAULT = ",{asis,Def}]);
+ 'OPTIONAL' ->
+ emit([" OPTIONAL"])
+ end,
+ emit([nl,CommentLine,nl]).
+
+
+
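+%% mkfuncname(TopType,Cname,WhatKind,Prefix) returns {CallName,Module,FuncName}
+%% for the referenced type, e.g. (illustrative) "'enc_MyType'" for a type in the
+%% current module or "'OtherMod':'enc_MyType'" for an external type reference.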
+mkfuncname(TopType,Cname,WhatKind,Prefix) ->
+ CurrMod = get(currmod),
+ case WhatKind of
+ #'Externaltypereference'{module=CurrMod,type=EType} ->
+ F = lists:concat(["'",Prefix,EType,"'"]),
+ {F, "?MODULE", F};
+ #'Externaltypereference'{module=Mod,type=EType} ->
+ {lists:concat(["'",Mod,"':'",Prefix,EType,"'"]),Mod,
+ lists:concat(["'",Prefix,EType,"'"])};
+ {constructed,bif} ->
+ F = lists:concat(["'",Prefix,asn1ct_gen:list2name([Cname|TopType]),"'"]),
+ {F, "?MODULE", F}
+ end.
+
+empty_lb(ber) ->
+ "[]";
+empty_lb(ber_bin) ->
+ "<<>>";
+empty_lb(ber_bin_v2) ->
+ "<<>>".
+
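+%% value_match(ValueIndex,Value) builds a nested element/2 access path as a
+%% string, e.g. (illustrative) value_match([{2,a},{3,b}],"Val") gives
+%% "element(2,element(3,Val))".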
+value_match(Index,Value) when atom(Value) ->
+ value_match(Index,atom_to_list(Value));
+value_match([],Value) ->
+ Value;
+value_match([{VI,_}|VIs],Value) ->
+ value_match1(Value,VIs,lists:concat(["element(",VI,","]),1).
+value_match1(Value,[],Acc,Depth) ->
+ Acc ++ Value ++ lists:concat(lists:duplicate(Depth,")"));
+value_match1(Value,[{VI,_}|VIs],Acc,Depth) ->
+ value_match1(Value,VIs,Acc++lists:concat(["element(",VI,","]),Depth+1).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_per.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_per.erl
new file mode 100644
index 0000000000..a21c38f8a8
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_per.erl
@@ -0,0 +1,1234 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_constructed_per.erl,v 1.1 2008/12/17 09:53:29 mikpe Exp $
+%%
+-module(asn1ct_constructed_per).
+
+-export([gen_encode_sequence/3]).
+-export([gen_decode_sequence/3]).
+-export([gen_encode_set/3]).
+-export([gen_decode_set/3]).
+-export([gen_encode_sof/4]).
+-export([gen_decode_sof/4]).
+-export([gen_encode_choice/3]).
+-export([gen_decode_choice/3]).
+
+-include("asn1_records.hrl").
+%-compile(export_all).
+
+-import(asn1ct_gen, [emit/1,demit/1]).
+
+
+%% ENCODE GENERATOR FOR SEQUENCE TYPE ** **********
+
+
+gen_encode_set(Erules,TypeName,D) ->
+ gen_encode_constructed(Erules,TypeName,D).
+
+gen_encode_sequence(Erules,TypeName,D) ->
+ gen_encode_constructed(Erules,TypeName,D).
+
+gen_encode_constructed(Erules,Typename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ asn1ct_name:new(term),
+ asn1ct_name:new(bytes),
+ {CompList,TableConsInfo} =
+ case D#type.def of
+ #'SEQUENCE'{tablecinf=TCI,components=CL} ->
+ {CL,TCI};
+ #'SET'{tablecinf=TCI,components=CL} ->
+ {CL,TCI}
+ end,
+ case Typename of
+ ['EXTERNAL'] ->
+ emit({{var,asn1ct_name:next(val)},
+ " = asn1rt_check:transform_to_EXTERNAL1990(",
+ {var,asn1ct_name:curr(val)},"),",nl}),
+ asn1ct_name:new(val);
+ _ ->
+ ok
+ end,
+ case {Optionals = optionals(CompList),CompList} of
+ {[],EmptyCL} when EmptyCL == {[],[]};EmptyCL == [] ->
+ emit(["%%Variable setting just to eliminate ",
+ "compiler warning for unused vars!",nl,
+ "_Val = ",{var,asn1ct_name:curr(val)},",",nl]);
+ {[],_} ->
+ emit([{var,asn1ct_name:next(val)}," = ?RT_PER:list_to_record("]),
+ emit(["'",asn1ct_gen:list2rname(Typename),"'"]),
+ emit([", ",{var,asn1ct_name:curr(val)},"),",nl]);
+ _ ->
+ Fixoptcall =
+ case Erules of
+ per -> ",Opt} = ?RT_PER:fixoptionals2(";
+ _ -> ",Opt} = ?RT_PER:fixoptionals("
+ end,
+ emit({"{",{var,asn1ct_name:next(val)},Fixoptcall,
+ {asis,Optionals},",",length(Optionals),
+ ",",{var,asn1ct_name:curr(val)},"),",nl})
+ end,
+ asn1ct_name:new(val),
+ Ext = extensible(CompList),
+ case Ext of
+ {ext,_,NumExt} when NumExt > 0 ->
+ emit(["Extensions = ?RT_PER:fixextensions(",{asis,Ext},
+ ", ",{curr,val},"),",nl]);
+ _ -> true
+ end,
+ EncObj =
+ case TableConsInfo of
+ #simpletableattributes{usedclassfield=Used,
+ uniqueclassfield=Unique} when Used /= Unique ->
+ false;
+ %% ObjectSet, name of the object set in constraints
+ %%
+ %%{ObjectSet,AttrN,N,UniqueFieldName} -> %% N is index of attribute that determines constraint
+ #simpletableattributes{objectsetname=ObjectSet,
+ c_name=AttrN,
+ c_index=N,
+ usedclassfield=UniqueFieldName,
+ uniqueclassfield=UniqueFieldName,
+ valueindex=ValueIndex
+ } -> %% N is index of attribute that determines constraint
+ OSDef =
+ case ObjectSet of
+ {Module,OSName} ->
+ asn1_db:dbget(Module,OSName);
+ OSName ->
+ asn1_db:dbget(get(currmod),OSName)
+ end,
+ case (OSDef#typedef.typespec)#'ObjectSet'.gen of
+ true ->
+ ObjectEncode =
+ asn1ct_gen:un_hyphen_var(lists:concat(['Obj',AttrN])),
+ emit([ObjectEncode," = ",nl]),
+ emit([" 'getenc_",ObjectSet,"'(",
+ {asis,UniqueFieldName},", ",nl]),
+ El = make_element(N+1,asn1ct_gen:mk_var(asn1ct_name:curr(val)),AttrN),
+ Indent = 12 + length(atom_to_list(ObjectSet)),
+ case ValueIndex of
+ [] ->
+ emit([indent(Indent),El,"),",nl]);
+ _ ->
+ emit([indent(Indent),"value_match(",
+ {asis,ValueIndex},",",El,")),",nl]),
+ notice_value_match()
+ end,
+ {AttrN,ObjectEncode};
+ _ ->
+ false
+ end;
+ _ ->
+ case D#type.tablecinf of
+ [{objfun,_}|_] ->
+ %% when the simpletableattributes was at an outer
+ %% level and the objfun has been passed through the
+ %% function call
+ {"got objfun through args","ObjFun"};
+ _ ->
+ false
+ end
+ end,
+ emit({"[",nl}),
+ MaybeComma1 =
+ case Ext of
+ {ext,_Pos,NumExt2} when NumExt2 > 0 ->
+ emit({"?RT_PER:setext(Extensions =/= [])"}),
+ ", ";
+ {ext,_Pos,_} ->
+ emit({"?RT_PER:setext(false)"}),
+ ", ";
+ _ ->
+ ""
+ end,
+ MaybeComma2 =
+ case optionals(CompList) of
+ [] -> MaybeComma1;
+ _ ->
+ emit(MaybeComma1),
+ emit("Opt"),
+ {",",nl}
+ end,
+ gen_enc_components_call(Typename,CompList,MaybeComma2,EncObj,Ext),
+ emit({"].",nl}).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% generate decode function for SEQUENCE and SET
+%%
+gen_decode_set(Erules,Typename,D) ->
+ gen_decode_constructed(Erules,Typename,D).
+
+gen_decode_sequence(Erules,Typename,D) ->
+ gen_decode_constructed(Erules,Typename,D).
+
+gen_decode_constructed(_Erules,Typename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ {CompList,TableConsInfo} =
+ case D#type.def of
+ #'SEQUENCE'{tablecinf=TCI,components=CL} ->
+ {CL,TCI};
+ #'SET'{tablecinf=TCI,components=CL} ->
+ {CL,TCI}
+ end,
+ Ext = extensible(CompList),
+ MaybeComma1 = case Ext of
+ {ext,_Pos,_NumExt} ->
+ gen_dec_extension_value("Bytes"),
+ {",",nl};
+ _ ->
+ ""
+ end,
+ Optionals = optionals(CompList),
+ MaybeComma2 = case Optionals of
+ [] -> MaybeComma1;
+ _ ->
+ Bcurr = asn1ct_name:curr(bytes),
+ Bnext = asn1ct_name:next(bytes),
+ emit(MaybeComma1),
+ GetoptCall = "} = ?RT_PER:getoptionals2(",
+ emit({"{Opt,",{var,Bnext},GetoptCall,
+ {var,Bcurr},",",{asis,length(Optionals)},")"}),
+ asn1ct_name:new(bytes),
+ ", "
+ end,
+ {DecObjInf,UniqueFName,ValueIndex} =
+ case TableConsInfo of
+%% {ObjectSet,AttrN,N,UniqueFieldName} ->%% N is index of attribute that determines constraint
+ #simpletableattributes{objectsetname=ObjectSet,
+ c_name=AttrN,
+ usedclassfield=UniqueFieldName,
+ uniqueclassfield=UniqueFieldName,
+ valueindex=ValIndex} ->
+%% {AttrN,ObjectSet};
+ F = fun(#'ComponentType'{typespec=CT})->
+ case {CT#type.constraint,CT#type.tablecinf} of
+ {[],[{objfun,_}|_R]} -> true;
+ _ -> false
+ end
+ end,
+ case lists:any(F,CompList) of
+		      true -> % when a component relation constraint establishes a
+			   %% relation from a component to a subtype component
+			   %% of another component
+ {{AttrN,{deep,ObjectSet,UniqueFieldName,ValIndex}},
+ UniqueFieldName,ValIndex};
+ false ->
+ {{AttrN,ObjectSet},UniqueFieldName,ValIndex}
+ end;
+ _ ->
+ case D#type.tablecinf of
+ [{objfun,_}|_] ->
+ {{"got objfun through args","ObjFun"},false,false};
+ _ ->
+ {false,false,false}
+ end
+ end,
+ {AccTerm,AccBytes} =
+ gen_dec_components_call(Typename,CompList,MaybeComma2,DecObjInf,Ext,length(Optionals)),
+ case asn1ct_name:all(term) of
+ [] -> emit(MaybeComma2); % no components at all
+ _ -> emit({com,nl})
+ end,
+ case {AccTerm,AccBytes} of
+ {[],[]} ->
+ ok;
+ {_,[]} ->
+ ok;
+ {[{ObjSet,LeadingAttr,Term}],ListOfOpenTypes} ->
+ DecObj = asn1ct_gen:un_hyphen_var(lists:concat(['DecObj',LeadingAttr,Term])),
+ ValueMatch = value_match(ValueIndex,Term),
+ emit({DecObj," =",nl," 'getdec_",ObjSet,"'(",
+% {asis,UniqueFName},", ",Term,"),",nl}),
+ {asis,UniqueFName},", ",ValueMatch,"),",nl}),
+ gen_dec_listofopentypes(DecObj,ListOfOpenTypes,false)
+ end,
+ %% we don't return named lists any more Cnames = mkcnamelist(CompList),
+ demit({"Result = "}), %dbg
+ %% return value as record
+ case Typename of
+ ['EXTERNAL'] ->
+ emit({" OldFormat={'",asn1ct_gen:list2rname(Typename),
+ "'"}),
+ mkvlist(asn1ct_name:all(term)),
+ emit({"},",nl}),
+ emit({" ASN11994Format =",nl,
+ " asn1rt_check:transform_to_EXTERNAL1994",
+ "(OldFormat),",nl}),
+ emit(" {ASN11994Format,");
+ _ ->
+ emit(["{{'",asn1ct_gen:list2rname(Typename),"'"]),
+ mkvlist(asn1ct_name:all(term)),
+ emit("},")
+ end,
+ emit({{var,asn1ct_name:curr(bytes)},"}"}),
+ emit({".",nl,nl}).
+
+gen_dec_listofopentypes(_,[],_) ->
+ emit(nl);
+gen_dec_listofopentypes(DecObj,[{_Cname,{FirstPFN,PFNList},Term,TmpTerm,Prop}|Rest],_Update) ->
+
+% asn1ct_name:new(term),
+ asn1ct_name:new(tmpterm),
+ asn1ct_name:new(reason),
+
+ emit([Term," = ",nl]),
+
+ N = case Prop of
+ mandatory -> 0;
+ 'OPTIONAL' ->
+ emit_opt_or_mand_check(asn1_NOVALUE,TmpTerm),
+ 6;
+ {'DEFAULT',Val} ->
+ emit_opt_or_mand_check(Val,TmpTerm),
+ 6
+ end,
+
+ emit([indent(N+3),"case (catch ",DecObj,"(",
+ {asis,FirstPFN},", ",TmpTerm,", telltype,",{asis,PFNList},")) of",nl]),
+ emit([indent(N+6),"{'EXIT', ",{curr,reason},"} ->",nl]),
+%% emit({indent(9),"throw({runtime_error,{","'Type not compatible with table constraint'",",",Term,"}});",nl}),
+ emit([indent(N+9),"exit({'Type not compatible with table constraint',",
+ {curr,reason},"});",nl]),
+ emit([indent(N+6),"{",{curr,tmpterm},",_} ->",nl]),
+ emit([indent(N+9),{curr,tmpterm},nl]),
+
+ case Prop of
+ mandatory ->
+ emit([indent(N+3),"end,",nl]);
+ _ ->
+ emit([indent(N+3),"end",nl,
+ indent(3),"end,",nl])
+ end,
+ gen_dec_listofopentypes(DecObj,Rest,true).
+
+
+emit_opt_or_mand_check(Val,Term) ->
+ emit([indent(3),"case ",Term," of",nl,
+ indent(6),{asis,Val}," ->",{asis,Val},";",nl,
+ indent(6),"_ ->",nl]).
+
+%% ENCODE GENERATOR FOR THE CHOICE TYPE *******
+%% assume Val = {Alternative,AltType}
+%% generate
+%%[
+%% ?RT_PER:set_choice(element(1,Val),Altnum,Altlist,ext),
+%%case element(1,Val) of
+%% alt1 ->
+%% encode_alt1(element(2,Val));
+%% alt2 ->
+%% encode_alt2(element(2,Val))
+%%end
+%%].
+
+gen_encode_choice(_Erules,Typename,D) when record(D,type) ->
+ {'CHOICE',CompList} = D#type.def,
+ emit({"[",nl}),
+ Ext = extensible(CompList),
+ gen_enc_choice(Typename,CompList,Ext),
+ emit({nl,"].",nl}).
+
+gen_decode_choice(_Erules,Typename,D) when record(D,type) ->
+ asn1ct_name:start(),
+ asn1ct_name:new(bytes),
+ {'CHOICE',CompList} = D#type.def,
+ Ext = extensible(CompList),
+ gen_dec_choice(Typename,CompList,Ext),
+ emit({".",nl}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% Encode generator for SEQUENCE OF type
+
+
+gen_encode_sof(_Erules,Typename,SeqOrSetOf,D) when record(D,type) ->
+ asn1ct_name:start(),
+% Val = [Component]
+% ?RT_PER:encode_length(length(Val)),
+% lists:
+ {_SeqOrSetOf,ComponentType} = D#type.def,
+ emit({"[",nl}),
+ SizeConstraint =
+ case asn1ct_gen:get_constraint(D#type.constraint,
+ 'SizeConstraint') of
+ no -> undefined;
+ Range -> Range
+ end,
+ ObjFun =
+ case D#type.tablecinf of
+ [{objfun,_}|_R] ->
+ ", ObjFun";
+ _->
+ ""
+ end,
+ emit({nl,indent(3),"?RT_PER:encode_length(",
+ {asis,SizeConstraint},
+ ",length(Val)),",nl}),
+ emit({indent(3),"'enc_",asn1ct_gen:list2name(Typename),
+ "_components'(Val",ObjFun,", [])"}),
+ emit({nl,"].",nl}),
+ NewComponentType =
+ case ComponentType#type.def of
+ {'ENUMERATED',_,Component}->
+ ComponentType#type{def={'ENUMERATED',Component}};
+ _ -> ComponentType
+ end,
+ gen_encode_sof_components(Typename,SeqOrSetOf,NewComponentType).
+
+gen_decode_sof(_Erules,Typename,SeqOrSetOf,D) when record(D,type) ->
+ asn1ct_name:start(),
+% Val = [Component]
+% ?RT_PER:encode_length(length(Val)),
+% lists:
+ {_SeqOrSetOf,ComponentType} = D#type.def,
+ SizeConstraint =
+ case asn1ct_gen:get_constraint(D#type.constraint,
+ 'SizeConstraint') of
+ no -> undefined;
+ Range -> Range
+ end,
+ ObjFun =
+ case D#type.tablecinf of
+ [{objfun,_}|_R] ->
+ ", ObjFun";
+ _ ->
+ ""
+ end,
+ emit({nl,"{Num,Bytes1} = ?RT_PER:decode_length(Bytes,",{asis,SizeConstraint},"),",nl}),
+ emit({"'dec_",asn1ct_gen:list2name(Typename),
+ "_components'(Num, Bytes1, telltype",ObjFun,", []).",nl}),
+ NewComponentType =
+ case ComponentType#type.def of
+ {'ENUMERATED',_,Component}->
+ ComponentType#type{def={'ENUMERATED',Component}};
+ _ -> ComponentType
+ end,
+ gen_decode_sof_components(Typename,SeqOrSetOf,NewComponentType).
+
+gen_encode_sof_components(Typename,SeqOrSetOf,Cont) ->
+ {ObjFun,ObjFun_Var} =
+ case Cont#type.tablecinf of
+ [{objfun,_}|_R] ->
+ {", ObjFun",", _"};
+ _ ->
+ {"",""}
+ end,
+ emit({"'enc_",asn1ct_gen:list2name(Typename),"_components'([]",
+ ObjFun_Var,", Acc) -> lists:reverse(Acc);",nl,nl}),
+ emit({"'enc_",asn1ct_gen:list2name(Typename),"_components'([H|T]",
+ ObjFun,", Acc) ->",nl}),
+ emit({"'enc_",asn1ct_gen:list2name(Typename),"_components'(T"}),
+ emit({ObjFun,", ["}),
+ %% the component encoder
+ Constructed_Suffix = asn1ct_gen:constructed_suffix(SeqOrSetOf,
+ Cont#type.def),
+
+ Conttype = asn1ct_gen:get_inner(Cont#type.def),
+ Currmod = get(currmod),
+ Ctgenmod = list_to_atom(lists:concat(["asn1ct_gen_",per,
+ asn1ct_gen:rt2ct_suffix()])),
+ case asn1ct_gen:type(Conttype) of
+ {primitive,bif} ->
+ gen_encode_prim_wrapper(Ctgenmod,per,Cont,false,"H");
+% Ctgenmod:gen_encode_prim(per,Cont,false,"H");
+ {constructed,bif} ->
+ NewTypename = [Constructed_Suffix|Typename],
+ emit({"'enc_",asn1ct_gen:list2name(NewTypename),"'(H",
+ ObjFun,")",nl,nl});
+ #'Externaltypereference'{module=Currmod,type=Ename} ->
+ emit({"'enc_",Ename,"'(H)",nl,nl});
+ #'Externaltypereference'{module=EMod,type=EType} ->
+ emit({"'",EMod,"':'enc_",EType,"'(H)",nl,nl});
+ _ ->
+ emit({"'enc_",Conttype,"'(H)",nl,nl})
+ end,
+ emit({" | Acc]).",nl}).
+
+gen_decode_sof_components(Typename,SeqOrSetOf,Cont) ->
+ {ObjFun,ObjFun_Var} =
+ case Cont#type.tablecinf of
+ [{objfun,_}|_R] ->
+ {", ObjFun",", _"};
+ _ ->
+ {"",""}
+ end,
+ emit({"'dec_",asn1ct_gen:list2name(Typename),
+ "_components'(0, Bytes, _",ObjFun_Var,", Acc) ->",nl,
+ indent(3),"{lists:reverse(Acc), Bytes};",nl}),
+ emit({"'dec_",asn1ct_gen:list2name(Typename),
+ "_components'(Num, Bytes, _",ObjFun,", Acc) ->",nl}),
+ emit({indent(3),"{Term,Remain} = "}),
+ Constructed_Suffix = asn1ct_gen:constructed_suffix(SeqOrSetOf,
+ Cont#type.def),
+ Conttype = asn1ct_gen:get_inner(Cont#type.def),
+ Ctgenmod = list_to_atom(lists:concat(["asn1ct_gen_",per,
+ asn1ct_gen:rt2ct_suffix()])),
+ case asn1ct_gen:type(Conttype) of
+ {primitive,bif} ->
+ Ctgenmod:gen_dec_prim(per,Cont,"Bytes"),
+ emit({com,nl});
+ {constructed,bif} ->
+ NewTypename = [Constructed_Suffix|Typename],
+ emit({"'dec_",asn1ct_gen:list2name(NewTypename),
+ "'(Bytes, telltype",ObjFun,"),",nl});
+ #typereference{val=Dname} ->
+ emit({"'dec_",Dname,"'(Bytes,telltype),",nl});
+ #'Externaltypereference'{module=EMod,type=EType} ->
+ emit({"'",EMod,"':'dec_",EType,"'(Bytes,telltype),",nl});
+ _ ->
+ emit({"'dec_",Conttype,"'(Bytes,telltype),",nl})
+ end,
+ emit({indent(3),"'dec_",asn1ct_gen:list2name(Typename),
+ "_components'(Num-1, Remain, telltype",ObjFun,", [Term|Acc]).",nl}).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% General and special help functions (not exported)
+
+mkvlist([H|T]) ->
+ emit(","),
+ mkvlist2([H|T]);
+mkvlist([]) ->
+ true.
+mkvlist2([H,T1|T]) ->
+ emit({{var,H},","}),
+ mkvlist2([T1|T]);
+mkvlist2([H|T]) ->
+ emit({{var,H}}),
+ mkvlist2(T);
+mkvlist2([]) ->
+ true.
+
+extensible(CompList) when list(CompList) ->
+ noext;
+extensible({RootList,ExtList}) ->
+ {ext,length(RootList)+1,length(ExtList)}.
+
+gen_dec_extension_value(_) ->
+ emit({"{Ext,",{next,bytes},"} = ?RT_PER:getext(",{curr,bytes},")"}),
+ asn1ct_name:new(bytes).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Produce a list with positions (in the Value record) where
+%% there are optional components, start with 2 because first element
+%% is the record name
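+%% Example (illustrative): for SEQUENCE { a INTEGER, b INTEGER OPTIONAL,
+%% c INTEGER DEFAULT 0 } the result is [3,4].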
+
+optionals({L,_Ext}) -> optionals(L,[],2);
+optionals(L) -> optionals(L,[],2).
+
+optionals([{'EXTENSIONMARK',_,_}|Rest],Acc,Pos) ->
+ optionals(Rest,Acc,Pos); % optionals in extension are currently not handled
+optionals([#'ComponentType'{prop='OPTIONAL'}|Rest],Acc,Pos) ->
+ optionals(Rest,[Pos|Acc],Pos+1);
+optionals([#'ComponentType'{prop={'DEFAULT',_}}|Rest],Acc,Pos) ->
+ optionals(Rest,[Pos|Acc],Pos+1);
+optionals([#'ComponentType'{}|Rest],Acc,Pos) ->
+ optionals(Rest,Acc,Pos+1);
+optionals([],Acc,_) ->
+ lists:reverse(Acc).
+
+
+gen_enc_components_call(TopType,{CompList,ExtList},MaybeComma,DynamicEnc,Ext) ->
+ %% The type has extensionmarker
+ Rpos = gen_enc_components_call1(TopType,CompList,1,MaybeComma,DynamicEnc,noext),
+ case Ext of
+ {ext,_,ExtNum} when ExtNum > 0 ->
+ emit([nl,
+ ",Extensions",nl]);
+ _ -> true
+ end,
+ %handle extensions
+ gen_enc_components_call1(TopType,ExtList,Rpos,MaybeComma,DynamicEnc,Ext);
+gen_enc_components_call(TopType, CompList, MaybeComma, DynamicEnc, Ext) ->
+ %% The type has no extensionmarker
+ gen_enc_components_call1(TopType,CompList,1,MaybeComma,DynamicEnc,Ext).
+
+gen_enc_components_call1(TopType,
+ [C=#'ComponentType'{name=Cname,typespec=Type,prop=Prop}|Rest],
+ Tpos,
+ MaybeComma, DynamicEnc, Ext) ->
+
+ put(component_type,{true,C}),
+ %% information necessary in asn1ct_gen_per_rt2ct:gen_encode_prim
+
+ Pos = case Ext of
+ noext -> Tpos;
+ {ext,Epos,_Enum} -> Tpos - Epos + 1
+ end,
+ emit(MaybeComma),
+ case Prop of
+ 'OPTIONAL' ->
+ gen_enc_component_optional(TopType,Cname,Type,Tpos,DynamicEnc,Ext);
+ {'DEFAULT',_DefVal} ->
+ gen_enc_component_default(TopType,Cname,Type,Tpos,DynamicEnc,Ext);
+ _ ->
+ case Ext of
+ {ext,ExtPos,_} when Tpos >= ExtPos ->
+ gen_enc_component_optional(TopType,Cname,Type,Tpos,DynamicEnc,Ext);
+ _ ->
+ gen_enc_component_mandatory(TopType,Cname,Type,Tpos,DynamicEnc,Ext)
+ end
+ end,
+
+ erase(component_type),
+
+ case Rest of
+ [] ->
+ Pos+1;
+ _ ->
+ emit({com,nl}),
+ gen_enc_components_call1(TopType,Rest,Tpos+1,"",DynamicEnc,Ext)
+ end;
+gen_enc_components_call1(_TopType,[],Pos,_,_,_) ->
+ Pos.
+
+gen_enc_component_default(TopType,Cname,Type,Pos,DynamicEnc,Ext) ->
+% Element = io_lib:format("?RT_PER:cindex(~w,Val1,~w)",[Pos+1,Cname]),
+ Element = make_element(Pos+1,"Val1",Cname),
+ emit({"case ",Element," of",nl}),
+% case Ext of
+% {ext,ExtPos,_} when Pos >= ExtPos ->
+% emit({"asn1_NOEXTVALUE -> [];",nl});
+% _ ->
+ emit({"asn1_DEFAULT -> [];",nl}),
+% end,
+ asn1ct_name:new(tmpval),
+ emit({{curr,tmpval}," ->",nl}),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ emit({nl,"%% attribute number ",Pos," with type ",
+ InnerType,nl}),
+ NextElement = asn1ct_gen:mk_var(asn1ct_name:curr(tmpval)),
+ gen_enc_line(TopType,Cname,Type,NextElement, Pos,DynamicEnc,Ext),
+ emit({nl,"end"}).
+
+gen_enc_component_optional(TopType,Cname,Type,Pos,DynamicEnc,Ext) ->
+% Element = io_lib:format("?RT_PER:cindex(~w,Val1,~w)",[Pos+1,Cname]),
+ Element = make_element(Pos+1,"Val1",Cname),
+ emit({"case ",Element," of",nl}),
+% case Ext of
+% {ext,ExtPos,_} when Pos >= ExtPos ->
+% emit({"asn1_NOEXTVALUE -> [];",nl});
+% _ ->
+ emit({"asn1_NOVALUE -> [];",nl}),
+% end,
+ asn1ct_name:new(tmpval),
+ emit({{curr,tmpval}," ->",nl}),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ emit({nl,"%% attribute number ",Pos," with type ",
+ InnerType,nl}),
+ NextElement = asn1ct_gen:mk_var(asn1ct_name:curr(tmpval)),
+ gen_enc_line(TopType,Cname,Type,NextElement, Pos,DynamicEnc,Ext),
+ emit({nl,"end"}).
+
+gen_enc_component_mandatory(TopType,Cname,Type,Pos,DynamicEnc,Ext) ->
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ emit({nl,"%% attribute number ",Pos," with type ",
+ InnerType,nl}),
+ gen_enc_line(TopType,Cname,Type,[],Pos,DynamicEnc,Ext).
+
+gen_enc_line(TopType, Cname, Type, [], Pos,DynamicEnc,Ext) ->
+% Element = io_lib:format("?RT_PER:cindex(~w,~s,~w)",[Pos+1,asn1ct_gen:mk_var(asn1ct_name:curr(val)),Cname]),
+ Element = make_element(Pos+1,asn1ct_gen:mk_var(asn1ct_name:curr(val)),Cname),
+ gen_enc_line(TopType,Cname,Type,Element, Pos,DynamicEnc,Ext);
+gen_enc_line(TopType,Cname,Type,Element, Pos,DynamicEnc,Ext) ->
+ Ctgenmod = list_to_atom(lists:concat(["asn1ct_gen_",per,
+ asn1ct_gen:rt2ct_suffix()])),
+ Atype =
+ case Type of
+ #type{def=#'ObjectClassFieldType'{type=InnerType}} ->
+ InnerType;
+ _ ->
+ asn1ct_gen:get_inner(Type#type.def)
+ end,
+% case asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info) of
+% no ->
+% asn1ct_gen:get_inner(Type#type.def);
+% _ ->
+% Type#type.def
+% end,
+ case Ext of
+ {ext,Ep1,_} when Pos >= Ep1 ->
+ emit(["?RT_PER:encode_open_type(dummy,?RT_PER:complete("]);
+ _ -> true
+ end,
+ case Atype of
+ {typefield,_} ->
+ case DynamicEnc of
+ {_LeadingAttrName,Fun} ->
+% case asn1ct_gen:get_constraint(Type#type.constraint,
+% componentrelation) of
+ case (Type#type.def)#'ObjectClassFieldType'.fieldname of
+ {notype,T} ->
+ throw({error,{notype,type_from_object,T}});
+ {Name,RestFieldNames} when atom(Name) ->
+ emit({"?RT_PER:encode_open_type([],?RT_PER:complete(",nl}),
+ emit({" ",Fun,"(",{asis,Name},", ",
+ Element,", ",{asis,RestFieldNames},")))"});
+ Other ->
+ throw({asn1,{'internal error',Other}})
+ end
+ end;
+ {objectfield,PrimFieldName1,PFNList} ->
+ case DynamicEnc of
+ {_LeadingAttrName,Fun} ->
+ emit({"?RT_PER:encode_open_type([],"
+ "?RT_PER:complete(",nl}),
+ emit({" ",Fun,"(",{asis,PrimFieldName1},
+ ", ",Element,", ",{asis,PFNList},")))"})
+ end;
+ _ ->
+ CurrMod = get(currmod),
+ case asn1ct_gen:type(Atype) of
+ #'Externaltypereference'{module=Mod,type=EType} when
+ (CurrMod==Mod) ->
+ emit({"'enc_",EType,"'(",Element,")"});
+ #'Externaltypereference'{module=Mod,type=EType} ->
+ emit({"'",Mod,"':'enc_",
+ EType,"'(",Element,")"});
+ #typereference{val=Ename} ->
+ emit({"'enc_",Ename,"'(",Element,")"});
+ {notype,_} ->
+ emit({"'enc_",Atype,"'(",Element,")"});
+ {primitive,bif} ->
+ EncType =
+ case Atype of
+ {fixedtypevaluefield,_,Btype} ->
+ Btype;
+ _ ->
+ Type
+ end,
+ gen_encode_prim_wrapper(Ctgenmod,per,EncType,
+ false,Element);
+% Ctgenmod:gen_encode_prim(per,EncType,
+% false,Element);
+ 'ASN1_OPEN_TYPE' ->
+ case Type#type.def of
+ #'ObjectClassFieldType'{type=OpenType} ->
+ gen_encode_prim_wrapper(Ctgenmod,per,
+ #type{def=OpenType},
+ false,Element);
+ _ ->
+ gen_encode_prim_wrapper(Ctgenmod,per,Type,
+ false,Element)
+ end;
+% Ctgenmod:gen_encode_prim(per,Type,
+% false,Element);
+ {constructed,bif} ->
+ NewTypename = [Cname|TopType],
+ case {Type#type.tablecinf,DynamicEnc} of
+ {[{objfun,_}|_R],{_,EncFun}} ->
+%% emit({"?RT_PER:encode_open_type([],",
+%% "?RT_PER:complete(",nl}),
+ emit({"'enc_",
+ asn1ct_gen:list2name(NewTypename),
+ "'(",Element,", ",EncFun,")"});
+ _ ->
+ emit({"'enc_",
+ asn1ct_gen:list2name(NewTypename),
+ "'(",Element,")"})
+ end
+ end
+ end,
+ case Ext of
+ {ext,Ep2,_} when Pos >= Ep2 ->
+ emit(["))"]);
+ _ -> true
+ end.
+
+gen_dec_components_call(TopType,{CompList,ExtList},MaybeComma,DecInfObj,Ext,NumberOfOptionals) ->
+ %% The type has extensionmarker
+ {Rpos,AccTerm,AccBytes} =
+ gen_dec_components_call1(TopType, CompList, 1, 1, MaybeComma,DecInfObj,
+ noext,[],[],NumberOfOptionals),
+ emit([",",nl,"{Extensions,",{next,bytes},"} = "]),
+ emit(["?RT_PER:getextension(Ext,",{curr,bytes},"),",nl]),
+ asn1ct_name:new(bytes),
+ {_Epos,AccTermE,AccBytesE} =
+ gen_dec_components_call1(TopType,ExtList,Rpos, 1, "",DecInfObj,Ext,[],[],NumberOfOptionals),
+ case ExtList of
+ [] -> true;
+ _ -> emit([",",nl])
+ end,
+ emit([{next,bytes},"= ?RT_PER:skipextensions(",{curr,bytes},",",
+ length(ExtList)+1,",Extensions)",nl]),
+ asn1ct_name:new(bytes),
+ {AccTerm++AccTermE,AccBytes++AccBytesE};
+
+gen_dec_components_call(TopType,CompList,MaybeComma,DecInfObj,Ext,NumberOfOptionals) ->
+ %% The type has no extensionmarker
+ {_,AccTerm,AccBytes} =
+ gen_dec_components_call1(TopType, CompList, 1, 1,MaybeComma,DecInfObj,Ext,[],[],NumberOfOptionals),
+ {AccTerm,AccBytes}.
+
+
+gen_dec_components_call1(TopType,
+ [C=#'ComponentType'{name=Cname,typespec=Type,prop=Prop}|Rest],
+ Tpos,OptPos,MaybeComma,DecInfObj,Ext,AccTerm,AccBytes,NumberOfOptionals) ->
+ Pos = case Ext of
+ noext -> Tpos;
+ {ext,Epos,_Enum} -> Tpos - Epos + 1
+ end,
+ emit(MaybeComma),
+%% asn1ct_name:new(term),
+ InnerType =
+ case Type#type.def of
+ #'ObjectClassFieldType'{type=InType} ->
+ InType;
+ Def ->
+ asn1ct_gen:get_inner(Def)
+ end,
+% case asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info) of
+% no ->
+% asn1ct_gen:get_inner(Type#type.def);
+% _ ->
+% Type#type.def
+% end,
+ case InnerType of
+ #'Externaltypereference'{type=T} ->
+ emit({nl,"%% attribute number ",Tpos," with type ",
+ T,nl});
+ IT when tuple(IT) ->
+ emit({nl,"%% attribute number ",Tpos," with type ",
+ element(2,IT),nl});
+ _ ->
+ emit({nl,"%% attribute number ",Tpos," with type ",
+ InnerType,nl})
+ end,
+
+ case InnerType of
+ {typefield,_} ->
+ asn1ct_name:new(term),
+ asn1ct_name:new(tmpterm),
+ emit({"{",{curr,tmpterm},", ",{next,bytes},"} = "});
+ {objectfield,_,_} ->
+ asn1ct_name:new(term),
+ asn1ct_name:new(tmpterm),
+ emit({"{",{curr,tmpterm},", ",{next,bytes},"} = "});
+ _ ->
+ asn1ct_name:new(term),
+ emit({"{",{curr,term},",",{next,bytes},"} = "})
+ end,
+
+ NewOptPos =
+ case {Ext,Prop} of
+ {noext,mandatory} -> OptPos; % generate nothing
+ {noext,_} ->
+ Element = io_lib:format("Opt band (1 bsl ~w)",[NumberOfOptionals - OptPos]),
+ emit({"case ",Element," of",nl}),
+ emit({"_Opt",OptPos," when _Opt",OptPos," > 0 ->"}),
+ OptPos+1;
+ _ ->
+ emit(["case Extensions of",nl]),
+ emit(["_ when size(Extensions) >= ",Pos,",element(",Pos,",Extensions) == 1 ->",nl])
+ end,
+ put(component_type,{true,C}),
+ {TermVar,BytesVar} = gen_dec_line(TopType,Cname,Type,Tpos,DecInfObj,Ext),
+ erase(component_type),
+ case {Ext,Prop} of
+ {noext,mandatory} -> true; % generate nothing
+ {noext,_} ->
+ emit([";",nl,"0 ->"]),
+ gen_dec_component_no_val(TopType,Cname,Type,Prop,Tpos,Ext);
+ _ ->
+ emit([";",nl,"_ ->",nl]),
+ gen_dec_component_no_val(TopType,Cname,Type,Prop,Tpos,Ext)
+ end,
+ case {Ext,Prop} of
+ {noext,mandatory} -> true; % generate nothing
+ {noext,_} ->
+ emit([nl,"end"]);
+ _ ->
+ emit([nl,"end"])
+
+ end,
+ asn1ct_name:new(bytes),
+ case Rest of
+ [] ->
+ {Pos+1,AccTerm++TermVar,AccBytes++BytesVar};
+ _ ->
+ emit({com,nl}),
+ gen_dec_components_call1(TopType,Rest,Tpos+1,NewOptPos,"",DecInfObj,Ext,
+ AccTerm++TermVar,AccBytes++BytesVar,NumberOfOptionals)
+ end;
+
+gen_dec_components_call1(_TopType,[],Pos,_OptPos,_,_,_,AccTerm,AccBytes,_NumberOfOptionals) ->
+ {Pos,AccTerm,AccBytes}.
+
+
+%%gen_dec_component_no_val(TopType,Cname,Type,_,Pos,{ext,Ep,Enum}) when Pos >= Ep ->
+%% emit({"{asn1_NOEXTVALUE,",{curr,bytes},"}",nl});
+gen_dec_component_no_val(_,_,_,{'DEFAULT',DefVal},_,_) ->
+ emit(["{",{asis,DefVal},",",{curr,bytes},"}",nl]);
+gen_dec_component_no_val(_,_,_,'OPTIONAL',_,_) ->
+ emit({"{asn1_NOVALUE,",{curr,bytes},"}",nl});
+gen_dec_component_no_val(_,_,_,mandatory,_,{ext,_,_}) ->
+ emit({"{asn1_NOVALUE,",{curr,bytes},"}",nl}).
+
+
+gen_dec_line(TopType,Cname,Type,Pos,DecInfObj,Ext) ->
+ Ctgenmod = list_to_atom(lists:concat(["asn1ct_gen_",per,
+ asn1ct_gen:rt2ct_suffix()])),
+ Atype =
+ case Type of
+ #type{def=#'ObjectClassFieldType'{type=InnerType}} ->
+ InnerType;
+ _ ->
+ asn1ct_gen:get_inner(Type#type.def)
+ end,
+% case asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info) of
+% no ->
+% asn1ct_gen:get_inner(Type#type.def);
+% _ ->
+% Type#type.def
+% end,
+ BytesVar0 = asn1ct_gen:mk_var(asn1ct_name:curr(bytes)),
+ BytesVar = case Ext of
+ {ext,Ep,_} when Pos >= Ep ->
+ emit(["begin",nl,"{TmpVal",Pos,",Trem",Pos,
+ "}=?RT_PER:decode_open_type(",
+ {curr,bytes},",[]),",nl,
+ "{TmpValx",Pos,",_}="]),
+ io_lib:format("TmpVal~p",[Pos]);
+ _ -> BytesVar0
+ end,
+ SaveBytes =
+ case Atype of
+ {typefield,_} ->
+ case DecInfObj of
+ false -> % This is in a choice with typefield components
+ {Name,RestFieldNames} =
+ (Type#type.def)#'ObjectClassFieldType'.fieldname,
+% asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info),
+ asn1ct_name:new(tmpterm),
+ asn1ct_name:new(reason),
+ emit([indent(2),"{",{curr,tmpterm},", ",{next,bytes},
+ "} = ?RT_PER:decode_open_type(",{curr,bytes},
+ ", []),",nl]),
+ emit([indent(2),"case (catch ObjFun(",
+ {asis,Name},
+ ",",{curr,tmpterm},",telltype,",
+ {asis,RestFieldNames},")) of", nl]),
+ emit([indent(4),"{'EXIT',",{curr,reason},"} ->",nl]),
+ emit([indent(6),"exit({'Type not ",
+ "compatible with table constraint', ",
+ {curr,reason},"});",nl]),
+ asn1ct_name:new(tmpterm),
+ emit([indent(4),"{",{curr,tmpterm},", _} ->",nl]),
+ emit([indent(6),"{",Cname,", {",{curr,tmpterm},", ",
+ {next,bytes},"}}",nl]),
+ emit([indent(2),"end"]),
+ [];
+ {"got objfun through args","ObjFun"} ->
+			%% this is when the generated code gets the
+			%% objfun through arguments on function
+			%% invocation.
+ {Name,RestFieldNames} =
+ (Type#type.def)#'ObjectClassFieldType'.fieldname,
+ emit(["?RT_PER:decode_open_type(",{curr,bytes},
+ ", []),",nl]),
+ emit([{curr,term}," =",nl,
+ " case (catch ObjFun(",{asis,Name},",",
+ {curr,tmpterm},",telltype,",
+ {asis,RestFieldNames},")) of", nl]),
+ emit([" {'EXIT',",{curr,reason},"} ->",nl]),
+ emit([indent(6),"exit({'Type not ",
+ "compatible with table constraint', ",
+ {curr,reason},"});",nl]),
+ asn1ct_name:new(tmpterm),
+ emit([indent(4),"{",{curr,tmpterm},", _} ->",nl]),
+ emit([indent(6),{curr,tmpterm},nl]),
+ emit([indent(2),"end"]),
+ [];
+ _ ->
+ emit({"?RT_PER:decode_open_type(",{curr,bytes},
+ ", [])"}),
+ RefedFieldName =
+ (Type#type.def)#'ObjectClassFieldType'.fieldname,
+% asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info),
+ [{Cname,RefedFieldName,
+ asn1ct_gen:mk_var(asn1ct_name:curr(term)),
+ asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),
+ get_components_prop()}]
+ end;
+ {objectfield,PrimFieldName1,PFNList} ->
+ emit({"?RT_PER:decode_open_type(",{curr,bytes},", [])"}),
+ [{Cname,{PrimFieldName1,PFNList},
+ asn1ct_gen:mk_var(asn1ct_name:curr(term)),
+ asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),
+ get_components_prop()}];
+ _ ->
+ CurrMod = get(currmod),
+ case asn1ct_gen:type(Atype) of
+ #'Externaltypereference'{module=CurrMod,type=EType} ->
+ emit({"'dec_",EType,"'(",BytesVar,",telltype)"});
+ #'Externaltypereference'{module=Mod,type=EType} ->
+ emit({"'",Mod,"':'dec_",EType,"'(",BytesVar,
+ ",telltype)"});
+ {primitive,bif} ->
+ case Atype of
+ {fixedtypevaluefield,_,Btype} ->
+ Ctgenmod:gen_dec_prim(per,Btype,
+ BytesVar);
+ _ ->
+ Ctgenmod:gen_dec_prim(per,Type,
+ BytesVar)
+ end;
+ 'ASN1_OPEN_TYPE' ->
+ case Type#type.def of
+ #'ObjectClassFieldType'{type=OpenType} ->
+ Ctgenmod:gen_dec_prim(per,#type{def=OpenType},
+ BytesVar);
+ _ ->
+ Ctgenmod:gen_dec_prim(per,Type,
+ BytesVar)
+ end;
+ #typereference{val=Dname} ->
+ emit({"'dec_",Dname,"'(",BytesVar,",telltype)"});
+ {notype,_} ->
+ emit({"'dec_",Atype,"'(",BytesVar,",telltype)"});
+ {constructed,bif} ->
+ NewTypename = [Cname|TopType],
+ case Type#type.tablecinf of
+ [{objfun,_}|_R] ->
+ emit({"'dec_",asn1ct_gen:list2name(NewTypename),
+ "'(",BytesVar,", telltype, ObjFun)"});
+ _ ->
+ emit({"'dec_",asn1ct_gen:list2name(NewTypename),
+ "'(",BytesVar,", telltype)"})
+ end
+ end,
+ case DecInfObj of
+ {Cname,{_,OSet,UniqueFName,ValIndex}} ->
+ Term = asn1ct_gen:mk_var(asn1ct_name:curr(term)),
+ ValueMatch = value_match(ValIndex,Term),
+ emit({",",nl,"ObjFun = 'getdec_",OSet,"'(",
+ {asis,UniqueFName},", ",ValueMatch,")"});
+ _ ->
+ ok
+ end,
+ []
+ end,
+ case Ext of
+ {ext,Ep2,_} when Pos >= Ep2 ->
+ emit([", {TmpValx",Pos,",Trem",Pos,"}",nl,"end"]);
+ _ -> true
+ end,
+ %% Prepare return value
+ case DecInfObj of
+ {Cname,ObjSet} ->
+ {[{ObjSet,Cname,asn1ct_gen:mk_var(asn1ct_name:curr(term))}],
+ SaveBytes};
+ _ ->
+ {[],SaveBytes}
+ end.
+
+gen_enc_choice(TopType,CompList,Ext) ->
+ gen_enc_choice_tag(CompList, [], Ext),
+ emit({com,nl}),
+ emit({"case element(1,Val) of",nl}),
+ gen_enc_choice2(TopType, CompList, Ext),
+ emit({nl,"end"}).
+
+gen_enc_choice_tag({C1,C2},_,_) ->
+ N1 = get_name_list(C1),
+ N2 = get_name_list(C2),
+ emit(["?RT_PER:set_choice(element(1,Val),",
+ {asis,{N1,N2}},", ",{asis,{length(N1),length(N2)}},")"]);
+gen_enc_choice_tag(C,_,_) ->
+ N = get_name_list(C),
+ emit(["?RT_PER:set_choice(element(1,Val),",
+ {asis,N},", ",{asis,length(N)},")"]).
+
+get_name_list(L) ->
+ get_name_list(L,[]).
+
+get_name_list([#'ComponentType'{name=Name}|T], Acc) ->
+ get_name_list(T,[Name|Acc]);
+get_name_list([], Acc) ->
+ lists:reverse(Acc).
+
+%gen_enc_choice_tag([H|T],Acc,Ext) when record(H,'ComponentType') ->
+% gen_enc_choice_tag(T,[H#'ComponentType'.name|Acc],Ext);
+%gen_enc_choice_tag([H|T],Acc,Ext) -> % skip EXTENSIONMARK
+% gen_enc_choice_tag(T,Acc,Ext);
+%gen_enc_choice_tag([],Acc,Ext) ->
+% Length = length(Acc),
+% emit({"?RT_PER:set_choice(element(1,Val),",{asis,Length},",",
+% {asis,lists:reverse(Acc)},",",{asis,Ext},")"}),
+% Length.
+
+gen_enc_choice2(TopType, {L1,L2}, Ext) ->
+ gen_enc_choice2(TopType, L1 ++ L2, 0, Ext);
+gen_enc_choice2(TopType, L, Ext) ->
+ gen_enc_choice2(TopType, L, 0, Ext).
+
+gen_enc_choice2(TopType,[H1,H2|T], Pos, Ext)
+when record(H1,'ComponentType'), record(H2,'ComponentType') ->
+ Cname = H1#'ComponentType'.name,
+ Type = H1#'ComponentType'.typespec,
+ EncObj =
+% case asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info) of
+% no ->
+% false;
+% _ ->
+% {no_attr,"ObjFun"}
+% end,
+ case asn1ct_gen:get_constraint(Type#type.constraint,
+ componentrelation) of
+ no -> false;
+ _ -> {no_attr,"ObjFun"}
+ end,
+ emit({{asis,Cname}," ->",nl}),
+ gen_enc_line(TopType,Cname,Type,"element(2,Val)", Pos+1,EncObj,Ext),
+ emit({";",nl}),
+ gen_enc_choice2(TopType,[H2|T], Pos+1, Ext);
+gen_enc_choice2(TopType,[H1|T], Pos, Ext) when record(H1,'ComponentType') ->
+ Cname = H1#'ComponentType'.name,
+ Type = H1#'ComponentType'.typespec,
+ EncObj =
+% case asn1ct_gen:get_constraint(Type#type.constraint,
+% tableconstraint_info) of
+% no ->
+% false;
+% _ ->
+% {no_attr,"ObjFun"}
+% end,
+ case asn1ct_gen:get_constraint(Type#type.constraint,
+ componentrelation) of
+ no -> false;
+ _ -> {no_attr,"ObjFun"}
+ end,
+ emit({{asis,H1#'ComponentType'.name}," ->",nl}),
+ gen_enc_line(TopType,Cname,Type,"element(2,Val)", Pos+1,EncObj,Ext),
+ gen_enc_choice2(TopType,T, Pos+1, Ext);
+gen_enc_choice2(_,[], _, _) ->
+ true.
+
+gen_dec_choice(TopType,CompList,{ext,Pos,NumExt}) ->
+ emit({"{Ext,",{curr,bytes},"} = ?RT_PER:getbit(Bytes),",nl}),
+ asn1ct_name:new(bytes),
+ gen_dec_choice1(TopType,CompList,{ext,Pos,NumExt});
+gen_dec_choice(TopType,CompList,noext) ->
+ gen_dec_choice1(TopType,CompList,noext).
+
+gen_dec_choice1(TopType,CompList,noext) ->
+ emit({"{Choice,",{curr,bytes},
+ "} = ?RT_PER:getchoice(",{prev,bytes},",",
+ length(CompList),", 0),",nl}),
+ emit({"{Cname,{Val,NewBytes}} = case Choice of",nl}),
+ gen_dec_choice2(TopType,CompList,noext),
+ emit({nl,"end,",nl}),
+ emit({nl,"{{Cname,Val},NewBytes}"});
+gen_dec_choice1(TopType,{RootList,ExtList},Ext) ->
+ NewList = RootList ++ ExtList,
+ gen_dec_choice1(TopType, NewList, Ext);
+gen_dec_choice1(TopType,CompList,{ext,ExtPos,ExtNum}) ->
+ emit({"{Choice,",{curr,bytes},
+ "} = ?RT_PER:getchoice(",{prev,bytes},",",
+ length(CompList)-ExtNum,",Ext ),",nl}),
+ emit({"{Cname,{Val,NewBytes}} = case Choice + Ext*",ExtPos-1," of",nl}),
+ gen_dec_choice2(TopType,CompList,{ext,ExtPos,ExtNum}),
+ emit([";",nl,"_ -> {asn1_ExtAlt, ?RT_PER:decode_open_type(",{curr,bytes},",[])}"]),
+ emit({nl,"end,",nl}),
+ emit({nl,"{{Cname,Val},NewBytes}"}).
+
+
+gen_dec_choice2(TopType,L,Ext) ->
+ gen_dec_choice2(TopType,L,0,Ext).
+
+gen_dec_choice2(TopType,[H1,H2|T],Pos,Ext)
+when record(H1,'ComponentType'), record(H2,'ComponentType') ->
+ Cname = H1#'ComponentType'.name,
+ Type = H1#'ComponentType'.typespec,
+ case Type#type.def of
+ #'ObjectClassFieldType'{type={typefield,_}} ->
+ emit({Pos," -> ",nl}),
+ wrap_gen_dec_line(H1,TopType,Cname,Type,Pos+1,false,Ext),
+ emit({";",nl});
+ _ ->
+ emit({Pos," -> {",{asis,Cname},",",nl}),
+ wrap_gen_dec_line(H1,TopType,Cname,Type,Pos+1,false,Ext),
+ emit({"};",nl})
+ end,
+ gen_dec_choice2(TopType,[H2|T],Pos+1,Ext);
+gen_dec_choice2(TopType,[H1,_H2|T],Pos,Ext) when record(H1,'ComponentType') ->
+ gen_dec_choice2(TopType,[H1|T],Pos,Ext); % skip extensionmark
+gen_dec_choice2(TopType,[H1|T],Pos,Ext) when record(H1,'ComponentType') ->
+ Cname = H1#'ComponentType'.name,
+ Type = H1#'ComponentType'.typespec,
+ case Type#type.def of
+ #'ObjectClassFieldType'{type={typefield,_}} ->
+ emit({Pos," -> ",nl}),
+ wrap_gen_dec_line(H1,TopType,Cname,Type,Pos+1,false,Ext);
+ _ ->
+ emit({Pos," -> {",{asis,Cname},",",nl}),
+ wrap_gen_dec_line(H1,TopType,Cname,Type,Pos+1,false,Ext),
+ emit("}")
+ end,
+    gen_dec_choice2(TopType,T,Pos+1,Ext);
+gen_dec_choice2(TopType,[_|T],Pos,Ext) ->
+ gen_dec_choice2(TopType,T,Pos,Ext);% skip extensionmark
+gen_dec_choice2(_,[],Pos,_) ->
+ Pos.
+
+indent(N) ->
+ lists:duplicate(N,32). % 32 = space
+
+gen_encode_prim_wrapper(CtgenMod,Erule,Cont,DoTag,Value) ->
+% put(component_type,true), % add more info in component_type
+ CtgenMod:gen_encode_prim(Erule,Cont,DoTag,Value).
+% erase(component_type).
+
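+%% make_element(I,Val,Cname) returns the code for accessing component Cname at
+%% position I of Val: a ?RT_PER:cindex/3 call by default, or a plain element/2
+%% call when the 'optimize' option is among the encoding options.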
+make_element(I,Val,Cname) ->
+ case lists:member(optimize,get(encoding_options)) of
+ false ->
+ io_lib:format("?RT_PER:cindex(~w,~s,~w)",[I,Val,Cname]);
+ _ ->
+ io_lib:format("element(~w,~s)",[I,Val])
+ end.
+
+wrap_gen_dec_line(C,TopType,Cname,Type,Pos,DIO,Ext) ->
+ put(component_type,{true,C}),
+ gen_dec_line(TopType,Cname,Type,Pos,DIO,Ext),
+ erase(component_type).
+
+get_components_prop() ->
+ case get(component_type) of
+ undefined ->
+ mandatory;
+ {true,#'ComponentType'{prop=Prop}} -> Prop
+ end.
+
+
+value_match(Index,Value) when atom(Value) ->
+ value_match(Index,atom_to_list(Value));
+value_match([],Value) ->
+ Value;
+value_match([{VI,_}|VIs],Value) ->
+ value_match1(Value,VIs,lists:concat(["element(",VI,","]),1).
+value_match1(Value,[],Acc,Depth) ->
+ Acc ++ Value ++ lists:concat(lists:duplicate(Depth,")"));
+value_match1(Value,[{VI,_}|VIs],Acc,Depth) ->
+ value_match1(Value,VIs,Acc++lists:concat(["element(",VI,","]),Depth+1).
+
+notice_value_match() ->
+ Module = get(currmod),
+ put(value_match,{true,Module}).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_gen.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_gen.erl
new file mode 100644
index 0000000000..5d2f7a13bd
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_gen.erl
@@ -0,0 +1,1664 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_gen.erl,v 1.1 2008/12/17 09:53:29 mikpe Exp $
+%%
+-module(asn1ct_gen).
+
+-include("asn1_records.hrl").
+%%-compile(export_all).
+-export([pgen_exports/3,
+ pgen_hrl/4,
+ gen_head/3,
+ demit/1,
+ emit/1,
+ fopen/2,
+ get_inner/1,type/1,def_to_tag/1,prim_bif/1,
+ type_from_object/1,
+ get_typefromobject/1,get_fieldcategory/2,
+ get_classfieldcategory/2,
+ list2name/1,
+ list2rname/1,
+ constructed_suffix/2,
+ unify_if_string/1,
+ gen_check_call/7,
+ get_constraint/2,
+ insert_once/2,
+ rt2ct_suffix/1,rt2ct_suffix/0]).
+-export([pgen/4,pgen_module/5,mk_var/1, un_hyphen_var/1]).
+-export([gen_encode_constructed/4,gen_decode_constructed/4]).
+
+%% pgen(OutFile, Erules, Module, TypeOrVal)
+%% Generate the Erlang module (.erl) and, if necessary, the include file (.hrl)
+%% corresponding to an ASN.1 module
+%% Erules = per | ber | ber_bin | per_bin
+%% Module = atom()
+%% TypeOrVal = {TypeList,ValueList,PTypeList,ClassList,ObjectList,ObjectSetList}
+%% TypeList = ValueList = ... = [atom()]
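+%% A typical call (illustrative):
+%%   pgen("out/MyModule", ber_bin, 'MyModule',
+%%        {Types,Values,PTypes,Classes,Objects,ObjectSets})
+%% where the lists hold the names stored in asn1_db for 'MyModule'.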
+
+pgen(OutFile,Erules,Module,TypeOrVal) ->
+ pgen_module(OutFile,Erules,Module,TypeOrVal,true).
+
+
+pgen_module(OutFile,Erules,Module,TypeOrVal,Indent) ->
+ put(outfile,OutFile),
+ HrlGenerated = asn1ct_gen:pgen_hrl(Erules,Module,TypeOrVal,Indent),
+ asn1ct_name:start(),
+ ErlFile = lists:concat([OutFile,".erl"]),
+ Fid = asn1ct_gen:fopen(ErlFile,write),
+ put(gen_file_out,Fid),
+ asn1ct_gen:gen_head(Erules,Module,HrlGenerated),
+ pgen_exports(Erules,Module,TypeOrVal),
+ pgen_dispatcher(Erules,Module,TypeOrVal),
+ pgen_info(Erules,Module),
+ pgen_typeorval(wrap_ber(Erules),Module,TypeOrVal),
+ pgen_partial_incomplete_decode(Erules),
+% gen_vars(asn1_db:mod_to_vars(Module)),
+% gen_tag_table(AllTypes),
+ file:close(Fid),
+ io:format("--~p--~n",[{generated,ErlFile}]).
+
+
+pgen_typeorval(Erules,Module,{Types,Values,_Ptypes,_Classes,Objects,ObjectSets}) ->
+ pgen_types(Erules,Module,Types),
+ pgen_values(Erules,Module,Values),
+ pgen_objects(Erules,Module,Objects),
+ pgen_objectsets(Erules,Module,ObjectSets),
+ case catch lists:member(der,get(encoding_options)) of
+ true ->
+ pgen_check_defaultval(Erules,Module);
+ _ -> ok
+ end,
+ pgen_partial_decode(Erules,Module).
+
+pgen_values(_,_,[]) ->
+ true;
+pgen_values(Erules,Module,[H|T]) ->
+ Valuedef = asn1_db:dbget(Module,H),
+ gen_value(Valuedef),
+ pgen_values(Erules,Module,T).
+
+pgen_types(_,Module,[]) ->
+ gen_value_match(Module),
+ true;
+pgen_types(Erules,Module,[H|T]) ->
+ Rtmod = list_to_atom(lists:concat(["asn1ct_gen_",erule(Erules),
+ rt2ct_suffix(Erules)])),
+ asn1ct_name:clear(),
+ Typedef = asn1_db:dbget(Module,H),
+ Rtmod:gen_encode(Erules,Typedef),
+ asn1ct_name:clear(),
+ Rtmod:gen_decode(Erules,Typedef),
+ pgen_types(Erules,Module,T).
+
+pgen_objects(_,_,[]) ->
+ true;
+pgen_objects(Erules,Module,[H|T]) ->
+ Rtmod = list_to_atom(lists:concat(["asn1ct_gen_",erule(Erules),
+ rt2ct_suffix(Erules)])),
+ asn1ct_name:clear(),
+ Typedef = asn1_db:dbget(Module,H),
+ Rtmod:gen_obj_code(Erules,Module,Typedef),
+ pgen_objects(Erules,Module,T).
+
+pgen_objectsets(_,_,[]) ->
+ true;
+pgen_objectsets(Erules,Module,[H|T]) ->
+ Rtmod = list_to_atom(lists:concat(["asn1ct_gen_",erule(Erules),
+ rt2ct_suffix(Erules)])),
+ asn1ct_name:clear(),
+ TypeDef = asn1_db:dbget(Module,H),
+ Rtmod:gen_objectset_code(Erules,TypeDef),
+ pgen_objectsets(Erules,Module,T).
+
+pgen_check_defaultval(Erules,Module) ->
+ CheckObjects = ets:tab2list(check_functions),
+ case get(asndebug) of
+ true ->
+ FileName = lists:concat([Module,'.table']),
+ {ok,IoDevice} = file:open(FileName,[write]),
+ Fun =
+ fun(X)->
+ io:format(IoDevice,"~n~n************~n~n~p~n~n*****"
+ "********~n~n",[X])
+ end,
+ lists:foreach(Fun,CheckObjects),
+ file:close(IoDevice);
+ _ -> ok
+ end,
+ gen_check_defaultval(Erules,Module,CheckObjects).
+
+pgen_partial_decode(Erules,Module) ->
+ pgen_partial_inc_dec(Erules,Module),
+ pgen_partial_dec(Erules,Module).
+
+pgen_partial_inc_dec(Erules,Module) ->
+% io:format("Start partial incomplete decode gen?~n"),
+ case asn1ct:get_gen_state_field(inc_type_pattern) of
+ undefined ->
+%	    io:format("Partial incomplete decode gen not started: ~w~n",[asn1ct:get_gen_state_field(active)]),
+ ok;
+% [] ->
+% ok;
+ ConfList ->
+ PatternLists=lists:map(fun({_,P}) -> P end,ConfList),
+ pgen_partial_inc_dec1(Erules,Module,PatternLists),
+ gen_partial_inc_dec_refed_funcs(Erules)
+ end.
+
+%% pgen_partial_inc_dec1 generates a decode function for the top type of
+%% each of the partially (incompletely) decoded types.
+pgen_partial_inc_dec1(Erules,Module,[P|Ps]) ->
+ Rtmod = list_to_atom(lists:concat(["asn1ct_gen_",erule(Erules),
+ rt2ct_suffix(Erules)])),
+ TopTypeName = asn1ct:partial_inc_dec_toptype(P),
+ TypeDef=asn1_db:dbget(Module,TopTypeName),
+ asn1ct_name:clear(),
+ asn1ct:update_gen_state(namelist,P),
+ asn1ct:update_gen_state(active,true),
+ asn1ct:update_gen_state(prefix,"dec-inc-"),
+ Rtmod:gen_decode(Erules,TypeDef),
+%% asn1ct:update_gen_state(namelist,tl(P)), %%
+ gen_dec_part_inner_constr(Erules,TypeDef,[TopTypeName]),
+ pgen_partial_inc_dec1(Erules,Module,Ps);
+pgen_partial_inc_dec1(_,_,[]) ->
+ ok.
+
+gen_partial_inc_dec_refed_funcs(Erule) when Erule == ber_bin_v2 ->
+ Rtmod = list_to_atom(lists:concat(["asn1ct_gen_",erule(Erule),
+ rt2ct_suffix(Erule)])),
+ case asn1ct:next_refed_func() of
+ [] ->
+ ok;
+ {#'Externaltypereference'{module=M,type=Name},Pattern} ->
+ TypeDef = asn1_db:dbget(M,Name),
+ asn1ct:update_gen_state(namelist,Pattern),
+ Rtmod:gen_inc_decode(Erule,TypeDef),
+ gen_dec_part_inner_constr(Erule,TypeDef,[Name]),
+ gen_partial_inc_dec_refed_funcs(Erule);
+ _ ->
+ gen_partial_inc_dec_refed_funcs(Erule)
+ end;
+gen_partial_inc_dec_refed_funcs(_) ->
+ ok.
+
+pgen_partial_dec(_Erules,_Module) ->
+ ok. %%%% implement later
+
+%% generate code for all inner types that are called from the top type
+%% of the partial incomplete decode
+gen_dec_part_inner_constr(Erules,TypeDef,TypeName) ->
+ Def = TypeDef#typedef.typespec,
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ case InnerType of
+ 'SET' ->
+ #'SET'{components=Components} = Def#type.def,
+ gen_dec_part_inner_types(Erules,Components,TypeName);
+ %% Continue generate the inner of each component
+ 'SEQUENCE' ->
+ #'SEQUENCE'{components=Components} = Def#type.def,
+ gen_dec_part_inner_types(Erules,Components,TypeName);
+ 'CHOICE' ->
+ {_,Components} = Def#type.def,
+ gen_dec_part_inner_types(Erules,Components,TypeName);
+ 'SEQUENCE OF' ->
+	    %% This and the next case must be the last component in the
+	    %% partial decode chain. It is unlikely that this occurs.
+ {_,Type} = Def#type.def,
+ NameSuffix = constructed_suffix(InnerType,Type#type.def),
+ Rtmod = list_to_atom(lists:concat(["asn1ct_gen_",erule(Erules),
+ rt2ct_suffix(Erules)])),
+ asn1ct_name:clear(),
+ Rtmod:gen_decode(Erules,[NameSuffix|TypeName],Type);
+%% gen_types(Erules,[NameSuffix|Typename],Type);
+ 'SET OF' ->
+ {_,Type} = Def#type.def,
+ NameSuffix = constructed_suffix(InnerType,Type#type.def),
+ Rtmod = list_to_atom(lists:concat(["asn1ct_gen_",erule(Erules),
+ rt2ct_suffix(Erules)])),
+ asn1ct_name:clear(),
+ Rtmod:gen_decode(Erules,[NameSuffix|TypeName],Type);
+ _ ->
+ ok
+ end.
+
+gen_dec_part_inner_types(Erules,[ComponentType|Rest],TypeName) ->
+ Rtmod = list_to_atom(lists:concat(["asn1ct_gen_",erule(Erules),
+ rt2ct_suffix(Erules)])),
+ asn1ct_name:clear(),
+ Rtmod:gen_decode(Erules,TypeName,ComponentType),
+ gen_dec_part_inner_types(Erules,Rest,TypeName);
+gen_dec_part_inner_types(Erules,{Comps1,Comps2},TypeName)
+ when list(Comps1),list(Comps2) ->
+ gen_dec_part_inner_types(Erules,Comps1 ++ Comps2,TypeName);
+gen_dec_part_inner_types(_,[],_) ->
+ ok.
+
+
+pgen_partial_incomplete_decode(Erule) ->
+ case asn1ct:get_gen_state_field(active) of
+ true ->
+ pgen_partial_incomplete_decode1(Erule),
+ asn1ct:reset_gen_state();
+ _ ->
+ ok
+ end.
+pgen_partial_incomplete_decode1(ber_bin_v2) ->
+ case asn1ct:read_config_data(partial_incomplete_decode) of
+ undefined ->
+ ok;
+ Data ->
+ lists:foreach(fun emit_partial_incomplete_decode/1,Data)
+ end,
+ GeneratedFs= asn1ct:get_gen_state_field(gen_refed_funcs),
+% io:format("GeneratedFs :~n~p~n",[GeneratedFs]),
+ gen_part_decode_funcs(GeneratedFs,0);
+pgen_partial_incomplete_decode1(_) -> ok.
+
+emit_partial_incomplete_decode({FuncName,TopTypeName,Pattern}) ->
+ emit([{asis,FuncName},"(Bytes) ->",nl,
+ " decode_partial_incomplete(",{asis,TopTypeName},",Bytes,",{asis,Pattern},").",nl]);
+emit_partial_incomplete_decode(D) ->
+ throw({error,{asn1,{"bad data in asn1config file",D}}}).
+
+gen_part_decode_funcs([Data={Name,_,_,Type}|GeneratedFs],N) ->
+ InnerType =
+ case Type#type.def of
+ #'ObjectClassFieldType'{type=OCFTType} ->
+ OCFTType;
+ _ ->
+ get_inner(Type#type.def)
+ end,
+ WhatKind = type(InnerType),
+ TypeName=list2name(Name),
+ if
+ N > 0 -> emit([";",nl]);
+ true -> ok
+ end,
+ emit(["decode_inc_disp('",TypeName,"',Data) ->",nl]),
+ gen_part_decode_funcs(WhatKind,TypeName,Data),
+ gen_part_decode_funcs(GeneratedFs,N+1);
+gen_part_decode_funcs([_H|T],N) ->
+ gen_part_decode_funcs(T,N);
+gen_part_decode_funcs([],N) ->
+ if
+ N > 0 ->
+	    emit([".",nl]);
+ true ->
+ ok
+ end.
+
+gen_part_decode_funcs(#'Externaltypereference'{module=M,type=T},
+ _TypeName,Data) ->
+ #typedef{typespec=TS} = asn1_db:dbget(M,T),
+ InnerType =
+ case TS#type.def of
+ #'ObjectClassFieldType'{type=OCFTType} ->
+ OCFTType;
+ _ ->
+ get_inner(TS#type.def)
+ end,
+ WhatKind = type(InnerType),
+ gen_part_decode_funcs(WhatKind,[T],Data);
+gen_part_decode_funcs({constructed,bif},TypeName,
+ {_Name,parts,Tag,_Type}) ->
+ emit([" case Data of",nl,
+ " L when list(L) ->",nl,
+ " 'dec_",TypeName,"'(lists:map(fun(X)->element(1,?RT_BER:decode(X)) end,L),",{asis,Tag},");",nl,
+ " _ ->",nl,
+ " [Res] = 'dec_",TypeName,"'([Data],",{asis,Tag},"),",nl,
+ " Res",nl,
+ " end"]);
+gen_part_decode_funcs(WhatKind,_TypeName,{_Name,parts,_Tag,_Type}) ->
+ throw({error,{asn1,{"only SEQUENCE OF/SET OF may have the partial incomplete directive 'parts'.",WhatKind}}});
+gen_part_decode_funcs({constructed,bif},TypeName,
+ {_Name,undecoded,Tag,_Type}) ->
+ emit([" 'dec_",TypeName,"'(Data,",{asis,Tag},")"]);
+gen_part_decode_funcs({primitive,bif},_TypeName,
+ {_Name,undecoded,Tag,Type}) ->
+ % Argument no 6 is 0, i.e. bit 6 for primitive encoding.
+ asn1ct_gen_ber_bin_v2:gen_dec_prim(ber_bin_v2,Type,"Data",Tag,[],0,", mandatory, ");
+gen_part_decode_funcs(WhatKind,_TypeName,{_,Directive,_,_}) ->
+ throw({error,{asn1,{"Not implemented yet",WhatKind," partial incomplete directive:",Directive}}}).
+
+gen_types(Erules,Tname,{RootList,ExtList}) when list(RootList) ->
+ gen_types(Erules,Tname,RootList),
+ gen_types(Erules,Tname,ExtList);
+gen_types(Erules,Tname,[{'EXTENSIONMARK',_,_}|Rest]) ->
+ gen_types(Erules,Tname,Rest);
+gen_types(Erules,Tname,[ComponentType|Rest]) ->
+ Rtmod = list_to_atom(lists:concat(["asn1ct_gen_",erule(Erules),
+ rt2ct_suffix(Erules)])),
+ asn1ct_name:clear(),
+ Rtmod:gen_encode(Erules,Tname,ComponentType),
+ asn1ct_name:clear(),
+ Rtmod:gen_decode(Erules,Tname,ComponentType),
+ gen_types(Erules,Tname,Rest);
+gen_types(_,_,[]) ->
+ true;
+gen_types(Erules,Tname,Type) when record(Type,type) ->
+ Rtmod = list_to_atom(lists:concat(["asn1ct_gen_",erule(Erules),
+ rt2ct_suffix(Erules)])),
+ asn1ct_name:clear(),
+ Rtmod:gen_encode(Erules,Tname,Type),
+ asn1ct_name:clear(),
+ Rtmod:gen_decode(Erules,Tname,Type).
+
+gen_value_match(Module) ->
+ case get(value_match) of
+ {true,Module} ->
+ emit(["value_match([{Index,Cname}|Rest],Value) ->",nl,
+ " Value2 =",nl,
+ " case element(Index,Value) of",nl,
+ " {Cname,Val2} -> Val2;",nl,
+ " X -> X",nl,
+ " end,",nl,
+ " value_match(Rest,Value2);",nl,
+ "value_match([],Value) ->",nl,
+ " Value.",nl]);
+ _ -> ok
+ end,
+ put(value_match,undefined).
+
+gen_check_defaultval(Erules,Module,[{Name,Type}|Rest]) ->
+ gen_check_func(Name,Type),
+ gen_check_defaultval(Erules,Module,Rest);
+gen_check_defaultval(_,_,[]) ->
+ ok.
+
+gen_check_func(Name,FType = #type{def=Def}) ->
+ emit({Name,"(V,asn1_DEFAULT) ->",nl," true;",nl}),
+ emit({Name,"(V,V) ->",nl," true;",nl}),
+ emit({Name,"(V,{_,V}) ->",nl," true;",nl}),
+ case Def of
+ {'SEQUENCE OF',Type} ->
+ gen_check_sof(Name,'SEQOF',Type);
+ {'SET OF',Type} ->
+ gen_check_sof(Name,'SETOF',Type);
+ #'SEQUENCE'{components=Components} ->
+ gen_check_sequence(Name,Components);
+ #'SET'{components=Components} ->
+ gen_check_sequence(Name,Components);
+ {'CHOICE',Components} ->
+ gen_check_choice(Name,Components);
+ #'Externaltypereference'{type=T} ->
+ emit({Name,"(DefaultValue,Value) ->",nl}),
+ emit({" ",list2name([T,check]),"(DefaultValue,Value).",nl});
+ MaybePrim ->
+ InnerType = get_inner(MaybePrim),
+ case type(InnerType) of
+ {primitive,bif} ->
+ emit({Name,"(DefaultValue,Value) ->",nl," "}),
+ gen_prim_check_call(InnerType,"DefaultValue","Value",
+ FType),
+ emit({".",nl,nl});
+ _ ->
+ throw({asn1_error,{unknown,type,MaybePrim}})
+ end
+ end.
+
+gen_check_sof(Name,SOF,Type) ->
+ NewName = list2name([sorted,Name]),
+ emit({Name,"(V1,V2) ->",nl}),
+ emit({" ",NewName,"(lists:sort(V1),lists:sort(V2)).",nl,nl}),
+ emit({NewName,"([],[]) ->",nl," true;",nl}),
+ emit({NewName,"([DV|DVs],[V|Vs]) ->",nl," "}),
+ InnerType = get_inner(Type#type.def),
+ case type(InnerType) of
+ {primitive,bif} ->
+ gen_prim_check_call(InnerType,"DV","V",Type),
+ emit({",",nl});
+ {constructed,bif} ->
+ emit({list2name([SOF,Name]),"(DV, V),",nl});
+ #'Externaltypereference'{type=T} ->
+ emit({list2name([T,check]),"(DV,V),",nl})
+ end,
+ emit({" ",NewName,"(DVs,Vs).",nl,nl}).
+
+gen_check_sequence(Name,Components) ->
+ emit({Name,"(DefaultValue,Value) ->",nl}),
+ gen_check_sequence(Name,Components,1).
+gen_check_sequence(Name,[#'ComponentType'{name=N,typespec=Type}|Cs],Num) ->
+ InnerType = get_inner(Type#type.def),
+% NthDefV = lists:concat(["lists:nth(",Num,",DefaultValue)"]),
+ NthDefV = ["element(",Num+1,",DefaultValue)"],
+% NthV = lists:concat(["lists:nth(",Num,",Value)"]),
+ NthV = ["element(",Num+1,",Value)"],
+ gen_check_func_call(Name,Type,InnerType,NthDefV,NthV,N),
+ case Cs of
+ [] ->
+ emit({".",nl,nl});
+ _ ->
+ emit({",",nl}),
+ gen_check_sequence(Name,Cs,Num+1)
+ end;
+gen_check_sequence(_,[],_) ->
+ ok.
+
+gen_check_choice(Name,CList=[#'ComponentType'{}|_Cs]) ->
+ emit({Name,"({Id,DefaultValue},{Id,Value}) ->",nl}),
+ emit({" case Id of",nl}),
+ gen_check_choice_components(Name,CList,1).
+
+gen_check_choice_components(_,[],_)->
+ ok;
+gen_check_choice_components(Name,[#'ComponentType'{name=N,typespec=Type}|
+ Cs],Num) ->
+ Ind6 = " ",
+ InnerType = get_inner(Type#type.def),
+% DefVal = ["element(2,lists:nth(",Num,",DefaultValue))"],
+ emit({Ind6,N," ->",nl,Ind6}),
+ gen_check_func_call(Name,Type,InnerType,{var,"defaultValue"},
+ {var,"value"},N),
+ case Cs of
+ [] ->
+ emit({nl," end.",nl,nl});
+ _ ->
+ emit({";",nl}),
+ gen_check_choice_components(Name,Cs,Num+1)
+ end.
+
+gen_check_func_call(Name,Type,InnerType,DefVal,Val,N) ->
+ case type(InnerType) of
+ {primitive,bif} ->
+ emit(" "),
+ gen_prim_check_call(InnerType,DefVal,Val,Type);
+ #'Externaltypereference'{type=T} ->
+ emit({" ",list2name([T,check]),"(",DefVal,",",Val,")"});
+ _ ->
+ emit({" ",list2name([N,Name]),"(",DefVal,",",Val,")"})
+ end.
+
+
+%% VARIOUS GENERATOR STUFF
+%% *************************************************
+%%**************************************************
+
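+%% mk_var/1 turns a name into an Erlang variable name by upper-casing the
+%% first character (assumes a lower-case ASCII first character), e.g.
+%% mk_var(foo) -> 'Foo'.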
+mk_var(X) when atom(X) ->
+ list_to_atom(mk_var(atom_to_list(X)));
+
+mk_var([H|T]) ->
+ [H-32|T].
+
+%% Since hyphens are allowed in ASN.1 names, they may occur in a
+%% variable name too. Turn each hyphen into an underscore.
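+%% Example: un_hyphen_var('abc-def') -> 'abc_def'.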
+un_hyphen_var(X) when atom(X) ->
+ list_to_atom(un_hyphen_var(atom_to_list(X)));
+un_hyphen_var([45|T]) ->
+ [95|un_hyphen_var(T)];
+un_hyphen_var([H|T]) ->
+ [H|un_hyphen_var(T)];
+un_hyphen_var([]) ->
+ [].
+
+%% Generate value functions ***************
+%% ****************************************
+%% Generates a function 'V'/0 for each value V defined in the ASN.1 module.
+%% The function returns the value in an Erlang representation which can be
+%% used as input to the runtime encode functions.
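+%% Example (illustrative value name): an ASN.1 definition such as
+%% "maxAge INTEGER ::= 120" results in the generated function
+%%   'maxAge'() ->
+%%   120.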
+
+gen_value(Value) when record(Value,valuedef) ->
+%% io:format(" ~w ",[Value#valuedef.name]),
+ emit({"'",Value#valuedef.name,"'() ->",nl}),
+ V = Value#valuedef.value,
+ emit([{asis,V},".",nl,nl]).
+
+gen_encode_constructed(Erules,Typename,InnerType,D) when record(D,type) ->
+
+ Rtmod = list_to_atom(lists:concat(["asn1ct_constructed_",erule(Erules)])),
+ case InnerType of
+ 'SET' ->
+ Rtmod:gen_encode_set(Erules,Typename,D),
+ #'SET'{components=Components} = D#type.def,
+ gen_types(Erules,Typename,Components);
+ 'SEQUENCE' ->
+ Rtmod:gen_encode_sequence(Erules,Typename,D),
+ #'SEQUENCE'{components=Components} = D#type.def,
+ gen_types(Erules,Typename,Components);
+ 'CHOICE' ->
+ Rtmod:gen_encode_choice(Erules,Typename,D),
+ {_,Components} = D#type.def,
+ gen_types(Erules,Typename,Components);
+ 'SEQUENCE OF' ->
+ Rtmod:gen_encode_sof(Erules,Typename,InnerType,D),
+ {_,Type} = D#type.def,
+ NameSuffix = asn1ct_gen:constructed_suffix(InnerType,Type#type.def),
+ gen_types(Erules,[NameSuffix|Typename],Type);
+ 'SET OF' ->
+ Rtmod:gen_encode_sof(Erules,Typename,InnerType,D),
+ {_,Type} = D#type.def,
+ NameSuffix = asn1ct_gen:constructed_suffix(InnerType,Type#type.def),
+ gen_types(Erules,[NameSuffix|Typename],Type);
+ _ ->
+ exit({nyi,InnerType})
+ end;
+gen_encode_constructed(Erules,Typename,InnerType,D)
+ when record(D,typedef) ->
+ gen_encode_constructed(Erules,Typename,InnerType,D#typedef.typespec).
+
+gen_decode_constructed(Erules,Typename,InnerType,D) when record(D,type) ->
+ Rtmod = list_to_atom(lists:concat(["asn1ct_constructed_",erule(Erules)])),
+ asn1ct:step_in_constructed(), %% updates namelist for incomplete
+ %% partial decode
+ case InnerType of
+ 'SET' ->
+ Rtmod:gen_decode_set(Erules,Typename,D);
+ 'SEQUENCE' ->
+ Rtmod:gen_decode_sequence(Erules,Typename,D);
+ 'CHOICE' ->
+ Rtmod:gen_decode_choice(Erules,Typename,D);
+ 'SEQUENCE OF' ->
+ Rtmod:gen_decode_sof(Erules,Typename,InnerType,D);
+ 'SET OF' ->
+ Rtmod:gen_decode_sof(Erules,Typename,InnerType,D);
+ _ ->
+ exit({nyi,InnerType})
+ end;
+
+
+gen_decode_constructed(Erules,Typename,InnerType,D) when record(D,typedef) ->
+ gen_decode_constructed(Erules,Typename,InnerType,D#typedef.typespec).
+
+
+pgen_exports(Erules,_Module,{Types,Values,_,_,Objects,ObjectSets}) ->
+ emit({"-export([encoding_rule/0]).",nl}),
+ case Types of
+ [] -> ok;
+ _ ->
+ emit({"-export([",nl}),
+ case Erules of
+ ber ->
+ gen_exports1(Types,"enc_",2);
+ ber_bin ->
+ gen_exports1(Types,"enc_",2);
+ ber_bin_v2 ->
+ gen_exports1(Types,"enc_",2);
+ _ ->
+ gen_exports1(Types,"enc_",1)
+ end,
+ emit({"-export([",nl}),
+ gen_exports1(Types,"dec_",2),
+ case Erules of
+ ber ->
+ emit({"-export([",nl}),
+ gen_exports1(Types,"dec_",3);
+ ber_bin ->
+ emit({"-export([",nl}),
+ gen_exports1(Types,"dec_",3);
+ ber_bin_v2 ->
+ emit({"-export([",nl}),
+ gen_exports1(Types,"dec_",2);
+ _ -> ok
+ end
+ end,
+ case Values of
+ [] -> ok;
+ _ ->
+ emit({"-export([",nl}),
+ gen_exports1(Values,"",0)
+ end,
+ case Objects of
+ [] -> ok;
+ _ ->
+ case erule(Erules) of
+ per ->
+ emit({"-export([",nl}),
+ gen_exports1(Objects,"enc_",3),
+ emit({"-export([",nl}),
+ gen_exports1(Objects,"dec_",4);
+ ber_bin_v2 ->
+ emit({"-export([",nl}),
+ gen_exports1(Objects,"enc_",3),
+ emit({"-export([",nl}),
+ gen_exports1(Objects,"dec_",3);
+ _ ->
+ emit({"-export([",nl}),
+ gen_exports1(Objects,"enc_",4),
+ emit({"-export([",nl}),
+ gen_exports1(Objects,"dec_",4)
+ end
+ end,
+ case ObjectSets of
+ [] -> ok;
+ _ ->
+ emit({"-export([",nl}),
+ gen_exports1(ObjectSets,"getenc_",2),
+ emit({"-export([",nl}),
+ gen_exports1(ObjectSets,"getdec_",2)
+ end,
+ emit({"-export([info/0]).",nl}),
+ gen_partial_inc_decode_exports(),
+ emit({nl,nl}).
+
+gen_exports1([F1,F2|T],Prefix,Arity) ->
+ emit({"'",Prefix,F1,"'/",Arity,com,nl}),
+ gen_exports1([F2|T],Prefix,Arity);
+gen_exports1([Flast|_T],Prefix,Arity) ->
+ emit({"'",Prefix,Flast,"'/",Arity,nl,"]).",nl,nl}).
+
+gen_partial_inc_decode_exports() ->
+ case {asn1ct:read_config_data(partial_incomplete_decode),
+ asn1ct:get_gen_state_field(inc_type_pattern)} of
+ {undefined,_} ->
+ ok;
+ {_,undefined} ->
+ ok;
+ {Data,_} ->
+ gen_partial_inc_decode_exports(Data),
+ emit("-export([decode_part/2]).")
+ end.
+gen_partial_inc_decode_exports([]) ->
+ ok;
+gen_partial_inc_decode_exports([{Name,_,_}|Rest]) ->
+ emit(["-export([",Name,"/1"]),
+ gen_partial_inc_decode_exports1(Rest);
+gen_partial_inc_decode_exports([_|Rest]) ->
+ gen_partial_inc_decode_exports(Rest).
+
+gen_partial_inc_decode_exports1([]) ->
+ emit(["]).",nl]);
+gen_partial_inc_decode_exports1([{Name,_,_}|Rest]) ->
+ emit([", ",Name,"/1"]),
+ gen_partial_inc_decode_exports1(Rest);
+gen_partial_inc_decode_exports1([_|Rest]) ->
+ gen_partial_inc_decode_exports1(Rest).
+
+pgen_dispatcher(Erules,_Module,{[],_Values,_,_,_Objects,_ObjectSets}) ->
+ emit(["encoding_rule() ->",nl]),
+ emit([{asis,Erules},".",nl,nl]);
+pgen_dispatcher(Erules,_Module,{Types,_Values,_,_,_Objects,_ObjectSets}) ->
+ emit(["-export([encode/2,decode/2,encode_disp/2,decode_disp/2]).",nl,nl]),
+ emit(["encoding_rule() ->",nl]),
+ emit([" ",{asis,Erules},".",nl,nl]),
+ Call = case Erules of
+ per -> "?RT_PER:complete(encode_disp(Type,Data))";
+ per_bin -> "?RT_PER:complete(encode_disp(Type,Data))";
+ ber -> "encode_disp(Type,Data)";
+ ber_bin -> "encode_disp(Type,Data)";
+ ber_bin_v2 -> "encode_disp(Type,Data)"
+ end,
+ EncWrap = case Erules of
+ ber -> "wrap_encode(Bytes)";
+ _ -> "Bytes"
+ end,
+ emit(["encode(Type,Data) ->",nl,
+ "case catch ",Call," of",nl,
+ " {'EXIT',{error,Reason}} ->",nl,
+ " {error,Reason};",nl,
+ " {'EXIT',Reason} ->",nl,
+ " {error,{asn1,Reason}};",nl,
+ " {Bytes,_Len} ->",nl,
+ " {ok,",EncWrap,"};",nl,
+ " Bytes ->",nl,
+ " {ok,",EncWrap,"}",nl,
+ "end.",nl,nl]),
+
+ case Erules of
+ ber_bin_v2 ->
+ emit(["decode(Type,Data0) ->",nl]),
+ emit(["{Data,_RestBin} = ?RT_BER:decode(Data0",driver_parameter(),"),",nl]);
+ _ ->
+ emit(["decode(Type,Data) ->",nl])
+ end,
+ DecWrap = case Erules of
+ ber -> "wrap_decode(Data)";
+ _ -> "Data"
+ end,
+
+ emit(["case catch decode_disp(Type,",DecWrap,") of",nl,
+ " {'EXIT',{error,Reason}} ->",nl,
+ " {error,Reason};",nl,
+ " {'EXIT',Reason} ->",nl,
+ " {error,{asn1,Reason}};",nl]),
+ case Erules of
+ ber_bin_v2 ->
+ emit([" Result ->",nl,
+ " {ok,Result}",nl]);
+ _ ->
+ emit([" {X,_Rest} ->",nl,
+ " {ok,X};",nl,
+ " {X,_Rest,_Len} ->",nl,
+ " {ok,X}",nl])
+ end,
+ emit(["end.",nl,nl]),
+
+ gen_decode_partial_incomplete(Erules),
+
+ case Types of
+ [] -> ok;
+ _ ->
+ case Erules of
+ ber ->
+ gen_dispatcher(Types,"encode_disp","enc_",",[]"),
+ gen_dispatcher(Types,"decode_disp","dec_",",mandatory");
+ ber_bin ->
+ gen_dispatcher(Types,"encode_disp","enc_",",[]"),
+ gen_dispatcher(Types,"decode_disp","dec_",",mandatory");
+ ber_bin_v2 ->
+ gen_dispatcher(Types,"encode_disp","enc_",""),
+ gen_dispatcher(Types,"decode_disp","dec_",""),
+ gen_partial_inc_dispatcher();
+ _PerOrPer_bin ->
+ gen_dispatcher(Types,"encode_disp","enc_",""),
+ gen_dispatcher(Types,"decode_disp","dec_",",mandatory")
+ end,
+ emit([nl])
+ end,
+ case Erules of
+ ber ->
+ gen_wrapper();
+ _ -> ok
+ end,
+ emit({nl,nl}).
+
+
+gen_decode_partial_incomplete(Erule) when Erule == ber;Erule==ber_bin;
+ Erule==ber_bin_v2 ->
+ case {asn1ct:read_config_data(partial_incomplete_decode),
+ asn1ct:get_gen_state_field(inc_type_pattern)} of
+ {undefined,_} ->
+ ok;
+ {_,undefined} ->
+ ok;
+ _ ->
+ case Erule of
+ ber_bin_v2 ->
+ EmitCaseClauses =
+ fun() ->
+ emit([" {'EXIT',{error,Reason}} ->",nl,
+ " {error,Reason};",nl,
+ " {'EXIT',Reason} ->",nl,
+ " {error,{asn1,Reason}};",nl,
+ " Result ->",nl,
+ " {ok,Result}",nl,
+ " end.",nl,nl])
+ end,
+ emit(["decode_partial_incomplete(Type,Data0,",
+ "Pattern) ->",nl]),
+ emit([" {Data,_RestBin} =",nl,
+ " ?RT_BER:decode_primitive_",
+ "incomplete(Pattern,Data0),",nl,
+ " case catch decode_partial_inc_disp(Type,",
+ "Data) of",nl]),
+ EmitCaseClauses(),
+ emit(["decode_part(Type,Data0) ->",nl,
+ " {Data,_RestBin} = ?RT_BER:decode(Data0),",nl,
+ " case catch decode_inc_disp(Type,Data) of",nl]),
+ EmitCaseClauses();
+ _ -> ok % add later
+ end
+ end;
+gen_decode_partial_incomplete(_Erule) ->
+ ok.
+
+gen_partial_inc_dispatcher() ->
+ case {asn1ct:read_config_data(partial_incomplete_decode),
+ asn1ct:get_gen_state_field(inc_type_pattern)} of
+ {undefined,_} ->
+ ok;
+ {_,undefined} ->
+ ok;
+ {Data,_} ->
+ gen_partial_inc_dispatcher(Data)
+ end.
+gen_partial_inc_dispatcher([{_FuncName,TopType,_Pattern}|Rest]) ->
+ emit(["decode_partial_inc_disp(",{asis,TopType},",Data) ->",nl,
+ " ",{asis,list_to_atom(lists:concat([dec,"-inc-",TopType]))},
+ "(Data);",nl]),
+ gen_partial_inc_dispatcher(Rest);
+gen_partial_inc_dispatcher([]) ->
+ emit(["decode_partial_inc_disp(Type,_Data) ->",nl,
+ " exit({error,{asn1,{undefined_type,Type}}}).",nl]).
+
+driver_parameter() ->
+ Options = get(encoding_options),
+ case lists:member(driver,Options) of
+ true ->
+ ",driver";
+ _ -> ""
+ end.
+
+gen_wrapper() ->
+ emit(["wrap_encode(Bytes) when list(Bytes) ->",nl,
+ " binary_to_list(list_to_binary(Bytes));",nl,
+ "wrap_encode(Bytes) when binary(Bytes) ->",nl,
+ " binary_to_list(Bytes);",nl,
+ "wrap_encode(Bytes) -> Bytes.",nl,nl]),
+ emit(["wrap_decode(Bytes) when list(Bytes) ->",nl,
+ " list_to_binary(Bytes);",nl,
+ "wrap_decode(Bytes) -> Bytes.",nl]).
+
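+%% gen_dispatcher/4 emits one dispatch clause per exported type plus a
+%% catch-all clause, e.g. (illustrative type name 'T', empty ExtraArg):
+%%   encode_disp('T',Data) -> 'enc_T'(Data);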
+gen_dispatcher([F1,F2|T],FuncName,Prefix,ExtraArg) ->
+ emit([FuncName,"('",F1,"',Data) -> '",Prefix,F1,"'(Data",ExtraArg,")",";",nl]),
+ gen_dispatcher([F2|T],FuncName,Prefix,ExtraArg);
+gen_dispatcher([Flast|_T],FuncName,Prefix,ExtraArg) ->
+ emit([FuncName,"('",Flast,"',Data) -> '",Prefix,Flast,"'(Data",ExtraArg,")",";",nl]),
+ emit([FuncName,"(","Type",",_Data) -> exit({error,{asn1,{undefined_type,Type}}}).",nl,nl,nl]).
+
+pgen_info(_Erules,Module) ->
+ Options = get(encoding_options),
+ emit({"info() ->",nl,
+ " [{vsn,'",asn1ct:vsn(),"'},",
+ " {module,'",Module,"'},",
+ " {options,",io_lib:format("~p",[Options]),"}].",nl}).
+
+open_hrl(OutFile,Module) ->
+ File = lists:concat([OutFile,".hrl"]),
+ Fid = fopen(File,write),
+ put(gen_file_out,Fid),
+ gen_hrlhead(Module).
+
+%% EMIT functions ************************
+%% ***************************************
+
+ % debug generation
+demit(Term) ->
+ case get(asndebug) of
+ true -> emit(Term);
+ _ ->true
+ end.
+
+ % always generation
+
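+%% emit/1 accepts nested lists and tuples of strings, integers and atoms,
+%% plus special terms such as nl, com, tab, {asis,Term}, {var,V}, {curr,V},
+%% {next,V} and {prev,V}; e.g. emit(["'enc_",{asis,'T'},"'(Val) ->",nl]).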
+emit({external,_M,T}) ->
+ emit(T);
+
+emit({prev,Variable}) when atom(Variable) ->
+ emit({var,asn1ct_name:prev(Variable)});
+
+emit({next,Variable}) when atom(Variable) ->
+ emit({var,asn1ct_name:next(Variable)});
+
+emit({curr,Variable}) when atom(Variable) ->
+ emit({var,asn1ct_name:curr(Variable)});
+
+emit({var,Variable}) when atom(Variable) ->
+ [Head|V] = atom_to_list(Variable),
+ emit([Head-32|V]);
+
+emit({var,Variable}) ->
+ [Head|V] = Variable,
+ emit([Head-32|V]);
+
+emit({asis,What}) ->
+ format(get(gen_file_out),"~w",[What]);
+
+emit(nl) ->
+ nl(get(gen_file_out));
+
+emit(com) ->
+ emit(",");
+
+emit(tab) ->
+ put_chars(get(gen_file_out)," ");
+
+emit(What) when integer(What) ->
+ put_chars(get(gen_file_out),integer_to_list(What));
+
+emit(What) when list(What), integer(hd(What)) ->
+ put_chars(get(gen_file_out),What);
+
+emit(What) when atom(What) ->
+ put_chars(get(gen_file_out),atom_to_list(What));
+
+emit(What) when tuple(What) ->
+ emit_parts(tuple_to_list(What));
+
+emit(What) when list(What) ->
+ emit_parts(What);
+
+emit(X) ->
+ exit({'cant emit ',X}).
+
+emit_parts([]) -> true;
+emit_parts([H|T]) ->
+ emit(H),
+ emit_parts(T).
+
+format(undefined,X,Y) ->
+ io:format(X,Y);
+format(X,Y,Z) ->
+ io:format(X,Y,Z).
+
+nl(undefined) -> io:nl();
+nl(X) -> io:nl(X).
+
+put_chars(undefined,X) ->
+ io:put_chars(X);
+put_chars(Y,X) ->
+ io:put_chars(Y,X).
+
+fopen(F, Mode) ->
+ case file:open(F, [Mode]) of
+ {ok, Fd} ->
+ Fd;
+ {error, Reason} ->
+ io:format("** Can't open file ~p ~n", [F]),
+ exit({error,Reason})
+ end.
+
+pgen_hrl(Erules,Module,TypeOrVal,_Indent) ->
+ put(currmod,Module),
+ {Types,Values,Ptypes,_,_,_} = TypeOrVal,
+ Ret =
+ case pgen_hrltypes(Erules,Module,Ptypes++Types,0) of
+ 0 ->
+ case Values of
+ [] ->
+ 0;
+ _ ->
+ open_hrl(get(outfile),get(currmod)),
+ pgen_macros(Erules,Module,Values),
+ 1
+ end;
+ X ->
+ pgen_macros(Erules,Module,Values),
+ X
+ end,
+ case Ret of
+ 0 ->
+ 0;
+ Y ->
+ Fid = get(gen_file_out),
+ file:close(Fid),
+ io:format("--~p--~n",
+ [{generated,lists:concat([get(outfile),".hrl"])}]),
+ Y
+ end.
+
+pgen_macros(_,_,[]) ->
+ true;
+pgen_macros(Erules,Module,[H|T]) ->
+ Valuedef = asn1_db:dbget(Module,H),
+ gen_macro(Valuedef),
+ pgen_macros(Erules,Module,T).
+
+pgen_hrltypes(_,_,[],NumRecords) ->
+ NumRecords;
+pgen_hrltypes(Erules,Module,[H|T],NumRecords) ->
+% io:format("records = ~p~n",NumRecords),
+ Typedef = asn1_db:dbget(Module,H),
+ AddNumRecords = gen_record(Typedef,NumRecords),
+ pgen_hrltypes(Erules,Module,T,NumRecords+AddNumRecords).
+
+
+%% Generates a macro for value Value defined in the ASN.1 module
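+%% Example (illustrative name): "maxAge INTEGER ::= 120" yields
+%%   -define('maxAge', 120).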
+gen_macro(Value) when record(Value,valuedef) ->
+ emit({"-define('",Value#valuedef.name,"', ",
+ {asis,Value#valuedef.value},").",nl}).
+
+%% Generate record functions **************
+%% Generates an Erlang record for each named and unnamed SEQUENCE and SET in
+%% the ASN.1 module. If no SEQUENCE or SET is found, no .hrl file is generated.
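+%% Example (illustrative type): for
+%%   Foo ::= SEQUENCE { a INTEGER, b BOOLEAN OPTIONAL }
+%% the generated record is (roughly)
+%%   -record('Foo', {a, b = asn1_NOVALUE}).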
+
+
+gen_record(Tdef,NumRecords) when record(Tdef,typedef) ->
+ Name = [Tdef#typedef.name],
+ Type = Tdef#typedef.typespec,
+ gen_record(type,Name,Type,NumRecords);
+
+gen_record(Tdef,NumRecords) when record(Tdef,ptypedef) ->
+ Name = [Tdef#ptypedef.name],
+ Type = Tdef#ptypedef.typespec,
+ gen_record(ptype,Name,Type,NumRecords).
+
+gen_record(TorPtype,Name,[#'ComponentType'{name=Cname,typespec=Type}|T],Num) ->
+ Num2 = gen_record(TorPtype,[Cname|Name],Type,Num),
+ gen_record(TorPtype,Name,T,Num2);
+gen_record(TorPtype,Name,{Clist1,Clist2},Num) when list(Clist1), list(Clist2) ->
+ gen_record(TorPtype,Name,Clist1++Clist2,Num);
+gen_record(TorPtype,Name,[_|T],Num) -> % skip EXTENSIONMARK
+ gen_record(TorPtype,Name,T,Num);
+gen_record(_TorPtype,_Name,[],Num) ->
+ Num;
+
+gen_record(TorPtype,Name,Type,Num) when record(Type,type) ->
+ Def = Type#type.def,
+ Rec = case Def of
+ Seq when record(Seq,'SEQUENCE') ->
+ case Seq#'SEQUENCE'.pname of
+ false ->
+ {record,Seq#'SEQUENCE'.components};
+ _Pname when TorPtype == type ->
+ false;
+ _ ->
+ {record,Seq#'SEQUENCE'.components}
+ end;
+ Set when record(Set,'SET') ->
+ case Set#'SET'.pname of
+ false ->
+ {record,Set#'SET'.components};
+ _Pname when TorPtype == type ->
+ false;
+ _ ->
+ {record,Set#'SET'.components}
+ end;
+% {'SET',{_,_CompList}} ->
+% {record,_CompList};
+ {'CHOICE',_CompList} -> {inner,Def};
+ {'SEQUENCE OF',_CompList} -> {['SEQOF'|Name],Def};
+ {'SET OF',_CompList} -> {['SETOF'|Name],Def};
+ _ -> false
+ end,
+ case Rec of
+ false -> Num;
+ {record,CompList} ->
+ case Num of
+ 0 -> open_hrl(get(outfile),get(currmod));
+ _ -> true
+ end,
+ emit({"-record('",list2name(Name),"',{",nl}),
+ RootList = case CompList of
+ _ when list(CompList) ->
+ CompList;
+ {_Rl,_} -> _Rl
+ end,
+ gen_record2(Name,'SEQUENCE',RootList),
+ NewCompList =
+ case CompList of
+ {CompList1,[]} ->
+ emit({"}). % with extension mark",nl,nl}),
+ CompList1;
+ {Tr,ExtensionList2} ->
+ case Tr of
+ [] -> true;
+ _ -> emit({",",nl})
+ end,
+ emit({"%% with extensions",nl}),
+ gen_record2(Name, 'SEQUENCE', ExtensionList2,
+ "", ext),
+ emit({"}).",nl,nl}),
+ Tr ++ ExtensionList2;
+ _ ->
+ emit({"}).",nl,nl}),
+ CompList
+ end,
+ gen_record(TorPtype,Name,NewCompList,Num+1);
+ {inner,{'CHOICE', CompList}} ->
+ gen_record(TorPtype,Name,CompList,Num);
+ {NewName,{_, CompList}} ->
+ gen_record(TorPtype,NewName,CompList,Num)
+ end;
+gen_record(_,_,_,NumRecords) -> % skip CLASS etc for now.
+ NumRecords.
+
+gen_head(Erules,Mod,Hrl) ->
+ {Rtmac,Rtmod} = case Erules of
+ per ->
+ emit({"%% Generated by the Erlang ASN.1 PER-"
+ "compiler version:",asn1ct:vsn(),nl}),
+ {"RT_PER",?RT_PER};
+ ber ->
+ emit({"%% Generated by the Erlang ASN.1 BER-"
+ "compiler version:",asn1ct:vsn(),nl}),
+ {"RT_BER",?RT_BER_BIN};
+ per_bin ->
+ emit({"%% Generated by the Erlang ASN.1 BER-"
+ "compiler version, utilizing bit-syntax:",
+ asn1ct:vsn(),nl}),
+ %% temporary code to enable rt2ct optimization
+ Options = get(encoding_options),
+ case lists:member(optimize,Options) of
+ true -> {"RT_PER","asn1rt_per_bin_rt2ct"};
+ _ ->
+ {"RT_PER",?RT_PER_BIN}
+ end;
+ ber_bin ->
+ emit({"%% Generated by the Erlang ASN.1 BER-"
+ "compiler version, utilizing bit-syntax:",
+ asn1ct:vsn(),nl}),
+ {"RT_BER",?RT_BER_BIN};
+ ber_bin_v2 ->
+ emit({"%% Generated by the Erlang ASN.1 BER_V2-"
+ "compiler version, utilizing bit-syntax:",
+ asn1ct:vsn(),nl}),
+ {"RT_BER","asn1rt_ber_bin_v2"}
+ end,
+ emit({"%% Purpose: encoder and decoder to the types in mod ",Mod,nl,nl}),
+ emit({"-module('",Mod,"').",nl}),
+ put(currmod,Mod),
+ %emit({"-compile(export_all).",nl}),
+ case Hrl of
+ 0 -> true;
+ _ ->
+ emit({"-include(\"",Mod,".hrl\").",nl})
+ end,
+ emit(["-define('",Rtmac,"',",Rtmod,").",nl]).
+
+
+gen_hrlhead(Mod) ->
+ emit({"%% Generated by the Erlang ASN.1 compiler version:",asn1ct:vsn(),nl}),
+ emit({"%% Purpose: Erlang record definitions for each named and unnamed",nl}),
+ emit({"%% SEQUENCE and SET, and macro definitions for each value",nl}),
+ emit({"%% definition,in module ",Mod,nl,nl}),
+ emit({nl,nl}).
+
+gen_record2(Name,SeqOrSet,Comps) ->
+ gen_record2(Name,SeqOrSet,Comps,"",noext).
+
+gen_record2(_Name,_SeqOrSet,[],_Com,_Extension) ->
+ true;
+gen_record2(Name,SeqOrSet,[{'EXTENSIONMARK',_,_}|T],Com,Extension) ->
+ gen_record2(Name,SeqOrSet,T,Com,Extension);
+gen_record2(_Name,_SeqOrSet,[H],Com,Extension) ->
+ #'ComponentType'{name=Cname} = H,
+ emit(Com),
+ emit({asis,Cname}),
+ gen_record_default(H, Extension);
+gen_record2(Name,SeqOrSet,[H|T],Com, Extension) ->
+ #'ComponentType'{name=Cname} = H,
+ emit(Com),
+ emit({asis,Cname}),
+ gen_record_default(H, Extension),
+% emit(", "),
+ gen_record2(Name,SeqOrSet,T,", ", Extension).
+
+%gen_record_default(C, ext) ->
+% emit(" = asn1_NOEXTVALUE");
+gen_record_default(#'ComponentType'{prop='OPTIONAL'}, _)->
+ emit(" = asn1_NOVALUE");
+gen_record_default(#'ComponentType'{prop={'DEFAULT',_}}, _)->
+ emit(" = asn1_DEFAULT");
+gen_record_default(_, _) ->
+ true.
+
+gen_check_call(TopType,Cname,Type,InnerType,WhatKind,DefaultValue,Element) ->
+ case WhatKind of
+ {primitive,bif} ->
+ gen_prim_check_call(InnerType,DefaultValue,Element,Type);
+ #'Externaltypereference'{module=M,type=T} ->
+ %% generate function call
+ Name = list2name([T,check]),
+ emit({"'",Name,"'(",DefaultValue,", ",Element,")"}),
+ %% insert in ets table and do look ahead check
+ Typedef = asn1_db:dbget(M,T),
+ RefType = Typedef#typedef.typespec,
+ InType = asn1ct_gen:get_inner(RefType#type.def),
+ case insert_once(check_functions,{Name,RefType}) of
+ true ->
+ lookahead_innertype([T],InType,RefType);
+% case asn1ct_gen:type(InType) of
+% {constructed,bif} ->
+% lookahead_innertype([T],InType,RefType);
+% #'Externaltypereference'{type=TNew} ->
+% lookahead_innertype([TNew],InType,RefType);
+% _ ->
+% ok
+% end;
+ _ ->
+ ok
+ end;
+ {constructed,bif} ->
+ NameList = [Cname|TopType],
+ Name = list2name(NameList ++ [check]),
+ emit({"'",Name,"'(",DefaultValue,", ",Element,")"}),
+ ets:insert(check_functions,{Name,Type}),
+	    %% Must look for check functions in InnerType, which may be
+	    %% referenced or internally defined constructed types not
+	    %% used elsewhere.
+ lookahead_innertype(NameList,InnerType,Type)
+ end.
+
+gen_prim_check_call(PrimType,DefaultValue,Element,Type) ->
+ case unify_if_string(PrimType) of
+ 'BOOLEAN' ->
+ emit({"asn1rt_check:check_bool(",DefaultValue,", ",
+ Element,")"});
+ 'INTEGER' ->
+ NNL =
+ case Type#type.def of
+ {_,NamedNumberList} -> NamedNumberList;
+ _ -> []
+ end,
+ emit({"asn1rt_check:check_int(",DefaultValue,", ",
+ Element,", ",{asis,NNL},")"});
+ 'BIT STRING' ->
+ {_,NBL} = Type#type.def,
+ emit({"asn1rt_check:check_bitstring(",DefaultValue,", ",
+ Element,", ",{asis,NBL},")"});
+ 'OCTET STRING' ->
+ emit({"asn1rt_check:check_octetstring(",DefaultValue,", ",
+ Element,")"});
+ 'NULL' ->
+ emit({"asn1rt_check:check_null(",DefaultValue,", ",
+ Element,")"});
+ 'OBJECT IDENTIFIER' ->
+ emit({"asn1rt_check:check_objectidentifier(",DefaultValue,
+ ", ",Element,")"});
+ 'ObjectDescriptor' ->
+ emit({"asn1rt_check:check_objectdescriptor(",DefaultValue,
+ ", ",Element,")"});
+ 'REAL' ->
+ emit({"asn1rt_check:check_real(",DefaultValue,
+ ", ",Element,")"});
+ 'ENUMERATED' ->
+ {_,Enumerations} = Type#type.def,
+ emit({"asn1rt_check:check_enum(",DefaultValue,
+ ", ",Element,", ",{asis,Enumerations},")"});
+ restrictedstring ->
+ emit({"asn1rt_check:check_restrictedstring(",DefaultValue,
+ ", ",Element,")"})
+ end.
+
+%% lookahead_innertype/3 traverses Type and checks whether check functions
+%% have to be generated, i.e. for all constructed or referenced types.
+lookahead_innertype(Name,'SEQUENCE',Type) ->
+ Components = (Type#type.def)#'SEQUENCE'.components,
+ lookahead_components(Name,Components);
+lookahead_innertype(Name,'SET',Type) ->
+ Components = (Type#type.def)#'SET'.components,
+ lookahead_components(Name,Components);
+lookahead_innertype(Name,'CHOICE',Type) ->
+ {_,Components} = Type#type.def,
+ lookahead_components(Name,Components);
+lookahead_innertype(Name,'SEQUENCE OF',SeqOf) ->
+ lookahead_sof(Name,'SEQOF',SeqOf);
+lookahead_innertype(Name,'SET OF',SeqOf) ->
+ lookahead_sof(Name,'SETOF',SeqOf);
+lookahead_innertype(_Name,#'Externaltypereference'{module=M,type=T},_) ->
+ Typedef = asn1_db:dbget(M,T),
+ RefType = Typedef#typedef.typespec,
+ InType = asn1ct_gen:get_inner(RefType#type.def),
+ case type(InType) of
+ {constructed,bif} ->
+ NewName = list2name([T,check]),
+ case insert_once(check_functions,{NewName,RefType}) of
+ true ->
+ lookahead_innertype([T],InType,RefType);
+ _ ->
+ ok
+ end;
+ #'Externaltypereference'{} ->
+ NewName = list2name([T,check]),
+ case insert_once(check_functions,{NewName,RefType}) of
+ true ->
+ lookahead_innertype([T],InType,RefType);
+ _ ->
+ ok
+ end;
+ _ ->
+ ok
+ end;
+% case insert_once(check_functions,{list2name(Name++[check]),Type}) of
+% true ->
+% InnerType = asn1ct_gen:get_inner(Type#type.def),
+% case asn1ct_gen:type(InnerType) of
+% {constructed,bif} ->
+% lookahead_innertype([T],InnerType,Type);
+% #'Externaltypereference'{type=TNew} ->
+% lookahead_innertype([TNew],InnerType,Type);
+% _ ->
+% ok
+% end;
+% _ ->
+% ok
+% end;
+lookahead_innertype(_,_,_) ->
+ ok.
+
+lookahead_components(_,[]) -> ok;
+lookahead_components(Name,[C|Cs]) ->
+ #'ComponentType'{name=Cname,typespec=Type} = C,
+ InType = asn1ct_gen:get_inner(Type#type.def),
+ case asn1ct_gen:type(InType) of
+ {constructed,bif} ->
+ case insert_once(check_functions,
+ {list2name([Cname|Name] ++ [check]),Type}) of
+ true ->
+ lookahead_innertype([Cname|Name],InType,Type);
+ _ ->
+ ok
+ end;
+ #'Externaltypereference'{module=RefMod,type=RefName} ->
+ Typedef = asn1_db:dbget(RefMod,RefName),
+ RefType = Typedef#typedef.typespec,
+ case insert_once(check_functions,{list2name([RefName,check]),
+ RefType}) of
+ true ->
+ lookahead_innertype([RefName],InType,RefType);
+ _ ->
+ ok
+ end;
+ _ ->
+ ok
+ end,
+ lookahead_components(Name,Cs).
+
+lookahead_sof(Name,SOF,SOFType) ->
+ Type = case SOFType#type.def of
+ {_,_Type} -> _Type;
+ _Type -> _Type
+ end,
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ %% this is if a constructed type is defined in
+ %% the SEQUENCE OF type
+ NameList = [SOF|Name],
+ insert_once(check_functions,
+ {list2name(NameList ++ [check]),Type}),
+ lookahead_innertype(NameList,InnerType,Type);
+ #'Externaltypereference'{module=M,type=T} ->
+ Typedef = asn1_db:dbget(M,T),
+ RefType = Typedef#typedef.typespec,
+ InType = get_inner(RefType#type.def),
+ case insert_once(check_functions,
+ {list2name([T,check]),RefType}) of
+ true ->
+ lookahead_innertype([T],InType,RefType);
+ _ ->
+ ok
+ end;
+ _ ->
+ ok
+ end.
+
+
+insert_once(Table,Object) ->
+ case ets:lookup(Table,element(1,Object)) of
+ [] ->
+ ets:insert(Table,Object); %returns true
+ _ -> false
+ end.
+
+unify_if_string(PrimType) ->
+ case PrimType of
+ 'NumericString' ->
+ restrictedstring;
+ 'PrintableString' ->
+ restrictedstring;
+ 'TeletexString' ->
+ restrictedstring;
+ 'VideotexString' ->
+ restrictedstring;
+ 'IA5String' ->
+ restrictedstring;
+ 'UTCTime' ->
+ restrictedstring;
+ 'GeneralizedTime' ->
+ restrictedstring;
+ 'GraphicString' ->
+ restrictedstring;
+ 'VisibleString' ->
+ restrictedstring;
+ 'GeneralString' ->
+ restrictedstring;
+ 'UniversalString' ->
+ restrictedstring;
+ 'BMPString' ->
+ restrictedstring;
+ Other -> Other
+ end.
+
+
+
+
+
+get_inner(A) when atom(A) -> A;
+get_inner(Ext) when record(Ext,'Externaltypereference') -> Ext;
+get_inner(Tref) when record(Tref,typereference) -> Tref;
+get_inner({fixedtypevaluefield,_,Type}) ->
+ if
+ record(Type,type) ->
+ get_inner(Type#type.def);
+ true ->
+ get_inner(Type)
+ end;
+get_inner({typefield,TypeName}) ->
+ TypeName;
+get_inner(#'ObjectClassFieldType'{type=Type}) ->
+% get_inner(Type);
+ Type;
+get_inner(T) when tuple(T) ->
+ case element(1,T) of
+ Tuple when tuple(Tuple),element(1,Tuple) == objectclass ->
+ case catch(lists:last(element(2,T))) of
+ {valuefieldreference,FieldName} ->
+ get_fieldtype(element(2,Tuple),FieldName);
+ {typefieldreference,FieldName} ->
+ get_fieldtype(element(2,Tuple),FieldName);
+ {'EXIT',Reason} ->
+ throw({asn1,{'internal error in get_inner/1',Reason}})
+ end;
+ _ -> element(1,T)
+ end.
+
+
+
+
+
+type(X) when record(X,'Externaltypereference') ->
+ X;
+type(X) when record(X,typereference) ->
+ X;
+type('ASN1_OPEN_TYPE') ->
+ 'ASN1_OPEN_TYPE';
+type({fixedtypevaluefield,_Name,Type}) when record(Type,type) ->
+ type(get_inner(Type#type.def));
+type({typefield,_}) ->
+ 'ASN1_OPEN_TYPE';
+type(X) ->
+ %% io:format("asn1_types:type(~p)~n",[X]),
+ case catch type2(X) of
+ {'EXIT',_} ->
+ {notype,X};
+ Normal ->
+ Normal
+ end.
+
+type2(X) ->
+ case prim_bif(X) of
+ true ->
+ {primitive,bif};
+ false ->
+ case construct_bif(X) of
+ true ->
+ {constructed,bif};
+ false ->
+ {undefined,user}
+ end
+ end.
+
+prim_bif(X) ->
+ lists:member(X,['INTEGER' ,
+ 'ENUMERATED',
+ 'OBJECT IDENTIFIER',
+ 'ANY',
+ 'NULL',
+ 'BIT STRING' ,
+ 'OCTET STRING' ,
+ 'ObjectDescriptor',
+ 'NumericString',
+ 'TeletexString',
+ 'VideotexString',
+ 'UTCTime',
+ 'GeneralizedTime',
+ 'GraphicString',
+ 'VisibleString',
+ 'GeneralString',
+ 'PrintableString',
+ 'IA5String',
+ 'UniversalString',
+ 'BMPString',
+ 'ENUMERATED',
+ 'BOOLEAN']).
+
+construct_bif(T) ->
+ lists:member(T,['SEQUENCE' ,
+ 'SEQUENCE OF' ,
+ 'CHOICE' ,
+ 'SET' ,
+ 'SET OF']).
+
+def_to_tag(#tag{class=Class,number=Number}) ->
+ {Class,Number};
+def_to_tag(#'ObjectClassFieldType'{type=Type}) ->
+ case Type of
+ T when tuple(T),element(1,T)==fixedtypevaluefield ->
+ {'UNIVERSAL',get_inner(Type)};
+ _ ->
+ []
+ end;
+def_to_tag(Def) ->
+ {'UNIVERSAL',get_inner(Def)}.
+
+
+%% Information Object Class
+
+type_from_object(X) ->
+ case (catch lists:last(element(2,X))) of
+ {'EXIT',_} ->
+ {notype,X};
+ Normal ->
+ Normal
+ end.
+
+
+get_fieldtype([],_FieldName)->
+ {no_type,no_name};
+get_fieldtype([Field|Rest],FieldName) ->
+ case element(2,Field) of
+ FieldName ->
+ case element(1,Field) of
+ fixedtypevaluefield ->
+ {element(1,Field),FieldName,element(3,Field)};
+ _ ->
+ {element(1,Field),FieldName}
+ end;
+ _ ->
+ get_fieldtype(Rest,FieldName)
+ end.
+
+get_fieldcategory([],_FieldName) ->
+ no_cat;
+get_fieldcategory([Field|Rest],FieldName) ->
+ case element(2,Field) of
+ FieldName ->
+ element(1,Field);
+ _ ->
+ get_fieldcategory(Rest,FieldName)
+ end.
+
+get_typefromobject(Type) when record(Type,type) ->
+ case Type#type.def of
+ {{objectclass,_,_},TypeFrObj} when list(TypeFrObj) ->
+ {_,FieldName} = lists:last(TypeFrObj),
+ FieldName;
+ _ ->
+ {no_field}
+ end.
+
+get_classfieldcategory(Type,FieldName) ->
+ case (catch Type#type.def) of
+	{{objectclass,Fields,_},_} ->
+ get_fieldcategory(Fields,FieldName);
+ {'EXIT',_} ->
+ no_cat;
+ _ ->
+ no_cat
+ end.
+%% Information Object Class
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Convert a list of name parts to something that can be output by emit.
+%%
+%% Used to output function names in generated code.
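+%% Example: list2name(['Bar','Foo']) -> "Foo_Bar", i.e. the name parts
+%% (accumulated innermost first) come out outermost first.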
+
+list2name(L) ->
+ NewL = list2name1(L),
+ lists:concat(lists:reverse(NewL)).
+
+list2name1([{ptype,H1},H2|T]) ->
+ [H1,"_",list2name([H2|T])];
+list2name1([H1,H2|T]) ->
+ [H1,"_",list2name([H2|T])];
+list2name1([{ptype,H}|_T]) ->
+ [H];
+list2name1([H|_T]) ->
+ [H];
+list2name1([]) ->
+ [].
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Convert a list of name parts to something that can be output by emit.
+%% Stops at {ptype,Pname}, i.e. Pname will be the first part of the name.
+%% Used to output record names in generated code.
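+%% Example: list2rname([{ptype,'P'},'Foo']) -> "P", while
+%% list2rname(['Bar','Foo']) -> "Foo_Bar", as for list2name/1.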
+
+list2rname(L) ->
+ NewL = list2rname1(L),
+ lists:concat(lists:reverse(NewL)).
+
+list2rname1([{ptype,H1},_H2|_T]) ->
+ [H1];
+list2rname1([H1,H2|T]) ->
+ [H1,"_",list2name([H2|T])];
+list2rname1([{ptype,H}|_T]) ->
+ [H];
+list2rname1([H|_T]) ->
+ [H];
+list2rname1([]) ->
+ [].
+
+
+
+constructed_suffix(_,#'SEQUENCE'{pname=Ptypename}) when Ptypename =/= false ->
+ {ptype, Ptypename};
+constructed_suffix(_,#'SET'{pname=Ptypename}) when Ptypename =/= false ->
+ {ptype,Ptypename};
+constructed_suffix('SEQUENCE OF',_) ->
+ 'SEQOF';
+constructed_suffix('SET OF',_) ->
+ 'SETOF'.
+
+erule(ber) ->
+ ber;
+erule(ber_bin) ->
+ ber;
+erule(ber_bin_v2) ->
+ ber_bin_v2;
+erule(per) ->
+ per;
+erule(per_bin) ->
+ per.
+
+wrap_ber(ber) ->
+ ber_bin;
+wrap_ber(Erule) ->
+ Erule.
+
+rt2ct_suffix() ->
+ Options = get(encoding_options),
+ case {lists:member(optimize,Options),lists:member(per_bin,Options)} of
+ {true,true} -> "_rt2ct";
+ _ -> ""
+ end.
+rt2ct_suffix(per_bin) ->
+ Options = get(encoding_options),
+ case lists:member(optimize,Options) of
+ true -> "_rt2ct";
+ _ -> ""
+ end;
+rt2ct_suffix(_) -> "".
+
+get_constraint(C,Key) ->
+ case lists:keysearch(Key,1,C) of
+ false ->
+ no;
+ {value,{_,V}} ->
+ V;
+ {value,Cnstr} ->
+ Cnstr
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_gen_ber.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_gen_ber.erl
new file mode 100644
index 0000000000..765745dc13
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_gen_ber.erl
@@ -0,0 +1,1525 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_gen_ber.erl,v 1.1 2008/12/17 09:53:29 mikpe Exp $
+%%
+-module(asn1ct_gen_ber).
+
+%% Generate an Erlang module which handles (BER) encode and decode for
+%% all types in an ASN.1 module
+
+-include("asn1_records.hrl").
+
+-export([pgen/4]).
+-export([decode_class/1, decode_type/1]).
+-export([add_removed_bytes/0]).
+-export([gen_encode/2,gen_encode/3,gen_decode/2,gen_decode/3]).
+-export([gen_encode_prim/4]).
+-export([gen_dec_prim/8]).
+-export([gen_objectset_code/2, gen_obj_code/3]).
+-export([re_wrap_erule/1]).
+-export([unused_var/2]).
+
+-import(asn1ct_gen, [emit/1,demit/1]).
+
+	% the encoding of the tag class, in bits 8 and 7
+-define(UNIVERSAL, 0).
+-define(APPLICATION, 16#40).
+-define(CONTEXT, 16#80).
+-define(PRIVATE, 16#C0).
+
+ % primitive or constructed encoding % bit 6
+-define(PRIMITIVE, 0).
+-define(CONSTRUCTED, 2#00100000).
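+%% e.g. a context-specific constructed tag has ?CONTEXT bor ?CONSTRUCTED
+%% (= 16#A0) set in the top bits of its first tag octet.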
+
+
+-define(T_ObjectDescriptor, ?UNIVERSAL bor ?PRIMITIVE bor 7).
+ % restricted character string types
+-define(T_NumericString, ?UNIVERSAL bor ?PRIMITIVE bor 18). %can be constructed
+-define(T_PrintableString, ?UNIVERSAL bor ?PRIMITIVE bor 19). %can be constructed
+-define(T_TeletexString, ?UNIVERSAL bor ?PRIMITIVE bor 20). %can be constructed
+-define(T_VideotexString, ?UNIVERSAL bor ?PRIMITIVE bor 21). %can be constructed
+-define(T_IA5String, ?UNIVERSAL bor ?PRIMITIVE bor 22). %can be constructed
+-define(T_GraphicString, ?UNIVERSAL bor ?PRIMITIVE bor 25). %can be constructed
+-define(T_VisibleString, ?UNIVERSAL bor ?PRIMITIVE bor 26). %can be constructed
+-define(T_GeneralString, ?UNIVERSAL bor ?PRIMITIVE bor 27). %can be constructed
+
+%% pgen(OutFile, Erules, Module, TypeOrVal)
+%% Generate the Erlang module (.erl) and, when necessary, the (.hrl) file
+%% corresponding to an ASN.1 module; delegates to asn1ct_gen:pgen_module/5.
+%% Erules = per | ber
+%% Module = atom()
+%% TypeOrVal = {TypeList,ValueList,PTypeList,ClassList,ObjectList,ObjectSetList}
+%% TypeList = ValueList = ... = [atom()]
+
+pgen(OutFile,Erules,Module,TypeOrVal) ->
+ asn1ct_gen:pgen_module(OutFile,Erules,Module,TypeOrVal,true).
+
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Generate ENCODING
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+%%===============================================================================
+%% encode #{typedef, {pos, name, typespec}}
+%%===============================================================================
+
+gen_encode(Erules,Type) when record(Type,typedef) ->
+ gen_encode_user(Erules,Type).
+
+%%===============================================================================
+%% encode #{type, {tag, def, constraint}}
+%%===============================================================================
+
+gen_encode(Erules,Typename,Type) when record(Type,type) ->
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ ObjFun =
+ case lists:keysearch(objfun,1,Type#type.tablecinf) of
+ {value,{_,_Name}} ->
+ ", ObjFun";
+ false ->
+ ""
+ end,
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ emit([nl,nl,nl,"%%================================"]),
+ emit([nl,"%% ",asn1ct_gen:list2name(Typename)]),
+ emit([nl,"%%================================",nl]),
+ case lists:member(InnerType,['SET','SEQUENCE']) of
+ true ->
+ case get(asn_keyed_list) of
+ true ->
+ CompList =
+ case Type#type.def of
+ #'SEQUENCE'{components=Cl} -> Cl;
+ #'SET'{components=Cl} -> Cl
+ end,
+ emit([nl,"'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val, TagIn",ObjFun,
+ ") when list(Val) ->",nl]),
+ emit([" 'enc_",asn1ct_gen:list2name(Typename),
+ "'(?RT_BER:fixoptionals(",
+ {asis,optionals(CompList)},
+ ",Val), TagIn",ObjFun,");",nl,nl]);
+ _ -> true
+ end;
+ _ ->
+ emit([nl,"'enc_",asn1ct_gen:list2name(Typename),
+ "'({'",asn1ct_gen:list2name(Typename),
+ "',Val}, TagIn",ObjFun,") ->",nl]),
+ emit([" 'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val, TagIn",ObjFun,");",nl,nl])
+ end,
+ emit(["'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val, TagIn",ObjFun,") ->",nl," "]),
+ asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,Type);
+ _ ->
+ true
+ end;
+
+%%===============================================================================
+%% encode ComponentType
+%%===============================================================================
+
+gen_encode(Erules,Tname,{'ComponentType',_Pos,Cname,Type,_,_}) ->
+ NewTname = [Cname|Tname],
+    %% The tag is set to [] to avoid that it is taken into account
+    %% twice: both as a component/alternative (passed as an argument to
+    %% the encode/decode function) and within the encode/decode
+    %% function itself.
+ NewType = Type#type{tag=[]},
+ gen_encode(Erules,NewTname,NewType).
+
+gen_encode_user(Erules,D) when record(D,typedef) ->
+ Typename = [D#typedef.name],
+ Type = D#typedef.typespec,
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ OTag = Type#type.tag,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ emit([nl,nl,"%%================================"]),
+ emit([nl,"%% ",Typename]),
+ emit([nl,"%%================================",nl]),
+ case lists:member(InnerType,['SET','SEQUENCE']) of
+ true ->
+ case get(asn_keyed_list) of
+ true ->
+ CompList =
+ case Type#type.def of
+ #'SEQUENCE'{components=Cl} -> Cl;
+ #'SET'{components=Cl} -> Cl
+ end,
+
+ emit([nl,"'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val, TagIn) when list(Val) ->",nl]),
+ emit([" 'enc_",asn1ct_gen:list2name(Typename),
+ "'(?RT_BER:fixoptionals(",
+ {asis,optionals(CompList)},
+ ",Val), TagIn);",nl,nl]);
+ _ -> true
+ end;
+ _ ->
+ emit({nl,"'enc_",asn1ct_gen:list2name(Typename),
+ "'({'",asn1ct_gen:list2name(Typename),"',Val}, TagIn) ->",nl}),
+ emit({" 'enc_",asn1ct_gen:list2name(Typename),"'(Val, TagIn);",nl,nl})
+ end,
+ emit({"'enc_",asn1ct_gen:list2name(Typename),"'(",
+ unused_var("Val",Type#type.def),", TagIn) ->",nl}),
+ CurrentMod = get(currmod),
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,D);
+ {primitive,bif} ->
+ asn1ct_gen_ber:gen_encode_prim(ber,Type,["TagIn ++ ",
+ {asis,Tag}],"Val"),
+ emit([".",nl]);
+ #typereference{val=Ename} ->
+ emit([" 'enc_",Ename,"'(Val, TagIn ++ ",{asis,Tag},").",nl]);
+ #'Externaltypereference'{module=CurrentMod,type=Etype} ->
+ emit([" 'enc_",Etype,"'(Val, TagIn ++ ",
+ {asis,Tag},").",nl]);
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit([" '",Emod,"':'enc_",Etype,"'(Val, TagIn ++ ",
+ {asis,Tag},").",nl]);
+ 'ASN1_OPEN_TYPE' ->
+ emit(["%% OPEN TYPE",nl]),
+ asn1ct_gen_ber:gen_encode_prim(ber,
+ Type#type{def='ASN1_OPEN_TYPE'},
+ ["TagIn ++ ",
+ {asis,Tag}],"Val"),
+ emit([".",nl])
+ end.
+
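+%% unused_var/2 prefixes Var with "_" when the generated clause will not
+%% reference it (an empty SEQUENCE/SET component list), so the generated
+%% module compiles without unused-variable warnings.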
+unused_var(Var,#'SEQUENCE'{components=Cl}) ->
+ unused_var1(Var,Cl);
+unused_var(Var,#'SET'{components=Cl}) ->
+ unused_var1(Var,Cl);
+unused_var(Var,_) ->
+ Var.
+unused_var1(Var,Cs) when Cs == []; Cs == {[],[]} ->
+ lists:concat(["_",Var]);
+unused_var1(Var,_) ->
+ Var.
+
+unused_optormand_var(Var,Def) ->
+ case asn1ct_gen:type(asn1ct_gen:get_inner(Def)) of
+ 'ASN1_OPEN_TYPE' ->
+ lists:concat(["_",Var]);
+ _ ->
+ Var
+ end.
+
+
+gen_encode_prim(_Erules,D,DoTag,Value) when record(D,type) ->
+
+%%% Currently not used for BER (except for BitString) and therefore replaced
+%%% with [] as a placeholder
+ BitStringConstraint = D#type.constraint,
+ Constraint = [],
+ asn1ct_name:new(enumval),
+ case D#type.def of
+ 'BOOLEAN' ->
+ emit_encode_func('boolean',Value,DoTag);
+ 'INTEGER' ->
+ emit_encode_func('integer',Constraint,Value,DoTag);
+ {'INTEGER',NamedNumberList} ->
+ emit_encode_func('integer',Constraint,Value,
+ NamedNumberList,DoTag);
+ {'ENUMERATED',NamedNumberList={_,_}} ->
+
+ emit(["case (case ",Value," of {asn1_enum,_}->",Value,";{_,_}->element(2,",Value,");_->",
+ Value," end) of",nl]),
+ emit_enc_enumerated_cases(NamedNumberList,DoTag);
+ {'ENUMERATED',NamedNumberList} ->
+
+ emit(["case (case ",Value," of {_,_}->element(2,",Value,");_->",
+ Value," end) of",nl]),
+ emit_enc_enumerated_cases(NamedNumberList,DoTag);
+
+ {'BIT STRING',NamedNumberList} ->
+ emit_encode_func('bit_string',BitStringConstraint,Value,
+ NamedNumberList,DoTag);
+ 'ANY' ->
+ emit_encode_func('open_type', Value,DoTag);
+ 'NULL' ->
+ emit_encode_func('null',Value,DoTag);
+ 'OBJECT IDENTIFIER' ->
+ emit_encode_func("object_identifier",Value,DoTag);
+ 'ObjectDescriptor' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_ObjectDescriptor,DoTag);
+ 'OCTET STRING' ->
+ emit_encode_func('octet_string',Constraint,Value,DoTag);
+ 'NumericString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_NumericString,DoTag);
+ 'TeletexString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_TeletexString,DoTag);
+ 'VideotexString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_VideotexString,DoTag);
+ 'GraphicString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_GraphicString,DoTag);
+ 'VisibleString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_VisibleString,DoTag);
+ 'GeneralString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_GeneralString,DoTag);
+ 'PrintableString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_PrintableString,DoTag);
+ 'IA5String' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_IA5String,DoTag);
+ 'UniversalString' ->
+ emit_encode_func('universal_string',Constraint,Value,DoTag);
+ 'BMPString' ->
+ emit_encode_func('BMP_string',Constraint,Value,DoTag);
+ 'UTCTime' ->
+ emit_encode_func('utc_time',Constraint,Value,DoTag);
+ 'GeneralizedTime' ->
+ emit_encode_func('generalized_time',Constraint,Value,DoTag);
+ 'ASN1_OPEN_TYPE' ->
+ emit_encode_func('open_type', Value,DoTag);
+ XX ->
+ exit({'can not encode' ,XX})
+ end.
+
+
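+%% The emit_encode_func helpers expand to ?RT_BER calls in the generated
+%% code, e.g. emit_encode_func('boolean',"Val",Tags) emits
+%%   ?RT_BER:encode_boolean(Val, <Tags>)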
+emit_encode_func(Name,Value,Tags) when atom(Name) ->
+ emit_encode_func(atom_to_list(Name),Value,Tags);
+emit_encode_func(Name,Value,Tags) ->
+ Fname = "?RT_BER:encode_" ++ Name,
+ emit([Fname,"(",Value,", ",Tags,")"]).
+
+emit_encode_func(Name,Constraint,Value,Tags) when atom(Name) ->
+ emit_encode_func(atom_to_list(Name),Constraint,Value,Tags);
+emit_encode_func(Name,Constraint,Value,Tags) ->
+ Fname = "?RT_BER:encode_" ++ Name,
+ emit([Fname,"(",{asis,Constraint},", ",Value,", ",Tags,")"]).
+
+emit_encode_func(Name,Constraint,Value,Asis,Tags) when atom(Name) ->
+ emit_encode_func(atom_to_list(Name),Constraint,Value,Asis,Tags);
+emit_encode_func(Name,Constraint,Value,Asis,Tags) ->
+ Fname = "?RT_BER:encode_" ++ Name,
+ emit([Fname,"(",{asis,Constraint},", ",Value,
+ ", ",{asis,Asis},
+ ", ",Tags,")"]).
+
+emit_enc_enumerated_cases({L1,L2}, Tags) ->
+ emit_enc_enumerated_cases(L1++L2, Tags, ext);
+emit_enc_enumerated_cases(L, Tags) ->
+ emit_enc_enumerated_cases(L, Tags, noext).
+
+emit_enc_enumerated_cases([{EnumName,EnumVal},H2|T], Tags, Ext) ->
+ emit([{asis,EnumName}," -> ?RT_BER:encode_enumerated(",EnumVal,",",Tags,");",nl]),
+%% emit(["'",{asis,EnumName},"' -> ?RT_BER:encode_enumerated(",EnumVal,",",Tags,");",nl]),
+ emit_enc_enumerated_cases([H2|T], Tags, Ext);
+emit_enc_enumerated_cases([{EnumName,EnumVal}], Tags, Ext) ->
+ emit([{asis,EnumName}," -> ?RT_BER:encode_enumerated(",EnumVal,",",Tags,")"]),
+%% emit(["'",{asis,EnumName},"' -> ?RT_BER:encode_enumerated(",EnumVal,",",Tags,")"]),
+ case Ext of
+ noext -> emit([";",nl]);
+ ext ->
+ emit([";",nl,"{asn1_enum,",{curr,enumval},"} -> ",
+ "?RT_BER:encode_enumerated(",{curr,enumval},",",Tags,");",nl]),
+ asn1ct_name:new(enumval)
+ end,
+ emit([{curr,enumval}," -> exit({error,{asn1, {enumerated_not_in_range,",{curr, enumval},"}}})"]),
+    emit([nl,"end"]).
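+
+%% Illustrative sketch (hypothetical ENUMERATED { red(0), blue(1) }): the
+%% clauses above emit roughly the following case expression into the
+%% generated module, with an extra {asn1_enum,_} arm when the type is
+%% extensible:
+%%
+%%   case (case Val of {_,_} -> element(2,Val); _ -> Val end) of
+%%       red -> ?RT_BER:encode_enumerated(0, TagIn);
+%%       blue -> ?RT_BER:encode_enumerated(1, TagIn);
+%%       Enumval -> exit({error,{asn1,{enumerated_not_in_range,Enumval}}})
+%%   end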
+
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Generate DECODING
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+%%===============================================================================
+%% decode #{typedef, {pos, name, typespec}}
+%%===============================================================================
+
+gen_decode(Erules,Type) when record(Type,typedef) ->
+ D = Type,
+ emit({nl,nl}),
+ emit({"'dec_",Type#typedef.name,"'(Bytes, OptOrMand) ->",nl}),
+ emit({" 'dec_",Type#typedef.name,"'(Bytes, OptOrMand, []).",nl,nl}),
+ emit({"'dec_",Type#typedef.name,"'(Bytes, ",
+ unused_optormand_var("OptOrMand",(Type#typedef.typespec)#type.def),", TagIn) ->",nl}),
+ dbdec(Type#typedef.name),
+ gen_decode_user(Erules,D).
+
+
+%%===============================================================================
+%% decode #{type, {tag, def, constraint}}
+%%===============================================================================
+
+gen_decode(Erules,Tname,Type) when record(Type,type) ->
+ Typename = Tname,
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ ObjFun =
+ case Type#type.tablecinf of
+ [{objfun,_}|_R] ->
+ ", ObjFun";
+ _ ->
+ ""
+ end,
+ emit({"'dec_",asn1ct_gen:list2name(Typename),"'(Bytes, OptOrMand, TagIn",ObjFun,") ->",nl}),
+ dbdec(Typename),
+ asn1ct_gen:gen_decode_constructed(Erules,Typename,InnerType,Type);
+ _ ->
+ true
+ end;
+
+
+%%===============================================================================
+%% decode ComponentType
+%%===============================================================================
+
+gen_decode(Erules,Tname,{'ComponentType',_Pos,Cname,Type,_,_}) ->
+ NewTname = [Cname|Tname],
+    %% The tag is set to [] to avoid it being taken into account
+    %% twice, both as a component/alternative (passed as argument to
+    %% the encode/decode function) and within the encode/decode
+    %% function itself.
+ NewType = Type#type{tag=[]},
+ gen_decode(Erules,NewTname,NewType).
+
+
+gen_decode_user(Erules,D) when record(D,typedef) ->
+ Typename = [D#typedef.name],
+ Def = D#typedef.typespec,
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ InnerTag = Def#type.tag ,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- InnerTag],
+ case asn1ct_gen:type(InnerType) of
+ 'ASN1_OPEN_TYPE' ->
+ BytesVar = asn1ct_gen:mk_var(asn1ct_name:curr(bytes)),
+ asn1ct_name:new(len),
+ gen_dec_prim(Erules, Def#type{def='ASN1_OPEN_TYPE'},
+ BytesVar, Tag, "TagIn",no_length,
+ ?PRIMITIVE,"OptOrMand"),
+ emit({".",nl,nl});
+ {primitive,bif} ->
+ BytesVar = asn1ct_gen:mk_var(asn1ct_name:curr(bytes)),
+ asn1ct_name:new(len),
+ gen_dec_prim(Erules, Def, BytesVar, Tag, "TagIn",no_length,
+ ?PRIMITIVE,"OptOrMand"),
+ emit({".",nl,nl});
+ {constructed,bif} ->
+ asn1ct_gen:gen_decode_constructed(Erules,Typename,InnerType,D);
+ TheType ->
+ DecFunName = mkfuncname(TheType,dec),
+ emit({DecFunName,"(",{curr,bytes},
+ ", OptOrMand, TagIn++",{asis,Tag},")"}),
+ emit({".",nl,nl})
+ end.
+
+
+gen_dec_prim(Erules,Att,BytesVar,DoTag,TagIn,Length,_Form,OptOrMand) ->
+ Typename = Att#type.def,
+%% Currently not used for BER; replaced with [] as a placeholder
+%% Constraint = Att#type.constraint,
+%% Constraint = [],
+ Constraint =
+ case get_constraint(Att#type.constraint,'SizeConstraint') of
+ no -> [];
+ Tc -> Tc
+ end,
+ ValueRange =
+ case get_constraint(Att#type.constraint,'ValueRange') of
+ no -> [];
+ Tv -> Tv
+ end,
+ SingleValue =
+ case get_constraint(Att#type.constraint,'SingleValue') of
+ no -> [];
+ Sv -> Sv
+ end,
+ AsBin = case get(binary_strings) of
+ true -> "_as_bin";
+ _ -> ""
+ end,
+ NewTypeName = case Typename of
+ 'ANY' -> 'ASN1_OPEN_TYPE';
+ _ -> Typename
+ end,
+ DoLength =
+ case NewTypeName of
+ 'BOOLEAN'->
+ emit({"?RT_BER:decode_boolean(",BytesVar,","}),
+ false;
+ 'INTEGER' ->
+ emit({"?RT_BER:decode_integer(",BytesVar,",",
+ {asis,int_constr(SingleValue,ValueRange)},","}),
+ false;
+ {'INTEGER',NamedNumberList} ->
+ emit({"?RT_BER:decode_integer(",BytesVar,",",
+ {asis,int_constr(SingleValue,ValueRange)},",",
+ {asis,NamedNumberList},","}),
+ false;
+ {'ENUMERATED',NamedNumberList} ->
+ emit({"?RT_BER:decode_enumerated(",BytesVar,",",
+ {asis,Constraint},",",
+ {asis,NamedNumberList},","}),
+ false;
+ {'BIT STRING',NamedNumberList} ->
+ case get(compact_bit_string) of
+ true ->
+ emit({"?RT_BER:decode_compact_bit_string(",
+ BytesVar,",",{asis,Constraint},",",
+ {asis,NamedNumberList},","});
+ _ ->
+ emit({"?RT_BER:decode_bit_string(",BytesVar,",",
+ {asis,Constraint},",",
+ {asis,NamedNumberList},","})
+ end,
+ true;
+ 'NULL' ->
+ emit({"?RT_BER:decode_null(",BytesVar,","}),
+ false;
+ 'OBJECT IDENTIFIER' ->
+ emit({"?RT_BER:decode_object_identifier(",BytesVar,","}),
+ false;
+ 'ObjectDescriptor' ->
+ emit({"?RT_BER:decode_restricted_string(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_ObjectDescriptor},","}),
+ true;
+ 'OCTET STRING' ->
+ emit({"?RT_BER:decode_octet_string",AsBin,"(",BytesVar,",",{asis,Constraint},","}),
+ true;
+ 'NumericString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_NumericString},","}),true;
+ 'TeletexString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_TeletexString},","}),
+ true;
+ 'VideotexString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_VideotexString},","}),
+ true;
+ 'GraphicString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_GraphicString},","})
+ ,true;
+ 'VisibleString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_VisibleString},","}),
+ true;
+ 'GeneralString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_GeneralString},","}),
+ true;
+ 'PrintableString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_PrintableString},","}),
+ true;
+ 'IA5String' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_IA5String},","}),
+ true;
+ 'UniversalString' ->
+ emit({"?RT_BER:decode_universal_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},","}),
+ true;
+ 'BMPString' ->
+ emit({"?RT_BER:decode_BMP_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},","}),
+ true;
+ 'UTCTime' ->
+ emit({"?RT_BER:decode_utc_time",AsBin,"(",
+ BytesVar,",",{asis,Constraint},","}),
+ true;
+ 'GeneralizedTime' ->
+ emit({"?RT_BER:decode_generalized_time",AsBin,"(",
+ BytesVar,",",{asis,Constraint},","}),
+ true;
+ 'ASN1_OPEN_TYPE' ->
+ emit(["?RT_BER:decode_open_type(",re_wrap_erule(Erules),",",
+ BytesVar,","]),
+ false;
+ Other ->
+ exit({'can not decode' ,Other})
+ end,
+
+ NewLength = case DoLength of
+ true -> [", ", Length];
+ false -> ""
+ end,
+ NewOptOrMand = case OptOrMand of
+ _ when list(OptOrMand) -> OptOrMand;
+ mandatory -> {asis,mandatory};
+ _ -> {asis,opt_or_default}
+ end,
+ case {TagIn,NewTypeName} of
+ {[],'ASN1_OPEN_TYPE'} ->
+ emit([{asis,DoTag},")"]);
+ {_,'ASN1_OPEN_TYPE'} ->
+ emit([TagIn,"++",{asis,DoTag},")"]);
+ {[],_} ->
+ emit([{asis,DoTag},NewLength,", ",NewOptOrMand,")"]);
+ _ when list(TagIn) ->
+ emit([TagIn,"++",{asis,DoTag},NewLength,", ",NewOptOrMand,")"])
+ end.
+
+
+int_constr([],[]) ->
+ [];
+int_constr([],ValueRange) ->
+ ValueRange;
+int_constr(SingleValue,[]) ->
+ SingleValue;
+int_constr(SV,VR) ->
+ [SV,VR].
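+
+%% Illustrative note: int_constr/2 above merges the 'SingleValue' and
+%% 'ValueRange' constraints extracted in gen_dec_prim; e.g. (hypothetical
+%% constraint values)
+%%   int_constr([], {1,10}) -> {1,10}
+%%   int_constr(5, [])      -> 5
+%%   int_constr(5, {1,10})  -> [5,{1,10}]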
+
+%% Code generation for object encoding and decoding
+%% ------------------------------------------------
+
+gen_obj_code(Erules,_Module,Obj) when record(Obj,typedef) ->
+ ObjName = Obj#typedef.name,
+ Def = Obj#typedef.typespec,
+ #'Externaltypereference'{module=M,type=ClName} = Def#'Object'.classname,
+ Class = asn1_db:dbget(M,ClName),
+
+ {object,_,Fields} = Def#'Object'.def,
+ emit({nl,nl,nl,"%%================================"}),
+ emit({nl,"%% ",ObjName}),
+ emit({nl,"%%================================",nl}),
+ EncConstructed =
+ gen_encode_objectfields(ClName,get_class_fields(Class),
+ ObjName,Fields,[]),
+ emit(nl),
+ gen_encode_constr_type(Erules,EncConstructed),
+ emit(nl),
+ DecConstructed =
+ gen_decode_objectfields(ClName,get_class_fields(Class),
+ ObjName,Fields,[]),
+ emit(nl),
+ gen_decode_constr_type(Erules,DecConstructed);
+gen_obj_code(_Erules,_Module,Obj) when record(Obj,pobjectdef) ->
+ ok.
+
+
+gen_encode_objectfields(ClassName,[{typefield,Name,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Args) ->
+ emit(["'enc_",ObjName,"'(",{asis,Name},
+ ", ",Args,", _RestPrimFieldName) ->",nl])
+ end,
+% emit(["'enc_",ObjName,"'(",{asis,Name},
+% ", Val, TagIn, _RestPrimFieldName) ->",nl]),
+ MaybeConstr=
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} -> %% this case is illegal
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_, _"),
+ emit([" {[],0}"]),
+ [];
+ {false,{'DEFAULT',DefaultType}} ->
+ EmitFuncClause("Val, TagIn"),
+ gen_encode_default_call(ClassName,Name,DefaultType);
+ {{Name,TypeSpec},_} ->
+	    %% A specified field overwrites any 'DEFAULT' or
+ %% 'OPTIONAL' field in the class
+ EmitFuncClause("Val, TagIn"),
+ gen_encode_field_call(ObjName,Name,TypeSpec)
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_encode_objectfields(ClassName,Rest,ObjName,ObjectFields,
+ MaybeConstr++ConstrAcc);
+gen_encode_objectfields(ClassName,[{objectfield,Name,_,_,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Args) ->
+ emit(["'enc_",ObjName,"'(",{asis,Name},
+ ", ",Args,") ->",nl])
+ end,
+% emit(["'enc_",ObjName,"'(",{asis,Name},
+% ", Val, TagIn, [H|T]) ->",nl]),
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} ->
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_,_,_"),
+ emit([" exit({error,{'use of missing field in object', ",Name,
+ "}})"]);
+ {false,{'DEFAULT',_DefaultObject}} ->
+ exit({error,{asn1,{"not implemented yet",Name}}});
+ {{Name,TypeSpec},_} ->
+ EmitFuncClause(" Val, TagIn, [H|T]"),
+ case TypeSpec#typedef.name of
+ {ExtMod,TypeName} ->
+ emit({indent(3),"'",ExtMod,"':'enc_",TypeName,
+ "'(H, Val, TagIn, T)"});
+ TypeName ->
+ emit({indent(3),"'enc_",TypeName,"'(H, Val, TagIn, T)"})
+ end
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_encode_objectfields(ClassName,Rest,ObjName,ObjectFields,ConstrAcc);
+gen_encode_objectfields(ClassName,[_|Cs],O,OF,Acc) ->
+ gen_encode_objectfields(ClassName,Cs,O,OF,Acc);
+gen_encode_objectfields(_,[],_,_,Acc) ->
+ Acc.
+
+
+% gen_encode_objectfields(Class,ObjName,[{FieldName,Type}|Rest],ConstrAcc) ->
+% Fields = Class#objectclass.fields,
+% MaybeConstr=
+% case is_typefield(Fields,FieldName) of
+% true ->
+% Def = Type#typedef.typespec,
+% OTag = Def#type.tag,
+% Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+% emit({"'enc_",ObjName,"'(",{asis,FieldName},
+% ", Val, TagIn, RestPrimFieldName) ->",nl}),
+% CAcc=
+% case Type#typedef.name of
+% {primitive,bif} ->
+% gen_encode_prim(ber,Def,["TagIn ++ ",{asis,Tag}],
+% "Val"),
+% [];
+% {constructed,bif} ->
+% %%InnerType = asn1ct_gen:get_inner(Def#type.def),
+% %%asn1ct_gen:gen_encode_constructed(ber,[ObjName],
+% %% InnerType,Def);
+% emit({" 'enc_",ObjName,'_',FieldName,
+% "'(Val, TagIn ++ ",{asis,Tag},")"}),
+% [{['enc_',ObjName,'_',FieldName],Def}];
+% {ExtMod,TypeName} ->
+% emit({" '",ExtMod,"':'enc_",TypeName,
+% "'(Val, TagIn ++ ",{asis,Tag},")"}),
+% [];
+% TypeName ->
+% emit({" 'enc_",TypeName,"'(Val, TagIn ++ ",
+% {asis,Tag},")"}),
+% []
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% CAcc;
+% {false,objectfield} ->
+% emit({"'enc_",ObjName,"'(",{asis,FieldName},
+% ", Val, TagIn, [H|T]) ->",nl}),
+% case Type#typedef.name of
+% {ExtMod,TypeName} ->
+% emit({indent(3),"'",ExtMod,"':'enc_",TypeName,
+% "'(H, Val, TagIn, T)"});
+% TypeName ->
+% emit({indent(3),"'enc_",TypeName,"'(H, Val, TagIn, T)"})
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% [];
+% {false,_} -> []
+% end,
+% gen_encode_objectfields(Class,ObjName,Rest,MaybeConstr ++ ConstrAcc);
+% gen_encode_objectfields(C,O,[H|T],Acc) ->
+% gen_encode_objectfields(C,O,T,Acc);
+% gen_encode_objectfields(_,_,[],Acc) ->
+% Acc.
+
+% gen_encode_constr_type([{Name,Def}|Rest]) ->
+% emit({Name,"(Val,TagIn) ->",nl}),
+% InnerType = asn1ct_gen:get_inner(Def#type.def),
+% asn1ct_gen:gen_encode_constructed(ber,Name,InnerType,Def),
+% gen_encode_constr_type(Rest);
+gen_encode_constr_type(Erules,[TypeDef|Rest]) when record(TypeDef,typedef) ->
+ case is_already_generated(enc,TypeDef#typedef.name) of
+ true -> ok;
+ _ -> gen_encode_user(Erules,TypeDef)
+ end,
+ gen_encode_constr_type(Erules,Rest);
+gen_encode_constr_type(_,[]) ->
+ ok.
+
+gen_encode_field_call(ObjName,FieldName,Type) ->
+ Def = Type#typedef.typespec,
+ OTag = Def#type.tag,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ case Type#typedef.name of
+ {primitive,bif} -> %%tag should be the primitive tag
+ gen_encode_prim(ber,Def,["TagIn ++ ",{asis,Tag}],
+ "Val"),
+ [];
+ {constructed,bif} ->
+ emit({" 'enc_",ObjName,'_',FieldName,
+ "'(Val, TagIn ++",{asis,Tag},")"}),
+ [Type#typedef{name=list_to_atom(lists:concat([ObjName,'_',FieldName]))}];
+ {ExtMod,TypeName} ->
+ emit({" '",ExtMod,"':'enc_",TypeName,
+ "'(Val, TagIn ++ ",{asis,Tag},")"}),
+ [];
+ TypeName ->
+ emit({" 'enc_",TypeName,"'(Val, TagIn ++ ",{asis,Tag},")"}),
+ []
+ end.
+
+gen_encode_default_call(ClassName,FieldName,Type) ->
+ CurrentMod = get(currmod),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ OTag = Type#type.tag,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+%% asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,Type);
+ emit([" 'enc_",ClassName,'_',FieldName,"'(Bytes, TagIn ++ ",
+ {asis,Tag},")"]),
+ [#typedef{name=list_to_atom(lists:concat([ClassName,'_',FieldName])),
+ typespec=Type}];
+ {primitive,bif} ->
+ gen_encode_prim(ber,Type,["TagIn ++ ",{asis,Tag}],"Val"),
+ [];
+ #'Externaltypereference'{module=CurrentMod,type=Etype} ->
+ emit([" 'enc_",Etype,"'(Val, TagIn ++ ",{asis,Tag},")",nl]),
+ [];
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit([" '",Emod,"':'enc_",Etype,"'(Val, TagIn ++ ",{asis,Tag},")",nl]),
+ []
+ end.
+
+
+
+gen_decode_objectfields(ClassName,[{typefield,Name,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Args) ->
+ emit(["'dec_",ObjName,"'(",{asis,Name},
+ ", ",Args,"_) ->",nl])
+ end,
+% emit(["'dec_",ObjName,"'(",{asis,Name},
+% ", Bytes, TagIn, RestPrimFieldName) ->",nl]),
+ MaybeConstr=
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} -> %% this case is illegal
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_, _,"),
+ emit([" asn1_NOVALUE"]),
+ [];
+ {false,{'DEFAULT',DefaultType}} ->
+ EmitFuncClause("Bytes, TagIn,"),
+ gen_decode_default_call(ClassName,Name,"Bytes",DefaultType);
+ {{Name,TypeSpec},_} ->
+	    %% A specified field overwrites any 'DEFAULT' or
+ %% 'OPTIONAL' field in the class
+ EmitFuncClause("Bytes, TagIn,"),
+ gen_decode_field_call(ObjName,Name,"Bytes",TypeSpec)
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_decode_objectfields(ClassName,Rest,ObjName,ObjectFields,MaybeConstr++ConstrAcc);
+gen_decode_objectfields(ClassName,[{objectfield,Name,_,_,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Args) ->
+ emit(["'dec_",ObjName,"'(",{asis,Name},
+ ", ",Args,") ->",nl])
+ end,
+% emit(["'dec_",ObjName,"'(",{asis,Name},
+% ", Bytes,TagIn,[H|T]) ->",nl]),
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} ->
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_,_,_"),
+ emit([" exit({error,{'illegal use of missing field in object', ",Name,
+ "}})"]);
+ {false,{'DEFAULT',_DefaultObject}} ->
+ exit({error,{asn1,{"not implemented yet",Name}}});
+ {{Name,TypeSpec},_} ->
+ EmitFuncClause("Bytes,TagIn,[H|T]"),
+ case TypeSpec#typedef.name of
+ {ExtMod,TypeName} ->
+ emit({indent(3),"'",ExtMod,"':'dec_",TypeName,
+ "'(H, Bytes, TagIn, T)"});
+ TypeName ->
+ emit({indent(3),"'dec_",TypeName,"'(H, Bytes, TagIn, T)"})
+ end
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_decode_objectfields(ClassName,Rest,ObjName,ObjectFields,ConstrAcc);
+gen_decode_objectfields(CN,[_|Cs],O,OF,CAcc) ->
+ gen_decode_objectfields(CN,Cs,O,OF,CAcc);
+gen_decode_objectfields(_,[],_,_,CAcc) ->
+ CAcc.
+
+
+
+% gen_decode_objectfields(Erules,Class,ObjName,[{FieldName,Type}|Rest],ConstrAcc) ->
+% Fields = Class#objectclass.fields,
+% MaybeConstr =
+% case is_typefield(Fields,FieldName) of
+% true ->
+% Def = Type#typedef.typespec,
+% emit({"'dec_",ObjName,"'(",{asis,FieldName},
+% ", Bytes, TagIn, RestPrimFieldName) ->",nl}),
+% OTag = Def#type.tag,
+% Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+% Prop =
+% case get_optionalityspec(Fields,FieldName) of
+% 'OPTIONAL' -> opt_or_default;
+% {'DEFAULT',_} -> opt_or_default;
+% _ -> mandatory
+% end,
+% CAcc =
+% case Type#typedef.name of
+% {primitive,bif} ->
+% gen_dec_prim(Erules,Def,"Bytes",Tag,"TagIn",no_length,
+% ?PRIMITIVE,Prop),
+% [];
+% {constructed,bif} ->
+% emit({" 'dec_",ObjName,'_',FieldName,"'(Bytes,",
+% {asis,Prop},", TagIn ++ ",{asis,Tag},")"}),
+% [{['dec_',ObjName,'_',FieldName],Def}];
+% {ExtMod,TypeName} ->
+% emit({" '",ExtMod,"':'dec_",TypeName,"'(Bytes, ",
+% {asis,Prop},", TagIn ++ ",{asis,Tag},")"}),
+% [];
+% TypeName ->
+% emit({" 'dec_",TypeName,"'(Bytes, ",{asis,Prop},
+% ", TagIn ++ ",{asis,Tag},")"}),
+% []
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% CAcc;
+% {false,objectfield} ->
+% emit({"'dec_",ObjName,"'(",{asis,FieldName},
+% ", Bytes, TagIn, [H|T]) ->",nl}),
+% case Type#typedef.name of
+% {ExtMod,TypeName} ->
+% emit({indent(3),"'",ExtMod,"':'dec_",TypeName,
+% "'(H, Bytes, TagIn, T)"});
+% TypeName ->
+% emit({indent(3),"'dec_",TypeName,
+% "'(H, Bytes, TagIn, T)"})
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% [];
+% {false,_} ->
+% []
+% end,
+% gen_decode_objectfields(Erules,Class,ObjName,Rest,MaybeConstr ++ ConstrAcc);
+% gen_decode_objectfields(Erules,C,O,[H|T],CAcc) ->
+% gen_decode_objectfields(Erules,C,O,T,CAcc);
+% gen_decode_objectfields(_,_,_,[],CAcc) ->
+% CAcc.
+
+gen_decode_constr_type(Erules,[{Name,Def}|Rest]) ->
+%% emit({Name,"(Bytes, OptOrMand) ->",nl}),
+%% emit({" ",Name,"(Bytes, OptOrMand, []).",nl,nl}),
+ emit({Name,"(Bytes, OptOrMand, TagIn) ->",nl}),
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ asn1ct_gen:gen_decode_constructed(ber,Name,InnerType,Def),
+ gen_decode_constr_type(Erules,Rest);
+gen_decode_constr_type(Erules,[TypeDef|Rest]) when record(TypeDef,typedef) ->
+ case is_already_generated(dec,TypeDef#typedef.name) of
+ true -> ok;
+ _ ->
+ gen_decode(Erules,TypeDef)
+ end,
+ gen_decode_constr_type(Erules,Rest);
+gen_decode_constr_type(_,[]) ->
+ ok.
+
+gen_decode_field_call(ObjName,FieldName,Bytes,Type) ->
+ Def = Type#typedef.typespec,
+ OTag = Def#type.tag,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ case Type#typedef.name of
+ {primitive,bif} -> %%tag should be the primitive tag
+ gen_dec_prim(ber,Def,Bytes,Tag,"TagIn",no_length,
+ ?PRIMITIVE,opt_or_default),
+ [];
+ {constructed,bif} ->
+ emit({" 'dec_",ObjName,'_',FieldName,
+ "'(",Bytes,",opt_or_default, TagIn ++ ",{asis,Tag},")"}),
+ [Type#typedef{name=list_to_atom(lists:concat([ObjName,'_',FieldName]))}];
+ {ExtMod,TypeName} ->
+ emit({" '",ExtMod,"':'dec_",TypeName,
+ "'(",Bytes,", opt_or_default,TagIn ++ ",{asis,Tag},")"}),
+ [];
+ TypeName ->
+ emit({" 'dec_",TypeName,"'(",Bytes,
+ ", opt_or_default,TagIn ++ ",{asis,Tag},")"}),
+ []
+ end.
+
+gen_decode_default_call(ClassName,FieldName,Bytes,Type) ->
+ CurrentMod = get(currmod),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ OTag = Type#type.tag,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ emit([" 'dec_",ClassName,'_',FieldName,"'(",Bytes,
+ ",opt_or_default, TagIn ++ ",{asis,Tag},")"]),
+ [#typedef{name=list_to_atom(lists:concat([ClassName,'_',FieldName])),
+ typespec=Type}];
+ {primitive,bif} ->
+ gen_dec_prim(ber,Type,Bytes,Tag,"TagIn",no_length,
+ ?PRIMITIVE,opt_or_default),
+ [];
+ #'Externaltypereference'{module=CurrentMod,type=Etype} ->
+ emit([" 'dec_",Etype,"'(",Bytes,
+ " ,opt_or_default, TagIn ++ ",{asis,Tag},")",nl]),
+ [];
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit([" '",Emod,"':'dec_",Etype,"'(",Bytes,
+		  ", opt_or_default, TagIn ++ ",{asis,Tag},")",nl]),
+ []
+ end.
+
+
+more_genfields([]) ->
+ false;
+more_genfields([Field|Fields]) ->
+ case element(1,Field) of
+ typefield ->
+ true;
+ objectfield ->
+ true;
+ _ ->
+ more_genfields(Fields)
+ end.
+
+
+
+%% Code generation for object set encoding and decoding
+%% ----------------------------------------------------
+gen_objectset_code(Erules,ObjSet) ->
+ ObjSetName = ObjSet#typedef.name,
+ Def = ObjSet#typedef.typespec,
+% {ClassName,ClassDef} = Def#'ObjectSet'.class,
+ #'Externaltypereference'{module=ClassModule,
+ type=ClassName} = Def#'ObjectSet'.class,
+ ClassDef = asn1_db:dbget(ClassModule,ClassName),
+ UniqueFName = Def#'ObjectSet'.uniquefname,
+ Set = Def#'ObjectSet'.set,
+ emit({nl,nl,nl,"%%================================"}),
+ emit({nl,"%% ",ObjSetName}),
+ emit({nl,"%%================================",nl}),
+ case ClassName of
+ {_Module,ExtClassName} ->
+ gen_objset_code(Erules,ObjSetName,UniqueFName,Set,
+ ExtClassName,ClassDef);
+ _ ->
+ gen_objset_code(Erules,ObjSetName,UniqueFName,Set,
+ ClassName,ClassDef)
+ end,
+ emit(nl).
+
+gen_objset_code(Erules,ObjSetName,UniqueFName,Set,ClassName,ClassDef)->
+ ClassFields = (ClassDef#classdef.typespec)#objectclass.fields,
+ InternalFuncs=gen_objset_enc(ObjSetName,UniqueFName,Set,ClassName,ClassFields,1,[]),
+ gen_objset_dec(Erules,ObjSetName,UniqueFName,Set,ClassName,ClassFields,1),
+ gen_internal_funcs(Erules,InternalFuncs).
+
+%% gen_objset_enc iterates over the objects of the object set
+gen_objset_enc(_,{unique,undefined},_,_,_,_,_) ->
+    %% There is no unique field in the class of this object set;
+    %% don't bother with the constraint.
+ [];
+gen_objset_enc(ObjSName,UniqueName,
+ [{ObjName,Val,Fields},T|Rest],ClName,ClFields,NthObj,Acc)->
+ emit({"'getenc_",ObjSName,"'(",{asis,UniqueName},",",{asis,Val},") ->",nl}),
+ {InternalFunc,NewNthObj}=
+ case ObjName of
+ no_name ->
+ gen_inlined_enc_funs(Fields,ClFields,ObjSName,NthObj);
+ _Other ->
+ emit({" fun 'enc_",ObjName,"'/4"}),
+ {[],NthObj}
+ end,
+ emit({";",nl}),
+ gen_objset_enc(ObjSName,UniqueName,[T|Rest],ClName,ClFields,
+ NewNthObj,InternalFunc ++ Acc);
+gen_objset_enc(ObjSetName,UniqueName,
+ [{ObjName,Val,Fields}],_ClName,ClFields,NthObj,Acc) ->
+ emit({"'getenc_",ObjSetName,"'(",{asis,UniqueName},",",{asis,Val},") ->",nl}),
+ {InternalFunc,_}=
+ case ObjName of
+ no_name ->
+ gen_inlined_enc_funs(Fields,ClFields,ObjSetName,NthObj);
+ _Other ->
+ emit({" fun 'enc_",ObjName,"'/4"}),
+ {[],NthObj}
+ end,
+ emit({".",nl,nl}),
+ InternalFunc ++ Acc;
+%% See X.681 Annex E for the following case
+gen_objset_enc(ObjSetName,_UniqueName,['EXTENSIONMARK'],
+ _ClName,_ClFields,_NthObj,Acc) ->
+ emit({"'getenc_",ObjSetName,"'(_, _) ->",nl}),
+ emit({indent(3),"fun(_Attr, Val, _TagIn, _RestPrimFieldName) ->",nl}),
+ emit({indent(6),"Len = case Val of",nl,indent(9),
+ "Bin when binary(Bin) -> size(Bin);",nl,indent(9),
+ "_ -> length(Val)",nl,indent(6),"end,"}),
+ emit({indent(6),"{Val,Len}",nl}),
+ emit({indent(3),"end.",nl,nl}),
+ Acc;
+gen_objset_enc(_,_,[],_,_,_,Acc) ->
+ Acc.
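+
+%% Illustrative sketch (hypothetical object set 'MyObjSet' with unique
+%% field id and named objects obj1/obj2): the clauses above generate
+%% roughly
+%%
+%%   'getenc_MyObjSet'(id, 1) ->
+%%       fun 'enc_obj1'/4;
+%%   'getenc_MyObjSet'(id, 2) ->
+%%       fun 'enc_obj2'/4.
+%%
+%% Unnamed objects get an inlined fun instead, built by
+%% gen_inlined_enc_funs below.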
+
+%% gen_inlined_enc_funs for each object iterates over all fields of a
+%% class, and for each typefield it checks if the object has that
+%% field and emits the proper code.
+gen_inlined_enc_funs(Fields,[{typefield,Name,_}|Rest],ObjSetName,
+ NthObj) ->
+ InternalDefFunName = asn1ct_gen:list2name([NthObj,Name,ObjSetName]),
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({indent(3),"fun(Type, Val, TagIn, _RestPrimFieldName) ->",nl,
+ indent(6),"case Type of",nl}),
+ {Ret,N} = emit_inner_of_fun(Type,InternalDefFunName),
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+N,Ret);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({indent(3),"fun(Type, Val, TagIn, _RestPrimFieldName) ->",nl,
+ indent(6),"case Type of",nl}),
+ emit({indent(9),{asis,Name}," ->",nl}),
+ {Ret,N} = emit_inner_of_fun(Type,InternalDefFunName),
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+N,Ret);
+ false ->
+ gen_inlined_enc_funs(Fields,Rest,ObjSetName,NthObj)
+ end;
+gen_inlined_enc_funs(Fields,[_H|Rest],ObjSetName,NthObj) ->
+ gen_inlined_enc_funs(Fields,Rest,ObjSetName,NthObj);
+gen_inlined_enc_funs(_,[],_,NthObj) ->
+ {[],NthObj}.
+
+gen_inlined_enc_funs1(Fields,[{typefield,Name,_}|Rest],ObjSetName,
+ NthObj,Acc) ->
+ InternalDefFunName = asn1ct_gen:list2name([NthObj,Name,ObjSetName]),
+ {Acc2,NAdd}=
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({";",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ {Ret++Acc,N};
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({";",nl,indent(9),{asis,Name}," ->",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ {Ret++Acc,N};
+ false ->
+ {Acc,0}
+ end,
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+NAdd,Acc2);
+gen_inlined_enc_funs1(Fields,[_H|Rest],ObjSetName,NthObj,Acc)->
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj,Acc);
+gen_inlined_enc_funs1(_,[],_,NthObj,Acc) ->
+ emit({nl,indent(6),"end",nl}),
+ emit({indent(3),"end"}),
+ {Acc,NthObj}.
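+
+%% Illustrative sketch: for an unnamed object the two functions above emit
+%% an anonymous fun of roughly this shape (field and function names are
+%% hypothetical, and the exact arm depends on the field kind):
+%%
+%%   fun(Type, Val, TagIn, _RestPrimFieldName) ->
+%%       case Type of
+%%           field1 -> 'enc_internal1'(Val, TagIn ++ Tag1);
+%%           field2 -> 'enc_internal2'(Val, TagIn ++ Tag2)
+%%       end
+%%   end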
+
+
+emit_inner_of_fun(TDef = #typedef{name={ExtMod,Name},typespec=Type},
+ InternalDefFunName) ->
+ OTag = Type#type.tag,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ case {ExtMod,Name} of
+ {primitive,bif} ->
+ emit(indent(12)),
+ gen_encode_prim(ber,Type,["TagIn ++ ",{asis,Tag}],"Val"),
+ {[],0};
+ {constructed,bif} ->
+ emit([indent(12),"'enc_",
+ InternalDefFunName,"'(Val,TagIn ++ ",
+ {asis,Tag},")"]),
+ {[TDef#typedef{name=InternalDefFunName}],1};
+ _ ->
+ emit({indent(12),"'",ExtMod,"':'enc_",Name,"'(Val, TagIn ++ ",
+ {asis,Tag},")"}),
+ {[],0}
+ end;
+emit_inner_of_fun(#typedef{name=Name,typespec=Type},_) ->
+ OTag = Type#type.tag,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ emit({indent(12),"'enc_",Name,"'(Val, TagIn ++ ",{asis,Tag},")"}),
+ {[],0};
+emit_inner_of_fun(Type,_) when record(Type,type) ->
+ CurrMod = get(currmod),
+ OTag = Type#type.tag,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ case Type#type.def of
+ Def when atom(Def) ->
+ emit({indent(9),Def," ->",nl,indent(12)}),
+ gen_encode_prim(ber,Type,["TagIn ++ ",{asis,Tag}],"Val");
+ TRef when record(TRef,typereference) ->
+ T = TRef#typereference.val,
+ emit({indent(9),T," ->",nl,indent(12),"'enc_",T,
+ "'(Val, TagIn ++ ",{asis,Tag},")"});
+ #'Externaltypereference'{module=CurrMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),"'enc_",T,
+ "'(Val, TagIn ++ ",{asis,Tag},")"});
+ #'Externaltypereference'{module=ExtMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),ExtMod,":'enc_",
+ T,"'(Val, TagIn ++ ",{asis,Tag},")"})
+ end,
+ {[],0}.
+
+indent(N) ->
+ lists:duplicate(N,32). % 32 = space
+
+
+gen_objset_dec(_,_,{unique,undefined},_,_,_,_) ->
+    %% There is no unique field in the class of this object set;
+    %% don't bother with the constraint.
+ ok;
+gen_objset_dec(Erules,ObjSName,UniqueName,[{ObjName,Val,Fields},T|Rest],
+ ClName,ClFields,NthObj)->
+ emit({"'getdec_",ObjSName,"'(",{asis,UniqueName},",",{asis,Val},
+ ") ->",nl}),
+ NewNthObj=
+ case ObjName of
+ no_name ->
+ gen_inlined_dec_funs(Erules,Fields,ClFields,ObjSName,
+ NthObj);
+ _Other ->
+ emit({" fun 'dec_",ObjName,"'/4"}),
+ NthObj
+ end,
+ emit({";",nl}),
+ gen_objset_dec(Erules,ObjSName,UniqueName,[T|Rest],ClName,ClFields,
+ NewNthObj);
+gen_objset_dec(Erules,ObjSetName,UniqueName,[{ObjName,Val,Fields}],_ClName,
+ ClFields,NthObj) ->
+ emit({"'getdec_",ObjSetName,"'(",{asis,UniqueName},",",{asis,Val},") ->",nl}),
+ case ObjName of
+ no_name ->
+ gen_inlined_dec_funs(Erules,Fields,ClFields,ObjSetName,
+ NthObj);
+ _Other ->
+ emit({" fun 'dec_",ObjName,"'/4"})
+ end,
+ emit({".",nl,nl});
+gen_objset_dec(_,ObjSetName,_UniqueName,['EXTENSIONMARK'],_ClName,_ClFields,
+ _NthObj) ->
+ emit({"'getdec_",ObjSetName,"'(_, _) ->",nl}),
+ emit({indent(3),"fun(_, Bytes, _, _) ->",nl}),
+ emit({indent(6),"Len = case Bytes of",nl,indent(9),
+ "Bin when binary(Bin) -> size(Bin);",nl,indent(9),
+ "_ -> length(Bytes)",nl,indent(6),"end,"}),
+ emit({indent(6),"{Bytes,[],Len}",nl}),
+ emit({indent(3),"end.",nl,nl}),
+ ok;
+gen_objset_dec(_,_,_,[],_,_,_) ->
+ ok.
+
+gen_inlined_dec_funs(Erules,Fields,[{typefield,Name,Prop}|Rest],
+ ObjSetName,NthObj) ->
+ DecProp = case Prop of
+ 'OPTIONAL' -> opt_or_default;
+ {'DEFAULT',_} -> opt_or_default;
+ _ -> mandatory
+ end,
+ InternalDefFunName = [NthObj,Name,ObjSetName],
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({indent(3),"fun(Type, Bytes, TagIn, _RestPrimFieldName) ->",
+ nl,indent(6),"case Type of",nl}),
+ N=emit_inner_of_decfun(Erules,Type,DecProp,InternalDefFunName),
+ gen_inlined_dec_funs1(Erules,Fields,Rest,ObjSetName,NthObj+N);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({indent(3),"fun(Type, Bytes, TagIn, _RestPrimFieldName) ->",
+ nl,indent(6),"case Type of",nl}),
+ emit({indent(9),{asis,Name}," ->",nl}),
+ N=emit_inner_of_decfun(Erules,Type,DecProp,InternalDefFunName),
+ gen_inlined_dec_funs1(Erules,Fields,Rest,ObjSetName,NthObj+N);
+ false ->
+ gen_inlined_dec_funs(Erules,Fields,Rest,ObjSetName,NthObj)
+ end;
+gen_inlined_dec_funs(Erules,Fields,[_H|Rest],ObjSetName,NthObj) ->
+ gen_inlined_dec_funs(Erules,Fields,Rest,ObjSetName,NthObj);
+gen_inlined_dec_funs(_,_,[],_,NthObj) ->
+ NthObj.
+
+gen_inlined_dec_funs1(Erules,Fields,[{typefield,Name,Prop}|Rest],
+ ObjSetName,NthObj) ->
+ DecProp = case Prop of
+ 'OPTIONAL' -> opt_or_default;
+ {'DEFAULT',_} -> opt_or_default;
+ _ -> mandatory
+ end,
+ InternalDefFunName = [NthObj,Name,ObjSetName],
+ N=
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({";",nl}),
+ emit_inner_of_decfun(Erules,Type,DecProp,InternalDefFunName);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({";",nl,indent(9),{asis,Name}," ->",nl}),
+ emit_inner_of_decfun(Erules,Type,DecProp,InternalDefFunName);
+ false ->
+ 0
+ end,
+ gen_inlined_dec_funs1(Erules,Fields,Rest,ObjSetName,NthObj+N);
+gen_inlined_dec_funs1(Erules,Fields,[_H|Rest],ObjSetName,NthObj)->
+ gen_inlined_dec_funs1(Erules,Fields,Rest,ObjSetName,NthObj);
+gen_inlined_dec_funs1(_,_,[],_,NthObj) ->
+ emit({nl,indent(6),"end",nl}),
+ emit({indent(3),"end"}),
+ NthObj.
+
+emit_inner_of_decfun(Erules,#typedef{name={ExtName,Name},typespec=Type},
+ Prop,InternalDefFunName) ->
+ OTag = Type#type.tag,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ case {ExtName,Name} of
+ {primitive,bif} ->
+ emit(indent(12)),
+ gen_dec_prim(Erules,Type,"Bytes",Tag,"TagIn",no_length,
+ ?PRIMITIVE,Prop),
+ 0;
+ {constructed,bif} ->
+ emit({indent(12),"'dec_",
+ asn1ct_gen:list2name(InternalDefFunName),"'(Bytes, ",Prop,
+ ", TagIn ++ ",{asis,Tag},")"}),
+ 1;
+ _ ->
+ emit({indent(12),"'",ExtName,"':'dec_",Name,"'(Bytes, ",Prop,
+ ", TagIn ++ ",{asis,Tag},")"}),
+ 0
+ end;
+emit_inner_of_decfun(_,#typedef{name=Name,typespec=Type},Prop,_) ->
+ OTag = Type#type.tag,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ emit({indent(12),"'dec_",Name,"'(Bytes, ",Prop,", TagIn ++ ",
+ {asis,Tag},")"}),
+ 0;
+emit_inner_of_decfun(Erules,Type,Prop,_) when record(Type,type) ->
+ OTag = Type#type.tag,
+ Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ CurrMod = get(currmod),
+ Def = Type#type.def,
+ InnerType = asn1ct_gen:get_inner(Def),
+ WhatKind = asn1ct_gen:type(InnerType),
+ case WhatKind of
+ {primitive,bif} ->
+ emit({indent(9),Def," ->",nl,indent(12)}),
+ gen_dec_prim(Erules,Type,"Bytes",Tag,"TagIn",no_length,
+ ?PRIMITIVE,Prop);
+% TRef when record(TRef,typereference) ->
+% T = TRef#typereference.val,
+% emit({indent(9),T," ->",nl,indent(12),"'dec_",T,"'(Val)"});
+ #'Externaltypereference'{module=CurrMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),"'dec_",T,
+ "'(Bytes, ",Prop,", TagIn ++ ",{asis,Tag},")"});
+ #'Externaltypereference'{module=ExtMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),ExtMod,":'dec_",
+ T,"'(Bytes, ",Prop,", TagIn ++ ",{asis,Tag},")"})
+ end,
+ 0.
+
+
+gen_internal_funcs(_,[]) ->
+ ok;
+gen_internal_funcs(Erules,[TypeDef|Rest]) ->
+ gen_encode_user(Erules,TypeDef),
+ emit({"'dec_",TypeDef#typedef.name,"'(Bytes, ",
+ unused_optormand_var("OptOrMand",(TypeDef#typedef.typespec)#type.def),", TagIn) ->",nl}),
+ gen_decode_user(Erules,TypeDef),
+ gen_internal_funcs(Erules,Rest).
+
+
+dbdec(Type) ->
+ demit({"io:format(\"decoding: ",{asis,Type},"~w~n\",[Bytes]),",nl}).
+
+
+decode_class('UNIVERSAL') ->
+ ?UNIVERSAL;
+decode_class('APPLICATION') ->
+ ?APPLICATION;
+decode_class('CONTEXT') ->
+ ?CONTEXT;
+decode_class('PRIVATE') ->
+ ?PRIVATE.
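+
+%% Illustrative note: assuming the usual BER class bit macros
+%% (?UNIVERSAL = 0, ?APPLICATION = 16#40, ?CONTEXT = 16#80,
+%% ?PRIVATE = 16#C0), e.g. decode_class('CONTEXT') -> 16#80.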
+
+decode_type('BOOLEAN') -> 1;
+decode_type('INTEGER') -> 2;
+decode_type('BIT STRING') -> 3;
+decode_type('OCTET STRING') -> 4;
+decode_type('NULL') -> 5;
+decode_type('OBJECT IDENTIFIER') -> 6;
+decode_type('OBJECT DESCRIPTOR') -> 7;
+decode_type('EXTERNAL') -> 8;
+decode_type('REAL') -> 9;
+decode_type('ENUMERATED') -> 10;
+decode_type('EMBEDDED_PDV') -> 11;
+decode_type('SEQUENCE') -> 16;
+decode_type('SEQUENCE OF') -> 16;
+decode_type('SET') -> 17;
+decode_type('SET OF') -> 17;
+decode_type('NumericString') -> 18;
+decode_type('PrintableString') -> 19;
+decode_type('TeletexString') -> 20;
+decode_type('VideotexString') -> 21;
+decode_type('IA5String') -> 22;
+decode_type('UTCTime') -> 23;
+decode_type('GeneralizedTime') -> 24;
+decode_type('GraphicString') -> 25;
+decode_type('VisibleString') -> 26;
+decode_type('GeneralString') -> 27;
+decode_type('UniversalString') -> 28;
+decode_type('BMPString') -> 30;
+decode_type('CHOICE') -> 'CHOICE'; % choice gets the tag from the actual alternative
+decode_type(Else) -> exit({error,{asn1,{unrecognized_type,Else}}}).
+
+add_removed_bytes() ->
+ asn1ct_name:delete(rb),
+ add_removed_bytes(asn1ct_name:all(rb)).
+
+add_removed_bytes([H,T1|T]) ->
+ emit({{var,H},"+"}),
+ add_removed_bytes([T1|T]);
+add_removed_bytes([H|T]) ->
+ emit({{var,H}}),
+ add_removed_bytes(T);
+add_removed_bytes([]) ->
+ true.
+
+mkfuncname(WhatKind,DecOrEnc) ->
+ case WhatKind of
+ #'Externaltypereference'{module=Mod,type=EType} ->
+ CurrMod = get(currmod),
+ case CurrMod of
+ Mod ->
+ lists:concat(["'",DecOrEnc,"_",EType,"'"]);
+ _ ->
+% io:format("CurrMod: ~p, Mod: ~p~n",[CurrMod,Mod]),
+ lists:concat(["'",Mod,"':'",DecOrEnc,"_",EType,"'"])
+ end;
+ #'typereference'{val=EType} ->
+ lists:concat(["'",DecOrEnc,"_",EType,"'"]);
+ 'ASN1_OPEN_TYPE' ->
+ lists:concat(["'",DecOrEnc,"_",WhatKind,"'"])
+
+ end.
+
+optionals(L) -> optionals(L,[],1).
+
+optionals([{'EXTENSIONMARK',_,_}|Rest],Acc,Pos) ->
+ optionals(Rest,Acc,Pos); % optionals in extension are currently not handled
+optionals([#'ComponentType'{name=Name,prop='OPTIONAL'}|Rest],Acc,Pos) ->
+ optionals(Rest,[{Name,Pos}|Acc],Pos+1);
+optionals([#'ComponentType'{name=Name,prop={'DEFAULT',_}}|Rest],Acc,Pos) ->
+ optionals(Rest,[{Name,Pos}|Acc],Pos+1);
+optionals([#'ComponentType'{}|Rest],Acc,Pos) ->
+ optionals(Rest,Acc,Pos+1);
+optionals([],Acc,_) ->
+ lists:reverse(Acc).
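+
+%% Illustrative note (hypothetical SEQUENCE { a INTEGER,
+%% b INTEGER OPTIONAL, c INTEGER DEFAULT 0 }): optionals/1 returns
+%% [{b,2},{c,3}], i.e. the names and 1-based positions of the
+%% OPTIONAL/DEFAULT components.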
+
+get_constraint(C,Key) ->
+ case lists:keysearch(Key,1,C) of
+ false ->
+ no;
+ {value,{_,V}} ->
+ V
+ end.
+
+%% If the original option was ber and it has been wrapped to ber_bin,
+%% turn it back to ber.
+re_wrap_erule(ber_bin) ->
+ case get(encoding_options) of
+ Options when list(Options) ->
+ case lists:member(ber,Options) of
+ true -> ber;
+ _ -> ber_bin
+ end;
+ _ -> ber_bin
+ end;
+re_wrap_erule(Erule) ->
+ Erule.
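+
+%% Illustrative note: with encoding options that include ber,
+%% re_wrap_erule(ber_bin) -> ber; any other erule, or a missing option
+%% list, is left unchanged (ber_bin stays ber_bin).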
+
+is_already_generated(Operation,Name) ->
+ case get(class_default_type) of
+ undefined ->
+ put(class_default_type,[{Operation,Name}]),
+ false;
+ GeneratedList ->
+ case lists:member({Operation,Name},GeneratedList) of
+ true ->
+ true;
+ false ->
+ put(class_default_type,[{Operation,Name}|GeneratedList]),
+ false
+ end
+ end.
+
+get_class_fields(#classdef{typespec=ObjClass}) ->
+ ObjClass#objectclass.fields;
+get_class_fields(#objectclass{fields=Fields}) ->
+ Fields;
+get_class_fields(_) ->
+ [].
+
+get_object_field(Name,ObjectFields) ->
+ case lists:keysearch(Name,1,ObjectFields) of
+ {value,Field} -> Field;
+ false -> false
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_gen_ber_bin_v2.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_gen_ber_bin_v2.erl
new file mode 100644
index 0000000000..89530d4017
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_gen_ber_bin_v2.erl
@@ -0,0 +1,1562 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_gen_ber_bin_v2.erl,v 1.1 2008/12/17 09:53:29 mikpe Exp $
+%%
+-module(asn1ct_gen_ber_bin_v2).
+
+%% Generate an Erlang module which handles BER encode and decode for
+%% all types in an ASN.1 module
+
+-include("asn1_records.hrl").
+
+-export([pgen/4]).
+-export([decode_class/1, decode_type/1]).
+-export([add_removed_bytes/0]).
+-export([gen_encode/2,gen_encode/3,gen_decode/2,gen_decode/3]).
+-export([gen_encode_prim/4]).
+-export([gen_dec_prim/7]).
+-export([gen_objectset_code/2, gen_obj_code/3]).
+-export([encode_tag_val/3]).
+-export([gen_inc_decode/2]).
+
+-import(asn1ct_gen, [emit/1,demit/1]).
+
+	% the encoding of the tag class, bits 8 and 7
+-define(UNIVERSAL, 0).
+-define(APPLICATION, 16#40).
+-define(CONTEXT, 16#80).
+-define(PRIVATE, 16#C0).
+
+	% primitive or constructed encoding, bit 6
+-define(PRIMITIVE, 0).
+-define(CONSTRUCTED, 2#00100000).
+
+
+-define(T_ObjectDescriptor, ?UNIVERSAL bor ?PRIMITIVE bor 7).
+ % restricted character string types
+-define(T_NumericString, ?UNIVERSAL bor ?PRIMITIVE bor 18). %can be constructed
+-define(T_PrintableString, ?UNIVERSAL bor ?PRIMITIVE bor 19). %can be constructed
+-define(T_TeletexString, ?UNIVERSAL bor ?PRIMITIVE bor 20). %can be constructed
+-define(T_VideotexString, ?UNIVERSAL bor ?PRIMITIVE bor 21). %can be constructed
+-define(T_IA5String, ?UNIVERSAL bor ?PRIMITIVE bor 22). %can be constructed
+-define(T_GraphicString, ?UNIVERSAL bor ?PRIMITIVE bor 25). %can be constructed
+-define(T_VisibleString, ?UNIVERSAL bor ?PRIMITIVE bor 26). %can be constructed
+-define(T_GeneralString, ?UNIVERSAL bor ?PRIMITIVE bor 27). %can be constructed
+
+%% pgen(Erules, Module, TypeOrVal)
+%% Generate Erlang module (.erl) and (.hrl) file corresponding to an ASN.1 module
+%% .hrl file is only generated if necessary
+%% Erules = per | ber
+%% Module = atom()
+%% TypeOrVal = {TypeList,ValueList,PTypeList}
+%% TypeList = ValueList = [atom()]
+
+pgen(OutFile,Erules,Module,TypeOrVal) ->
+ asn1ct_gen:pgen_module(OutFile,Erules,Module,TypeOrVal,true).
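+
+%% Illustrative note (hypothetical arguments): the compiler is assumed to
+%% invoke this backend as e.g.
+%%   pgen("MyModule.erl", Erules, 'MyModule', {Types,Values,PTypes})
+%% which delegates to asn1ct_gen:pgen_module/5.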
+
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Generate ENCODING
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+%%===============================================================================
+%% encode #{typedef, {pos, name, typespec}}
+%%===============================================================================
+
+gen_encode(Erules,Type) when record(Type,typedef) ->
+ gen_encode_user(Erules,Type).
+
+%%===============================================================================
+%% encode #{type, {tag, def, constraint}}
+%%===============================================================================
+
+gen_encode(Erules,Typename,Type) when record(Type,type) ->
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ ObjFun =
+ case lists:keysearch(objfun,1,Type#type.tablecinf) of
+ {value,{_,_Name}} ->
+ ", ObjFun";
+ false ->
+ ""
+ end,
+
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ emit([nl,nl,nl,"%%================================"]),
+ emit([nl,"%% ",asn1ct_gen:list2name(Typename)]),
+ emit([nl,"%%================================",nl]),
+ case length(Typename) of
+ 1 -> % top level type
+ emit(["'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val",ObjFun,") ->",nl]),
+ emit([" 'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val, ", {asis,lists:reverse(Type#type.tag)},ObjFun,").",nl,nl]);
+ _ -> % embedded type with constructed name
+ true
+ end,
+ case lists:member(InnerType,['SET','SEQUENCE']) of
+ true ->
+ case get(asn_keyed_list) of
+ true ->
+ CompList =
+ case Type#type.def of
+ #'SEQUENCE'{components=Cl} -> Cl;
+ #'SET'{components=Cl} -> Cl
+ end,
+ emit([nl,"'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val, TagIn",ObjFun,
+ ") when list(Val) ->",nl]),
+ emit([" 'enc_",asn1ct_gen:list2name(Typename),
+ "'(?RT_BER:fixoptionals(",
+ {asis,optionals(CompList)},
+ ",Val), TagIn",ObjFun,");",nl,nl]);
+ _ -> true
+ end;
+ _ ->
+ emit([nl,"'enc_",asn1ct_gen:list2name(Typename),
+ "'({'",asn1ct_gen:list2name(Typename),
+ "',Val}, TagIn",ObjFun,") ->",nl]),
+ emit([" 'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val, TagIn",ObjFun,");",nl,nl])
+ end,
+ emit(["'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val, TagIn",ObjFun,") ->",nl," "]),
+ asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,Type);
+ _ ->
+ true
+ end;
+
+%%===============================================================================
+%% encode ComponentType
+%%===============================================================================
+
+gen_encode(Erules,Tname,{'ComponentType',_Pos,Cname,Type,_Prop,_Tags}) ->
+ NewTname = [Cname|Tname],
+    %% The tag is set to [] to avoid it being taken into account
+    %% twice, both as a component/alternative (passed as argument to
+    %% the encode/decode function) and within the encode/decode
+    %% function itself.
+ NewType = Type#type{tag=[]},
+ gen_encode(Erules,NewTname,NewType).
+
+gen_encode_user(Erules,D) when record(D,typedef) ->
+ Typename = [D#typedef.name],
+ Type = D#typedef.typespec,
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ OTag = Type#type.tag,
+ Tag = [encode_tag_val(decode_class(X#tag.class),X#tag.form,X#tag.number)|| X <- OTag],
+ emit([nl,nl,"%%================================"]),
+ emit([nl,"%% ",Typename]),
+ emit([nl,"%%================================",nl]),
+ emit(["'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val",") ->",nl]),
+ emit([" 'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val, ", {asis,lists:reverse(Tag)},").",nl,nl]),
+
+ case lists:member(InnerType,['SET','SEQUENCE']) of
+ true ->
+ case get(asn_keyed_list) of
+ true ->
+ CompList =
+ case Type#type.def of
+ #'SEQUENCE'{components=Cl} -> Cl;
+ #'SET'{components=Cl} -> Cl
+ end,
+
+ emit([nl,"'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val, TagIn) when list(Val) ->",nl]),
+ emit([" 'enc_",asn1ct_gen:list2name(Typename),
+ "'(?RT_BER:fixoptionals(",
+ {asis,optionals(CompList)},
+ ",Val), TagIn);",nl,nl]);
+ _ -> true
+ end;
+ _ ->
+ emit({nl,"'enc_",asn1ct_gen:list2name(Typename),
+ "'({'",asn1ct_gen:list2name(Typename),"',Val}, TagIn) ->",nl}),
+ emit({" 'enc_",asn1ct_gen:list2name(Typename),"'(Val, TagIn);",nl,nl})
+ end,
+ emit({"'enc_",asn1ct_gen:list2name(Typename),"'(Val, TagIn) ->",nl}),
+ CurrentMod = get(currmod),
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,D);
+ {primitive,bif} ->
+ gen_encode_prim(ber,Type,"TagIn","Val"),
+ emit([".",nl]);
+ #typereference{val=Ename} ->
+ emit([" 'enc_",Ename,"'(Val, TagIn).",nl]);
+ #'Externaltypereference'{module=CurrentMod,type=Etype} ->
+ emit([" 'enc_",Etype,"'(Val, TagIn).",nl]);
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit([" '",Emod,"':'enc_",Etype,"'(Val, TagIn).",nl]);
+ 'ASN1_OPEN_TYPE' ->
+ emit(["%% OPEN TYPE",nl]),
+ gen_encode_prim(ber,
+ Type#type{def='ASN1_OPEN_TYPE'},
+ "TagIn","Val"),
+ emit([".",nl])
+ end.
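+
+%% Illustrative sketch (hypothetical top-level type T): the emits above
+%% produce roughly
+%%
+%%   'enc_T'(Val) ->
+%%       'enc_T'(Val, [..tags..]).
+%%
+%%   'enc_T'({'T',Val}, TagIn) ->
+%%       'enc_T'(Val, TagIn);
+%%   'enc_T'(Val, TagIn) ->
+%%       ...encoding of the inner type...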
+
+gen_encode_prim(_Erules,D,DoTag,Value) when record(D,type) ->
+
+%%% Constraint is currently not used for BER (except for BitString) and therefore replaced
+%%% with [] as a placeholder
+ BitStringConstraint = D#type.constraint,
+ Constraint = [],
+ asn1ct_name:new(enumval),
+ case D#type.def of
+ 'BOOLEAN' ->
+ emit_encode_func('boolean',Value,DoTag);
+ 'INTEGER' ->
+ emit_encode_func('integer',Constraint,Value,DoTag);
+ {'INTEGER',NamedNumberList} ->
+ emit_encode_func('integer',Constraint,Value,
+ NamedNumberList,DoTag);
+ {'ENUMERATED',NamedNumberList={_,_}} ->
+
+ emit(["case (case ",Value," of {asn1_enum,_}->",Value,";{_,_}->element(2,",Value,");_->",
+ Value," end) of",nl]),
+ emit_enc_enumerated_cases(NamedNumberList,DoTag);
+ {'ENUMERATED',NamedNumberList} ->
+
+ emit(["case (case ",Value," of {_,_}->element(2,",Value,");_->",
+ Value," end) of",nl]),
+ emit_enc_enumerated_cases(NamedNumberList,DoTag);
+
+ {'BIT STRING',NamedNumberList} ->
+ emit_encode_func('bit_string',BitStringConstraint,Value,
+ NamedNumberList,DoTag);
+ 'ANY' ->
+ emit_encode_func('open_type', Value,DoTag);
+ 'NULL' ->
+ emit_encode_func('null',Value,DoTag);
+ 'OBJECT IDENTIFIER' ->
+ emit_encode_func("object_identifier",Value,DoTag);
+ 'ObjectDescriptor' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_ObjectDescriptor,DoTag);
+ 'OCTET STRING' ->
+ emit_encode_func('octet_string',Constraint,Value,DoTag);
+ 'NumericString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_NumericString,DoTag);
+ 'TeletexString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_TeletexString,DoTag);
+ 'VideotexString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_VideotexString,DoTag);
+ 'GraphicString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_GraphicString,DoTag);
+ 'VisibleString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_VisibleString,DoTag);
+ 'GeneralString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_GeneralString,DoTag);
+ 'PrintableString' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_PrintableString,DoTag);
+ 'IA5String' ->
+ emit_encode_func('restricted_string',Constraint,Value,
+ ?T_IA5String,DoTag);
+ 'UniversalString' ->
+ emit_encode_func('universal_string',Constraint,Value,DoTag);
+ 'BMPString' ->
+ emit_encode_func('BMP_string',Constraint,Value,DoTag);
+ 'UTCTime' ->
+ emit_encode_func('utc_time',Constraint,Value,DoTag);
+ 'GeneralizedTime' ->
+ emit_encode_func('generalized_time',Constraint,Value,DoTag);
+ 'ASN1_OPEN_TYPE' ->
+ emit_encode_func('open_type', Value,DoTag);
+ XX ->
+ exit({'can not encode' ,XX})
+ end.
+
+
+emit_encode_func(Name,Value,Tags) when atom(Name) ->
+ emit_encode_func(atom_to_list(Name),Value,Tags);
+emit_encode_func(Name,Value,Tags) ->
+ Fname = "?RT_BER:encode_" ++ Name,
+ emit([Fname,"(",Value,", ",Tags,")"]).
+
+emit_encode_func(Name,Constraint,Value,Tags) when atom(Name) ->
+ emit_encode_func(atom_to_list(Name),Constraint,Value,Tags);
+emit_encode_func(Name,Constraint,Value,Tags) ->
+ Fname = "?RT_BER:encode_" ++ Name,
+ emit([Fname,"(",{asis,Constraint},", ",Value,", ",Tags,")"]).
+
+emit_encode_func(Name,Constraint,Value,Asis,Tags) when atom(Name) ->
+ emit_encode_func(atom_to_list(Name),Constraint,Value,Asis,Tags);
+emit_encode_func(Name,Constraint,Value,Asis,Tags) ->
+ Fname = "?RT_BER:encode_" ++ Name,
+ emit([Fname,"(",{asis,Constraint},", ",Value,
+ ", ",{asis,Asis},
+ ", ",Tags,")"]).
+
+emit_enc_enumerated_cases({L1,L2}, Tags) ->
+ emit_enc_enumerated_cases(L1++L2, Tags, ext);
+emit_enc_enumerated_cases(L, Tags) ->
+ emit_enc_enumerated_cases(L, Tags, noext).
+
+emit_enc_enumerated_cases([{EnumName,EnumVal},H2|T], Tags, Ext) ->
+ emit([{asis,EnumName}," -> ?RT_BER:encode_enumerated(",EnumVal,",",Tags,");",nl]),
+%% emit(["'",{asis,EnumName},"' -> ?RT_BER:encode_enumerated(",EnumVal,",",Tags,");",nl]),
+ emit_enc_enumerated_cases([H2|T], Tags, Ext);
+emit_enc_enumerated_cases([{EnumName,EnumVal}], Tags, Ext) ->
+ emit([{asis,EnumName}," -> ?RT_BER:encode_enumerated(",EnumVal,",",Tags,")"]),
+%% emit(["'",{asis,EnumName},"' -> ?RT_BER:encode_enumerated(",EnumVal,",",Tags,")"]),
+ case Ext of
+ noext -> emit([";",nl]);
+ ext ->
+ emit([";",nl,"{asn1_enum,",{curr,enumval},"} -> ",
+ "?RT_BER:encode_enumerated(",{curr,enumval},",",Tags,");",nl]),
+ asn1ct_name:new(enumval)
+ end,
+ emit([{curr,enumval}," -> exit({error,{asn1, {enumerated_not_in_range,",{curr, enumval},"}}})"]),
+ emit([nl,"end"]).
+
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Generate DECODING
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+%%===============================================================================
+%% decode #{typedef, {pos, name, typespec}}
+%%===============================================================================
+
+gen_decode(Erules,Type) when record(Type,typedef) ->
+ Def = Type#typedef.typespec,
+ InnerTag = Def#type.tag ,
+
+ Tag = [(decode_class(X#tag.class) bsl 10) + X#tag.number || X <- InnerTag],
+
+ Prefix =
+ case {asn1ct:get_gen_state_field(active),
+ asn1ct:get_gen_state_field(prefix)} of
+ {true,Pref} -> Pref;
+ _ -> "dec_"
+ end,
+ emit({nl,nl}),
+ emit(["'",Prefix,Type#typedef.name,"'(Tlv) ->",nl]),
+ emit([" '",Prefix,Type#typedef.name,"'(Tlv, ",{asis,Tag},").",nl,nl]),
+ emit(["'",Prefix,Type#typedef.name,"'(Tlv, TagIn) ->",nl]),
+ dbdec(Type#typedef.name),
+ gen_decode_user(Erules,Type).
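+
+%% Illustrative sketch (hypothetical type T, default "dec_" prefix): the
+%% emits above produce roughly
+%%
+%%   'dec_T'(Tlv) ->
+%%       'dec_T'(Tlv, [..tags..]).
+%%
+%%   'dec_T'(Tlv, TagIn) ->
+%%       ...body generated by gen_decode_user/2...
+%%
+%% where each tag is encoded as (class bsl 10) + tag number, as computed
+%% above.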
+
+gen_inc_decode(Erules,Type) when record(Type,typedef) ->
+ Prefix = asn1ct:get_gen_state_field(prefix),
+ emit({nl,nl}),
+ emit(["'",Prefix,Type#typedef.name,"'(Tlv, TagIn) ->",nl]),
+ gen_decode_user(Erules,Type).
+
+%%===============================================================================
+%% decode #{type, {tag, def, constraint}}
+%%===============================================================================
+
+%% This gen_decode is called by the gen_decode/3 that decodes
+%% ComponentType and the type of a SEQUENCE OF/SET OF.
+gen_decode(Erules,Tname,Type) when record(Type,type) ->
+ Typename = Tname,
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ Prefix =
+ case asn1ct:get_gen_state_field(active) of
+ true -> "'dec-inc-";
+ _ -> "'dec_"
+ end,
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ ObjFun =
+ case Type#type.tablecinf of
+ [{objfun,_}|_R] ->
+ ", ObjFun";
+ _ ->
+ ""
+ end,
+ emit([Prefix,asn1ct_gen:list2name(Typename),"'(Tlv, TagIn",ObjFun,") ->",nl]),
+ dbdec(Typename),
+ asn1ct_gen:gen_decode_constructed(Erules,Typename,InnerType,Type);
+ Rec when record(Rec,'Externaltypereference') ->
+ case {Typename,asn1ct:get_gen_state_field(namelist)} of
+	    {[Cname|_],[{Cname,_}|_]} ->
+		    %% This referenced type must only be generated once, as
+		    %% an incomplete partial decode, so check whether the
+		    %% function has already been generated.
+ case asn1ct:is_function_generated(Typename) of
+ true ->
+ ok;
+ _ ->
+ asn1ct:generated_refed_func(Typename),
+ #'Externaltypereference'{module=M,type=Name}=Rec,
+ TypeDef = asn1_db:dbget(M,Name),
+ gen_decode(Erules,TypeDef)
+ end;
+ _ ->
+ true
+ end;
+ _ ->
+ true
+ end;
+
+
+%%===============================================================================
+%% decode ComponentType
+%%===============================================================================
+
+gen_decode(Erules,Tname,{'ComponentType',_Pos,Cname,Type,_Prop,_Tags}) ->
+ NewTname = [Cname|Tname],
+    %% The tag is set to [] to avoid it being taken into account
+    %% twice, both as a component/alternative (passed as argument to
+    %% the encode/decode function) and within the encode/decode
+    %% function itself.
+ NewType = Type#type{tag=[]},
+ case {asn1ct:get_gen_state_field(active),
+ asn1ct:get_tobe_refed_func(NewTname)} of
+ {true,{_,NameList}} ->
+ asn1ct:update_gen_state(namelist,NameList),
+	    %% later moved from the tobe_refed_funcs list to the gen_refed_funcs list
+ gen_decode(Erules,NewTname,NewType);
+ {No,_} when No == false; No == undefined ->
+ gen_decode(Erules,NewTname,NewType);
+ _ ->
+ ok
+ end.
+
+
+gen_decode_user(Erules,D) when record(D,typedef) ->
+ Typename = [D#typedef.name],
+ Def = D#typedef.typespec,
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ BytesVar = "Tlv",
+ case asn1ct_gen:type(InnerType) of
+ 'ASN1_OPEN_TYPE' ->
+ asn1ct_name:new(len),
+ gen_dec_prim(ber, Def#type{def='ASN1_OPEN_TYPE'},
+ BytesVar,{string,"TagIn"}, [] ,
+ ?PRIMITIVE,"OptOrMand"),
+ emit({".",nl,nl});
+ {primitive,bif} ->
+ asn1ct_name:new(len),
+ gen_dec_prim(ber, Def, BytesVar,{string,"TagIn"},[] ,
+ ?PRIMITIVE,"OptOrMand"),
+ emit([".",nl,nl]);
+ {constructed,bif} ->
+ asn1ct:update_namelist(D#typedef.name),
+ asn1ct_gen:gen_decode_constructed(Erules,Typename,InnerType,D);
+ TheType ->
+ DecFunName = mkfuncname(TheType,dec),
+ emit([DecFunName,"(",BytesVar,
+ ", TagIn)"]),
+ emit([".",nl,nl])
+ end.
+
+
+gen_dec_prim(_Erules,Att,BytesVar,DoTag,_TagIn,_Form,_OptOrMand) ->
+ Typename = Att#type.def,
+%% Currently not used for BER; replaced with [] as a placeholder
+%% Constraint = Att#type.constraint,
+%% Constraint = [],
+ Constraint =
+ case get_constraint(Att#type.constraint,'SizeConstraint') of
+ no -> [];
+ Tc -> Tc
+ end,
+ ValueRange =
+ case get_constraint(Att#type.constraint,'ValueRange') of
+ no -> [];
+ Tv -> Tv
+ end,
+ SingleValue =
+ case get_constraint(Att#type.constraint,'SingleValue') of
+ no -> [];
+ Sv -> Sv
+ end,
+ AsBin = case get(binary_strings) of
+ true -> "_as_bin";
+ _ -> ""
+ end,
+ NewTypeName = case Typename of
+ 'ANY' -> 'ASN1_OPEN_TYPE';
+ _ -> Typename
+ end,
+% DoLength =
+ case NewTypeName of
+ 'BOOLEAN'->
+ emit({"?RT_BER:decode_boolean(",BytesVar,","}),
+ add_func({decode_boolean,2});
+ 'INTEGER' ->
+ emit({"?RT_BER:decode_integer(",BytesVar,",",
+ {asis,int_constr(SingleValue,ValueRange)},","}),
+ add_func({decode_integer,3});
+ {'INTEGER',NamedNumberList} ->
+ emit({"?RT_BER:decode_integer(",BytesVar,",",
+ {asis,int_constr(SingleValue,ValueRange)},",",
+ {asis,NamedNumberList},","}),
+ add_func({decode_integer,4});
+ {'ENUMERATED',NamedNumberList} ->
+ emit({"?RT_BER:decode_enumerated(",BytesVar,",",
+ {asis,Constraint},",",
+ {asis,NamedNumberList},","}),
+ add_func({decode_enumerated,4});
+ {'BIT STRING',NamedNumberList} ->
+ case get(compact_bit_string) of
+ true ->
+ emit({"?RT_BER:decode_compact_bit_string(",
+ BytesVar,",",{asis,Constraint},",",
+ {asis,NamedNumberList},","}),
+ add_func({decode_compact_bit_string,4});
+ _ ->
+ emit({"?RT_BER:decode_bit_string(",BytesVar,",",
+ {asis,Constraint},",",
+ {asis,NamedNumberList},","}),
+ add_func({decode_bit_string,4})
+ end;
+ 'NULL' ->
+ emit({"?RT_BER:decode_null(",BytesVar,","}),
+ add_func({decode_null,2});
+ 'OBJECT IDENTIFIER' ->
+ emit({"?RT_BER:decode_object_identifier(",BytesVar,","}),
+ add_func({decode_object_identifier,2});
+ 'ObjectDescriptor' ->
+ emit({"?RT_BER:decode_restricted_string(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_ObjectDescriptor},","}),
+ add_func({decode_restricted_string,4});
+ 'OCTET STRING' ->
+ emit({"?RT_BER:decode_octet_string",AsBin,"(",BytesVar,",",{asis,Constraint},","}),
+ add_func({decode_octet_string,3});
+ 'NumericString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_NumericString},","}),
+ add_func({decode_restricted_string,4});
+ 'TeletexString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_TeletexString},","}),
+ add_func({decode_restricted_string,4});
+ 'VideotexString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_VideotexString},","}),
+ add_func({decode_restricted_string,4});
+ 'GraphicString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_GraphicString},","}),
+ add_func({decode_restricted_string,4});
+ 'VisibleString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_VisibleString},","}),
+ add_func({decode_restricted_string,4});
+ 'GeneralString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_GeneralString},","}),
+ add_func({decode_restricted_string,4});
+ 'PrintableString' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_PrintableString},","}),
+ add_func({decode_restricted_string,4});
+ 'IA5String' ->
+ emit({"?RT_BER:decode_restricted_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},",",{asis,?T_IA5String},","}),
+	    add_func({decode_restricted_string,4});
+ 'UniversalString' ->
+ emit({"?RT_BER:decode_universal_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},","}),
+ add_func({decode_universal_string,3});
+ 'BMPString' ->
+ emit({"?RT_BER:decode_BMP_string",AsBin,"(",
+ BytesVar,",",{asis,Constraint},","}),
+ add_func({decode_BMP_string,3});
+ 'UTCTime' ->
+ emit({"?RT_BER:decode_utc_time",AsBin,"(",
+ BytesVar,",",{asis,Constraint},","}),
+ add_func({decode_utc_time,3});
+ 'GeneralizedTime' ->
+ emit({"?RT_BER:decode_generalized_time",AsBin,"(",
+ BytesVar,",",{asis,Constraint},","}),
+ add_func({decode_generalized_time,3});
+ 'ASN1_OPEN_TYPE' ->
+ emit(["?RT_BER:decode_open_type_as_binary(",
+ BytesVar,","]),
+ add_func({decode_open_type_as_binary,2});
+ Other ->
+	    exit({'can not decode',Other})
+ end,
+
+ case {DoTag,NewTypeName} of
+ {{string,TagStr},'ASN1_OPEN_TYPE'} ->
+ emit([TagStr,")"]);
+ {_,'ASN1_OPEN_TYPE'} ->
+ emit([{asis,DoTag},")"]);
+ {{string,TagStr},_} ->
+ emit([TagStr,")"]);
+ _ when list(DoTag) ->
+ emit([{asis,DoTag},")"])
+ end.
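+%% As an example of the code emitted above: for a plain BOOLEAN with
+%% BytesVar = "Tlv" and DoTag = {string,"TagIn"}, the generated call is
+%%   ?RT_BER:decode_boolean(Tlv,TagIn)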
+
+
+int_constr([],[]) ->
+ [];
+int_constr([],ValueRange) ->
+ ValueRange;
+int_constr(SingleValue,[]) ->
+ SingleValue;
+int_constr(SV,VR) ->
+ [SV,VR].
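+%% Examples (illustrative constraint values):
+%%   int_constr([],{0,255})    -> {0,255}
+%%   int_constr({1,1},[])      -> {1,1}
+%%   int_constr({1,1},{0,255}) -> [{1,1},{0,255}]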
+
+%% Object code generation for encoding and decoding
+%% ------------------------------------------------
+
+gen_obj_code(Erules,_Module,Obj) when record(Obj,typedef) ->
+ ObjName = Obj#typedef.name,
+ Def = Obj#typedef.typespec,
+ #'Externaltypereference'{module=M,type=ClName} = Def#'Object'.classname,
+ Class = asn1_db:dbget(M,ClName),
+ {object,_,Fields} = Def#'Object'.def,
+ emit({nl,nl,nl,"%%================================"}),
+ emit({nl,"%% ",ObjName}),
+ emit({nl,"%%================================",nl}),
+ EncConstructed =
+ gen_encode_objectfields(ClName,get_class_fields(Class),
+ ObjName,Fields,[]),
+ emit(nl),
+ gen_encode_constr_type(Erules,EncConstructed),
+ emit(nl),
+ DecConstructed =
+ gen_decode_objectfields(ClName,get_class_fields(Class),
+ ObjName,Fields,[]),
+ emit(nl),
+ gen_decode_constr_type(Erules,DecConstructed),
+ emit_tlv_format_function();
+gen_obj_code(_Erules,_Module,Obj) when record(Obj,pobjectdef) ->
+ ok.
+
+gen_encode_objectfields(ClassName,[{typefield,Name,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Arg) ->
+ emit(["'enc_",ObjName,"'(",{asis,Name},
+ ", ",Arg,", _RestPrimFieldName) ->",nl])
+ end,
+% emit(["'enc_",ObjName,"'(",{asis,Name},
+% ", Val, RestPrimFieldName) ->",nl]),
+ MaybeConstr=
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} -> %% this case is illegal
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_"),
+ emit([" {<<>>,0}"]),
+ [];
+ {false,{'DEFAULT',DefaultType}} ->
+ EmitFuncClause("Val"),
+ gen_encode_default_call(ClassName,Name,DefaultType);
+ {{Name,TypeSpec},_} ->
+	      %% A specified field overwrites any 'DEFAULT' or
+	      %% 'OPTIONAL' field in the class
+ EmitFuncClause("Val"),
+ gen_encode_field_call(ObjName,Name,TypeSpec)
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_encode_objectfields(ClassName,Rest,ObjName,ObjectFields,
+ MaybeConstr++ConstrAcc);
+gen_encode_objectfields(ClassName,[{objectfield,Name,_,_,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Args) ->
+ emit(["'enc_",ObjName,"'(",{asis,Name},
+ ", ",Args,") ->",nl])
+ end,
+% emit(["'enc_",ObjName,"'(",{asis,Name},
+% ", Val,[H|T]) ->",nl]),
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} ->
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_,_"),
+ emit([" exit({error,{'use of missing field in object', ",Name,
+ "}})"]);
+ {false,{'DEFAULT',_DefaultObject}} ->
+ exit({error,{asn1,{"not implemented yet",Name}}});
+ {{Name,TypeSpec},_} ->
+ EmitFuncClause(" Val, [H|T]"),
+ case TypeSpec#typedef.name of
+ {ExtMod,TypeName} ->
+ emit({indent(3),"'",ExtMod,"':'enc_",TypeName,
+ "'(H, Val, T)"});
+ TypeName ->
+ emit({indent(3),"'enc_",TypeName,"'(H, Val, T)"})
+ end
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_encode_objectfields(ClassName,Rest,ObjName,ObjectFields,ConstrAcc);
+
+% gen_encode_objectfields(Class,ObjName,[{FieldName,Type}|Rest],ConstrAcc) ->
+% Fields = Class#objectclass.fields,
+% MaybeConstr=
+% case is_typefield(Fields,FieldName) of
+% true ->
+% Def = Type#typedef.typespec,
+% emit({"'enc_",ObjName,"'(",{asis,FieldName},
+% ", Val, RestPrimFieldName) ->",nl}),
+% CAcc=
+% case Type#typedef.name of
+% {primitive,bif} -> %%tag should be the primitive tag
+% OTag = Def#type.tag,
+% Tag = [encode_tag_val(decode_class(X#tag.class),
+% X#tag.form,X#tag.number)||
+% X <- OTag],
+% gen_encode_prim(ber,Def,{asis,lists:reverse(Tag)},
+% "Val"),
+% [];
+% {constructed,bif} ->
+% emit({" 'enc_",ObjName,'_',FieldName,
+% "'(Val)"}),
+% [Type#typedef{name=list_to_atom(lists:concat([ObjName,'_',FieldName]))}];
+% {ExtMod,TypeName} ->
+% emit({" '",ExtMod,"':'enc_",TypeName,
+% "'(Val)"}),
+% [];
+% TypeName ->
+% emit({" 'enc_",TypeName,"'(Val)"}),
+% []
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% CAcc;
+% {false,objectfield} ->
+% emit({"'enc_",ObjName,"'(",{asis,FieldName},
+% ", Val,[H|T]) ->",nl}),
+% case Type#typedef.name of
+% {ExtMod,TypeName} ->
+% emit({indent(3),"'",ExtMod,"':'enc_",TypeName,
+% "'(H, Val, T)"});
+% TypeName ->
+% emit({indent(3),"'enc_",TypeName,"'(H, Val, T)"})
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% [];
+% {false,_} -> []
+% end,
+% gen_encode_objectfields(Class,ObjName,Rest,MaybeConstr ++ ConstrAcc);
+gen_encode_objectfields(ClassName,[_C|Cs],O,OF,Acc) ->
+ gen_encode_objectfields(ClassName,Cs,O,OF,Acc);
+gen_encode_objectfields(_,[],_,_,Acc) ->
+ Acc.
+
+% gen_encode_constr_type(Erules,[{Name,Def}|Rest]) ->
+% emit({Name,"(Val,TagIn) ->",nl}),
+% InnerType = asn1ct_gen:get_inner(Def#type.def),
+% asn1ct_gen:gen_encode_constructed(Erules,Name,InnerType,Def),
+% gen_encode_constr_type(Erules,Rest);
+gen_encode_constr_type(Erules,[TypeDef|Rest]) when record(TypeDef,typedef) ->
+ case is_already_generated(enc,TypeDef#typedef.name) of
+ true -> ok;
+ _ -> gen_encode_user(Erules,TypeDef)
+ end,
+ gen_encode_constr_type(Erules,Rest);
+gen_encode_constr_type(_,[]) ->
+ ok.
+
+gen_encode_field_call(ObjName,FieldName,Type) ->
+ Def = Type#typedef.typespec,
+ OTag = Def#type.tag,
+ Tag = [encode_tag_val(decode_class(X#tag.class),
+ X#tag.form,X#tag.number)||
+ X <- OTag],
+ case Type#typedef.name of
+ {primitive,bif} -> %%tag should be the primitive tag
+% OTag = Def#type.tag,
+% Tag = [encode_tag_val(decode_class(X#tag.class),
+% X#tag.form,X#tag.number)||
+% X <- OTag],
+ gen_encode_prim(ber,Def,{asis,lists:reverse(Tag)},
+ "Val"),
+ [];
+ {constructed,bif} ->
+ emit({" 'enc_",ObjName,'_',FieldName,
+ "'(Val,",{asis,Tag},")"}),
+ [Type#typedef{name=list_to_atom(lists:concat([ObjName,'_',FieldName]))}];
+ {ExtMod,TypeName} ->
+ emit({" '",ExtMod,"':'enc_",TypeName,
+ "'(Val,",{asis,Tag},")"}),
+ [];
+ TypeName ->
+ emit({" 'enc_",TypeName,"'(Val,",{asis,Tag},")"}),
+ []
+ end.
+
+gen_encode_default_call(ClassName,FieldName,Type) ->
+ CurrentMod = get(currmod),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ OTag = Type#type.tag,
+ Tag = [encode_tag_val(decode_class(X#tag.class),X#tag.form,X#tag.number)|| X <- OTag],
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+%% asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,Type);
+ emit([" 'enc_",ClassName,'_',FieldName,"'(Bytes)"]),
+ [#typedef{name=list_to_atom(lists:concat([ClassName,'_',FieldName])),
+ typespec=Type}];
+ {primitive,bif} ->
+ gen_encode_prim(ber,Type,{asis,lists:reverse(Tag)},"Val"),
+ [];
+ #'Externaltypereference'{module=CurrentMod,type=Etype} ->
+ emit([" 'enc_",Etype,"'(Val, ",{asis,Tag},")",nl]),
+ [];
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit([" '",Emod,"':'enc_",Etype,"'(Val, ",{asis,Tag},")",nl]),
+ []
+% 'ASN1_OPEN_TYPE' ->
+% emit(["%% OPEN TYPE",nl]),
+% gen_encode_prim(ber,
+% Type#type{def='ASN1_OPEN_TYPE'},
+% "TagIn","Val"),
+% emit([".",nl])
+ end.
+
+%%%%%%%%%%%%%%%%
+
+gen_decode_objectfields(ClassName,[{typefield,Name,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Arg) ->
+ emit(["'dec_",ObjName,"'(",{asis,Name},
+ ", ",Arg,",_) ->",nl])
+ end,
+% emit(["'dec_",ObjName,"'(",{asis,Name},
+% ", Bytes, RestPrimFieldName) ->",nl]),
+ MaybeConstr=
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} -> %% this case is illegal
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause(" _"),
+ emit([" asn1_NOVALUE"]),
+ [];
+ {false,{'DEFAULT',DefaultType}} ->
+ EmitFuncClause("Bytes"),
+ emit_tlv_format("Bytes"),
+ gen_decode_default_call(ClassName,Name,"Tlv",DefaultType);
+ {{Name,TypeSpec},_} ->
+	      %% A specified field overwrites any 'DEFAULT' or
+	      %% 'OPTIONAL' field in the class
+ EmitFuncClause("Bytes"),
+ emit_tlv_format("Bytes"),
+ gen_decode_field_call(ObjName,Name,"Tlv",TypeSpec)
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_decode_objectfields(ClassName,Rest,ObjName,ObjectFields,MaybeConstr++ConstrAcc);
+gen_decode_objectfields(ClassName,[{objectfield,Name,_,_,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Args) ->
+ emit(["'dec_",ObjName,"'(",{asis,Name},
+ ", ",Args,") ->",nl])
+ end,
+% emit(["'dec_",ObjName,"'(",{asis,Name},
+% ", Bytes,[H|T]) ->",nl]),
+% emit_tlv_format("Bytes"),
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} ->
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_,_"),
+ emit([" exit({error,{'illegal use of missing field in object', ",Name,
+ "}})"]);
+ {false,{'DEFAULT',_DefaultObject}} ->
+ exit({error,{asn1,{"not implemented yet",Name}}});
+ {{Name,TypeSpec},_} ->
+ EmitFuncClause("Bytes,[H|T]"),
+% emit_tlv_format("Bytes"),
+ case TypeSpec#typedef.name of
+ {ExtMod,TypeName} ->
+ emit({indent(3),"'",ExtMod,"':'dec_",TypeName,
+ "'(H, Bytes, T)"});
+ TypeName ->
+ emit({indent(3),"'dec_",TypeName,"'(H, Bytes, T)"})
+ end
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_decode_objectfields(ClassName,Rest,ObjName,ObjectFields,ConstrAcc);
+gen_decode_objectfields(CN,[_C|Cs],O,OF,CAcc) ->
+ gen_decode_objectfields(CN,Cs,O,OF,CAcc);
+gen_decode_objectfields(_,[],_,_,CAcc) ->
+ CAcc.
+
+emit_tlv_format(Bytes) ->
+    notice_tlv_format_gen(), % note that tlv_format/1 must be generated
+ emit([" Tlv = tlv_format(",Bytes,"),",nl]).
+
+notice_tlv_format_gen() ->
+ Module = get(currmod),
+% io:format("Noticed: ~p~n",[Module]),
+ case get(tlv_format) of
+ {done,Module} ->
+ ok;
+ _ -> % true or undefined
+ put(tlv_format,true)
+ end.
+
+emit_tlv_format_function() ->
+ Module = get(currmod),
+%    io:format("Tlv formatted: ~p",[Module]),
+ case get(tlv_format) of
+ true ->
+% io:format(" YES!~n"),
+ emit_tlv_format_function1(),
+ put(tlv_format,{done,Module});
+ _ ->
+% io:format(" NO!~n"),
+ ok
+ end.
+emit_tlv_format_function1() ->
+ emit(["tlv_format(Bytes) when binary(Bytes) ->",nl,
+ " {Tlv,_}=?RT_BER:decode(Bytes),",nl,
+ " Tlv;",nl,
+ "tlv_format(Bytes) ->",nl,
+ " Bytes.",nl]).
+
+
+gen_decode_constr_type(Erules,[{Name,Def}|Rest]) ->
+ emit([Name,"(Tlv, TagIn) ->",nl]),
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ asn1ct_gen:gen_decode_constructed(Erules,Name,InnerType,Def),
+ gen_decode_constr_type(Erules,Rest);
+gen_decode_constr_type(Erules,[TypeDef|Rest]) when record(TypeDef,typedef) ->
+ case is_already_generated(dec,TypeDef#typedef.name) of
+ true -> ok;
+ _ ->
+ gen_decode(Erules,TypeDef)
+ end,
+ gen_decode_constr_type(Erules,Rest);
+gen_decode_constr_type(_,[]) ->
+ ok.
+
+%%%%%%%%%%%
+gen_decode_field_call(ObjName,FieldName,Bytes,Type) ->
+ Def = Type#typedef.typespec,
+ OTag = Def#type.tag,
+ Tag = [(decode_class(X#tag.class) bsl 10) + X#tag.number ||
+ X <- OTag],
+ case Type#typedef.name of
+ {primitive,bif} -> %%tag should be the primitive tag
+ gen_dec_prim(ber,Def,Bytes,Tag,"TagIn",?PRIMITIVE,
+ opt_or_default),
+ [];
+ {constructed,bif} ->
+ emit({" 'dec_",ObjName,'_',FieldName,
+ "'(",Bytes,",",{asis,Tag},")"}),
+ [Type#typedef{name=list_to_atom(lists:concat([ObjName,'_',FieldName]))}];
+ {ExtMod,TypeName} ->
+ emit({" '",ExtMod,"':'dec_",TypeName,
+ "'(",Bytes,",",{asis,Tag},")"}),
+ [];
+ TypeName ->
+ emit({" 'dec_",TypeName,"'(",Bytes,",",{asis,Tag},")"}),
+ []
+ end.
+
+gen_decode_default_call(ClassName,FieldName,Bytes,Type) ->
+ CurrentMod = get(currmod),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ OTag = Type#type.tag,
+ Tag = [(decode_class(X#tag.class) bsl 10) + X#tag.number || X <- OTag],
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ emit([" 'dec_",ClassName,'_',FieldName,"'(",Bytes,",",
+ {asis,Tag},")"]),
+ [#typedef{name=list_to_atom(lists:concat([ClassName,'_',
+ FieldName])),
+ typespec=Type}];
+ {primitive,bif} ->
+ gen_dec_prim(ber,Type,Bytes,Tag,"TagIn",
+ ?PRIMITIVE,opt_or_default),
+ [];
+ #'Externaltypereference'{module=CurrentMod,type=Etype} ->
+ emit([" 'dec_",Etype,"'(",Bytes, " ,",{asis,Tag},")",nl]),
+ [];
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit([" '",Emod,"':'dec_",Etype,"'(",Bytes,", ",
+ {asis,Tag},")",nl]),
+ []
+% 'ASN1_OPEN_TYPE' ->
+% emit(["%% OPEN TYPE",nl]),
+% gen_encode_prim(ber,
+% Type#type{def='ASN1_OPEN_TYPE'},
+% "TagIn","Val"),
+% emit([".",nl])
+ end.
+%%%%%%%%%%%
+
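+%% is_already_generated/2 records {Operation,Name} pairs in the process
+%% dictionary (key class_default_type) and returns whether the pair was
+%% already recorded, so that a referenced type is only generated once.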
+is_already_generated(Operation,Name) ->
+ case get(class_default_type) of
+ undefined ->
+ put(class_default_type,[{Operation,Name}]),
+ false;
+ GeneratedList ->
+ case lists:member({Operation,Name},GeneratedList) of
+ true ->
+ true;
+ false ->
+ put(class_default_type,[{Operation,Name}|GeneratedList]),
+ false
+ end
+ end.
+
+more_genfields([]) ->
+ false;
+more_genfields([Field|Fields]) ->
+ case element(1,Field) of
+ typefield ->
+ true;
+ objectfield ->
+ true;
+ _ ->
+ more_genfields(Fields)
+ end.
+
+
+
+
+%% Object Set code generation for encoding and decoding
+%% ----------------------------------------------------
+gen_objectset_code(Erules,ObjSet) ->
+ ObjSetName = ObjSet#typedef.name,
+ Def = ObjSet#typedef.typespec,
+% {ClassName,ClassDef} = Def#'ObjectSet'.class,
+ #'Externaltypereference'{module=ClassModule,
+ type=ClassName} = Def#'ObjectSet'.class,
+ ClassDef = asn1_db:dbget(ClassModule,ClassName),
+ UniqueFName = Def#'ObjectSet'.uniquefname,
+ Set = Def#'ObjectSet'.set,
+ emit({nl,nl,nl,"%%================================"}),
+ emit({nl,"%% ",ObjSetName}),
+ emit({nl,"%%================================",nl}),
+ case ClassName of
+ {_Module,ExtClassName} ->
+ gen_objset_code(Erules,ObjSetName,UniqueFName,Set,ExtClassName,ClassDef);
+ _ ->
+ gen_objset_code(Erules,ObjSetName,UniqueFName,Set,ClassName,ClassDef)
+ end,
+ emit(nl).
+
+gen_objset_code(Erules,ObjSetName,UniqueFName,Set,ClassName,ClassDef)->
+ ClassFields = get_class_fields(ClassDef),
+ InternalFuncs=gen_objset_enc(Erules,ObjSetName,UniqueFName,Set,
+ ClassName,ClassFields,1,[]),
+ gen_objset_dec(Erules,ObjSetName,UniqueFName,Set,ClassName,ClassFields,1),
+ gen_internal_funcs(Erules,InternalFuncs).
+
+%% gen_objset_enc iterates over the objects of the object set
+gen_objset_enc(_,_,{unique,undefined},_,_,_,_,_) ->
+    %% There is no unique field in the class of this object set;
+    %% don't bother with the constraint
+ [];
+gen_objset_enc(Erules,ObjSName,UniqueName,
+ [{ObjName,Val,Fields},T|Rest],ClName,ClFields,
+ NthObj,Acc)->
+ emit({"'getenc_",ObjSName,"'(",{asis,UniqueName},",",{asis,Val},
+ ") ->",nl}),
+ {InternalFunc,NewNthObj}=
+ case ObjName of
+ no_name ->
+ gen_inlined_enc_funs(Fields,ClFields,ObjSName,NthObj);
+ _ ->
+ emit({" fun 'enc_",ObjName,"'/3"}),
+ {[],NthObj}
+ end,
+ emit({";",nl}),
+ gen_objset_enc(Erules,ObjSName,UniqueName,[T|Rest],ClName,ClFields,
+ NewNthObj,InternalFunc ++ Acc);
+gen_objset_enc(_,ObjSetName,UniqueName,
+ [{ObjName,Val,Fields}],_ClName,ClFields,NthObj,Acc) ->
+ emit({"'getenc_",ObjSetName,"'(",{asis,UniqueName},",",
+ {asis,Val},") ->",nl}),
+ {InternalFunc,_} =
+ case ObjName of
+ no_name ->
+ gen_inlined_enc_funs(Fields,ClFields,ObjSetName,NthObj);
+ _ ->
+ emit({" fun 'enc_",ObjName,"'/3"}),
+ {[],NthObj}
+ end,
+ emit({".",nl,nl}),
+ InternalFunc ++ Acc;
+%% See X.681 Annex E for the following case
+gen_objset_enc(_,ObjSetName,_UniqueName,['EXTENSIONMARK'],_ClName,
+ _ClFields,_NthObj,Acc) ->
+ emit({"'getenc_",ObjSetName,"'(_, _) ->",nl}),
+ emit({indent(3),"fun(_, Val, _RestPrimFieldName) ->",nl}),
+ emit({indent(6),"Len = case Val of",nl,indent(9),
+ "Bin when binary(Bin) -> size(Bin);",nl,indent(9),
+ "_ -> length(Val)",nl,indent(6),"end,"}),
+ emit({indent(6),"{Val,Len}",nl}),
+ emit({indent(3),"end.",nl,nl}),
+ Acc;
+gen_objset_enc(_,_,_,[],_,_,_,Acc) ->
+ Acc.
+
+%% gen_inlined_enc_funs iterates, for each object, over all fields of a
+%% class; for each typefield it checks whether the object has that
+%% field and emits the proper code.
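+%% The emitted code has roughly this shape:
+%%   fun(Type, Val, _RestPrimFieldName) ->
+%%       case Type of
+%%           FieldName1 -> 'enc_...'(Val);
+%%           ...
+%%       end
+%%   end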
+gen_inlined_enc_funs(Fields,[{typefield,Name,_}|Rest],
+ ObjSetName,NthObj) ->
+ InternalDefFunName = asn1ct_gen:list2name([NthObj,Name,ObjSetName]),
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({indent(3),"fun(Type, Val, _RestPrimFieldName) ->",nl,
+ indent(6),"case Type of",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+N,Ret);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({indent(3),"fun(Type, Val, _RestPrimFieldName) ->",nl,
+ indent(6),"case Type of",nl}),
+ emit({indent(9),{asis,Name}," ->",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+N,Ret);
+ false ->
+ gen_inlined_enc_funs(Fields,Rest,ObjSetName,NthObj)
+ end;
+gen_inlined_enc_funs(Fields,[_|Rest],ObjSetName,NthObj) ->
+ gen_inlined_enc_funs(Fields,Rest,ObjSetName,NthObj);
+gen_inlined_enc_funs(_,[],_,NthObj) ->
+ {[],NthObj}.
+
+gen_inlined_enc_funs1(Fields,[{typefield,Name,_}|Rest],ObjSetName,
+ NthObj,Acc) ->
+ InternalDefFunName = asn1ct_gen:list2name([NthObj,Name,ObjSetName]),
+ {Acc2,NAdd}=
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({";",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ {Ret++Acc,N};
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({";",nl,indent(9),{asis,Name}," ->",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ {Ret++Acc,N};
+ false ->
+ {Acc,0}
+ end,
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+NAdd,Acc2);
+gen_inlined_enc_funs1(Fields,[_|Rest],ObjSetName,NthObj,Acc)->
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj,Acc);
+gen_inlined_enc_funs1(_,[],_,NthObj,Acc) ->
+ emit({nl,indent(6),"end",nl}),
+ emit({indent(3),"end"}),
+ {Acc,NthObj}.
+
+emit_inner_of_fun(TDef=#typedef{name={ExtMod,Name},typespec=Type},
+ InternalDefFunName) ->
+ OTag = Type#type.tag,
+ Tag = [encode_tag_val(decode_class(X#tag.class),X#tag.form,X#tag.number)|| X <- OTag],
+% remove Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ case {ExtMod,Name} of
+ {primitive,bif} ->
+ emit(indent(12)),
+ gen_encode_prim(ber,Type,[{asis,lists:reverse(Tag)}],"Val"),
+ {[],0};
+ {constructed,bif} ->
+ emit([indent(12),"'enc_",
+ InternalDefFunName,"'(Val)"]),
+ {[TDef#typedef{name=InternalDefFunName}],1};
+ _ ->
+ emit({indent(12),"'",ExtMod,"':'enc_",Name,"'(Val)"}),
+ {[],0}
+ end;
+emit_inner_of_fun(#typedef{name=Name},_) ->
+% OTag = Type#type.tag,
+% remove Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+% Tag = [encode_tag_val(decode_class(X#tag.class),X#tag.form,X#tag.number)|| X <- OTag],
+ emit({indent(12),"'enc_",Name,"'(Val)"}),
+ {[],0};
+emit_inner_of_fun(Type,_) when record(Type,type) ->
+ CurrMod = get(currmod),
+% OTag = Type#type.tag,
+% remove Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+% Tag = [encode_tag_val(decode_class(X#tag.class),X#tag.form,X#tag.number)|| X <- OTag],
+ case Type#type.def of
+ Def when atom(Def) ->
+ OTag = Type#type.tag,
+ Tag = [encode_tag_val(decode_class(X#tag.class),
+ X#tag.form,X#tag.number)||X <- OTag],
+ emit([indent(9),Def," ->",nl,indent(12)]),
+ gen_encode_prim(ber,Type,{asis,lists:reverse(Tag)},"Val");
+ TRef when record(TRef,typereference) ->
+ T = TRef#typereference.val,
+ emit([indent(9),T," ->",nl,indent(12),"'enc_",T,
+ "'(Val)"]);
+ #'Externaltypereference'{module=CurrMod,type=T} ->
+ emit([indent(9),T," ->",nl,indent(12),"'enc_",T,
+ "'(Val)"]);
+ #'Externaltypereference'{module=ExtMod,type=T} ->
+ emit([indent(9),T," ->",nl,indent(12),ExtMod,":'enc_",
+ T,"'(Val)"])
+ end,
+ {[],0}.
+
+indent(N) ->
+ lists:duplicate(N,32). % 32 = space
+
+
+gen_objset_dec(_,_,{unique,undefined},_,_,_,_) ->
+    %% There is no unique field in the class of this object set;
+    %% don't bother with the constraint
+ ok;
+gen_objset_dec(Erules,ObjSName,UniqueName,[{ObjName,Val,Fields},T|Rest],
+ ClName,ClFields,NthObj)->
+ emit(["'getdec_",ObjSName,"'(",{asis,UniqueName},",",
+ {asis,Val},") ->",nl]),
+ NewNthObj=
+ case ObjName of
+ no_name ->
+ gen_inlined_dec_funs(Fields,ClFields,ObjSName,NthObj);
+ _ ->
+ emit([" fun 'dec_",ObjName,"'/3"]),
+ NthObj
+ end,
+ emit([";",nl]),
+ gen_objset_dec(Erules,ObjSName,UniqueName,[T|Rest],ClName,
+ ClFields,NewNthObj);
+gen_objset_dec(_,ObjSetName,UniqueName,[{ObjName,Val,Fields}],
+ _ClName,ClFields,NthObj) ->
+ emit(["'getdec_",ObjSetName,"'(",{asis,UniqueName},",",
+ {asis,Val},") ->",nl]),
+ case ObjName of
+ no_name ->
+ gen_inlined_dec_funs(Fields,ClFields,ObjSetName,NthObj);
+ _ ->
+ emit([" fun 'dec_",ObjName,"'/3"])
+ end,
+ emit([".",nl,nl]),
+ ok;
+gen_objset_dec(Erules,ObjSetName,_UniqueName,['EXTENSIONMARK'],_ClName,
+ _ClFields,_NthObj) ->
+ emit(["'getdec_",ObjSetName,"'(_, _) ->",nl]),
+ emit([indent(2),"fun(_,Bytes, _RestPrimFieldName) ->",nl]),
+ case Erules of
+ ber_bin_v2 ->
+ emit([indent(4),"case Bytes of",nl,
+ indent(6),"Bin when binary(Bin) -> ",nl,
+ indent(8),"Bin;",nl,
+ indent(6),"_ ->",nl,
+ indent(8),"?RT_BER:encode(Bytes)",nl,
+ indent(4),"end",nl]);
+ _ ->
+ emit([indent(6),"Len = case Bytes of",nl,indent(9),
+ "Bin when binary(Bin) -> size(Bin);",nl,indent(9),
+ "_ -> length(Bytes)",nl,indent(6),"end,"]),
+ emit([indent(4),"{Bytes,[],Len}",nl])
+ end,
+ emit([indent(2),"end.",nl,nl]),
+ ok;
+gen_objset_dec(_,_,_,[],_,_,_) ->
+ ok.
+
+gen_inlined_dec_funs(Fields,[{typefield,Name,Prop}|Rest],
+ ObjSetName,NthObj) ->
+ DecProp = case Prop of
+ 'OPTIONAL' -> opt_or_default;
+ {'DEFAULT',_} -> opt_or_default;
+ _ -> mandatory
+ end,
+ InternalDefFunName = [NthObj,Name,ObjSetName],
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit([indent(3),"fun(Type, Bytes, _RestPrimFieldName) ->",
+ nl,indent(6),"case Type of",nl]),
+ N=emit_inner_of_decfun(Type,DecProp,InternalDefFunName),
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj+N);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit([indent(3),"fun(Type, Bytes, _RestPrimFieldName) ->",
+ nl,indent(6),"case Type of",nl]),
+ emit([indent(9),{asis,Name}," ->",nl]),
+ N=emit_inner_of_decfun(Type,DecProp,InternalDefFunName),
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj+N);
+ false ->
+ gen_inlined_dec_funs(Fields,Rest,ObjSetName,NthObj)
+ end;
+gen_inlined_dec_funs(Fields,[_H|Rest],ObjSetName,NthObj) ->
+ gen_inlined_dec_funs(Fields,Rest,ObjSetName,NthObj);
+gen_inlined_dec_funs(_,[],_,NthObj) ->
+ NthObj.
+
+gen_inlined_dec_funs1(Fields,[{typefield,Name,Prop}|Rest],
+ ObjSetName,NthObj) ->
+ DecProp = case Prop of
+ 'OPTIONAL' -> opt_or_default;
+ {'DEFAULT',_} -> opt_or_default;
+ _ -> mandatory
+ end,
+ InternalDefFunName = [NthObj,Name,ObjSetName],
+ N=
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit([";",nl]),
+ emit_inner_of_decfun(Type,DecProp,InternalDefFunName);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit([";",nl,indent(9),{asis,Name}," ->",nl]),
+ emit_inner_of_decfun(Type,DecProp,InternalDefFunName);
+ false ->
+ 0
+ end,
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj+N);
+gen_inlined_dec_funs1(Fields,[_|Rest],ObjSetName,NthObj)->
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj);
+gen_inlined_dec_funs1(_,[],_,NthObj) ->
+ emit([nl,indent(6),"end",nl]),
+ emit([indent(3),"end"]),
+ NthObj.
+
+emit_inner_of_decfun(#typedef{name={ExtName,Name},typespec=Type},Prop,
+ InternalDefFunName) ->
+ OTag = Type#type.tag,
+%% Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ Tag = [(decode_class(X#tag.class) bsl 10) + X#tag.number || X <- OTag],
+ case {ExtName,Name} of
+ {primitive,bif} ->
+ emit(indent(12)),
+ gen_dec_prim(ber,Type,"Bytes",Tag,"TagIn",
+ ?PRIMITIVE,Prop),
+ 0;
+ {constructed,bif} ->
+ emit([indent(12),"'dec_",
+% asn1ct_gen:list2name(InternalDefFunName),"'(Bytes, ",Prop,
+% ", ",{asis,Tag},")"]),
+ asn1ct_gen:list2name(InternalDefFunName),"'(Bytes, ",
+ {asis,Tag},")"]),
+ 1;
+ _ ->
+ emit([indent(12),"'",ExtName,"':'dec_",Name,"'(Bytes)"]),
+ 0
+ end;
+emit_inner_of_decfun(#typedef{name=Name},_Prop,_) ->
+ emit([indent(12),"'dec_",Name,"'(Bytes)"]),
+ 0;
+emit_inner_of_decfun(Type,Prop,_) when record(Type,type) ->
+ OTag = Type#type.tag,
+%% Tag = [X#tag{class=decode_class(X#tag.class)}|| X <- OTag],
+ Tag = [(decode_class(X#tag.class) bsl 10) + X#tag.number || X <- OTag],
+ CurrMod = get(currmod),
+ Def = Type#type.def,
+ InnerType = asn1ct_gen:get_inner(Def),
+ WhatKind = asn1ct_gen:type(InnerType),
+ case WhatKind of
+ {primitive,bif} ->
+ emit([indent(9),Def," ->",nl,indent(12)]),
+ gen_dec_prim(ber,Type,"Bytes",Tag,"TagIn",
+ ?PRIMITIVE,Prop);
+ #'Externaltypereference'{module=CurrMod,type=T} ->
+ emit([indent(9),T," ->",nl,indent(12),"'dec_",T,
+% "'(Bytes, ",Prop,")"]);
+ "'(Bytes)"]);
+ #'Externaltypereference'{module=ExtMod,type=T} ->
+ emit([indent(9),T," ->",nl,indent(12),ExtMod,":'dec_",
+% T,"'(Bytes, ",Prop,")"])
+ T,"'(Bytes)"])
+ end,
+ 0.
+
+gen_internal_funcs(_,[]) ->
+ ok;
+gen_internal_funcs(Erules,[TypeDef|Rest]) ->
+ gen_encode_user(Erules,TypeDef),
+ emit([nl,nl,"'dec_",TypeDef#typedef.name,
+% "'(Tlv, OptOrMand, TagIn) ->",nl]),
+ "'(Tlv, TagIn) ->",nl]),
+ gen_decode_user(Erules,TypeDef),
+ gen_internal_funcs(Erules,Rest).
+
+
+dbdec(Type) ->
+ demit({"io:format(\"decoding: ",{asis,Type},"~w~n\",[Bytes]),",nl}).
+
+
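+%% Map an ASN.1 tag class name to its numeric class value
+%% (?UNIVERSAL, ?APPLICATION, ?CONTEXT or ?PRIVATE).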
+decode_class('UNIVERSAL') ->
+ ?UNIVERSAL;
+decode_class('APPLICATION') ->
+ ?APPLICATION;
+decode_class('CONTEXT') ->
+ ?CONTEXT;
+decode_class('PRIVATE') ->
+ ?PRIVATE.
+
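+%% Universal class tag numbers (X.690); note that SEQUENCE and
+%% SEQUENCE OF share tag 16, SET and SET OF share tag 17, and CHOICE
+%% has no universal tag of its own.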
+decode_type('BOOLEAN') -> 1;
+decode_type('INTEGER') -> 2;
+decode_type('BIT STRING') -> 3;
+decode_type('OCTET STRING') -> 4;
+decode_type('NULL') -> 5;
+decode_type('OBJECT IDENTIFIER') -> 6;
+decode_type('OBJECT DESCRIPTOR') -> 7;
+decode_type('EXTERNAL') -> 8;
+decode_type('REAL') -> 9;
+decode_type('ENUMERATED') -> 10;
+decode_type('EMBEDDED_PDV') -> 11;
+decode_type('SEQUENCE') -> 16;
+decode_type('SEQUENCE OF') -> 16;
+decode_type('SET') -> 17;
+decode_type('SET OF') -> 17;
+decode_type('NumericString') -> 18;
+decode_type('PrintableString') -> 19;
+decode_type('TeletexString') -> 20;
+decode_type('VideotexString') -> 21;
+decode_type('IA5String') -> 22;
+decode_type('UTCTime') -> 23;
+decode_type('GeneralizedTime') -> 24;
+decode_type('GraphicString') -> 25;
+decode_type('VisibleString') -> 26;
+decode_type('GeneralString') -> 27;
+decode_type('UniversalString') -> 28;
+decode_type('BMPString') -> 30;
+decode_type('CHOICE') -> 'CHOICE'; % choice gets the tag from the actual alternative
+decode_type(Else) -> exit({error,{asn1,{unrecognized_type,Else}}}).
+
+add_removed_bytes() ->
+ asn1ct_name:delete(rb),
+ add_removed_bytes(asn1ct_name:all(rb)).
+
+add_removed_bytes([H,T1|T]) ->
+ emit({{var,H},"+"}),
+ add_removed_bytes([T1|T]);
+add_removed_bytes([H|T]) ->
+ emit({{var,H}}),
+ add_removed_bytes(T);
+add_removed_bytes([]) ->
+ true.
+
+mkfuncname(WhatKind,DecOrEnc) ->
+ case WhatKind of
+ #'Externaltypereference'{module=Mod,type=EType} ->
+ CurrMod = get(currmod),
+ case CurrMod of
+ Mod ->
+ lists:concat(["'",DecOrEnc,"_",EType,"'"]);
+ _ ->
+% io:format("CurrMod: ~p, Mod: ~p~n",[CurrMod,Mod]),
+ lists:concat(["'",Mod,"':'",DecOrEnc,"_",EType,"'"])
+ end;
+ #'typereference'{val=EType} ->
+ lists:concat(["'",DecOrEnc,"_",EType,"'"]);
+ 'ASN1_OPEN_TYPE' ->
+ lists:concat(["'",DecOrEnc,"_",WhatKind,"'"])
+
+ end.
+
+optionals(L) -> optionals(L,[],1).
+
+optionals([{'EXTENSIONMARK',_,_}|Rest],Acc,Pos) ->
+ optionals(Rest,Acc,Pos); % optionals in extension are currently not handled
+optionals([#'ComponentType'{name=Name,prop='OPTIONAL'}|Rest],Acc,Pos) ->
+ optionals(Rest,[{Name,Pos}|Acc],Pos+1);
+optionals([#'ComponentType'{name=Name,prop={'DEFAULT',_}}|Rest],Acc,Pos) ->
+ optionals(Rest,[{Name,Pos}|Acc],Pos+1);
+optionals([#'ComponentType'{}|Rest],Acc,Pos) ->
+ optionals(Rest,Acc,Pos+1);
+optionals([],Acc,_) ->
+ lists:reverse(Acc).
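+%% Example (only the relevant record fields shown; any prop other than
+%% 'OPTIONAL' or {'DEFAULT',_} counts as mandatory):
+%%   optionals([#'ComponentType'{name=a,prop='OPTIONAL'},
+%%              #'ComponentType'{name=b,prop=mandatory},
+%%              #'ComponentType'{name=c,prop={'DEFAULT',0}}])
+%%   -> [{a,1},{c,3}]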
+
+get_constraint(C,Key) ->
+ case lists:keysearch(Key,1,C) of
+ false ->
+ no;
+ {value,{_,V}} ->
+ V
+ end.
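+%% Examples:
+%%   get_constraint([{'SizeConstraint',{1,4}}],'SizeConstraint') -> {1,4}
+%%   get_constraint([],'ValueRange')                             -> no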
+
+
+get_class_fields(#classdef{typespec=ObjClass}) ->
+ ObjClass#objectclass.fields;
+get_class_fields(#objectclass{fields=Fields}) ->
+ Fields;
+get_class_fields(_) ->
+ [].
+
+get_object_field(Name,ObjectFields) ->
+ case lists:keysearch(Name,1,ObjectFields) of
+ {value,Field} -> Field;
+ false -> false
+ end.
+
+%% encode_tag(TagClass (?UNI, ?APP etc), Form (?PRIM etc), TagInteger) ->
+%%     8-bit Int | binary
+encode_tag_val(Class, Form, TagNo) when (TagNo =< 30) ->
+ <<(Class bsr 6):2,(Form bsr 5):1,TagNo:5>>;
+
+encode_tag_val(Class, Form, TagNo) ->
+ {Octets,_Len} = mk_object_val(TagNo),
+ BinOct = list_to_binary(Octets),
+ <<(Class bsr 6):2, (Form bsr 5):1, 31:5,BinOct/binary>>.
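+%% Examples, using raw numeric class/form values for illustration
+%% (context class = 128, primitive form = 0, constructed form = 32):
+%%   encode_tag_val(128, 0, 2)   -> <<130>>     (short form, tag =< 30)
+%%   encode_tag_val(128, 32, 40) -> <<191,40>>  (long form, tag > 30)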
+
+%%%%%%%%%%%
+%% mk_object_val(Value) -> {OctetList, Len}
+%% Returns Val as a list of octets; the 8th (most significant) bit is
+%% always set to one except in the last octet, where it is 0.
+%%
+
+
+mk_object_val(Val) when Val =< 127 ->
+ {[255 band Val], 1};
+mk_object_val(Val) ->
+ mk_object_val(Val bsr 7, [Val band 127], 1).
+mk_object_val(0, Ack, Len) ->
+ {Ack, Len};
+mk_object_val(Val, Ack, Len) ->
+ mk_object_val(Val bsr 7, [((Val band 127) bor 128) | Ack], Len + 1).
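+%% Examples:
+%%   mk_object_val(100) -> {[100],1}
+%%   mk_object_val(201) -> {[129,73],2}   % 129 = 128 bor 1, 73 = 201 band 127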
+
+add_func(F={_Func,_Arity}) ->
+ ets:insert(asn1_functab,{F}).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_gen_per.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_gen_per.erl
new file mode 100644
index 0000000000..b5c70fd856
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_gen_per.erl
@@ -0,0 +1,1189 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_gen_per.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+-module(asn1ct_gen_per).
+
+%% Generate an Erlang module which handles (PER) encoding and decoding
+%% for all types in an ASN.1 module
+
+-include("asn1_records.hrl").
+%-compile(export_all).
+
+-export([pgen/4,gen_dec_prim/3,gen_encode_prim/4]).
+-export([gen_obj_code/3,gen_objectset_code/2]).
+-export([gen_decode/2, gen_decode/3]).
+-export([gen_encode/2, gen_encode/3]).
+-export([is_already_generated/2,more_genfields/1,get_class_fields/1,
+ get_object_field/2]).
+
+-import(asn1ct_gen, [emit/1,demit/1]).
+
+%% pgen(Erules, Module, TypeOrVal)
+%% Generate Erlang module (.erl) and (.hrl) file corresponding to an ASN.1 module
+%% .hrl file is only generated if necessary
+%% Erules = per | ber
+%% Module = atom()
+%% TypeOrVal = {TypeList,ValueList}
+%% TypeList = ValueList = [atom()]
+
+pgen(OutFile,Erules,Module,TypeOrVal) ->
+ asn1ct_gen:pgen_module(OutFile,Erules,Module,TypeOrVal,true).
+
+
+%% Generate ENCODING ******************************
+%%****************************************x
+
+
+gen_encode(Erules,Type) when record(Type,typedef) ->
+ gen_encode_user(Erules,Type).
+%% case Type#typedef.typespec of
+%% Def when record(Def,type) ->
+%% gen_encode_user(Erules,Type);
+%% Def when tuple(Def),(element(1,Def) == 'Object') ->
+%% gen_encode_object(Erules,Type);
+%% Other ->
+%% exit({error,{asn1,{unknown,Other}}})
+%% end.
+
+gen_encode(Erules,Typename,#'ComponentType'{name=Cname,typespec=Type}) ->
+ NewTypename = [Cname|Typename],
+ gen_encode(Erules,NewTypename,Type);
+
+gen_encode(Erules,Typename,Type) when record(Type,type) ->
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ ObjFun =
+ case lists:keysearch(objfun,1,Type#type.tablecinf) of
+ {value,{_,_Name}} ->
+%% lists:concat([", ObjFun",Name]);
+ ", ObjFun";
+ false ->
+ ""
+ end,
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ case InnerType of
+ 'SET' ->
+ true;
+ 'SEQUENCE' ->
+ true;
+ _ ->
+ emit({nl,"'enc_",asn1ct_gen:list2name(Typename),
+ "'({'",asn1ct_gen:list2name(Typename),
+ "',Val}",ObjFun,") ->",nl}),
+ emit({"'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val",ObjFun,");",nl,nl})
+ end,
+ emit({"'enc_",asn1ct_gen:list2name(Typename),"'(Val",ObjFun,
+ ") ->",nl}),
+ asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,Type);
+ _ ->
+ true
+ end.
+
+
+gen_encode_user(Erules,D) when record(D,typedef) ->
+ CurrMod = get(currmod),
+ Typename = [D#typedef.name],
+ Def = D#typedef.typespec,
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ case InnerType of
+ 'SET' -> true;
+ 'SEQUENCE' -> true;
+ _ ->
+ emit({nl,"'enc_",asn1ct_gen:list2name(Typename),"'({'",asn1ct_gen:list2name(Typename),"',Val}) ->",nl}),
+ emit({"'enc_",asn1ct_gen:list2name(Typename),"'(Val);",nl,nl})
+ end,
+ emit({"'enc_",asn1ct_gen:list2name(Typename),"'(Val) ->",nl}),
+ case asn1ct_gen:type(InnerType) of
+ {primitive,bif} ->
+ gen_encode_prim(Erules,Def,"false"),
+ emit({".",nl});
+ 'ASN1_OPEN_TYPE' ->
+ gen_encode_prim(Erules,Def#type{def='ASN1_OPEN_TYPE'},"false"),
+ emit({".",nl});
+ {constructed,bif} ->
+ asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,D);
+ #'Externaltypereference'{module=CurrMod,type=Etype} ->
+ emit({"'enc_",Etype,"'(Val).",nl,nl});
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit({"'",Emod,"':'enc_",Etype,"'(Val).",nl,nl});
+ #typereference{val=Ename} ->
+ emit({"'enc_",Ename,"'(Val).",nl,nl});
+ {notype,_} ->
+ emit({"'enc_",InnerType,"'(Val).",nl,nl})
+ end.
+
+
+gen_encode_prim(Erules,D,DoTag) ->
+ Value = case asn1ct_name:active(val) of
+ true ->
+ asn1ct_gen:mk_var(asn1ct_name:curr(val));
+ false ->
+ "Val"
+ end,
+ gen_encode_prim(Erules,D,DoTag,Value).
+
+gen_encode_prim(_Erules,D,_DoTag,Value) when record(D,type) ->
+ Constraint = D#type.constraint,
+ case D#type.def of
+ 'INTEGER' ->
+	    emit({"?RT_PER:encode_integer(", %error
+ {asis,Constraint},",",Value,")"});
+ {'INTEGER',NamedNumberList} ->
+ emit({"?RT_PER:encode_integer(",
+ {asis,Constraint},",",Value,",",
+ {asis,NamedNumberList},")"});
+ {'ENUMERATED',{Nlist1,Nlist2}} ->
+ NewList = lists:concat([[{0,X}||{X,_} <- Nlist1],['EXT_MARK'],[{1,X}||{X,_} <- Nlist2]]),
+ NewC = [{'ValueRange',{0,length(Nlist1)-1}}],
+ emit(["case (case ",Value," of {_,_}->element(2,",Value,");_->",
+ Value," end) of",nl]),
+ emit_enc_enumerated_cases(NewC, NewList++[{asn1_enum,length(Nlist1)-1}], 0);
+ {'ENUMERATED',NamedNumberList} ->
+ NewList = [X||{X,_} <- NamedNumberList],
+ NewC = [{'ValueRange',{0,length(NewList)-1}}],
+ emit(["case (case ",Value," of {_,_}->element(2,",Value,");_->",
+ Value," end) of",nl]),
+ emit_enc_enumerated_cases(NewC, NewList, 0);
+ {'BIT STRING',NamedNumberList} ->
+ emit({"?RT_PER:encode_bit_string(",
+ {asis,Constraint},",",Value,",",
+ {asis,NamedNumberList},")"});
+ 'NULL' ->
+ emit({"?RT_PER:encode_null(",Value,")"});
+ 'OBJECT IDENTIFIER' ->
+ emit({"?RT_PER:encode_object_identifier(",Value,")"});
+ 'ObjectDescriptor' ->
+ emit({"?RT_PER:encode_ObjectDescriptor(",{asis,Constraint},
+ ",",Value,")"});
+ 'BOOLEAN' ->
+ emit({"?RT_PER:encode_boolean(",Value,")"});
+ 'OCTET STRING' ->
+ emit({"?RT_PER:encode_octet_string(",{asis,Constraint},",",Value,")"});
+ 'NumericString' ->
+ emit({"?RT_PER:encode_NumericString(",{asis,Constraint},",",Value,")"});
+ 'TeletexString' ->
+ emit({"?RT_PER:encode_TeletexString(",{asis,Constraint},",",Value,")"});
+ 'VideotexString' ->
+ emit({"?RT_PER:encode_VideotexString(",{asis,Constraint},",",Value,")"});
+ 'UTCTime' ->
+ emit({"?RT_PER:encode_VisibleString(",{asis,Constraint},",",Value,")"});
+ 'GeneralizedTime' ->
+ emit({"?RT_PER:encode_VisibleString(",{asis,Constraint},",",Value,")"});
+ 'GraphicString' ->
+ emit({"?RT_PER:encode_GraphicString(",{asis,Constraint},",",Value,")"});
+ 'VisibleString' ->
+ emit({"?RT_PER:encode_VisibleString(",{asis,Constraint},",",Value,")"});
+ 'GeneralString' ->
+ emit({"?RT_PER:encode_GeneralString(",{asis,Constraint},",",Value,")"});
+ 'PrintableString' ->
+ emit({"?RT_PER:encode_PrintableString(",{asis,Constraint},",",Value,")"});
+ 'IA5String' ->
+ emit({"?RT_PER:encode_IA5String(",{asis,Constraint},",",Value,")"});
+ 'BMPString' ->
+ emit({"?RT_PER:encode_BMPString(",{asis,Constraint},",",Value,")"});
+ 'UniversalString' ->
+ emit({"?RT_PER:encode_UniversalString(",{asis,Constraint},",",Value,")"});
+ 'ANY' ->
+ emit(["?RT_PER:encode_open_type(", {asis,Constraint}, ",",
+ Value, ")"]);
+ 'ASN1_OPEN_TYPE' ->
+ NewValue = case Constraint of
+ [#'Externaltypereference'{type=Tname}] ->
+ io_lib:format(
+ "?RT_PER:complete(enc_~s(~s))",[Tname,Value]);
+ [#type{def=#'Externaltypereference'{type=Tname}}] ->
+ io_lib:format(
+ "?RT_PER:complete(enc_~s(~s))",[Tname,Value]);
+ _ -> Value
+ end,
+ emit(["?RT_PER:encode_open_type(", {asis,Constraint}, ",",
+ NewValue, ")"]);
+ XX ->
+ exit({asn1_error,nyi,XX})
+ end.
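+%% As an example of the code emitted above: for a 'BOOLEAN' with
+%% Value = "Val", the generated call is ?RT_PER:encode_boolean(Val).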
+
+emit_enc_enumerated_cases(C, [H], Count) ->
+ emit_enc_enumerated_case(C, H, Count),
+ emit([";",nl,"EnumVal -> exit({error,{asn1, {enumerated_not_in_range, EnumVal}}})"]),
+ emit([nl,"end"]);
+emit_enc_enumerated_cases(C, ['EXT_MARK'|T], _Count) ->
+ emit_enc_enumerated_cases(C, T, 0);
+emit_enc_enumerated_cases(C, [H1,H2|T], Count) ->
+ emit_enc_enumerated_case(C, H1, Count),
+ emit([";",nl]),
+ emit_enc_enumerated_cases(C, [H2|T], Count+1).
+
+
+
+emit_enc_enumerated_case(_C, {asn1_enum,High}, _) ->
+ emit([
+ "{asn1_enum,EnumV} when integer(EnumV), EnumV > ",High," -> ",
+ "[{bit,1},?RT_PER:encode_small_number(EnumV)]"]);
+emit_enc_enumerated_case(_C, 'EXT_MARK', _Count) ->
+ true;
+emit_enc_enumerated_case(_C, {1,EnumName}, Count) ->
+ emit(["'",EnumName,"' -> [{bit,1},?RT_PER:encode_small_number(",Count,")]"]);
+emit_enc_enumerated_case(C, {0,EnumName}, Count) ->
+ emit(["'",EnumName,"' -> [{bit,0},?RT_PER:encode_integer(",{asis,C},", ",Count,")]"]);
+emit_enc_enumerated_case(C, EnumName, Count) ->
+ emit(["'",EnumName,"' -> ?RT_PER:encode_integer(",{asis,C},", ",Count,")"]).
+
+
+%% Object code generation for encoding and decoding
+%% ------------------------------------------------
+
+gen_obj_code(Erules,_Module,Obj) when record(Obj,typedef) ->
+ ObjName = Obj#typedef.name,
+ Def = Obj#typedef.typespec,
+ #'Externaltypereference'{module=Mod,type=ClassName} =
+ Def#'Object'.classname,
+ Class = asn1_db:dbget(Mod,ClassName),
+ {object,_,Fields} = Def#'Object'.def,
+ emit({nl,nl,nl,"%%================================"}),
+ emit({nl,"%% ",ObjName}),
+ emit({nl,"%%================================",nl}),
+ EncConstructed =
+ gen_encode_objectfields(ClassName,get_class_fields(Class),
+ ObjName,Fields,[]),
+ emit(nl),
+ gen_encode_constr_type(Erules,EncConstructed),
+ emit(nl),
+ DecConstructed =
+ gen_decode_objectfields(ClassName,get_class_fields(Class),
+ ObjName,Fields,[]),
+ emit(nl),
+ gen_decode_constr_type(Erules,DecConstructed),
+ emit(nl);
+gen_obj_code(_,_,Obj) when record(Obj,pobjectdef) ->
+ ok.
+
+
+gen_encode_objectfields(ClassName,[{typefield,Name,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(V) ->
+ emit(["'enc_",ObjName,"'(",{asis,Name},
+ ",",V,",_RestPrimFieldName) ->",nl])
+ end,
+% emit(["'enc_",ObjName,"'(",{asis,Name},
+% ", Val, _RestPrimFieldName) ->",nl]),
+ MaybeConstr =
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} -> %% this case is illegal
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_"),
+ emit(" []"),
+ [];
+ {false,{'DEFAULT',DefaultType}} ->
+ EmitFuncClause("Val"),
+ gen_encode_default_call(ClassName,Name,DefaultType);
+ {{Name,TypeSpec},_} ->
+	      %% A specified field overwrites any 'DEFAULT' or
+	      %% 'OPTIONAL' field in the class
+ EmitFuncClause("Val"),
+ gen_encode_field_call(ObjName,Name,TypeSpec)
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_encode_objectfields(ClassName,Rest,ObjName,ObjectFields,
+ MaybeConstr++ConstrAcc);
+gen_encode_objectfields(ClassName,[{objectfield,Name,_,_,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Attrs) ->
+ emit(["'enc_",ObjName,"'(",{asis,Name},
+ ",",Attrs,") ->",nl])
+ end,
+% emit(["'enc_",ObjName,"'(",{asis,Name},
+% ", Val,[H|T]) ->",nl]),
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} ->
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_,_"),
+ emit([" exit({error,{'use of missing field in object', ",Name,
+ "}})"]);
+ {false,{'DEFAULT',_DefaultObject}} ->
+ exit({error,{asn1,{"not implemented yet",Name}}});
+ {{Name,TypeSpec},_} ->
+ EmitFuncClause("Val,[H|T]"),
+ case TypeSpec#typedef.name of
+ {ExtMod,TypeName} ->
+ emit({indent(3),"'",ExtMod,"':'enc_",TypeName,
+ "'(H, Val, T)"});
+ TypeName ->
+ emit({indent(3),"'enc_",TypeName,"'(H, Val, T)"})
+ end
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_encode_objectfields(ClassName,Rest,ObjName,ObjectFields,ConstrAcc);
+gen_encode_objectfields(ClassName,[_C|Cs],O,OF,Acc) ->
+ gen_encode_objectfields(ClassName,Cs,O,OF,Acc);
+gen_encode_objectfields(_,[],_,_,Acc) ->
+ Acc.
+
+
+% gen_encode_objectfields(Class,ObjName,[{FieldName,Type}|Rest],ConstrAcc) ->
+% Fields = Class#objectclass.fields,
+
+% MaybeConstr =
+% case is_typefield(Fields,FieldName) of
+% true ->
+% Def = Type#typedef.typespec,
+% emit({"'enc_",ObjName,"'(",{asis,FieldName},
+% ", Val, Dummy) ->",nl}),
+
+% CAcc =
+% case Type#typedef.name of
+% {primitive,bif} ->
+% gen_encode_prim(per,Def,"false","Val"),
+% [];
+% {constructed,bif} ->
+% emit({" 'enc_",ObjName,'_',FieldName,
+% "'(Val)"}),
+% [{['enc_',ObjName,'_',FieldName],Def}];
+% {ExtMod,TypeName} ->
+% emit({" '",ExtMod,"':'enc_",TypeName,"'(Val)"}),
+% [];
+% TypeName ->
+% emit({" 'enc_",TypeName,"'(Val)"}),
+% []
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% CAcc;
+% {false,objectfield} ->
+% emit({"'enc_",ObjName,"'(",{asis,FieldName},
+% ", Val, [H|T]) ->",nl}),
+% case Type#typedef.name of
+% {ExtMod,TypeName} ->
+% emit({indent(3),"'",ExtMod,"':'enc_",TypeName,
+% "'(H, Val, T)"});
+% TypeName ->
+% emit({indent(3),"'enc_",TypeName,"'(H, Val, T)"})
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% [];
+% {false,_} -> []
+% end,
+% gen_encode_objectfields(Class,ObjName,Rest,MaybeConstr ++ ConstrAcc);
+% gen_encode_objectfields(C,O,[H|T],Acc) ->
+% gen_encode_objectfields(C,O,T,Acc);
+% gen_encode_objectfields(_,_,[],Acc) ->
+% Acc.
+
+% gen_encode_constr_type(Erules,[{Name,Def}|Rest]) ->
+% emit({Name,"(Val) ->",nl}),
+% InnerType = asn1ct_gen:get_inner(Def#type.def),
+% asn1ct_gen:gen_encode_constructed(Erules,Name,InnerType,Def),
+% gen_encode_constr_type(Erules,Rest);
+gen_encode_constr_type(Erules,[TypeDef|Rest]) when record(TypeDef,typedef) ->
+ case is_already_generated(enc,TypeDef#typedef.name) of
+ true -> ok;
+ _ ->
+ Name = lists:concat(["enc_",TypeDef#typedef.name]),
+ emit({Name,"(Val) ->",nl}),
+ Def = TypeDef#typedef.typespec,
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ asn1ct_gen:gen_encode_constructed(Erules,Name,InnerType,Def),
+ gen_encode_constr_type(Erules,Rest)
+ end;
+gen_encode_constr_type(_,[]) ->
+ ok.
+
+gen_encode_field_call(ObjName,FieldName,Type) ->
+ Def = Type#typedef.typespec,
+ case Type#typedef.name of
+ {primitive,bif} ->
+ gen_encode_prim(per,Def,"false",
+ "Val"),
+ [];
+ {constructed,bif} ->
+ emit({" 'enc_",ObjName,'_',FieldName,
+ "'(Val)"}),
+ [Type#typedef{name=list_to_atom(lists:concat([ObjName,'_',FieldName]))}];
+ {ExtMod,TypeName} ->
+ emit({" '",ExtMod,"':'enc_",TypeName,
+ "'(Val)"}),
+ [];
+ TypeName ->
+ emit({" 'enc_",TypeName,"'(Val)"}),
+ []
+ end.
+
+gen_encode_default_call(ClassName,FieldName,Type) ->
+ CurrentMod = get(currmod),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+%% asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,Type);
+ emit([" 'enc_",ClassName,'_',FieldName,"'(Val)"]),
+ [#typedef{name=list_to_atom(lists:concat([ClassName,'_',FieldName])),
+ typespec=Type}];
+ {primitive,bif} ->
+ gen_encode_prim(per,Type,"false","Val"),
+ [];
+ #'Externaltypereference'{module=CurrentMod,type=Etype} ->
+ emit([" 'enc_",Etype,"'(Val)",nl]),
+ [];
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit([" '",Emod,"':'enc_",Etype,"'(Val)",nl]),
+ []
+ end.
+
+
+gen_decode_objectfields(ClassName,[{typefield,Name,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Bytes) ->
+ emit(["'dec_",ObjName,"'(",{asis,Name},",",Bytes,
+ ",_,_RestPrimFieldName) ->",nl])
+ end,
+ MaybeConstr=
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} -> %% this case is illegal
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_"),
+ emit([" asn1_NOVALUE"]),
+ [];
+ {false,{'DEFAULT',DefaultType}} ->
+ EmitFuncClause("Bytes"),
+ gen_decode_default_call(ClassName,Name,"Bytes",DefaultType);
+ {{Name,TypeSpec},_} ->
+	      %% A specified field overwrites any 'DEFAULT' or
+	      %% 'OPTIONAL' field in the class
+ EmitFuncClause("Bytes"),
+ gen_decode_field_call(ObjName,Name,"Bytes",TypeSpec)
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_decode_objectfields(ClassName,Rest,ObjName,ObjectFields,MaybeConstr++ConstrAcc);
+gen_decode_objectfields(ClassName,[{objectfield,Name,_,_,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Attrs) ->
+ emit(["'dec_",ObjName,"'(",{asis,Name},
+ ",",Attrs,") ->",nl])
+ end,
+% emit(["'dec_",ObjName,"'(",{asis,Name},
+% ", Bytes,_,[H|T]) ->",nl]),
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} ->
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_,_,_"),
+ emit([" exit({error,{'illegal use of missing field in object', ",Name,
+ "}})"]);
+ {false,{'DEFAULT',_DefaultObject}} ->
+ exit({error,{asn1,{"not implemented yet",Name}}});
+ {{Name,TypeSpec},_} ->
+ EmitFuncClause("Bytes,_,[H|T]"),
+ case TypeSpec#typedef.name of
+ {ExtMod,TypeName} ->
+ emit({indent(3),"'",ExtMod,"':'dec_",TypeName,
+ "'(H, Bytes, telltype, T)"});
+ TypeName ->
+ emit({indent(3),"'dec_",TypeName,"'(H, Bytes, telltype, T)"})
+ end
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_decode_objectfields(ClassName,Rest,ObjName,ObjectFields,ConstrAcc);
+gen_decode_objectfields(CN,[_C|Cs],O,OF,CAcc) ->
+ gen_decode_objectfields(CN,Cs,O,OF,CAcc);
+gen_decode_objectfields(_,[],_,_,CAcc) ->
+ CAcc.
+
+
+% gen_decode_objectfields(Class,ObjName,[{FieldName,Type}|Rest],ConstrAcc) ->
+% Fields = Class#objectclass.fields,
+
+% MaybeConstr =
+% case is_typefield(Fields,FieldName) of
+% true ->
+% Def = Type#typedef.typespec,
+% emit({"'dec_",ObjName,"'(",{asis,FieldName},
+% ", Val, Telltype, RestPrimFieldName) ->",nl}),
+
+% CAcc =
+% case Type#typedef.name of
+% {primitive,bif} ->
+% gen_dec_prim(per,Def,"Val"),
+% [];
+% {constructed,bif} ->
+% emit({" 'dec_",ObjName,'_',FieldName,
+% "'(Val, Telltype)"}),
+% [{['dec_',ObjName,'_',FieldName],Def}];
+% {ExtMod,TypeName} ->
+% emit({" '",ExtMod,"':'dec_",TypeName,
+% "'(Val, Telltype)"}),
+% [];
+% TypeName ->
+% emit({" 'dec_",TypeName,"'(Val, Telltype)"}),
+% []
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% CAcc;
+% {false,objectfield} ->
+% emit({"'dec_",ObjName,"'(",{asis,FieldName},
+% ", Val, Telltype, [H|T]) ->",nl}),
+% case Type#typedef.name of
+% {ExtMod,TypeName} ->
+% emit({indent(3),"'",ExtMod,"':'dec_",TypeName,
+% "'(H, Val, Telltype, T)"});
+% TypeName ->
+% emit({indent(3),"'dec_",TypeName,
+% "'(H, Val, Telltype, T)"})
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% [];
+% {false,_} ->
+% []
+% end,
+% gen_decode_objectfields(Class,ObjName,Rest,MaybeConstr ++ ConstrAcc);
+% gen_decode_objectfields(C,O,[H|T],CAcc) ->
+% gen_decode_objectfields(C,O,T,CAcc);
+% gen_decode_objectfields(_,_,[],CAcc) ->
+% CAcc.
+
+
+gen_decode_field_call(ObjName,FieldName,Bytes,Type) ->
+ Def = Type#typedef.typespec,
+ case Type#typedef.name of
+ {primitive,bif} ->
+ gen_dec_prim(per,Def,Bytes),
+ [];
+ {constructed,bif} ->
+ emit({" 'dec_",ObjName,'_',FieldName,
+ "'(",Bytes,",telltype)"}),
+ [Type#typedef{name=list_to_atom(lists:concat([ObjName,'_',FieldName]))}];
+ {ExtMod,TypeName} ->
+ emit({" '",ExtMod,"':'dec_",TypeName,
+ "'(",Bytes,", telltype)"}),
+ [];
+ TypeName ->
+ emit({" 'dec_",TypeName,"'(",Bytes,", telltype)"}),
+ []
+ end.
+
+gen_decode_default_call(ClassName,FieldName,Bytes,Type) ->
+ CurrentMod = get(currmod),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ emit([" 'dec_",ClassName,'_',FieldName,"'(",Bytes,", telltype)"]),
+ [#typedef{name=list_to_atom(lists:concat([ClassName,'_',FieldName])),
+ typespec=Type}];
+ {primitive,bif} ->
+ gen_dec_prim(per,Type,Bytes),
+ [];
+ #'Externaltypereference'{module=CurrentMod,type=Etype} ->
+ emit([" 'dec_",Etype,"'(",Bytes,", telltype)",nl]),
+ [];
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit([" '",Emod,"':'dec_",Etype,"'(",Bytes,", telltype)",nl]),
+ []
+ end.
+
+
+gen_decode_constr_type(Erules,[{Name,Def}|Rest]) ->
+ emit({Name,"(Bytes,_) ->",nl}),
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ asn1ct_gen:gen_decode_constructed(Erules,Name,InnerType,Def),
+ gen_decode_constr_type(Erules,Rest);
+gen_decode_constr_type(Erules,[TypeDef|Rest]) when record(TypeDef,typedef) ->
+ case is_already_generated(dec,TypeDef#typedef.name) of
+ true -> ok;
+ _ ->
+ gen_decode(Erules,TypeDef)
+ end,
+ gen_decode_constr_type(Erules,Rest);
+gen_decode_constr_type(_,[]) ->
+ ok.
+
+% more_genfields(Fields,[]) ->
+% false;
+% more_genfields(Fields,[{FieldName,_}|T]) ->
+% case is_typefield(Fields,FieldName) of
+% true -> true;
+% {false,objectfield} -> true;
+% {false,_} -> more_genfields(Fields,T)
+% end.
+
+more_genfields([]) ->
+ false;
+more_genfields([Field|Fields]) ->
+ case element(1,Field) of
+ typefield ->
+ true;
+ objectfield ->
+ true;
+ _ ->
+ more_genfields(Fields)
+ end.
+
+% is_typefield(Fields,FieldName) ->
+% case lists:keysearch(FieldName,2,Fields) of
+% {value,Field} ->
+% case element(1,Field) of
+% typefield ->
+% true;
+% Other ->
+% {false,Other}
+% end;
+% _ ->
+% false
+% end.
+%% Object Set code generation for encoding and decoding
+%% ----------------------------------------------------
+gen_objectset_code(Erules,ObjSet) ->
+ ObjSetName = ObjSet#typedef.name,
+ Def = ObjSet#typedef.typespec,
+%% {ClassName,ClassDef} = Def#'ObjectSet'.class,
+ #'Externaltypereference'{module=ClassModule,
+ type=ClassName} = Def#'ObjectSet'.class,
+ ClassDef = asn1_db:dbget(ClassModule,ClassName),
+ UniqueFName = Def#'ObjectSet'.uniquefname,
+ Set = Def#'ObjectSet'.set,
+ emit({nl,nl,nl,"%%================================"}),
+ emit({nl,"%% ",ObjSetName}),
+ emit({nl,"%%================================",nl}),
+ case ClassName of
+ {_Module,ExtClassName} ->
+ gen_objset_code(Erules,ObjSetName,UniqueFName,Set,
+ ExtClassName,ClassDef);
+ _ ->
+ gen_objset_code(Erules,ObjSetName,UniqueFName,Set,
+ ClassName,ClassDef)
+ end,
+ emit(nl).
+
+gen_objset_code(Erules,ObjSetName,UniqueFName,Set,ClassName,ClassDef)->
+ ClassFields = (ClassDef#classdef.typespec)#objectclass.fields,
+ InternalFuncs=
+ gen_objset_enc(ObjSetName,UniqueFName,Set,ClassName,ClassFields,1,[]),
+ gen_objset_dec(ObjSetName,UniqueFName,Set,ClassName,ClassFields,1),
+ gen_internal_funcs(Erules,InternalFuncs).
+
+%% gen_objset_enc iterates over the objects of the object set
+gen_objset_enc(_,{unique,undefined},_,_,_,_,_) ->
+    %% There is no unique field in the class of this object set;
+    %% don't bother with the constraint
+ [];
+gen_objset_enc(ObjSName,UniqueName,[{ObjName,Val,Fields},T|Rest],
+ ClName,ClFields,NthObj,Acc)->
+ emit({"'getenc_",ObjSName,"'(",{asis,UniqueName},",",{asis,Val},
+ ") ->",nl}),
+ {InternalFunc,NewNthObj}=
+ case ObjName of
+ no_name ->
+ gen_inlined_enc_funs(Fields,ClFields,ObjSName,NthObj);
+ _Other ->
+ emit({" fun 'enc_",ObjName,"'/3"}),
+		{[],NthObj}
+ end,
+ emit({";",nl}),
+ gen_objset_enc(ObjSName,UniqueName,[T|Rest],ClName,ClFields,
+ NewNthObj,InternalFunc ++ Acc);
+gen_objset_enc(ObjSetName,UniqueName,
+ [{ObjName,Val,Fields}],_ClName,ClFields,NthObj,Acc) ->
+
+ emit({"'getenc_",ObjSetName,"'(",{asis,UniqueName},",",
+ {asis,Val},") ->",nl}),
+ {InternalFunc,_}=
+ case ObjName of
+ no_name ->
+ gen_inlined_enc_funs(Fields,ClFields,ObjSetName,NthObj);
+ _Other ->
+ emit({" fun 'enc_",ObjName,"'/3"}),
+ {[],NthObj}
+ end,
+ emit({".",nl,nl}),
+ InternalFunc++Acc;
+gen_objset_enc(ObjSetName,_UniqueName,['EXTENSIONMARK'],_ClName,
+ _ClFields,_NthObj,Acc) ->
+ emit({"'getenc_",ObjSetName,"'(_, _) ->",nl}),
+ emit({indent(3),"fun(_, Val, _) ->",nl}),
+ emit({indent(6),"[{octets,Val}]",nl}),
+ emit({indent(3),"end.",nl,nl}),
+ Acc;
+gen_objset_enc(_,_,[],_,_,_,Acc) ->
+ Acc.
+
+%% gen_inlined_enc_funs for each object iterates over all fields of a
+%% class, and for each typefield it checks if the object has that
+%% field and emits the proper code.
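+%% A rough sketch (field, set and generated function names are hypothetical)
+%% of the fun emitted for an unnamed object with one locally defined type
+%% field 'ArgType':
+%%
+%%   fun(Type, Val, _) ->
+%%      case Type of
+%%         'ArgType' ->
+%%            'enc_<generated internal name>'(Val)
+%%      end
+%%   end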
+gen_inlined_enc_funs(Fields,[{typefield,Name,_}|Rest],ObjSetName,NthObj) ->
+ InternalDefFunName = asn1ct_gen:list2name([NthObj,Name,ObjSetName]),
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({indent(3),"fun(Type, Val, _) ->",nl,
+ indent(6),"case Type of",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+N,Ret);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({indent(3),"fun(Type, Val, _) ->",nl,
+ indent(6),"case Type of",nl}),
+ emit({indent(9),{asis,Name}," ->",nl}),
+ {Ret,N} = emit_inner_of_fun(Type,InternalDefFunName),
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+N,Ret);
+ false ->
+ gen_inlined_enc_funs(Fields,Rest,ObjSetName,NthObj)
+ end;
+gen_inlined_enc_funs(Fields,[_H|Rest],ObjSetName,NthObj) ->
+ gen_inlined_enc_funs(Fields,Rest,ObjSetName,NthObj);
+gen_inlined_enc_funs(_,[],_,NthObj) ->
+ {[],NthObj}.
+
+gen_inlined_enc_funs1(Fields,[{typefield,Name,_}|Rest],ObjSetName,
+ NthObj,Acc) ->
+ InternalDefFunName = asn1ct_gen:list2name([NthObj,Name,ObjSetName]),
+ {Acc2,NAdd}=
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({";",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ {Ret++Acc,N};
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({";",nl,indent(9),{asis,Name}," ->",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ {Ret++Acc,N};
+ false ->
+ {Acc,0}
+ end,
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+NAdd,Acc2);
+gen_inlined_enc_funs1(Fields,[_H|Rest],ObjSetName,NthObj,Acc)->
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj,Acc);
+gen_inlined_enc_funs1(_,[],_,NthObj,Acc) ->
+ emit({nl,indent(6),"end",nl}),
+ emit({indent(3),"end"}),
+ {Acc,NthObj}.
+
+emit_inner_of_fun(TDef=#typedef{name={ExtMod,Name},typespec=Type},
+ InternalDefFunName) ->
+ case {ExtMod,Name} of
+ {primitive,bif} ->
+ emit(indent(12)),
+ gen_encode_prim(per,Type,dotag,"Val"),
+ {[],0};
+ {constructed,bif} ->
+ emit([indent(12),"'enc_",
+ InternalDefFunName,"'(Val)"]),
+ {[TDef#typedef{name=InternalDefFunName}],1};
+ _ ->
+ emit({indent(12),"'",ExtMod,"':'enc_",Name,"'(Val)"}),
+ {[],0}
+ end;
+emit_inner_of_fun(#typedef{name=Name},_) ->
+ emit({indent(12),"'enc_",Name,"'(Val)"}),
+ {[],0};
+emit_inner_of_fun(Type,_) when record(Type,type) ->
+ CurrMod = get(currmod),
+ case Type#type.def of
+ Def when atom(Def) ->
+ emit({indent(9),Def," ->",nl,indent(12)}),
+ gen_encode_prim(erules,Type,dotag,"Val");
+ TRef when record(TRef,typereference) ->
+ T = TRef#typereference.val,
+ emit({indent(9),T," ->",nl,indent(12),"'enc_",T,"'(Val)"});
+ #'Externaltypereference'{module=CurrMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),"'enc_",T,"'(Val)"});
+ #'Externaltypereference'{module=ExtMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),ExtMod,":'enc_",
+ T,"'(Val)"})
+ end,
+ {[],0}.
+
+indent(N) ->
+ lists:duplicate(N,32). % 32 = space
+
+
+gen_objset_dec(_,{unique,undefined},_,_,_,_) ->
+    %% There is no unique field in the class of this object set,
+    %% so we do not bother about the constraint.
+ ok;
+gen_objset_dec(ObjSName,UniqueName,[{ObjName,Val,Fields},T|Rest],ClName,
+ ClFields,NthObj)->
+
+ emit({"'getdec_",ObjSName,"'(",{asis,UniqueName},",",{asis,Val},
+ ") ->",nl}),
+ NewNthObj=
+ case ObjName of
+ no_name ->
+ gen_inlined_dec_funs(Fields,ClFields,ObjSName,NthObj);
+ _Other ->
+ emit({" fun 'dec_",ObjName,"'/4"}),
+ NthObj
+ end,
+ emit({";",nl}),
+ gen_objset_dec(ObjSName,UniqueName,[T|Rest],ClName,ClFields,NewNthObj);
+gen_objset_dec(ObjSetName,UniqueName,[{ObjName,Val,Fields}],_ClName,
+ ClFields,NthObj) ->
+
+ emit({"'getdec_",ObjSetName,"'(",{asis,UniqueName},",",{asis,Val},
+ ") ->",nl}),
+ case ObjName of
+ no_name ->
+ gen_inlined_dec_funs(Fields,ClFields,ObjSetName,NthObj);
+ _Other ->
+ emit({" fun 'dec_",ObjName,"'/4"})
+ end,
+ emit({".",nl,nl}),
+ ok;
+gen_objset_dec(ObjSetName,_UniqueName,['EXTENSIONMARK'],_ClName,_ClFields,
+ _NthObj) ->
+ emit({"'getdec_",ObjSetName,"'(_, _) ->",nl}),
+ emit({indent(3),"fun(Attr1, Bytes, _,_) ->",nl}),
+%% emit({indent(6),"?RT_PER:decode_open_type(Bytes,[])",nl}),
+ emit({indent(6),"{Bytes,Attr1}",nl}),
+ emit({indent(3),"end.",nl,nl}),
+ ok;
+gen_objset_dec(_,_,[],_,_,_) ->
+ ok.
+
+gen_inlined_dec_funs(Fields,[{typefield,Name,_}|Rest],
+ ObjSetName,NthObj) ->
+ InternalDefFunName = [NthObj,Name,ObjSetName],
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({indent(3),"fun(Type, Val, _, _) ->",nl,
+ indent(6),"case Type of",nl}),
+ N=emit_inner_of_decfun(Type,InternalDefFunName),
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj+N);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({indent(3),"fun(Type, Val, _, _) ->",nl,
+ indent(6),"case Type of",nl}),
+ emit({indent(9),{asis,Name}," ->",nl}),
+ N=emit_inner_of_decfun(Type,InternalDefFunName),
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj+N);
+ false ->
+ gen_inlined_dec_funs(Fields,Rest,ObjSetName,NthObj)
+ end;
+gen_inlined_dec_funs(Fields,[_|Rest],ObjSetName,NthObj) ->
+ gen_inlined_dec_funs(Fields,Rest,ObjSetName,NthObj);
+gen_inlined_dec_funs(_,[],_,NthObj) ->
+ NthObj.
+
+gen_inlined_dec_funs1(Fields,[{typefield,Name,_}|Rest],
+ ObjSetName,NthObj) ->
+ InternalDefFunName = [NthObj,Name,ObjSetName],
+ N=case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({";",nl}),
+ emit_inner_of_decfun(Type,InternalDefFunName);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({";",nl,indent(9),{asis,Name}," ->",nl}),
+ emit_inner_of_decfun(Type,InternalDefFunName);
+ false ->
+ 0
+ end,
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj+N);
+gen_inlined_dec_funs1(Fields,[_|Rest],ObjSetName,NthObj)->
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj);
+gen_inlined_dec_funs1(_,[],_,NthObj) ->
+ emit({nl,indent(6),"end",nl}),
+ emit({indent(3),"end"}),
+ NthObj.
+
+emit_inner_of_decfun(#typedef{name={ExtName,Name},typespec=Type},
+ InternalDefFunName) ->
+ case {ExtName,Name} of
+ {primitive,bif} ->
+ emit(indent(12)),
+ gen_dec_prim(per,Type,"Val"),
+ 0;
+ {constructed,bif} ->
+ emit({indent(12),"'dec_",
+ asn1ct_gen:list2name(InternalDefFunName),"'(Val)"}),
+ 1;
+ _ ->
+ emit({indent(12),"'",ExtName,"':'dec_",Name,"'(Val, telltype)"}),
+ 0
+ end;
+emit_inner_of_decfun(#typedef{name=Name},_) ->
+ emit({indent(12),"'dec_",Name,"'(Val, telltype)"}),
+ 0;
+emit_inner_of_decfun(Type,_) when record(Type,type) ->
+ CurrMod = get(currmod),
+ case Type#type.def of
+ Def when atom(Def) ->
+ emit({indent(9),Def," ->",nl,indent(12)}),
+ gen_dec_prim(erules,Type,"Val");
+ TRef when record(TRef,typereference) ->
+ T = TRef#typereference.val,
+ emit({indent(9),T," ->",nl,indent(12),"'dec_",T,"'(Val)"});
+ #'Externaltypereference'{module=CurrMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),"'dec_",T,"'(Val)"});
+ #'Externaltypereference'{module=ExtMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),ExtMod,":'dec_",
+ T,"'(Val)"})
+ end,
+ 0.
+
+
+gen_internal_funcs(_,[]) ->
+ ok;
+gen_internal_funcs(Erules,[TypeDef|Rest]) ->
+ gen_encode_user(Erules,TypeDef),
+ emit([nl,nl,"'dec_",TypeDef#typedef.name,"'(Bytes) ->",nl]),
+ gen_decode_user(Erules,TypeDef),
+ gen_internal_funcs(Erules,Rest).
+
+
+
+%% DECODING *****************************
+%%***************************************
+
+
+gen_decode(Erules,Type) when record(Type,typedef) ->
+ D = Type,
+ emit({nl,nl}),
+ emit({"'dec_",Type#typedef.name,"'(Bytes,_) ->",nl}),
+ dbdec(Type#typedef.name),
+ gen_decode_user(Erules,D).
+
+gen_decode(Erules,Tname,#'ComponentType'{name=Cname,typespec=Type}) ->
+ NewTname = [Cname|Tname],
+ gen_decode(Erules,NewTname,Type);
+
+gen_decode(Erules,Typename,Type) when record(Type,type) ->
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ ObjFun =
+ case Type#type.tablecinf of
+ [{objfun,_}|_R] ->
+ ", ObjFun";
+ _ ->
+ ""
+ end,
+ emit({nl,"'dec_",asn1ct_gen:list2name(Typename),
+ "'(Bytes,_",ObjFun,") ->",nl}),
+ dbdec(Typename),
+ asn1ct_gen:gen_decode_constructed(Erules,Typename,InnerType,Type);
+ _ ->
+ true
+ end.
+
+dbdec(Type) when list(Type)->
+ demit({"io:format(\"decoding: ",asn1ct_gen:list2name(Type),"~w~n\",[Bytes]),",nl});
+dbdec(Type) ->
+ demit({"io:format(\"decoding: ",{asis,Type},"~w~n\",[Bytes]),",nl}).
+
+gen_decode_user(Erules,D) when record(D,typedef) ->
+ CurrMod = get(currmod),
+ Typename = [D#typedef.name],
+ Def = D#typedef.typespec,
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ case asn1ct_gen:type(InnerType) of
+ {primitive,bif} ->
+ gen_dec_prim(Erules,Def,"Bytes"),
+ emit({".",nl,nl});
+ 'ASN1_OPEN_TYPE' ->
+ gen_dec_prim(Erules,Def#type{def='ASN1_OPEN_TYPE'},"Bytes"),
+ emit({".",nl,nl});
+ {constructed,bif} ->
+ asn1ct_gen:gen_decode_constructed(Erules,Typename,InnerType,D);
+ #typereference{val=Dname} ->
+ emit({"'dec_",Dname,"'(Bytes,telltype)"}),
+ emit({".",nl,nl});
+ #'Externaltypereference'{module=CurrMod,type=Etype} ->
+ emit({"'dec_",Etype,"'(Bytes,telltype).",nl,nl});
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit({"'",Emod,"':'dec_",Etype,"'(Bytes,telltype).",nl,nl});
+ Other ->
+ exit({error,{asn1,{unknown,Other}}})
+ end.
+
+
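+%% gen_dec_prim emits a call to the PER runtime (?RT_PER) that decodes a
+%% primitive type from BytesVar.  For example, for a BOOLEAN the emitted
+%% text is ?RT_PER:decode_boolean(Bytes), and for a constrained INTEGER it
+%% is ?RT_PER:decode_integer(Bytes, Constraint) with the constraint
+%% inserted as a literal term.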
+gen_dec_prim(_Erules,Att,BytesVar) ->
+ Typename = Att#type.def,
+ Constraint = Att#type.constraint,
+ case Typename of
+ 'INTEGER' ->
+ emit({"?RT_PER:decode_integer(",BytesVar,",",
+ {asis,Constraint},")"});
+ {'INTEGER',NamedNumberList} ->
+ emit({"?RT_PER:decode_integer(",BytesVar,",",
+ {asis,Constraint},",",
+ {asis,NamedNumberList},")"});
+ {'BIT STRING',NamedNumberList} ->
+ case get(compact_bit_string) of
+ true ->
+ emit({"?RT_PER:decode_compact_bit_string(",
+ BytesVar,",",{asis,Constraint},",",
+ {asis,NamedNumberList},")"});
+ _ ->
+ emit({"?RT_PER:decode_bit_string(",BytesVar,",",
+ {asis,Constraint},",",
+ {asis,NamedNumberList},")"})
+ end;
+ 'NULL' ->
+ emit({"?RT_PER:decode_null(",
+ BytesVar,")"});
+ 'OBJECT IDENTIFIER' ->
+ emit({"?RT_PER:decode_object_identifier(",
+ BytesVar,")"});
+ 'ObjectDescriptor' ->
+ emit({"?RT_PER:decode_ObjectDescriptor(",
+ BytesVar,")"});
+ {'ENUMERATED',{NamedNumberList1,NamedNumberList2}} ->
+ NewTup = {list_to_tuple([X||{X,_} <- NamedNumberList1]),
+ list_to_tuple([X||{X,_} <- NamedNumberList2])},
+ NewC = [{'ValueRange',{0,size(element(1,NewTup))-1}}],
+ emit({"?RT_PER:decode_enumerated(",BytesVar,",",
+ {asis,NewC},",",
+ {asis,NewTup},")"});
+ {'ENUMERATED',NamedNumberList} ->
+ NewTup = list_to_tuple([X||{X,_} <- NamedNumberList]),
+ NewC = [{'ValueRange',{0,size(NewTup)-1}}],
+ emit({"?RT_PER:decode_enumerated(",BytesVar,",",
+ {asis,NewC},",",
+ {asis,NewTup},")"});
+ 'BOOLEAN'->
+ emit({"?RT_PER:decode_boolean(",BytesVar,")"});
+ 'OCTET STRING' ->
+ emit({"?RT_PER:decode_octet_string(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'NumericString' ->
+ emit({"?RT_PER:decode_NumericString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'TeletexString' ->
+ emit({"?RT_PER:decode_TeletexString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'VideotexString' ->
+ emit({"?RT_PER:decode_VideotexString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'UTCTime' ->
+ emit({"?RT_PER:decode_VisibleString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'GeneralizedTime' ->
+ emit({"?RT_PER:decode_VisibleString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'GraphicString' ->
+ emit({"?RT_PER:decode_GraphicString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'VisibleString' ->
+ emit({"?RT_PER:decode_VisibleString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'GeneralString' ->
+ emit({"?RT_PER:decode_GeneralString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'PrintableString' ->
+ emit({"?RT_PER:decode_PrintableString(",BytesVar,",",{asis,Constraint},")"});
+ 'IA5String' ->
+ emit({"?RT_PER:decode_IA5String(",BytesVar,",",{asis,Constraint},")"});
+ 'BMPString' ->
+ emit({"?RT_PER:decode_BMPString(",BytesVar,",",{asis,Constraint},")"});
+ 'UniversalString' ->
+ emit({"?RT_PER:decode_UniversalString(",BytesVar,",",{asis,Constraint},")"});
+ 'ANY' ->
+ emit(["?RT_PER:decode_open_type(",BytesVar,",",
+ {asis,Constraint}, ")"]);
+ 'ASN1_OPEN_TYPE' ->
+ case Constraint of
+ [#'Externaltypereference'{type=Tname}] ->
+ emit(["fun(FBytes) ->",nl,
+ " {XTerm,XBytes} = "]),
+ emit(["?RT_PER:decode_open_type(FBytes,[]),",nl]),
+ emit([" {YTerm,_} = dec_",Tname,"(XTerm,mandatory),",nl]),
+ emit([" {YTerm,XBytes} end(",BytesVar,")"]);
+ [#type{def=#'Externaltypereference'{type=Tname}}] ->
+ emit(["fun(FBytes) ->",nl,
+ " {XTerm,XBytes} = "]),
+ emit(["?RT_PER:decode_open_type(FBytes,[]),",nl]),
+ emit([" {YTerm,_} = dec_",Tname,"(XTerm,mandatory),",nl]),
+ emit([" {YTerm,XBytes} end(",BytesVar,")"]);
+ _ ->
+ emit(["?RT_PER:decode_open_type(",BytesVar,",[])"])
+ end;
+ Other ->
+ exit({'cant decode' ,Other})
+ end.
+
+
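+%% is_already_generated/2 records, in the process dictionary under
+%% class_default_type, which enc_/dec_ helper functions have been emitted
+%% so far, so that the same helper is not generated twice.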
+is_already_generated(Operation,Name) ->
+ case get(class_default_type) of
+ undefined ->
+ put(class_default_type,[{Operation,Name}]),
+ false;
+ GeneratedList ->
+ case lists:member({Operation,Name},GeneratedList) of
+ true ->
+ true;
+ false ->
+ put(class_default_type,[{Operation,Name}|GeneratedList]),
+ false
+ end
+ end.
+
+get_class_fields(#classdef{typespec=ObjClass}) ->
+ ObjClass#objectclass.fields;
+get_class_fields(#objectclass{fields=Fields}) ->
+ Fields;
+get_class_fields(_) ->
+ [].
+
+
+get_object_field(Name,ObjectFields) ->
+ case lists:keysearch(Name,1,ObjectFields) of
+ {value,Field} -> Field;
+ false -> false
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_gen_per_rt2ct.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_gen_per_rt2ct.erl
new file mode 100644
index 0000000000..ddfa124048
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_gen_per_rt2ct.erl
@@ -0,0 +1,1811 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_gen_per_rt2ct.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+-module(asn1ct_gen_per_rt2ct).
+
+%% Generate erlang module which handles (PER) encode and decode for
+%% all types in an ASN.1 module
+
+-include("asn1_records.hrl").
+%-compile(export_all).
+
+-export([pgen/4,gen_dec_prim/3,gen_encode_prim/4]).
+-export([gen_obj_code/3,gen_objectset_code/2]).
+-export([gen_decode/2, gen_decode/3]).
+-export([gen_encode/2, gen_encode/3]).
+
+-import(asn1ct_gen, [emit/1,demit/1]).
+-import(asn1ct_gen_per, [is_already_generated/2,more_genfields/1,
+ get_class_fields/1,get_object_field/2]).
+
+%% pgen(OutFile, Erules, Module, TypeOrVal)
+%% Generate Erlang module (.erl) and (.hrl) file corresponding to an ASN.1 module
+%% .hrl file is only generated if necessary
+%% Erules = per | ber
+%% Module = atom()
+%% TypeOrVal = {TypeList,ValueList}
+%% TypeList = ValueList = [atom()]
+
+pgen(OutFile,Erules,Module,TypeOrVal) ->
+ asn1ct_gen:pgen_module(OutFile,Erules,Module,TypeOrVal,true).
+
+
+%% Generate ENCODING ******************************
+%%****************************************
+
+
+gen_encode(Erules,Type) when record(Type,typedef) ->
+ gen_encode_user(Erules,Type).
+
+gen_encode(Erules,Typename,#'ComponentType'{name=Cname,typespec=Type}) ->
+ NewTypename = [Cname|Typename],
+ gen_encode(Erules,NewTypename,Type);
+
+gen_encode(Erules,Typename,Type) when record(Type,type) ->
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ ObjFun =
+ case lists:keysearch(objfun,1,Type#type.tablecinf) of
+ {value,{_,_Name}} ->
+ ", ObjFun";
+ false ->
+ ""
+ end,
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ case InnerType of
+ 'SET' ->
+ true;
+ 'SEQUENCE' ->
+ true;
+ _ ->
+ emit({nl,"'enc_",asn1ct_gen:list2name(Typename),
+ "'({'",asn1ct_gen:list2name(Typename),
+ "',Val}",ObjFun,") ->",nl}),
+ emit({"'enc_",asn1ct_gen:list2name(Typename),
+ "'(Val",ObjFun,");",nl,nl})
+ end,
+ emit({"'enc_",asn1ct_gen:list2name(Typename),"'(Val",ObjFun,
+ ") ->",nl}),
+ asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,Type);
+ _ ->
+ true
+ end.
+
+
+gen_encode_user(Erules,D) when record(D,typedef) ->
+ CurrMod = get(currmod),
+ Typename = [D#typedef.name],
+ Def = D#typedef.typespec,
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ case InnerType of
+ 'SET' -> true;
+ 'SEQUENCE' -> true;
+ _ ->
+ emit({nl,"'enc_",asn1ct_gen:list2name(Typename),"'({'",asn1ct_gen:list2name(Typename),"',Val}) ->",nl}),
+ emit({"'enc_",asn1ct_gen:list2name(Typename),"'(Val);",nl,nl})
+ end,
+ emit({"'enc_",asn1ct_gen:list2name(Typename),"'(Val) ->",nl}),
+ case asn1ct_gen:type(InnerType) of
+ {primitive,bif} ->
+ gen_encode_prim(Erules,Def,"false"),
+ emit({".",nl});
+ 'ASN1_OPEN_TYPE' ->
+ gen_encode_prim(Erules,Def#type{def='ASN1_OPEN_TYPE'},"false"),
+ emit({".",nl});
+ {constructed,bif} ->
+ asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,D);
+ #'Externaltypereference'{module=CurrMod,type=Etype} ->
+ emit({"'enc_",Etype,"'(Val).",nl,nl});
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit({"'",Emod,"':'enc_",Etype,"'(Val).",nl,nl});
+ #typereference{val=Ename} ->
+ emit({"'enc_",Ename,"'(Val).",nl,nl});
+ {notype,_} ->
+ emit({"'enc_",InnerType,"'(Val).",nl,nl})
+ end.
+
+
+gen_encode_prim(Erules,D,DoTag) ->
+ Value = case asn1ct_name:active(val) of
+ true ->
+ asn1ct_gen:mk_var(asn1ct_name:curr(val));
+ false ->
+ "Val"
+ end,
+ gen_encode_prim(Erules,D,DoTag,Value).
+
+
+
+
+
+gen_encode_prim(_Erules,D,_DoTag,Value) when record(D,type) ->
+ Constraint = D#type.constraint,
+ case D#type.def of
+ 'INTEGER' ->
+ EffectiveConstr = effective_constraint(integer,Constraint),
+ emit([" %%INTEGER with effective constraint: ",
+ {asis,EffectiveConstr},nl]),
+ emit_enc_integer(EffectiveConstr,Value);
+ {'INTEGER',NamedNumberList} ->
+ EffectiveConstr = effective_constraint(integer,Constraint),
+ %% maybe an emit_enc_NNL_integer
+ emit([" %%INTEGER with effective constraint: ",
+ {asis,EffectiveConstr},nl]),
+ emit_enc_integer_NNL(EffectiveConstr,Value,NamedNumberList);
+ {'ENUMERATED',{Nlist1,Nlist2}} ->
+ NewList = lists:concat([[{0,X}||{X,_} <- Nlist1],['EXT_MARK'],[{1,X}||{X,_} <- Nlist2]]),
+ NewC = [{'ValueRange',{0,length(Nlist1)-1}}],
+ emit(["case (case ",Value," of {_,_}->element(2,",Value,");_->",
+ Value," end) of",nl]),
+ emit_enc_enumerated_cases(NewC, NewList++[{asn1_enum,length(Nlist1)-1}], 0);
+ {'ENUMERATED',NamedNumberList} ->
+ NewList = [X||{X,_} <- NamedNumberList],
+ NewC = effective_constraint(integer,
+ [{'ValueRange',
+ {0,length(NewList)-1}}]),
+ NewVal = enc_enum_cases(Value,NewList),
+ emit_enc_integer(NewC,NewVal);
+ {'BIT STRING',NamedNumberList} ->
+ EffectiveC = effective_constraint(bitstring,Constraint),
+ case EffectiveC of
+ 0 -> emit({"[]"});
+ _ ->
+ emit({"?RT_PER:encode_bit_string(",
+ {asis,EffectiveC},",",Value,",",
+ {asis,NamedNumberList},")"})
+ end;
+ 'NULL' ->
+ emit({"?RT_PER:encode_null(",Value,")"});
+ 'OBJECT IDENTIFIER' ->
+ emit({"?RT_PER:encode_object_identifier(",Value,")"});
+ 'ObjectDescriptor' ->
+ emit({"?RT_PER:encode_ObjectDescriptor(",{asis,Constraint},
+ ",",Value,")"});
+ 'BOOLEAN' ->
+% emit({"?RT_PER:encode_boolean(",Value,")"});
+ emit({"case ",Value," of",nl,
+% " true -> {bits,1,1};",nl,
+ " true -> [1];",nl,
+% " false -> {bits,1,0};",nl,
+ " false -> [0];",nl,
+ " _ -> exit({error,{asn1,{encode_boolean,",Value,"}}})",nl,
+ "end"});
+ 'OCTET STRING' ->
+ emit_enc_octet_string(Constraint,Value);
+
+ 'NumericString' ->
+ emit_enc_known_multiplier_string('NumericString',Constraint,Value);
+ 'TeletexString' ->
+ emit({"?RT_PER:encode_TeletexString(",{asis,Constraint},",",Value,")"});
+ 'VideotexString' ->
+ emit({"?RT_PER:encode_VideotexString(",{asis,Constraint},",",Value,")"});
+ 'UTCTime' ->
+ emit_enc_known_multiplier_string('VisibleString',Constraint,Value);
+ 'GeneralizedTime' ->
+ emit_enc_known_multiplier_string('VisibleString',Constraint,Value);
+ 'GraphicString' ->
+ emit({"?RT_PER:encode_GraphicString(",{asis,Constraint},",",Value,")"});
+ 'VisibleString' ->
+ emit_enc_known_multiplier_string('VisibleString',Constraint,Value);
+ 'GeneralString' ->
+ emit({"?RT_PER:encode_GeneralString(",{asis,Constraint},",",Value,")"});
+ 'PrintableString' ->
+ emit_enc_known_multiplier_string('PrintableString',Constraint,Value);
+ 'IA5String' ->
+ emit_enc_known_multiplier_string('IA5String',Constraint,Value);
+ 'BMPString' ->
+ emit_enc_known_multiplier_string('BMPString',Constraint,Value);
+ 'UniversalString' ->
+ emit_enc_known_multiplier_string('UniversalString',Constraint,Value);
+ 'ANY' ->
+ emit(["?RT_PER:encode_open_type(", {asis,Constraint}, ",",
+ Value, ")"]);
+ 'ASN1_OPEN_TYPE' ->
+ NewValue = case Constraint of
+ [#'Externaltypereference'{type=Tname}] ->
+ io_lib:format(
+ "?RT_PER:complete(enc_~s(~s))",[Tname,Value]);
+ [#type{def=#'Externaltypereference'{type=Tname}}] ->
+ io_lib:format(
+ "?RT_PER:complete(enc_~s(~s))",[Tname,Value]);
+ _ -> Value
+ end,
+ emit(["?RT_PER:encode_open_type(", {asis,Constraint}, ",",
+ NewValue, ")"]);
+ XX ->
+ exit({asn1_error,nyi,XX})
+ end.
+
+emit_enc_known_multiplier_string(StringType,C,Value) ->
+ SizeC =
+ case get_constraint(C,'SizeConstraint') of
+ L when list(L) -> {lists:min(L),lists:max(L)};
+ L -> L
+ end,
+ PAlphabC = get_constraint(C,'PermittedAlphabet'),
+ case {StringType,PAlphabC} of
+ {'UniversalString',{_,_}} ->
+ exit({error,{asn1,{'not implemented',"UniversalString with "
+ "PermittedAlphabet constraint"}}});
+ {'BMPString',{_,_}} ->
+ exit({error,{asn1,{'not implemented',"BMPString with "
+ "PermittedAlphabet constraint"}}});
+ _ -> ok
+ end,
+ NumBits = get_NumBits(C,StringType),
+ CharOutTab = get_CharOutTab(C,StringType),
+    %% NumBits and CharOutTab for chars_encode
+ emit_enc_k_m_string(StringType,SizeC,NumBits,CharOutTab,Value).
+
+emit_enc_k_m_string(_StringType,0,_NumBits,_CharOutTab,_Value) ->
+ emit({"[]"});
+emit_enc_k_m_string(StringType,SizeC,NumBits,CharOutTab,Value) ->
+ emit({"?RT_PER:encode_known_multiplier_string(",{asis,StringType},",",
+ {asis,SizeC},",",NumBits,",",{asis,CharOutTab},",",Value,")"}).
+
+emit_dec_known_multiplier_string(StringType,C,BytesVar) ->
+ SizeC = get_constraint(C,'SizeConstraint'),
+ PAlphabC = get_constraint(C,'PermittedAlphabet'),
+ case {StringType,PAlphabC} of
+ {'BMPString',{_,_}} ->
+ exit({error,{asn1,
+ {'not implemented',
+ "BMPString with PermittedAlphabet "
+ "constraint"}}});
+ _ ->
+ ok
+ end,
+ NumBits = get_NumBits(C,StringType),
+ CharInTab = get_CharInTab(C,StringType),
+ case SizeC of
+ 0 ->
+ emit({"{[],",BytesVar,"}"});
+ _ ->
+ emit({"?RT_PER:decode_known_multiplier_string(",
+ {asis,StringType},",",{asis,SizeC},",",NumBits,
+ ",",{asis,CharInTab},",",BytesVar,")"})
+ end.
+
+
+%% copied from run time module
+
+get_CharOutTab(C,StringType) ->
+ get_CharTab(C,StringType,out).
+
+get_CharInTab(C,StringType) ->
+ get_CharTab(C,StringType,in).
+
+get_CharTab(C,StringType,InOut) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ {'SingleValue',Sv} ->
+ get_CharTab2(C,StringType,hd(Sv),lists:max(Sv),Sv,InOut);
+ no ->
+ case StringType of
+ 'IA5String' ->
+ {0,16#7F,notab};
+ 'VisibleString' ->
+ get_CharTab2(C,StringType,16#20,16#7F,notab,InOut);
+ 'PrintableString' ->
+ Chars = lists:sort(
+ " '()+,-./0123456789:=?ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"),
+ get_CharTab2(C,StringType,hd(Chars),lists:max(Chars),Chars,InOut);
+ 'NumericString' ->
+ get_CharTab2(C,StringType,16#20,$9," 0123456789",InOut);
+ 'UniversalString' ->
+ {0,16#FFFFFFFF,notab};
+ 'BMPString' ->
+ {0,16#FFFF,notab}
+ end
+ end.
+
+get_CharTab2(C,StringType,Min,Max,Chars,InOut) ->
+ BitValMax = (1 bsl get_NumBits(C,StringType))-1,
+ if
+ Max =< BitValMax ->
+ {0,Max,notab};
+ true ->
+ case InOut of
+ out ->
+ {Min,Max,create_char_tab(Min,Chars)};
+ in ->
+ {Min,Max,list_to_tuple(Chars)}
+ end
+ end.
+
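+%% create_char_tab/2 builds a tuple that maps a character (as an offset
+%% from Min) to its encoding value, with 'false' in unused positions.
+%% For example (illustration only): create_char_tab($0, "02") returns
+%% {0,false,1}.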
+create_char_tab(Min,L) ->
+ list_to_tuple(create_char_tab(Min,L,0)).
+create_char_tab(Min,[Min|T],V) ->
+ [V|create_char_tab(Min+1,T,V+1)];
+create_char_tab(_Min,[],_V) ->
+ [];
+create_char_tab(Min,L,V) ->
+ [false|create_char_tab(Min+1,L,V)].
+
+get_NumBits(C,StringType) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ {'SingleValue',Sv} ->
+ charbits(length(Sv),aligned);
+ no ->
+ case StringType of
+ 'IA5String' ->
+ charbits(128,aligned); % 16#00..16#7F
+ 'VisibleString' ->
+ charbits(95,aligned); % 16#20..16#7E
+ 'PrintableString' ->
+ charbits(74,aligned); % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+ 'NumericString' ->
+ charbits(11,aligned); % $ ,"0123456789"
+ 'UniversalString' ->
+ 32;
+ 'BMPString' ->
+ 16
+ end
+ end.
+
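+%% charbits/1 gives the number of bits needed for an alphabet of the given
+%% size; charbits/2 rounds that up to the aligned field sizes used by
+%% aligned PER.  For example, PrintableString has 74 characters, so
+%% charbits(74) is 7 and charbits(74, aligned) is 8.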
+charbits(NumOfChars,aligned) ->
+ case charbits(NumOfChars) of
+ 1 -> 1;
+ 2 -> 2;
+ B when B =< 4 -> 4;
+ B when B =< 8 -> 8;
+ B when B =< 16 -> 16;
+ B when B =< 32 -> 32
+ end.
+
+charbits(NumOfChars) when NumOfChars =< 2 -> 1;
+charbits(NumOfChars) when NumOfChars =< 4 -> 2;
+charbits(NumOfChars) when NumOfChars =< 8 -> 3;
+charbits(NumOfChars) when NumOfChars =< 16 -> 4;
+charbits(NumOfChars) when NumOfChars =< 32 -> 5;
+charbits(NumOfChars) when NumOfChars =< 64 -> 6;
+charbits(NumOfChars) when NumOfChars =< 128 -> 7;
+charbits(NumOfChars) when NumOfChars =< 256 -> 8;
+charbits(NumOfChars) when NumOfChars =< 512 -> 9;
+charbits(NumOfChars) when NumOfChars =< 1024 -> 10;
+charbits(NumOfChars) when NumOfChars =< 2048 -> 11;
+charbits(NumOfChars) when NumOfChars =< 4096 -> 12;
+charbits(NumOfChars) when NumOfChars =< 8192 -> 13;
+charbits(NumOfChars) when NumOfChars =< 16384 -> 14;
+charbits(NumOfChars) when NumOfChars =< 32768 -> 15;
+charbits(NumOfChars) when NumOfChars =< 65536 -> 16;
+charbits(NumOfChars) when integer(NumOfChars) ->
+ 16 + charbits1(NumOfChars bsr 16).
+
+charbits1(0) ->
+ 0;
+charbits1(NumOfChars) ->
+ 1 + charbits1(NumOfChars bsr 1).
+
+%% copied from run time module
+
+emit_enc_octet_string(Constraint,Value) ->
+ case get_constraint(Constraint,'SizeConstraint') of
+ 0 ->
+ emit({" []"});
+ 1 ->
+ asn1ct_name:new(tmpval),
+ emit({" begin",nl}),
+ emit({" [",{curr,tmpval},"] = ",Value,",",nl}),
+% emit({" {bits,8,",{curr,tmpval},"}",nl}),
+ emit({" [10,8,",{curr,tmpval},"]",nl}),
+ emit(" end");
+ 2 ->
+ asn1ct_name:new(tmpval),
+ emit({" begin",nl}),
+ emit({" [",{curr,tmpval},",",{next,tmpval},"] = ",
+ Value,",",nl}),
+% emit({" [{bits,8,",{curr,tmpval},"},{bits,8,",
+% {next,tmpval},"}]",nl}),
+ emit({" [[10,8,",{curr,tmpval},"],[10,8,",
+ {next,tmpval},"]]",nl}),
+ emit(" end"),
+ asn1ct_name:new(tmpval);
+ Sv when integer(Sv),Sv =< 256 ->
+ asn1ct_name:new(tmpval),
+ emit({" begin",nl}),
+% emit({" case length(",Value,") == ",Sv," of",nl}),
+ emit({" case length(",Value,") of",nl}),
+ emit({" ",{curr,tmpval}," when ",{curr,tmpval}," == ",Sv," -> [2,20,",{curr,tmpval},",",Value,"];",nl}),
+ emit({" _ -> exit({error,{value_out_of_bounds,",Value,"}})",
+ nl," end",nl}),
+ emit(" end");
+ Sv when integer(Sv),Sv =< 65535 ->
+ asn1ct_name:new(tmpval),
+ emit({" begin",nl}),
+% emit({" case length(",Value,") == ",Sv," of",nl}),
+ emit({" case length(",Value,") of",nl}),
+% emit({" true -> [align,{octets,",Value,"}];",nl}),
+ emit({" ",{curr,tmpval}," when ",{curr,tmpval}," == ",Sv," -> [2,21,",{curr,tmpval},",",Value,"];",nl}),
+ emit({" _ -> exit({error,{value_out_of_bounds,",Value,"}})",
+ nl," end",nl}),
+ emit(" end");
+ C ->
+ emit({" ?RT_PER:encode_octet_string(",{asis,C},",false,",Value,")",nl})
+ end.
+
+emit_dec_octet_string(Constraint,BytesVar) ->
+ case get_constraint(Constraint,'SizeConstraint') of
+ 0 ->
+ emit({" {[],",BytesVar,"}",nl});
+ {_,0} ->
+ emit({" {[],",BytesVar,"}",nl});
+ C ->
+ emit({" ?RT_PER:decode_octet_string(",BytesVar,",",
+ {asis,C},",false)",nl})
+ end.
+
+emit_enc_integer_case(Value) ->
+ case get(component_type) of
+ {true,#'ComponentType'{prop=Prop}} ->
+ emit({" begin",nl}),
+ case Prop of
+ Opt when Opt=='OPTIONAL';
+ tuple(Opt),element(1,Opt)=='DEFAULT' ->
+ emit({" case ",Value," of",nl}),
+ ok;
+ _ ->
+ emit({" ",{curr,tmpval},"=",Value,",",nl}),
+ emit({" case ",{curr,tmpval}," of",nl}),
+ asn1ct_name:new(tmpval)
+ end;
+% asn1ct_name:new(tmpval);
+ _ ->
+ emit({" case ",Value," of ",nl})
+ end.
+emit_enc_integer_end_case() ->
+ case get(component_type) of
+ {true,_} ->
+ emit({nl," end"}); % end of begin ... end
+ _ -> ok
+ end.
+
+
+emit_enc_integer_NNL(C,Value,NNL) ->
+ EncVal = enc_integer_NNL_cases(Value,NNL),
+ emit_enc_integer(C,EncVal).
+
+enc_integer_NNL_cases(Value,NNL) ->
+ asn1ct_name:new(tmpval),
+ TmpVal = asn1ct_gen:mk_var(asn1ct_name:curr(tmpval)),
+ Cases=enc_integer_NNL_cases1(NNL),
+ lists:flatten(io_lib:format("(case ~s of "++Cases++
+ "~s when atom(~s)->exit({error,{asn1,{namednumber,~s}}});_->~s end)",[Value,TmpVal,TmpVal,TmpVal,Value])).
+
+enc_integer_NNL_cases1([{NNo,No}|Rest]) ->
+ io_lib:format("~w->~w;",[NNo,No])++enc_integer_NNL_cases1(Rest);
+enc_integer_NNL_cases1([]) ->
+ "".
+
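+%% The clauses below open-code the encoding of constrained integers.  A
+%% rough sketch of what is emitted for the effective constraint
+%% [{'ValueRange',{0,10},11,{bits,4}}] (the value variable is assumed to
+%% be Val):
+%%
+%%   case Val of
+%%      Tmpval when Tmpval=<10, Tmpval>=0 ->
+%%         [10,4,Tmpval-0];
+%%      Tmpval ->
+%%         exit({error,{value_out_of_bounds,Tmpval}})
+%%   end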
+emit_enc_integer([{'SingleValue',Int}],Value) ->
+ asn1ct_name:new(tmpval),
+ emit_enc_integer_case(Value),% emit([" case ",Value," of",nl]),
+ emit([" ",Int," -> [];",nl]),
+ emit([" ",{curr,tmpval}," ->",nl]),
+ emit([" exit({error,{value_out_of_bounds,",{curr,tmpval},"}})",
+ nl," end",nl]),
+ emit_enc_integer_end_case();
+
+emit_enc_integer([{_,{Lb,Ub},_Range,{bits,NoBs}}],Value) -> % Range =< 255
+ asn1ct_name:new(tmpval),
+ emit_enc_integer_case(Value),
+ emit([" ",{curr,tmpval}," when ",{curr,tmpval},"=<",Ub,",",
+ {curr,tmpval},">=",Lb," ->",nl]),
+ emit([" [10,",NoBs,",",{curr,tmpval},"-",Lb,"];",nl]),
+ emit([" ",{curr,tmpval}," ->",nl]),
+ emit([" exit({error,{value_out_of_bounds,",{curr,tmpval},"}})",
+ nl," end",nl]),
+ emit_enc_integer_end_case();
+
+emit_enc_integer([{_,{Lb,Ub},Range,_}],Value) when Range =< 256 ->
+ asn1ct_name:new(tmpval),
+ emit_enc_integer_case(Value),
+ emit([" ",{curr,tmpval}," when ",{curr,tmpval},"=<",Ub,",",
+ {curr,tmpval},">=",Lb," ->",nl]),
+ emit([" [20,1,",{curr,tmpval},"-",Lb,"];",nl]),
+ emit([" ",{curr,tmpval}," ->",nl]),
+ emit([" exit({error,{value_out_of_bounds,",{curr,tmpval},"}})",
+ nl," end",nl]),
+ emit_enc_integer_end_case();
+
+emit_enc_integer([{_,{Lb,Ub},Range,_}],Value) when Range =< 65536 ->
+ asn1ct_name:new(tmpval),
+ emit_enc_integer_case(Value),
+ emit([" ",{curr,tmpval}," when ",{curr,tmpval},"=<",Ub,",",
+ {curr,tmpval},">=",Lb," ->",nl]),
+ emit([" [20,2,<<(",{curr,tmpval},"-",Lb,"):16>>];",nl]),
+ emit([" ",{curr,tmpval}," ->",nl]),
+ emit([" exit({error,{value_out_of_bounds,",{curr,tmpval},"}})",
+ nl," end",nl]),
+ emit_enc_integer_end_case();
+
+
+emit_enc_integer(C,Value) ->
+ emit({" ?RT_PER:encode_integer(",{asis,C},",",Value,")"}).
+
+
+
+
+enc_enum_cases(Value,NewList) ->
+ asn1ct_name:new(tmpval),
+ TmpVal = asn1ct_gen:mk_var(asn1ct_name:curr(tmpval)),
+ Cases=enc_enum_cases1(NewList),
+ lists:flatten(io_lib:format("(case ~s of "++Cases++
+ "~s ->exit({error,"
+ "{asn1,{enumerated,~s}}})"
+ " end)",
+ [Value,TmpVal,TmpVal])).
+enc_enum_cases1(NNL) ->
+ enc_enum_cases1(NNL,0).
+enc_enum_cases1([H|T],Index) ->
+ io_lib:format("~w->~w;",[H,Index])++enc_enum_cases1(T,Index+1);
+enc_enum_cases1([],_) ->
+ "".
+
+
+emit_enc_enumerated_cases(C, [H], Count) ->
+ emit_enc_enumerated_case(C, H, Count),
+ emit([";",nl,"EnumVal -> exit({error,{asn1, {enumerated_not_in_range, EnumVal}}})"]),
+ emit([nl,"end"]);
+emit_enc_enumerated_cases(C, ['EXT_MARK'|T], _Count) ->
+ emit_enc_enumerated_cases(C, T, 0);
+emit_enc_enumerated_cases(C, [H1,H2|T], Count) ->
+ emit_enc_enumerated_case(C, H1, Count),
+ emit([";",nl]),
+ emit_enc_enumerated_cases(C, [H2|T], Count+1).
+
+
+%% The function clauses that match tuples whose first element is
+%% asn1_enum, 1 or 0, and the clause that matches the atom 'EXT_MARK',
+%% handle ENUMERATED types with an extension mark.
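+%% For example (illustrative names; Constraint stands for the literal
+%% value-range constraint), a root value 'red' at position 0 and an
+%% extension value 'blue' at position 0 of the extension list roughly
+%% yield the emitted case arms:
+%%
+%%   'red' -> [0,?RT_PER:encode_integer(Constraint, 0)];
+%%   'blue' -> [1,?RT_PER:encode_small_number(0)]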
+emit_enc_enumerated_case(_C, {asn1_enum,High}, _) ->
+ %% ENUMERATED with extensionmark
+ %% value higher than the extension base and not
+ %% present in the extension range.
+ emit(["{asn1_enum,EnumV} when integer(EnumV), EnumV > ",High," -> ",
+ "[1,?RT_PER:encode_small_number(EnumV)]"]);
+emit_enc_enumerated_case(_C, 'EXT_MARK', _Count) ->
+ %% ENUMERATED with extensionmark
+ true;
+emit_enc_enumerated_case(_C, {1,EnumName}, Count) ->
+ %% ENUMERATED with extensionmark
+ %% values higher than extension root
+ emit(["'",EnumName,"' -> [1,?RT_PER:encode_small_number(",Count,")]"]);
+emit_enc_enumerated_case(C, {0,EnumName}, Count) ->
+ %% ENUMERATED with extensionmark
+ %% values within extension root
+ emit(["'",EnumName,"' -> [0,?RT_PER:encode_integer(",{asis,C},", ",Count,")]"]);
+
+%% This clause is invoked in case of an ENUMERATED without extension mark
+emit_enc_enumerated_case(_C, EnumName, Count) ->
+ emit(["'",EnumName,"' -> ",Count]).
+
+
+get_constraint([{Key,V}],Key) ->
+ V;
+get_constraint([],_) ->
+ no;
+get_constraint(C,Key) ->
+ case lists:keysearch(Key,1,C) of
+ false ->
+ no;
+ {value,{_,V}} ->
+ V
+ end.
+
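+%% get_constraints/2 returns every constraint tuple in C tagged with Key;
+%% keysearch_allwithkey collects all matches, not just the first one.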
+get_constraints(L=[{Key,_}],Key) ->
+ L;
+get_constraints([],_) ->
+ [];
+get_constraints(C,Key) ->
+ {value,L} = keysearch_allwithkey(Key,1,C,[]),
+ L.
+
+keysearch_allwithkey(Key,Ix,C,Acc) ->
+ case lists:keysearch(Key,Ix,C) of
+ false ->
+ {value,Acc};
+ {value,T} ->
+ RestC = lists:delete(T,C),
+ keysearch_allwithkey(Key,Ix,RestC,[T|Acc])
+ end.
+
+%% effective_constraint(Type,C)
+%% Type = atom()
+%% C = [C1,...]
+%% C1 = {'SingleValue',SV} | {'ValueRange',VR} | {atom(),term()}
+%% SV = integer() | [integer(),...]
+%% VR = {Lb,Ub}
+%% Lb = 'MIN' | integer()
+%% Ub = 'MAX' | integer()
+%% Returns a single value if C only has a single value constraint, and no
+%% value range constraints, that constrains to a single value, otherwise
+%% returns a value range that has the lower bound set to the lowest value
+%% of all single values and lower bound values in C and the upper bound to
+%% the greatest value.
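+%% For example (values chosen for illustration):
+%%   effective_constraint(integer, [{'ValueRange',{0,10}}]) returns
+%%   [{'ValueRange',{0,10},11,{bits,4}}], and
+%%   effective_constraint(integer, [{'SingleValue',5}]) returns
+%%   [{'SingleValue',5}].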
+effective_constraint(integer,[C={{_,_},_}|_Rest]) -> % extension
+ [C]; %% [C|effective_constraint(integer,Rest)]; XXX what is possible ???
+effective_constraint(integer,C) ->
+ SVs = get_constraints(C,'SingleValue'),
+ SV = effective_constr('SingleValue',SVs),
+ VRs = get_constraints(C,'ValueRange'),
+ VR = effective_constr('ValueRange',VRs),
+ CRange = greatest_common_range(SV,VR),
+ pre_encode(integer,CRange);
+effective_constraint(bitstring,C) ->
+% Constr=get_constraints(C,'SizeConstraint'),
+% case Constr of
+% [] -> no;
+% [{'SizeConstraint',Val}] -> Val;
+% Other -> Other
+% end;
+ get_constraint(C,'SizeConstraint');
+effective_constraint(Type,C) ->
+ io:format("Effective constraint for ~p, not implemented yet.~n",[Type]),
+ C.
+
+effective_constr(_,[]) ->
+ [];
+effective_constr('SingleValue',List) ->
+ SVList = lists:flatten(lists:map(fun(X)->element(2,X)end,List)),
+ case lists:usort(SVList) of
+ [N] ->
+ [{'SingleValue',N}];
+ L when list(L) ->
+ [{'ValueRange',{hd(L),lists:last(L)}}]
+ end;
+effective_constr('ValueRange',List) ->
+ LBs = lists:map(fun({_,{Lb,_}})-> Lb end,List),
+ UBs = lists:map(fun({_,{_,Ub}})-> Ub end,List),
+ Lb = least_Lb(LBs),
+ [{'ValueRange',{Lb,lists:max(UBs)}}].
+
+greatest_common_range([],VR) ->
+ VR;
+greatest_common_range(SV,[]) ->
+ SV;
+greatest_common_range([{_,Int}],[{_,{'MIN',Ub}}]) when integer(Int),
+ Int > Ub ->
+ [{'ValueRange',{'MIN',Int}}];
+greatest_common_range([{_,Int}],[{_,{Lb,Ub}}]) when integer(Int),
+ Int < Lb ->
+ [{'ValueRange',{Int,Ub}}];
+greatest_common_range([{_,Int}],VR=[{_,{_Lb,_Ub}}]) when integer(Int) ->
+ VR;
+greatest_common_range([{_,L}],[{_,{Lb,Ub}}]) when list(L) ->
+ Min = least_Lb([Lb|L]),
+ Max = greatest_Ub([Ub|L]),
+ [{'ValueRange',{Min,Max}}].
+
+
+least_Lb(L) ->
+ case lists:member('MIN',L) of
+ true -> 'MIN';
+ _ -> lists:min(L)
+ end.
+
+greatest_Ub(L) ->
+ case lists:member('MAX',L) of
+ true -> 'MAX';
+ _ -> lists:max(L)
+ end.
+
+% effective_constraint1('SingleValue',List) ->
+% SVList = lists:map(fun(X)->element(2,X)end,List),
+% sv_effective_constraint(hd(SVList),tl(SVList));
+% effective_constraint1('ValueRange',List) ->
+% VRList = lists:map(fun(X)->element(2,X)end,List),
+% vr_effective_constraint(lists:map(fun(X)->element(1,X)end,VRList),
+% lists:map(fun(X)->element(2,X)end,VRList)).
+
+%% vr_effective_constraint/2
+%% Gets all LowerEndPoints and UpperEndPoints as arguments
+%% Returns {'ValueRange',{Lb,Ub}} where Lb is the highest value of
+%% the LowerEndPoints and Ub is the lowest value of the UpperEndPoints,
+%% i.e. the intersection of all value ranges.
+% vr_effective_constraint(Mins,Maxs) ->
+% Lb=lists:foldl(fun(X,'MIN') when integer(X) -> X;
+% (X,'MIN') -> 'MIN';
+% (X,AccIn) when integer(X),X >= AccIn -> X;
+% (X,AccIn) -> AccIn
+% end,hd(Mins),tl(Mins)),
+% Ub = lists:min(Maxs),
+% {'ValueRange',{Lb,Ub}}.
+
+
+% sv_effective_constraint(SV,[]) ->
+% {'SingleValue',SV};
+% sv_effective_constraint([],_) ->
+% exit({error,{asn1,{illegal_single_value_constraint}}});
+% sv_effective_constraint(SV,[SV|Rest]) ->
+% sv_effective_constraint(SV,Rest);
+% sv_effective_constraint(Int,[SV|Rest]) when integer(Int),list(SV) ->
+% case lists:member(Int,SV) of
+% true ->
+% sv_effective_constraint(Int,Rest);
+% _ ->
+% exit({error,{asn1,{illegal_single_value_constraint}}})
+% end;
+% sv_effective_constraint(SV,[Int|Rest]) when integer(Int),list(SV) ->
+% case lists:member(Int,SV) of
+% true ->
+% sv_effective_constraint(Int,Rest);
+% _ ->
+% exit({error,{asn1,{illegal_single_value_constraint}}})
+% end;
+% sv_effective_constraint(SV1,[SV2|Rest]) when list(SV1),list(SV2) ->
+% sv_effective_constraint(common_set(SV1,SV2),Rest);
+% sv_effective_constraint(_,_) ->
+% exit({error,{asn1,{illegal_single_value_constraint}}}).
+
+%% common_set/2
+%% Two lists as input
+%% Returns the list with all elements that are common for both
+%% input lists
+% common_set(SV1,SV2) ->
+% lists:filter(fun(X)->lists:member(X,SV1) end,SV2).
+
+
+
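+%% pre_encode/2 annotates an integer 'ValueRange' constraint with the size
+%% of the range and the field width the encoder will use.  For example
+%% (illustration only): pre_encode(integer, [{'ValueRange',{0,300}}])
+%% gives [{'ValueRange',{0,300},301,{octets,2}}].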
+pre_encode(integer,[]) ->
+ [];
+pre_encode(integer,C=[{'SingleValue',_}]) ->
+ C;
+pre_encode(integer,C=[{'ValueRange',VR={Lb,Ub}}]) when integer(Lb),integer(Ub)->
+ Range = Ub-Lb+1,
+ if
+ Range =< 255 ->
+ NoBits = no_bits(Range),
+ [{'ValueRange',VR,Range,{bits,NoBits}}];
+ Range =< 256 ->
+ [{'ValueRange',VR,Range,{octets,1}}];
+ Range =< 65536 ->
+ [{'ValueRange',VR,Range,{octets,2}}];
+ true ->
+ C
+ end;
+pre_encode(integer,C) ->
+ C.
+
+no_bits(2) -> 1;
+no_bits(N) when N=<4 -> 2;
+no_bits(N) when N=<8 -> 3;
+no_bits(N) when N=<16 -> 4;
+no_bits(N) when N=<32 -> 5;
+no_bits(N) when N=<64 -> 6;
+no_bits(N) when N=<128 -> 7;
+no_bits(N) when N=<255 -> 8.
+
+%% Object code generating for encoding and decoding
+%% ------------------------------------------------
+
+gen_obj_code(Erules,_Module,Obj) when record(Obj,typedef) ->
+ ObjName = Obj#typedef.name,
+ Def = Obj#typedef.typespec,
+ #'Externaltypereference'{module=Mod,type=ClassName} =
+ Def#'Object'.classname,
+ Class = asn1_db:dbget(Mod,ClassName),
+ {object,_,Fields} = Def#'Object'.def,
+ emit({nl,nl,nl,"%%================================"}),
+ emit({nl,"%% ",ObjName}),
+ emit({nl,"%%================================",nl}),
+ EncConstructed =
+% gen_encode_objectfields(Class#classdef.typespec,ObjName,Fields,[]),
+ gen_encode_objectfields(ClassName,get_class_fields(Class),
+ ObjName,Fields,[]),
+ emit(nl),
+ gen_encode_constr_type(Erules,EncConstructed),
+ emit(nl),
+ DecConstructed =
+% gen_decode_objectfields(Class#classdef.typespec,ObjName,Fields,[]),
+ gen_decode_objectfields(ClassName,get_class_fields(Class),
+ ObjName,Fields,[]),
+ emit(nl),
+ gen_decode_constr_type(Erules,DecConstructed),
+ emit(nl);
+gen_obj_code(_Erules,_Module,Obj) when record(Obj,pobjectdef) ->
+ ok.
+
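+%% A rough sketch (object, class and field names are hypothetical) of the
+%% clauses generated for an object with a type field 'ArgType' and an
+%% object field 'ErrorSet':
+%%
+%%   'enc_myObject'('ArgType', Val, _RestPrimFieldName) ->
+%%      'enc_myObject_ArgType'(Val);
+%%   'enc_myObject'('ErrorSet', Val, [H|T]) ->
+%%      'enc_ErrorSetType'(H, Val, T).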
+gen_encode_objectfields(ClassName,[{typefield,Name,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(V) ->
+ emit(["'enc_",ObjName,"'(",{asis,Name},
+ ",",V,",_RestPrimFieldName) ->",nl])
+ end,
+% emit(["'enc_",ObjName,"'(",{asis,Name},
+% ", Val, RestPrimFieldName) ->",nl]),
+ MaybeConstr =
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} -> %% this case is illegal
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_"),
+ emit(" <<>>"),
+ [];
+ {false,{'DEFAULT',DefaultType}} ->
+ EmitFuncClause("Val"),
+ gen_encode_default_call(ClassName,Name,DefaultType);
+ {{Name,TypeSpec},_} ->
+	    %% A specified field overwrites any 'DEFAULT' or
+	    %% 'OPTIONAL' field in the class
+ EmitFuncClause("Val"),
+ gen_encode_field_call(ObjName,Name,TypeSpec)
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_encode_objectfields(ClassName,Rest,ObjName,ObjectFields,
+ MaybeConstr++ConstrAcc);
+gen_encode_objectfields(ClassName,[{objectfield,Name,_,_,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Attrs) ->
+ emit(["'enc_",ObjName,"'(",{asis,Name},
+ ",",Attrs,") ->",nl])
+ end,
+% emit(["'enc_",ObjName,"'(",{asis,Name},
+% ", Val,[H|T]) ->",nl]),
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} ->
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_,_"),
+ emit([" exit({error,{'use of missing field in object', ",Name,
+ "}})"]);
+ {false,{'DEFAULT',_DefaultObject}} ->
+ exit({error,{asn1,{"not implemented yet",Name}}});
+ {{Name,TypeSpec},_} ->
+ EmitFuncClause("Val,[H|T]"),
+ case TypeSpec#typedef.name of
+ {ExtMod,TypeName} ->
+ emit({indent(3),"'",ExtMod,"':'enc_",TypeName,
+ "'(H, Val, T)"});
+ TypeName ->
+ emit({indent(3),"'enc_",TypeName,"'(H, Val, T)"})
+ end
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_encode_objectfields(ClassName,Rest,ObjName,ObjectFields,ConstrAcc);
+gen_encode_objectfields(ClassName,[_C|Cs],O,OF,Acc) ->
+ gen_encode_objectfields(ClassName,Cs,O,OF,Acc);
+gen_encode_objectfields(_,[],_,_,Acc) ->
+ Acc.
+
+% gen_encode_objectfields(Class,ObjName,[{FieldName,Type}|Rest],ConstrAcc) ->
+% Fields = Class#objectclass.fields,
+
+% MaybeConstr =
+% case is_typefield(Fields,FieldName) of
+% true ->
+% Def = Type#typedef.typespec,
+% emit({"'enc_",ObjName,"'(",{asis,FieldName},
+% ", Val, Dummy) ->",nl}),
+
+% CAcc =
+% case Type#typedef.name of
+% {primitive,bif} ->
+% gen_encode_prim(per,Def,"false","Val"),
+% [];
+% {constructed,bif} ->
+% emit({" 'enc_",ObjName,'_',FieldName,
+% "'(Val)"}),
+% [{['enc_',ObjName,'_',FieldName],Def}];
+% {ExtMod,TypeName} ->
+% emit({" '",ExtMod,"':'enc_",TypeName,"'(Val)"}),
+% [];
+% TypeName ->
+% emit({" 'enc_",TypeName,"'(Val)"}),
+% []
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% CAcc;
+% {false,objectfield} ->
+% emit({"'enc_",ObjName,"'(",{asis,FieldName},
+% ", Val, [H|T]) ->",nl}),
+% case Type#typedef.name of
+% {ExtMod,TypeName} ->
+% emit({indent(3),"'",ExtMod,"':'enc_",TypeName,
+% "'(H, Val, T)"});
+% TypeName ->
+% emit({indent(3),"'enc_",TypeName,"'(H, Val, T)"})
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% [];
+% {false,_} -> []
+% end,
+% gen_encode_objectfields(Class,ObjName,Rest,MaybeConstr ++ ConstrAcc);
+% gen_encode_objectfields(C,O,[_|T],Acc) ->
+% gen_encode_objectfields(C,O,T,Acc);
+% gen_encode_objectfields(_,_,[],Acc) ->
+% Acc.
+
+gen_encode_constr_type(Erules,[TypeDef|Rest]) when record(TypeDef,typedef) ->
+ case is_already_generated(enc,TypeDef#typedef.name) of
+ true -> ok;
+ _ ->
+ Name = lists:concat(["enc_",TypeDef#typedef.name]),
+ emit({Name,"(Val) ->",nl}),
+ Def = TypeDef#typedef.typespec,
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ asn1ct_gen:gen_encode_constructed(Erules,Name,InnerType,Def),
+ gen_encode_constr_type(Erules,Rest)
+ end;
+gen_encode_constr_type(_,[]) ->
+ ok.
+
+gen_encode_field_call(ObjName,FieldName,Type) ->
+ Def = Type#typedef.typespec,
+ case Type#typedef.name of
+ {primitive,bif} ->
+ gen_encode_prim(per,Def,"false",
+ "Val"),
+ [];
+ {constructed,bif} ->
+ emit({" 'enc_",ObjName,'_',FieldName,
+ "'(Val)"}),
+ [Type#typedef{name=list_to_atom(lists:concat([ObjName,'_',FieldName]))}];
+ {ExtMod,TypeName} ->
+ emit({" '",ExtMod,"':'enc_",TypeName,
+ "'(Val)"}),
+ [];
+ TypeName ->
+ emit({" 'enc_",TypeName,"'(Val)"}),
+ []
+ end.
+
+gen_encode_default_call(ClassName,FieldName,Type) ->
+ CurrentMod = get(currmod),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+%% asn1ct_gen:gen_encode_constructed(Erules,Typename,InnerType,Type);
+ emit([" 'enc_",ClassName,'_',FieldName,"'(Val)"]),
+ [#typedef{name=list_to_atom(lists:concat([ClassName,'_',FieldName])),
+ typespec=Type}];
+ {primitive,bif} ->
+ gen_encode_prim(per,Type,"false","Val"),
+ [];
+ #'Externaltypereference'{module=CurrentMod,type=Etype} ->
+ emit([" 'enc_",Etype,"'(Val)",nl]),
+ [];
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit([" '",Emod,"':'enc_",Etype,"'(Val)",nl]),
+ []
+ end.
+
+
+
+gen_decode_objectfields(ClassName,[{typefield,Name,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Bytes) ->
+ emit(["'dec_",ObjName,"'(",{asis,Name},",",Bytes,
+ ",_,_RestPrimFieldName) ->",nl])
+ end,
+% emit(["'dec_",ObjName,"'(",{asis,Name},
+% ", Bytes, _, RestPrimFieldName) ->",nl]),
+ MaybeConstr=
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} -> %% this case is illegal
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_"),
+ emit([" asn1_NOVALUE"]),
+ [];
+ {false,{'DEFAULT',DefaultType}} ->
+ EmitFuncClause("Bytes"),
+ gen_decode_default_call(ClassName,Name,"Bytes",DefaultType);
+ {{Name,TypeSpec},_} ->
+	    %% A specified field overwrites any 'DEFAULT' or
+	    %% 'OPTIONAL' field in the class
+ EmitFuncClause("Bytes"),
+ gen_decode_field_call(ObjName,Name,"Bytes",TypeSpec)
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_decode_objectfields(ClassName,Rest,ObjName,ObjectFields,MaybeConstr++ConstrAcc);
+gen_decode_objectfields(ClassName,[{objectfield,Name,_,_,OptOrMand}|Rest],
+ ObjName,ObjectFields,ConstrAcc) ->
+ EmitFuncClause =
+ fun(Attrs) ->
+ emit(["'dec_",ObjName,"'(",{asis,Name},
+ ",",Attrs,") ->",nl])
+ end,
+% emit(["'dec_",ObjName,"'(",{asis,Name},
+% ", Bytes,_,[H|T]) ->",nl]),
+ case {get_object_field(Name,ObjectFields),OptOrMand} of
+ {false,'MANDATORY'} ->
+ exit({error,{asn1,{"missing mandatory field in object",
+ ObjName}}});
+ {false,'OPTIONAL'} ->
+ EmitFuncClause("_,_,_"),
+ emit([" exit({error,{'illegal use of missing field in object', ",Name,
+ "}})"]);
+ {false,{'DEFAULT',_DefaultObject}} ->
+ exit({error,{asn1,{"not implemented yet",Name}}});
+ {{Name,TypeSpec},_} ->
+ EmitFuncClause("Bytes,_,[H|T]"),
+ case TypeSpec#typedef.name of
+ {ExtMod,TypeName} ->
+ emit({indent(3),"'",ExtMod,"':'dec_",TypeName,
+ "'(H, Bytes, telltype, T)"});
+ TypeName ->
+ emit({indent(3),"'dec_",TypeName,"'(H, Bytes, telltype, T)"})
+ end
+ end,
+ case more_genfields(Rest) of
+ true ->
+ emit([";",nl]);
+ false ->
+ emit([".",nl])
+ end,
+ gen_decode_objectfields(ClassName,Rest,ObjName,ObjectFields,ConstrAcc);
+gen_decode_objectfields(CN,[_C|Cs],O,OF,CAcc) ->
+ gen_decode_objectfields(CN,Cs,O,OF,CAcc);
+gen_decode_objectfields(_,[],_,_,CAcc) ->
+ CAcc.
+
+
+gen_decode_field_call(ObjName,FieldName,Bytes,Type) ->
+ Def = Type#typedef.typespec,
+ case Type#typedef.name of
+ {primitive,bif} ->
+ gen_dec_prim(per,Def,Bytes),
+ [];
+ {constructed,bif} ->
+ emit({" 'dec_",ObjName,'_',FieldName,
+ "'(",Bytes,",telltype)"}),
+ [Type#typedef{name=list_to_atom(lists:concat([ObjName,'_',FieldName]))}];
+ {ExtMod,TypeName} ->
+ emit({" '",ExtMod,"':'dec_",TypeName,
+ "'(",Bytes,", telltype)"}),
+ [];
+ TypeName ->
+ emit({" 'dec_",TypeName,"'(",Bytes,", telltype)"}),
+ []
+ end.
+
+gen_decode_default_call(ClassName,FieldName,Bytes,Type) ->
+ CurrentMod = get(currmod),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ emit([" 'dec_",ClassName,'_',FieldName,"'(",Bytes,", telltype)"]),
+ [#typedef{name=list_to_atom(lists:concat([ClassName,'_',FieldName])),
+ typespec=Type}];
+ {primitive,bif} ->
+ gen_dec_prim(per,Type,Bytes),
+ [];
+ #'Externaltypereference'{module=CurrentMod,type=Etype} ->
+ emit([" 'dec_",Etype,"'(",Bytes,", telltype)",nl]),
+ [];
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit([" '",Emod,"':'dec_",Etype,"'(",Bytes,", telltype)",nl]),
+ []
+ end.
+
+%%%%%%%%%%%%%%%
+
+% gen_decode_objectfields(Class,ObjName,[{FieldName,Type}|Rest],ConstrAcc) ->
+% Fields = Class#objectclass.fields,
+
+% MaybeConstr =
+% case is_typefield(Fields,FieldName) of
+% true ->
+% Def = Type#typedef.typespec,
+% emit({"'dec_",ObjName,"'(",{asis,FieldName},
+% ", Val, Telltype, RestPrimFieldName) ->",nl}),
+
+% CAcc =
+% case Type#typedef.name of
+% {primitive,bif} ->
+% gen_dec_prim(per,Def,"Val"),
+% [];
+% {constructed,bif} ->
+% emit({" 'dec_",ObjName,'_',FieldName,
+% "'(Val, Telltype)"}),
+% [{['dec_',ObjName,'_',FieldName],Def}];
+% {ExtMod,TypeName} ->
+% emit({" '",ExtMod,"':'dec_",TypeName,
+% "'(Val, Telltype)"}),
+% [];
+% TypeName ->
+% emit({" 'dec_",TypeName,"'(Val, Telltype)"}),
+% []
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% CAcc;
+% {false,objectfield} ->
+% emit({"'dec_",ObjName,"'(",{asis,FieldName},
+% ", Val, Telltype, [H|T]) ->",nl}),
+% case Type#typedef.name of
+% {ExtMod,TypeName} ->
+% emit({indent(3),"'",ExtMod,"':'dec_",TypeName,
+% "'(H, Val, Telltype, T)"});
+% TypeName ->
+% emit({indent(3),"'dec_",TypeName,
+% "'(H, Val, Telltype, T)"})
+% end,
+% case more_genfields(Fields,Rest) of
+% true ->
+% emit({";",nl});
+% false ->
+% emit({".",nl})
+% end,
+% [];
+% {false,_} ->
+% []
+% end,
+% gen_decode_objectfields(Class,ObjName,Rest,MaybeConstr ++ ConstrAcc);
+% gen_decode_objectfields(C,O,[_|T],CAcc) ->
+% gen_decode_objectfields(C,O,T,CAcc);
+% gen_decode_objectfields(_,_,[],CAcc) ->
+% CAcc.
+
+gen_decode_constr_type(Erules,[{Name,Def}|Rest]) ->
+ emit({Name,"(Bytes,_) ->",nl}),
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ asn1ct_gen:gen_decode_constructed(Erules,Name,InnerType,Def),
+ gen_decode_constr_type(Erules,Rest);
+gen_decode_constr_type(Erules,[TypeDef|Rest]) when record(TypeDef,typedef) ->
+ case is_already_generated(dec,TypeDef#typedef.name) of
+ true -> ok;
+ _ ->
+ gen_decode(Erules,TypeDef)
+ end,
+ gen_decode_constr_type(Erules,Rest);
+gen_decode_constr_type(_,[]) ->
+ ok.
+
+% is_typefield(Fields,FieldName) ->
+% case lists:keysearch(FieldName,2,Fields) of
+% {value,Field} ->
+% case element(1,Field) of
+% typefield ->
+% true;
+% Other ->
+% {false,Other}
+% end;
+% _ ->
+% false
+% end.
+%% Object Set code generating for encoding and decoding
+%% ----------------------------------------------------
+gen_objectset_code(Erules,ObjSet) ->
+ ObjSetName = ObjSet#typedef.name,
+ Def = ObjSet#typedef.typespec,
+%% {ClassName,ClassDef} = Def#'ObjectSet'.class,
+ #'Externaltypereference'{module=ClassModule,
+ type=ClassName} = Def#'ObjectSet'.class,
+ ClassDef = asn1_db:dbget(ClassModule,ClassName),
+ UniqueFName = Def#'ObjectSet'.uniquefname,
+ Set = Def#'ObjectSet'.set,
+ emit({nl,nl,nl,"%%================================"}),
+ emit({nl,"%% ",ObjSetName}),
+ emit({nl,"%%================================",nl}),
+ case ClassName of
+ {_Module,ExtClassName} ->
+ gen_objset_code(Erules,ObjSetName,UniqueFName,Set,
+ ExtClassName,ClassDef);
+ _ ->
+ gen_objset_code(Erules,ObjSetName,UniqueFName,Set,
+ ClassName,ClassDef)
+ end,
+ emit(nl).
+
+gen_objset_code(Erules,ObjSetName,UniqueFName,Set,ClassName,ClassDef)->
+ ClassFields = (ClassDef#classdef.typespec)#objectclass.fields,
+ InternalFuncs=
+ gen_objset_enc(ObjSetName,UniqueFName,Set,ClassName,
+ ClassFields,1,[]),
+ gen_objset_dec(ObjSetName,UniqueFName,Set,ClassName,ClassFields,1),
+ gen_internal_funcs(Erules,InternalFuncs).
+
+gen_objset_enc(_,{unique,undefined},_,_,_,_,_) ->
+    %% There is no unique field in the class of this object set,
+    %% so we do not bother about the constraint.
+ [];
+gen_objset_enc(ObjSName,UniqueName,[{ObjName,Val,Fields},T|Rest],
+ ClName,ClFields,NthObj,Acc)->
+ emit({"'getenc_",ObjSName,"'(",{asis,UniqueName},",",
+ {asis,Val},") ->",nl}),
+ {InternalFunc,NewNthObj}=
+ case ObjName of
+ no_name ->
+ gen_inlined_enc_funs(Fields,ClFields,ObjSName,NthObj);
+ _ ->
+ emit({" fun 'enc_",ObjName,"'/3"}),
+ {[],NthObj}
+ end,
+ emit({";",nl}),
+ gen_objset_enc(ObjSName,UniqueName,[T|Rest],ClName,ClFields,
+ NewNthObj,InternalFunc++Acc);
+gen_objset_enc(ObjSetName,UniqueName,
+ [{ObjName,Val,Fields}],_ClName,ClFields,NthObj,Acc) ->
+
+ emit({"'getenc_",ObjSetName,"'(",{asis,UniqueName},",",
+ {asis,Val},") ->",nl}),
+ {InternalFunc,_}=
+ case ObjName of
+ no_name ->
+ gen_inlined_enc_funs(Fields,ClFields,ObjSetName,NthObj);
+ _ ->
+ emit({" fun 'enc_",ObjName,"'/3"}),
+ {[],NthObj}
+ end,
+ emit({".",nl,nl}),
+ InternalFunc++Acc;
+gen_objset_enc(ObjSetName,_UniqueName,['EXTENSIONMARK'],_ClName,
+ _ClFields,_NthObj,Acc) ->
+ emit({"'getenc_",ObjSetName,"'(_, _) ->",nl}),
+ emit({indent(3),"fun(_, Val, _) ->",nl}),
+ emit({indent(6),"Size = if",nl}),
+ emit({indent(9),"list(Val) -> length(Val);",nl}),
+ emit({indent(9),"true -> size(Val)",nl}),
+ emit({indent(6),"end,",nl}),
+ emit({indent(6),"if",nl}),
+ emit({indent(9),"Size < 256 ->",nl}),
+ emit({indent(12),"[20,Size,Val];",nl}),
+ emit({indent(9),"true ->",nl}),
+ emit({indent(12),"[21,<<Size:16>>,Val]",nl}),
+ emit({indent(6),"end",nl}),
+ emit({indent(3),"end.",nl,nl}),
+ Acc;
+gen_objset_enc(_,_,[],_,_,_,Acc) ->
+ Acc.
+
+%% gen_inlined_enc_funs for each object iterates over all fields of a
+%% class, and for each typefield it checks if the object has that
+%% field and emits the proper code.
+gen_inlined_enc_funs(Fields,[{typefield,Name,_}|Rest],ObjSetName,NthObj) ->
+ InternalDefFunName=asn1ct_gen:list2name([NthObj,Name,ObjSetName]),
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({indent(3),"fun(Type, Val, _) ->",nl,
+ indent(6),"case Type of",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+N,Ret);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({indent(3),"fun(Type, Val, _) ->",nl,
+ indent(6),"case Type of",nl}),
+ emit({indent(9),{asis,Name}," ->",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+N,Ret);
+ false ->
+ gen_inlined_enc_funs(Fields,Rest,ObjSetName,NthObj)
+ end;
+gen_inlined_enc_funs(Fields,[_|Rest],ObjSetName,NthObj) ->
+ gen_inlined_enc_funs(Fields,Rest,ObjSetName,NthObj);
+gen_inlined_enc_funs(_,[],_,NthObj) ->
+ {[],NthObj}.
+
+gen_inlined_enc_funs1(Fields,[{typefield,Name,_}|Rest],ObjSetName,
+ NthObj,Acc) ->
+ InternalDefFunName = asn1ct_gen:list2name([NthObj,Name,ObjSetName]),
+ {Acc2,NAdd}=
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({";",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ {Ret++Acc,N};
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({";",nl,indent(9),{asis,Name}," ->",nl}),
+ {Ret,N}=emit_inner_of_fun(Type,InternalDefFunName),
+ {Ret++Acc,N};
+ false ->
+ {Acc,0}
+ end,
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj+NAdd,Acc2);
+gen_inlined_enc_funs1(Fields,[_|Rest],ObjSetName,NthObj,Acc)->
+ gen_inlined_enc_funs1(Fields,Rest,ObjSetName,NthObj,Acc);
+gen_inlined_enc_funs1(_,[],_,NthObj,Acc) ->
+ emit({nl,indent(6),"end",nl}),
+ emit({indent(3),"end"}),
+ {Acc,NthObj}.
+
+emit_inner_of_fun(TDef=#typedef{name={ExtMod,Name},typespec=Type},
+ InternalDefFunName) ->
+ case {ExtMod,Name} of
+ {primitive,bif} ->
+ emit(indent(12)),
+ gen_encode_prim(per,Type,dotag,"Val"),
+ {[],0};
+ {constructed,bif} ->
+ emit([indent(12),"'enc_",
+ InternalDefFunName,"'(Val)"]),
+ {[TDef#typedef{name=InternalDefFunName}],1};
+ _ ->
+ emit({indent(12),"'",ExtMod,"':'enc_",Name,"'(Val)"}),
+ {[],0}
+ end;
+emit_inner_of_fun(#typedef{name=Name},_) ->
+ emit({indent(12),"'enc_",Name,"'(Val)"}),
+ {[],0};
+emit_inner_of_fun(Type,_) when record(Type,type) ->
+ CurrMod = get(currmod),
+ case Type#type.def of
+ Def when atom(Def) ->
+ emit({indent(9),Def," ->",nl,indent(12)}),
+ gen_encode_prim(erules,Type,dotag,"Val");
+ TRef when record(TRef,typereference) ->
+ T = TRef#typereference.val,
+ emit({indent(9),T," ->",nl,indent(12),"'enc_",T,"'(Val)"});
+ #'Externaltypereference'{module=CurrMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),"'enc_",T,"'(Val)"});
+ #'Externaltypereference'{module=ExtMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),ExtMod,":'enc_",
+ T,"'(Val)"})
+ end,
+ {[],0}.
+
+indent(N) ->
+ lists:duplicate(N,32). % 32 = space
+
+
+gen_objset_dec(_,{unique,undefined},_,_,_,_) ->
+    %% There is no unique field in the class of this object set,
+    %% so the constraint can be ignored here.
+ ok;
+gen_objset_dec(ObjSName,UniqueName,[{ObjName,Val,Fields},T|Rest],ClName,
+ ClFields,NthObj)->
+
+ emit({"'getdec_",ObjSName,"'(",{asis,UniqueName},",",
+ {asis,Val},") ->",nl}),
+ NewNthObj=
+ case ObjName of
+ no_name ->
+ gen_inlined_dec_funs(Fields,ClFields,ObjSName,NthObj);
+ _ ->
+ emit({" fun 'dec_",ObjName,"'/4"}),
+ NthObj
+ end,
+ emit({";",nl}),
+ gen_objset_dec(ObjSName,UniqueName,[T|Rest],ClName,ClFields,NewNthObj);
+gen_objset_dec(ObjSetName,UniqueName,[{ObjName,Val,Fields}],_ClName,
+ ClFields,NthObj) ->
+
+ emit({"'getdec_",ObjSetName,"'(",{asis,UniqueName},",",
+ {asis,Val},") ->",nl}),
+ case ObjName of
+ no_name ->
+ gen_inlined_dec_funs(Fields,ClFields,ObjSetName,NthObj);
+ _ ->
+ emit({" fun 'dec_",ObjName,"'/4"})
+ end,
+ emit({".",nl,nl}),
+ ok;
+gen_objset_dec(ObjSetName,_,['EXTENSIONMARK'],_ClName,_ClFields,
+ _NthObj) ->
+ emit({"'getdec_",ObjSetName,"'(_, _) ->",nl}),
+ emit({indent(3),"fun(Attr1, Bytes, _, _) ->",nl}),
+ %% emit({indent(6),"?RT_PER:decode_open_type(Bytes,[])",nl}),
+ emit({indent(6),"{Bytes,Attr1}",nl}),
+ emit({indent(3),"end.",nl,nl}),
+ ok;
+gen_objset_dec(_,_,[],_,_,_) ->
+ ok.
+
+gen_inlined_dec_funs(Fields,[{typefield,Name,_}|Rest],
+ ObjSetName,NthObj) ->
+ InternalDefFunName = [NthObj,Name,ObjSetName],
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({indent(3),"fun(Type, Val, _, _) ->",nl,
+ indent(6),"case Type of",nl}),
+ N=emit_inner_of_decfun(Type,InternalDefFunName),
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj+N);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({indent(3),"fun(Type, Val, _, _) ->",nl,
+ indent(6),"case Type of",nl}),
+ emit({indent(9),{asis,Name}," ->",nl}),
+ N=emit_inner_of_decfun(Type,InternalDefFunName),
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj+N);
+ false ->
+ gen_inlined_dec_funs(Fields,Rest,ObjSetName,NthObj)
+ end;
+gen_inlined_dec_funs(Fields,[_|Rest],ObjSetName,NthObj) ->
+ gen_inlined_dec_funs(Fields,Rest,ObjSetName,NthObj);
+gen_inlined_dec_funs(_,[],_,NthObj) ->
+ NthObj.
+
+gen_inlined_dec_funs1(Fields,[{typefield,Name,_}|Rest],
+ ObjSetName,NthObj) ->
+ InternalDefFunName = [NthObj,Name,ObjSetName],
+ N=
+ case lists:keysearch(Name,1,Fields) of
+ {value,{_,Type}} when record(Type,type) ->
+ emit({";",nl}),
+ emit_inner_of_decfun(Type,InternalDefFunName);
+ {value,{_,Type}} when record(Type,typedef) ->
+ emit({";",nl,indent(9),{asis,Name}," ->",nl}),
+ emit_inner_of_decfun(Type,InternalDefFunName);
+ false ->
+ 0
+ end,
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj+N);
+gen_inlined_dec_funs1(Fields,[_|Rest],ObjSetName,NthObj)->
+ gen_inlined_dec_funs1(Fields,Rest,ObjSetName,NthObj);
+gen_inlined_dec_funs1(_,[],_,NthObj) ->
+ emit({nl,indent(6),"end",nl}),
+ emit({indent(3),"end"}),
+ NthObj.
+
+emit_inner_of_decfun(#typedef{name={ExtName,Name},typespec=Type},
+ InternalDefFunName) ->
+ case {ExtName,Name} of
+ {primitive,bif} ->
+ emit(indent(12)),
+ gen_dec_prim(per,Type,"Val"),
+ 0;
+ {constructed,bif} ->
+ emit({indent(12),"'dec_",
+ asn1ct_gen:list2name(InternalDefFunName),"'(Val)"}),
+ 1;
+ _ ->
+ emit({indent(12),"'",ExtName,"':'dec_",Name,
+ "'(Val, telltype)"}),
+ 0
+ end;
+emit_inner_of_decfun(#typedef{name=Name},_) ->
+ emit({indent(12),"'dec_",Name,"'(Val, telltype)"}),
+ 0;
+emit_inner_of_decfun(Type,_) when record(Type,type) ->
+ CurrMod = get(currmod),
+ case Type#type.def of
+ Def when atom(Def) ->
+ emit({indent(9),Def," ->",nl,indent(12)}),
+ gen_dec_prim(erules,Type,"Val");
+ TRef when record(TRef,typereference) ->
+ T = TRef#typereference.val,
+ emit({indent(9),T," ->",nl,indent(12),"'dec_",T,"'(Val)"});
+ #'Externaltypereference'{module=CurrMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),"'dec_",T,"'(Val)"});
+ #'Externaltypereference'{module=ExtMod,type=T} ->
+ emit({indent(9),T," ->",nl,indent(12),ExtMod,":'dec_",
+ T,"'(Val)"})
+ end,
+ 0.
+
+
+gen_internal_funcs(_Erules,[]) ->
+ ok;
+gen_internal_funcs(Erules,[TypeDef|Rest]) ->
+ gen_encode_user(Erules,TypeDef),
+ emit([nl,nl,"'dec_",TypeDef#typedef.name,"'(Bytes) ->",nl]),
+ gen_decode_user(Erules,TypeDef),
+ gen_internal_funcs(Erules,Rest).
+
+
+
+%% DECODING *****************************
+%%***************************************
+
+
+gen_decode(Erules,Type) when record(Type,typedef) ->
+ D = Type,
+ emit({nl,nl}),
+ emit({"'dec_",Type#typedef.name,"'(Bytes,_) ->",nl}),
+ dbdec(Type#typedef.name),
+ gen_decode_user(Erules,D).
+
+gen_decode(Erules,Tname,#'ComponentType'{name=Cname,typespec=Type}) ->
+ NewTname = [Cname|Tname],
+ gen_decode(Erules,NewTname,Type);
+
+gen_decode(Erules,Typename,Type) when record(Type,type) ->
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ case asn1ct_gen:type(InnerType) of
+ {constructed,bif} ->
+ ObjFun =
+ case Type#type.tablecinf of
+ [{objfun,_}|_R] ->
+ ", ObjFun";
+ _ ->
+ ""
+ end,
+ emit({nl,"'dec_",asn1ct_gen:list2name(Typename),
+ "'(Bytes,_",ObjFun,") ->",nl}),
+ dbdec(Typename),
+ asn1ct_gen:gen_decode_constructed(Erules,Typename,InnerType,Type);
+ _ ->
+ true
+ end.
+
+dbdec(Type) when list(Type)->
+ demit({"io:format(\"decoding: ",asn1ct_gen:list2name(Type),"~w~n\",[Bytes]),",nl});
+dbdec(Type) ->
+ demit({"io:format(\"decoding: ",{asis,Type},"~w~n\",[Bytes]),",nl}).
+
+gen_decode_user(Erules,D) when record(D,typedef) ->
+ CurrMod = get(currmod),
+ Typename = [D#typedef.name],
+ Def = D#typedef.typespec,
+ InnerType = asn1ct_gen:get_inner(Def#type.def),
+ case asn1ct_gen:type(InnerType) of
+ {primitive,bif} ->
+ gen_dec_prim(Erules,Def,"Bytes"),
+ emit({".",nl,nl});
+ 'ASN1_OPEN_TYPE' ->
+ gen_dec_prim(Erules,Def#type{def='ASN1_OPEN_TYPE'},"Bytes"),
+ emit({".",nl,nl});
+ {constructed,bif} ->
+ asn1ct_gen:gen_decode_constructed(Erules,Typename,InnerType,D);
+ #typereference{val=Dname} ->
+ emit({"'dec_",Dname,"'(Bytes,telltype)"}),
+ emit({".",nl,nl});
+ #'Externaltypereference'{module=CurrMod,type=Etype} ->
+ emit({"'dec_",Etype,"'(Bytes,telltype).",nl,nl});
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ emit({"'",Emod,"':'dec_",Etype,"'(Bytes,telltype).",nl,nl});
+ Other ->
+ exit({error,{asn1,{unknown,Other}}})
+ end.
+
+
+
+gen_dec_prim(_Erules,Att,BytesVar) ->
+ Typename = Att#type.def,
+ Constraint = Att#type.constraint,
+ case Typename of
+ 'INTEGER' ->
+ EffectiveConstr = effective_constraint(integer,Constraint),
+ emit_dec_integer(EffectiveConstr,BytesVar);
+% emit({"?RT_PER:decode_integer(",BytesVar,",",
+% {asis,EffectiveConstr},")"});
+ {'INTEGER',NamedNumberList} ->
+ EffectiveConstr = effective_constraint(integer,Constraint),
+ emit_dec_integer(EffectiveConstr,BytesVar,NamedNumberList);
+% emit({"?RT_PER:decode_integer(",BytesVar,",",
+% {asis,EffectiveConstr},",",
+% {asis,NamedNumberList},")"});
+ {'BIT STRING',NamedNumberList} ->
+ case get(compact_bit_string) of
+ true ->
+ emit({"?RT_PER:decode_compact_bit_string(",
+ BytesVar,",",{asis,Constraint},",",
+ {asis,NamedNumberList},")"});
+ _ ->
+ emit({"?RT_PER:decode_bit_string(",BytesVar,",",
+ {asis,Constraint},",",
+ {asis,NamedNumberList},")"})
+ end;
+ 'NULL' ->
+ emit({"?RT_PER:decode_null(",
+ BytesVar,")"});
+ 'OBJECT IDENTIFIER' ->
+ emit({"?RT_PER:decode_object_identifier(",
+ BytesVar,")"});
+ 'ObjectDescriptor' ->
+ emit({"?RT_PER:decode_ObjectDescriptor(",
+ BytesVar,")"});
+ {'ENUMERATED',{NamedNumberList1,NamedNumberList2}} ->
+ NewTup = {list_to_tuple([X||{X,_} <- NamedNumberList1]),
+ list_to_tuple([X||{X,_} <- NamedNumberList2])},
+ NewC = [{'ValueRange',{0,size(element(1,NewTup))-1}}],
+ emit({"?RT_PER:decode_enumerated(",BytesVar,",",
+ {asis,NewC},",",
+ {asis,NewTup},")"});
+ {'ENUMERATED',NamedNumberList} ->
+ %NewTup = list_to_tuple([X||{X,Y} <- NamedNumberList]),
+ NewNNL = [X||{X,_} <- NamedNumberList],
+ NewC = effective_constraint(integer,
+ [{'ValueRange',{0,length(NewNNL)-1}}]),
+ emit_dec_enumerated(BytesVar,NewC,NewNNL);
+% emit({"?RT_PER:decode_enumerated(",BytesVar,",",
+% {asis,NewC},",",
+% {asis,NewTup},")"});
+ 'BOOLEAN'->
+ emit({"?RT_PER:decode_boolean(",BytesVar,")"});
+ 'OCTET STRING' ->
+ emit_dec_octet_string(Constraint,BytesVar);
+% emit({"?RT_PER:decode_octet_string(",BytesVar,",",
+% {asis,Constraint},")"});
+ 'NumericString' ->
+ emit_dec_known_multiplier_string('NumericString',
+ Constraint,BytesVar);
+% emit({"?RT_PER:decode_NumericString(",BytesVar,",",
+% {asis,Constraint},")"});
+ 'TeletexString' ->
+ emit({"?RT_PER:decode_TeletexString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'VideotexString' ->
+ emit({"?RT_PER:decode_VideotexString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'UTCTime' ->
+ emit_dec_known_multiplier_string('VisibleString',
+ Constraint,BytesVar);
+% emit({"?RT_PER:decode_VisibleString(",BytesVar,",",
+% {asis,Constraint},")"});
+ 'GeneralizedTime' ->
+ emit_dec_known_multiplier_string('VisibleString',
+ Constraint,BytesVar);
+% emit({"?RT_PER:decode_VisibleString(",BytesVar,",",
+% {asis,Constraint},")"});
+ 'GraphicString' ->
+ emit({"?RT_PER:decode_GraphicString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'VisibleString' ->
+ emit_dec_known_multiplier_string('VisibleString',
+ Constraint,BytesVar);
+% emit({"?RT_PER:decode_VisibleString(",BytesVar,",",
+% {asis,Constraint},")"});
+ 'GeneralString' ->
+ emit({"?RT_PER:decode_GeneralString(",BytesVar,",",
+ {asis,Constraint},")"});
+ 'PrintableString' ->
+ emit_dec_known_multiplier_string('PrintableString',
+ Constraint,BytesVar);
+% emit({"?RT_PER:decode_PrintableString(",BytesVar,",",{asis,Constraint},")"});
+ 'IA5String' ->
+ emit_dec_known_multiplier_string('IA5String',Constraint,BytesVar);
+% emit({"?RT_PER:decode_IA5String(",BytesVar,",",{asis,Constraint},")"});
+ 'BMPString' ->
+ emit_dec_known_multiplier_string('BMPString',Constraint,BytesVar);
+% emit({"?RT_PER:decode_BMPString(",BytesVar,",",{asis,Constraint},")"});
+ 'UniversalString' ->
+ emit_dec_known_multiplier_string('UniversalString',
+ Constraint,BytesVar);
+% emit({"?RT_PER:decode_UniversalString(",BytesVar,",",{asis,Constraint},")"});
+ 'ANY' ->
+ emit(["?RT_PER:decode_open_type(",BytesVar,",",
+ {asis,Constraint}, ")"]);
+ 'ASN1_OPEN_TYPE' ->
+ case Constraint of
+ [#'Externaltypereference'{type=Tname}] ->
+ emit(["fun(FBytes) ->",nl,
+ " {XTerm,XBytes} = "]),
+ emit(["?RT_PER:decode_open_type(FBytes,[]),",nl]),
+ emit([" {YTerm,_} = dec_",Tname,"(XTerm,mandatory),",nl]),
+ emit([" {YTerm,XBytes} end(",BytesVar,")"]);
+ [#type{def=#'Externaltypereference'{type=Tname}}] ->
+ emit(["fun(FBytes) ->",nl,
+ " {XTerm,XBytes} = "]),
+ emit(["?RT_PER:decode_open_type(FBytes,[]),",nl]),
+ emit([" {YTerm,_} = dec_",Tname,"(XTerm,mandatory),",nl]),
+ emit([" {YTerm,XBytes} end(",BytesVar,")"]);
+ _ ->
+ emit(["?RT_PER:decode_open_type(",BytesVar,",[])"])
+ end;
+ Other ->
+ exit({'cant decode' ,Other})
+ end.
+
+
+emit_dec_integer(C,BytesVar,NNL) ->
+ asn1ct_name:new(tmpterm),
+ asn1ct_name:new(buffer),
+ Tmpterm = asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),
+ Buffer = asn1ct_gen:mk_var(asn1ct_name:curr(buffer)),
+ emit({" begin {",{curr,tmpterm},",",{curr,buffer},"} = ",nl}),
+ emit_dec_integer(C,BytesVar),
+ emit({",",nl," case ",Tmpterm," of",nl}),
+ lists:map(fun({Name,Int})->emit({" ",Int," -> {",{asis,Name},",",
+ Buffer,"};",nl});
+ (_)-> exit({error,{asn1,{"error in named number list",NNL}}})
+ end,
+ NNL),
+ emit({" _ -> {",Tmpterm,",",Buffer,"}",nl}),
+ emit({" end",nl}), % end of case
+ emit(" end"). % end of begin
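+%% The code emitted above has roughly this shape (illustrative sketch;
+%% the variable names are generated, and 0/1 -> red/blue stand in for
+%% the entries of the named number list):
+%%
+%%   begin {Tmpterm1,Buffer1} = <integer decoding expression>,
+%%         case Tmpterm1 of
+%%           0 -> {red,Buffer1};
+%%           1 -> {blue,Buffer1};
+%%           _ -> {Tmpterm1,Buffer1}
+%%         end
+%%   end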
+
+emit_dec_integer([{'SingleValue',Int}],BytesVar) when integer(Int) ->
+ emit(["{",Int,",",BytesVar,"}"]);
+emit_dec_integer([{_,{Lb,_Ub},_Range,{BitsOrOctets,N}}],BytesVar) ->
+ GetBorO =
+ case BitsOrOctets of
+ bits -> "getbits";
+ _ -> "getoctets"
+ end,
+ asn1ct_name:new(tmpterm),
+ asn1ct_name:new(tmpremain),
+ emit({" begin",nl," {",{curr,tmpterm},",",{curr,tmpremain},"}=",
+ "?RT_PER:",GetBorO,"(",BytesVar,",",N,"),",nl}),
+ emit({" {",{curr,tmpterm},"+",Lb,",",{curr,tmpremain},"}",nl,
+ " end"});
+emit_dec_integer([{_,{'MIN',_}}],BytesVar) ->
+ emit({"?RT_PER:decode_unconstrained_number(",BytesVar,")"});
+emit_dec_integer([{_,{Lb,'MAX'}}],BytesVar) ->
+ emit({"?RT_PER:decode_semi_constrained_number(",BytesVar,",",Lb,")"});
+emit_dec_integer([{'ValueRange',VR={Lb,Ub}}],BytesVar) ->
+ Range = Ub-Lb+1,
+ emit({"?RT_PER:decode_constrained_number(",BytesVar,",",
+ {asis,VR},",",Range,")"});
+emit_dec_integer(C=[{Rc,_}],BytesVar) when tuple(Rc) ->
+ emit({"?RT_PER:decode_integer(",BytesVar,",",{asis,C},")"});
+emit_dec_integer(_,BytesVar) ->
+ emit({"?RT_PER:decode_unconstrained_number(",BytesVar,")"}).
+
+
+emit_dec_enumerated(BytesVar,C,NamedNumberList) ->
+ emit_dec_enumerated_begin(),% emits a begin if component
+ asn1ct_name:new(tmpterm),
+ Tmpterm = asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),
+ asn1ct_name:new(tmpremain),
+ Tmpremain = asn1ct_gen:mk_var(asn1ct_name:curr(tmpremain)),
+ emit({" {",{curr,tmpterm},",",{curr,tmpremain},"} =",nl}),
+ emit_dec_integer(C,BytesVar),
+ emit({",",nl," case ",Tmpterm," of "}),
+% Cases=lists:flatten(dec_enumerated_cases(NamedNumberList,asn1ct_gen:mk_var(asn1ct_name:curr(tmpremain)),0)),
+ Cases=lists:flatten(dec_enumerated_cases(NamedNumberList,Tmpremain,0)),
+ emit({Cases++"_->exit({error,{asn1,{decode_enumerated,{",Tmpterm,
+ ",",{asis,NamedNumberList},"}}}}) end",nl}),
+ emit_dec_enumerated_end().
+
+emit_dec_enumerated_begin() ->
+ case get(component_type) of
+ {true,_} ->
+ emit({" begin",nl});
+ _ -> ok
+ end.
+
+emit_dec_enumerated_end() ->
+ case get(component_type) of
+ {true,_} ->
+ emit(" end");
+ _ -> ok
+ end.
+
+% dec_enumerated_cases(NNL,Tmpremain,No) ->
+% Cases=dec_enumerated_cases1(NNL,Tmpremain,0),
+% lists:flatten(io_lib:format("(case ~s "++Cases++
+% "~s when atom(~s)->exit({error,{asn1,{namednumber,~s}}});_->~s end)",[Value,"TmpVal","TmpVal","TmpVal",Value])).
+
+dec_enumerated_cases([Name|Rest],Tmpremain,No) ->
+ io_lib:format("~w->{~w,~s};",[No,Name,Tmpremain])++
+ dec_enumerated_cases(Rest,Tmpremain,No+1);
+dec_enumerated_cases([],_,_) ->
+ "".
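+%% For example, dec_enumerated_cases([red,blue],"Bytes1",0) produces
+%% (once flattened) the string "0->{red,Bytes1};1->{blue,Bytes1};",
+%% which the caller completes with the catch-all exit clause.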
+
+
+% more_genfields(_Fields,[]) ->
+% false;
+% more_genfields(Fields,[{FieldName,_}|T]) ->
+% case is_typefield(Fields,FieldName) of
+% true -> true;
+% {false,objectfield} -> true;
+% {false,_} -> more_genfields(Fields,T)
+% end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_name.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_name.erl
new file mode 100644
index 0000000000..1c7769998c
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_name.erl
@@ -0,0 +1,225 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_name.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+-module(asn1ct_name).
+
+%%-compile(export_all).
+-export([name_server_loop/1,
+ start/0,
+ stop/0,
+ push/1,
+ pop/1,
+ curr/1,
+ clear/0,
+ delete/1,
+ active/1,
+ prev/1,
+ next/1,
+ all/1,
+ new/1]).
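+%% This module runs a small registered process (asn1_ns) that keeps a
+%% counter per variable base name; the code generator uses it to create
+%% fresh variable names.  A rough usage sketch, derived from the clauses
+%% below:
+%%
+%%   asn1ct_name:start(),
+%%   asn1ct_name:new(bytes),   %% counter for 'bytes' becomes 1
+%%   asn1ct_name:curr(bytes),  %% -> 'bytes1'
+%%   asn1ct_name:new(bytes),   %% counter becomes 2
+%%   asn1ct_name:prev(bytes),  %% -> 'bytes1'
+%%   asn1ct_name:stop().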
+
+start() ->
+ start_server(asn1_ns, asn1ct_name,name_server_loop,[[]]).
+
+stop() -> stop_server(asn1_ns).
+
+name_server_loop(Vars) ->
+%% io:format("name -- ~w~n",[Vars]),
+ receive
+ {From,{current,Variable}} ->
+ From ! {asn1_ns,get_curr(Vars,Variable)},
+ name_server_loop(Vars);
+ {From,{pop,Variable}} ->
+ From ! {asn1_ns,done},
+ name_server_loop(pop_var(Vars,Variable));
+ {From,{push,Variable}} ->
+ From ! {asn1_ns,done},
+ name_server_loop(push_var(Vars,Variable));
+ {From,{delete,Variable}} ->
+ From ! {asn1_ns,done},
+ name_server_loop(delete_var(Vars,Variable));
+ {From,{new,Variable}} ->
+ From ! {asn1_ns,done},
+ name_server_loop(new_var(Vars,Variable));
+ {From,{prev,Variable}} ->
+ From ! {asn1_ns,get_prev(Vars,Variable)},
+ name_server_loop(Vars);
+ {From,{next,Variable}} ->
+ From ! {asn1_ns,get_next(Vars,Variable)},
+ name_server_loop(Vars);
+ {From,stop} ->
+ From ! {asn1_ns,stopped},
+ exit(normal)
+ end.
+
+active(V) ->
+ case curr(V) of
+ nil -> false;
+ _ -> true
+ end.
+
+req(Req) ->
+ asn1_ns ! {self(), Req},
+ receive {asn1_ns, Reply} -> Reply end.
+
+pop(V) -> req({pop,V}).
+push(V) -> req({push,V}).
+clear() -> req(stop), start().
+curr(V) -> req({current,V}).
+new(V) -> req({new,V}).
+delete(V) -> req({delete,V}).
+prev(V) ->
+ case req({prev,V}) of
+ none ->
+ exit('cant get prev of none');
+ Rep -> Rep
+ end.
+
+next(V) ->
+ case req({next,V}) of
+ none ->
+ exit('cant get next of none');
+ Rep -> Rep
+ end.
+
+all(V) ->
+ Curr = curr(V),
+ if Curr == V -> [];
+ true ->
+ lists:reverse(generate(V,last(Curr),[],0))
+ end.
+
+generate(V,Number,Res,Pos) ->
+ Ell = Pos+1,
+ if
+ Ell > Number ->
+ Res;
+ true ->
+ generate(V,Number,[list_to_atom(lists:concat([V,Ell]))|Res],Ell)
+ end.
+
+last(V) ->
+ last2(lists:reverse(atom_to_list(V))).
+
+last2(RevL) ->
+ list_to_integer(lists:reverse(get_digs(RevL))).
+
+
+get_digs([H|T]) ->
+ if
+ H < $9+1,
+ H > $0-1 ->
+ [H|get_digs(T)];
+ true ->
+ []
+ end.
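+%% Example: last('bytes12') reverses the name to "21setyb", get_digs/1
+%% takes the leading digit characters "21", and last2/1 turns them back
+%% into the integer 12.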
+
+push_var(Vars,Variable) ->
+ case lists:keysearch(Variable,1,Vars) of
+ false ->
+ [{Variable,[0]}|Vars];
+ {value,{Variable,[Digit|Drest]}} ->
+ NewVars = lists:keydelete(Variable,1,Vars),
+ [{Variable,[Digit,Digit|Drest]}|NewVars]
+ end.
+
+pop_var(Vars,Variable) ->
+ case lists:keysearch(Variable,1,Vars) of
+ false ->
+ ok;
+ {value,{Variable,[_Dig]}} ->
+ lists:keydelete(Variable,1,Vars);
+ {value,{Variable,[_Dig|Digits]}} ->
+ NewVars = lists:keydelete(Variable,1,Vars),
+ [{Variable,Digits}|NewVars]
+ end.
+
+get_curr([],Variable) ->
+ Variable;
+get_curr([{Variable,[0|_Drest]}|_Tail],Variable) ->
+ Variable;
+get_curr([{Variable,[Digit|_Drest]}|_Tail],Variable) ->
+ list_to_atom(lists:concat([Variable,integer_to_list(Digit)]));
+
+get_curr([_|Tail],Variable) ->
+ get_curr(Tail,Variable).
+
+new_var(Vars,Variable) ->
+ case lists:keysearch(Variable,1,Vars) of
+ false ->
+ [{Variable,[1]}|Vars];
+ {value,{Variable,[Digit|Drest]}} ->
+ NewVars = lists:keydelete(Variable,1,Vars),
+ [{Variable,[Digit+1|Drest]}|NewVars]
+ end.
+
+delete_var(Vars,Variable) ->
+ case lists:keysearch(Variable,1,Vars) of
+ false ->
+ Vars;
+ {value,{Variable,[N]}} when N =< 1 ->
+ lists:keydelete(Variable,1,Vars);
+ {value,{Variable,[Digit|Drest]}} ->
+ case Digit of
+ 0 ->
+ Vars;
+ _ ->
+ NewVars = lists:keydelete(Variable,1,Vars),
+ [{Variable,[Digit-1|Drest]}|NewVars]
+ end
+ end.
+
+get_prev(Vars,Variable) ->
+ case lists:keysearch(Variable,1,Vars) of
+ false ->
+ none;
+ {value,{Variable,[Digit|_]}} when Digit =< 1 ->
+ Variable;
+ {value,{Variable,[Digit|_]}} when Digit > 1 ->
+ list_to_atom(lists:concat([Variable,
+ integer_to_list(Digit-1)]));
+ _ ->
+ none
+ end.
+
+get_next(Vars,Variable) ->
+ case lists:keysearch(Variable,1,Vars) of
+ false ->
+ list_to_atom(lists:concat([Variable,"1"]));
+ {value,{Variable,[Digit|_]}} when Digit >= 0 ->
+ list_to_atom(lists:concat([Variable,
+ integer_to_list(Digit+1)]));
+ _ ->
+ none
+ end.
+
+
+stop_server(Name) ->
+ stop_server(Name, whereis(Name)).
+stop_server(_Name, undefined) -> stopped;
+stop_server(Name, _Pid) ->
+ Name ! {self(), stop},
+ receive {Name, _} -> stopped end.
+
+
+start_server(Name,Mod,Fun,Args) ->
+ case whereis(Name) of
+ undefined ->
+ register(Name, spawn(Mod,Fun, Args));
+ _Pid ->
+ already_started
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser.yrl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser.yrl
new file mode 100644
index 0000000000..b2c1d70f6e
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser.yrl
@@ -0,0 +1,1162 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_parser.yrl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+Nonterminals
+ModuleDefinition ModuleIdentifier DefinitiveIdentifier DefinitiveObjIdComponentList
+DefinitiveObjIdComponent TagDefault ExtensionDefault
+ModuleBody Exports SymbolsExported Imports SymbolsImported
+SymbolsFromModuleList SymbolsFromModule GlobalModuleReference AssignedIdentifier SymbolList
+Symbol Reference AssignmentList Assignment
+ExtensionAndException
+ComponentTypeLists
+Externaltypereference Externalvaluereference DefinedType DefinedValue
+AbsoluteReference ItemSpec ItemId ComponentId TypeAssignment
+ValueAssignment
+% ValueSetTypeAssignment
+ValueSet
+Type BuiltinType NamedType ReferencedType
+Value ValueNotNull BuiltinValue ReferencedValue NamedValue
+% BooleanType
+BooleanValue IntegerType NamedNumberList NamedNumber SignedNumber
+% inlined IntegerValue
+EnumeratedType
+% inlined Enumerations
+Enumeration EnumerationItem
+% inlined EnumeratedValue
+% RealType
+RealValue NumericRealValue SpecialRealValue BitStringType
+% inlined BitStringValue
+IdentifierList
+% OctetStringType
+% inlined OctetStringValue
+% NullType NullValue
+SequenceType ComponentTypeList ComponentType
+% SequenceValue SequenceOfValue
+ComponentValueList SequenceOfType
+SAndSOfValue ValueList SetType
+% SetValue SetOfValue
+SetOfType
+ChoiceType
+% AlternativeTypeList made common with ComponentTypeList
+ChoiceValue
+AnyValue
+AnyDefBy
+SelectionType
+TaggedType Tag ClassNumber Class
+% redundant TaggedValue
+% EmbeddedPDVType EmbeddedPDVValue ExternalType ExternalValue ObjectIdentifierType
+ObjectIdentifierValue ObjIdComponentList ObjIdComponent
+% NameForm NumberForm NameAndNumberForm
+CharacterStringType
+RestrictedCharacterStringValue CharacterStringList
+% CharSyms CharsDefn
+Quadruple
+% Group Plane Row Cell
+Tuple
+% TableColumn TableRow
+% UnrestrictedCharacterString
+CharacterStringValue
+% UnrestrictedCharacterStringValue
+ConstrainedType Constraint ConstraintSpec TypeWithConstraint
+ElementSetSpecs ElementSetSpec
+%GeneralConstraint
+UserDefinedConstraint UserDefinedConstraintParameter
+UserDefinedConstraintParameters
+ExceptionSpec
+ExceptionIdentification
+Unions
+UnionMark
+UElems
+Intersections
+IntersectionElements
+IntersectionMark
+IElems
+Elements
+Elems
+SubTypeElements
+Exclusions
+LowerEndpoint
+UpperEndpoint
+LowerEndValue
+UpperEndValue
+TypeConstraints NamedConstraint PresenceConstraint
+
+ParameterizedTypeAssignment
+ParameterList
+Parameters
+Parameter
+ParameterizedType
+
+% X.681
+ObjectClassAssignment ObjectClass ObjectClassDefn
+FieldSpecs FieldSpec OptionalitySpec WithSyntaxSpec
+TokenOrGroupSpecs TokenOrGroupSpec
+SyntaxList OptionalGroup RequiredToken Word
+TypeOptionalitySpec
+ValueOrObjectOptSpec
+VSetOrOSetOptSpec
+ValueOptionalitySpec
+ObjectOptionalitySpec
+ValueSetOptionalitySpec
+ObjectSetOptionalitySpec
+% X.681 chapter 15
+InformationFromObjects
+ValueFromObject
+%ValueSetFromObjects
+TypeFromObject
+%ObjectFromObject
+%ObjectSetFromObjects
+ReferencedObjects
+FieldName
+PrimitiveFieldName
+
+ObjectAssignment
+ObjectSetAssignment
+ObjectSet
+ObjectSetElements
+Object
+ObjectDefn
+DefaultSyntax
+DefinedSyntax
+FieldSettings
+FieldSetting
+DefinedSyntaxTokens
+DefinedSyntaxToken
+Setting
+DefinedObject
+ObjectFromObject
+ObjectSetFromObjects
+ParameterizedObject
+ExternalObjectReference
+DefinedObjectSet
+DefinedObjectClass
+ExternalObjectClassReference
+
+% X.682
+TableConstraint
+ComponentRelationConstraint
+ComponentIdList
+
+% X.683
+ActualParameter
+.
+
+%UsefulType.
+
+Terminals
+'ABSENT' 'ABSTRACT-SYNTAX' 'ALL' 'ANY'
+'APPLICATION' 'AUTOMATIC' 'BEGIN' 'BIT'
+'BOOLEAN' 'BY' 'CHARACTER' 'CHOICE' 'CLASS' 'COMPONENT'
+'COMPONENTS' 'CONSTRAINED' 'DEFAULT' 'DEFINED' 'DEFINITIONS'
+'EMBEDDED' 'END' 'ENUMERATED' 'EXCEPT' 'EXPLICIT'
+'EXPORTS' 'EXTENSIBILITY' 'EXTERNAL' 'FALSE' 'FROM' 'GeneralizedTime'
+'TYPE-IDENTIFIER'
+'IDENTIFIER' 'IMPLICIT' 'IMPLIED' 'IMPORTS'
+'INCLUDES' 'INSTANCE' 'INTEGER' 'INTERSECTION'
+'MAX' 'MIN' 'MINUS-INFINITY' 'NULL'
+'OBJECT' 'ObjectDescriptor' 'OCTET' 'OF' 'OPTIONAL' 'PDV' 'PLUS-INFINITY'
+'PRESENT' 'PRIVATE' 'REAL' 'SEQUENCE' 'SET' 'SIZE'
+'STRING' 'SYNTAX' 'TAGS' 'TRUE' 'UNION'
+'UNIQUE' 'UNIVERSAL' 'UTCTime' 'WITH'
+'{' '}' '(' ')' '.' '::=' ';' ',' '@' '*' '-' '[' ']'
+'!' '..' '...' '|' '<' ':' '^'
+number identifier typereference restrictedcharacterstringtype
+bstring hstring cstring typefieldreference valuefieldreference
+objectclassreference word.
+
+Rootsymbol ModuleDefinition.
+Endsymbol '$end'.
+
+Left 300 'EXCEPT'.
+Left 200 '^'.
+Left 200 'INTERSECTION'.
+Left 100 '|'.
+Left 100 'UNION'.
+
+
+ModuleDefinition -> ModuleIdentifier
+ 'DEFINITIONS'
+ TagDefault
+ ExtensionDefault
+ '::='
+ 'BEGIN'
+ ModuleBody
+ 'END' :
+ {'ModuleBody',Ex,Im,Types} = '$7',
+ {{typereference,Pos,Name},Defid} = '$1',
+ #module{
+ pos= Pos,
+ name= Name,
+ defid= Defid,
+ tagdefault='$3',
+ extensiondefault='$4',
+ exports=Ex,
+ imports=Im,
+ typeorval=Types}.
+% {module, '$1','$3','$6'}.
+% The result is always a #module{} record, as defined in asn1_records.hrl.
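+% An illustrative (not normative) example of input accepted by this rule:
+%
+%   MyModule { iso(1) } DEFINITIONS AUTOMATIC TAGS ::=
+%   BEGIN
+%   END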
+
+ModuleIdentifier -> typereference DefinitiveIdentifier :
+ put(asn1_module,'$1'#typereference.val),
+ {'$1','$2'}.
+
+DefinitiveIdentifier -> '{' DefinitiveObjIdComponentList '}' : '$2' .
+DefinitiveIdentifier -> '$empty': [].
+
+DefinitiveObjIdComponentList -> DefinitiveObjIdComponent : ['$1'].
+DefinitiveObjIdComponentList -> DefinitiveObjIdComponent DefinitiveObjIdComponentList : ['$1'|'$2'].
+
+DefinitiveObjIdComponent -> identifier : '$1' . %expanded->
+% DefinitiveObjIdComponent -> NameForm : '$1' .
+DefinitiveObjIdComponent -> number : '$1' . %expanded->
+% DefinitiveObjIdComponent -> DefinitiveNumberForm : 'fix' .
+DefinitiveObjIdComponent -> identifier '(' number ')' : {'$1','$3'} . %expanded->
+% DefinitiveObjIdComponent -> DefinitiveNameAndNumberForm : {'$1','$3'} .
+
+% DefinitiveNumberForm -> number : 'fix' .
+
+% DefinitiveNameAndNumberForm -> identifier '(' DefinitiveNumberForm ')' : 'fix' .
+
+TagDefault -> 'EXPLICIT' 'TAGS' : put(tagdefault,'EXPLICIT'),'EXPLICIT' .
+TagDefault -> 'IMPLICIT' 'TAGS' : put(tagdefault,'IMPLICIT'),'IMPLICIT' .
+TagDefault -> 'AUTOMATIC' 'TAGS' : put(tagdefault,'AUTOMATIC'),'AUTOMATIC' .
+TagDefault -> '$empty': put(tagdefault,'EXPLICIT'),'EXPLICIT'. % because this is the default
+
+ExtensionDefault -> 'EXTENSIBILITY' 'IMPLIED' : 'IMPLIED'.
+ExtensionDefault -> '$empty' : 'false'. % because this is the default
+
+ModuleBody -> Exports Imports AssignmentList : {'ModuleBody','$1','$2','$3'}.
+ModuleBody -> '$empty' : {'ModuleBody',nil,nil,[]}.
+
+Exports -> 'EXPORTS' SymbolList ';' : {exports,'$2'}.
+Exports -> 'EXPORTS' ';' : {exports,[]}.
+Exports -> '$empty' : {exports,all} .
+
+% inlined above SymbolsExported -> SymbolList : '$1'.
+% inlined above SymbolsExported -> '$empty' : [].
+
+Imports -> 'IMPORTS' SymbolsFromModuleList ';' : {imports,'$2'}.
+Imports -> 'IMPORTS' ';' : {imports,[]}.
+Imports -> '$empty' : {imports,[]} .
+
+% inlined above SymbolsImported -> SymbolsFromModuleList : '$1'.
+% inlined above SymbolsImported -> '$empty' : [].
+
+SymbolsFromModuleList -> SymbolsFromModule :['$1'].
+% SymbolsFromModuleList -> SymbolsFromModuleList SymbolsFromModule :$1.%changed
+SymbolsFromModuleList -> SymbolsFromModule SymbolsFromModuleList :['$1'|'$2'].
+
+% expanded SymbolsFromModule -> SymbolList 'FROM' GlobalModuleReference : #'SymbolsFromModule'{symbols = '$1',module='$3'}.
+SymbolsFromModule -> SymbolList 'FROM' typereference : #'SymbolsFromModule'{symbols = '$1',module='$3'}.
+SymbolsFromModule -> SymbolList 'FROM' typereference '{' ValueList '}': #'SymbolsFromModule'{symbols = '$1',module='$3'}.
+%SymbolsFromModule -> SymbolList 'FROM' typereference identifier: #'SymbolsFromModule'{symbols = '$1',module='$3'}.
+%SymbolsFromModule -> SymbolList 'FROM' typereference Externalvaluereference: #'SymbolsFromModule'{symbols = '$1',module='$3'}.
+%SymbolsFromModule -> SymbolList 'FROM' typereference DefinedValue: #'SymbolsFromModule'{symbols = '$1',module='$3'}.
+
+% inlined GlobalModuleReference -> typereference AssignedIdentifier : {'$1','$2'} .
+
+% inlined above AssignedIdentifier -> '{' ValueList '}' : '$2'.
+% replaced AssignedIdentifier -> '{' DefinedValue ObjIdComponentList '}' :{'$2','$3'}.
+% not necessary , replaced by SAndSOfValue AssignedIdentifier -> ObjectIdentifierValue :'$1'.
+% AssignedIdentifier -> DefinedValue : '$1'.
+% inlined AssignedIdentifier -> '$empty' : undefined.
+
+SymbolList -> Symbol : ['$1'].
+SymbolList -> Symbol ',' SymbolList :['$1'|'$3'].
+
+Symbol -> Reference :'$1'.
+% later Symbol -> ParameterizedReference :'$1'.
+
+Reference -> typereference :'$1'.
+Reference -> identifier:'$1'.
+Reference -> typereference '{' '}':'$1'.
+Reference -> Externaltypereference '{' '}':'$1'.
+
+% later Reference -> objectclassreference :'$1'.
+% later Reference -> objectreference :'$1'.
+% later Reference -> objectsetreference :'$1'.
+
+AssignmentList -> Assignment : ['$1'].
+% modified AssignmentList -> AssignmentList Assignment : '$1'.
+AssignmentList -> Assignment AssignmentList : ['$1'|'$2'].
+
+Assignment -> TypeAssignment : '$1'.
+Assignment -> ValueAssignment : '$1'.
+% later Assignment -> ValueSetTypeAssignment : '$1'.
+Assignment -> ObjectClassAssignment : '$1'.
+% later Assignment -> ObjectAssignment : '$1'.
+% combined with ValueAssignment Assignment -> ObjectAssignment : '$1'.
+Assignment -> ObjectSetAssignment : '$1'.
+Assignment -> ParameterizedTypeAssignment : '$1'.
+%Assignment -> ParameterizedValueAssignment : '$1'.
+%Assignment -> ParameterizedValueSetTypeAssignment : '$1'.
+%Assignment -> ParameterizedObjectClassAssignment : '$1'.
+
+ObjectClassAssignment -> typereference '::=' 'CLASS' '{' FieldSpecs '}' :
+%ObjectClassAssignment -> objectclassreference '::=' 'CLASS' '{' FieldSpecs '}' :
+ #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec={'CLASS','$5',[]}}.
+ObjectClassAssignment -> typereference '::=' 'CLASS' '{' FieldSpecs '}' WithSyntaxSpec :
+%ObjectClassAssignment -> objectclassreference '::=' 'CLASS' '{' FieldSpecs '}' WithSyntaxSpec :
+ #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec={'CLASS','$5','$7'}}.
+
+FieldSpecs -> FieldSpec : ['$1'].
+FieldSpecs -> FieldSpec ',' FieldSpecs : ['$1'|'$3'].
+
+FieldSpec -> typefieldreference TypeOptionalitySpec : {typefield,'$1','$2'}.
+
+FieldSpec -> valuefieldreference Type 'UNIQUE' ValueOrObjectOptSpec :
+ {fixedtypevaluefield,'$1','$2','UNIQUE','$4'}.
+FieldSpec -> valuefieldreference Type ValueOrObjectOptSpec :
+ {fixedtypevaluefield,'$1','$2',undefined,'$3'}.
+
+FieldSpec -> valuefieldreference typefieldreference ValueOrObjectOptSpec :
+ {variabletypevaluefield, '$1','$2','$3'}.
+
+FieldSpec -> typefieldreference typefieldreference VSetOrOSetOptSpec :
+ {variabletypevaluesetfield, '$1','$2','$3'}.
+
+FieldSpec -> typefieldreference Type VSetOrOSetOptSpec :
+ {fixedtypevaluesetfield, '$1','$2','$3'}.
+
+TypeOptionalitySpec -> 'DEFAULT' Type : {'DEFAULT','$2'}.
+TypeOptionalitySpec -> 'OPTIONAL' : 'OPTIONAL'.
+TypeOptionalitySpec -> '$empty' : 'MANDATORY'.
+
+ValueOrObjectOptSpec -> ValueOptionalitySpec : '$1'.
+ValueOrObjectOptSpec -> ObjectOptionalitySpec : '$1'.
+ValueOrObjectOptSpec -> 'OPTIONAL' : 'OPTIONAL'.
+ValueOrObjectOptSpec -> '$empty' : 'MANDATORY'.
+
+ValueOptionalitySpec -> 'DEFAULT' Value :
+ case '$2' of
+ {identifier,_,Id} -> {'DEFAULT',Id};
+ _ -> {'DEFAULT','$2'}
+ end.
+
+%ObjectOptionalitySpec -> 'DEFAULT' Object :{'DEFAULT','$1'}.
+ObjectOptionalitySpec -> 'DEFAULT' '{' FieldSetting ',' FieldSettings '}' :
+    {'DEFAULT',{object,['$3'|'$5']}}.
+ObjectOptionalitySpec -> 'DEFAULT' '{' FieldSetting '}' :
+    {'DEFAULT',{object, ['$3']}}.
+%ObjectOptionalitySpec -> 'DEFAULT' '{' DefinedSyntaxTokens '}' :
+% {'DEFAULT',{object, '$2'}}.
+ObjectOptionalitySpec -> 'DEFAULT' ObjectFromObject :
+ {'DEFAULT',{object, '$2'}}.
+
+
+VSetOrOSetOptSpec -> ValueSetOptionalitySpec : '$1'.
+%VSetOrOSetOptSpec -> ObjectSetOptionalitySpec : '$1'.
+VSetOrOSetOptSpec -> 'OPTIONAL' : 'OPTIONAL'.
+VSetOrOSetOptSpec -> '$empty' : 'MANDATORY'.
+
+ValueSetOptionalitySpec -> 'DEFAULT' ValueSet : {'DEFAULT','$2'}.
+
+%ObjectSetOptionalitySpec -> 'DEFAULT' ObjectSet : {'DEFAULT','$1'}.
+
+OptionalitySpec -> 'DEFAULT' Type : {'DEFAULT','$2'}.
+OptionalitySpec -> 'DEFAULT' ValueNotNull :
+ case '$2' of
+ {identifier,_,Id} -> {'DEFAULT',Id};
+ _ -> {'DEFAULT','$2'}
+ end.
+OptionalitySpec -> 'OPTIONAL' : 'OPTIONAL'.
+OptionalitySpec -> '$empty' : 'MANDATORY'.
+
+WithSyntaxSpec -> 'WITH' 'SYNTAX' SyntaxList : {'WITH SYNTAX','$3'}.
+
+SyntaxList -> '{' TokenOrGroupSpecs '}' : '$2'.
+SyntaxList -> '{' '}' : [].
+
+TokenOrGroupSpecs -> TokenOrGroupSpec : ['$1'].
+TokenOrGroupSpecs -> TokenOrGroupSpec TokenOrGroupSpecs : ['$1'|'$2'].
+
+TokenOrGroupSpec -> RequiredToken : '$1'.
+TokenOrGroupSpec -> OptionalGroup : '$1'.
+
+OptionalGroup -> '[' TokenOrGroupSpecs ']' : '$2'.
+
+RequiredToken -> typereference : '$1'.
+RequiredToken -> Word : '$1'.
+RequiredToken -> ',' : '$1'.
+RequiredToken -> PrimitiveFieldName : '$1'.
+
+Word -> 'BY' : 'BY'.
+
+ParameterizedTypeAssignment -> typereference ParameterList '::=' Type :
+ #ptypedef{pos=element(2,'$1'),name=element(3,'$1'),
+ args='$2', typespec='$4'}.
+
+ParameterList -> '{' Parameters '}':'$2'.
+
+Parameters -> Parameter: ['$1'].
+Parameters -> Parameter ',' Parameters: ['$1'|'$3'].
+
+Parameter -> typereference: '$1'.
+Parameter -> Value: '$1'.
+Parameter -> Type ':' typereference: {'$1','$3'}.
+Parameter -> Type ':' Value: {'$1','$3'}.
+Parameter -> '{' typereference '}': {objectset,'$2'}.
+
+
+% Externaltypereference -> modulereference '.' typereference : {'$1','$3'} .
+Externaltypereference -> typereference '.' typereference : #'Externaltypereference'{pos=element(2,'$1'),module=element(3,'$1'),type=element(3,'$3')}.
+
+% Externalvaluereference -> modulereference '.' valuereference : {'$1','$3'} .
+% inlined Externalvaluereference -> typereference '.' identifier : #'Externalvaluereference'{pos=element(2,'$1'),module=element(3,'$1'),value=element(3,'$3')}.
+
+
+DefinedType -> Externaltypereference : '$1' .
+DefinedType -> typereference :
+ #'Externaltypereference'{pos='$1'#typereference.pos,
+ module= get(asn1_module),
+ type= '$1'#typereference.val} .
+DefinedType -> typereference ParameterList : {pt,'$1','$2'}.
+DefinedType -> Externaltypereference ParameterList : {pt,'$1','$2'}.
+
+% ActualParameterList -> '{' ActualParameters '}' : '$1'.
+
+% ActualParameters -> ActualParameter : ['$1'].
+% ActualParameters -> ActualParameter ',' ActualParameters : ['$1'|'$3'].
+
+ActualParameter -> Type : '$1'.
+ActualParameter -> ValueNotNull : '$1'.
+ActualParameter -> ValueSet : '$1'.
+% later DefinedType -> ParameterizedType : '$1' .
+% later DefinedType -> ParameterizedValueSetType : '$1' .
+
+% inlined DefinedValue -> Externalvaluereference :'$1'.
+% inlined DefinedValue -> identifier :'$1'.
+% later DefinedValue -> ParameterizedValue :'$1'.
+
+% not referenced yet AbsoluteReference -> '@' GlobalModuleReference '.' ItemSpec :{'$2','$4'}.
+
+% not referenced yet ItemSpec -> typereference :'$1'.
+% not referenced yet ItemSpec -> ItemId '.' ComponentId : {'$1','$3'}.
+
+% not referenced yet ItemId -> ItemSpec : '$1'.
+
+% not referenced yet ComponentId -> identifier :'$1'.
+% not referenced yet ComponentId -> number :'$1'.
+% not referenced yet ComponentId -> '*' :'$1'.
+
+TypeAssignment -> typereference '::=' Type :
+ #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec='$3'}.
+
+ValueAssignment -> identifier Type '::=' Value :
+ #valuedef{pos=element(2,'$1'),name=element(3,'$1'),type='$2',value='$4'}.
+
+% later ValueSetTypeAssignment -> typereference Type '::=' ValueSet :{'ValueSetTypeAssignment','$1','$2','$4'}.
+
+
+ValueSet -> '{' ElementSetSpec '}' : {valueset,'$2'}.
+
+% record(type,{tag,def,constraint}).
+Type -> BuiltinType :#type{def='$1'}.
+Type -> 'NULL' :#type{def='NULL'}.
+Type -> TaggedType:'$1'.
+Type -> ReferencedType:#type{def='$1'}. % change notag later
+Type -> ConstrainedType:'$1'.
+
+%ANY is here for compatibility with the old ASN.1 standard from 1988
+BuiltinType -> 'ANY' AnyDefBy:
+ case '$2' of
+ [] -> 'ANY';
+ _ -> {'ANY DEFINED BY','$2'}
+ end.
+BuiltinType -> BitStringType :'$1'.
+BuiltinType -> 'BOOLEAN' :element(1,'$1').
+BuiltinType -> CharacterStringType :'$1'.
+BuiltinType -> ChoiceType :'$1'.
+BuiltinType -> 'EMBEDDED' 'PDV' :'EMBEDDED PDV'.
+BuiltinType -> EnumeratedType :'$1'.
+BuiltinType -> 'EXTERNAL' :element(1,'$1').
+% later BuiltinType -> InstanceOfType :'$1'.
+BuiltinType -> IntegerType :'$1'.
+% BuiltinType -> 'NULL' :element(1,'$1').
+% later BuiltinType -> ObjectClassFieldType :'$1'.
+BuiltinType -> 'OBJECT' 'IDENTIFIER' :'OBJECT IDENTIFIER'.
+BuiltinType -> 'OCTET' 'STRING' :'OCTET STRING'.
+BuiltinType -> 'REAL' :element(1,'$1').
+BuiltinType -> SequenceType :'$1'.
+BuiltinType -> SequenceOfType :'$1'.
+BuiltinType -> SetType :'$1'.
+BuiltinType -> SetOfType :'$1'.
+% The so called Useful types
+BuiltinType -> 'GeneralizedTime': 'GeneralizedTime'.
+BuiltinType -> 'UTCTime' :'UTCTime'.
+BuiltinType -> 'ObjectDescriptor' : 'ObjectDescriptor'.
+
+% moved BuiltinType -> TaggedType :'$1'.
+
+
+AnyDefBy -> 'DEFINED' 'BY' identifier: '$3'.
+AnyDefBy -> '$empty': [].
+
+NamedType -> identifier Type :
+%{_,Pos,Val} = '$1',
+%{'NamedType',Pos,{Val,'$2'}}.
+V1 = '$1',
+{'NamedType',V1#identifier.pos,{V1#identifier.val,'$2'}}.
+NamedType -> SelectionType :'$1'.
+
+ReferencedType -> DefinedType : '$1'.
+% redundant ReferencedType -> UsefulType : 'fix'.
+ReferencedType -> SelectionType : '$1'.
+ReferencedType -> TypeFromObject : '$1'.
+% later ReferencedType -> ValueSetFromObjects : 'fix'.
+
+% too many conflicts: Value -> AnyValue :'$1'.
+Value -> ValueNotNull : '$1'.
+Value -> 'NULL' :element(1,'$1').
+
+ValueNotNull -> BuiltinValue :'$1'.
+% inlined Value -> DefinedValue :'$1'. % DefinedValue , identifier
+% inlined Externalvaluereference -> Externalvaluereference :'$1'.
+ValueNotNull -> typereference '.' identifier :
+ #'Externalvaluereference'{pos=element(2,'$1'),module=element(3,'$1'),
+ value=element(3,'$3')}.
+ValueNotNull -> identifier :'$1'.
+
+
+%tmp Value -> NamedNumber: '$1'. % not a value but part of ObjIdC
+% redundant BuiltinValue -> BitStringValue :'$1'.
+BuiltinValue -> BooleanValue :'$1'.
+BuiltinValue -> CharacterStringValue :'$1'.
+BuiltinValue -> ChoiceValue :'$1'.
+% BuiltinValue -> EmbeddedPDVValue :'$1'. ==SequenceValue
+% BuiltinValue -> EnumeratedValue :'$1'. identifier
+% BuiltinValue -> ExternalValue :'$1'. ==SequenceValue
+% later BuiltinValue -> InstanceOfValue :'$1'.
+BuiltinValue -> SignedNumber :'$1'.
+% BuiltinValue -> 'NULL' :'$1'.
+% later BuiltinValue -> ObjectClassFieldValue :'$1'.
+% replaced by SAndSOfValue BuiltinValue -> ObjectIdentifierValue :'$1'.
+BuiltinValue -> bstring :element(3,'$1').
+BuiltinValue -> hstring :element(3,'$1').
+% conflict BuiltinValue -> RealValue :'$1'.
+BuiltinValue -> SAndSOfValue :'$1'.
+% replaced BuiltinValue -> SequenceOfValue :'$1'.
+% replaced BuiltinValue -> SequenceValue :'$1'.
+% replaced BuiltinValue -> SetValue :'$1'.
+% replaced BuiltinValue -> SetOfValue :'$1'.
+% conflict redundant BuiltinValue -> TaggedValue :'$1'.
+
+% inlined ReferencedValue -> DefinedValue:'$1'.
+% ReferencedValue -> Externalvaluereference:'$1'.
+% ReferencedValue -> identifier :'$1'.
+% later ReferencedValue -> ValueFromObject:'$1'.
+
+% inlined BooleanType -> BOOLEAN :'BOOLEAN'.
+
+% too many conflicts: AnyValue -> Type ':' Value : {'ANYVALUE',{'$1','$3'}}.
+
+BooleanValue -> TRUE :true.
+BooleanValue -> FALSE :false.
+
+IntegerType -> 'INTEGER' : 'INTEGER'.
+IntegerType -> 'INTEGER' '{' NamedNumberList '}' : {'INTEGER','$3'}.
+
+NamedNumberList -> NamedNumber :['$1'].
+% modified NamedNumberList -> NamedNumberList ',' NamedNumber :'fix'.
+NamedNumberList -> NamedNumber ',' NamedNumberList :['$1'|'$3'].
+
+NamedNumber -> identifier '(' SignedNumber ')' : {'NamedNumber',element(3,'$1'),'$3'}.
+NamedNumber -> identifier '(' typereference '.' identifier ')' : {'NamedNumber',element(3,'$1'),{'ExternalValue',element(3,'$3'),element(3,'$5')}}.
+NamedNumber -> identifier '(' identifier ')' : {'NamedNumber',element(3,'$1'),element(3,'$3')}.
+
+%NamedValue -> identifier Value :
+% {'NamedValue',element(2,'$1'),element(3,'$1'),'$2'}.
+
+
+SignedNumber -> number : element(3,'$1').
+SignedNumber -> '-' number : - element(3,'$1').
+
+% inlined IntegerValue -> SignedNumber :'$1'.
+% conflict moved to Value IntegerValue -> identifier:'$1'.
+
+EnumeratedType -> ENUMERATED '{' Enumeration '}' :{'ENUMERATED','$3'}.
+
+% inlined Enumerations -> Enumeration :{'$1','false',[]}.
+% inlined Enumerations -> Enumeration ',' '...' : {'$1','true',[]}.
+% inlined Enumerations -> Enumeration ',' '...' ',' Enumeration : {'$1','true','$5'}.
+
+Enumeration -> EnumerationItem :['$1'].
+% modified Enumeration -> EnumerationItem ',' Enumeration :'fix'.
+Enumeration -> EnumerationItem ',' Enumeration :['$1'|'$3'].
+
+EnumerationItem -> identifier:element(3,'$1').
+EnumerationItem -> NamedNumber :'$1'.
+EnumerationItem -> '...' :'EXTENSIONMARK'.
+
+% conflict moved to Value EnumeratedValue -> identifier:'$1'.
+
+% inlined RealType -> REAL:'REAL'.
+
+RealValue -> NumericRealValue :'$1'.
+RealValue -> SpecialRealValue:'$1'.
+
+% ?? NumericRealValue -> number:'$1'. % number MUST BE '0'
+NumericRealValue -> SAndSOfValue : '$1'. % Value of the associated sequence type
+
+SpecialRealValue -> 'PLUS-INFINITY' :'$1'.
+SpecialRealValue -> 'MINUS-INFINITY' :'$1'.
+
+BitStringType -> 'BIT' 'STRING' :{'BIT STRING',[]}.
+BitStringType -> 'BIT' 'STRING' '{' NamedNumberList '}' :{'BIT STRING','$4'}.
+% NamedBitList replaced by NamedNumberList to reduce the grammar
+% Must check later that all "numbers" are positive
+
+% inlined BitStringValue -> bstring:'$1'.
+% inlined BitStringValue -> hstring:'$1'.
+% redundant use SequenceValue BitStringValue -> '{' IdentifierList '}' :$2.
+% redundant use SequenceValue BitStringValue -> '{' '}' :'fix'.
+
+IdentifierList -> identifier :[element(3,'$1')].
+% modified IdentifierList -> IdentifierList ',' identifier :'$1'.
+IdentifierList -> identifier ',' IdentifierList :[element(3,'$1')|'$3'].
+
+% inlined OctetStringType -> 'OCTET' 'STRING' :'OCTET STRING'.
+
+% inlined OctetStringValue -> bstring:'$1'.
+% inlined OctetStringValue -> hstring:'$1'.
+
+% inlined NullType -> 'NULL':'NULL'.
+
+% inlined NullValue -> NULL:'NULL'.
+
+% result is {'SEQUENCE',Optionals,Extensionmark,Componenttypelist}.
+SequenceType -> SEQUENCE '{' ComponentTypeList '}' :{'SEQUENCE','$3'}.
+% SequenceType -> SEQUENCE '{' ComponentTypeLists '}' :{'SEQUENCE','$3'}.
+% SequenceType -> SEQUENCE '{' ExtensionAndException '}' :{'SEQUENCE','$3'}.
+SequenceType -> SEQUENCE '{' '}' :{'SEQUENCE',[]}.
+
+% result is {RootComponentList,ExtensionAndException,AdditionalComponentTypeList}.
+%ComponentTypeLists -> ComponentTypeList ',' ExtensionAndException :{'$1','$3',[]}.
+%ComponentTypeLists -> ComponentTypeList :{'$1','false',[]}.
+%ComponentTypeLists -> ComponentTypeList ',' ExtensionAndException
+% ',' ComponentTypeList :{'$1','$3', '$5'}.
+%ComponentTypeLists -> ExtensionAndException ',' ComponentTypeList :{[],'$1','$3'}.
+
+ComponentTypeList -> ComponentType :['$1'].
+% modified below ComponentTypeList -> ComponentTypeList ',' ComponentType :'$1'.
+ComponentTypeList -> ComponentType ',' ComponentTypeList :['$1'|'$3'].
+
+% -record('ComponentType',{pos,name,type,attrib}).
+ComponentType -> '...' ExceptionSpec :{'EXTENSIONMARK',element(2,'$1'),'$2'}.
+ComponentType -> NamedType :
+ {'NamedType',Pos,{Name,Type}} = '$1',
+ #'ComponentType'{pos=Pos,name=Name,typespec=Type,prop=mandatory}.
+ComponentType -> NamedType 'OPTIONAL' :
+ {'NamedType',Pos,{Name,Type}} = '$1',
+ #'ComponentType'{pos=Pos,name=Name,typespec=Type,prop='OPTIONAL'}.
+ComponentType -> NamedType 'DEFAULT' Value:
+ {'NamedType',Pos,{Name,Type}} = '$1',
+ #'ComponentType'{pos=Pos,name=Name,typespec=Type,prop={'DEFAULT','$3'}}.
+ComponentType -> 'COMPONENTS' 'OF' Type :{'COMPONENTS OF','$3'}.
+
+% redundant ExtensionAndException -> '...' : extensionmark.
+% ExtensionAndException -> '...' ExceptionSpec : {extensionmark,'$2'}.
+
+% replaced SequenceValue -> '{' ComponentValueList '}':'$2'.
+% replaced SequenceValue -> '{' '}':[].
+
+ValueList -> Value :['$1'].
+ValueList -> NamedNumber :['$1'].
+% modified ValueList -> ValueList ',' Value :'$1'.
+ValueList -> Value ',' ValueList :['$1'|'$3'].
+ValueList -> Value ',' '...' :['$1' |[]].
+ValueList -> Value ValueList : ['$1',space|'$2'].
+ValueList -> NamedNumber ValueList: ['$1',space|'$2'].
+
+%ComponentValueList -> identifier ObjIdComponent:[{'NamedValue','$1','$2'}].
+%ComponentValueList -> NamedValue :['$1'].
+%ComponentValueList -> NamedValue ',' ComponentValueList:['$1'|'$3'].
+%ComponentValueList -> identifier ObjIdComponent ',' ComponentValueList :[{'NamedValue', '$1','$2'}|'$4'].
+
+SequenceOfType -> SEQUENCE OF Type : {'SEQUENCE OF','$3'}.
+
+% replaced SequenceOfValue with SAndSOfValue
+
+SAndSOfValue -> '{' ValueList '}' :'$2'.
+%SAndSOfValue -> '{' ComponentValueList '}' :'$2'.
+SAndSOfValue -> '{' '}' :[].
+
+% save for later SetType ->
+% result is {'SET',Optionals,Extensionmark,Componenttypelist}.
+SetType -> SET '{' ComponentTypeList '}' :{'SET','$3'}.
+% SetType -> SET '{' ExtensionAndException '}' :{'SET','$3'}.
+SetType -> SET '{' '}' :{'SET',[]}.
+
+% replaced SetValue with SAndSOfValue
+
+SetOfType -> SET OF Type : {'SET OF','$3'}.
+
+% replaced SetOfValue with SAndSOfValue
+
+ChoiceType -> 'CHOICE' '{' ComponentTypeList '}' :{'CHOICE','$3'}.
+% AlternativeTypeList is replaced by ComponentTypeList
+ChoiceValue -> identifier ':' Value : {'ChoiceValue',element(3,'$1'),'$3'}.
+% save for later SelectionType ->
+
+TaggedType -> Tag Type : '$2'#type{tag=['$1'#tag{type={default,get(tagdefault)}}]}.
+TaggedType -> Tag IMPLICIT Type :'$3'#type{tag=['$1'#tag{type='IMPLICIT'}]}.
+TaggedType -> Tag EXPLICIT Type :'$3'#type{tag=['$1'#tag{type='EXPLICIT'}]}.
+
+Tag -> '[' Class ClassNumber ']': #tag{class='$2',number='$3'}.
+Tag -> '[' Class typereference '.' identifier ']':
+ #tag{class='$2',number=#'Externalvaluereference'{pos=element(2,'$3'),module=element(3,'$3'),
+ value=element(3,'$5')}}.
+Tag -> '[' Class number ']': #tag{class='$2',number=element(3,'$3')}.
+Tag -> '[' Class identifier ']': #tag{class='$2',number=element(3,'$3')}.
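+% Illustrative examples of what these rules accept:
+%   [APPLICATION 3]      gives #tag{class='APPLICATION',number=3}
+%   [2]                  gives #tag{class='CONTEXT',number=2}
+%   [PRIVATE enterprise] gives #tag{class='PRIVATE',number=enterprise}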
+
+ClassNumber -> number :element(3,'$1').
+% inlined above ClassNumber -> typereference '.' identifier :{'Externalvaluereference',element(3,'$1'),element(3,'$3')}.
+ClassNumber -> identifier :element(3,'$1').
+
+Class -> 'UNIVERSAL' :element(1,'$1').
+Class -> 'APPLICATION' :element(1,'$1').
+Class -> 'PRIVATE' :element(1,'$1').
+Class -> '$empty' :'CONTEXT'.
+
+% conflict redundant TaggedValue -> Value:'$1'.
+
+% inlined EmbeddedPDVType -> 'EMBEDDED' 'PDV' :'EMBEDDED PDV'.
+
+% inlined EmbeddedPDVValue -> SequenceValue:'$1'.
+
+% inlined ExternalType -> 'EXTERNAL' :'EXTERNAL'.
+
+% inlined ExternalValue -> SequenceValue :'$1'.
+
+% inlined ObjectIdentifierType -> 'OBJECT' 'IDENTIFIER' :'OBJECT IDENTIFIER'.
+
+ObjectIdentifierValue -> '{' ObjIdComponentList '}' :'$2'.
+% inlined ObjectIdentifierValue -> SequenceAndSequenceOfValue :'$1'.
+% ObjectIdentifierValue -> '{' identifier ObjIdComponentList '}' :{'ObjectIdentifierValue','$2','$3'}.
+% ObjectIdentifierValue -> '{' typereference '.' identifier ObjIdComponentList '}' :{'ObjectIdentifierValue',{'$2','$4'},'$5'}.
+
+ObjIdComponentList -> Value:'$1'.
+ObjIdComponentList -> Value ObjIdComponentList :['$1'|'$2'].
+%ObjIdComponentList -> DefinedValue:'$1'.
+%ObjIdComponentList -> number:'$1'.
+%ObjIdComponentList -> DefinedValue ObjIdComponentList :['$1'|'$2'].
+%ObjIdComponentList -> number ObjIdComponentList :['$1'|'$2'].
+%ObjIdComponentList -> ObjIdComponent ObjIdComponentList :['$1'|'$2'].
+%ObjIdComponentList -> ObjIdComponent ObjIdComponentList :['$1'|'$2'].
+
+% redundant ObjIdComponent -> NameForm :'$1'. % expanded
+% replaced by 2 ObjIdComponent -> NumberForm :'$1'.
+% ObjIdComponent -> number :'$1'.
+% ObjIdComponent -> DefinedValue :'$1'. % means DefinedValue
+% ObjIdComponent -> NameAndNumberForm :'$1'.
+% ObjIdComponent -> NamedNumber :'$1'.
+% NamedBit replaced by NamedNumber to reduce grammar
+% must check later that "number" is positive
+
+% NameForm -> identifier:'$1'.
+
+% inlined NumberForm -> number :'$1'.
+% inlined NumberForm -> DefinedValue :'$1'.
+
+% replaced by NamedBit NameAndNumberForm -> identifier '(' NumberForm ')'.
+% NameAndNumberForm -> NamedBit:'$1'.
+
+
+CharacterStringType -> restrictedcharacterstringtype :element(3,'$1').
+CharacterStringType -> 'CHARACTER' 'STRING' :'CHARACTER STRING'.
+
+RestrictedCharacterStringValue -> cstring :element(3, '$1').
+% modified below RestrictedCharacterStringValue -> CharacterStringList :'$1'.
+% conflict vs BuiltinValue RestrictedCharacterStringValue -> SequenceAndSequenceOfValue :'$1'.
+RestrictedCharacterStringValue -> Quadruple :'$1'.
+RestrictedCharacterStringValue -> Tuple :'$1'.
+
+% redundant CharacterStringList -> '{' ValueList '}' :'$2'. % modified
+
+% redundant CharSyms -> CharsDefn :'$1'.
+% redundant CharSyms -> CharSyms ',' CharsDefn :['$1'|'$3'].
+
+% redundant CharsDefn -> cstring :'$1'.
+% temporary replaced see below CharsDefn -> DefinedValue :'$1'.
+% redundant CharsDefn -> Value :'$1'.
+
+Quadruple -> '{' number ',' number ',' number ',' number '}' :{'Quadruple','$2','$4','$6','$8'}.
+% {Group,Plane,Row,Cell}
+
+Tuple -> '{' number ',' number '}' :{'Tuple', '$2','$4'}.
+% {TableColumn,TableRow}
+
+% inlined UnrestrictedCharacterString -> 'CHARACTER' 'STRING' :'CHARACTER STRING'.
+
+CharacterStringValue -> RestrictedCharacterStringValue :'$1'.
+% conflict vs BuiltinValue CharacterStringValue -> SequenceValue :'$1'. % UnrestrictedCharacterStringValue
+
+% inlined UsefulType -> typereference :'$1'.
+
+SelectionType -> identifier '<' Type : {'SelectionType',element(3,'$1'),'$3'}.
+
+ConstrainedType -> Type Constraint :
+ '$1'#type{constraint=merge_constraints(['$2'])}.
+ConstrainedType -> Type Constraint Constraint :
+ '$1'#type{constraint=merge_constraints(['$2','$3'])}.
+ConstrainedType -> Type Constraint Constraint Constraint:
+ '$1'#type{constraint=merge_constraints(['$2','$3','$4'])}.
+ConstrainedType -> Type Constraint Constraint Constraint Constraint:
+ '$1'#type{constraint=merge_constraints(['$2','$3','$4','$5'])}.
+%ConstrainedType -> Type Constraint :'$1'#type{constraint='$2'}.
+%ConstrainedType -> Type Constraint :'$1'#type{constraint='$2'}.
+ConstrainedType -> TypeWithConstraint :'$1'.
+
+TypeWithConstraint -> 'SET' Constraint 'OF' Type :
+ #type{def = {'SET OF','$4'},constraint=merge_constraints(['$2'])}.
+TypeWithConstraint -> 'SET' 'SIZE' Constraint 'OF' Type :
+ #type{def = {'SET OF','$5'},constraint = merge_constraints([#constraint{c={'SizeConstraint','$3'#constraint.c}}])}.
+TypeWithConstraint -> 'SEQUENCE' Constraint 'OF' Type :
+ #type{def = {'SEQUENCE OF','$4'},constraint =
+ merge_constraints(['$2'])}.
+TypeWithConstraint -> 'SEQUENCE' 'SIZE' Constraint 'OF' Type :
+ #type{def = {'SEQUENCE OF','$5'},constraint = merge_constraints([#constraint{c={'SizeConstraint','$3'#constraint.c}}])}.
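+% For example, "SEQUENCE SIZE (1..4) OF INTEGER" is handled here rather
+% than by ConstrainedType, yielding a 'SEQUENCE OF' type whose merged
+% constraint carries a 'SizeConstraint'.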
+
+
+Constraint -> '(' ConstraintSpec ExceptionSpec ')' :
+ #constraint{c='$2',e='$3'}.
+
+% inlined Constraint -> SubTypeConstraint :'$1'.
+ConstraintSpec -> ElementSetSpecs :'$1'.
+ConstraintSpec -> UserDefinedConstraint :'$1'.
+ConstraintSpec -> TableConstraint :'$1'.
+
+TableConstraint -> ComponentRelationConstraint : '$1'.
+TableConstraint -> ObjectSet : '$1'.
+%TableConstraint -> '{' typereference '}' :tableconstraint.
+
+ComponentRelationConstraint -> '{' typereference '}' '{' '@' ComponentIdList '}' : componentrelation.
+ComponentRelationConstraint -> '{' typereference '}' '{' '@' '.' ComponentIdList '}' : componentrelation.
+
+ComponentIdList -> identifier: ['$1'].
+ComponentIdList -> identifier '.' ComponentIdList: ['$1'| '$3'].
+
+
+% later ConstraintSpec -> GeneralConstraint :'$1'.
+
+% from X.682
+UserDefinedConstraint -> 'CONSTRAINED' 'BY' '{' '}' : {constrained_by,[]}.
+UserDefinedConstraint -> 'CONSTRAINED' 'BY'
+ '{' UserDefinedConstraintParameters '}' : {constrained_by,'$4'}.
+
+UserDefinedConstraintParameters -> UserDefinedConstraintParameter : ['$1'].
+UserDefinedConstraintParameters ->
+ UserDefinedConstraintParameter ','
+ UserDefinedConstraintParameters: ['$1'|'$3'].
+
+UserDefinedConstraintParameter -> Type '.' ActualParameter : {'$1','$3'}.
+UserDefinedConstraintParameter -> ActualParameter : '$1'.
+
+
+
+ExceptionSpec -> '!' ExceptionIdentification : '$2'.
+ExceptionSpec -> '$empty' : undefined.
+
+ExceptionIdentification -> SignedNumber : '$1'.
+% inlined ExceptionIdentification -> DefinedValue : '$1'.
+ExceptionIdentification -> typereference '.' identifier :
+ #'Externalvaluereference'{pos=element(2,'$1'),module=element(3,'$1'),
+				  value=element(3,'$3')}.
+ExceptionIdentification -> identifier :'$1'.
+ExceptionIdentification -> Type ':' Value : {'$1','$3'}.
+
+% inlined SubTypeConstraint -> ElementSetSpec
+
+ElementSetSpecs -> ElementSetSpec : '$1'.
+ElementSetSpecs -> ElementSetSpec ',' '...': {'$1',[]}.
+ElementSetSpecs -> '...' ',' ElementSetSpec : {[],'$3'}.
+ElementSetSpecs -> ElementSetSpec ',' '...' ',' ElementSetSpec : {'$1','$5'}.
+
+ElementSetSpec -> Unions : '$1'.
+ElementSetSpec -> 'ALL' Exclusions : {'ALL','$2'}.
+
+Unions -> Intersections : '$1'.
+Unions -> UElems UnionMark IntersectionElements :
+ case {'$1','$3'} of
+ {{'SingleValue',V1},{'SingleValue',V2}} ->
+ {'SingleValue',ordsets:union(to_set(V1),to_set(V2))}
+ end.
+
+UElems -> Unions :'$1'.
+
+Intersections -> IntersectionElements :'$1'.
+Intersections -> IElems IntersectionMark IntersectionElements :
+ case {'$1','$3'} of
+ {{'SingleValue',V1},{'SingleValue',V2}} ->
+ {'SingleValue',ordsets:intersection(to_set(V1),to_set(V2))};
+ {V1,V2} when list(V1) ->
+ V1 ++ [V2];
+ {V1,V2} ->
+ [V1,V2]
+ end.
+%Intersections -> IElems '^' IntersectionElements :{'INTERSECTION','$1','$3'}.
+%Intersections -> IElems 'INTERSECTION' IntersectionElements :{'INTERSECTION','$1','$3'}.
+
+IElems -> Intersections :'$1'.
+
+IntersectionElements -> Elements :'$1'.
+IntersectionElements -> Elems Exclusions :{'$1','$2'}.
+
+Elems -> Elements :'$1'.
+
+Exclusions -> 'EXCEPT' Elements :{'EXCEPT','$2'}.
+
+IntersectionMark -> 'INTERSECTION':'$1'.
+IntersectionMark -> '^':'$1'.
+UnionMark -> 'UNION':'$1'.
+UnionMark -> '|':'$1'.
+
+
+Elements -> SubTypeElements : '$1'.
+%Elements -> ObjectSetElements : '$1'.
+Elements -> '(' ElementSetSpec ')' : '$2'.
+Elements -> ReferencedType : '$1'.
+
+SubTypeElements -> ValueList : {'SingleValue','$1'}. % NOTE it must be a Value
+% The rule above was modified only because of conflicts
+SubTypeElements -> 'INCLUDES' Type : {'ContainedSubType','$2'}.
+%not lalr1 if this is activated SubTypeElements -> Type : {'TypeConstraint','$1'}.
+SubTypeElements -> LowerEndpoint '..' UpperEndpoint : {'ValueRange',{'$1','$3'}}.
+SubTypeElements -> 'FROM' Constraint : {'PermittedAlphabet','$2'#constraint.c}.
+SubTypeElements -> 'SIZE' Constraint: {'SizeConstraint','$2'#constraint.c}.
+% later will introduce conflicts related to NULL SubTypeElements -> Type : {'TypeConstraint','$1'}.
+SubTypeElements -> 'WITH' 'COMPONENT' Constraint:{'WITH COMPONENT','$3'}.
+SubTypeElements -> 'WITH' 'COMPONENTS' '{' TypeConstraints '}':{'WITH COMPONENTS',{'FullSpecification','$4'}}.
+SubTypeElements -> 'WITH' 'COMPONENTS' '{' '...' ',' TypeConstraints '}' :{'WITH COMPONENTS',{'PartialSpecification','$6'}}.
+
+% inlined above InnerTypeConstraints ::=
+% inlined above SingleTypeConstraint::= Constraint
+% inlined above MultipleTypeConstraints ::= FullSpecification | PartialSpecification
+% inlined above FullSpecification ::= "{" TypeConstraints "}"
+% inlined above PartialSpecification ::= "{" "..." "," TypeConstraints "}"
+% TypeConstraints -> identifier : [{'NamedConstraint',element(3,'$1'),undefined,undefined}]. % is this really meaningful or allowed
+TypeConstraints -> NamedConstraint : ['$1'].
+TypeConstraints -> NamedConstraint ',' TypeConstraints : ['$1'|'$3'].
+TypeConstraints -> identifier : ['$1'].
+TypeConstraints -> identifier ',' TypeConstraints : ['$1'|'$3'].
+
+NamedConstraint -> identifier Constraint PresenceConstraint :{'NamedConstraint',element(3,'$1'),'$2','$3'}.
+NamedConstraint -> identifier Constraint :{'NamedConstraint',element(3,'$1'),'$2',undefined}.
+NamedConstraint -> identifier PresenceConstraint :{'NamedConstraint',element(3,'$1'),undefined,'$2'}.
+
+PresenceConstraint -> 'PRESENT' : 'PRESENT'.
+PresenceConstraint -> 'ABSENT' : 'ABSENT'.
+PresenceConstraint -> 'OPTIONAL' : 'OPTIONAL'.
+
+
+
+LowerEndpoint -> LowerEndValue :'$1'.
+%LowerEndpoint -> LowerEndValue '<':{gt,'$1'}.
+LowerEndpoint -> LowerEndValue '<':('$1'+1).
+
+UpperEndpoint -> UpperEndValue :'$1'.
+%UpperEndpoint -> '<' UpperEndValue :{lt,'$2'}.
+UpperEndpoint -> '<' UpperEndValue :('$2'-1).
+
+LowerEndValue -> Value :'$1'.
+LowerEndValue -> 'MIN' :'MIN'.
+
+UpperEndValue -> Value :'$1'.
+UpperEndValue -> 'MAX' :'MAX'.
+
+
+% X.681
+
+
+% X.681 chap 15
+
+%TypeFromObject -> ReferencedObjects '.' FieldName : {'$1','$3'}.
+TypeFromObject -> typereference '.' FieldName : {'$1','$3'}.
+
+ReferencedObjects -> typereference : '$1'.
+%ReferencedObjects -> ParameterizedObject
+%ReferencedObjects -> DefinedObjectSet
+%ReferencedObjects -> ParameterizedObjectSet
+
+FieldName -> typefieldreference : ['$1'].
+FieldName -> valuefieldreference : ['$1'].
+FieldName -> FieldName '.' FieldName : ['$1' | '$3'].
+
+PrimitiveFieldName -> typefieldreference : '$1'.
+PrimitiveFieldName -> valuefieldreference : '$1'.
+
+%ObjectSetAssignment -> typereference DefinedObjectClass '::=' ObjectSet: null.
+ObjectSetAssignment -> typereference typereference '::=' ObjectSet :
+ #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec={'ObjectSet',element(3,'$2'), '$4'}}.
+ObjectSetAssignment -> typereference typereference '.' typereference '::=' ObjectSet.
+
+ObjectSet -> '{' ElementSetSpecs '}' : '$2'.
+ObjectSet -> '{' '...' '}' : ['EXTENSIONMARK'].
+
+%ObjectSetElements -> Object.
+% ObjectSetElements -> identifier : '$1'.
+%ObjectSetElements -> DefinedObjectSet.
+%ObjectSetElements -> ObjectSetFromObjects.
+%ObjectSetElements -> ParameterizedObjectSet.
+
+%ObjectAssignment -> identifier DefinedObjectClass '::=' Object.
+ObjectAssignment -> ValueAssignment.
+%ObjectAssignment -> identifier typereference '::=' Object.
+%ObjectAssignment -> identifier typereference '.' typereference '::=' Object.
+
+%Object -> DefinedObject: '$1'.
+%Object -> ExternalObjectReference: '$1'.%Object -> DefinedObject: '$1'.
+Object -> typereference '.' identifier: '$1'.%Object -> DefinedObject: '$1'.
+Object -> identifier: '$1'.%Object -> DefinedObject: '$1'.
+
+%Object -> ObjectDefn -> DefaultSyntax: '$1'.
+Object -> '{' FieldSetting ',' FieldSettings '}' : ['$2'|'$4'].
+Object -> '{' FieldSetting '}' :['$2'].
+
+%% For User-friendly notation
+%% Object -> ObjectDefn -> DefinedSyntax
+Object -> '{' '}'.
+Object -> '{' DefinedSyntaxTokens '}'.
+
+% later Object -> ParameterizedObject: '$1'. look in x.683
+
+%DefinedObject -> ExternalObjectReference: '$1'.
+%DefinedObject -> identifier: '$1'.
+
+DefinedObjectClass -> typereference.
+%DefinedObjectClass -> objectclassreference.
+DefinedObjectClass -> ExternalObjectClassReference.
+%DefinedObjectClass -> typereference '.' objectclassreference.
+%%DefinedObjectClass -> UsefulObjectClassReference.
+
+ExternalObjectReference -> typereference '.' identifier.
+ExternalObjectClassReference -> typereference '.' typereference.
+%%ExternalObjectClassReference -> typereference '.' objectclassreference.
+
+ObjectDefn -> DefaultSyntax: '$1'.
+%ObjectDefn -> DefinedSyntax: '$1'.
+
+ObjectFromObject -> ReferencedObjects '.' FieldName : {'ObjectFromObject','$1','$3'}.
+
+% later look in x.683 ParameterizedObject ->
+
+%DefaultSyntax -> '{' '}'.
+%DefaultSyntax -> '{' FieldSettings '}': '$2'.
+DefaultSyntax -> '{' FieldSetting ',' FieldSettings '}': '$2'.
+DefaultSyntax -> '{' FieldSetting '}': '$2'.
+
+FieldSetting -> PrimitiveFieldName Setting: {'$1','$2'}.
+
+FieldSettings -> FieldSetting ',' FieldSettings: ['$1'|'$3'].
+FieldSettings -> FieldSetting: '$1'.
+
+%DefinedSyntax -> '{' '}'.
+DefinedSyntax -> '{' DefinedSyntaxTokens '}': '$2'.
+
+DefinedSyntaxTokens -> DefinedSyntaxToken: '$1'.
+DefinedSyntaxTokens -> DefinedSyntaxToken DefinedSyntaxTokens: ['$1'|'$2'].
+
+% expanded DefinedSyntaxToken -> Literal: '$1'.
+%DefinedSyntaxToken -> typereference: '$1'.
+DefinedSyntaxToken -> word: '$1'.
+DefinedSyntaxToken -> ',': '$1'.
+DefinedSyntaxToken -> Setting: '$1'.
+%DefinedSyntaxToken -> '$empty': nil .
+
+% Setting ::= Type|Value|ValueSet|Object|ObjectSet
+Setting -> Type: '$1'.
+%Setting -> Value: '$1'.
+%Setting -> ValueNotNull: '$1'.
+Setting -> BuiltinValue: '$1'.
+Setting -> ValueSet: '$1'.
+%Setting -> Object: '$1'.
+%Setting -> ExternalObjectReference.
+Setting -> typereference '.' identifier.
+Setting -> identifier.
+Setting -> ObjectDefn.
+
+Setting -> ObjectSet: '$1'.
+
+
+Erlang code.
+%%-author('[email protected]').
+-copyright('Copyright (c) 1991-99 Ericsson Telecom AB').
+-vsn('$Revision: 1.1 $').
+-include("asn1_records.hrl").
+
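+%% to_set/1 normalises a single value or a list of values into an ordset,
+%% so that 'SingleValue' constraints can be combined with ordsets:union/2
+%% and ordsets:intersection/2 in the union/intersection rules above.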
+to_set(V) when list(V) ->
+ ordsets:list_to_set(V);
+to_set(V) ->
+ ordsets:list_to_set([V]).
+
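+%% merge_constraints/1 flattens a list of #constraint records into a plain
+%% list of constraint terms, fixing up each c field via fixup_constraint/1
+%% and collecting any exception (e) parts into a trailing {'Errors',...}
+%% entry.  The two-tuple clause handles a constraint list that contains an
+%% extension marker.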
+merge_constraints({Rlist,ExtList}) -> % extensionmarker in constraint
+ {merge_constraints(Rlist,[],[]),
+ merge_constraints(ExtList,[],[])};
+
+merge_constraints(Clist) ->
+ merge_constraints(Clist, [], []).
+
+merge_constraints([Ch|Ct],Cacc, Eacc) ->
+ NewEacc = case Ch#constraint.e of
+ undefined -> Eacc;
+ E -> [E|Eacc]
+ end,
+ merge_constraints(Ct,[fixup_constraint(Ch#constraint.c)|Cacc],NewEacc);
+
+merge_constraints([],Cacc,[]) ->
+ lists:flatten(Cacc);
+merge_constraints([],Cacc,Eacc) ->
+ lists:flatten(Cacc) ++ [{'Errors',Eacc}].
+
+fixup_constraint(C) ->
+ case C of
+ {'SingleValue',V} when list(V) ->
+ [C,
+ {'ValueRange',{lists:min(V),lists:max(V)}}];
+ {'PermittedAlphabet',{'SingleValue',V}} when list(V) ->
+ V2 = {'SingleValue',
+ ordsets:list_to_set(lists:flatten(V))},
+ {'PermittedAlphabet',V2};
+ {'PermittedAlphabet',{'SingleValue',V}} ->
+ V2 = {'SingleValue',[V]},
+ {'PermittedAlphabet',V2};
+ {'SizeConstraint',Sc} ->
+ {'SizeConstraint',fixup_size_constraint(Sc)};
+
+ List when list(List) ->
+ [fixup_constraint(Xc)||Xc <- List];
+ Other ->
+ Other
+ end.
+
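+%% fixup_size_constraint/1 rewrites a parsed SIZE constraint into the
+%% bounds form used by the rest of the compiler: a value range becomes
+%% {Lb,Ub}, a single value N becomes {N,N}, a list of values becomes an
+%% ordered set, and pairs containing an extension part are fixed up
+%% component by component.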
+fixup_size_constraint({'ValueRange',{Lb,Ub}}) ->
+ {Lb,Ub};
+fixup_size_constraint({{'ValueRange',R},[]}) ->
+ {R,[]};
+fixup_size_constraint({[],{'ValueRange',R}}) ->
+ {[],R};
+fixup_size_constraint({{'ValueRange',R1},{'ValueRange',R2}}) ->
+ {R1,R2};
+fixup_size_constraint({'SingleValue',[Sv]}) ->
+ fixup_size_constraint({'SingleValue',Sv});
+fixup_size_constraint({'SingleValue',L}) when list(L) ->
+ ordsets:list_to_set(L);
+fixup_size_constraint({'SingleValue',L}) ->
+ {L,L};
+fixup_size_constraint({C1,C2}) ->
+ {fixup_size_constraint(C1), fixup_size_constraint(C2)}.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl
new file mode 100644
index 0000000000..07dacb73c8
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl
@@ -0,0 +1,2763 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 2000, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_parser2.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+-module(asn1ct_parser2).
+
+-export([parse/1]).
+-include("asn1_records.hrl").
+
+%% parse all types in module
+parse(Tokens) ->
+ case catch parse_ModuleDefinition(Tokens) of
+ {'EXIT',Reason} ->
+ {error,{{undefined,get(asn1_module),
+ [internal,error,'when',parsing,module,definition,Reason]},
+ hd(Tokens)}};
+ {asn1_error,Reason} ->
+ {error,{Reason,hd(Tokens)}};
+ {ModuleDefinition,Rest1} ->
+ {Types,Rest2} = parse_AssignmentList(Rest1),
+ case Rest2 of
+ [{'END',_}|_Rest3] ->
+ {ok,ModuleDefinition#module{typeorval = Types}};
+ _ ->
+ {error,{{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'END']},
+ hd(Rest2)}}
+ end
+ end.
+
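+%% parse_ModuleDefinition/1 consumes the module header: the module name, an
+%% optional object identifier, the DEFINITIONS keyword, the tag default
+%% (EXPLICIT/IMPLICIT/AUTOMATIC TAGS, defaulting to EXPLICIT), an optional
+%% EXTENSIBILITY IMPLIED, and finally "::= BEGIN" followed by the EXPORTS
+%% and IMPORTS sections.  It returns a #module record and the remaining
+%% tokens; the module name and tag default are also stored in the process
+%% dictionary for use by the rest of the parser.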
+parse_ModuleDefinition([{typereference,L1,ModuleIdentifier}|Rest0]) ->
+ put(asn1_module,ModuleIdentifier),
+ {_DefinitiveIdentifier,Rest02} =
+ case Rest0 of
+ [{'{',_}|_Rest01] ->
+ parse_ObjectIdentifierValue(Rest0);
+ _ ->
+ {[],Rest0}
+ end,
+ Rest = case Rest02 of
+ [{'DEFINITIONS',_}|Rest03] ->
+ Rest03;
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest02)),get(asn1_module),
+ [got,get_token(hd(Rest02)),
+ expected,'DEFINITIONS']}})
+ end,
+ {TagDefault,Rest2} =
+ case Rest of
+ [{'EXPLICIT',_L3},{'TAGS',_L4}|Rest1] ->
+ put(tagdefault,'EXPLICIT'), {'EXPLICIT',Rest1};
+ [{'IMPLICIT',_L3},{'TAGS',_L4}|Rest1] ->
+ put(tagdefault,'IMPLICIT'), {'IMPLICIT',Rest1};
+ [{'AUTOMATIC',_L3},{'TAGS',_L4}|Rest1] ->
+ put(tagdefault,'AUTOMATIC'), {'AUTOMATIC',Rest1};
+ Rest1 ->
+ put(tagdefault,'EXPLICIT'), {'EXPLICIT',Rest1} % The default
+ end,
+ {ExtensionDefault,Rest3} =
+ case Rest2 of
+ [{'EXTENSIBILITY',_L5}, {'IMPLIED',_L6}|Rest21] ->
+ {'IMPLIED',Rest21};
+ _ -> {false,Rest2}
+ end,
+ case Rest3 of
+ [{'::=',_L7}, {'BEGIN',_L8}|Rest4] ->
+ {Exports, Rest5} = parse_Exports(Rest4),
+ {Imports, Rest6} = parse_Imports(Rest5),
+ {#module{ pos = L1,
+ name = ModuleIdentifier,
+ defid = [], % fix this
+ tagdefault = TagDefault,
+ extensiondefault = ExtensionDefault,
+ exports = Exports,
+ imports = Imports},Rest6};
+ _ -> throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
+ [got,get_token(hd(Rest3)),expected,"::= BEGIN"]}})
+ end;
+parse_ModuleDefinition(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,typereference]}}).
+
+parse_Exports([{'EXPORTS',_L1},{';',_L2}|Rest]) ->
+ {{exports,[]},Rest};
+parse_Exports([{'EXPORTS',_L1}|Rest]) ->
+ {SymbolList,Rest2} = parse_SymbolList(Rest),
+ case Rest2 of
+ [{';',_}|Rest3] ->
+ {{exports,SymbolList},Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,';']}})
+ end;
+parse_Exports(Rest) ->
+ {{exports,all},Rest}.
+
+parse_SymbolList(Tokens) ->
+ parse_SymbolList(Tokens,[]).
+
+parse_SymbolList(Tokens,Acc) ->
+ {Symbol,Rest} = parse_Symbol(Tokens),
+ case Rest of
+ [{',',_L1}|Rest2] ->
+ parse_SymbolList(Rest2,[Symbol|Acc]);
+ Rest2 ->
+ {lists:reverse([Symbol|Acc]),Rest2}
+ end.
+
+parse_Symbol(Tokens) ->
+ parse_Reference(Tokens).
+
+parse_Reference([{typereference,L1,TrefName},{'{',_L2},{'}',_L3}|Rest]) ->
+% {Tref,Rest};
+ {tref2Exttref(L1,TrefName),Rest};
+parse_Reference([Tref1 = {typereference,_,_},{'.',_},Tref2 = {typereference,_,_},
+ {'{',_L2},{'}',_L3}|Rest]) ->
+% {{Tref1,Tref2},Rest};
+ {{tref2Exttref(Tref1),tref2Exttref(Tref2)},Rest};
+parse_Reference([Tref = {typereference,_L1,_TrefName}|Rest]) ->
+ {tref2Exttref(Tref),Rest};
+parse_Reference([Vref = {identifier,_L1,_VName}|Rest]) ->
+ {identifier2Extvalueref(Vref),Rest};
+parse_Reference(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ [typereference,identifier]]}}).
+
+parse_Imports([{'IMPORTS',_L1},{';',_L2}|Rest]) ->
+ {{imports,[]},Rest};
+parse_Imports([{'IMPORTS',_L1}|Rest]) ->
+ {SymbolsFromModuleList,Rest2} = parse_SymbolsFromModuleList(Rest),
+ case Rest2 of
+ [{';',_L2}|Rest3] ->
+ {{imports,SymbolsFromModuleList},Rest3};
+ Rest3 ->
+ throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
+ [got,get_token(hd(Rest3)),expected,';']}})
+ end;
+parse_Imports(Tokens) ->
+ {{imports,[]},Tokens}.
+
+parse_SymbolsFromModuleList(Tokens) ->
+ parse_SymbolsFromModuleList(Tokens,[]).
+
+parse_SymbolsFromModuleList(Tokens,Acc) ->
+ {SymbolsFromModule,Rest} = parse_SymbolsFromModule(Tokens),
+ case (catch parse_SymbolsFromModule(Rest)) of
+ {Sl,_Rest2} when record(Sl,'SymbolsFromModule') ->
+ parse_SymbolsFromModuleList(Rest,[SymbolsFromModule|Acc]);
+ _ ->
+ {lists:reverse([SymbolsFromModule|Acc]),Rest}
+ end.
+
+parse_SymbolsFromModule(Tokens) ->
+ SetRefModuleName =
+ fun(N) ->
+ fun(X) when record(X,'Externaltypereference')->
+ X#'Externaltypereference'{module=N};
+ (X) when record(X,'Externalvaluereference')->
+ X#'Externalvaluereference'{module=N}
+ end
+ end,
+ {SymbolList,Rest} = parse_SymbolList(Tokens),
+ case Rest of
+ %% How does this case correspond to X.680?
+ [{'FROM',_L1},Tref = {typereference,_,_},Ref={identifier,_L2,_Id},C={',',_}|Rest2] ->
+ {#'SymbolsFromModule'{symbols=SymbolList,
+ module=tref2Exttref(Tref)},[Ref,C|Rest2]};
+ %% How does this case correspond to X.680?
+ [{'FROM',_L1},Tref = {typereference,_,_},{identifier,_L2,_Id}|Rest2] ->
+ {#'SymbolsFromModule'{symbols=SymbolList,
+ module=tref2Exttref(Tref)},Rest2};
+ [{'FROM',_L1},Tref = {typereference,_,Name},Brace = {'{',_}|Rest2] ->
+ {_ObjIdVal,Rest3} = parse_ObjectIdentifierValue([Brace|Rest2]), % value not used yet, fix me
+ NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList),
+ {#'SymbolsFromModule'{symbols=NewSymbolList,
+ module=tref2Exttref(Tref)},Rest3};
+ [{'FROM',_L1},Tref = {typereference,_,Name}|Rest2] ->
+ NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList),
+ {#'SymbolsFromModule'{symbols=NewSymbolList,
+ module=tref2Exttref(Tref)},Rest2};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
+ [got,get_token(hd(Rest)),expected,
+ ['FROM typereference identifier ,',
+ 'FROM typereference identifier',
+ 'FROM typereference {',
+ 'FROM typereference']]}})
+ end.
+
+parse_ObjectIdentifierValue([{'{',_}|Rest]) ->
+ parse_ObjectIdentifierValue(Rest,[]).
+
+parse_ObjectIdentifierValue([{number,_,Num}|Rest],Acc) ->
+ parse_ObjectIdentifierValue(Rest,[Num|Acc]);
+parse_ObjectIdentifierValue([{identifier,_,Id},{'(',_}, {number,_,Num}, {')',_}|Rest],Acc) ->
+ parse_ObjectIdentifierValue(Rest,[{'NamedNumber',Id,Num}|Acc]);
+parse_ObjectIdentifierValue([{identifier,_,Id},{'(',_}, {identifier,_,Id2}, {')',_}|Rest],Acc) ->
+ parse_ObjectIdentifierValue(Rest,[{'NamedNumber',Id,Id2}|Acc]);
+parse_ObjectIdentifierValue([{identifier,_,Id},{'(',_}, {typereference,_,Tref},{'.',_},{identifier,_,Id2}, {')',_}|Rest],Acc) ->
+ parse_ObjectIdentifierValue(Rest,[{'NamedNumber',Id,{'ExternalValue',Tref,Id2}}|Acc]);
+parse_ObjectIdentifierValue([Id = {identifier,_,_}|Rest],Acc) ->
+ parse_ObjectIdentifierValue(Rest,[identifier2Extvalueref(Id)|Acc]);
+parse_ObjectIdentifierValue([{'}',_}|Rest],Acc) ->
+ {lists:reverse(Acc),Rest};
+parse_ObjectIdentifierValue([H|_T],_Acc) ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,
+ ['{ some of the following }',number,'identifier ( number )',
+ 'identifier ( identifier )',
+ 'identifier ( typereference.identifier)',identifier]]}}).
+
+parse_AssignmentList(Tokens = [{'END',_}|_Rest]) ->
+ {[],Tokens};
+parse_AssignmentList(Tokens = [{'$end',_}|_Rest]) ->
+ {[],Tokens};
+parse_AssignmentList(Tokens) ->
+ parse_AssignmentList(Tokens,[]).
+
+parse_AssignmentList(Tokens= [{'END',_}|_Rest],Acc) ->
+ {lists:reverse(Acc),Tokens};
+parse_AssignmentList(Tokens= [{'$end',_}|_Rest],Acc) ->
+ {lists:reverse(Acc),Tokens};
+parse_AssignmentList(Tokens,Acc) ->
+ case (catch parse_Assignment(Tokens)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ {asn1_error,R} ->
+% [H|T] = Tokens,
+ throw({error,{R,hd(Tokens)}});
+ {Assignment,Rest} ->
+ parse_AssignmentList(Rest,[Assignment|Acc])
+ end.
+
+parse_Assignment(Tokens) ->
+ Flist = [fun parse_TypeAssignment/1,
+ fun parse_ValueAssignment/1,
+ fun parse_ObjectClassAssignment/1,
+ fun parse_ObjectAssignment/1,
+ fun parse_ObjectSetAssignment/1,
+ fun parse_ParameterizedAssignment/1,
+ fun parse_ValueSetTypeAssignment/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ {asn1_assignment_error,Reason} ->
+ throw({asn1_error,Reason});
+ Result ->
+ Result
+ end.
+
+
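+%% parse_or/2 tries each parse function in Flist on the same token list.
+%% The first alternative that returns {ParseResult,RemainingTokens}, where
+%% the remaining tokens form a list, wins.  Exits and asn1_error/
+%% asn1_assignment_error throws are collected and, if every alternative
+%% fails, the most relevant error is rethrown via prioritize_error/1.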
+parse_or(Tokens,Flist) ->
+ parse_or(Tokens,Flist,[]).
+
+parse_or(_Tokens,[],ErrList) ->
+ case ErrList of
+ [] ->
+ throw({asn1_error,{parse_or,ErrList}});
+ L when list(L) ->
+%%% throw({asn1_error,{parse_or,hd(lists:reverse(ErrList))}});
+ %% choose to throw 1) the error with the highest line number,
+ %% 2) the last error which is not an asn1_assignment_error, or
+ %% 3) the last error.
+ throw(prioritize_error(ErrList));
+ Other ->
+ throw({asn1_error,{parse_or,Other}})
+ end;
+parse_or(Tokens,[Fun|Frest],ErrList) ->
+ case (catch Fun(Tokens)) of
+ Exit = {'EXIT',_Reason} ->
+ parse_or(Tokens,Frest,[Exit|ErrList]);
+ AsnErr = {asn1_error,_} ->
+ parse_or(Tokens,Frest,[AsnErr|ErrList]);
+ AsnAssErr = {asn1_assignment_error,_} ->
+ parse_or(Tokens,Frest,[AsnAssErr|ErrList]);
+ Result = {_,L} when list(L) ->
+ Result;
+% Result ->
+% Result
+ Error ->
+ parse_or(Tokens,Frest,[Error|ErrList])
+ end.
+
+parse_TypeAssignment([{typereference,L1,Tref},{'::=',_}|Rest]) ->
+ {Type,Rest2} = parse_Type(Rest),
+ {#typedef{pos=L1,name=Tref,typespec=Type},Rest2};
+parse_TypeAssignment([H1,H2|_Rest]) ->
+ throw({asn1_assignment_error,{get_line(H1),get(asn1_module),
+ [got,[get_token(H1),get_token(H2)], expected,
+ typereference,'::=']}});
+parse_TypeAssignment([H|_T]) ->
+ throw({asn1_assignment_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,
+ typereference]}}).
+
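+%% parse_Type/1 parses an optional tag (normalising it with IMPLICIT,
+%% EXPLICIT or the module's tag default), then tries the builtin,
+%% referenced and constrained type notations, and finally any trailing
+%% constraints.  The result is always a #type record with the tag and the
+%% merged constraints filled in.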
+parse_Type(Tokens) ->
+ {Tag,Rest3} = case Tokens of
+ [Lbr= {'[',_}|Rest] ->
+ parse_Tag([Lbr|Rest]);
+ Rest-> {[],Rest}
+ end,
+ {Tag2,Rest4} = case Rest3 of
+ [{'IMPLICIT',_}|Rest31] when record(Tag,tag)->
+ {[Tag#tag{type='IMPLICIT'}],Rest31};
+ [{'EXPLICIT',_}|Rest31] when record(Tag,tag)->
+ {[Tag#tag{type='EXPLICIT'}],Rest31};
+ Rest31 when record(Tag,tag) ->
+ {[Tag#tag{type={default,get(tagdefault)}}],Rest31};
+ Rest31 ->
+ {Tag,Rest31}
+ end,
+ Flist = [fun parse_BuiltinType/1,fun parse_ReferencedType/1,fun parse_TypeWithConstraint/1],
+ {Type,Rest5} = case (catch parse_or(Rest4,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_Reason} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end,
+ case hd(Rest5) of
+ {'(',_} ->
+ {Constraints,Rest6} = parse_Constraints(Rest5),
+ if record(Type,type) ->
+ {Type#type{constraint=merge_constraints(Constraints),
+ tag=Tag2},Rest6};
+ true ->
+ {#type{def=Type,constraint=merge_constraints(Constraints),
+ tag=Tag2},Rest6}
+ end;
+ _ ->
+ if record(Type,type) ->
+ {Type#type{tag=Tag2},Rest5};
+ true ->
+ {#type{def=Type,tag=Tag2},Rest5}
+ end
+ end.
+
+parse_BuiltinType([{'BIT',_},{'STRING',_}|Rest]) ->
+ case Rest of
+ [{'{',_}|Rest2] ->
+ {NamedNumberList,Rest3} = parse_NamedNumberList(Rest2),
+ case Rest3 of
+ [{'}',_}|Rest4] ->
+ {#type{def={'BIT STRING',NamedNumberList}},Rest4};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
+ [got,get_token(hd(Rest3)),expected,'}']}})
+ end;
+ _ ->
+ {{'BIT STRING',[]},Rest}
+ end;
+parse_BuiltinType([{'BOOLEAN',_}|Rest]) ->
+ {#type{def='BOOLEAN'},Rest};
+%% CharacterStringType ::= RestrictedCharacterStringType |
+%% UnrestrictedCharacterStringType
+parse_BuiltinType([{restrictedcharacterstringtype,_,StringName}|Rest]) ->
+ {#type{def=StringName},Rest};
+parse_BuiltinType([{'CHARACTER',_},{'STRING',_}|Rest]) ->
+ {#type{def='CHARACTER STRING'},Rest};
+
+parse_BuiltinType([{'CHOICE',_},{'{',_}|Rest]) ->
+ {AlternativeTypeLists,Rest2} = parse_AlternativeTypeLists(Rest),
+ case Rest2 of
+ [{'}',_}|Rest3] ->
+ {#type{def={'CHOICE',AlternativeTypeLists}},Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'}']}})
+ end;
+parse_BuiltinType([{'EMBEDDED',_},{'PDV',_}|Rest]) ->
+ {#type{def='EMBEDDED PDV'},Rest};
+parse_BuiltinType([{'ENUMERATED',_},{'{',_}|Rest]) ->
+ {Enumerations,Rest2} = parse_Enumerations(Rest),
+ case Rest2 of
+ [{'}',_}|Rest3] ->
+ {#type{def={'ENUMERATED',Enumerations}},Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'}']}})
+ end;
+parse_BuiltinType([{'EXTERNAL',_}|Rest]) ->
+ {#type{def='EXTERNAL'},Rest};
+
+% InstanceOfType
+parse_BuiltinType([{'INSTANCE',_},{'OF',_}|Rest]) ->
+ {DefinedObjectClass,Rest2} = parse_DefinedObjectClass(Rest),
+ case Rest2 of
+ [{'(',_}|_] ->
+ {Constraint,Rest3} = parse_Constraint(Rest2),
+ {#type{def={'INSTANCE OF',DefinedObjectClass,Constraint}},Rest3};
+ _ ->
+ {#type{def={'INSTANCE OF',DefinedObjectClass,[]}},Rest2}
+ end;
+
+% parse_BuiltinType(Tokens) ->
+
+parse_BuiltinType([{'INTEGER',_}|Rest]) ->
+ case Rest of
+ [{'{',_}|Rest2] ->
+ {NamedNumberList,Rest3} = parse_NamedNumberList(Rest2),
+ case Rest3 of
+ [{'}',_}|Rest4] ->
+ {#type{def={'INTEGER',NamedNumberList}},Rest4};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
+ [got,get_token(hd(Rest3)),expected,'}']}})
+ end;
+ _ ->
+ {#type{def='INTEGER'},Rest}
+ end;
+parse_BuiltinType([{'NULL',_}|Rest]) ->
+ {#type{def='NULL'},Rest};
+
+% ObjectClassFieldType fix me later
+
+parse_BuiltinType([{'OBJECT',_},{'IDENTIFIER',_}|Rest]) ->
+ {#type{def='OBJECT IDENTIFIER'},Rest};
+parse_BuiltinType([{'OCTET',_},{'STRING',_}|Rest]) ->
+ {#type{def='OCTET STRING'},Rest};
+parse_BuiltinType([{'REAL',_}|Rest]) ->
+ {#type{def='REAL'},Rest};
+parse_BuiltinType([{'SEQUENCE',_},{'{',_},{'...',Line},{'}',_}|Rest]) ->
+ {#type{def=#'SEQUENCE'{components=[{'EXTENSIONMARK',Line,undefined}]}},
+ Rest};
+parse_BuiltinType([{'SEQUENCE',_},{'{',_},{'...',Line},{'!',_}|Rest]) ->
+ {ExceptionIdentification,Rest2} = parse_ExceptionIdentification(Rest),
+ case Rest2 of
+ [{'}',_}|Rest3] ->
+ {#type{def=#'SEQUENCE'{components=[{'EXTENSIONMARK',
+ Line,
+ ExceptionIdentification}]}},
+ Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'}']}})
+ end;
+parse_BuiltinType([{'SEQUENCE',_},{'{',_}|Rest]) ->
+ {ComponentTypeLists,Rest2} = parse_ComponentTypeLists(Rest),
+ case Rest2 of
+ [{'}',_}|Rest3] ->
+ {#type{def=#'SEQUENCE'{components=ComponentTypeLists}},Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'}']}})
+ end;
+parse_BuiltinType([{'SEQUENCE',_},{'OF',_}|Rest]) ->
+ {Type,Rest2} = parse_Type(Rest),
+ {#type{def={'SEQUENCE OF',Type}},Rest2};
+
+
+parse_BuiltinType([{'SET',_},{'{',_},{'...',Line},{'}',_}|Rest]) ->
+ {#type{def=#'SET'{components=[{'EXTENSIONMARK',Line,undefined}]}},Rest};
+parse_BuiltinType([{'SET',_},{'{',_},{'...',Line},{'!',_}|Rest]) ->
+ {ExceptionIdentification,Rest2} = parse_ExceptionIdentification(Rest),
+ case Rest2 of
+ [{'}',_}|Rest3] ->
+ {#type{def=#'SET'{components=
+ [{'EXTENSIONMARK',Line,ExceptionIdentification}]}},
+ Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'}']}})
+ end;
+parse_BuiltinType([{'SET',_},{'{',_}|Rest]) ->
+ {ComponentTypeLists,Rest2} = parse_ComponentTypeLists(Rest),
+ case Rest2 of
+ [{'}',_}|Rest3] ->
+ {#type{def=#'SET'{components=ComponentTypeLists}},Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'}']}})
+ end;
+parse_BuiltinType([{'SET',_},{'OF',_}|Rest]) ->
+ {Type,Rest2} = parse_Type(Rest),
+ {#type{def={'SET OF',Type}},Rest2};
+
+%% The so called Useful types
+parse_BuiltinType([{'GeneralizedTime',_}|Rest]) ->
+ {#type{def='GeneralizedTime'},Rest};
+parse_BuiltinType([{'UTCTime',_}|Rest]) ->
+ {#type{def='UTCTime'},Rest};
+parse_BuiltinType([{'ObjectDescriptor',_}|Rest]) ->
+ {#type{def='ObjectDescriptor'},Rest};
+
+%% For compatibility with old standard
+parse_BuiltinType([{'ANY',_},{'DEFINED',_},{'BY',_},{identifier,_,Id}|Rest]) ->
+ {#type{def={'ANY_DEFINED_BY',Id}},Rest};
+parse_BuiltinType([{'ANY',_}|Rest]) ->
+ {#type{def='ANY'},Rest};
+
+parse_BuiltinType(Tokens) ->
+ parse_ObjectClassFieldType(Tokens).
+% throw({asn1_error,unhandled_type}).
+
+
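+%% parse_TypeWithConstraint/1 handles the SET/SEQUENCE forms where a
+%% constraint appears before OF, e.g. "SEQUENCE SIZE (1..4) OF INTEGER".
+%% The SIZE variants wrap the parsed constraint in a 'SizeConstraint'
+%% tuple before merging it into the resulting 'SET OF'/'SEQUENCE OF' type.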
+parse_TypeWithConstraint([{'SEQUENCE',_},Lpar = {'(',_}|Rest]) ->
+ {Constraint,Rest2} = parse_Constraint([Lpar|Rest]),
+ case Rest2 of
+ [{'OF',_}|Rest3] ->
+ {Type,Rest4} = parse_Type(Rest3),
+ {#type{def = {'SEQUENCE OF',Type}, constraint = merge_constraints([Constraint])},Rest4};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'OF']}})
+ end;
+parse_TypeWithConstraint([{'SEQUENCE',_},{'SIZE',_},Lpar = {'(',_}|Rest]) ->
+ {Constraint,Rest2} = parse_Constraint([Lpar|Rest]),
+ Constraint2 =
+ case Constraint of
+ #constraint{c=C} ->
+ Constraint#constraint{c={'SizeConstraint',C}};
+ _ -> Constraint
+ end,
+ case Rest2 of
+ [{'OF',_}|Rest3] ->
+ {Type,Rest4} = parse_Type(Rest3),
+ {#type{def = {'SEQUENCE OF',Type}, constraint = merge_constraints([Constraint2])},Rest4};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'OF']}})
+ end;
+parse_TypeWithConstraint([{'SET',_},Lpar = {'(',_}|Rest]) ->
+ {Constraint,Rest2} = parse_Constraint([Lpar|Rest]),
+ case Rest2 of
+ [{'OF',_}|Rest3] ->
+ {Type,Rest4} = parse_Type(Rest3),
+ {#type{def = {'SET OF',Type}, constraint = merge_constraints([Constraint])},Rest4};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'OF']}})
+ end;
+parse_TypeWithConstraint([{'SET',_},{'SIZE',_},Lpar = {'(',_}|Rest]) ->
+ {Constraint,Rest2} = parse_Constraint([Lpar|Rest]),
+ Constraint2 =
+ case Constraint of
+ #constraint{c=C} ->
+ Constraint#constraint{c={'SizeConstraint',C}};
+ _ -> Constraint
+ end,
+ case Rest2 of
+ [{'OF',_}|Rest3] ->
+ {Type,Rest4} = parse_Type(Rest3),
+ {#type{def = {'SET OF',Type}, constraint = merge_constraints([Constraint2])},Rest4};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'OF']}})
+ end;
+parse_TypeWithConstraint(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ ['SEQUENCE','SEQUENCE SIZE','SET','SET SIZE'],
+ followed,by,a,constraint]}}).
+
+
+%% --------------------------
+
+parse_ReferencedType(Tokens) ->
+ Flist = [fun parse_DefinedType/1,
+ fun parse_SelectionType/1,
+ fun parse_TypeFromObject/1,
+ fun parse_ValueSetFromObjects/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_DefinedType(Tokens=[{typereference,_,_},{'{',_}|_Rest]) ->
+ parse_ParameterizedType(Tokens);
+parse_DefinedType(Tokens=[{typereference,L1,TypeName},
+ T2={typereference,_,_},T3={'{',_}|Rest]) ->
+ case (catch parse_ParameterizedType(Tokens)) of
+ {'EXIT',_Reason} ->
+ Rest2 = [T2,T3|Rest],
+ {#type{def = #'Externaltypereference'{pos=L1,
+ module=get(asn1_module),
+ type=TypeName}},Rest2};
+ {asn1_error,_} ->
+ Rest2 = [T2,T3|Rest],
+ {#type{def = #'Externaltypereference'{pos=L1,
+ module=get(asn1_module),
+ type=TypeName}},Rest2};
+ Result ->
+ Result
+ end;
+parse_DefinedType([{typereference,L1,Module},{'.',_},{typereference,_,TypeName}|Rest]) ->
+ {#type{def = #'Externaltypereference'{pos=L1,module=Module,type=TypeName}},Rest};
+parse_DefinedType([{typereference,L1,TypeName}|Rest]) ->
+ {#type{def = #'Externaltypereference'{pos=L1,module=get(asn1_module),
+ type=TypeName}},Rest};
+parse_DefinedType(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ [typereference,'typereference.typereference',
+ 'typereference typereference']]}}).
+
+parse_SelectionType([{identifier,_,Name},{'<',_}|Rest]) ->
+ {Type,Rest2} = parse_Type(Rest),
+ {{'SelectionType',Name,Type},Rest2};
+parse_SelectionType(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'identifier <']}}).
+
+
+%% --------------------------
+
+
+%% This should probably be removed very soon
+% parse_ConstrainedType(Tokens) ->
+% case (catch parse_TypeWithConstraint(Tokens)) of
+% {'EXIT',Reason} ->
+% {Type,Rest} = parse_Type(Tokens),
+% {Constraint,Rest2} = parse_Constraint(Rest),
+% {Type#type{constraint=Constraint},Rest2};
+% {asn1_error,Reason2} ->
+% {Type,Rest} = parse_Type(Tokens),
+% {Constraint,Rest2} = parse_Constraint(Rest),
+% {Type#type{constraint=Constraint},Rest2};
+% Result ->
+% Result
+% end.
+
+parse_Constraints(Tokens) ->
+ parse_Constraints(Tokens,[]).
+
+parse_Constraints(Tokens,Acc) ->
+ {Constraint,Rest} = parse_Constraint(Tokens),
+ case Rest of
+ [{'(',_}|_Rest2] ->
+ parse_Constraints(Rest,[Constraint|Acc]);
+ _ ->
+ {lists:reverse([Constraint|Acc]),Rest}
+ end.
+
+parse_Constraint([{'(',_}|Rest]) ->
+ {Constraint,Rest2} = parse_ConstraintSpec(Rest),
+ {Exception,Rest3} = parse_ExceptionSpec(Rest2),
+ case Rest3 of
+ [{')',_}|Rest4] ->
+ {#constraint{c=Constraint,e=Exception},Rest4};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,')']}})
+ end;
+parse_Constraint(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'(']}}).
+
+parse_ConstraintSpec(Tokens) ->
+ Flist = [fun parse_GeneralConstraint/1,
+ fun parse_SubtypeConstraint/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ {asn1_error,Reason2} ->
+ throw({asn1_error,Reason2});
+ Result ->
+ Result
+ end.
+
+parse_ExceptionSpec([LPar={')',_}|Rest]) ->
+ {undefined,[LPar|Rest]};
+parse_ExceptionSpec([{'!',_}|Rest]) ->
+ parse_ExceptionIdentification(Rest);
+parse_ExceptionSpec(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,[')','!']]}}).
+
+parse_ExceptionIdentification(Tokens) ->
+ Flist = [fun parse_SignedNumber/1,
+ fun parse_DefinedValue/1,
+ fun parse_TypeColonValue/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ {asn1_error,Reason2} ->
+ throw({asn1_error,Reason2});
+ Result ->
+ Result
+ end.
+
+parse_TypeColonValue(Tokens) ->
+ {Type,Rest} = parse_Type(Tokens),
+ case Rest of
+ [{':',_}|Rest2] ->
+ {Value,Rest3} = parse_Value(Rest2),
+ {{Type,Value},Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,':']}})
+ end.
+
+parse_SubtypeConstraint(Tokens) ->
+ parse_ElementSetSpecs(Tokens).
+
+parse_ElementSetSpecs([{'...',_}|Rest]) ->
+ {Elements,Rest2} = parse_ElementSetSpec(Rest),
+ {{[],Elements},Rest2};
+parse_ElementSetSpecs(Tokens) ->
+ {RootElems,Rest} = parse_ElementSetSpec(Tokens),
+ case Rest of
+ [{',',_},{'...',_},{',',_}|Rest2] ->
+ {AdditionalElems,Rest3} = parse_ElementSetSpec(Rest2),
+ {{RootElems,AdditionalElems},Rest3};
+ [{',',_},{'...',_}|Rest2] ->
+ {{RootElems,[]},Rest2};
+ _ ->
+ {RootElems,Rest}
+ end.
+
+parse_ElementSetSpec([{'ALL',_},{'EXCEPT',_}|Rest]) ->
+ {Exclusions,Rest2} = parse_Elements(Rest),
+ {{'ALL',{'EXCEPT',Exclusions}},Rest2};
+parse_ElementSetSpec(Tokens) ->
+ parse_Unions(Tokens).
+
+
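+%% parse_Unions/1 and the helpers below build up constraint element sets
+%% joined by '|'/UNION and '^'/INTERSECTION.  When both operands are
+%% 'SingleValue' constraints they are folded into one 'SingleValue' using
+%% ordsets; otherwise the operands are kept in a list with the atoms
+%% 'union' or 'intersection' marking the operator.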
+parse_Unions(Tokens) ->
+ {InterSec,Rest} = parse_Intersections(Tokens),
+ {Unions,Rest2} = parse_UnionsRec(Rest),
+ case {InterSec,Unions} of
+ {InterSec,[]} ->
+ {InterSec,Rest2};
+ {{'SingleValue',V1},{'SingleValue',V2}} ->
+ {{'SingleValue',ordsets:union(to_set(V1),to_set(V2))},Rest2};
+ {V1,V2} when list(V2) ->
+ {[V1] ++ [union|V2],Rest2};
+ {V1,V2} ->
+ {[V1,union,V2],Rest2}
+% Other ->
+% throw(Other)
+ end.
+
+parse_UnionsRec([{'|',_}|Rest]) ->
+ {InterSec,Rest2} = parse_Intersections(Rest),
+ {URec,Rest3} = parse_UnionsRec(Rest2),
+ case {InterSec,URec} of
+ {V1,[]} ->
+ {V1,Rest3};
+ {{'SingleValue',V1},{'SingleValue',V2}} ->
+ {{'SingleValue',ordsets:union(to_set(V1),to_set(V2))},Rest3};
+ {V1,V2} when list(V2) ->
+ {[V1] ++ V2,Rest3};
+ {V1,V2} ->
+ {[V1,V2],Rest3}
+ end;
+parse_UnionsRec([{'UNION',_}|Rest]) ->
+ {InterSec,Rest2} = parse_Intersections(Rest),
+ {URec,Rest3} = parse_UnionsRec(Rest2),
+ case {InterSec,URec} of
+ {V1,[]} ->
+ {V1,Rest3};
+ {{'SingleValue',V1},{'SingleValue',V2}} ->
+ {{'SingleValue',ordsets:union(to_set(V1),to_set(V2))},Rest3};
+ {V1,V2} when list(V2) ->
+ {[V1] ++ V2,Rest3};
+ {V1,V2} ->
+ {[V1,V2],Rest3}
+ end;
+parse_UnionsRec(Tokens) ->
+ {[],Tokens}.
+
+parse_Intersections(Tokens) ->
+ {InterSec,Rest} = parse_IntersectionElements(Tokens),
+ {IRec,Rest2} = parse_IElemsRec(Rest),
+ case {InterSec,IRec} of
+ {V1,[]} ->
+ {V1,Rest2};
+ {{'SingleValue',V1},{'SingleValue',V2}} ->
+ {{'SingleValue',
+ ordsets:intersection(to_set(V1),to_set(V2))},Rest2};
+ {V1,V2} when list(V2) ->
+ {[V1] ++ [intersection|V2],Rest2};
+ {V1,V2} ->
+ {[V1,intersection,V2],Rest2};
+ _ ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'a Union']}})
+ end.
+
+parse_IElemsRec([{'^',_}|Rest]) ->
+ {InterSec,Rest2} = parse_IntersectionElements(Rest),
+ {IRec,Rest3} = parse_IElemsRec(Rest2),
+ case {InterSec,IRec} of
+ {{'SingleValue',V1},{'SingleValue',V2}} ->
+ {{'SingleValue',
+ ordsets:intersection(to_set(V1),to_set(V2))},Rest3};
+ {V1,[]} ->
+ {V1,Rest3};
+ {V1,V2} when list(V2) ->
+ {[V1] ++ V2,Rest3};
+ {V1,V2} ->
+ {[V1,V2],Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
+ [got,get_token(hd(Rest)),expected,'an Intersection']}})
+ end;
+parse_IElemsRec([{'INTERSECTION',_}|Rest]) ->
+ {InterSec,Rest2} = parse_IntersectionElements(Rest),
+ {IRec,Rest3} = parse_IElemsRec(Rest2),
+ case {InterSec,IRec} of
+ {{'SingleValue',V1},{'SingleValue',V2}} ->
+ {{'SingleValue',
+ ordsets:intersection(to_set(V1),to_set(V2))},Rest3};
+ {V1,[]} ->
+ {V1,Rest3};
+ {V1,V2} when list(V2) ->
+ {[V1] ++ V2,Rest3};
+ {V1,V2} ->
+ {[V1,V2],Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
+ [got,get_token(hd(Rest)),expected,'an Intersection']}})
+ end;
+parse_IElemsRec(Tokens) ->
+ {[],Tokens}.
+
+parse_IntersectionElements(Tokens) ->
+ {InterSec,Rest} = parse_Elements(Tokens),
+ case Rest of
+ [{'EXCEPT',_}|Rest2] ->
+ {Exclusion,Rest3} = parse_Elements(Rest2),
+ {{InterSec,{'EXCEPT',Exclusion}},Rest3};
+ Rest ->
+ {InterSec,Rest}
+ end.
+
+parse_Elements([{'(',_}|Rest]) ->
+ {Elems,Rest2} = parse_ElementSetSpec(Rest),
+ case Rest2 of
+ [{')',_}|Rest3] ->
+ {Elems,Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,')']}})
+ end;
+parse_Elements(Tokens) ->
+ Flist = [fun parse_SubtypeElements/1,
+ fun parse_ObjectSetElements/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ Err = {asn1_error,_} ->
+ throw(Err);
+ Result ->
+ Result
+ end.
+
+
+
+
+%% --------------------------
+
+parse_DefinedObjectClass([{typereference,_,_ModName},{'.',_},Tr={typereference,_,_ObjClName}|Rest]) ->
+%% {{objectclassname,ModName,ObjClName},Rest};
+% {{objectclassname,tref2Exttref(Tr)},Rest};
+ {tref2Exttref(Tr),Rest};
+parse_DefinedObjectClass([Tr={typereference,_,_ObjClName}|Rest]) ->
+% {{objectclassname,tref2Exttref(Tr)},Rest};
+ {tref2Exttref(Tr),Rest};
+parse_DefinedObjectClass([{'TYPE-IDENTIFIER',_}|Rest]) ->
+ {'TYPE-IDENTIFIER',Rest};
+parse_DefinedObjectClass([{'ABSTRACT-SYNTAX',_}|Rest]) ->
+ {'ABSTRACT-SYNTAX',Rest};
+parse_DefinedObjectClass(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ ['typereference . typereference',
+ typereference,
+ 'TYPE-IDENTIFIER',
+ 'ABSTRACT-SYNTAX']]}}).
+
+parse_ObjectClassAssignment([{typereference,L1,ObjClName},{'::=',_}|Rest]) ->
+ {Type,Rest2} = parse_ObjectClass(Rest),
+ {#classdef{pos=L1,name=ObjClName,typespec=Type},Rest2};
+parse_ObjectClassAssignment(Tokens) ->
+ throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ 'typereference ::=']}}).
+
+parse_ObjectClass(Tokens) ->
+ Flist = [fun parse_DefinedObjectClass/1,
+ fun parse_ObjectClassDefn/1,
+ fun parse_ParameterizedObjectClass/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ {asn1_error,Reason2} ->
+ throw({asn1_error,Reason2});
+ Result ->
+ Result
+ end.
+
+parse_ObjectClassDefn([{'CLASS',_},{'{',_}|Rest]) ->
+ {Type,Rest2} = parse_FieldSpec(Rest),
+ {WithSyntaxSpec,Rest3} = parse_WithSyntaxSpec(Rest2),
+ {#objectclass{fields=Type,syntax=WithSyntaxSpec},Rest3};
+parse_ObjectClassDefn(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'CLASS {']}}).
+
+parse_FieldSpec(Tokens) ->
+ parse_FieldSpec(Tokens,[]).
+
+parse_FieldSpec(Tokens,Acc) ->
+ Flist = [fun parse_FixedTypeValueFieldSpec/1,
+ fun parse_VariableTypeValueFieldSpec/1,
+ fun parse_ObjectFieldSpec/1,
+ fun parse_FixedTypeValueSetFieldSpec/1,
+ fun parse_VariableTypeValueSetFieldSpec/1,
+ fun parse_TypeFieldSpec/1,
+ fun parse_ObjectSetFieldSpec/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ {Type,[{'}',_}|Rest]} ->
+ {lists:reverse([Type|Acc]),Rest};
+ {Type,[{',',_}|Rest2]} ->
+ parse_FieldSpec(Rest2,[Type|Acc]);
+ {_,[H|_T]} ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'}']}})
+ end.
+
+parse_PrimitiveFieldName([{typefieldreference,_,FieldName}|Rest]) ->
+ {{typefieldreference,FieldName},Rest};
+parse_PrimitiveFieldName([{valuefieldreference,_,FieldName}|Rest]) ->
+ {{valuefieldreference,FieldName},Rest};
+parse_PrimitiveFieldName(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ [typefieldreference,valuefieldreference]]}}).
+
+parse_FieldName(Tokens) ->
+ {Field,Rest} = parse_PrimitiveFieldName(Tokens),
+ parse_FieldName(Rest,[Field]).
+
+parse_FieldName([{'.',_}|Rest],Acc) ->
+ case (catch parse_PrimitiveFieldName(Rest)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ {FieldName,Rest2} ->
+ parse_FieldName(Rest2,[FieldName|Acc])
+ end;
+parse_FieldName(Tokens,Acc) ->
+ {lists:reverse(Acc),Tokens}.
+
+parse_FixedTypeValueFieldSpec([{valuefieldreference,L1,VFieldName}|Rest]) ->
+ {Type,Rest2} = parse_Type(Rest),
+ {Unique,Rest3} =
+ case Rest2 of
+ [{'UNIQUE',_}|Rest4] ->
+ {'UNIQUE',Rest4};
+ _ ->
+ {undefined,Rest2}
+ end,
+ {OptionalitySpec,Rest5} = parse_ValueOptionalitySpec(Rest3),
+ case Unique of
+ 'UNIQUE' ->
+ case OptionalitySpec of
+ {'DEFAULT',_} ->
+ throw({asn1_error,
+ {L1,get(asn1_module),
+ ['UNIQUE and DEFAULT in same field',VFieldName]}});
+ _ ->
+ {{fixedtypevaluefield,VFieldName,Type,Unique,OptionalitySpec},Rest5}
+ end;
+ _ ->
+ {{object_or_fixedtypevalue_field,VFieldName,Type,Unique,OptionalitySpec},Rest5}
+ end;
+parse_FixedTypeValueFieldSpec(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,valuefieldreference]}}).
+
+parse_VariableTypeValueFieldSpec([{valuefieldreference,_,VFieldName}|Rest]) ->
+ {FieldRef,Rest2} = parse_FieldName(Rest),
+ {OptionalitySpec,Rest3} = parse_ValueOptionalitySpec(Rest2),
+ {{variabletypevaluefield,VFieldName,FieldRef,OptionalitySpec},Rest3};
+parse_VariableTypeValueFieldSpec(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,valuefieldreference]}}).
+
+parse_ObjectFieldSpec([{valuefieldreference,_,VFieldName}|Rest]) ->
+ {Class,Rest2} = parse_DefinedObjectClass(Rest),
+ {OptionalitySpec,Rest3} = parse_ObjectOptionalitySpec(Rest2),
+ {{objectfield,VFieldName,Class,OptionalitySpec},Rest3};
+parse_ObjectFieldSpec(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,valuefieldreference]}}).
+
+parse_TypeFieldSpec([{typefieldreference,_,TFieldName}|Rest]) ->
+ {OptionalitySpec,Rest2} = parse_TypeOptionalitySpec(Rest),
+ {{typefield,TFieldName,OptionalitySpec},Rest2};
+parse_TypeFieldSpec(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,typefieldreference]}}).
+
+parse_FixedTypeValueSetFieldSpec([{typefieldreference,_,TFieldName}|Rest]) ->
+ {Type,Rest2} = parse_Type(Rest),
+ {OptionalitySpec,Rest3} = parse_ValueSetOptionalitySpec(Rest2),
+ {{objectset_or_fixedtypevalueset_field,TFieldName,Type,
+ OptionalitySpec},Rest3};
+parse_FixedTypeValueSetFieldSpec(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,typefieldreference]}}).
+
+parse_VariableTypeValueSetFieldSpec([{typefieldreference,_,TFieldName}|Rest]) ->
+ {FieldRef,Rest2} = parse_FieldName(Rest),
+ {OptionalitySpec,Rest3} = parse_ValueSetOptionalitySpec(Rest2),
+ {{variabletypevaluesetfield,TFieldName,FieldRef,OptionalitySpec},Rest3};
+parse_VariableTypeValueSetFieldSpec(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,typefieldreference]}}).
+
+parse_ObjectSetFieldSpec([{typefieldreference,_,TFieldName}|Rest]) ->
+ {Class,Rest2} = parse_DefinedObjectClass(Rest),
+ {OptionalitySpec,Rest3} = parse_ObjectSetOptionalitySpec(Rest2),
+ {{objectsetfield,TFieldName,Class,OptionalitySpec},Rest3};
+parse_ObjectSetFieldSpec(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,typefieldreference]}}).
+
+parse_ValueOptionalitySpec(Tokens)->
+ case Tokens of
+ [{'OPTIONAL',_}|Rest] -> {'OPTIONAL',Rest};
+ [{'DEFAULT',_}|Rest] ->
+ {Value,Rest2} = parse_Value(Rest),
+ {{'DEFAULT',Value},Rest2};
+ _ -> {'MANDATORY',Tokens}
+ end.
+
+parse_ObjectOptionalitySpec(Tokens) ->
+ case Tokens of
+ [{'OPTIONAL',_}|Rest] -> {'OPTIONAL',Rest};
+ [{'DEFAULT',_}|Rest] ->
+ {Object,Rest2} = parse_Object(Rest),
+ {{'DEFAULT',Object},Rest2};
+ _ -> {'MANDATORY',Tokens}
+ end.
+
+parse_TypeOptionalitySpec(Tokens) ->
+ case Tokens of
+ [{'OPTIONAL',_}|Rest] -> {'OPTIONAL',Rest};
+ [{'DEFAULT',_}|Rest] ->
+ {Type,Rest2} = parse_Type(Rest),
+ {{'DEFAULT',Type},Rest2};
+ _ -> {'MANDATORY',Tokens}
+ end.
+
+parse_ValueSetOptionalitySpec(Tokens) ->
+ case Tokens of
+ [{'OPTIONAL',_}|Rest] -> {'OPTIONAL',Rest};
+ [{'DEFAULT',_}|Rest] ->
+ {ValueSet,Rest2} = parse_ValueSet(Rest),
+ {{'DEFAULT',ValueSet},Rest2};
+ _ -> {'MANDATORY',Tokens}
+ end.
+
+parse_ObjectSetOptionalitySpec(Tokens) ->
+ case Tokens of
+ [{'OPTIONAL',_}|Rest] -> {'OPTIONAL',Rest};
+ [{'DEFAULT',_}|Rest] ->
+ {ObjectSet,Rest2} = parse_ObjectSet(Rest),
+ {{'DEFAULT',ObjectSet},Rest2};
+ _ -> {'MANDATORY',Tokens}
+ end.
+
+parse_WithSyntaxSpec([{'WITH',_},{'SYNTAX',_}|Rest]) ->
+ {SyntaxList,Rest2} = parse_SyntaxList(Rest),
+ {{'WITH SYNTAX',SyntaxList},Rest2};
+parse_WithSyntaxSpec(Tokens) ->
+ {[],Tokens}.
+
+parse_SyntaxList([{'{',_},{'}',_}|Rest]) ->
+ {[],Rest};
+parse_SyntaxList([{'{',_}|Rest]) ->
+ parse_SyntaxList(Rest,[]);
+parse_SyntaxList(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,['{}','{']]}}).
+
+parse_SyntaxList(Tokens,Acc) ->
+ {SyntaxList,Rest} = parse_TokenOrGroupSpec(Tokens),
+ case Rest of
+ [{'}',_}|Rest2] ->
+ {lists:reverse([SyntaxList|Acc]),Rest2};
+ _ ->
+ parse_SyntaxList(Rest,[SyntaxList|Acc])
+ end.
+
+parse_TokenOrGroupSpec(Tokens) ->
+ Flist = [fun parse_RequiredToken/1,
+ fun parse_OptionalGroup/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_RequiredToken([{typereference,L1,WordName}|Rest]) ->
+ case is_word(WordName) of
+ false ->
+ throw({asn1_error,{L1,get(asn1_module),
+ [got,WordName,expected,a,'Word']}});
+ true ->
+ {WordName,Rest}
+ end;
+parse_RequiredToken([{',',L1}|Rest]) ->
+ {{',',L1},Rest};
+parse_RequiredToken([{WordName,L1}|Rest]) ->
+ case is_word(WordName) of
+ false ->
+ throw({asn1_error,{L1,get(asn1_module),
+ [got,WordName,expected,a,'Word']}});
+ true ->
+ {WordName,Rest}
+ end;
+parse_RequiredToken(Tokens) ->
+ parse_PrimitiveFieldName(Tokens).
+
+parse_OptionalGroup([{'[',_}|Rest]) ->
+ {Spec,Rest2} = parse_TokenOrGroupSpec(Rest),
+ {SpecList,Rest3} = parse_OptionalGroup(Rest2,[Spec]),
+ {SpecList,Rest3}.
+
+parse_OptionalGroup([{']',_}|Rest],Acc) ->
+ {lists:reverse(Acc),Rest};
+parse_OptionalGroup(Tokens,Acc) ->
+ {Spec,Rest} = parse_TokenOrGroupSpec(Tokens),
+ parse_OptionalGroup(Rest,[Spec|Acc]).
+
+parse_DefinedObject([Id={identifier,_,_ObjName}|Rest]) ->
+ {{object,identifier2Extvalueref(Id)},Rest};
+parse_DefinedObject([{typereference,L1,ModName},{'.',_},{identifier,_,ObjName}|Rest]) ->
+ {{object, #'Externaltypereference'{pos=L1,module=ModName,type=ObjName}},Rest};
+parse_DefinedObject(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ [identifier,'typereference.identifier']]}}).
+
+parse_ObjectAssignment([{identifier,L1,ObjName}|Rest]) ->
+ {Class,Rest2} = parse_DefinedObjectClass(Rest),
+ case Rest2 of
+ [{'::=',_}|Rest3] ->
+ {Object,Rest4} = parse_Object(Rest3),
+ {#typedef{pos=L1,name=ObjName,
+ typespec=#'Object'{classname=Class,def=Object}},Rest4};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'::=']}});
+ Other ->
+ throw({asn1_error,{L1,get(asn1_module),
+ [got,Other,expected,'::=']}})
+ end;
+parse_ObjectAssignment(Tokens) ->
+ throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,identifier]}}).
+
+parse_Object(Tokens) ->
+ Flist=[fun parse_ObjectDefn/1,
+ fun parse_ObjectFromObject/1,
+ fun parse_ParameterizedObject/1,
+ fun parse_DefinedObject/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_ObjectDefn(Tokens) ->
+ Flist=[fun parse_DefaultSyntax/1,
+ fun parse_DefinedSyntax/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
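+%% An information object may be written either in the class's default
+%% syntax, "{ field settings }", or in a user-defined syntax declared with
+%% WITH SYNTAX.  parse_DefaultSyntax/1,2 collects {FieldName,Setting}
+%% pairs, while parse_DefinedSyntax/1,2 below collects the raw words and
+%% settings for later matching against the WITH SYNTAX specification.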
+parse_DefaultSyntax([{'{',_},{'}',_}|Rest]) ->
+ {{object,defaultsyntax,[]},Rest};
+parse_DefaultSyntax([{'{',_}|Rest]) ->
+ parse_DefaultSyntax(Rest,[]);
+parse_DefaultSyntax(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,['{}','{']]}}).
+
+parse_DefaultSyntax(Tokens,Acc) ->
+ {Setting,Rest} = parse_FieldSetting(Tokens),
+ case Rest of
+ [{',',_}|Rest2] ->
+ parse_DefaultSyntax(Rest2,[Setting|Acc]);
+ [{'}',_}|Rest3] ->
+ {{object,defaultsyntax,lists:reverse([Setting|Acc])},Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,[',','}']]}})
+ end.
+
+parse_FieldSetting(Tokens) ->
+ {{_,PrimFieldName},Rest} = parse_PrimitiveFieldName(Tokens),
+ {Setting,Rest2} = parse_Setting(Rest),
+ {{PrimFieldName,Setting},Rest2}.
+
+parse_DefinedSyntax([{'{',_}|Rest]) ->
+ parse_DefinedSyntax(Rest,[]).
+
+parse_DefinedSyntax(Tokens,Acc) ->
+ case Tokens of
+ [{'}',_}|Rest2] ->
+ {{object,definedsyntax,lists:reverse(Acc)},Rest2};
+ _ ->
+ {DefSynTok,Rest3} = parse_DefinedSyntaxToken(Tokens),
+ parse_DefinedSyntax(Rest3,[DefSynTok|Acc])
+ end.
+
+parse_DefinedSyntaxToken([{',',L1}|Rest]) ->
+ {{',',L1},Rest};
+parse_DefinedSyntaxToken([{typereference,L1,Name}|Rest]) ->
+ case is_word(Name) of
+ false ->
+ {{setting,L1,Name},Rest};
+ true ->
+ {{word_or_setting,L1,Name},Rest}
+ end;
+parse_DefinedSyntaxToken(Tokens) ->
+ case catch parse_Setting(Tokens) of
+ {asn1_error,_} ->
+ parse_Word(Tokens);
+ {'EXIT',Reason} ->
+ exit(Reason);
+ Result ->
+ Result
+ end.
+
+parse_Word([{Name,Pos}|Rest]) ->
+ case is_word(Name) of
+ false ->
+ throw({asn1_error,{Pos,get(asn1_module),
+ [got,Name, expected,a,'Word']}});
+ true ->
+ {{word_or_setting,Pos,Name},Rest}
+ end.
+
+parse_Setting(Tokens) ->
+ Flist = [fun parse_Type/1,
+ fun parse_Value/1,
+ fun parse_Object/1,
+ fun parse_ObjectSet/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_DefinedObjectSet([{typereference,L1,ModuleName},{'.',_},
+ {typereference,L2,ObjSetName}|Rest]) ->
+ {{objectset,L1,#'Externaltypereference'{pos=L2,module=ModuleName,
+ type=ObjSetName}},Rest};
+parse_DefinedObjectSet([{typereference,L1,ObjSetName}|Rest]) ->
+ {{objectset,L1,#'Externaltypereference'{pos=L1,module=get(asn1_module),
+ type=ObjSetName}},Rest};
+parse_DefinedObjectSet(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ [typereference,'typereference.typereference']]}}).
+
+parse_ObjectSetAssignment([{typereference,L1,ObjSetName}|Rest]) ->
+ {Class,Rest2} = parse_DefinedObjectClass(Rest),
+ case Rest2 of
+ [{'::=',_}|Rest3] ->
+ {ObjectSet,Rest4} = parse_ObjectSet(Rest3),
+ {#typedef{pos=L1,name=ObjSetName,
+ typespec=#'ObjectSet'{class=Class,
+ set=ObjectSet}},Rest4};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'::=']}})
+%%% Other ->
+%%% throw(Other)
+ end;
+parse_ObjectSetAssignment(Tokens) ->
+ throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ typereference]}}).
+
+parse_ObjectSet([{'{',_}|Rest]) ->
+ {ObjSetSpec,Rest2} = parse_ObjectSetSpec(Rest),
+ case Rest2 of
+ [{'}',_}|Rest3] ->
+ {ObjSetSpec,Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'}']}})
+ end;
+parse_ObjectSet(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'{']}}).
+
+parse_ObjectSetSpec([{'...',_}|Rest]) ->
+ {['EXTENSIONMARK'],Rest};
+parse_ObjectSetSpec(Tokens) ->
+ parse_ElementSetSpecs(Tokens).
+
+parse_ObjectSetElements(Tokens) ->
+ Flist = [fun parse_Object/1,
+ fun parse_DefinedObjectSet/1,
+ fun parse_ObjectSetFromObjects/1,
+ fun parse_ParameterizedObjectSet/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
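+%% parse_ObjectClassFieldType/1 parses a reference to an object class
+%% field, DefinedObjectClass '.' FieldName, into an #'ObjectClassFieldType'
+%% record wrapped in a #type record.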
+parse_ObjectClassFieldType(Tokens) ->
+ {Class,Rest} = parse_DefinedObjectClass(Tokens),
+ case Rest of
+ [{'.',_}|Rest2] ->
+ {FieldName,Rest3} = parse_FieldName(Rest2),
+ OCFT = #'ObjectClassFieldType'{
+ classname=Class,
+ class=Class,fieldname=FieldName},
+ {#type{def=OCFT},Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'.']}})
+%%% Other ->
+%%% throw(Other)
+ end.
+
+%parse_ObjectClassFieldValue(Tokens) ->
+% Flist = [fun parse_OpenTypeFieldVal/1,
+% fun parse_FixedTypeFieldVal/1],
+% case (catch parse_or(Tokens,Flist)) of
+% {'EXIT',Reason} ->
+% throw(Reason);
+% AsnErr = {asn1_error,_} ->
+% throw(AsnErr);
+% Result ->
+% Result
+% end.
+
+parse_ObjectClassFieldValue(Tokens) ->
+ parse_OpenTypeFieldVal(Tokens).
+
+parse_OpenTypeFieldVal(Tokens) ->
+ {Type,Rest} = parse_Type(Tokens),
+ case Rest of
+ [{':',_}|Rest2] ->
+ {Value,Rest3} = parse_Value(Rest2),
+ {{opentypefieldvalue,Type,Value},Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,':']}})
+ end.
+
+% parse_FixedTypeFieldVal(Tokens) ->
+% parse_Value(Tokens).
+
+% parse_InformationFromObjects(Tokens) ->
+% Flist = [fun parse_ValueFromObject/1,
+% fun parse_ValueSetFromObjects/1,
+% fun parse_TypeFromObject/1,
+% fun parse_ObjectFromObject/1],
+% case (catch parse_or(Tokens,Flist)) of
+% {'EXIT',Reason} ->
+% throw(Reason);
+% AsnErr = {asn1_error,_} ->
+% throw(AsnErr);
+% Result ->
+% Result
+% end.
+
+parse_ReferencedObjects(Tokens) ->
+ Flist = [fun parse_DefinedObject/1,
+ fun parse_DefinedObjectSet/1,
+ fun parse_ParameterizedObject/1,
+ fun parse_ParameterizedObjectSet/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_ValueFromObject(Tokens) ->
+ {Objects,Rest} = parse_ReferencedObjects(Tokens),
+ case Rest of
+ [{'.',_}|Rest2] ->
+ {Name,Rest3} = parse_FieldName(Rest2),
+ case lists:last(Name) of
+ {valuefieldreference,_} ->
+ {{'ValueFromObject',Objects,Name},Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,typefieldreference,expected,
+ valuefieldreference]}})
+ end;
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'.']}})
+%%% Other ->
+%%% throw({asn1_error,{got,Other,expected,'.'}})
+ end.
+
+parse_ValueSetFromObjects(Tokens) ->
+ {Objects,Rest} = parse_ReferencedObjects(Tokens),
+ case Rest of
+ [{'.',_}|Rest2] ->
+ {Name,Rest3} = parse_FieldName(Rest2),
+ case lists:last(Name) of
+ {typefieldreference,_FieldName} ->
+ {{'ValueSetFromObjects',Objects,Name},Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,
+ typefieldreference]}})
+ end;
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'.']}})
+%%% Other ->
+%%% throw({asn1_error,{got,Other,expected,'.'}})
+ end.
+
+parse_TypeFromObject(Tokens) ->
+ {Objects,Rest} = parse_ReferencedObjects(Tokens),
+ case Rest of
+ [{'.',_}|Rest2] ->
+ {Name,Rest3} = parse_FieldName(Rest2),
+ case lists:last(Name) of
+ {typefieldreference,_FieldName} ->
+ {{'TypeFromObject',Objects,Name},Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,
+ typefieldreference]}})
+ end;
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'.']}})
+%%% Other ->
+%%% throw({asn1_error,{got,Other,expected,'.'}})
+ end.
+
+parse_ObjectFromObject(Tokens) ->
+ {Objects,Rest} = parse_ReferencedObjects(Tokens),
+ case Rest of
+ [{'.',_}|Rest2] ->
+ {Name,Rest3} = parse_FieldName(Rest2),
+ {{'ObjectFromObject',Objects,Name},Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'.']}})
+%%% Other ->
+%%% throw({asn1_error,{got,Other,expected,'.'}})
+ end.
+
+parse_ObjectSetFromObjects(Tokens) ->
+ {Objects,Rest} = parse_ReferencedObjects(Tokens),
+ case Rest of
+ [{'.',_}|Rest2] ->
+ {Name,Rest3} = parse_FieldName(Rest2),
+ {{'ObjectSetFromObjects',Objects,Name},Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'.']}})
+%%% Other ->
+%%% throw({asn1_error,{got,Other,expected,'.'}})
+ end.
+
+% parse_InstanceOfType([{'INSTANCE',_},{'OF',_}|Rest]) ->
+% {Class,Rest2} = parse_DefinedObjectClass(Rest),
+% {{'InstanceOfType',Class},Rest2}.
+
+% parse_InstanceOfValue(Tokens) ->
+% parse_Value(Tokens).
+
+
+
+%% X.682 constraint specification
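+%%
+%% As an illustration of the X.682 general constraints handled by the
+%% functions below (using made-up names such as SupportedAttributes and
+%% the component name 'type'), the following ASN.1 fragments would be
+%% accepted:
+%%
+%%   CONSTRAINED BY {}               -- user-defined constraint
+%%   {SupportedAttributes}           -- simple table constraint
+%%   {SupportedAttributes}{@type}    -- component relation constraint
+%%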
+
+parse_GeneralConstraint(Tokens) ->
+ Flist = [fun parse_UserDefinedConstraint/1,
+ fun parse_TableConstraint/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_UserDefinedConstraint([{'CONSTRAINED',_},{'BY',_},{'{',_},{'}',_}|Rest])->
+ {{constrained_by,[]},Rest};
+parse_UserDefinedConstraint([{'CONSTRAINED',_},
+ {'BY',_},
+ {'{',_}|Rest]) ->
+ {Param,Rest2} = parse_UserDefinedConstraintParameter(Rest),
+ case Rest2 of
+ [{'}',_}|Rest3] ->
+ {{constrained_by,Param},Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'}']}})
+ end;
+parse_UserDefinedConstraint(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ ['CONSTRAINED BY {}','CONSTRAINED BY {']]}}).
+
+parse_UserDefinedConstraintParameter(Tokens) ->
+ parse_UserDefinedConstraintParameter(Tokens,[]).
+parse_UserDefinedConstraintParameter(Tokens,Acc) ->
+ Flist = [fun parse_GovernorAndActualParameter/1,
+ fun parse_ActualParameter/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ {Result,Rest} ->
+ case Rest of
+ [{',',_}|_Rest2] ->
+ parse_UserDefinedConstraintParameter(Tokens,[Result|Acc]);
+ _ ->
+ {lists:reverse([Result|Acc]),Rest}
+ end
+ end.
+
+parse_GovernorAndActualParameter(Tokens) ->
+ {Governor,Rest} = parse_Governor(Tokens),
+ case Rest of
+ [{':',_}|Rest2] ->
+ {Params,Rest3} = parse_ActualParameter(Rest2),
+ {{'Governor_Params',Governor,Params},Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,':']}})
+ end.
+
+parse_TableConstraint(Tokens) ->
+ Flist = [fun parse_ComponentRelationConstraint/1,
+ fun parse_SimpleTableConstraint/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_SimpleTableConstraint(Tokens) ->
+ {ObjectSet,Rest} = parse_ObjectSet(Tokens),
+ {{simpletable,ObjectSet},Rest}.
+
+parse_ComponentRelationConstraint([{'{',_}|Rest]) ->
+ {ObjectSet,Rest2} = parse_DefinedObjectSet(Rest),
+ case Rest2 of
+ [{'}',_},{'{',_}|Rest3] ->
+ {AtNot,Rest4} = parse_AtNotationList(Rest3,[]),
+ case Rest4 of
+ [{'}',_}|Rest5] ->
+ {{componentrelation,ObjectSet,AtNot},Rest5};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'}']}})
+ end;
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,
+ 'ComponentRelationConstraint',ended,with,'}']}})
+%%% Other ->
+%%% throw(Other)
+ end;
+parse_ComponentRelationConstraint(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'{']}}).
+
+parse_AtNotationList(Tokens,Acc) ->
+ {AtNot,Rest} = parse_AtNotation(Tokens),
+ case Rest of
+ [{',',_}|Rest2] ->
+ parse_AtNotationList(Rest2,[AtNot|Acc]);
+ _ ->
+ {lists:reverse([AtNot|Acc]),Rest}
+ end.
+
+parse_AtNotation([{'@',_},{'.',_}|Rest]) ->
+ {CIdList,Rest2} = parse_ComponentIdList(Rest),
+ {{innermost,CIdList},Rest2};
+parse_AtNotation([{'@',_}|Rest]) ->
+ {CIdList,Rest2} = parse_ComponentIdList(Rest),
+ {{outermost,CIdList},Rest2};
+parse_AtNotation(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,['@','@.']]}}).
+
+parse_ComponentIdList(Tokens) ->
+ parse_ComponentIdList(Tokens,[]).
+
+parse_ComponentIdList([Id = {identifier,_,_},{'.',_}|Rest],Acc) ->
+ parse_ComponentIdList(Rest,[identifier2Extvalueref(Id)|Acc]);
+parse_ComponentIdList([Id = {identifier,_,_}|Rest],Acc) ->
+ {lists:reverse([identifier2Extvalueref(Id)|Acc]),Rest};
+parse_ComponentIdList(Tokens,_) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ [identifier,'identifier.']]}}).
+
+
+
+
+
+% X.683 Parameterization of ASN.1 specifications
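+%
+% As an illustration of the X.683 parameterization handled below
+% (all names here are made up), a parameterized type assignment and
+% an instantiation of it look like:
+%
+%   General{Type} ::= SEQUENCE {
+%       number  INTEGER,
+%       info    Type
+%   }
+%
+%   Instantiated ::= General{NameString}
+%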
+
+parse_Governor(Tokens) ->
+ Flist = [fun parse_Type/1,
+ fun parse_DefinedObjectClass/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_ActualParameter(Tokens) ->
+ Flist = [fun parse_Type/1,
+ fun parse_Value/1,
+ fun parse_ValueSet/1,
+ fun parse_DefinedObjectClass/1,
+ fun parse_Object/1,
+ fun parse_ObjectSet/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_ParameterizedAssignment(Tokens) ->
+ Flist = [fun parse_ParameterizedTypeAssignment/1,
+ fun parse_ParameterizedValueAssignment/1,
+ fun parse_ParameterizedValueSetTypeAssignment/1,
+ fun parse_ParameterizedObjectClassAssignment/1,
+ fun parse_ParameterizedObjectAssignment/1,
+ fun parse_ParameterizedObjectSetAssignment/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ AsnAssErr = {asn1_assignment_error,_} ->
+ throw(AsnAssErr);
+ Result ->
+ Result
+ end.
+
+parse_ParameterizedTypeAssignment([{typereference,L1,Name}|Rest]) ->
+ {ParameterList,Rest2} = parse_ParameterList(Rest),
+ case Rest2 of
+ [{'::=',_}|Rest3] ->
+ {Type,Rest4} = parse_Type(Rest3),
+ {#ptypedef{pos=L1,name=Name,args=ParameterList,typespec=Type},
+ Rest4};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'::=']}})
+ end;
+parse_ParameterizedTypeAssignment(Tokens) ->
+ throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ typereference]}}).
+
+parse_ParameterizedValueAssignment([{identifier,L1,Name}|Rest]) ->
+ {ParameterList,Rest2} = parse_ParameterList(Rest),
+ {Type,Rest3} = parse_Type(Rest2),
+ case Rest3 of
+ [{'::=',_}|Rest4] ->
+ {Value,Rest5} = parse_Value(Rest4),
+ {#pvaluedef{pos=L1,name=Name,args=ParameterList,type=Type,
+ value=Value},Rest5};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'::=']}})
+ end;
+parse_ParameterizedValueAssignment(Tokens) ->
+ throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,identifier]}}).
+
+parse_ParameterizedValueSetTypeAssignment([{typereference,L1,Name}|Rest]) ->
+ {ParameterList,Rest2} = parse_ParameterList(Rest),
+ {Type,Rest3} = parse_Type(Rest2),
+ case Rest3 of
+ [{'::=',_}|Rest4] ->
+ {ValueSet,Rest5} = parse_ValueSet(Rest4),
+ {#pvaluesetdef{pos=L1,name=Name,args=ParameterList,
+ type=Type,valueset=ValueSet},Rest5};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'::=']}})
+ end;
+parse_ParameterizedValueSetTypeAssignment(Tokens) ->
+ throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ typereference]}}).
+
+parse_ParameterizedObjectClassAssignment([{typereference,L1,Name}|Rest]) ->
+ {ParameterList,Rest2} = parse_ParameterList(Rest),
+ case Rest2 of
+ [{'::=',_}|Rest3] ->
+ {Class,Rest4} = parse_ObjectClass(Rest3),
+ {#ptypedef{pos=L1,name=Name,args=ParameterList,typespec=Class},
+ Rest4};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'::=']}})
+ end;
+parse_ParameterizedObjectClassAssignment(Tokens) ->
+ throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ typereference]}}).
+
+parse_ParameterizedObjectAssignment([{identifier,L1,Name}|Rest]) ->
+ {ParameterList,Rest2} = parse_ParameterList(Rest),
+ {Class,Rest3} = parse_DefinedObjectClass(Rest2),
+ case Rest3 of
+ [{'::=',_}|Rest4] ->
+ {Object,Rest5} = parse_Object(Rest4),
+ {#pobjectdef{pos=L1,name=Name,args=ParameterList,
+ class=Class,def=Object},Rest5};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'::=']}})
+%%% Other ->
+%%% throw(Other)
+ end;
+parse_ParameterizedObjectAssignment(Tokens) ->
+ throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,identifier]}}).
+
+parse_ParameterizedObjectSetAssignment([{typereference,L1,Name}|Rest]) ->
+ {ParameterList,Rest2} = parse_ParameterList(Rest),
+ {Class,Rest3} = parse_DefinedObjectClass(Rest2),
+ case Rest3 of
+ [{'::=',_}|Rest4] ->
+ {ObjectSet,Rest5} = parse_ObjectSet(Rest4),
+ {#pobjectsetdef{pos=L1,name=Name,args=ParameterList,
+ class=Class,def=ObjectSet},Rest5};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'::=']}})
+%%% Other ->
+%%% throw(Other)
+ end;
+parse_ParameterizedObjectSetAssignment(Tokens) ->
+ throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ typereference]}}).
+
+parse_ParameterList([{'{',_}|Rest]) ->
+ parse_ParameterList(Rest,[]);
+parse_ParameterList(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'{']}}).
+
+parse_ParameterList(Tokens,Acc) ->
+ {Parameter,Rest} = parse_Parameter(Tokens),
+ case Rest of
+ [{',',_}|Rest2] ->
+ parse_ParameterList(Rest2,[Parameter|Acc]);
+ [{'}',_}|Rest3] ->
+ {lists:reverse([Parameter|Acc]),Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,[',','}']]}})
+ end.
+
+parse_Parameter(Tokens) ->
+ Flist = [fun parse_ParamGovAndRef/1,
+ fun parse_Reference/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_ParamGovAndRef(Tokens) ->
+ {ParamGov,Rest} = parse_ParamGovernor(Tokens),
+ case Rest of
+ [{':',_}|Rest2] ->
+ {Ref,Rest3} = parse_Reference(Rest2),
+ {{ParamGov,Ref},Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,':']}})
+ end.
+
+parse_ParamGovernor(Tokens) ->
+ Flist = [fun parse_Governor/1,
+ fun parse_Reference/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+% parse_ParameterizedReference(Tokens) ->
+% {Ref,Rest} = parse_Reference(Tokens),
+% case Rest of
+% [{'{',_},{'}',_}|Rest2] ->
+% {{ptref,Ref},Rest2};
+% _ ->
+% {{ptref,Ref},Rest}
+% end.
+
+parse_SimpleDefinedType([{typereference,L1,ModuleName},{'.',_},
+ {typereference,_,TypeName}|Rest]) ->
+ {#'Externaltypereference'{pos=L1,module=ModuleName,
+ type=TypeName},Rest};
+parse_SimpleDefinedType([Tref={typereference,_,_}|Rest]) ->
+% {#'Externaltypereference'{pos=L2,module=get(asn1_module),
+% type=TypeName},Rest};
+ {tref2Exttref(Tref),Rest};
+parse_SimpleDefinedType(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ [typereference,'typereference.typereference']]}}).
+
+parse_SimpleDefinedValue([{typereference,L1,ModuleName},{'.',_},
+ {identifier,_,Value}|Rest]) ->
+ {{simpledefinedvalue,#'Externalvaluereference'{pos=L1,module=ModuleName,
+ value=Value}},Rest};
+parse_SimpleDefinedValue([{identifier,L2,Value}|Rest]) ->
+ {{simpledefinedvalue,L2,Value},Rest};
+parse_SimpleDefinedValue(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ ['typereference.identifier',identifier]]}}).
+
+parse_ParameterizedType(Tokens) ->
+ {Type,Rest} = parse_SimpleDefinedType(Tokens),
+ {Params,Rest2} = parse_ActualParameterList(Rest),
+ {{pt,Type,Params},Rest2}.
+
+parse_ParameterizedValue(Tokens) ->
+ {Value,Rest} = parse_SimpleDefinedValue(Tokens),
+ {Params,Rest2} = parse_ActualParameterList(Rest),
+ {{pv,Value,Params},Rest2}.
+
+parse_ParameterizedObjectClass(Tokens) ->
+ {Type,Rest} = parse_DefinedObjectClass(Tokens),
+ {Params,Rest2} = parse_ActualParameterList(Rest),
+ {{poc,Type,Params},Rest2}.
+
+parse_ParameterizedObjectSet(Tokens) ->
+ {ObjectSet,Rest} = parse_DefinedObjectSet(Tokens),
+ {Params,Rest2} = parse_ActualParameterList(Rest),
+ {{pos,ObjectSet,Params},Rest2}.
+
+parse_ParameterizedObject(Tokens) ->
+ {Object,Rest} = parse_DefinedObject(Tokens),
+ {Params,Rest2} = parse_ActualParameterList(Rest),
+ {{po,Object,Params},Rest2}.
+
+parse_ActualParameterList([{'{',_}|Rest]) ->
+ parse_ActualParameterList(Rest,[]);
+parse_ActualParameterList(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'{']}}).
+
+parse_ActualParameterList(Tokens,Acc) ->
+ {Parameter,Rest} = parse_ActualParameter(Tokens),
+ case Rest of
+ [{',',_}|Rest2] ->
+ parse_ActualParameterList(Rest2,[Parameter|Acc]);
+ [{'}',_}|Rest3] ->
+ {lists:reverse([Parameter|Acc]),Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,[',','}']]}})
+%%% Other ->
+%%% throw(Other)
+ end.
+
+
+
+
+
+
+
+%-------------------------
+
+is_word(Token) ->
+ case not_allowed_word(Token) of
+ true -> false;
+ _ ->
+ if
+ atom(Token) ->
+ Item = atom_to_list(Token),
+ is_word(Item);
+ list(Token), length(Token) == 1 ->
+ check_one_char_word(Token);
+ list(Token) ->
+ [A|Rest] = Token,
+ case check_first(A) of
+ true ->
+ check_rest(Rest);
+ _ ->
+ false
+ end
+ end
+ end.
+
+not_allowed_word(Name) ->
+ lists:member(Name,["BIT",
+ "BOOLEAN",
+ "CHARACTER",
+ "CHOICE",
+ "EMBEDDED",
+ "END",
+ "ENUMERATED",
+ "EXTERNAL",
+ "FALSE",
+ "INSTANCE",
+ "INTEGER",
+ "INTERSECTION",
+ "MINUS-INFINITY",
+ "NULL",
+ "OBJECT",
+ "OCTET",
+ "PLUS-INFINITY",
+ "REAL",
+ "SEQUENCE",
+ "SET",
+ "TRUE",
+ "UNION"]).
+
+check_one_char_word([A]) when $A =< A, $Z >= A ->
+ true;
+check_one_char_word([_]) ->
+ false. %% unknown item in SyntaxList
+
+check_first(A) when $A =< A, $Z >= A ->
+ true;
+check_first(_) ->
+ false. %% unknown item in SyntaxList
+
+check_rest([R,R|_Rs]) when $- == R ->
+ false; %% two consecutive hyphens are not allowed in a word
+check_rest([R]) when $- == R ->
+ false; %% word cannot end with hyphen
+check_rest([R|Rs]) when $A=<R, $Z>=R; $-==R ->
+ check_rest(Rs);
+check_rest([]) ->
+ true;
+check_rest(_) ->
+ false.
+
+
+to_set(V) when list(V) ->
+ ordsets:list_to_set(V);
+to_set(V) ->
+ ordsets:list_to_set([V]).
+
+
+parse_AlternativeTypeLists(Tokens) ->
+ {AlternativeTypeList,Rest1} = parse_AlternativeTypeList(Tokens),
+ {ExtensionAndException,Rest2} =
+ case Rest1 of
+ [{',',_},{'...',L1},{'!',_}|Rest12] ->
+ {_,Rest13} = parse_ExceptionIdentification(Rest12),
+ %% Exception info is currently thrown away
+ {[#'EXTENSIONMARK'{pos=L1}],Rest13};
+ [{',',_},{'...',L1}|Rest12] ->
+ {[#'EXTENSIONMARK'{pos=L1}],Rest12};
+ _ ->
+ {[],Rest1}
+ end,
+ case ExtensionAndException of
+ [] ->
+ {AlternativeTypeList,Rest2};
+ _ ->
+ {ExtensionAddition,Rest3} =
+ case Rest2 of
+ [{',',_}|Rest23] ->
+ parse_ExtensionAdditionAlternativeList(Rest23);
+ _ ->
+ {[],Rest2}
+ end,
+ {OptionalExtensionMarker,Rest4} =
+ case Rest3 of
+ [{',',_},{'...',L3}|Rest31] ->
+ {[#'EXTENSIONMARK'{pos=L3}],Rest31};
+ _ ->
+ {[],Rest3}
+ end,
+ {AlternativeTypeList ++ ExtensionAndException ++ ExtensionAddition ++ OptionalExtensionMarker, Rest4}
+ end.
+
+
+parse_AlternativeTypeList(Tokens) ->
+ parse_AlternativeTypeList(Tokens,[]).
+
+parse_AlternativeTypeList(Tokens,Acc) ->
+ {NamedType,Rest} = parse_NamedType(Tokens),
+ case Rest of
+ [{',',_},Id = {identifier,_,_}|Rest2] ->
+ parse_AlternativeTypeList([Id|Rest2],[NamedType|Acc]);
+ _ ->
+ {lists:reverse([NamedType|Acc]),Rest}
+ end.
+
+
+
+parse_ExtensionAdditionAlternativeList(Tokens) ->
+ parse_ExtensionAdditionAlternativeList(Tokens,[]).
+
+parse_ExtensionAdditionAlternativeList(Tokens,Acc) ->
+ {Element,Rest0} =
+ case Tokens of
+ [{identifier,_,_}|_Rest] ->
+ parse_NamedType(Tokens);
+ [{'[[',_}|_] ->
+ parse_ExtensionAdditionAlternatives(Tokens)
+ end,
+ case Rest0 of
+ [{',',_}|Rest01] ->
+ parse_ExtensionAdditionAlternativeList(Rest01,[Element|Acc]);
+ _ ->
+ {lists:reverse([Element|Acc]),Rest0}
+ end.
+
+parse_ExtensionAdditionAlternatives([{'[[',_}|Rest]) ->
+ parse_ExtensionAdditionAlternatives(Rest,[]);
+parse_ExtensionAdditionAlternatives(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'[[']}}).
+
+parse_ExtensionAdditionAlternatives([Id = {identifier,_,_}|Rest],Acc) ->
+ {NamedType, Rest2} = parse_NamedType([Id|Rest]),
+ case Rest2 of
+ [{',',_}|Rest21] ->
+ parse_ExtensionAdditionAlternatives(Rest21,[NamedType|Acc]);
+ [{']]',_}|Rest21] ->
+ {lists:reverse(Acc),Rest21};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,[',',']]']]}})
+ end.
+
+parse_NamedType([{identifier,L1,Idname}|Rest]) ->
+ {Type,Rest2} = parse_Type(Rest),
+ {#'ComponentType'{pos=L1,name=Idname,typespec=Type,prop=mandatory},Rest2};
+parse_NamedType(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,identifier]}}).
+
+
+parse_ComponentTypeLists(Tokens) ->
+% Resulting tuple {ComponentTypeList,Rest1} is returned
+ case Tokens of
+ [{identifier,_,_}|_Rest0] ->
+ {Clist,Rest01} = parse_ComponentTypeList(Tokens),
+ case Rest01 of
+ [{',',_}|Rest02] ->
+ parse_ComponentTypeLists(Rest02,Clist);
+ _ ->
+ {Clist,Rest01}
+ end;
+ [{'COMPONENTS',_},{'OF',_}|_Rest] ->
+ {Clist,Rest01} = parse_ComponentTypeList(Tokens),
+ case Rest01 of
+ [{',',_}|Rest02] ->
+ parse_ComponentTypeLists(Rest02,Clist);
+ _ ->
+ {Clist,Rest01}
+ end;
+ _ ->
+ parse_ComponentTypeLists(Tokens,[])
+ end.
+
+parse_ComponentTypeLists([{'...',L1},{'!',_}|Rest],Clist1) ->
+ {_,Rest2} = parse_ExceptionIdentification(Rest),
+ %% Exception info is currently thrown away
+ parse_ComponentTypeLists2(Rest2,Clist1++[#'EXTENSIONMARK'{pos=L1}]);
+parse_ComponentTypeLists([{'...',L1}|Rest],Clist1) ->
+ parse_ComponentTypeLists2(Rest,Clist1++[#'EXTENSIONMARK'{pos=L1}]);
+parse_ComponentTypeLists(Tokens,Clist1) ->
+ {Clist1,Tokens}.
+
+
+parse_ComponentTypeLists2(Tokens,Clist1) ->
+ {ExtensionAddition,Rest2} =
+ case Tokens of
+ [{',',_}|Rest1] ->
+ parse_ExtensionAdditionList(Rest1);
+ _ ->
+ {[],Tokens}
+ end,
+ {OptionalExtensionMarker,Rest3} =
+ case Rest2 of
+ [{',',_},{'...',L2}|Rest21] ->
+ {[#'EXTENSIONMARK'{pos=L2}],Rest21};
+ _ ->
+ {[],Rest2}
+ end,
+ {RootComponentTypeList,Rest4} =
+ case Rest3 of
+ [{',',_}|Rest31] ->
+ parse_ComponentTypeList(Rest31);
+ _ ->
+ {[],Rest3}
+ end,
+ {Clist1 ++ ExtensionAddition ++ OptionalExtensionMarker ++ RootComponentTypeList, Rest4}.
+
+
+parse_ComponentTypeList(Tokens) ->
+ parse_ComponentTypeList(Tokens,[]).
+
+parse_ComponentTypeList(Tokens,Acc) ->
+ {ComponentType,Rest} = parse_ComponentType(Tokens),
+ case Rest of
+ [{',',_},Id = {identifier,_,_}|Rest2] ->
+ parse_ComponentTypeList([Id|Rest2],[ComponentType|Acc]);
+ [{',',_},C1={'COMPONENTS',_},C2={'OF',_}|Rest2] ->
+ parse_ComponentTypeList([C1,C2|Rest2],[ComponentType|Acc]);
+% _ ->
+% {lists:reverse([ComponentType|Acc]),Rest}
+ [{'}',_}|_] ->
+ {lists:reverse([ComponentType|Acc]),Rest};
+ [{',',_},{'...',_}|_] ->
+ {lists:reverse([ComponentType|Acc]),Rest};
+ _ ->
+ throw({asn1_error,
+ {get_line(hd(Tokens)),get(asn1_module),
+ [got,[get_token(hd(Rest)),get_token(hd(tl(Rest)))],
+ expected,['}',', identifier']]}})
+ end.
+
+
+parse_ExtensionAdditionList(Tokens) ->
+ parse_ExtensionAdditionList(Tokens,[]).
+
+parse_ExtensionAdditionList(Tokens,Acc) ->
+ {Element,Rest0} =
+ case Tokens of
+ [{identifier,_,_}|_Rest] ->
+ parse_ComponentType(Tokens);
+ [{'[[',_}|_] ->
+ parse_ExtensionAdditions(Tokens);
+ _ ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ [identifier,'[[']]}})
+ end,
+ case Rest0 of
+ [{',',_}|Rest01] ->
+ parse_ExtensionAdditionList(Rest01,[Element|Acc]);
+ _ ->
+ {lists:reverse([Element|Acc]),Rest0}
+ end.
+
+parse_ExtensionAdditions([{'[[',_}|Rest]) ->
+ parse_ExtensionAdditions(Rest,[]);
+parse_ExtensionAdditions(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'[[']}}).
+
+parse_ExtensionAdditions([Id = {identifier,_,_}|Rest],Acc) ->
+ {ComponentType, Rest2} = parse_ComponentType([Id|Rest]),
+ case Rest2 of
+ [{',',_}|Rest21] ->
+ parse_ExtensionAdditions(Rest21,[ComponentType|Acc]);
+ [{']]',_}|Rest21] ->
+ {lists:reverse(Acc),Rest21};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,[',',']]']]}})
+ end;
+parse_ExtensionAdditions(Tokens,_) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,identifier]}}).
+
+parse_ComponentType([{'COMPONENTS',_},{'OF',_}|Rest]) ->
+ {Type,Rest2} = parse_Type(Rest),
+ {{'COMPONENTS OF',Type},Rest2};
+parse_ComponentType(Tokens) ->
+ {NamedType,Rest} = parse_NamedType(Tokens),
+ case Rest of
+ [{'OPTIONAL',_}|Rest2] ->
+ {NamedType#'ComponentType'{prop='OPTIONAL'},Rest2};
+ [{'DEFAULT',_}|Rest2] ->
+ {Value,Rest21} = parse_Value(Rest2),
+ {NamedType#'ComponentType'{prop={'DEFAULT',Value}},Rest21};
+ _ ->
+ {NamedType,Rest}
+ end.
+
+
+
+parse_SignedNumber([{number,_,Value}|Rest]) ->
+ {Value,Rest};
+parse_SignedNumber([{'-',_},{number,_,Value}|Rest]) ->
+ {-Value,Rest};
+parse_SignedNumber(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ [number,'-number']]}}).
+
+parse_Enumerations(Tokens=[{identifier,_,_}|_Rest]) ->
+ parse_Enumerations(Tokens,[]);
+parse_Enumerations([H|_T]) ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,identifier]}}).
+
+parse_Enumerations(Tokens = [{identifier,_,_},{'(',_}|_Rest], Acc) ->
+ {NamedNumber,Rest2} = parse_NamedNumber(Tokens),
+ case Rest2 of
+ [{',',_}|Rest3] ->
+ parse_Enumerations(Rest3,[NamedNumber|Acc]);
+ _ ->
+ {lists:reverse([NamedNumber|Acc]),Rest2}
+ end;
+parse_Enumerations([{identifier,_,Id}|Rest], Acc) ->
+ case Rest of
+ [{',',_}|Rest2] ->
+ parse_Enumerations(Rest2,[Id|Acc]);
+ _ ->
+ {lists:reverse([Id|Acc]),Rest}
+ end;
+parse_Enumerations([{'...',_}|Rest], Acc) ->
+ case Rest of
+ [{',',_}|Rest2] ->
+ parse_Enumerations(Rest2,['EXTENSIONMARK'|Acc]);
+ _ ->
+ {lists:reverse(['EXTENSIONMARK'|Acc]),Rest}
+ end;
+parse_Enumerations([H|_T],_) ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,identifier]}}).
+
+parse_NamedNumberList(Tokens) ->
+ parse_NamedNumberList(Tokens,[]).
+
+parse_NamedNumberList(Tokens,Acc) ->
+ {NamedNum,Rest} = parse_NamedNumber(Tokens),
+ case Rest of
+ [{',',_}|Rest2] ->
+ parse_NamedNumberList(Rest2,[NamedNum|Acc]);
+ _ ->
+ {lists:reverse([NamedNum|Acc]),Rest}
+ end.
+
+parse_NamedNumber([{identifier,_,Name},{'(',_}|Rest]) ->
+ Flist = [fun parse_SignedNumber/1,
+ fun parse_DefinedValue/1],
+ case (catch parse_or(Rest,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ {NamedNum,[{')',_}|Rest2]} ->
+ {{'NamedNumber',Name,NamedNum},Rest2};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
+ [got,get_token(hd(Rest)),expected,'NamedNumberList']}})
+ end;
+parse_NamedNumber(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,identifier]}}).
+
+
+parse_Tag([{'[',_}|Rest]) ->
+ {Class,Rest2} = parse_Class(Rest),
+ {ClassNumber,Rest3} =
+ case Rest2 of
+ [{number,_,Num}|Rest21] ->
+ {Num,Rest21};
+ _ ->
+ parse_DefinedValue(Rest2)
+ end,
+ case Rest3 of
+ [{']',_}|Rest4] ->
+ {#tag{class=Class,number=ClassNumber},Rest4};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
+ [got,get_token(hd(Rest3)),expected,']']}})
+ end;
+parse_Tag(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'[']}}).
+
+parse_Class([{'UNIVERSAL',_}|Rest]) ->
+ {'UNIVERSAL',Rest};
+parse_Class([{'APPLICATION',_}|Rest]) ->
+ {'APPLICATION',Rest};
+parse_Class([{'PRIVATE',_}|Rest]) ->
+ {'PRIVATE',Rest};
+parse_Class(Tokens) ->
+ {'CONTEXT',Tokens}.
+
+parse_Value(Tokens) ->
+ Flist = [fun parse_BuiltinValue/1,
+ fun parse_ValueFromObject/1,
+ fun parse_DefinedValue/1],
+
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end.
+
+parse_BuiltinValue([{bstring,_,Bstr}|Rest]) ->
+ {{bstring,Bstr},Rest};
+parse_BuiltinValue([{hstring,_,Hstr}|Rest]) ->
+ {{hstring,Hstr},Rest};
+parse_BuiltinValue([{'{',_},{'}',_}|Rest]) ->
+ {[],Rest};
+parse_BuiltinValue(Tokens = [{'{',_}|_Rest]) ->
+ Flist = [
+ fun parse_SequenceOfValue/1,
+ fun parse_SequenceValue/1,
+ fun parse_ObjectIdentifierValue/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ Result ->
+ Result
+ end;
+parse_BuiltinValue([{identifier,_,IdName},{':',_}|Rest]) ->
+ {Value,Rest2} = parse_Value(Rest),
+ {{'CHOICE',{IdName,Value}},Rest2};
+parse_BuiltinValue([{'NULL',_}|Rest]) ->
+ {'NULL',Rest};
+parse_BuiltinValue([{'TRUE',_}|Rest]) ->
+ {true,Rest};
+parse_BuiltinValue([{'FALSE',_}|Rest]) ->
+ {false,Rest};
+parse_BuiltinValue([{'PLUS-INFINITY',_}|Rest]) ->
+ {'PLUS-INFINITY',Rest};
+parse_BuiltinValue([{'MINUS-INFINITY',_}|Rest]) ->
+ {'MINUS-INFINITY',Rest};
+parse_BuiltinValue([{cstring,_,Cstr}|Rest]) ->
+ {Cstr,Rest};
+parse_BuiltinValue([{number,_,Num}|Rest]) ->
+ {Num,Rest};
+parse_BuiltinValue([{'-',_},{number,_,Num}|Rest]) ->
+ {- Num,Rest};
+parse_BuiltinValue(Tokens) ->
+ parse_ObjectClassFieldValue(Tokens).
+
+%% Externalvaluereference
+parse_DefinedValue([{typereference,L1,Tname},{'.',_},{identifier,_,Idname}|Rest]) ->
+ {#'Externalvaluereference'{pos=L1,module=Tname,value=Idname},Rest};
+%% valuereference
+parse_DefinedValue([Id = {identifier,_,_}|Rest]) ->
+ {identifier2Extvalueref(Id),Rest};
+%% ParameterizedValue
+parse_DefinedValue(Tokens) ->
+ parse_ParameterizedValue(Tokens).
+
+
+parse_SequenceValue([{'{',_}|Tokens]) ->
+ parse_SequenceValue(Tokens,[]);
+parse_SequenceValue(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'{']}}).
+
+parse_SequenceValue([{identifier,_,IdName}|Rest],Acc) ->
+ {Value,Rest2} = parse_Value(Rest),
+ case Rest2 of
+ [{',',_}|Rest3] ->
+ parse_SequenceValue(Rest3,[{IdName,Value}|Acc]);
+ [{'}',_}|Rest3] ->
+ {lists:reverse([{IdName,Value}|Acc]),Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'}']}})
+ end;
+parse_SequenceValue(Tokens,_Acc) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,identifier]}}).
+
+parse_SequenceOfValue([{'{',_}|Tokens]) ->
+ parse_SequenceOfValue(Tokens,[]);
+parse_SequenceOfValue(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'{']}}).
+
+parse_SequenceOfValue(Tokens,Acc) ->
+ {Value,Rest2} = parse_Value(Tokens),
+ case Rest2 of
+ [{',',_}|Rest3] ->
+ parse_SequenceOfValue(Rest3,[Value|Acc]);
+ [{'}',_}|Rest3] ->
+ {lists:reverse([Value|Acc]),Rest3};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'}']}})
+ end.
+
+parse_ValueSetTypeAssignment([{typereference,L1,Name}|Rest]) ->
+ {Type,Rest2} = parse_Type(Rest),
+ case Rest2 of
+ [{'::=',_}|Rest3] ->
+ {ValueSet,Rest4} = parse_ValueSet(Rest3),
+ {#valuedef{pos=L1,name=Name,type=Type,value=ValueSet},Rest4};
+ [H|_T] ->
+	    throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'::=']}})
+ end;
+parse_ValueSetTypeAssignment(Tokens) ->
+ throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,
+ typereference]}}).
+
+parse_ValueSet([{'{',_}|Rest]) ->
+ {Elems,Rest2} = parse_ElementSetSpecs(Rest),
+ case Rest2 of
+ [{'}',_}|Rest3] ->
+ {{valueset,Elems},Rest3};
+ [H|_T] ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,'}']}})
+ end;
+parse_ValueSet(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'{']}}).
+
+parse_ValueAssignment([{identifier,L1,IdName}|Rest]) ->
+ {Type,Rest2} = parse_Type(Rest),
+ case Rest2 of
+ [{'::=',_}|Rest3] ->
+ {Value,Rest4} = parse_Value(Rest3),
+ case lookahead_assignment(Rest4) of
+ ok ->
+ {#valuedef{pos=L1,name=IdName,type=Type,value=Value},Rest4};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'::=']}})
+ end;
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
+ [got,get_token(hd(Rest2)),expected,'::=']}})
+ end;
+parse_ValueAssignment(Tokens) ->
+ throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,identifier]}}).
+
+%% SizeConstraint
+parse_SubtypeElements([{'SIZE',_}|Tokens]) ->
+ {Constraint,Rest} = parse_Constraint(Tokens),
+ {{'SizeConstraint',Constraint#constraint.c},Rest};
+%% PermittedAlphabet
+parse_SubtypeElements([{'FROM',_}|Tokens]) ->
+ {Constraint,Rest} = parse_Constraint(Tokens),
+ {{'PermittedAlphabet',Constraint#constraint.c},Rest};
+%% InnerTypeConstraints
+parse_SubtypeElements([{'WITH',_},{'COMPONENT',_}|Tokens]) ->
+ {Constraint,Rest} = parse_Constraint(Tokens),
+ {{'WITH COMPONENT',Constraint},Rest};
+parse_SubtypeElements([{'WITH',_},{'COMPONENTS',_},{'{',_},{'...',_},{',',_}|Tokens]) ->
+ {Constraint,Rest} = parse_TypeConstraints(Tokens),
+ case Rest of
+ [{'}',_}|Rest2] ->
+ {{'WITH COMPONENTS',{'PartialSpecification',Constraint}},Rest2};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
+ [got,get_token(hd(Rest)),expected,'}']}})
+ end;
+parse_SubtypeElements([{'WITH',_},{'COMPONENTS',_},{'{',_}|Tokens]) ->
+ {Constraint,Rest} = parse_TypeConstraints(Tokens),
+ case Rest of
+ [{'}',_}|Rest2] ->
+ {{'WITH COMPONENTS',{'FullSpecification',Constraint}},Rest2};
+ _ ->
+ throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
+ [got,get_token(hd(Rest)),expected,'}']}})
+ end;
+%% SingleValue
+%% ContainedSubtype
+%% ValueRange
+%% TypeConstraint
+parse_SubtypeElements(Tokens) ->
+ Flist = [fun parse_ContainedSubtype/1,
+ fun parse_Value/1,
+ fun([{'MIN',_}|T]) -> {'MIN',T} end,
+ fun parse_Type/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ {asn1_error,Reason} ->
+ throw(Reason);
+ Result = {Val,_} when record(Val,type) ->
+ Result;
+ {Lower,[{'..',_}|Rest]} ->
+ {Upper,Rest2} = parse_UpperEndpoint(Rest),
+ {{'ValueRange',{Lower,Upper}},Rest2};
+ {Lower,[{'<',_},{'..',_}|Rest]} ->
+ {Upper,Rest2} = parse_UpperEndpoint(Rest),
+ {{'ValueRange',{{gt,Lower},Upper}},Rest2};
+ {Res={'ContainedSubtype',_Type},Rest} ->
+ {Res,Rest};
+ {Value,Rest} ->
+ {{'SingleValue',Value},Rest}
+ end.
+
+parse_ContainedSubtype([{'INCLUDES',_}|Rest]) ->
+ {Type,Rest2} = parse_Type(Rest),
+ {{'ContainedSubtype',Type},Rest2};
+parse_ContainedSubtype(Tokens) ->
+ throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
+ [got,get_token(hd(Tokens)),expected,'INCLUDES']}}).
+%%parse_ContainedSubtype(Tokens) -> %this option is moved to parse_SubtypeElements
+%% parse_Type(Tokens).
+
+parse_UpperEndpoint([{'<',_}|Rest]) ->
+ parse_UpperEndpoint(lt,Rest);
+parse_UpperEndpoint(Tokens) ->
+ parse_UpperEndpoint(false,Tokens).
+
+parse_UpperEndpoint(Lt,Tokens) ->
+ Flist = [ fun([{'MAX',_}|T]) -> {'MAX',T} end,
+ fun parse_Value/1],
+ case (catch parse_or(Tokens,Flist)) of
+ {'EXIT',Reason} ->
+ exit(Reason);
+ AsnErr = {asn1_error,_} ->
+ throw(AsnErr);
+ {Value,Rest2} when Lt == lt ->
+ {{lt,Value},Rest2};
+ {Value,Rest2} ->
+ {Value,Rest2}
+ end.
+
+parse_TypeConstraints(Tokens) ->
+ parse_TypeConstraints(Tokens,[]).
+
+parse_TypeConstraints([{identifier,_,_}|Rest],Acc) ->
+ {ComponentConstraint,Rest2} = parse_ComponentConstraint(Rest),
+ case Rest2 of
+ [{',',_}|Rest3] ->
+ parse_TypeConstraints(Rest3,[ComponentConstraint|Acc]);
+ _ ->
+ {lists:reverse([ComponentConstraint|Acc]),Rest2}
+ end;
+parse_TypeConstraints([H|_T],_) ->
+ throw({asn1_error,{get_line(H),get(asn1_module),
+ [got,get_token(H),expected,identifier]}}).
+
+parse_ComponentConstraint(Tokens = [{'(',_}|_Rest]) ->
+ {ValueConstraint,Rest2} = parse_Constraint(Tokens),
+ {PresenceConstraint,Rest3} = parse_PresenceConstraint(Rest2),
+ {{ValueConstraint,PresenceConstraint},Rest3};
+parse_ComponentConstraint(Tokens) ->
+ {PresenceConstraint,Rest} = parse_PresenceConstraint(Tokens),
+ {{asn1_empty,PresenceConstraint},Rest}.
+
+parse_PresenceConstraint([{'PRESENT',_}|Rest]) ->
+ {'PRESENT',Rest};
+parse_PresenceConstraint([{'ABSENT',_}|Rest]) ->
+ {'ABSENT',Rest};
+parse_PresenceConstraint([{'OPTIONAL',_}|Rest]) ->
+ {'OPTIONAL',Rest};
+parse_PresenceConstraint(Tokens) ->
+ {asn1_empty,Tokens}.
+
+
+merge_constraints({Rlist,ExtList}) -> % extensionmarker in constraint
+ {merge_constraints(Rlist,[],[]),
+ merge_constraints(ExtList,[],[])};
+
+merge_constraints(Clist) ->
+ merge_constraints(Clist, [], []).
+
+merge_constraints([Ch|Ct],Cacc, Eacc) ->
+ NewEacc = case Ch#constraint.e of
+ undefined -> Eacc;
+ E -> [E|Eacc]
+ end,
+ merge_constraints(Ct,[fixup_constraint(Ch#constraint.c)|Cacc],NewEacc);
+
+merge_constraints([],Cacc,[]) ->
+%% lists:flatten(Cacc);
+ lists:reverse(Cacc);
+merge_constraints([],Cacc,Eacc) ->
+%% lists:flatten(Cacc) ++ [{'Errors',Eacc}].
+ lists:reverse(Cacc) ++ [{'Errors',Eacc}].
+
+fixup_constraint(C) ->
+ case C of
+ {'SingleValue',SubType} when element(1,SubType) == 'ContainedSubtype' ->
+ SubType;
+ {'SingleValue',V} when list(V) ->
+ C;
+ %% [C,{'ValueRange',{lists:min(V),lists:max(V)}}];
+	  %% bug: goes wrong when an element in V is a reference to a defined value
+ {'PermittedAlphabet',{'SingleValue',V}} when list(V) ->
+ %%sort and remove duplicates
+ V2 = {'SingleValue',
+ ordsets:list_to_set(lists:flatten(V))},
+ {'PermittedAlphabet',V2};
+ {'PermittedAlphabet',{'SingleValue',V}} ->
+ V2 = {'SingleValue',[V]},
+ {'PermittedAlphabet',V2};
+ {'SizeConstraint',Sc} ->
+ {'SizeConstraint',fixup_size_constraint(Sc)};
+
+	List when list(List) -> %% In this case it may be a union or an intersection
+ [fixup_constraint(Xc)||Xc <- List];
+ Other ->
+ Other
+ end.
+
+fixup_size_constraint({'ValueRange',{Lb,Ub}}) ->
+ {Lb,Ub};
+fixup_size_constraint({{'ValueRange',R},[]}) ->
+ {R,[]};
+fixup_size_constraint({[],{'ValueRange',R}}) ->
+ {[],R};
+fixup_size_constraint({{'ValueRange',R1},{'ValueRange',R2}}) ->
+ {R1,R2};
+fixup_size_constraint({'SingleValue',[Sv]}) ->
+ fixup_size_constraint({'SingleValue',Sv});
+fixup_size_constraint({'SingleValue',L}) when list(L) ->
+ ordsets:list_to_set(L);
+fixup_size_constraint({'SingleValue',L}) ->
+ {L,L};
+fixup_size_constraint({C1,C2}) ->
+ {fixup_size_constraint(C1), fixup_size_constraint(C2)}.
+
+get_line({_,Pos,Token}) when integer(Pos),atom(Token) ->
+ Pos;
+get_line({Token,Pos}) when integer(Pos),atom(Token) ->
+ Pos;
+get_line(_) ->
+ undefined.
+
+get_token({_,Pos,Token}) when integer(Pos),atom(Token) ->
+ Token;
+get_token({'$end',Pos}) when integer(Pos) ->
+ undefined;
+get_token({Token,Pos}) when integer(Pos),atom(Token) ->
+ Token;
+get_token(_) ->
+ undefined.
+
+prioritize_error(ErrList) ->
+ case lists:keymember(asn1_error,1,ErrList) of
+ false -> % only asn1_assignment_error -> take the last
+ lists:last(ErrList);
+ true -> % contains errors from deeper in a Type
+ NewErrList = [_Err={_,_}|_RestErr] =
+ lists:filter(fun({asn1_error,_})->true;(_)->false end,
+ ErrList),
+ SplitErrs =
+ lists:splitwith(fun({_,X})->
+ case element(1,X) of
+ Int when integer(Int) -> true;
+ _ -> false
+ end
+ end,
+ NewErrList),
+ case SplitErrs of
+	    {[],UndefPosErrs} -> % if no error with Position exists
+ lists:last(UndefPosErrs);
+ {IntPosErrs,_} ->
+ IntPosReasons = lists:map(fun(X)->element(2,X) end,IntPosErrs),
+ SortedReasons = lists:keysort(1,IntPosReasons),
+ {asn1_error,lists:last(SortedReasons)}
+ end
+ end.
+
+%% most_prio_error([H={_,Reason}|T],Atom,Err) when atom(Atom) ->
+%% most_prio_error(T,element(1,Reason),H);
+%% most_prio_error([H={_,Reason}|T],Greatest,Err) ->
+%% case element(1,Reason) of
+%% Pos when integer(Pos),Pos>Greatest ->
+%% most_prio_error(
+
+
+tref2Exttref(#typereference{pos=Pos,val=Name}) ->
+ #'Externaltypereference'{pos=Pos,
+ module=get(asn1_module),
+ type=Name}.
+
+tref2Exttref(Pos,Name) ->
+ #'Externaltypereference'{pos=Pos,
+ module=get(asn1_module),
+ type=Name}.
+
+identifier2Extvalueref(#identifier{pos=Pos,val=Name}) ->
+ #'Externalvaluereference'{pos=Pos,
+ module=get(asn1_module),
+ value=Name}.
+
+%% lookahead_assignment/1 checks that the next sequence of tokens
+%% in Tokens contains a valid assignment or the
+%% 'END' token. Otherwise an exception is thrown.
+lookahead_assignment([{'END',_}|_Rest]) ->
+ ok;
+lookahead_assignment(Tokens) ->
+ parse_Assignment(Tokens),
+ ok.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_pretty_format.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_pretty_format.erl
new file mode 100644
index 0000000000..99dd246d5c
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_pretty_format.erl
@@ -0,0 +1,197 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_pretty_format.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+
+%% usage: asn1ct_pretty_format:term(Term) -> PNF list of characters
+%%
+%% Note: this is usually used in expressions like:
+%%	io:format('~s\n',[asn1ct_pretty_format:term(Term)]).
+%%
+%% Uses the following simple heuristics
+%%
+%% 1) Simple tuples are printed across the page
+%% (Simple means *all* the elements are "flat")
+%% 2) The Complex tuple {Arg1, Arg2, Arg3,....} is printed thus:
+%% {Arg1,
+%% Arg2,
+%% Arg3,
+%% ...}
+%% 3) Lists are treated as for tuples
+%% 4) Lists of printable characters are treated as strings
+%%
+%% This method seems to work reasonably well for {Tag, ...} type
+%% data structures
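+%%
+%% For example (made-up terms):
+%%
+%%   {person, "Joe", 42}          is "simple" and is printed across the page
+%%   {person, {name,"Joe"}, 42}   is "complex" and is printed roughly as
+%%
+%%       {person,
+%%         {name,"Joe"},
+%%         42}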
+
+-module(asn1ct_pretty_format).
+
+-export([term/1]).
+
+-import(io_lib, [write/1, write_string/1]).
+
+term(Term) ->
+ element(2, term(Term, 0)).
+
+%%______________________________________________________________________
+%% asn1ct_pretty_format:term(Term, Indent) -> {Indent', Chars}
+%% Format <Term> -- use <Indent> to indent the *next* line
+%% Note: Indent' is a new indentation level (sometimes printing <Term>
+%% causes the next line to need an "extra" indent!).
+
+term([], Indent) ->
+ {Indent, [$[,$]]};
+term(L, Indent) when is_list(L) ->
+ case is_string(L) of
+ true ->
+ {Indent, write_string(L)};
+ false ->
+ case complex_list(L) of
+ true ->
+ write_complex_list(L, Indent);
+ false ->
+ write_simple_list(L, Indent)
+ end
+ end;
+term(T, Indent) when is_tuple(T) ->
+ case complex_tuple(T) of
+ true ->
+ write_complex_tuple(T, Indent);
+ false ->
+ write_simple_tuple(T, Indent)
+ end;
+term(A, Indent) ->
+ {Indent, write(A)}.
+
+%%______________________________________________________________________
+%% write_simple_list([H|T], Indent) -> {Indent', Chars}
+
+write_simple_list([H|T], Indent) ->
+ {_, S1} = term(H, Indent),
+ {_, S2} = write_simple_list_tail(T, Indent),
+ {Indent, [$[,S1|S2]}.
+
+write_simple_list_tail([H|T], Indent) ->
+ {_, S1} = term(H, Indent),
+ {_, S2} = write_simple_list_tail(T, Indent),
+ {Indent, [$,,S1| S2]};
+write_simple_list_tail([], Indent) ->
+ {Indent, "]"};
+write_simple_list_tail(Other, Indent) ->
+ {_, S} = term(Other, Indent),
+ {Indent, [$|,S,$]]}.
+
+%%______________________________________________________________________
+%% write_complex_list([H|T], Indent) -> {Indent', Chars}
+
+write_complex_list([H|T], Indent) ->
+ {I1, S1} = term(H, Indent+1),
+ {_, S2} = write_complex_list_tail(T, I1),
+ {Indent, [$[,S1|S2]}.
+
+write_complex_list_tail([H|T], Indent) ->
+ {I1, S1} = term(H, Indent),
+ {_, S2} = write_complex_list_tail(T, I1),
+ {Indent, [$,,nl_indent(Indent),S1,S2]};
+write_complex_list_tail([], Indent) ->
+ {Indent, "]"};
+write_complex_list_tail(Other, Indent) ->
+ {_, S} = term(Other, Indent),
+ {Indent, [$|,S,$]]}.
+
+%%______________________________________________________________________
+%% complex_list(List) -> true | false
+%% returns true if the list is complex otherwise false
+
+complex_list([]) ->
+ false;
+complex_list([H|T]) when is_number(H); is_atom(H) ->
+ complex_list(T);
+complex_list([H|T]) ->
+ case is_string(H) of
+ true ->
+ complex_list(T);
+ false ->
+ true
+ end;
+complex_list(_) -> true.
+
+%%______________________________________________________________________
+%% complex_tuple(Tuple) -> true | false
+%% returns true if the tuple is complex otherwise false
+
+complex_tuple(T) ->
+ complex_list(tuple_to_list(T)).
+
+%%______________________________________________________________________
+%% write_simple_tuple(Tuple, Indent} -> {Indent', Chars}
+
+write_simple_tuple({}, Indent) ->
+ {Indent, "{}"};
+write_simple_tuple(Tuple, Indent) ->
+ {_, S} = write_simple_tuple_args(tuple_to_list(Tuple), Indent),
+ {Indent, [${, S, $}]}.
+
+write_simple_tuple_args([X], Indent) ->
+ term(X, Indent);
+write_simple_tuple_args([H|T], Indent) ->
+ {_, SH} = term(H, Indent),
+ {_, ST} = write_simple_tuple_args(T, Indent),
+ {Indent, [SH, $,, ST]}.
+
+%%______________________________________________________________________
+%% write_complex_tuple(Tuple, Indent} -> {Indent', Chars}
+
+write_complex_tuple(Tuple, Indent) ->
+ [H|T] = tuple_to_list(Tuple),
+ {I1, SH} = term(H, Indent+2),
+ {_, ST} = write_complex_tuple_args(T, I1),
+ {Indent, [${, SH, ST, $}]}.
+
+write_complex_tuple_args([X], Indent) ->
+ {_, S} = term(X, Indent),
+ {Indent, [$,, nl_indent(Indent), S]};
+write_complex_tuple_args([H|T], Indent) ->
+ {I1, SH} = term(H, Indent),
+ {_, ST} = write_complex_tuple_args(T, I1),
+ {Indent, [$,, nl_indent(Indent) , SH, ST]};
+write_complex_tuple_args([], Indent) ->
+ {Indent, []}.
+
+%%______________________________________________________________________
+%% utilities
+
+nl_indent(I) when I >= 0 ->
+ ["\n"|indent(I)];
+nl_indent(_) ->
+ [$\s].
+
+indent(I) when I >= 8 ->
+ [$\t|indent(I-8)];
+indent(I) when I > 0 ->
+ [$\s|indent(I-1)];
+indent(_) ->
+ [].
+
+is_string([9|T]) ->
+ is_string(T);
+is_string([10|T]) ->
+ is_string(T);
+is_string([H|T]) when H >31, H < 127 ->
+ is_string(T);
+is_string([]) ->
+ true;
+is_string(_) ->
+ false.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_tok.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_tok.erl
new file mode 100644
index 0000000000..b5ccc4a5d2
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_tok.erl
@@ -0,0 +1,351 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_tok.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+-module(asn1ct_tok).
+
+%% Tokenize ASN.1 code (input to parser generated with yecc)
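+%%
+%% For example, a call such as
+%%
+%%   tokenise("Age ::= INTEGER (0..120)", 1)
+%%
+%% is expected to yield
+%%
+%%   [{typereference,1,'Age'},{'::=',1},{'INTEGER',1},{'(',1},
+%%    {number,1,0},{'..',1},{number,1,120},{')',1}]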
+
+-export([get_name/2,tokenise/2, file/1]).
+
+
+file(File) ->
+ case file:open(File, [read]) of
+ {error, Reason} ->
+ {error,{File,file:format_error(Reason)}};
+ {ok,Stream} ->
+ process0(Stream)
+ end.
+
+process0(Stream) ->
+ process(Stream,0,[]).
+
+process(Stream,Lno,R) ->
+ process(io:get_line(Stream, ''), Stream,Lno+1,R).
+
+process(eof, Stream,Lno,R) ->
+ file:close(Stream),
+ lists:flatten(lists:reverse([{'$end',Lno}|R]));
+
+
+process(L, Stream,Lno,R) when list(L) ->
+ %%io:format('read:~s',[L]),
+ case catch tokenise(L,Lno) of
+ {'ERR',Reason} ->
+ io:format("Tokeniser error on line: ~w ~w~n",[Lno,Reason]),
+ exit(0);
+ T ->
+ %%io:format('toks:~w~n',[T]),
+ process(Stream,Lno,[T|R])
+ end.
+
+
+tokenise([H|T],Lno) when $a =< H , H =< $z ->
+ {X, T1} = get_name(T, [H]),
+ [{identifier,Lno, list_to_atom(X)}|tokenise(T1,Lno)];
+
+tokenise([$&,H|T],Lno) when $A =< H , H =< $Z ->
+ {Y, T1} = get_name(T, [H]),
+ X = list_to_atom(Y),
+ [{typefieldreference, Lno, X} | tokenise(T1, Lno)];
+
+tokenise([$&,H|T],Lno) when $a =< H , H =< $z ->
+ {Y, T1} = get_name(T, [H]),
+ X = list_to_atom(Y),
+ [{valuefieldreference, Lno, X} | tokenise(T1, Lno)];
+
+tokenise([H|T],Lno) when $A =< H , H =< $Z ->
+ {Y, T1} = get_name(T, [H]),
+ X = list_to_atom(Y),
+ case reserved_word(X) of
+ true ->
+ [{X,Lno}|tokenise(T1,Lno)];
+ false ->
+ [{typereference,Lno,X}|tokenise(T1,Lno)];
+ rstrtype ->
+ [{restrictedcharacterstringtype,Lno,X}|tokenise(T1,Lno)]
+ end;
+
+tokenise([$-,H|T],Lno) when $0 =< H , H =< $9 ->
+ {X, T1} = get_number(T, [H]),
+ [{number,Lno,-1 * list_to_integer(X)}|tokenise(T1,Lno)];
+
+tokenise([H|T],Lno) when $0 =< H , H =< $9 ->
+ {X, T1} = get_number(T, [H]),
+ [{number,Lno,list_to_integer(X)}|tokenise(T1,Lno)];
+
+tokenise([$-,$-|T],Lno) ->
+ tokenise(skip_comment(T),Lno);
+tokenise([$:,$:,$=|T],Lno) ->
+ [{'::=',Lno}|tokenise(T,Lno)];
+
+tokenise([$'|T],Lno) ->
+ case catch collect_quoted(T,Lno,[]) of
+ {'ERR',_} ->
+ throw({'ERR','bad_quote'});
+ {Thing, T1} ->
+ [Thing|tokenise(T1,Lno)]
+ end;
+
+tokenise([$"|T],Lno) ->
+ collect_string(T,Lno);
+
+tokenise([${|T],Lno) ->
+ [{'{',Lno}|tokenise(T,Lno)];
+
+tokenise([$}|T],Lno) ->
+ [{'}',Lno}|tokenise(T,Lno)];
+
+tokenise([$]|T],Lno) ->
+ [{']',Lno}|tokenise(T,Lno)];
+
+tokenise([$[|T],Lno) ->
+ [{'[',Lno}|tokenise(T,Lno)];
+
+tokenise([$,|T],Lno) ->
+ [{',',Lno}|tokenise(T,Lno)];
+
+tokenise([$(|T],Lno) ->
+ [{'(',Lno}|tokenise(T,Lno)];
+tokenise([$)|T],Lno) ->
+ [{')',Lno}|tokenise(T,Lno)];
+
+tokenise([$.,$.,$.|T],Lno) ->
+ [{'...',Lno}|tokenise(T,Lno)];
+
+tokenise([$.,$.|T],Lno) ->
+ [{'..',Lno}|tokenise(T,Lno)];
+
+tokenise([$.|T],Lno) ->
+ [{'.',Lno}|tokenise(T,Lno)];
+tokenise([$^|T],Lno) ->
+ [{'^',Lno}|tokenise(T,Lno)];
+tokenise([$!|T],Lno) ->
+ [{'!',Lno}|tokenise(T,Lno)];
+tokenise([$||T],Lno) ->
+ [{'|',Lno}|tokenise(T,Lno)];
+
+
+tokenise([H|T],Lno) ->
+ case white_space(H) of
+ true ->
+ tokenise(T,Lno);
+ false ->
+ [{list_to_atom([H]),Lno}|tokenise(T,Lno)]
+ end;
+tokenise([],_) ->
+ [].
+
+
+collect_string(L,Lno) ->
+ collect_string(L,Lno,[]).
+
+collect_string([],_,_) ->
+ throw({'ERR','bad_quote found eof'});
+
+collect_string([H|T],Lno,Str) ->
+ case H of
+ $" ->
+	    [{cstring,Lno,lists:reverse(Str)}|tokenise(T,Lno)];
+ Ch ->
+ collect_string(T,Lno,[Ch|Str])
+ end.
+
+
+
+% <name> is letters, digits and hyphens;
+% a hyphen may not be the last character, and two consecutive
+% hyphens are NOT allowed
+%
+% <identifier> ::= <lowercase> <name>
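+%
+% For example (illustrative calls):
+%   get_name("ge ::= INTEGER", "A")  gives  {"Age", " ::= INTEGER"}
+%   get_name("-Name X", "T")         gives  {"T-Name", " X"}
+%   get_name("--X", "T")             gives  {"T", "--X"}   (a double hyphen ends the name)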
+
+get_name([$-,Char|T], L) ->
+ case isalnum(Char) of
+ true ->
+ get_name(T,[Char,$-|L]);
+ false ->
+ {lists:reverse(L),[$-,Char|T]}
+ end;
+get_name([$-|T], L) ->
+ {lists:reverse(L),[$-|T]};
+get_name([Char|T], L) ->
+ case isalnum(Char) of
+ true ->
+ get_name(T,[Char|L]);
+ false ->
+ {lists:reverse(L),[Char|T]}
+ end;
+get_name([], L) ->
+ {lists:reverse(L), []}.
+
+
+isalnum(H) when $A =< H , H =< $Z ->
+ true;
+isalnum(H) when $a =< H , H =< $z ->
+ true;
+isalnum(H) when $0 =< H , H =< $9 ->
+ true;
+isalnum(_) ->
+ false.
+
+isdigit(H) when $0 =< H , H =< $9 ->
+ true;
+isdigit(_) ->
+ false.
+
+white_space(9) -> true;
+white_space(10) -> true;
+white_space(13) -> true;
+white_space(32) -> true;
+white_space(_) -> false.
+
+
+get_number([H|T], L) ->
+ case isdigit(H) of
+ true ->
+ get_number(T, [H|L]);
+ false ->
+ {lists:reverse(L), [H|T]}
+ end;
+get_number([], L) ->
+ {lists:reverse(L), []}.
+
+skip_comment([]) ->
+ [];
+skip_comment([$-,$-|T]) ->
+ T;
+skip_comment([_|T]) ->
+ skip_comment(T).
+
+collect_quoted([$',$B|T],Lno, L) ->
+ case check_bin(L) of
+ true ->
+ {{bstring,Lno, lists:reverse(L)}, T};
+ false ->
+ throw({'ERR',{invalid_binary_number, lists:reverse(L)}})
+ end;
+collect_quoted([$',$H|T],Lno, L) ->
+ case check_hex(L) of
+ true ->
+ {{hstring,Lno, lists:reverse(L)}, T};
+ false ->
+	    throw({'ERR',{invalid_hex_number, lists:reverse(L)}})
+ end;
+collect_quoted([H|T], Lno, L) ->
+ collect_quoted(T, Lno,[H|L]);
+collect_quoted([], _, _) -> % This should be allowed FIX later
+ throw({'ERR',{eol_in_token}}).
+
+check_bin([$0|T]) ->
+ check_bin(T);
+check_bin([$1|T]) ->
+ check_bin(T);
+check_bin([]) ->
+ true;
+check_bin(_) ->
+ false.
+
+check_hex([H|T]) when $0 =< H , H =< $9 ->
+ check_hex(T);
+check_hex([H|T]) when $A =< H , H =< $F ->
+ check_hex(T);
+check_hex([]) ->
+ true;
+check_hex(_) ->
+ false.
+
+
+%% reserved_word(A) -> true|false|rstrtype
+%% A = atom()
+%% returns true if A is a reserved ASN.1 word
+%% returns false if A is not a reserved word
+%% returns rstrtype if A is a reserved word in the group
+%% RestrictedCharacterStringType
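+%%
+%% For example:
+%%   reserved_word('INTEGER')   -> true
+%%   reserved_word('IA5String') -> rstrtype
+%%   reserved_word('MyType')    -> false    ('MyType' being a made-up name)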
+reserved_word('ABSENT') -> true;
+%reserved_word('ABSTRACT-SYNTAX') -> true; % impl as predef item
+reserved_word('ALL') -> true;
+reserved_word('ANY') -> true;
+reserved_word('APPLICATION') -> true;
+reserved_word('AUTOMATIC') -> true;
+reserved_word('BEGIN') -> true;
+reserved_word('BIT') -> true;
+reserved_word('BMPString') -> rstrtype;
+reserved_word('BOOLEAN') -> true;
+reserved_word('BY') -> true;
+reserved_word('CHARACTER') -> true;
+reserved_word('CHOICE') -> true;
+reserved_word('CLASS') -> true;
+reserved_word('COMPONENT') -> true;
+reserved_word('COMPONENTS') -> true;
+reserved_word('CONSTRAINED') -> true;
+reserved_word('DEFAULT') -> true;
+reserved_word('DEFINED') -> true;
+reserved_word('DEFINITIONS') -> true;
+reserved_word('EMBEDDED') -> true;
+reserved_word('END') -> true;
+reserved_word('ENUMERATED') -> true;
+reserved_word('EXCEPT') -> true;
+reserved_word('EXPLICIT') -> true;
+reserved_word('EXPORTS') -> true;
+reserved_word('EXTERNAL') -> true;
+reserved_word('FALSE') -> true;
+reserved_word('FROM') -> true;
+reserved_word('GeneralizedTime') -> true;
+reserved_word('GeneralString') -> rstrtype;
+reserved_word('GraphicString') -> rstrtype;
+reserved_word('IA5String') -> rstrtype;
+% reserved_word('TYPE-IDENTIFIER') -> true; % impl as predef item
+reserved_word('IDENTIFIER') -> true;
+reserved_word('IMPLICIT') -> true;
+reserved_word('IMPORTS') -> true;
+reserved_word('INCLUDES') -> true;
+reserved_word('INSTANCE') -> true;
+reserved_word('INTEGER') -> true;
+reserved_word('INTERSECTION') -> true;
+reserved_word('ISO646String') -> rstrtype;
+reserved_word('MAX') -> true;
+reserved_word('MIN') -> true;
+reserved_word('MINUS-INFINITY') -> true;
+reserved_word('NULL') -> true;
+reserved_word('NumericString') -> rstrtype;
+reserved_word('OBJECT') -> true;
+reserved_word('ObjectDescriptor') -> true;
+reserved_word('OCTET') -> true;
+reserved_word('OF') -> true;
+reserved_word('OPTIONAL') -> true;
+reserved_word('PDV') -> true;
+reserved_word('PLUS-INFINITY') -> true;
+reserved_word('PRESENT') -> true;
+reserved_word('PrintableString') -> rstrtype;
+reserved_word('PRIVATE') -> true;
+reserved_word('REAL') -> true;
+reserved_word('SEQUENCE') -> true;
+reserved_word('SET') -> true;
+reserved_word('SIZE') -> true;
+reserved_word('STRING') -> true;
+reserved_word('SYNTAX') -> true;
+reserved_word('T61String') -> rstrtype;
+reserved_word('TAGS') -> true;
+reserved_word('TeletexString') -> rstrtype;
+reserved_word('TRUE') -> true;
+reserved_word('UNION') -> true;
+reserved_word('UNIQUE') -> true;
+reserved_word('UNIVERSAL') -> true;
+reserved_word('UniversalString') -> rstrtype;
+reserved_word('UTCTime') -> true;
+reserved_word('VideotexString') -> rstrtype;
+reserved_word('VisibleString') -> rstrtype;
+reserved_word('WITH') -> true;
+reserved_word(_) -> false.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_value.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_value.erl
new file mode 100644
index 0000000000..3d366a1a27
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_value.erl
@@ -0,0 +1,330 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1ct_value.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+-module(asn1ct_value).
+
+%% Generate Erlang values for ASN.1 types.
+%% The value is randomized within its constraints.
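+%%
+%% For example, assuming the definitions of a (made-up) ASN.1 module
+%% 'MyModule' are available in asn1_db (as they are during asn1ct
+%% compilation), a randomized sample value of the type 'MyType' can be
+%% produced with
+%%
+%%   asn1ct_value:get_type('MyModule', 'MyType', no)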
+
+-include("asn1_records.hrl").
+%-compile(export_all).
+
+-export([get_type/3]).
+
+
+
+%% Generate examples of values ******************************
+%%****************************************
+
+
+get_type(M,Typename,Tellname) ->
+ case asn1_db:dbget(M,Typename) of
+ undefined ->
+ {asn1_error,{not_found,{M,Typename}}};
+ Tdef when record(Tdef,typedef) ->
+ Type = Tdef#typedef.typespec,
+ get_type(M,[Typename],Type,Tellname);
+ Err ->
+ {asn1_error,{other,Err}}
+ end.
+
+get_type(M,Typename,Type,Tellname) when record(Type,type) ->
+ InnerType = get_inner(Type#type.def),
+ case asn1ct_gen:type(InnerType) of
+ #'Externaltypereference'{module=Emod,type=Etype} ->
+ get_type(Emod,Etype,Tellname);
+ {_,user} ->
+ case Tellname of
+ yes -> {Typename,get_type(M,InnerType,no)};
+ no -> get_type(M,InnerType,no)
+ end;
+ {notype,_} ->
+ true;
+ {primitive,bif} ->
+ get_type_prim(Type);
+ 'ASN1_OPEN_TYPE' ->
+ case Type#type.constraint of
+ [#'Externaltypereference'{type=TrefConstraint}] ->
+ get_type(M,TrefConstraint,no);
+ _ ->
+ "open_type"
+ end;
+ {constructed,bif} ->
+ get_type_constructed(M,Typename,InnerType,Type)
+ end;
+get_type(M,Typename,#'ComponentType'{name = Name,typespec = Type},_) ->
+ get_type(M,[Name|Typename],Type,no);
+get_type(_,_,_,_) -> % 'EXTENSIONMARK'
+ undefined.
+
+get_inner(A) when atom(A) -> A;
+get_inner(Ext) when record(Ext,'Externaltypereference') -> Ext;
+get_inner({typereference,_Pos,Name}) -> Name;
+get_inner(T) when tuple(T) ->
+ case asn1ct_gen:get_inner(T) of
+ {fixedtypevaluefield,_,Type} ->
+ Type#type.def;
+ {typefield,_FieldName} ->
+ 'ASN1_OPEN_TYPE';
+ Other ->
+ Other
+ end.
+%%get_inner(T) when tuple(T) -> element(1,T).
+
+
+
+get_type_constructed(M,Typename,InnerType,D) when record(D,type) ->
+ case InnerType of
+ 'SET' ->
+ get_sequence(M,Typename,D);
+ 'SEQUENCE' ->
+ get_sequence(M,Typename,D);
+ 'CHOICE' ->
+ get_choice(M,Typename,D);
+ 'SEQUENCE OF' ->
+ {_,Type} = D#type.def,
+ NameSuffix = asn1ct_gen:constructed_suffix(InnerType,Type#type.def),
+ get_sequence_of(M,Typename,D,NameSuffix);
+ 'SET OF' ->
+ {_,Type} = D#type.def,
+ NameSuffix = asn1ct_gen:constructed_suffix(InnerType,Type#type.def),
+ get_sequence_of(M,Typename,D,NameSuffix);
+ _ ->
+ exit({nyi,InnerType})
+ end.
+
+get_sequence(M,Typename,Type) ->
+ {_SEQorSET,CompList} =
+ case Type#type.def of
+ #'SEQUENCE'{components=Cl} -> {'SEQUENCE',Cl};
+ #'SET'{components=Cl} -> {'SET',Cl}
+ end,
+ case get_components(M,Typename,CompList) of
+ [] ->
+ {list_to_atom(asn1ct_gen:list2rname(Typename))};
+ C ->
+ list_to_tuple([list_to_atom(asn1ct_gen:list2rname(Typename))|C])
+ end.
+
+get_components(M,Typename,{Root,Ext}) ->
+ get_components(M,Typename,Root++Ext);
+
+%% Should enhance this *** HERE *** with proper handling of extensions
+
+get_components(M,Typename,[H|T]) ->
+ [get_type(M,Typename,H,no)|
+ get_components(M,Typename,T)];
+get_components(_,_,[]) ->
+ [].
+
+get_choice(M,Typename,Type) ->
+ {'CHOICE',TCompList} = Type#type.def,
+ case TCompList of
+ [] ->
+ {asn1_EMPTY,asn1_EMPTY};
+ {CompList,ExtList} -> % Should be enhanced to handle extensions too
+ CList = CompList ++ ExtList,
+ C = lists:nth(random(length(CList)),CList),
+ {C#'ComponentType'.name,get_type(M,Typename,C,no)};
+ CompList when list(CompList) ->
+ C = lists:nth(random(length(CompList)),CompList),
+ {C#'ComponentType'.name,get_type(M,Typename,C,no)}
+ end.
+
+get_sequence_of(M,Typename,Type,TypeSuffix) ->
+ %% should generate length according to constraints later
+ {_,Oftype} = Type#type.def,
+ C = Type#type.constraint,
+ S = size_random(C),
+ NewTypeName = [TypeSuffix|Typename],
+ gen_list(M,NewTypeName,Oftype,no,S).
+
+gen_list(_,_,_,_,0) ->
+ [];
+gen_list(M,Typename,Oftype,Tellname,N) ->
+ [get_type(M,Typename,Oftype,no)|gen_list(M,Typename,Oftype,Tellname,N-1)].
+
+get_type_prim(D) ->
+ C = D#type.constraint,
+ case D#type.def of
+ 'INTEGER' ->
+ i_random(C);
+ {'INTEGER',NamedNumberList} ->
+ NN = [X||{X,_} <- NamedNumberList],
+ case NN of
+ [] ->
+ i_random(C);
+ _ ->
+ lists:nth(random(length(NN)),NN)
+ end;
+ Enum when tuple(Enum),element(1,Enum)=='ENUMERATED' ->
+ NamedNumberList =
+ case Enum of
+ {_,_,NNL} -> NNL;
+ {_,NNL} -> NNL
+ end,
+ NNew=
+ case NamedNumberList of
+ {N1,N2} ->
+ N1 ++ N2;
+ _->
+ NamedNumberList
+ end,
+ NN = [X||{X,_} <- NNew],
+ case NN of
+ [] ->
+ asn1_EMPTY;
+ _ ->
+ lists:nth(random(length(NN)),NN)
+ end;
+ {'BIT STRING',NamedNumberList} ->
+%% io:format("get_type_prim 1: ~w~n",[NamedNumberList]),
+ NN = [X||{X,_} <- NamedNumberList],
+ case NN of
+ [] ->
+ Bl1 =lists:reverse(adjust_list(size_random(C),[1,0,1,1])),
+ lists:reverse(lists:dropwhile(fun(0)->true;(1)->false end,Bl1));
+ _ ->
+%% io:format("get_type_prim 2: ~w~n",[NN]),
+ [lists:nth(random(length(NN)),NN)]
+ end;
+ 'ANY' ->
+ exit({asn1_error,nyi,'ANY'});
+ 'NULL' ->
+ 'NULL';
+ 'OBJECT IDENTIFIER' ->
+ Len = random(3),
+ Olist = [(random(1000)-1)||_X <-lists:seq(1,Len)],
+ list_to_tuple([random(3)-1,random(40)-1|Olist]);
+ 'ObjectDescriptor' ->
+ object_descriptor_nyi;
+ 'BOOLEAN' ->
+ true;
+ 'OCTET STRING' ->
+ adjust_list(size_random(C),c_string(C,"OCTET STRING"));
+ 'NumericString' ->
+ adjust_list(size_random(C),c_string(C,"0123456789"));
+ 'TeletexString' ->
+ adjust_list(size_random(C),c_string(C,"TeletexString"));
+ 'VideotexString' ->
+ adjust_list(size_random(C),c_string(C,"VideotexString"));
+ 'UTCTime' ->
+ "97100211-0500";
+ 'GeneralizedTime' ->
+ "19971002103130.5";
+ 'GraphicString' ->
+ adjust_list(size_random(C),c_string(C,"GraphicString"));
+ 'VisibleString' ->
+ adjust_list(size_random(C),c_string(C,"VisibleString"));
+ 'GeneralString' ->
+ adjust_list(size_random(C),c_string(C,"GeneralString"));
+ 'PrintableString' ->
+ adjust_list(size_random(C),c_string(C,"PrintableString"));
+ 'IA5String' ->
+ adjust_list(size_random(C),c_string(C,"IA5String"));
+ 'BMPString' ->
+ adjust_list(size_random(C),c_string(C,"BMPString"));
+ 'UniversalString' ->
+ adjust_list(size_random(C),c_string(C,"UniversalString"));
+ XX ->
+ exit({asn1_error,nyi,XX})
+ end.
+
+c_string(undefined,Default) ->
+ Default;
+c_string(C,Default) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ {'SingleValue',Sv} when list(Sv) ->
+ Sv;
+ {'SingleValue',V} when integer(V) ->
+ [V];
+ no ->
+ Default
+ end.
+
+random(Upper) ->
+ {A1,A2,A3} = erlang:now(),
+ random:seed(A1,A2,A3),
+ random:uniform(Upper).
+
+size_random(C) ->
+ case get_constraint(C,'SizeConstraint') of
+ no ->
+ c_random({0,5},no);
+ {Lb,Ub} when Ub-Lb =< 4 ->
+ c_random({Lb,Ub},no);
+ {Lb,_} ->
+ c_random({Lb,Lb+4},no);
+ Sv ->
+ c_random(no,Sv)
+ end.
+
+i_random(C) ->
+ c_random(get_constraint(C,'ValueRange'),get_constraint(C,'SingleValue')).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% c_random(Range,SingleValue)
+%% only called from other X_random functions
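+%%
+%% Rough illustration (results are random, so these are only examples):
+%%   c_random({1,10},no)  -> an integer in 1..10
+%%   c_random(no,[2,4,8]) -> one of 2, 4 or 8
+%%   c_random(no,no)      -> an integer in roughly -2^27..2^27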
+
+c_random(VRange,Single) ->
+ case {VRange,Single} of
+ {no,no} ->
+ random(16#fffffff) - (16#fffffff bsr 1);
+ {R,no} ->
+ case R of
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ Range = Ub - Lb +1,
+ Lb + (random(Range)-1);
+ {Lb,'MAX'} ->
+ Lb + random(16#fffffff)-1;
+ {'MIN',Ub} ->
+ Ub - random(16#fffffff)-1;
+ {A,{'ASN1_OK',B}} ->
+ Range = B - A +1,
+ A + (random(Range)-1)
+ end;
+ {_,S} when integer(S) ->
+ S;
+ {_,S} when list(S) ->
+ lists:nth(random(length(S)),S)
+%% {S1,S2} ->
+%% io:format("asn1ct_value: hejsan hoppsan~n");
+%% _ ->
+%% io:format("asn1ct_value: hejsan hoppsan 2~n")
+%% io:format("asn1ct_value: c_random/2: S1 = ~w~n"
+%% "S2 = ~w,~n",[S1,S2])
+%% exit(self(),goodbye)
+ end.
+
+adjust_list(Len,Orig) ->
+ adjust_list1(Len,Orig,Orig,[]).
+
+adjust_list1(0,_Orig,[_Oh|_Ot],Acc) ->
+ lists:reverse(Acc);
+adjust_list1(Len,Orig,[],Acc) ->
+ adjust_list1(Len,Orig,Orig,Acc);
+adjust_list1(Len,Orig,[Oh|Ot],Acc) ->
+ adjust_list1(Len-1,Orig,Ot,[Oh|Acc]).
+
+
+get_constraint(C,Key) ->
+ case lists:keysearch(Key,1,C) of
+ false ->
+ no;
+ {value,{_,V}} ->
+ V
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt.erl
new file mode 100644
index 0000000000..efac8daf6b
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt.erl
@@ -0,0 +1,69 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1rt.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+-module(asn1rt).
+
+%% Runtime functions for ASN.1 (i.e. encode, decode)
+
+-export([encode/2,encode/3,decode/3,load_driver/0,unload_driver/0,info/1]).
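+
+%% Typical use (a sketch; 'MyModule' and 'MyType' stand for a module and a
+%% type generated by the ASN.1 compiler and are not defined here; the exact
+%% return value is whatever the generated encode/2 and decode/2 return,
+%% typically {ok,Bytes} and {ok,Value}):
+%%   {ok,Bytes} = asn1rt:encode('MyModule', 'MyType', Value),
+%%   {ok,Value} = asn1rt:decode('MyModule', 'MyType', Bytes).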
+
+encode(Module,{Type,Term}) ->
+ encode(Module,Type,Term).
+
+encode(Module,Type,Term) ->
+ case catch apply(Module,encode,[Type,Term]) of
+ {'EXIT',undef} ->
+ {error,{asn1,{undef,Module,Type}}};
+ Result ->
+ Result
+ end.
+
+decode(Module,Type,Bytes) ->
+ case catch apply(Module,decode,[Type,Bytes]) of
+ {'EXIT',undef} ->
+ {error,{asn1,{undef,Module,Type}}};
+ Result ->
+ Result
+ end.
+
+load_driver() ->
+ asn1rt_driver_handler:load_driver(),
+ receive
+ driver_ready ->
+ ok;
+ Err={error,_Reason} ->
+ Err;
+ Error ->
+ {error,Error}
+ end.
+
+unload_driver() ->
+ case catch asn1rt_driver_handler:unload_driver() of
+ ok ->
+ ok;
+ Error ->
+ {error,Error}
+ end.
+
+
+info(Module) ->
+ case catch apply(Module,info,[]) of
+ {'EXIT',{undef,_Reason}} ->
+ {error,{asn1,{undef,Module,info}}};
+ Result ->
+ {ok,Result}
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl
new file mode 100644
index 0000000000..6064515a7e
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl
@@ -0,0 +1,2310 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1rt_ber_bin.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+-module(asn1rt_ber_bin).
+
+%% encoding / decoding of BER
+
+-export([decode/1]).
+-export([fixoptionals/2,split_list/2,cindex/3,restbytes2/3,
+ list_to_record/2,
+ encode_tag_val/1,decode_tag/1,peek_tag/1,
+ check_tags/3, encode_tags/3]).
+-export([encode_boolean/2,decode_boolean/3,
+ encode_integer/3,encode_integer/4,
+ decode_integer/4,decode_integer/5,encode_enumerated/2,
+ encode_enumerated/4,decode_enumerated/5,
+ encode_real/2,decode_real/4,
+ encode_bit_string/4,decode_bit_string/6,
+ decode_compact_bit_string/6,
+ encode_octet_string/3,decode_octet_string/5,
+ encode_null/2,decode_null/3,
+ encode_object_identifier/2,decode_object_identifier/3,
+ encode_restricted_string/4,decode_restricted_string/6,
+ encode_universal_string/3,decode_universal_string/5,
+ encode_BMP_string/3,decode_BMP_string/5,
+ encode_generalized_time/3,decode_generalized_time/5,
+ encode_utc_time/3,decode_utc_time/5,
+ encode_length/1,decode_length/1,
+ check_if_valid_tag/3,
+ decode_tag_and_length/1, decode_components/6,
+ decode_components/7, decode_set/6]).
+
+-export([encode_open_type/1,encode_open_type/2,decode_open_type/1,decode_open_type/2,decode_open_type/3]).
+-export([skipvalue/1, skipvalue/2]).
+
+-include("asn1_records.hrl").
+
+% the encoding of class of tag bits 8 and 7
+-define(UNIVERSAL, 0).
+-define(APPLICATION, 16#40).
+-define(CONTEXT, 16#80).
+-define(PRIVATE, 16#C0).
+
+%%% primitive or constructed encoding % bit 6
+-define(PRIMITIVE, 0).
+-define(CONSTRUCTED, 2#00100000).
+
+%%% The tag-number for universal types
+-define(N_BOOLEAN, 1).
+-define(N_INTEGER, 2).
+-define(N_BIT_STRING, 3).
+-define(N_OCTET_STRING, 4).
+-define(N_NULL, 5).
+-define(N_OBJECT_IDENTIFIER, 6).
+-define(N_OBJECT_DESCRIPTOR, 7).
+-define(N_EXTERNAL, 8).
+-define(N_REAL, 9).
+-define(N_ENUMERATED, 10).
+-define(N_EMBEDDED_PDV, 11).
+-define(N_SEQUENCE, 16).
+-define(N_SET, 17).
+-define(N_NumericString, 18).
+-define(N_PrintableString, 19).
+-define(N_TeletexString, 20).
+-define(N_VideotexString, 21).
+-define(N_IA5String, 22).
+-define(N_UTCTime, 23).
+-define(N_GeneralizedTime, 24).
+-define(N_GraphicString, 25).
+-define(N_VisibleString, 26).
+-define(N_GeneralString, 27).
+-define(N_UniversalString, 28).
+-define(N_BMPString, 30).
+
+
+% the complete tag-word of built-in types
+-define(T_BOOLEAN, ?UNIVERSAL bor ?PRIMITIVE bor 1).
+-define(T_INTEGER, ?UNIVERSAL bor ?PRIMITIVE bor 2).
+-define(T_BIT_STRING, ?UNIVERSAL bor ?PRIMITIVE bor 3). % can be CONSTRUCTED
+-define(T_OCTET_STRING, ?UNIVERSAL bor ?PRIMITIVE bor 4). % can be CONSTRUCTED
+-define(T_NULL, ?UNIVERSAL bor ?PRIMITIVE bor 5).
+-define(T_OBJECT_IDENTIFIER,?UNIVERSAL bor ?PRIMITIVE bor 6).
+-define(T_OBJECT_DESCRIPTOR,?UNIVERSAL bor ?PRIMITIVE bor 7).
+-define(T_EXTERNAL, ?UNIVERSAL bor ?PRIMITIVE bor 8).
+-define(T_REAL, ?UNIVERSAL bor ?PRIMITIVE bor 9).
+-define(T_ENUMERATED, ?UNIVERSAL bor ?PRIMITIVE bor 10).
+-define(T_EMBEDDED_PDV, ?UNIVERSAL bor ?PRIMITIVE bor 11).
+-define(T_SEQUENCE, ?UNIVERSAL bor ?CONSTRUCTED bor 16).
+-define(T_SET, ?UNIVERSAL bor ?CONSTRUCTED bor 17).
+-define(T_NumericString, ?UNIVERSAL bor ?PRIMITIVE bor 18). %can be constructed
+-define(T_PrintableString, ?UNIVERSAL bor ?PRIMITIVE bor 19). %can be constructed
+-define(T_TeletexString, ?UNIVERSAL bor ?PRIMITIVE bor 20). %can be constructed
+-define(T_VideotexString, ?UNIVERSAL bor ?PRIMITIVE bor 21). %can be constructed
+-define(T_IA5String, ?UNIVERSAL bor ?PRIMITIVE bor 22). %can be constructed
+-define(T_UTCTime, ?UNIVERSAL bor ?PRIMITIVE bor 23).
+-define(T_GeneralizedTime, ?UNIVERSAL bor ?PRIMITIVE bor 24).
+-define(T_GraphicString, ?UNIVERSAL bor ?PRIMITIVE bor 25). %can be constructed
+-define(T_VisibleString, ?UNIVERSAL bor ?PRIMITIVE bor 26). %can be constructed
+-define(T_GeneralString, ?UNIVERSAL bor ?PRIMITIVE bor 27). %can be constructed
+-define(T_UniversalString, ?UNIVERSAL bor ?PRIMITIVE bor 28). %can be constructed
+-define(T_BMPString, ?UNIVERSAL bor ?PRIMITIVE bor 30). %can be constructed
+
+
+decode(Bin) ->
+ decode_primitive(Bin).
+
+decode_primitive(Bin) ->
+ {Tlv = {Tag,Len,V},<<>>} = decode_tlv(Bin),
+ case element(2,Tag) of
+ ?CONSTRUCTED ->
+ {Tag,Len,decode_constructed(V)};
+ _ ->
+ Tlv
+ end.
+
+decode_constructed(<<>>) ->
+ [];
+decode_constructed(Bin) ->
+ {Tlv = {Tag,Len,V},Rest} = decode_tlv(Bin),
+ NewTlv =
+ case element(2,Tag) of
+ ?CONSTRUCTED ->
+ {Tag,Len,decode_constructed(V)};
+ _ ->
+ Tlv
+ end,
+ [NewTlv|decode_constructed(Rest)].
+
+decode_tlv(Bin) ->
+ {Tag,Bin1,_Rb1} = decode_tag(Bin),
+ {{Len,Bin2},_Rb2} = decode_length(Bin1),
+ <<V:Len/binary,Bin3/binary>> = Bin2,
+ {{Tag,Len,V},Bin3}.
+
+
+
+%%%%%%%%%%%%%
+% split_list(List,HeadLen) -> {HeadList,TailList}
+%
+% splits List into HeadList (Length=HeadLen) and TailList
+% if HeadLen == indefinite -> return {List,indefinite}
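+%
+% For example, split_list(<<1,2,3,4>>, 2) gives {<<1,2>>,<<3,4>>} and
+% split_list([1,2,3,4], indefinite) gives {[1,2,3,4],indefinite}.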
+split_list(List,indefinite) ->
+ {List, indefinite};
+split_list(Bin, Len) when binary(Bin) ->
+ split_binary(Bin,Len);
+split_list(List,Len) ->
+ {lists:sublist(List,Len),lists:nthtail(Len,List)}.
+
+
+%%% new function which fixes a bug regarding indefinite length decoding
+restbytes2(indefinite,<<0,0,RemBytes/binary>>,_) ->
+ {RemBytes,2};
+restbytes2(indefinite,RemBytes,ext) ->
+ skipvalue(indefinite,RemBytes);
+restbytes2(RemBytes,<<>>,_) ->
+ {RemBytes,0};
+restbytes2(_RemBytes,Bytes,noext) ->
+ exit({error,{asn1, {unexpected,Bytes}}});
+restbytes2(RemBytes,_Bytes,ext) ->
+ {RemBytes,0}.
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% skipvalue(Length, Bytes) -> {RemainingBytes, RemovedNumberOfBytes}
+%%
+%% skips the one complete (could be nested) TLV from Bytes
+%% handles both definite and indefinite length encodings
+%%
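+%% For instance, for a definite-length TLV such as
+%% <<4,2,16#AB,16#CD,Rest/binary>> (an OCTET STRING with two content
+%% octets), skipvalue/1 should return {Rest,4}: the bytes after the TLV
+%% and the four bytes that were skipped.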
+
+skipvalue(L, Bytes) ->
+ skipvalue(L, Bytes, 0).
+
+skipvalue(indefinite, Bytes, Rb) ->
+ {_T,Bytes2,R2} = decode_tag(Bytes),
+ {{L,Bytes3},R3} = decode_length(Bytes2),
+ {Bytes4,Rb4} = case L of
+ indefinite ->
+ skipvalue(indefinite,Bytes3,R2+R3);
+ _ ->
+ <<_:L/binary, RestBytes/binary>> = Bytes3,
+ {RestBytes, R2+R3+L}
+ end,
+ case Bytes4 of
+ <<0,0,Bytes5/binary>> ->
+ {Bytes5,Rb+Rb4+2};
+ _ -> skipvalue(indefinite,Bytes4,Rb+Rb4)
+ end;
+skipvalue(L, Bytes, Rb) ->
+% <<Skip:L/binary, RestBytes/binary>> = Bytes,
+ <<_:L/binary, RestBytes/binary>> = Bytes,
+ {RestBytes,Rb+L}.
+
+%%skipvalue(indefinite, Bytes, Rb) ->
+%% {T,Bytes2,R2} = decode_tag(Bytes),
+%% {L,Bytes3,R3} = decode_length(Bytes2),
+%% {Bytes4,Rb4} = case L of
+%% indefinite ->
+%% skipvalue(indefinite,Bytes3,R2+R3);
+%% _ ->
+%% lists:nthtail(L,Bytes3) %% strange !?
+%% end,
+%% case Bytes4 of
+%% [0,0|Bytes5] ->
+%% {Bytes5,Rb4+2};
+%% _ -> skipvalue(indefinite,Bytes4,Rb4)
+%% end;
+%%skipvalue(L, Bytes, Rb) ->
+%% {lists:nthtail(L,Bytes),Rb+L}.
+
+skipvalue(Bytes) ->
+ {_T,Bytes2,R2} = decode_tag(Bytes),
+ {{L,Bytes3},R3} = decode_length(Bytes2),
+ skipvalue(L,Bytes3,R2+R3).
+
+
+cindex(Ix,Val,Cname) ->
+ case element(Ix,Val) of
+ {Cname,Val2} -> Val2;
+ X -> X
+ end.
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Optionals, preset not filled optionals with asn1_NOVALUE
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+% converts a list to a record if necessary
+list_to_record(Name,List) when list(List) ->
+ list_to_tuple([Name|List]);
+list_to_record(_Name,Tuple) when tuple(Tuple) ->
+ Tuple.
+
+
+fixoptionals(OptList,Val) when list(Val) ->
+ fixoptionals(OptList,Val,1,[],[]).
+
+fixoptionals([{Name,Pos}|Ot],[{Name,Val}|Vt],_Opt,Acc1,Acc2) ->
+ fixoptionals(Ot,Vt,Pos+1,[1|Acc1],[{Name,Val}|Acc2]);
+fixoptionals([{_Name,Pos}|Ot],V,Pos,Acc1,Acc2) ->
+ fixoptionals(Ot,V,Pos+1,[0|Acc1],[asn1_NOVALUE|Acc2]);
+fixoptionals(O,[Vh|Vt],Pos,Acc1,Acc2) ->
+ fixoptionals(O,Vt,Pos+1,Acc1,[Vh|Acc2]);
+fixoptionals([],[Vh|Vt],Pos,Acc1,Acc2) ->
+ fixoptionals([],Vt,Pos+1,Acc1,[Vh|Acc2]);
+fixoptionals([],[],_,_Acc1,Acc2) ->
+ % return Val as a record
+ list_to_tuple([asn1_RECORDNAME|lists:reverse(Acc2)]).
+
+
+%%encode_tag(TagClass(?UNI, APP etc), Form (?PRIM etc), TagInteger) ->
+%% 8bit Int | binary
+encode_tag_val({Class, Form, TagNo}) when (TagNo =< 30) ->
+ <<(Class bsr 6):2,(Form bsr 5):1,TagNo:5>>;
+
+encode_tag_val({Class, Form, TagNo}) ->
+ {Octets,_Len} = mk_object_val(TagNo),
+ BinOct = list_to_binary(Octets),
+ <<(Class bsr 6):2, (Form bsr 5):1, 31:5,BinOct/binary>>;
+
+%% assumes whole correct tag bitpattern, multiple of 8
+encode_tag_val(Tag) when (Tag =< 255) -> Tag; %% is this function ever used??!!
+%% assumes correct bitpattern of 0-5
+encode_tag_val(Tag) -> encode_tag_val2(Tag,[]).
+
+encode_tag_val2(Tag, OctAck) when (Tag =< 255) ->
+ [Tag | OctAck];
+encode_tag_val2(Tag, OctAck) ->
+ encode_tag_val2(Tag bsr 8, [255 band Tag | OctAck]).
+
+
+%%%encode_tag(TagClass(?UNI, APP etc), Form (?PRIM etc), TagInteger) ->
+%%% 8bit Int | [list of octets]
+%encode_tag_val({Class, Form, TagNo}) when (TagNo =< 30) ->
+%%% <<Class:2,Form:1,TagNo:5>>;
+% [Class bor Form bor TagNo];
+%encode_tag_val({Class, Form, TagNo}) ->
+% {Octets,L} = mk_object_val(TagNo),
+% [Class bor Form bor 31 | Octets];
+
+
+%%===============================================================================
+%% Peek on the initial tag
+%% peek_tag(Bytes) -> TagBytes
+%% interprets the first byte and possibly the second, third and fourth bytes as
+%% a tag and returns all the bytes comprising the tag; the constructed/primitive
+%% bit (bit 6 of the first byte) is normalised to 0
+%%
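+%% E.g. peek_tag(<<16#30>>) (a constructed UNIVERSAL SEQUENCE tag) returns
+%% <<16#10>>: tag number 16 with the constructed bit cleared.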
+
+peek_tag(<<B7_6:2,_:1,31:5,Buffer/binary>>) ->
+ Bin = peek_tag(Buffer, <<>>),
+ <<B7_6:2,31:6,Bin/binary>>;
+%% single tag (tagno < 31)
+peek_tag(<<B7_6:2,_:1,B4_0:5,_Buffer/binary>>) ->
+ <<B7_6:2,B4_0:6>>.
+
+peek_tag(<<0:1,PartialTag:7,_Buffer/binary>>, TagAck) ->
+ <<TagAck/binary,PartialTag>>;
+peek_tag(<<PartialTag,Buffer/binary>>, TagAck) ->
+ peek_tag(Buffer,<<TagAck/binary,PartialTag>>);
+peek_tag(_,TagAck) ->
+ exit({error,{asn1, {invalid_tag,TagAck}}}).
+%%peek_tag([Tag|Buffer]) when (Tag band 31) == 31 ->
+%% [Tag band 2#11011111 | peek_tag(Buffer,[])];
+%%%% single tag (tagno < 31)
+%%peek_tag([Tag|Buffer]) ->
+%% [Tag band 2#11011111].
+
+%%peek_tag([PartialTag|Buffer], TagAck) when (PartialTag < 128 ) ->
+%% lists:reverse([PartialTag|TagAck]);
+%%peek_tag([PartialTag|Buffer], TagAck) ->
+%% peek_tag(Buffer,[PartialTag|TagAck]);
+%%peek_tag(Buffer,TagAck) ->
+%% exit({error,{asn1, {invalid_tag,lists:reverse(TagAck)}}}).
+
+
+%%===============================================================================
+%% Decode a tag
+%%
+%% decode_tag(OctetListBuffer) -> {{Class, Form, TagNo}, RestOfBuffer, RemovedBytes}
+%%===============================================================================
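+%%
+%% E.g. decode_tag(<<16#30,Rest/binary>>) gives {{0,32,16},Rest,1}, i.e.
+%% class UNIVERSAL (0), constructed form (32) and tag number 16 (SEQUENCE).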
+
+%% multiple octet tag
+decode_tag(<<Class:2, Form:1, 31:5, Buffer/binary>>) ->
+ {TagNo, Buffer1, RemovedBytes} = decode_tag(Buffer, 0, 1),
+ {{(Class bsl 6), (Form bsl 5), TagNo}, Buffer1, RemovedBytes};
+
+%% single tag (< 31 tags)
+decode_tag(<<Class:2,Form:1,TagNo:5, Buffer/binary>>) ->
+ {{(Class bsl 6), (Form bsl 5), TagNo}, Buffer, 1}.
+
+%% last partial tag
+decode_tag(<<0:1,PartialTag:7, Buffer/binary>>, TagAck, RemovedBytes) ->
+ TagNo = (TagAck bsl 7) bor PartialTag,
+ %%<<TagNo>> = <<TagAck:1, PartialTag:7>>,
+ {TagNo, Buffer, RemovedBytes+1};
+% more tags
+decode_tag(<<_:1,PartialTag:7, Buffer/binary>>, TagAck, RemovedBytes) ->
+ TagAck1 = (TagAck bsl 7) bor PartialTag,
+ %%<<TagAck1:16>> = <<TagAck:1, PartialTag:7,0:8>>,
+ decode_tag(Buffer, TagAck1, RemovedBytes+1).
+
+%%------------------------------------------------------------------
+%% check_tags_i is the same as check_tags except that it stops and
+%% returns the remaining tags not checked when it encounters an
+%% indefinite length field
+%% only called internally within this module
+
+check_tags_i([Tag], Buffer, OptOrMand) -> % optimized very usual case
+ {[],check_one_tag(Tag, Buffer, OptOrMand)};
+check_tags_i(Tags, Buffer, OptOrMand) ->
+ check_tags_i(Tags, Buffer, 0, OptOrMand).
+
+check_tags_i([Tag1,Tag2|TagRest], Buffer, Rb, OptOrMand)
+ when Tag1#tag.type == 'IMPLICIT' ->
+ check_tags_i([Tag1#tag{type=Tag2#tag.type}|TagRest], Buffer, Rb, OptOrMand);
+
+check_tags_i([Tag1|TagRest], Buffer, Rb, OptOrMand) ->
+ {Form_Length,Buffer2,Rb1} = check_one_tag(Tag1, Buffer, OptOrMand),
+ case TagRest of
+ [] -> {TagRest, {Form_Length, Buffer2, Rb + Rb1}};
+ _ ->
+ case Form_Length of
+ {?CONSTRUCTED,_} ->
+ {TagRest, {Form_Length, Buffer2, Rb + Rb1}};
+ _ ->
+ check_tags_i(TagRest, Buffer2, Rb + Rb1, mandatory)
+ end
+ end;
+
+check_tags_i([], Buffer, Rb, _) ->
+ {[],{{0,0},Buffer,Rb}}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% This function is called from generated code
+
+check_tags([Tag], Buffer, OptOrMand) -> % optimized very usual case
+ check_one_tag(Tag, Buffer, OptOrMand);
+check_tags(Tags, Buffer, OptOrMand) ->
+ check_tags(Tags, Buffer, 0, OptOrMand).
+
+check_tags([Tag1,Tag2|TagRest], Buffer, Rb, OptOrMand)
+ when Tag1#tag.type == 'IMPLICIT' ->
+ check_tags([Tag1#tag{type=Tag2#tag.type}|TagRest], Buffer, Rb, OptOrMand);
+
+check_tags([Tag1|TagRest], Buffer, Rb, OptOrMand) ->
+ {Form_Length,Buffer2,Rb1} = check_one_tag(Tag1, Buffer, OptOrMand),
+ case TagRest of
+ [] -> {Form_Length, Buffer2, Rb + Rb1};
+ _ -> check_tags(TagRest, Buffer2, Rb + Rb1, mandatory)
+ end;
+
+check_tags([], Buffer, Rb, _) ->
+ {{0,0},Buffer,Rb}.
+
+check_one_tag(Tag=#tag{class=ExpectedClass,number=ExpectedNumber}, Buffer, OptOrMand) ->
+ case catch decode_tag(Buffer) of
+ {'EXIT',_Reason} ->
+ tag_error(no_data,Tag,Buffer,OptOrMand);
+ {{ExpectedClass,Form,ExpectedNumber},Buffer2,Rb} ->
+ {{L,Buffer3},RemBytes2} = decode_length(Buffer2),
+ {{Form,L}, Buffer3, RemBytes2+Rb};
+ {ErrorTag,_,_} ->
+ tag_error(ErrorTag, Tag, Buffer, OptOrMand)
+ end.
+
+tag_error(ErrorTag, Tag, Buffer, OptOrMand) ->
+ case OptOrMand of
+ mandatory ->
+ exit({error,{asn1, {invalid_tag,
+ {ErrorTag, Tag, Buffer}}}});
+ _ ->
+ exit({error,{asn1, {no_optional_tag,
+ {ErrorTag, Tag, Buffer}}}})
+ end.
+%%=======================================================================
+%%
+%% Encode all tags in the list Tags and return a possibly deep list of
+%% bytes with tag and length encoded
+%%
+%% prepend_tags(Tags, BytesSoFar, LenSoFar) -> {Bytes, Len}
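+%%
+%% Sketch of a call (assuming encode_length/1, defined further down,
+%% returns {[2],1} for the short-form length 2):
+%%   encode_tags([#tag{class=?UNIVERSAL,number=4,type='IMPLICIT',
+%%                     form=?PRIMITIVE}],
+%%               [<<16#AB,16#CD>>], 2)
+%% should give an iolist equivalent to <<4,2,16#AB,16#CD>> together with
+%% the total length 4.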
+encode_tags(Tags, BytesSoFar, LenSoFar) ->
+ NewTags = encode_tags1(Tags, []),
+ %% NewTags contains the resulting tags in reverse order
+ encode_tags2(NewTags, BytesSoFar, LenSoFar).
+
+%encode_tags2([#tag{class=?UNIVERSAL,number=No}|Trest], BytesSoFar, LenSoFar) ->
+% {Bytes2,L2} = encode_length(LenSoFar),
+% encode_tags2(Trest,[[No|Bytes2],BytesSoFar], LenSoFar + 1 + L2);
+encode_tags2([Tag|Trest], BytesSoFar, LenSoFar) ->
+ {Bytes1,L1} = encode_one_tag(Tag),
+ {Bytes2,L2} = encode_length(LenSoFar),
+ encode_tags2(Trest, [Bytes1,Bytes2|BytesSoFar],
+ LenSoFar + L1 + L2);
+encode_tags2([], BytesSoFar, LenSoFar) ->
+ {BytesSoFar,LenSoFar}.
+
+encode_tags1([Tag1, Tag2| Trest], Acc)
+ when Tag1#tag.type == 'IMPLICIT' ->
+ encode_tags1([Tag1#tag{type=Tag2#tag.type,form=Tag2#tag.form}|Trest],Acc);
+encode_tags1([Tag1 | Trest], Acc) ->
+ encode_tags1(Trest, [Tag1|Acc]);
+encode_tags1([], Acc) ->
+ Acc. % the resulting tags are returned in reverse order
+
+encode_one_tag(Bin) when binary(Bin) ->
+ {Bin,size(Bin)};
+encode_one_tag(#tag{class=Class,number=No,type=Type, form = Form}) ->
+ NewForm = case Type of
+ 'EXPLICIT' ->
+ ?CONSTRUCTED;
+ _ ->
+ Form
+ end,
+ Bytes = encode_tag_val({Class,NewForm,No}),
+ {Bytes,size(Bytes)}.
+
+%%===============================================================================
+%% Change the tag (used when an implicit tagged type has a reference to something else)
+%% The constructed bit in the tag is taken from the tag to be replaced.
+%%
+%% change_tag(NewTag,[Tag,Buffer]) -> [NewTag,Buffer]
+%%===============================================================================
+
+%change_tag({NewClass,NewTagNr}, Buffer) ->
+% {{OldClass, OldForm, OldTagNo}, Buffer1, RemovedBytes} = decode_tag(lists:flatten(Buffer)),
+% [encode_tag_val({NewClass, OldForm, NewTagNr}) | Buffer1].
+
+
+
+
+
+
+
+%%===============================================================================
+%%
+%% This comment is valid for all the encode/decode functions
+%%
+%% C = Constraint -> typically {'ValueRange',LowerBound,UpperBound}
+%% used for PER-coding but not for BER-coding.
+%%
+%% Val = Value. If Val is an atom then it is a symbolic integer value
+%% (i.e the atom must be one of the names in the NamedNumberList).
+%% The NamedNumberList is used to translate the atom to an integer value
+%% before encoding.
+%%
+%%===============================================================================
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_open_type(Value) -> CompleteList
+%% Value = list of bytes of an already encoded value (the list must be flat)
+%% | binary
+
+%% This version does not consider Explicit tagging of the open type. It
+%% is only left because of backward compatibility.
+encode_open_type(Val) when list(Val) ->
+ {Val,size(list_to_binary(Val))};
+encode_open_type(Val) ->
+ {Val, size(Val)}.
+
+%%
+encode_open_type(Val, []) when list(Val) ->
+ {Val,size(list_to_binary(Val))};
+encode_open_type(Val,[]) ->
+ {Val, size(Val)};
+encode_open_type(Val, Tag) when list(Val) ->
+ encode_tags(Tag,Val,size(list_to_binary(Val)));
+encode_open_type(Val,Tag) ->
+ encode_tags(Tag,Val, size(Val)).
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_open_type(Buffer) -> Value
+%% Bytes = [byte] with BER encoded data
+%% Value = [byte] with decoded data (which must be decoded again as some type)
+%%
+decode_open_type(Bytes) ->
+ {_Tag, Len, _RemainingBuffer, RemovedBytes} = decode_tag_and_length(Bytes),
+ N = Len + RemovedBytes,
+ <<Val:N/binary, RemainingBytes/binary>> = Bytes,
+ {Val, RemainingBytes, Len + RemovedBytes}.
+
+decode_open_type(Bytes,ExplTag) ->
+ {Tag, Len, RemainingBuffer, RemovedBytes} = decode_tag_and_length(Bytes),
+ case {Tag,ExplTag} of
+ {{Class,Form,No},[#tag{class=Class,number=No,form=Form}]} ->
+ {_Tag2, Len2, _RemainingBuffer2, RemovedBytes2} = decode_tag_and_length(RemainingBuffer),
+ N = Len2 + RemovedBytes2,
+ <<_:RemovedBytes/unit:8,Val:N/binary,RemainingBytes/binary>> = Bytes,
+ {Val, RemainingBytes, N + RemovedBytes};
+ _ ->
+ N = Len + RemovedBytes,
+ <<Val:N/binary, RemainingBytes/binary>> = Bytes,
+ {Val, RemainingBytes, Len + RemovedBytes}
+ end.
+
+decode_open_type(ber_bin,Bytes,ExplTag) ->
+ decode_open_type(Bytes,ExplTag);
+decode_open_type(ber,Bytes,ExplTag) ->
+ {Val,RemBytes,Len}=decode_open_type(Bytes,ExplTag),
+ {binary_to_list(Val),RemBytes,Len}.
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Boolean, ITU_T X.690 Chapter 8.2
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+%%===============================================================================
+%% encode_boolean(Integer, tag | notag) -> [octet list]
+%%===============================================================================
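+%% E.g. encode_boolean(true,[]) -> {[1,1,16#FF],3}, i.e. tag 1 (BOOLEAN),
+%% length 1 and the value octet 16#FF; false gets the value octet 0.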
+
+encode_boolean({Name, Val}, DoTag) when atom(Name) ->
+ dotag(DoTag, ?N_BOOLEAN, encode_boolean(Val));
+encode_boolean(true,[]) ->
+ {[1,1,16#FF],3};
+encode_boolean(false,[]) ->
+ {[1,1,0],3};
+encode_boolean(Val, DoTag) ->
+ dotag(DoTag, ?N_BOOLEAN, encode_boolean(Val)).
+
+%% encode_boolean(Boolean) -> {[Octet],1} where Octet is 16#FF (true) or 0 (false)
+encode_boolean(true) -> {[16#FF],1};
+encode_boolean(false) -> {[0],1};
+encode_boolean(X) -> exit({error,{asn1, {encode_boolean, X}}}).
+
+
+%%===============================================================================
+%% decode_boolean(BuffList, HasTag, TotalLen) -> {true, Remain, RemovedBytes} |
+%% {false, Remain, RemovedBytes}
+%%===============================================================================
+
+decode_boolean(Buffer, Tags, OptOrMand) ->
+ NewTags = new_tags(Tags,#tag{class=?UNIVERSAL,number=?N_BOOLEAN}),
+ decode_boolean_notag(Buffer, NewTags, OptOrMand).
+
+decode_boolean_notag(Buffer, Tags, OptOrMand) ->
+ {RestTags, {FormLen,Buffer0,Rb0}} =
+ check_tags_i(Tags, Buffer, OptOrMand),
+ case FormLen of
+ {?CONSTRUCTED,Len} ->
+ {Buffer00,RestBytes} = split_list(Buffer0,Len),
+ {Val,Buffer1,Rb1} = decode_boolean_notag(Buffer00, RestTags, OptOrMand),
+ {Buffer2, Rb2} = restbytes2(RestBytes,Buffer1,noext),
+ {Val, Buffer2, Rb0+Rb1+Rb2};
+ {_,_} ->
+ decode_boolean2(Buffer0, Rb0)
+ end.
+
+decode_boolean2(<<0:8, Buffer/binary>>, RemovedBytes) ->
+ {false, Buffer, RemovedBytes + 1};
+decode_boolean2(<<_:8, Buffer/binary>>, RemovedBytes) ->
+ {true, Buffer, RemovedBytes + 1};
+decode_boolean2(Buffer, _) ->
+ exit({error,{asn1, {decode_boolean, Buffer}}}).
+
+
+
+
+%%===========================================================================
+%% Integer, ITU_T X.690 Chapter 8.3
+
+%% encode_integer(Constraint, Value, Tag) -> [octet list]
+%% encode_integer(Constraint, Name, NamedNumberList, Tag) -> [octet list]
+%% Value = INTEGER | {Name,INTEGER}
+%% Tag = tag | notag
+%%===========================================================================
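+%%
+%% For instance (the exact nesting of the returned iolist depends on
+%% dotag_universal/3, defined further down):
+%%   encode_integer([], 5, [])   should give bytes equivalent to [2,1,5]
+%%   encode_integer([], 128, []) should give bytes equivalent to [2,2,0,128],
+%% since 128 needs a leading zero octet to stay positive in two's complement.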
+
+encode_integer(C, Val, []) when integer(Val) ->
+ {EncVal,Len}=encode_integer(C, Val),
+ dotag_universal(?N_INTEGER,EncVal,Len);
+encode_integer(C, Val, Tag) when integer(Val) ->
+ dotag(Tag, ?N_INTEGER, encode_integer(C, Val));
+encode_integer(C,{Name,Val},Tag) when atom(Name) ->
+ encode_integer(C,Val,Tag);
+encode_integer(_, Val, _) ->
+ exit({error,{asn1, {encode_integer, Val}}}).
+
+
+
+encode_integer(C, Val, NamedNumberList, Tag) when atom(Val) ->
+ case lists:keysearch(Val, 1, NamedNumberList) of
+ {value,{_, NewVal}} ->
+ dotag(Tag, ?N_INTEGER, encode_integer(C, NewVal));
+ _ ->
+ exit({error,{asn1, {encode_integer_namednumber, Val}}})
+ end;
+encode_integer(C,{_,Val},NamedNumberList,Tag) ->
+ encode_integer(C,Val,NamedNumberList,Tag);
+encode_integer(C, Val, _NamedNumberList, Tag) ->
+ dotag(Tag, ?N_INTEGER, encode_integer(C, Val)).
+
+
+
+
+encode_integer(_C, Val) ->
+ Bytes =
+ if
+ Val >= 0 ->
+ encode_integer_pos(Val, []);
+ true ->
+ encode_integer_neg(Val, [])
+ end,
+ {Bytes,length(Bytes)}.
+
+encode_integer_pos(0, L=[B|_Acc]) when B < 128 ->
+ L;
+encode_integer_pos(N, Acc) ->
+ encode_integer_pos((N bsr 8), [N band 16#ff| Acc]).
+
+encode_integer_neg(-1, L=[B1|_T]) when B1 > 127 ->
+ L;
+encode_integer_neg(N, Acc) ->
+ encode_integer_neg(N bsr 8, [N band 16#ff|Acc]).
+
+%%===============================================================================
+%% decode integer
+%% (Buffer, Range, HasTag, TotalLen) -> {Integer, Remain, RemovedBytes}
+%% (Buffer, Range, NamedNumberList, HasTag, TotalLen) -> {Integer, Remain, RemovedBytes}
+%%===============================================================================
+
+
+decode_integer(Buffer, Range, Tags, OptOrMand) ->
+ NewTags = new_tags(Tags,#tag{class=?UNIVERSAL,number=?N_INTEGER}),
+ decode_integer_notag(Buffer, Range, [], NewTags, OptOrMand).
+
+decode_integer(Buffer, Range, NamedNumberList, Tags, OptOrMand) ->
+ NewTags = new_tags(Tags,#tag{class=?UNIVERSAL,number=?N_INTEGER}),
+ decode_integer_notag(Buffer, Range, NamedNumberList, NewTags, OptOrMand).
+
+decode_integer_notag(Buffer, Range, NamedNumberList, NewTags, OptOrMand) ->
+ {RestTags, {FormLen, Buffer0, Rb0}} =
+ check_tags_i(NewTags, Buffer, OptOrMand),
+% Result = {Val, Buffer2, RemovedBytes} =
+ case FormLen of
+ {?CONSTRUCTED,Len} ->
+ {Buffer00, RestBytes} = split_list(Buffer0,Len),
+ {Val01, Buffer01, Rb01} =
+ decode_integer_notag(Buffer00, Range, NamedNumberList,
+ RestTags, OptOrMand),
+ {Buffer02, Rb02} = restbytes2(RestBytes,Buffer01,noext),
+ {Val01, Buffer02, Rb0+Rb01+Rb02};
+ {_, Len} ->
+ Result =
+ decode_integer2(Len,Buffer0,Rb0+Len),
+ Result2 = check_integer_constraint(Result,Range),
+ resolve_named_value(Result2,NamedNumberList)
+ end.
+
+resolve_named_value(Result={Val,Buffer,RemBytes},NamedNumberList) ->
+ case NamedNumberList of
+ [] -> Result;
+ _ ->
+ NewVal = case lists:keysearch(Val, 2, NamedNumberList) of
+ {value,{NamedVal, _}} ->
+ NamedVal;
+ _ ->
+ Val
+ end,
+ {NewVal, Buffer, RemBytes}
+ end.
+
+check_integer_constraint(Result={Val, _Buffer,_},Range) ->
+ case Range of
+ [] -> % No length constraint
+ Result;
+ {Lb,Ub} when Val >= Lb, Ub >= Val -> % variable length constraint
+ Result;
+ Val -> % fixed value constraint
+ Result;
+ {_,_} ->
+ exit({error,{asn1,{integer_range,Range,Val}}});
+ SingleValue when integer(SingleValue) ->
+ exit({error,{asn1,{integer_range,Range,Val}}});
+ _ -> % some strange constraint that we don't support yet
+ Result
+ end.
+
+%%============================================================================
+%% Enumerated value, ITU_T X.690 Chapter 8.4
+
+%% encode enumerated value
+%%============================================================================
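+%%
+%% E.g. encode_enumerated(2,[]) should give bytes equivalent to [10,1,2]
+%% (tag 10, length 1, value 2); the exact iolist nesting depends on
+%% dotag_universal/3.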
+encode_enumerated(Val, []) when integer(Val)->
+ {EncVal,Len} = encode_integer(false,Val),
+ dotag_universal(?N_ENUMERATED,EncVal,Len);
+encode_enumerated(Val, DoTag) when integer(Val)->
+ dotag(DoTag, ?N_ENUMERATED, encode_integer(false,Val));
+encode_enumerated({Name,Val}, DoTag) when atom(Name) ->
+ encode_enumerated(Val, DoTag).
+
+%% The encode_enumerated functions below this line can be removed when the
+%% new code generation is stable. (the functions might have to be kept here
+%% a while longer for compatibility reasons)
+
+encode_enumerated(C, Val, {NamedNumberList,ExtList}, DoTag) when atom(Val) ->
+ case catch encode_enumerated(C, Val, NamedNumberList, DoTag) of
+ {'EXIT',_} -> encode_enumerated(C, Val, ExtList, DoTag);
+ Result -> Result
+ end;
+
+encode_enumerated(C, Val, NamedNumberList, DoTag) when atom(Val) ->
+ case lists:keysearch(Val, 1, NamedNumberList) of
+ {value, {_, NewVal}} when DoTag == []->
+ {EncVal,Len} = encode_integer(C,NewVal),
+ dotag_universal(?N_ENUMERATED,EncVal,Len);
+ {value, {_, NewVal}} ->
+ dotag(DoTag, ?N_ENUMERATED, encode_integer(C, NewVal));
+ _ ->
+ exit({error,{asn1, {enumerated_not_in_range, Val}}})
+ end;
+
+encode_enumerated(C, {asn1_enum, Val}, {_,_}, DoTag) when integer(Val) ->
+ dotag(DoTag, ?N_ENUMERATED, encode_integer(C,Val));
+
+encode_enumerated(C, {Name,Val}, NamedNumberList, DoTag) when atom(Name) ->
+ encode_enumerated(C, Val, NamedNumberList, DoTag);
+
+encode_enumerated(_, Val, _, _) ->
+ exit({error,{asn1, {enumerated_not_namednumber, Val}}}).
+
+
+
+%%============================================================================
+%% decode enumerated value
+%% (Buffer, Range, NamedNumberList, HasTag, TotalLen) ->
+%% {Value, RemainingBuffer, RemovedBytes}
+%%===========================================================================
+decode_enumerated(Buffer, Range, NamedNumberList, Tags, OptOrMand) ->
+ NewTags = new_tags(Tags,#tag{class=?UNIVERSAL,number=?N_ENUMERATED}),
+ decode_enumerated_notag(Buffer, Range, NamedNumberList,
+ NewTags, OptOrMand).
+
+decode_enumerated_notag(Buffer, Range, NNList = {NamedNumberList,ExtList}, Tags, OptOrMand) ->
+ {RestTags, {FormLen, Buffer0, Rb0}} =
+ check_tags_i(Tags, Buffer, OptOrMand),
+
+ case FormLen of
+ {?CONSTRUCTED,Len} ->
+ {Buffer00,RestBytes} = split_list(Buffer0,Len),
+ {Val01, Buffer01, Rb01} =
+ decode_enumerated_notag(Buffer00, Range, NNList, RestTags, OptOrMand),
+ {Buffer02, Rb02} = restbytes2(RestBytes,Buffer01,noext),
+ {Val01, Buffer02, Rb0+Rb01+Rb02};
+ {_,Len} ->
+ {Val01, Buffer01, Rb01} =
+ decode_integer2(Len, Buffer0, Rb0+Len),
+ case decode_enumerated1(Val01, NamedNumberList) of
+ {asn1_enum,Val01} ->
+ {decode_enumerated1(Val01,ExtList), Buffer01, Rb01};
+ Result01 ->
+ {Result01, Buffer01, Rb01}
+ end
+ end;
+
+decode_enumerated_notag(Buffer, Range, NNList, Tags, OptOrMand) ->
+ {RestTags, {FormLen, Buffer0, Rb0}} =
+ check_tags_i(Tags, Buffer, OptOrMand),
+
+ case FormLen of
+ {?CONSTRUCTED,Len} ->
+ {Buffer00,RestBytes} = split_list(Buffer0,Len),
+ {Val01, Buffer01, Rb01} =
+ decode_enumerated_notag(Buffer00, Range, NNList, RestTags, OptOrMand),
+ {Buffer02, Rb02} = restbytes2(RestBytes,Buffer01,noext),
+ {Val01, Buffer02, Rb0+Rb01+Rb02};
+ {_,Len} ->
+ {Val01, Buffer02, Rb02} =
+ decode_integer2(Len, Buffer0, Rb0+Len),
+ case decode_enumerated1(Val01, NNList) of
+ {asn1_enum,_} ->
+ exit({error,{asn1, {illegal_enumerated, Val01}}});
+ Result01 ->
+ {Result01, Buffer02, Rb02}
+ end
+ end.
+
+decode_enumerated1(Val, NamedNumberList) ->
+ %% it must be a named integer
+ case lists:keysearch(Val, 2, NamedNumberList) of
+ {value,{NamedVal, _}} ->
+ NamedVal;
+ _ ->
+ {asn1_enum,Val}
+ end.
+
+
+%%============================================================================
+%%
+%% Real value, ITU_T X.690 Chapter 8.5
+%%============================================================================
+%%
+%% encode real value
+%%============================================================================
+
+%% only base 2 internally so far!!
+encode_real(0, DoTag) ->
+ dotag(DoTag, ?N_REAL, {[],0});
+encode_real('PLUS-INFINITY', DoTag) ->
+ dotag(DoTag, ?N_REAL, {[64],1});
+encode_real('MINUS-INFINITY', DoTag) ->
+ dotag(DoTag, ?N_REAL, {[65],1});
+encode_real(Val, DoTag) when tuple(Val)->
+ dotag(DoTag, ?N_REAL, encode_real(Val)).
+
+%%%%%%%%%%%%%%
+%% not optimally efficient..
+%% only base 2 of Mantissa encoding!
+%% only base 2 of ExpBase encoding!
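+%%
+%% Rough example, assuming minimum_octets/1 (defined further down in this
+%% module) returns [3] for the mantissa 3:
+%%   encode_real({3,2,1}) should give {<<2#10000000,1,3>>,3},
+%% i.e. the value 3 * 2^1 = 6.0 with a one-octet exponent of 1.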
+encode_real({Man, Base, Exp}) ->
+%% io:format("Mantissa: ~w Base: ~w, Exp: ~w~n",[Man, Base, Exp]),
+
+ OctExp = if Exp >= 0 -> list_to_binary(encode_integer_pos(Exp, []));
+ true -> list_to_binary(encode_integer_neg(Exp, []))
+ end,
+%% ok = io:format("OctExp: ~w~n",[OctExp]),
+ SignBit = if Man > 0 -> 0; % bit 7 is pos or neg, no Zeroval
+ true -> 1
+ end,
+%% ok = io:format("SignBitMask: ~w~n",[SignBitMask]),
+ InBase = if Base =:= 2 -> 0; % bit 6,5: only base 2 this far!
+ true ->
+ exit({error,{asn1, {encode_real_non_supported_encodeing, Base}}})
+ end,
+ SFactor = 0, % bit 4,3: no scaling since only base 2
+ OctExpLen = size(OctExp),
+ if OctExpLen > 255 ->
+ exit({error,{asn1, {to_big_exp_in_encode_real, OctExpLen}}});
+ true -> true %% make real assert later..
+ end,
+ {LenCode, EOctets} = case OctExpLen of % bit 2,1
+ 1 -> {0, OctExp};
+ 2 -> {1, OctExp};
+ 3 -> {2, OctExp};
+ _ -> {3, <<OctExpLen, OctExp/binary>>}
+ end,
+ FirstOctet = <<1:1,SignBit:1,InBase:2,SFactor:2,LenCode:2>>,
+ OctMantissa = if Man > 0 -> list_to_binary(minimum_octets(Man));
+ true -> list_to_binary(minimum_octets(-(Man))) % signbit keeps track of sign
+ end,
+ %% ok = io:format("LenMask: ~w EOctets: ~w~nFirstOctet: ~w OctMantissa: ~w OctExpLen: ~w~n", [LenMask, EOctets, FirstOctet, OctMantissa, OctExpLen]),
+ Bin = <<FirstOctet/binary, EOctets/binary, OctMantissa/binary>>,
+ {Bin, size(Bin)}.
+
+
+%encode_real({Man, Base, Exp}) ->
+%% io:format("Mantissa: ~w Base: ~w, Exp: ~w~n",[Man, Base, Exp]),
+
+% OctExp = if Exp >= 0 -> encode_integer_pos(Exp, []);
+% true -> encode_integer_neg(Exp, [])
+% end,
+%% ok = io:format("OctExp: ~w~n",[OctExp]),
+% SignBitMask = if Man > 0 -> 2#00000000; % bit 7 is pos or neg, no Zeroval
+% true -> 2#01000000
+% end,
+%% ok = io:format("SignBitMask: ~w~n",[SignBitMask]),
+% InternalBaseMask = if Base =:= 2 -> 2#00000000; % bit 6,5: only base 2 this far!
+% true ->
+% exit({error,{asn1, {encode_real_non_supported_encodeing, Base}}})
+% end,
+% ScalingFactorMask =2#00000000, % bit 4,3: no scaling since only base 2
+% OctExpLen = length(OctExp),
+% if OctExpLen > 255 ->
+% exit({error,{asn1, {to_big_exp_in_encode_real, OctExpLen}}});
+% true -> true %% make real assert later..
+% end,
+% {LenMask, EOctets} = case OctExpLen of % bit 2,1
+% 1 -> {0, OctExp};
+% 2 -> {1, OctExp};
+% 3 -> {2, OctExp};
+% _ -> {3, [OctExpLen, OctExp]}
+% end,
+% FirstOctet = (SignBitMask bor InternalBaseMask bor
+% ScalingFactorMask bor LenMask bor
+% 2#10000000), % bit set for binary mantissa encoding!
+% OctMantissa = if Man > 0 -> minimum_octets(Man);
+% true -> minimum_octets(-(Man)) % signbit keeps track of sign
+% end,
+%% ok = io:format("LenMask: ~w EOctets: ~w~nFirstOctet: ~w OctMantissa: ~w OctExpLen: ~w~n", [LenMask, EOctets, FirstOctet, OctMantissa, OctExpLen]),
+% {[FirstOctet, EOctets, OctMantissa],
+% length(OctMantissa) +
+% (if OctExpLen > 3 ->
+% OctExpLen + 2;
+% true ->
+% OctExpLen + 1
+% end)
+% }.
+
+
+%%============================================================================
+%% decode real value
+%%
+%% decode_real([OctetBufferList], tuple|value, tag|notag) ->
+%% {{Mantissa, Base, Exp} | realval | PLUS-INFINITY | MINUS-INFINITY | 0,
+%% RestBuff}
+%%
+%% only for base 2 decoding so far!!
+%%============================================================================
+
+decode_real(Buffer, Form, Tags, OptOrMand) ->
+ NewTags = new_tags(Tags,#tag{class=?UNIVERSAL,number=?N_REAL}),
+ decode_real_notag(Buffer, Form, NewTags, OptOrMand).
+
+decode_real_notag(Buffer, Form, Tags, OptOrMand) ->
+ {RestTags, {FormLen, Buffer0, Rb0}} =
+ check_tags_i(Tags, Buffer, OptOrMand),
+
+ case FormLen of
+ {?CONSTRUCTED,Len} ->
+ {Buffer00,RestBytes} = split_list(Buffer0,Len),
+ {Val01, Buffer01, Rb01} =
+ decode_real_notag(Buffer00, Form, RestTags, OptOrMand),
+ {Buffer02, Rb02} = restbytes2(RestBytes,Buffer01,noext),
+ {Val01, Buffer02, Rb0+Rb01+Rb02};
+ {_,Len} ->
+ decode_real2(Buffer0, Form, Len, Rb0)
+ end.
+
+decode_real2(Buffer0, Form, Len, RemBytes1) ->
+ <<First, Buffer2/binary>> = Buffer0,
+ if
+ First =:= 2#01000000 -> {'PLUS-INFINITY', Buffer2};
+ First =:= 2#01000001 -> {'MINUS-INFINITY', Buffer2};
+ First =:= 2#00000000 -> {0, Buffer2};
+ true ->
+ %% have some check here to verify only supported bases (2)
+ <<_B7:1,B6:1,B5_4:2,B3_2:2,B1_0:2>> = <<First>>,
+ Sign = B6,
+ Base =
+ case B5_4 of
+ 0 -> 2; % base 2, only one so far
+ _ -> exit({error,{asn1, {non_supported_base, First}}})
+ end,
+% ScalingFactor =
+ case B3_2 of
+ 0 -> 0; % no scaling so far
+ _ -> exit({error,{asn1, {non_supported_scaling, First}}})
+ end,
+ % ok = io:format("Buffer2: ~w~n",[Buffer2]),
+ {FirstLen, {Exp, Buffer3}, RemBytes2} =
+ case B1_0 of
+ 0 -> {2, decode_integer2(1, Buffer2, RemBytes1), RemBytes1+1};
+ 1 -> {3, decode_integer2(2, Buffer2, RemBytes1), RemBytes1+2};
+ 2 -> {4, decode_integer2(3, Buffer2, RemBytes1), RemBytes1+3};
+ 3 ->
+ <<ExpLen1,RestBuffer/binary>> = Buffer2,
+ { ExpLen1 + 2,
+ decode_integer2(ExpLen1, RestBuffer, RemBytes1),
+ RemBytes1+ExpLen1}
+ end,
+ % io:format("FirstLen: ~w, Exp: ~w, Buffer3: ~w ~n",
+ % [FirstLen, Exp, Buffer3]),
+ Length = Len - FirstLen,
+ <<LongInt:Length/unit:8,RestBuff/binary>> = Buffer3,
+ {{Mantissa, Buffer4}, RemBytes3} =
+ if Sign =:= 0 ->
+ % io:format("sign plus~n"),
+ {{LongInt, RestBuff}, 1 + Length};
+ true ->
+ % io:format("sign minus~n"),
+ {{-LongInt, RestBuff}, 1 + Length}
+ end,
+ % io:format("Form: ~w~n",[Form]),
+ case Form of
+ tuple ->
+ {Val,Buf,_RemB} = Exp,
+ {{Mantissa, Base, {Val,Buf}}, Buffer4, RemBytes2+RemBytes3};
+ _value ->
+ comming
+ end
+ end.
+
+
+%%============================================================================
+%% Bitstring value, ITU_T X.690 Chapter 8.6
+%%
+%% encode bitstring value
+%%
+%% bitstring NamedBitList
+%% Val can be of:
+%% - [identifiers] where only named identifiers are set to one,
+%% the Constraint must then have some information about the
+%% bitlength.
+%% - [list of ones and zeroes] all bits
+%% - integer value representing the bitlist
+%% C is constraint Len, only valid when identifiers
+%%============================================================================
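+%%
+%% E.g. with no constraint, encode_bit_string([], [1,0,1,1], [], []) should
+%% give bytes equivalent to [3,2,4,2#10110000] (tag 3, length 2, four unused
+%% bits and one content octet) together with the total length 4.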
+
+encode_bit_string(C,Bin={Unused,BinBits},NamedBitList,DoTag) when integer(Unused), binary(BinBits) ->
+ encode_bin_bit_string(C,Bin,NamedBitList,DoTag);
+encode_bit_string(C, [FirstVal | RestVal], NamedBitList, DoTag) when atom(FirstVal) ->
+ encode_bit_string_named(C, [FirstVal | RestVal], NamedBitList, DoTag);
+
+encode_bit_string(C, [{bit,X} | RestVal], NamedBitList, DoTag) ->
+ encode_bit_string_named(C, [{bit,X} | RestVal], NamedBitList, DoTag);
+
+encode_bit_string(C, [FirstVal| RestVal], NamedBitList, DoTag) when integer(FirstVal) ->
+ encode_bit_string_bits(C, [FirstVal | RestVal], NamedBitList, DoTag);
+
+encode_bit_string(_, 0, _, []) ->
+ {[?N_BIT_STRING,1,0],3};
+
+encode_bit_string(_, 0, _, DoTag) ->
+ dotag(DoTag, ?N_BIT_STRING, {<<0>>,1});
+
+encode_bit_string(_, [], _, []) ->
+ {[?N_BIT_STRING,1,0],3};
+
+encode_bit_string(_, [], _, DoTag) ->
+ dotag(DoTag, ?N_BIT_STRING, {<<0>>,1});
+
+encode_bit_string(C, IntegerVal, NamedBitList, DoTag) when integer(IntegerVal) ->
+ BitListVal = int_to_bitlist(IntegerVal),
+ encode_bit_string_bits(C, BitListVal, NamedBitList, DoTag);
+
+encode_bit_string(C, {Name,BitList}, NamedBitList, DoTag) when atom(Name) ->
+ encode_bit_string(C, BitList, NamedBitList, DoTag).
+
+
+
+int_to_bitlist(0) ->
+ [];
+int_to_bitlist(Int) when integer(Int), Int >= 0 ->
+ [Int band 1 | int_to_bitlist(Int bsr 1)].
+
+
+%%=================================================================
+%% Encode BIT STRING of the form {Unused,BinBits}.
+%% Unused is the number of unused bits in the last byte in BinBits
+%% and BinBits is a binary representing the BIT STRING.
+%%=================================================================
+encode_bin_bit_string(C,{Unused,BinBits},_NamedBitList,DoTag)->
+ case get_constraint(C,'SizeConstraint') of
+ no ->
+ remove_unused_then_dotag(DoTag,?N_BIT_STRING,Unused,BinBits);
+ {_Min,Max} ->
+ BBLen = (size(BinBits)*8)-Unused,
+ if
+ BBLen > Max ->
+ exit({error,{asn1,
+ {bitstring_length,
+ {{was,BBLen},{maximum,Max}}}}});
+ true ->
+ remove_unused_then_dotag(DoTag,?N_BIT_STRING,
+ Unused,BinBits)
+ end;
+ Size ->
+ case ((size(BinBits)*8)-Unused) of
+ BBSize when BBSize =< Size ->
+ remove_unused_then_dotag(DoTag,?N_BIT_STRING,
+ Unused,BinBits);
+ BBSize ->
+ exit({error,{asn1,
+ {bitstring_length,
+ {{was,BBSize},{should_be,Size}}}}})
+ end
+ end.
+
+remove_unused_then_dotag(DoTag,StringType,Unused,BinBits) ->
+ case Unused of
+ 0 when (size(BinBits) == 0),DoTag==[] ->
+ %% time optimization of next case
+ {[StringType,1,0],3};
+ 0 when (size(BinBits) == 0) ->
+ dotag(DoTag,StringType,{<<0>>,1});
+ 0 when DoTag==[]-> % time optimization of next case
+ dotag_universal(StringType,[Unused|BinBits],size(BinBits)+1);
+% {LenEnc,Len} = encode_legth(size(BinBits)+1),
+% {[StringType,LenEnc,[Unused|BinBits]],size(BinBits)+1+Len+1};
+ 0 ->
+ dotag(DoTag,StringType,<<Unused,BinBits/binary>>);
+ Num when DoTag == [] -> % time optimization of next case
+ N = (size(BinBits)-1),
+ <<BBits:N/binary,LastByte>> = BinBits,
+ dotag_universal(StringType,
+ [Unused,BBits,(LastByte bsr Num) bsl Num],
+ size(BinBits)+1);
+% {LenEnc,Len} = encode_legth(size(BinBits)+1),
+% {[StringType,LenEnc,[Unused,BBits,(LastByte bsr Num) bsl Num],
+% 1+Len+size(BinBits)+1};
+ Num ->
+ N = (size(BinBits)-1),
+ <<BBits:N/binary,LastByte>> = BinBits,
+ dotag(DoTag,StringType,{[Unused,binary_to_list(BBits) ++
+ [(LastByte bsr Num) bsl Num]],
+ 1+size(BinBits)})
+ end.
+
+
+%%=================================================================
+%% Encode named bits
+%%=================================================================
+
+encode_bit_string_named(C, [FirstVal | RestVal], NamedBitList, DoTag) ->
+ {Len,Unused,OctetList} =
+ case get_constraint(C,'SizeConstraint') of
+ no ->
+ ToSetPos = get_all_bitposes([FirstVal | RestVal],
+ NamedBitList, []),
+ BitList = make_and_set_list(lists:max(ToSetPos)+1,
+ ToSetPos, 0),
+ encode_bitstring(BitList);
+ {_Min,Max} ->
+ ToSetPos = get_all_bitposes([FirstVal | RestVal],
+ NamedBitList, []),
+ BitList = make_and_set_list(Max, ToSetPos, 0),
+ encode_bitstring(BitList);
+ Size ->
+ ToSetPos = get_all_bitposes([FirstVal | RestVal],
+ NamedBitList, []),
+ BitList = make_and_set_list(Size, ToSetPos, 0),
+ encode_bitstring(BitList)
+ end,
+ case DoTag of
+ [] ->
+ dotag_universal(?N_BIT_STRING,[Unused|OctetList],Len+1);
+% {EncLen,LenLen} = encode_length(Len+1),
+% {[?N_BIT_STRING,EncLen,Unused,OctetList],1+LenLen+Len+1};
+ _ ->
+ dotag(DoTag, ?N_BIT_STRING, {[Unused|OctetList],Len+1})
+ end.
+
+
+%%----------------------------------------
+%% get_all_bitposes([list of named bits to set], named_bit_db, []) ->
+%% [sorted_list_of_bitpositions_to_set]
+%%----------------------------------------
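+%% E.g. get_all_bitposes([b,{bit,0}], [{a,0},{b,2}], []) -> [0,2].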
+
+get_all_bitposes([{bit,ValPos}|Rest], NamedBitList, Ack) ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack ]);
+get_all_bitposes([Val | Rest], NamedBitList, Ack) when atom(Val) ->
+ case lists:keysearch(Val, 1, NamedBitList) of
+ {value, {_ValName, ValPos}} ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack]);
+ _ ->
+ exit({error,{asn1, {bitstring_namedbit, Val}}})
+ end;
+get_all_bitposes([], _NamedBitList, Ack) ->
+ lists:sort(Ack).
+
+
+%%----------------------------------------
+%% make_and_set_list(Len of list to return, [list of positions to set to 1])->
+%% returns list of Len length, with all in SetPos set.
+%% when positioning in the list the first element is 0, the second 1 etc., but
+%% Len will make a list of length Len, not Len + 1.
+%% BitList = make_and_set_list(C, ToSetPos, 0),
+%%----------------------------------------
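+%% E.g. make_and_set_list(8, [0,3], 0) -> [1,0,0,1,0,0,0,0].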
+
+make_and_set_list(0, [], _) -> [];
+make_and_set_list(0, _, _) ->
+ exit({error,{asn1,bitstring_sizeconstraint}});
+make_and_set_list(Len, [XPos|SetPos], XPos) ->
+ [1 | make_and_set_list(Len - 1, SetPos, XPos + 1)];
+make_and_set_list(Len, [Pos|SetPos], XPos) ->
+ [0 | make_and_set_list(Len - 1, [Pos | SetPos], XPos + 1)];
+make_and_set_list(Len, [], XPos) ->
+ [0 | make_and_set_list(Len - 1, [], XPos + 1)].
+
+
+
+
+
+
+%%=================================================================
+%% Encode bit string for lists of ones and zeroes
+%%=================================================================
+encode_bit_string_bits(C, BitListVal, _NamedBitList, DoTag) when list(BitListVal) ->
+ {Len,Unused,OctetList} =
+ case get_constraint(C,'SizeConstraint') of
+ no ->
+ encode_bitstring(BitListVal);
+ Constr={Min,Max} when integer(Min),integer(Max) ->
+ encode_constr_bit_str_bits(Constr,BitListVal,DoTag);
+ {Constr={_,_},[]} ->
+ %% constraint with extension mark
+ encode_constr_bit_str_bits(Constr,BitListVal,DoTag);
+ Constr={{_,_},{_,_}} ->%{{Min1,Max1},{Min2,Max2}}
+ %% constraint with extension mark
+ encode_constr_bit_str_bits(Constr,BitListVal,DoTag);
+ Size ->
+ case length(BitListVal) of
+ BitSize when BitSize == Size ->
+ encode_bitstring(BitListVal);
+ BitSize when BitSize < Size ->
+ PaddedList =
+ pad_bit_list(Size-BitSize,BitListVal),
+ encode_bitstring(PaddedList);
+ BitSize ->
+ exit({error,
+ {asn1,
+ {bitstring_length,
+ {{was,BitSize},
+ {should_be,Size}}}}})
+ end
+ end,
+ %%add unused byte to the Len
+ case DoTag of
+ [] ->
+ dotag_universal(?N_BIT_STRING,[Unused|OctetList],Len+1);
+% {EncLen,LenLen}=encode_length(Len+1),
+% {[?N_BIT_STRING,EncLen,Unused|OctetList],1+LenLen+Len+1};
+ _ ->
+ dotag(DoTag, ?N_BIT_STRING,
+ {[Unused | OctetList],Len+1})
+ end.
+
+
+encode_constr_bit_str_bits({_Min,Max},BitListVal,_DoTag) ->
+ BitLen = length(BitListVal),
+ if
+ BitLen > Max ->
+ exit({error,{asn1,{bitstring_length,{{was,BitLen},
+ {maximum,Max}}}}});
+ true ->
+ encode_bitstring(BitListVal)
+ end;
+encode_constr_bit_str_bits({{_Min1,Max1},{Min2,Max2}},BitListVal,_DoTag) ->
+ BitLen = length(BitListVal),
+ case BitLen of
+ Len when Len > Max2 ->
+ exit({error,{asn1,{bitstring_length,{{was,BitLen},
+ {maximum,Max2}}}}});
+ Len when Len > Max1, Len < Min2 ->
+ exit({error,{asn1,{bitstring_length,{{was,BitLen},
+ {not_allowed_interval,
+ Max1,Min2}}}}});
+ _ ->
+ encode_bitstring(BitListVal)
+ end.
+
+%% returns a list of length Size + length(BitListVal), with BitListVal
+%% as the most significant elements followed by padded zero elements
+pad_bit_list(Size,BitListVal) ->
+ Tail = lists:duplicate(Size,0),
+ lists:append(BitListVal,Tail).
+
+%%=================================================================
+%% Do the actual encoding
+%% ([bitlist]) -> {ListLen, UnusedBits, OctetList}
+%%=================================================================
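+%%
+%% E.g. encode_bitstring([1,0,1,1]) -> {1,4,[2#10110000]} and
+%% encode_bitstring([0,1,0,0,0,0,0,0]) -> {1,0,[2#01000000]}.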
+
+encode_bitstring([B8, B7, B6, B5, B4, B3, B2, B1 | Rest]) ->
+ Val = (B8 bsl 7) bor (B7 bsl 6) bor (B6 bsl 5) bor (B5 bsl 4) bor
+ (B4 bsl 3) bor (B3 bsl 2) bor (B2 bsl 1) bor B1,
+ encode_bitstring(Rest, [Val], 1);
+encode_bitstring(Val) ->
+ {Unused, Octet} = unused_bitlist(Val, 7, 0),
+ {1, Unused, [Octet]}.
+
+encode_bitstring([B8, B7, B6, B5, B4, B3, B2, B1 | Rest], Ack, Len) ->
+ Val = (B8 bsl 7) bor (B7 bsl 6) bor (B6 bsl 5) bor (B5 bsl 4) bor
+ (B4 bsl 3) bor (B3 bsl 2) bor (B2 bsl 1) bor B1,
+ encode_bitstring(Rest, [Ack | [Val]], Len + 1);
+%%even multiple of 8 bits..
+encode_bitstring([], Ack, Len) ->
+ {Len, 0, Ack};
+%% unused bits in last octet
+encode_bitstring(Rest, Ack, Len) ->
+% io:format("uneven ~w ~w ~w~n",[Rest, Ack, Len]),
+ {Unused, Val} = unused_bitlist(Rest, 7, 0),
+ {Len + 1, Unused, [Ack | [Val]]}.
+
+%%%%%%%%%%%%%%%%%%
+%% unused_bitlist([list of ones and zeros <= 7], 7, 0) ->
+%% {Unused bits, Last octet with bits moved to right}
+unused_bitlist([], Trail, Ack) ->
+ {Trail + 1, Ack};
+unused_bitlist([Bit | Rest], Trail, Ack) ->
+%% io:format("trail Bit: ~w Rest: ~w Trail: ~w Ack:~w~n",[Bit, Rest, Trail, Ack]),
+ unused_bitlist(Rest, Trail - 1, (Bit bsl Trail) bor Ack).
+
+
+%%============================================================================
+%% decode bitstring value
+%% (Buffer, Range, NamedNumberList, HasTag, TotalLen) -> {Integer, Remain, RemovedBytes}
+%%============================================================================
+
+decode_compact_bit_string(Buffer, Range, NamedNumberList, Tags, LenIn, OptOrMand) ->
+% NewTags = new_tags(HasTag,#tag{class=?UNIVERSAL,number=?N_BIT_STRING}),
+ decode_restricted_string(Buffer, Range, ?N_BIT_STRING, Tags, LenIn,
+ NamedNumberList, OptOrMand,bin).
+
+decode_bit_string(Buffer, Range, NamedNumberList, Tags, LenIn, OptOrMand) ->
+% NewTags = new_tags(HasTag,#tag{class=?UNIVERSAL,number=?N_BIT_STRING}),
+ decode_restricted_string(Buffer, Range, ?N_BIT_STRING, Tags, LenIn,
+ NamedNumberList, OptOrMand,old).
+
+
+decode_bit_string2(1,<<0 ,Buffer/binary>>,_NamedNumberList,RemovedBytes,BinOrOld) ->
+ case BinOrOld of
+ bin ->
+ {{0,<<>>},Buffer,RemovedBytes};
+ _ ->
+ {[], Buffer, RemovedBytes}
+ end;
+decode_bit_string2(Len,<<Unused,Buffer/binary>>,NamedNumberList,
+ RemovedBytes,BinOrOld) ->
+ L = Len - 1,
+ <<Bits:L/binary,BufferTail/binary>> = Buffer,
+ case NamedNumberList of
+ [] ->
+ case BinOrOld of
+ bin ->
+ {{Unused,Bits},BufferTail,RemovedBytes};
+ _ ->
+ BitString = decode_bitstring2(L, Unused, Buffer),
+ {BitString,BufferTail, RemovedBytes}
+ end;
+ _ ->
+ BitString = decode_bitstring2(L, Unused, Buffer),
+ {decode_bitstring_NNL(BitString,NamedNumberList),
+ BufferTail,
+ RemovedBytes}
+ end.
+
+%%----------------------------------------
+%% Decode the in buffer to bits
+%%----------------------------------------
+decode_bitstring2(1,Unused,<<B7:1,B6:1,B5:1,B4:1,B3:1,B2:1,B1:1,B0:1,_/binary>>) ->
+ lists:sublist([B7,B6,B5,B4,B3,B2,B1,B0],8-Unused);
+decode_bitstring2(Len, Unused,
+ <<B7:1,B6:1,B5:1,B4:1,B3:1,B2:1,B1:1,B0:1,Buffer/binary>>) ->
+ [B7, B6, B5, B4, B3, B2, B1, B0 |
+ decode_bitstring2(Len - 1, Unused, Buffer)].
+
+%%decode_bitstring2(1, Unused, Buffer) ->
+%% make_bits_of_int(hd(Buffer), 128, 8-Unused);
+%%decode_bitstring2(Len, Unused, [BitVal | Buffer]) ->
+%% [B7, B6, B5, B4, B3, B2, B1, B0] = make_bits_of_int(BitVal, 128, 8),
+%% [B7, B6, B5, B4, B3, B2, B1, B0 |
+%% decode_bitstring2(Len - 1, Unused, Buffer)].
+
+
+%%make_bits_of_int(_, _, 0) ->
+%% [];
+%%make_bits_of_int(BitVal, MaskVal, Unused) when Unused > 0 ->
+%% X = case MaskVal band BitVal of
+%% 0 -> 0 ;
+%% _ -> 1
+%% end,
+%% [X | make_bits_of_int(BitVal, MaskVal bsr 1, Unused - 1)].
+
+
+
+%%----------------------------------------
+%% Decode the bitlist to names
+%%----------------------------------------
+
+
+decode_bitstring_NNL(BitList,NamedNumberList) ->
+ decode_bitstring_NNL(BitList,NamedNumberList,0,[]).
+
+
+decode_bitstring_NNL([],_,_No,Result) ->
+ lists:reverse(Result);
+
+decode_bitstring_NNL([B|BitList],[{Name,No}|NamedNumberList],No,Result) ->
+ if
+ B == 0 ->
+ decode_bitstring_NNL(BitList,NamedNumberList,No+1,Result);
+ true ->
+ decode_bitstring_NNL(BitList,NamedNumberList,No+1,[Name|Result])
+ end;
+decode_bitstring_NNL([1|BitList],NamedNumberList,No,Result) ->
+ decode_bitstring_NNL(BitList,NamedNumberList,No+1,[{bit,No}|Result]);
+decode_bitstring_NNL([0|BitList],NamedNumberList,No,Result) ->
+ decode_bitstring_NNL(BitList,NamedNumberList,No+1,Result).
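+
+%% For illustration, assuming the invented named bits a=0, b=1 and c=2:
+%% set bits are mapped to their names and cleared bits are dropped:
+%%	decode_bitstring_NNL([1,0,1], [{a,0},{b,1},{c,2}]) -> [a,c]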
+
+
+%%============================================================================
+%% Octet string, ITU_T X.690 Chapter 8.7
+%%
+%% encode octet string
+%% The OctetList must be a flat list of integers in the range 0..255
+%% the function does not check this because it takes too much time
+%%============================================================================
+encode_octet_string(_C, OctetList, []) when binary(OctetList) ->
+ dotag_universal(?N_OCTET_STRING,OctetList,size(OctetList));
+encode_octet_string(_C, OctetList, DoTag) when binary(OctetList) ->
+ dotag(DoTag, ?N_OCTET_STRING, {OctetList,size(OctetList)});
+encode_octet_string(_C, OctetList, DoTag) when list(OctetList) ->
+ case length(OctetList) of
+ Len when DoTag == [] ->
+ dotag_universal(?N_OCTET_STRING,OctetList,Len);
+ Len ->
+ dotag(DoTag, ?N_OCTET_STRING, {OctetList,Len})
+ end;
+% encode_octet_string(C, OctetList, DoTag) when list(OctetList) ->
+% dotag(DoTag, ?N_OCTET_STRING, {OctetList,length(OctetList)});
+encode_octet_string(C, {Name,OctetList}, DoTag) when atom(Name) ->
+ encode_octet_string(C, OctetList, DoTag).
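+
+%% For illustration (the first argument, the constraint, is ignored here):
+%% without outer tags the octet string "abc" is tagged and length-encoded as
+%% a deep list; the io-list below flattens to <<4,3,97,98,99>>:
+%%	encode_octet_string([], "abc", []) -> {[4,3,"abc"],5}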
+
+
+%%============================================================================
+%% decode octet string
+%% (Buffer, Range, HasTag, TotalLen) -> {String, Remain, RemovedBytes}
+%%
+%% Octet string is decoded as a restricted string
+%%============================================================================
+decode_octet_string(Buffer, Range, Tags, TotalLen, OptOrMand) ->
+% NewTags = new_tags(HasTag,#tag{class=?UNIVERSAL,number=?N_OCTET_STRING}),
+ decode_restricted_string(Buffer, Range, ?N_OCTET_STRING,
+ Tags, TotalLen, [], OptOrMand,old).
+
+%%============================================================================
+%% Null value, ITU_T X.690 Chapter 8.8
+%%
+%% encode NULL value
+%%============================================================================
+
+encode_null(_, []) ->
+ {[?N_NULL,0],2};
+encode_null(_, DoTag) ->
+ dotag(DoTag, ?N_NULL, {[],0}).
+
+%%============================================================================
+%% decode NULL value
+%% (Buffer, HasTag, TotalLen) -> {NULL, Remain, RemovedBytes}
+%%============================================================================
+decode_null(Buffer, Tags, OptOrMand) ->
+ NewTags = new_tags(Tags,#tag{class=?UNIVERSAL,number=?N_NULL}),
+ decode_null_notag(Buffer, NewTags, OptOrMand).
+
+decode_null_notag(Buffer, Tags, OptOrMand) ->
+ {RestTags, {FormLen, Buffer0, Rb0}} =
+ check_tags_i(Tags, Buffer, OptOrMand),
+
+ case FormLen of
+ {?CONSTRUCTED,Len} ->
+ {_Buffer00,RestBytes} = split_list(Buffer0,Len),
+ {Val01, Buffer01, Rb01} = decode_null_notag(Buffer0, RestTags,
+ OptOrMand),
+ {Buffer02, Rb02} = restbytes2(RestBytes,Buffer01,noext),
+ {Val01, Buffer02, Rb0+Rb01+Rb02};
+ {_,0} ->
+ {'NULL', Buffer0, Rb0};
+ {_,Len} ->
+ exit({error,{asn1,{invalid_length,'NULL',Len}}})
+ end.
+
+
+%%============================================================================
+%% Object identifier, ITU_T X.690 Chapter 8.19
+%%
+%% encode Object Identifier value
+%%============================================================================
+
+encode_object_identifier({Name,Val}, DoTag) when atom(Name) ->
+ encode_object_identifier(Val, DoTag);
+encode_object_identifier(Val, []) ->
+ {EncVal,Len} = e_object_identifier(Val),
+ dotag_universal(?N_OBJECT_IDENTIFIER,EncVal,Len);
+encode_object_identifier(Val, DoTag) ->
+ dotag(DoTag, ?N_OBJECT_IDENTIFIER, e_object_identifier(Val)).
+
+e_object_identifier({'OBJECT IDENTIFIER', V}) ->
+ e_object_identifier(V);
+e_object_identifier({Cname, V}) when atom(Cname), tuple(V) ->
+ e_object_identifier(tuple_to_list(V));
+e_object_identifier({Cname, V}) when atom(Cname), list(V) ->
+ e_object_identifier(V);
+e_object_identifier(V) when tuple(V) ->
+ e_object_identifier(tuple_to_list(V));
+
+%%%%%%%%%%%%%%%
+%% e_object_identifier([List of Object Identifiers]) ->
+%% {[Encoded Octetlist of ObjIds], IntLength}
+%%
+e_object_identifier([E1, E2 | Tail]) ->
+ Head = 40*E1 + E2, % wow!
+ {H,Lh} = mk_object_val(Head),
+ {R,Lr} = enc_obj_id_tail(Tail, [], 0),
+ {[H|R], Lh+Lr}.
+
+enc_obj_id_tail([], Ack, Len) ->
+ {lists:reverse(Ack), Len};
+enc_obj_id_tail([H|T], Ack, Len) ->
+ {B, L} = mk_object_val(H),
+ enc_obj_id_tail(T, [B|Ack], Len+L).
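+
+%% For illustration: the object identifier {1,2,840,113549} yields the
+%% content octets 42,134,72,134,247,13 (the first octet packs 40*1+2),
+%% returned as a deep list together with its length:
+%%	e_object_identifier({1,2,840,113549}) -> {[[42],[134,72],[134,247,13]],6}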
+
+%% e_object_identifier([List of Object Identifiers]) ->
+%% {[Encoded Octetlist of ObjIds], IntLength}
+%%
+%%e_object_identifier([E1, E2 | Tail]) ->
+%% Head = 40*E1 + E2, % wow!
+%% F = fun(Val, AckLen) ->
+%% {L, Ack} = mk_object_val(Val),
+%% {L, Ack + AckLen}
+%% end,
+%% {Octets, Len} = lists:mapfoldl(F, 0, [Head | Tail]).
+
+%%%%%%%%%%%
+%% mk_object_val(Value) -> {OctetList, Len}
+%% returns Val as a list of octets, where bit 8 is always set to one except
+%% in the last octet, where it is 0
+%%
+
+
+mk_object_val(Val) when Val =< 127 ->
+ {[255 band Val], 1};
+mk_object_val(Val) ->
+ mk_object_val(Val bsr 7, [Val band 127], 1).
+mk_object_val(0, Ack, Len) ->
+ {Ack, Len};
+mk_object_val(Val, Ack, Len) ->
+ mk_object_val(Val bsr 7, [((Val band 127) bor 128) | Ack], Len + 1).
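+
+%% For illustration: subidentifiers are encoded base 128 with bit 8 set in
+%% all octets but the last:
+%%	mk_object_val(42)  -> {[42],1}
+%%	mk_object_val(840) -> {[134,72],2}	% 840 = 6*128 + 72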
+
+
+
+%%============================================================================
+%% decode Object Identifier value
+%% (Buffer, HasTag, TotalLen) -> {{ObjId}, Remain, RemovedBytes}
+%%============================================================================
+
+decode_object_identifier(Buffer, Tags, OptOrMand) ->
+ NewTags = new_tags(Tags,#tag{class=?UNIVERSAL,
+ number=?N_OBJECT_IDENTIFIER}),
+ decode_object_identifier_notag(Buffer, NewTags, OptOrMand).
+
+decode_object_identifier_notag(Buffer, Tags, OptOrMand) ->
+ {RestTags, {FormLen, Buffer0, Rb0}} =
+ check_tags_i(Tags, Buffer, OptOrMand),
+
+ case FormLen of
+ {?CONSTRUCTED,Len} ->
+ {Buffer00,RestBytes} = split_list(Buffer0,Len),
+ {Val01, Buffer01, Rb01} =
+ decode_object_identifier_notag(Buffer00,
+ RestTags, OptOrMand),
+ {Buffer02, Rb02} = restbytes2(RestBytes,Buffer01,noext),
+ {Val01, Buffer02, Rb0+Rb01+Rb02};
+ {_,Len} ->
+ {[AddedObjVal|ObjVals],Buffer01} =
+ dec_subidentifiers(Buffer0,0,[],Len),
+ {Val1, Val2} = if
+ AddedObjVal < 40 ->
+ {0, AddedObjVal};
+ AddedObjVal < 80 ->
+ {1, AddedObjVal - 40};
+ true ->
+ {2, AddedObjVal - 80}
+ end,
+ {list_to_tuple([Val1, Val2 | ObjVals]), Buffer01,
+ Rb0+Len}
+ end.
+
+dec_subidentifiers(Buffer,_Av,Al,0) ->
+ {lists:reverse(Al),Buffer};
+dec_subidentifiers(<<1:1,H:7,T/binary>>,Av,Al,Len) ->
+ dec_subidentifiers(T,(Av bsl 7) + H,Al,Len-1);
+dec_subidentifiers(<<H,T/binary>>,Av,Al,Len) ->
+ dec_subidentifiers(T,0,[((Av bsl 7) + H)|Al],Len-1).
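+
+%% For illustration: the three content octets <<134,72,6>> decode to the two
+%% subidentifiers 840 and 6 (840 is carried in two octets, 6 in one):
+%%	dec_subidentifiers(<<134,72,6>>, 0, [], 3) -> {[840,6],<<>>}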
+
+
+%%dec_subidentifiers(Buffer,Av,Al,0) ->
+%% {lists:reverse(Al),Buffer};
+%%dec_subidentifiers([H|T],Av,Al,Len) when H >=16#80 ->
+%% dec_subidentifiers(T,(Av bsl 7) + (H band 16#7F),Al,Len-1);
+%%dec_subidentifiers([H|T],Av,Al,Len) ->
+%% dec_subidentifiers(T,0,[(Av bsl 7) + H |Al],Len-1).
+
+
+%%============================================================================
+%% Restricted character string types, ITU_T X.690 Chapter 8.20
+%%
+%% encode Numeric Printable Teletex Videotex Visible IA5 Graphic General strings
+%%============================================================================
+encode_restricted_string(_C, OctetList, StringType, [])
+ when binary(OctetList) ->
+ dotag_universal(StringType,OctetList,size(OctetList));
+encode_restricted_string(_C, OctetList, StringType, DoTag)
+ when binary(OctetList) ->
+ dotag(DoTag, StringType, {OctetList, size(OctetList)});
+encode_restricted_string(_C, OctetList, StringType, [])
+ when list(OctetList) ->
+ dotag_universal(StringType,OctetList,length(OctetList));
+encode_restricted_string(_C, OctetList, StringType, DoTag)
+ when list(OctetList) ->
+ dotag(DoTag, StringType, {OctetList, length(OctetList)});
+encode_restricted_string(C,{Name,OctetL},StringType,DoTag) when atom(Name)->
+ encode_restricted_string(C, OctetL, StringType, DoTag).
+
+%%============================================================================
+%% decode Numeric Printable Teletex Videotex Visible IA5 Graphic General strings
+%% (Buffer, Range, StringType, HasTag, TotalLen) ->
+%% {String, Remain, RemovedBytes}
+%%============================================================================
+
+decode_restricted_string(Buffer, Range, StringType, Tags, LenIn, OptOrMand) ->
+ {Val,Buffer2,Rb} =
+ decode_restricted_string_tag(Buffer, Range, StringType, Tags,
+ LenIn, [], OptOrMand,old),
+ {check_and_convert_restricted_string(Val,StringType,Range,[],old),
+ Buffer2,Rb}.
+
+
+decode_restricted_string(Buffer, Range, StringType, Tags, LenIn, NNList, OptOrMand, BinOrOld ) ->
+ {Val,Buffer2,Rb} =
+ decode_restricted_string_tag(Buffer, Range, StringType, Tags,
+ LenIn, NNList, OptOrMand, BinOrOld),
+ {check_and_convert_restricted_string(Val,StringType,Range,NNList,BinOrOld),
+ Buffer2,Rb}.
+
+decode_restricted_string_tag(Buffer, Range, StringType, TagsIn, LenIn, NNList, OptOrMand, BinOrOld ) ->
+ NewTags = new_tags(TagsIn, #tag{class=?UNIVERSAL,number=StringType}),
+ decode_restricted_string_notag(Buffer, Range, StringType, NewTags,
+ LenIn, NNList, OptOrMand, BinOrOld).
+
+
+
+
+check_and_convert_restricted_string(Val,StringType,Range,NamedNumberList,_BinOrOld) ->
+ {StrLen,NewVal} = case StringType of
+ ?N_BIT_STRING when NamedNumberList /= [] ->
+ {no_check,Val};
+ ?N_BIT_STRING when list(Val) ->
+ {length(Val),Val};
+ ?N_BIT_STRING when tuple(Val) ->
+ {(size(element(2,Val))*8) - element(1,Val),Val};
+ _ when binary(Val) ->
+ {size(Val),binary_to_list(Val)};
+ _ when list(Val) ->
+ {length(Val), Val}
+ end,
+ case Range of
+ _ when StrLen == no_check ->
+ NewVal;
+ [] -> % No length constraint
+ NewVal;
+ {Lb,Ub} when StrLen >= Lb, Ub >= StrLen -> % variable length constraint
+ NewVal;
+ {{Lb,_Ub},[]} when StrLen >= Lb ->
+ NewVal;
+ {{Lb1,Ub1},{Lb2,Ub2}} when StrLen >= Lb1, StrLen =< Ub1;
+ StrLen =< Ub2, StrLen >= Lb2 ->
+ NewVal;
+ StrLen -> % fixed length constraint
+ NewVal;
+ {_,_} ->
+ exit({error,{asn1,{length,Range,Val}}});
+ _Len when integer(_Len) ->
+ exit({error,{asn1,{length,Range,Val}}});
+ _ -> % some strange constraint that we don't support yet
+ NewVal
+ end.
+
+
+%%=============================================================================
+%% Common routines for several string types including bit string
+%% handles indefinite length
+%%=============================================================================
+
+
+decode_restricted_string_notag(Buffer, _Range, StringType, TagsIn,
+ _, NamedNumberList, OptOrMand,BinOrOld) ->
+ %%-----------------------------------------------------------
+ %% Get inner (the implicit tag or no tag) and
+ %% outer (the explicit tag) lengths.
+ %%-----------------------------------------------------------
+ {RestTags, {FormLength={_,_Len01}, Buffer0, Rb0}} =
+ check_tags_i(TagsIn, Buffer, OptOrMand),
+
+ case FormLength of
+ {?CONSTRUCTED,Len} ->
+ {Buffer00, RestBytes} = split_list(Buffer0,Len),
+ {Val01, Buffer01, Rb01} =
+ decode_restricted_parts(Buffer00, RestBytes, [], StringType,
+ RestTags,
+ Len, NamedNumberList,
+ OptOrMand,
+ BinOrOld, 0, []),
+ {Val01, Buffer01, Rb0+Rb01};
+ {_, Len} ->
+ {Val01, Buffer01, Rb01} =
+ decode_restricted(Buffer0, Len, StringType,
+ NamedNumberList, BinOrOld),
+ {Val01, Buffer01, Rb0+Rb01}
+ end.
+
+
+decode_restricted_parts(Buffer, RestBytes, [], StringType, RestTags, Len, NNList,
+ OptOrMand, BinOrOld, AccRb, AccVal) ->
+ DecodeFun = case RestTags of
+ [] -> fun decode_restricted_string_tag/8;
+ _ -> fun decode_restricted_string_notag/8
+ end,
+ {Val, Buffer1, Rb} =
+ DecodeFun(Buffer, [], StringType, RestTags,
+ no_length, NNList,
+ OptOrMand, BinOrOld),
+ {Buffer2,More} =
+ case Buffer1 of
+ <<0,0,Buffer10/binary>> when Len == indefinite ->
+ {Buffer10,false};
+ <<>> ->
+ {RestBytes,false};
+ _ ->
+ {Buffer1,true}
+ end,
+ {NewVal, NewRb} =
+ case StringType of
+ ?N_BIT_STRING when BinOrOld == bin ->
+ {concat_bit_binaries(AccVal, Val), AccRb+Rb};
+ _ when binary(Val),binary(AccVal) ->
+ {<<AccVal/binary,Val/binary>>,AccRb+Rb};
+ _ when binary(Val), AccVal==[] ->
+ {Val,AccRb+Rb};
+ _ ->
+ {AccVal++Val, AccRb+Rb}
+ end,
+ case More of
+ false ->
+ {NewVal, Buffer2, NewRb};
+ true ->
+ decode_restricted_parts(Buffer2, RestBytes, [], StringType, RestTags, Len, NNList,
+ OptOrMand, BinOrOld, NewRb, NewVal)
+ end.
+
+
+
+decode_restricted(Buffer, InnerLen, StringType, NamedNumberList,BinOrOld) ->
+
+ case StringType of
+ ?N_BIT_STRING ->
+ decode_bit_string2(InnerLen,Buffer,NamedNumberList,InnerLen,BinOrOld);
+
+ ?N_UniversalString ->
+ <<PreBuff:InnerLen/binary,RestBuff/binary>> = Buffer,%%added for binary
+ UniString = mk_universal_string(binary_to_list(PreBuff)),
+ {UniString,RestBuff,InnerLen};
+ ?N_BMPString ->
+ <<PreBuff:InnerLen/binary,RestBuff/binary>> = Buffer,%%added for binary
+ BMP = mk_BMP_string(binary_to_list(PreBuff)),
+ {BMP,RestBuff,InnerLen};
+ _ ->
+ <<PreBuff:InnerLen/binary,RestBuff/binary>> = Buffer,%%added for binary
+ {PreBuff, RestBuff, InnerLen}
+ end.
+
+
+
+%%============================================================================
+%% encode Universal string
+%%============================================================================
+
+encode_universal_string(C, {Name, Universal}, DoTag) when atom(Name) ->
+ encode_universal_string(C, Universal, DoTag);
+encode_universal_string(_C, Universal, []) ->
+ OctetList = mk_uni_list(Universal),
+ dotag_universal(?N_UniversalString,OctetList,length(OctetList));
+encode_universal_string(_C, Universal, DoTag) ->
+ OctetList = mk_uni_list(Universal),
+ dotag(DoTag, ?N_UniversalString, {OctetList,length(OctetList)}).
+
+mk_uni_list(In) ->
+ mk_uni_list(In,[]).
+
+mk_uni_list([],List) ->
+ lists:reverse(List);
+mk_uni_list([{A,B,C,D}|T],List) ->
+ mk_uni_list(T,[D,C,B,A|List]);
+mk_uni_list([H|T],List) ->
+ mk_uni_list(T,[H,0,0,0|List]).
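+
+%% For illustration: each plain character becomes four octets, zero-padded
+%% on the left:
+%%	mk_uni_list("AB") -> [0,0,0,65,0,0,0,66]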
+
+%%===========================================================================
+%% decode Universal strings
+%% (Buffer, Range, StringType, HasTag, LenIn) ->
+%% {String, Remain, RemovedBytes}
+%%===========================================================================
+
+decode_universal_string(Buffer, Range, Tags, LenIn, OptOrMand) ->
+% NewTags = new_tags(HasTag, #tag{class=?UNIVERSAL,number=?N_UniversalString}),
+ decode_restricted_string(Buffer, Range, ?N_UniversalString,
+ Tags, LenIn, [], OptOrMand,old).
+
+
+mk_universal_string(In) ->
+ mk_universal_string(In,[]).
+
+mk_universal_string([],Acc) ->
+ lists:reverse(Acc);
+mk_universal_string([0,0,0,D|T],Acc) ->
+ mk_universal_string(T,[D|Acc]);
+mk_universal_string([A,B,C,D|T],Acc) ->
+ mk_universal_string(T,[{A,B,C,D}|Acc]).
+
+
+%%============================================================================
+%% encode BMP string
+%%============================================================================
+
+encode_BMP_string(C, {Name,BMPString}, DoTag) when atom(Name)->
+ encode_BMP_string(C, BMPString, DoTag);
+encode_BMP_string(_C, BMPString, []) ->
+ OctetList = mk_BMP_list(BMPString),
+ dotag_universal(?N_BMPString,OctetList,length(OctetList));
+encode_BMP_string(_C, BMPString, DoTag) ->
+ OctetList = mk_BMP_list(BMPString),
+ dotag(DoTag, ?N_BMPString, {OctetList,length(OctetList)}).
+
+mk_BMP_list(In) ->
+ mk_BMP_list(In,[]).
+
+mk_BMP_list([],List) ->
+ lists:reverse(List);
+mk_BMP_list([{0,0,C,D}|T],List) ->
+ mk_BMP_list(T,[D,C|List]);
+mk_BMP_list([H|T],List) ->
+ mk_BMP_list(T,[H,0|List]).
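+
+%% For illustration: each plain character becomes two octets:
+%%	mk_BMP_list("AB") -> [0,65,0,66]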
+
+%%============================================================================
+%% decode (OctetList, Range(ignored), tag|notag) -> {ValList, RestList}
+%% (Buffer, Range, StringType, HasTag, TotalLen) ->
+%% {String, Remain, RemovedBytes}
+%%============================================================================
+decode_BMP_string(Buffer, Range, Tags, LenIn, OptOrMand) ->
+% NewTags = new_tags(HasTag, #tag{class=?UNIVERSAL,number=?N_BMPString}),
+ decode_restricted_string(Buffer, Range, ?N_BMPString,
+ Tags, LenIn, [], OptOrMand,old).
+
+mk_BMP_string(In) ->
+ mk_BMP_string(In,[]).
+
+mk_BMP_string([],US) ->
+ lists:reverse(US);
+mk_BMP_string([0,B|T],US) ->
+ mk_BMP_string(T,[B|US]);
+mk_BMP_string([C,D|T],US) ->
+ mk_BMP_string(T,[{0,0,C,D}|US]).
+
+
+%%============================================================================
+%% Generalized time, ITU_T X.680 Chapter 39
+%%
+%% encode Generalized time
+%%============================================================================
+
+encode_generalized_time(C, {Name,OctetList}, DoTag) when atom(Name) ->
+ encode_generalized_time(C, OctetList, DoTag);
+encode_generalized_time(_C, OctetList, []) ->
+ dotag_universal(?N_GeneralizedTime,OctetList,length(OctetList));
+encode_generalized_time(_C, OctetList, DoTag) ->
+ dotag(DoTag, ?N_GeneralizedTime, {OctetList,length(OctetList)}).
+
+%%============================================================================
+%% decode Generalized time
+%% (Buffer, Range, HasTag, TotalLen) -> {String, Remain, RemovedBytes}
+%%============================================================================
+
+decode_generalized_time(Buffer, Range, Tags, TotalLen, OptOrMand) ->
+ NewTags = new_tags(Tags,#tag{class=?UNIVERSAL,
+ number=?N_GeneralizedTime}),
+ decode_generalized_time_notag(Buffer, Range, NewTags, TotalLen, OptOrMand).
+
+decode_generalized_time_notag(Buffer, Range, Tags, TotalLen, OptOrMand) ->
+ {RestTags, {FormLen, Buffer0, Rb0}} =
+ check_tags_i(Tags, Buffer, OptOrMand),
+
+ case FormLen of
+ {?CONSTRUCTED,Len} ->
+ {Buffer00,RestBytes} = split_list(Buffer0,Len),
+ {Val01, Buffer01, Rb01} =
+ decode_generalized_time_notag(Buffer00, Range,
+ RestTags, TotalLen,
+ OptOrMand),
+ {Buffer02, Rb02} = restbytes2(RestBytes,Buffer01,noext),
+ {Val01, Buffer02, Rb0+Rb01+Rb02};
+ {_,Len} ->
+ <<PreBuff:Len/binary,RestBuff/binary>> = Buffer0,
+ {binary_to_list(PreBuff), RestBuff, Rb0+Len}
+ end.
+
+%%============================================================================
+%% Universal time, ITU_T X.680 Chapter 40
+%%
+%% encode UTC time
+%%============================================================================
+
+encode_utc_time(C, {Name,OctetList}, DoTag) when atom(Name) ->
+ encode_utc_time(C, OctetList, DoTag);
+encode_utc_time(_C, OctetList, []) ->
+ dotag_universal(?N_UTCTime, OctetList,length(OctetList));
+encode_utc_time(_C, OctetList, DoTag) ->
+ dotag(DoTag, ?N_UTCTime, {OctetList,length(OctetList)}).
+
+%%============================================================================
+%% decode UTC time
+%% (Buffer, Range, HasTag, TotalLen) -> {String, Remain, RemovedBytes}
+%%============================================================================
+
+decode_utc_time(Buffer, Range, Tags, TotalLen, OptOrMand) ->
+ NewTags = new_tags(Tags,#tag{class=?UNIVERSAL,number=?N_UTCTime}),
+ decode_utc_time_notag(Buffer, Range, NewTags, TotalLen, OptOrMand).
+
+decode_utc_time_notag(Buffer, Range, Tags, TotalLen, OptOrMand) ->
+ {RestTags, {FormLen, Buffer0, Rb0}} =
+ check_tags_i(Tags, Buffer, OptOrMand),
+
+ case FormLen of
+ {?CONSTRUCTED,Len} ->
+ {Buffer00,RestBytes} = split_list(Buffer0,Len),
+ {Val01, Buffer01, Rb01} =
+ decode_utc_time_notag(Buffer00, Range,
+ RestTags, TotalLen,
+ OptOrMand),
+ {Buffer02, Rb02} = restbytes2(RestBytes,Buffer01,noext),
+ {Val01, Buffer02, Rb0+Rb01+Rb02};
+ {_,Len} ->
+ <<PreBuff:Len/binary,RestBuff/binary>> = Buffer0,
+ {binary_to_list(PreBuff), RestBuff, Rb0+Len}
+ end.
+
+
+%%============================================================================
+%% Length handling
+%%
+%% Encode length
+%%
+%% encode_length(Int | indefinite) ->
+%%     [Len (=< 127)] | [16#80 + NoOfLengthOctets, LengthOctetList] | [16#80]
+%%============================================================================
+
+encode_length(indefinite) ->
+ {[16#80],1}; % 128
+encode_length(L) when L =< 16#7F ->
+ {[L],1};
+encode_length(L) ->
+ Oct = minimum_octets(L),
+ Len = length(Oct),
+ if
+ Len =< 126 ->
+ {[ (16#80+Len) | Oct ],Len+1};
+ true ->
+ exit({error,{asn1, to_long_length_oct, Len}})
+ end.
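+
+%% For illustration: short form, long form and indefinite length:
+%%	encode_length(38)         -> {[38],1}
+%%	encode_length(435)        -> {[130,1,179],3}	% 16#82, then 435 = 256*1 + 179
+%%	encode_length(indefinite) -> {[128],1}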
+
+
+%% Val must be >= 0
+minimum_octets(Val) ->
+ minimum_octets(Val,[]).
+
+minimum_octets(0,Acc) ->
+ Acc;
+minimum_octets(Val, Acc) ->
+ minimum_octets((Val bsr 8),[Val band 16#FF | Acc]).
+
+
+%%===========================================================================
+%% Decode length
+%%
+%% decode_length(OctetList) -> {{indefinite, RestOctetsL}, NoRemovedBytes} |
+%% {{Length, RestOctetsL}, NoRemovedBytes}
+%%===========================================================================
+
+decode_length(<<1:1,0:7,T/binary>>) ->
+ {{indefinite, T}, 1};
+decode_length(<<0:1,Length:7,T/binary>>) ->
+ {{Length,T},1};
+decode_length(<<1:1,LL:7,T/binary>>) ->
+ <<Length:LL/unit:8,Rest/binary>> = T,
+ {{Length,Rest}, LL+1}.
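+
+%% For illustration (mirrors the encode_length examples above):
+%%	decode_length(<<38,255>>)        -> {{38,<<255>>},1}
+%%	decode_length(<<130,1,179,255>>) -> {{435,<<255>>},3}
+%%	decode_length(<<128,255>>)       -> {{indefinite,<<255>>},1}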
+
+%decode_length([128 | T]) ->
+% {{indefinite, T},1};
+%decode_length([H | T]) when H =< 127 ->
+% {{H, T},1};
+%decode_length([H | T]) ->
+% dec_long_length(H band 16#7F, T, 0, 1).
+
+
+%%dec_long_length(0, Buffer, Acc, Len) ->
+%% {{Acc, Buffer},Len};
+%%dec_long_length(Bytes, [H | T], Acc, Len) ->
+%% dec_long_length(Bytes - 1, T, (Acc bsl 8) + H, Len+1).
+
+%%===========================================================================
+%% Decode tag and length
+%%
+%% decode_tag_and_length(Buffer) -> {Tag, Len, RemainingBuffer, RemovedBytes}
+%%
+%%===========================================================================
+
+decode_tag_and_length(Buffer) ->
+ {Tag, Buffer2, RemBytesTag} = decode_tag(Buffer),
+ {{Len, Buffer3}, RemBytesLen} = decode_length(Buffer2),
+ {Tag, Len, Buffer3, RemBytesTag+RemBytesLen}.
+
+
+%%============================================================================
+%% Check if valid tag
+%%
+%% check_if_valid_tag(Tag, List_of_valid_tags, OptOrMand) -> name of the tag
+%%===============================================================================
+
+check_if_valid_tag(<<0,0,_/binary>>,_,_) ->
+ asn1_EOC;
+check_if_valid_tag(<<>>, _, OptOrMand) ->
+ check_if_valid_tag2(false,[],[],OptOrMand);
+check_if_valid_tag(Bytes, ListOfTags, OptOrMand) when binary(Bytes) ->
+ {Tag, _, _} = decode_tag(Bytes),
+ check_if_valid_tag(Tag, ListOfTags, OptOrMand);
+
+%% This alternative should be removed in the near future
+%% Bytes as input should be the only necessary call
+check_if_valid_tag(Tag, ListOfTags, OptOrMand) ->
+ {Class, _Form, TagNo} = Tag,
+ C = code_class(Class),
+ T = case C of
+ 'UNIVERSAL' ->
+ code_type(TagNo);
+ _ ->
+ TagNo
+ end,
+ check_if_valid_tag2({C,T}, ListOfTags, Tag, OptOrMand).
+
+check_if_valid_tag2(_Class_TagNo, [], Tag, mandatory) ->
+ exit({error,{asn1,{invalid_tag,Tag}}});
+check_if_valid_tag2(_Class_TagNo, [], Tag, _) ->
+ exit({error,{asn1,{no_optional_tag,Tag}}});
+
+check_if_valid_tag2(Class_TagNo, [{TagName,TagList}|T], Tag, OptOrMand) ->
+ case check_if_valid_tag_loop(Class_TagNo, TagList) of
+ true ->
+ TagName;
+ false ->
+ check_if_valid_tag2(Class_TagNo, T, Tag, OptOrMand)
+ end.
+
+check_if_valid_tag_loop(_Class_TagNo,[]) ->
+ false;
+check_if_valid_tag_loop(Class_TagNo,[H|T]) ->
+ %% It is not possible to distinguish between SEQUENCE OF and SEQUENCE, and
+    %% between SET OF and SET, because they are coded with tag numbers 16 and 17, respectively.
+ H_without_OF = case H of
+ {C, 'SEQUENCE OF'} ->
+ {C, 'SEQUENCE'};
+ {C, 'SET OF'} ->
+ {C, 'SET'};
+ Else ->
+ Else
+ end,
+
+ case H_without_OF of
+ Class_TagNo ->
+ true;
+ {_,_} ->
+ check_if_valid_tag_loop(Class_TagNo,T);
+ _ ->
+ check_if_valid_tag_loop(Class_TagNo,H),
+ check_if_valid_tag_loop(Class_TagNo,T)
+ end.
+
+
+
+code_class(0) -> 'UNIVERSAL';
+code_class(16#40) -> 'APPLICATION';
+code_class(16#80) -> 'CONTEXT';
+code_class(16#C0) -> 'PRIVATE'.
+
+
+code_type(1) -> 'BOOLEAN';
+code_type(2) -> 'INTEGER';
+code_type(3) -> 'BIT STRING';
+code_type(4) -> 'OCTET STRING';
+code_type(5) -> 'NULL';
+code_type(6) -> 'OBJECT IDENTIFIER';
+code_type(7) -> 'OBJECT DESCRIPTOR';
+code_type(8) -> 'EXTERNAL';
+code_type(9) -> 'REAL';
+code_type(10) -> 'ENUMERATED';
+code_type(11) -> 'EMBEDDED_PDV';
+code_type(16) -> 'SEQUENCE'; % 16 is also the tag number of 'SEQUENCE OF'
+code_type(17) -> 'SET'; % 17 is also the tag number of 'SET OF'
+code_type(18) -> 'NumericString';
+code_type(19) -> 'PrintableString';
+code_type(20) -> 'TeletexString';
+code_type(21) -> 'VideotexString';
+code_type(22) -> 'IA5String';
+code_type(23) -> 'UTCTime';
+code_type(24) -> 'GeneralizedTime';
+code_type(25) -> 'GraphicString';
+code_type(26) -> 'VisibleString';
+code_type(27) -> 'GeneralString';
+code_type(28) -> 'UniversalString';
+code_type(30) -> 'BMPString';
+code_type(Else) -> exit({error,{asn1,{unrecognized_type,Else}}}).
+
+%%-------------------------------------------------------------------------
+%% decoding of the components of a SET
+%%-------------------------------------------------------------------------
+
+decode_set(Rb, indefinite, <<0,0,Bytes/binary>>, _OptOrMand, _Fun3, Acc) ->
+ {lists:reverse(Acc),Bytes,Rb+2};
+
+decode_set(Rb, indefinite, Bytes, OptOrMand, Fun3, Acc) ->
+ {Term, Remain, Rb1} = Fun3(Bytes, OptOrMand),
+ decode_set(Rb+Rb1, indefinite, Remain, OptOrMand, Fun3, [Term|Acc]);
+
+decode_set(Rb, Num, Bytes, _OptOrMand, _Fun3, Acc) when Num == 0 ->
+ {lists:reverse(Acc), Bytes, Rb};
+
+decode_set(_, Num, _, _, _, _) when Num < 0 ->
+ exit({error,{asn1,{length_error,'SET'}}});
+
+decode_set(Rb, Num, Bytes, OptOrMand, Fun3, Acc) ->
+ {Term, Remain, Rb1} = Fun3(Bytes, OptOrMand),
+ decode_set(Rb+Rb1, Num-Rb1, Remain, OptOrMand, Fun3, [Term|Acc]).
+
+
+%%-------------------------------------------------------------------------
+%% decoding of SEQUENCE OF and SET OF
+%%-------------------------------------------------------------------------
+
+decode_components(Rb, indefinite, <<0,0,Bytes/binary>>, _Fun3, _TagIn, Acc) ->
+ {lists:reverse(Acc),Bytes,Rb+2};
+
+decode_components(Rb, indefinite, Bytes, Fun3, TagIn, Acc) ->
+ {Term, Remain, Rb1} = Fun3(Bytes, mandatory, TagIn),
+ decode_components(Rb+Rb1, indefinite, Remain, Fun3, TagIn, [Term|Acc]);
+
+decode_components(Rb, Num, Bytes, _Fun3, _TagIn, Acc) when Num == 0 ->
+ {lists:reverse(Acc), Bytes, Rb};
+
+decode_components(_, Num, _, _, _, _) when Num < 0 ->
+ exit({error,{asn1,{length_error,'SET/SEQUENCE OF'}}});
+
+decode_components(Rb, Num, Bytes, Fun3, TagIn, Acc) ->
+ {Term, Remain, Rb1} = Fun3(Bytes, mandatory, TagIn),
+ decode_components(Rb+Rb1, Num-Rb1, Remain, Fun3, TagIn, [Term|Acc]).
+
+%%decode_components(Rb, indefinite, [0,0|Bytes], _Fun3, _TagIn, Acc) ->
+%% {lists:reverse(Acc),Bytes,Rb+2};
+
+decode_components(Rb, indefinite, <<0,0,Bytes/binary>>, _Fun4, _TagIn, _Fun, Acc) ->
+ {lists:reverse(Acc),Bytes,Rb+2};
+
+decode_components(Rb, indefinite, Bytes, _Fun4, TagIn, _Fun, Acc) ->
+ {Term, Remain, Rb1} = _Fun4(Bytes, mandatory, TagIn, _Fun),
+ decode_components(Rb+Rb1, indefinite, Remain, _Fun4, TagIn, _Fun, [Term|Acc]);
+
+decode_components(Rb, Num, Bytes, _Fun4, _TagIn, _Fun, Acc) when Num == 0 ->
+ {lists:reverse(Acc), Bytes, Rb};
+
+decode_components(_, Num, _, _, _, _, _) when Num < 0 ->
+ exit({error,{asn1,{length_error,'SET/SEQUENCE OF'}}});
+
+decode_components(Rb, Num, Bytes, _Fun4, TagIn, _Fun, Acc) ->
+ {Term, Remain, Rb1} = _Fun4(Bytes, mandatory, TagIn, _Fun),
+ decode_components(Rb+Rb1, Num-Rb1, Remain, _Fun4, TagIn, _Fun, [Term|Acc]).
+
+
+
+%%-------------------------------------------------------------------------
+%% INTERNAL HELPER FUNCTIONS (not exported)
+%%-------------------------------------------------------------------------
+
+
+%%==========================================================================
+%% Encode tag
+%%
+%% dotag(tag | notag, TagValpattern | TagValTuple, [Length, Value]) -> [Tag]
+%% TagValPattern is a correct bitpattern for a tag
+%% TagValTuple is a tuple of three bitpatterns, Class, Form and TagNo where
+%% Class = UNIVERSAL | APPLICATION | CONTEXT | PRIVATE
+%% Form = Primitive | Constructed
+%% TagNo = Number of tag
+%%==========================================================================
+
+
+dotag([], Tag, {Bytes,Len}) ->
+ dotag_universal(Tag,Bytes,Len);
+dotag(Tags, Tag, {Bytes,Len}) ->
+ encode_tags(Tags ++ [#tag{class=?UNIVERSAL,number=Tag,form=?PRIMITIVE}],
+ Bytes, Len);
+
+dotag(Tags, Tag, Bytes) ->
+ encode_tags(Tags ++ [#tag{class=?UNIVERSAL,number=Tag,form=?PRIMITIVE}],
+ Bytes, size(Bytes)).
+
+dotag_universal(UniversalTag,Bytes,Len) when Len =< 16#7F->
+ {[UniversalTag,Len,Bytes],2+Len};
+dotag_universal(UniversalTag,Bytes,Len) ->
+ {EncLen,LenLen}=encode_length(Len),
+ {[UniversalTag,EncLen,Bytes],1+LenLen+Len}.
+
+%% decoding positive integer values.
+decode_integer2(Len,Bin = <<0:1,_:7,_Bs/binary>>,RemovedBytes) ->
+ <<Int:Len/unit:8,Buffer2/binary>> = Bin,
+ {Int,Buffer2,RemovedBytes};
+%% decoding negative integer values.
+decode_integer2(Len,<<1:1,B2:7,Bs/binary>>,RemovedBytes) ->
+ <<N:Len/unit:8,Buffer2/binary>> = <<B2,Bs/binary>>,
+ Int = N - (1 bsl (8 * Len - 1)),
+ {Int,Buffer2,RemovedBytes}.
+
+%%decode_integer2(Len,Buffer,Acc,RemovedBytes) when (hd(Buffer) band 16#FF) =< 16#7F ->
+%% {decode_integer_pos(Buffer, 8 * (Len - 1)),skip(Buffer,Len),RemovedBytes};
+%%decode_integer2(Len,Buffer,Acc,RemovedBytes) ->
+%% {decode_integer_neg(Buffer, 8 * (Len - 1)),skip(Buffer,Len),RemovedBytes}.
+
+%%decode_integer_pos([Byte|Tail], Shift) ->
+%% (Byte bsl Shift) bor decode_integer_pos(Tail, Shift-8);
+%%decode_integer_pos([], _) -> 0.
+
+
+%%decode_integer_neg([Byte|Tail], Shift) ->
+%% (-128 + (Byte band 127) bsl Shift) bor decode_integer_pos(Tail, Shift-8).
+
+
+concat_bit_binaries([],Bin={_,_}) ->
+ Bin;
+concat_bit_binaries({0,B1},{U2,B2}) ->
+ {U2,<<B1/binary,B2/binary>>};
+concat_bit_binaries({U1,B1},{U2,B2}) ->
+ S1 = (size(B1) * 8) - U1,
+ S2 = (size(B2) * 8) - U2,
+ PadBits = 8 - ((S1+S2) rem 8),
+ {PadBits, <<B1:S1/binary-unit:1,B2:S2/binary-unit:1,0:PadBits>>};
+concat_bit_binaries(L1,L2) when list(L1),list(L2) ->
+ %% this case occur when decoding with NNL
+ L1 ++ L2.
+
+
+get_constraint(C,Key) ->
+ case lists:keysearch(Key,1,C) of
+ false ->
+ no;
+ {value,{_,V}} ->
+ V
+ end.
+
+%%skip(Buffer, 0) ->
+%% Buffer;
+%%skip([H | T], Len) ->
+%% skip(T, Len-1).
+
+new_tags([],LastTag) ->
+ [LastTag];
+new_tags(Tags=[#tag{type='IMPLICIT'}],_LastTag) ->
+ Tags;
+new_tags([T1 = #tag{type='IMPLICIT'},#tag{type=T2Type}|Rest],LastTag) ->
+ new_tags([T1#tag{type=T2Type}|Rest],LastTag);
+new_tags(Tags,LastTag) ->
+ case lists:last(Tags) of
+ #tag{type='IMPLICIT'} ->
+ Tags;
+ _ ->
+ Tags ++ [LastTag]
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl
new file mode 100644
index 0000000000..50a91cf201
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl
@@ -0,0 +1,1849 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1rt_ber_bin_v2.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+-module(asn1rt_ber_bin_v2).
+
+%% encoding / decoding of BER
+
+-export([decode/1, decode/2, match_tags/2, encode/1]).
+-export([fixoptionals/2, cindex/3,
+ list_to_record/2,
+ encode_tag_val/1,
+ encode_tags/3]).
+-export([encode_boolean/2,decode_boolean/2,
+ encode_integer/3,encode_integer/4,
+ decode_integer/3, decode_integer/4,
+ encode_enumerated/2,
+ encode_enumerated/4,decode_enumerated/4,
+ encode_real/2,decode_real/3,
+ encode_bit_string/4,decode_bit_string/4,
+ decode_compact_bit_string/4,
+ encode_octet_string/3,decode_octet_string/3,
+ encode_null/2,decode_null/2,
+ encode_object_identifier/2,decode_object_identifier/2,
+ encode_restricted_string/4,decode_restricted_string/4,
+ encode_universal_string/3,decode_universal_string/3,
+ encode_BMP_string/3,decode_BMP_string/3,
+ encode_generalized_time/3,decode_generalized_time/3,
+ encode_utc_time/3,decode_utc_time/3,
+ encode_length/1,decode_length/1,
+ decode_tag_and_length/1]).
+
+-export([encode_open_type/1,encode_open_type/2,
+ decode_open_type/2,decode_open_type_as_binary/2]).
+
+-export([decode_primitive_incomplete/2]).
+
+-include("asn1_records.hrl").
+
+% the encoding of class of tag bits 8 and 7
+-define(UNIVERSAL, 0).
+-define(APPLICATION, 16#40).
+-define(CONTEXT, 16#80).
+-define(PRIVATE, 16#C0).
+
+%%% primitive or constructed encoding % bit 6
+-define(PRIMITIVE, 0).
+-define(CONSTRUCTED, 2#00100000).
+
+%%% The tag-number for universal types
+-define(N_BOOLEAN, 1).
+-define(N_INTEGER, 2).
+-define(N_BIT_STRING, 3).
+-define(N_OCTET_STRING, 4).
+-define(N_NULL, 5).
+-define(N_OBJECT_IDENTIFIER, 6).
+-define(N_OBJECT_DESCRIPTOR, 7).
+-define(N_EXTERNAL, 8).
+-define(N_REAL, 9).
+-define(N_ENUMERATED, 10).
+-define(N_EMBEDDED_PDV, 11).
+-define(N_SEQUENCE, 16).
+-define(N_SET, 17).
+-define(N_NumericString, 18).
+-define(N_PrintableString, 19).
+-define(N_TeletexString, 20).
+-define(N_VideotexString, 21).
+-define(N_IA5String, 22).
+-define(N_UTCTime, 23).
+-define(N_GeneralizedTime, 24).
+-define(N_GraphicString, 25).
+-define(N_VisibleString, 26).
+-define(N_GeneralString, 27).
+-define(N_UniversalString, 28).
+-define(N_BMPString, 30).
+
+
+% the complete tag-word of built-in types
+-define(T_BOOLEAN, ?UNIVERSAL bor ?PRIMITIVE bor 1).
+-define(T_INTEGER, ?UNIVERSAL bor ?PRIMITIVE bor 2).
+-define(T_BIT_STRING, ?UNIVERSAL bor ?PRIMITIVE bor 3). % can be CONSTRUCTED
+-define(T_OCTET_STRING, ?UNIVERSAL bor ?PRIMITIVE bor 4). % can be CONSTRUCTED
+-define(T_NULL, ?UNIVERSAL bor ?PRIMITIVE bor 5).
+-define(T_OBJECT_IDENTIFIER,?UNIVERSAL bor ?PRIMITIVE bor 6).
+-define(T_OBJECT_DESCRIPTOR,?UNIVERSAL bor ?PRIMITIVE bor 7).
+-define(T_EXTERNAL, ?UNIVERSAL bor ?PRIMITIVE bor 8).
+-define(T_REAL, ?UNIVERSAL bor ?PRIMITIVE bor 9).
+-define(T_ENUMERATED, ?UNIVERSAL bor ?PRIMITIVE bor 10).
+-define(T_EMBEDDED_PDV, ?UNIVERSAL bor ?PRIMITIVE bor 11).
+-define(T_SEQUENCE, ?UNIVERSAL bor ?CONSTRUCTED bor 16).
+-define(T_SET, ?UNIVERSAL bor ?CONSTRUCTED bor 17).
+-define(T_NumericString, ?UNIVERSAL bor ?PRIMITIVE bor 18). %can be constructed
+-define(T_PrintableString, ?UNIVERSAL bor ?PRIMITIVE bor 19). %can be constructed
+-define(T_TeletexString, ?UNIVERSAL bor ?PRIMITIVE bor 20). %can be constructed
+-define(T_VideotexString, ?UNIVERSAL bor ?PRIMITIVE bor 21). %can be constructed
+-define(T_IA5String, ?UNIVERSAL bor ?PRIMITIVE bor 22). %can be constructed
+-define(T_UTCTime, ?UNIVERSAL bor ?PRIMITIVE bor 23).
+-define(T_GeneralizedTime, ?UNIVERSAL bor ?PRIMITIVE bor 24).
+-define(T_GraphicString, ?UNIVERSAL bor ?PRIMITIVE bor 25). %can be constructed
+-define(T_VisibleString, ?UNIVERSAL bor ?PRIMITIVE bor 26). %can be constructed
+-define(T_GeneralString, ?UNIVERSAL bor ?PRIMITIVE bor 27). %can be constructed
+-define(T_UniversalString, ?UNIVERSAL bor ?PRIMITIVE bor 28). %can be constructed
+-define(T_BMPString, ?UNIVERSAL bor ?PRIMITIVE bor 30). %can be constructed
+
+% encode(Tlv={_Tag={?PRIMITIVE,_},_VList}) ->
+% encode_primitive(Tlv);
+% encode(Tlv) ->
+% encode_constructed(Tlv).
+
+encode([Tlv]) ->
+ encode(Tlv);
+encode({TlvTag,TlvVal}) when list(TlvVal) ->
+ %% constructed form of value
+ encode_tlv(TlvTag,TlvVal,?CONSTRUCTED);
+encode({TlvTag,TlvVal}) ->
+ encode_tlv(TlvTag,TlvVal,?PRIMITIVE);
+encode(Bin) when binary(Bin) ->
+ Bin.
+
+encode_tlv(TlvTag,TlvVal,Form) ->
+ Tag = encode_tlv_tag(TlvTag,Form),
+ {Val,VLen} = encode_tlv_val(TlvVal),
+ {Len,_LLen} = encode_length(VLen),
+ BinLen = list_to_binary(Len),
+ <<Tag/binary,BinLen/binary,Val/binary>>.
+
+encode_tlv_tag(ClassTagNo,Form) ->
+ Class = ClassTagNo bsr 16,
+ case encode_tag_val({Class bsl 6,Form,(ClassTagNo - (Class bsl 16))}) of
+ T when list(T) ->
+ list_to_binary(T);
+ T ->
+ T
+ end.
+
+encode_tlv_val(TlvL) when list(TlvL) ->
+ encode_tlv_list(TlvL,[]);
+encode_tlv_val(Bin) ->
+ {Bin,size(Bin)}.
+
+encode_tlv_list([Tlv|Tlvs],Acc) ->
+ EncTlv = encode(Tlv),
+ encode_tlv_list(Tlvs,[EncTlv|Acc]);
+encode_tlv_list([],Acc) ->
+ Bin=list_to_binary(lists:reverse(Acc)),
+ {Bin,size(Bin)}.
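+
+%% For illustration: a primitive TLV with context-specific tag number 3,
+%% i.e. the tag value (2 bsl 16) + 3 = 131075, and the value <<1,2,3>>
+%% encodes to the tag octet 16#83, the length octet 3 and the value octets:
+%%	encode({131075,<<1,2,3>>}) -> <<131,3,1,2,3>>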
+
+% encode_primitive({{_,ClassTagNo},V}) ->
+% Len = size(V), % not sufficient as length encode
+% Class = ClassTagNo bsr 16,
+% {TagLen,Tag} =
+% case encode_tag_val({Class,?PRIMITIVE,ClassTagNo - Class}) of
+% T when list(T) ->
+% {length(T),list_to_binary(T)};
+% T ->
+% {1,T}
+% end,
+
+
+decode(B,driver) ->
+ case catch port_control(drv_complete,2,B) of
+ Bin when binary(Bin) ->
+ binary_to_term(Bin);
+ List when list(List) -> handle_error(List,B);
+ {'EXIT',{badarg,Reason}} ->
+ asn1rt_driver_handler:load_driver(),
+ receive
+ driver_ready ->
+ case catch port_control(drv_complete,2,B) of
+ Bin2 when binary(Bin2) -> binary_to_term(Bin2);
+ List when list(List) -> handle_error(List,B);
+ Error -> exit(Error)
+ end;
+ {error,Error} -> % error when loading driver
+ %% the driver could not be loaded
+ exit(Error);
+ Error={port_error,Reason} ->
+ exit(Error)
+ end;
+ {'EXIT',Reason} ->
+ exit(Reason)
+ end.
+
+handle_error([],_)->
+ exit({error,{"memory allocation problem"}});
+handle_error([$1|_],L) -> % error in driver
+ exit({error,{asn1_error,L}});
+handle_error([$2|_],L) -> % error in driver due to wrong tag
+ exit({error,{asn1_error,{"bad tag",L}}});
+handle_error([$3|_],L) -> % error in driver due to length error
+ exit({error,{asn1_error,{"bad length field",L}}});
+handle_error([$4|_],L) -> % error in driver due to indefinite length error
+ exit({error,{asn1_error,{"indefinite length without end bytes",L}}});
+handle_error(ErrL,L) ->
+ exit({error,{unknown_error,ErrL,L}}).
+
+
+decode(Bin) when binary(Bin) ->
+ decode_primitive(Bin);
+decode(Tlv) -> % assume it is a tlv
+ {Tlv,<<>>}.
+
+
+decode_primitive(Bin) ->
+ {{Form,TagNo,Len,V},Rest} = decode_tlv(Bin),
+ case Form of
+ 1 when Len == indefinite -> % constructed
+ {Vlist,Rest2} = decode_constructed_indefinite(V,[]),
+ {{TagNo,Vlist},Rest2};
+ 1 -> % constructed
+ {{TagNo,decode_constructed(V)},Rest};
+ 0 -> % primitive
+ {{TagNo,V},Rest}
+ end.
+
+decode_constructed(<<>>) ->
+ [];
+decode_constructed(Bin) ->
+ {Tlv,Rest} = decode_primitive(Bin),
+ [Tlv|decode_constructed(Rest)].
+
+decode_constructed_indefinite(<<0,0,Rest/binary>>,Acc) ->
+ {lists:reverse(Acc),Rest};
+decode_constructed_indefinite(Bin,Acc) ->
+ {Tlv,Rest} = decode_primitive(Bin),
+ decode_constructed_indefinite(Rest, [Tlv|Acc]).
+
+decode_tlv(Bin) ->
+ {Form,TagNo,Len,Bin2} = decode_tag_and_length(Bin),
+ case Len of
+ indefinite ->
+ {{Form,TagNo,Len,Bin2},[]};
+ _ ->
+ <<V:Len/binary,Bin3/binary>> = Bin2,
+ {{Form,TagNo,Len,V},Bin3}
+ end.
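+
+%% For illustration (the inverse of the encode example above):
+%%	decode(<<131,3,1,2,3>>) -> {{131075,<<1,2,3>>},<<>>}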
+
+%% decode_primitive_incomplete/2 decodes an encoded message incompletely,
+%% guided by the pattern attribute (first argument).
+decode_primitive_incomplete([[default,TagNo]],Bin) -> %default
+ case decode_tlv(Bin) of
+ {{Form,TagNo,Len,V},Rest} ->
+ decode_incomplete2(Form,TagNo,Len,V,[],Rest);
+ _ ->
+ %{asn1_DEFAULT,Bin}
+ asn1_NOVALUE
+ end;
+decode_primitive_incomplete([[default,TagNo,Directives]],Bin) -> %default, constructed type, Directives points into this type
+ case decode_tlv(Bin) of
+ {{Form,TagNo,Len,V},Rest} ->
+ decode_incomplete2(Form,TagNo,Len,V,Directives,Rest);
+ _ ->
+ %{asn1_DEFAULT,Bin}
+ asn1_NOVALUE
+ end;
+decode_primitive_incomplete([[opt,TagNo]],Bin) -> %optional
+ case decode_tlv(Bin) of
+ {{Form,TagNo,Len,V},Rest} ->
+ decode_incomplete2(Form,TagNo,Len,V,[],Rest);
+ _ ->
+ %{{TagNo,asn1_NOVALUE},Bin}
+ asn1_NOVALUE
+ end;
+decode_primitive_incomplete([[opt,TagNo,Directives]],Bin) -> %optional
+ case decode_tlv(Bin) of
+ {{Form,TagNo,Len,V},Rest} ->
+ decode_incomplete2(Form,TagNo,Len,V,Directives,Rest);
+ _ ->
+ %{{TagNo,asn1_NOVALUE},Bin}
+ asn1_NOVALUE
+ end;
+%% A choice alternative that shall be undecoded
+decode_primitive_incomplete([[alt_undec,TagNo]|RestAlts],Bin) ->
+% decode_incomplete_bin(Bin);
+ case decode_tlv(Bin) of
+ {{_Form,TagNo,_Len,_V},_R} ->
+ decode_incomplete_bin(Bin);
+ _ ->
+ decode_primitive_incomplete(RestAlts,Bin)
+ end;
+decode_primitive_incomplete([[alt,TagNo]|RestAlts],Bin) ->
+ case decode_tlv(Bin) of
+ {{_Form,TagNo,_Len,V},Rest} ->
+ {{TagNo,V},Rest};
+ _ ->
+ decode_primitive_incomplete(RestAlts,Bin)
+ end;
+decode_primitive_incomplete([[alt,TagNo,Directives]|RestAlts],Bin) ->
+ case decode_tlv(Bin) of
+ {{Form,TagNo,Len,V},Rest} ->
+ decode_incomplete2(Form,TagNo,Len,V,Directives,Rest);
+ _ ->
+ decode_primitive_incomplete(RestAlts,Bin)
+ end;
+decode_primitive_incomplete([[alt_parts,TagNo]|RestAlts],Bin) ->
+ case decode_tlv(Bin) of
+ {{_Form,TagNo,_Len,V},Rest} ->
+ {{TagNo,decode_parts_incomplete(V)},Rest};
+ _ ->
+ decode_primitive_incomplete(RestAlts,Bin)
+ end;
+decode_primitive_incomplete([[undec,_TagNo]|_RestTag],Bin) -> % incomplete decode
+ decode_incomplete_bin(Bin); %% use this if changing handling of
+decode_primitive_incomplete([[parts,TagNo]|_RestTag],Bin) ->
+ case decode_tlv(Bin) of
+ {{_Form,TagNo,_Len,V},Rest} ->
+ {{TagNo,decode_parts_incomplete(V)},Rest};
+ Err ->
+ {error,{asn1,"tag failure",TagNo,Err}}
+ end;
+decode_primitive_incomplete([mandatory|RestTag],Bin) ->
+ case decode_tlv(Bin) of
+ {{Form,TagNo,Len,V},Rest} ->
+ decode_incomplete2(Form,TagNo,Len,V,RestTag,Rest);
+ _ ->
+ {error,{asn1,"partial incomplete decode failure"}}
+ end;
+%% A choice that is a toptype or a mandatory component of a
+%% SEQUENCE or SET.
+decode_primitive_incomplete([[mandatory,Directives]],Bin) ->
+ case decode_tlv(Bin) of
+ {{Form,TagNo,Len,V},Rest} ->
+ decode_incomplete2(Form,TagNo,Len,V,Directives,Rest);
+ _ ->
+ {error,{asn1,"partial incomplete decode failure"}}
+ end;
+decode_primitive_incomplete([],Bin) ->
+ decode_primitive(Bin).
+
+%% decode_parts_incomplete/1 receives a number of values encoded in
+%% sequence and returns the parts as undecoded binaries
+decode_parts_incomplete(<<>>) ->
+ [];
+decode_parts_incomplete(Bin) ->
+ {ok,Rest} = skip_tag(Bin),
+ {ok,Rest2} = skip_length_and_value(Rest),
+ LenPart = size(Bin) - size(Rest2),
+ <<Part:LenPart/binary,RestBin/binary>> = Bin,
+ [Part|decode_parts_incomplete(RestBin)].
+
+
+%% decode_incomplete2 checks if V is a value of a constructed or
+%% primitive type, and continues the decode properly.
+decode_incomplete2(1,TagNo,indefinite,V,TagMatch,_) ->
+ %% constructed indefinite length
+ {Vlist,Rest2} = decode_constr_indef_incomplete(TagMatch,V,[]),
+ {{TagNo,Vlist},Rest2};
+decode_incomplete2(1,TagNo,_Len,V,TagMatch,Rest) ->
+ {{TagNo,decode_constructed_incomplete(TagMatch,V)},Rest};
+decode_incomplete2(0,TagNo,_Len,V,_TagMatch,Rest) ->
+ {{TagNo,V},Rest}.
+
+decode_constructed_incomplete(_TagMatch,<<>>) ->
+ [];
+decode_constructed_incomplete([mandatory|RestTag],Bin) ->
+ {Tlv,Rest} = decode_primitive(Bin),
+ [Tlv|decode_constructed_incomplete(RestTag,Rest)];
+decode_constructed_incomplete(Directives=[[Alt,_]|_],Bin)
+ when Alt == alt_undec; Alt == alt ->
+ case decode_tlv(Bin) of
+ {{_Form,TagNo,_Len,V},Rest} ->
+ case incomplete_choice_alt(TagNo,Directives) of
+ alt_undec ->
+ LenA = size(Bin)-size(Rest),
+ <<A:LenA/binary,Rest/binary>> = Bin,
+ A;
+% {UndecBin,_}=decode_incomplete_bin(Bin),
+% UndecBin;
+% [{TagNo,V}];
+ alt ->
+ {Tlv,_} = decode_primitive(V),
+ [{TagNo,Tlv}];
+ alt_parts ->
+ %{{TagNo,decode_parts_incomplete(V)},Rest}; % maybe wrong
+ [{TagNo,decode_parts_incomplete(V)}];
+ Err ->
+ {error,{asn1,"partial incomplete decode failure",Err}}
+ end;
+ _ ->
+ {error,{asn1,"partial incomplete decode failure"}}
+ end;
+decode_constructed_incomplete([TagNo|RestTag],Bin) ->
+%% {Tlv,Rest} = decode_primitive_incomplete([TagNo],Bin),
+ case decode_primitive_incomplete([TagNo],Bin) of
+ {Tlv,Rest} ->
+ [Tlv|decode_constructed_incomplete(RestTag,Rest)];
+ asn1_NOVALUE ->
+ decode_constructed_incomplete(RestTag,Bin)
+ end;
+decode_constructed_incomplete([],Bin) ->
+ {Tlv,_Rest}=decode_primitive(Bin),
+ [Tlv].
+
+decode_constr_indef_incomplete(_TagMatch,<<0,0,Rest/binary>>,Acc) ->
+ {lists:reverse(Acc),Rest};
+decode_constr_indef_incomplete([Tag|RestTags],Bin,Acc) ->
+% {Tlv,Rest} = decode_primitive_incomplete([Tag],Bin),
+ case decode_primitive_incomplete([Tag],Bin) of
+ {Tlv,Rest} ->
+ decode_constr_indef_incomplete(RestTags,Rest,[Tlv|Acc]);
+ asn1_NOVALUE ->
+ decode_constr_indef_incomplete(RestTags,Bin,Acc)
+ end.
+
+
+decode_incomplete_bin(Bin) ->
+ {ok,Rest} = skip_tag(Bin),
+ {ok,Rest2} = skip_length_and_value(Rest),
+ IncLen = size(Bin) - size(Rest2),
+ <<IncBin:IncLen/binary,Ret/binary>> = Bin,
+ {IncBin,Ret}.
+
+incomplete_choice_alt(TagNo,[[Alt,TagNo]|_Directives]) ->
+ Alt;
+incomplete_choice_alt(TagNo,[_H|Directives]) ->
+ incomplete_choice_alt(TagNo,Directives);
+incomplete_choice_alt(_,[]) ->
+ error.
+
+
+%% skip_tag and skip_length_and_value are routines used both by
+%% decode_partial_incomplete and decode_partial (decode/2).
+
+skip_tag(<<_:3,31:5,Rest/binary>>)->
+ skip_long_tag(Rest);
+skip_tag(<<_:3,_Tag:5,Rest/binary>>) ->
+ {ok,Rest}.
+
+skip_long_tag(<<1:1,_:7,Rest/binary>>) ->
+ skip_long_tag(Rest);
+skip_long_tag(<<0:1,_:7,Rest/binary>>) ->
+ {ok,Rest}.
+
+skip_length_and_value(Binary) ->
+ case decode_length(Binary) of
+ {indefinite,RestBinary} ->
+ skip_indefinite_value(RestBinary);
+ {Length,RestBinary} ->
+ <<_:Length/unit:8,Rest/binary>> = RestBinary,
+ {ok,Rest}
+ end.
+
+skip_indefinite_value(<<0,0,Rest/binary>>) ->
+ {ok,Rest};
+skip_indefinite_value(Binary) ->
+ {ok,RestBinary}=skip_tag(Binary),
+ {ok,RestBinary2} = skip_length_and_value(RestBinary),
+ skip_indefinite_value(RestBinary2).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% match_tags takes a Tlv (Tag, Length, Value) structure and matches
+%% it against the tags in TagList. If the tags do not match, the call
+%% fails; otherwise it returns the remaining Tlv after the matched tags
+%% have been removed.
+%%
+%% match_tags(Tlv, TagList)
+%%
+
+
+match_tags({T,V}, [T|Tt]) ->
+ match_tags(V,Tt);
+match_tags([{T,V}],[T|Tt]) ->
+ match_tags(V, Tt);
+match_tags(Vlist = [{T,_V}|_], [T]) ->
+ Vlist;
+match_tags(Tlv, []) ->
+ Tlv;
+match_tags({Tag,_V},[T|_Tt]) ->
+ {error,{asn1,{wrong_tag,{Tag,T}}}}.
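+
+%% For illustration: matching the single expected tag 131075 strips it and
+%% returns the value part of the Tlv:
+%%	match_tags({131075,<<1,2,3>>}, [131075]) -> <<1,2,3>>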
+
+
+cindex(Ix,Val,Cname) ->
+ case element(Ix,Val) of
+ {Cname,Val2} -> Val2;
+ X -> X
+ end.
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Optionals, preset not filled optionals with asn1_NOVALUE
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+% converts a list to a record if necessary
+list_to_record(Name,List) when list(List) ->
+ list_to_tuple([Name|List]);
+list_to_record(_Name,Tuple) when tuple(Tuple) ->
+ Tuple.
+
+
+fixoptionals(OptList,Val) when list(Val) ->
+ fixoptionals(OptList,Val,1,[],[]).
+
+fixoptionals([{Name,Pos}|Ot],[{Name,Val}|Vt],_Opt,Acc1,Acc2) ->
+ fixoptionals(Ot,Vt,Pos+1,[1|Acc1],[{Name,Val}|Acc2]);
+fixoptionals([{_Name,Pos}|Ot],V,Pos,Acc1,Acc2) ->
+ fixoptionals(Ot,V,Pos+1,[0|Acc1],[asn1_NOVALUE|Acc2]);
+fixoptionals(O,[Vh|Vt],Pos,Acc1,Acc2) ->
+ fixoptionals(O,Vt,Pos+1,Acc1,[Vh|Acc2]);
+fixoptionals([],[Vh|Vt],Pos,Acc1,Acc2) ->
+ fixoptionals([],Vt,Pos+1,Acc1,[Vh|Acc2]);
+fixoptionals([],[],_,_Acc1,Acc2) ->
+ % return Val as a record
+ list_to_tuple([asn1_RECORDNAME|lists:reverse(Acc2)]).
+
+
+%% encode_tag(TagClass(?UNI, APP etc), Form (?PRIM etc), TagInteger) ->
+%% 8bit Int | binary
+encode_tag_val({Class, Form, TagNo}) when (TagNo =< 30) ->
+ <<(Class bsr 6):2,(Form bsr 5):1,TagNo:5>>;
+
+encode_tag_val({Class, Form, TagNo}) ->
+ {Octets,_Len} = mk_object_val(TagNo),
+ BinOct = list_to_binary(Octets),
+ <<(Class bsr 6):2, (Form bsr 5):1, 31:5,BinOct/binary>>;
+
+%% assumes a whole correct tag bitpattern, a multiple of 8 bits
+encode_tag_val(Tag) when (Tag =< 255) -> Tag; %% is this function used??!!
+%% assumes a correct bitpattern of 0-5
+encode_tag_val(Tag) -> encode_tag_val2(Tag,[]).
+
+encode_tag_val2(Tag, OctAck) when (Tag =< 255) ->
+ [Tag | OctAck];
+encode_tag_val2(Tag, OctAck) ->
+ encode_tag_val2(Tag bsr 8, [255 band Tag | OctAck]).
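+
+%% For illustration: a context-specific, constructed tag with number 3
+%% becomes the single tag octet 16#A3, and the UNIVERSAL primitive tag 2
+%% (INTEGER) becomes 16#02:
+%%	encode_tag_val({?CONTEXT, ?CONSTRUCTED, 3}) -> <<163>>
+%%	encode_tag_val({?UNIVERSAL, ?PRIMITIVE, 2}) -> <<2>>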
+
+
+%%===============================================================================
+%% Decode a tag
+%%
+%% decode_tag(OctetListBuffer) -> {{Form, (Class bsl 16)+ TagNo}, RestOfBuffer, RemovedBytes}
+%%===============================================================================
+
+decode_tag_and_length(<<Class:2, Form:1, TagNo:5, 0:1, Length:7, RestBuffer/binary>>) when TagNo < 31 ->
+ {Form, (Class bsl 16) + TagNo, Length, RestBuffer};
+decode_tag_and_length(<<Class:2, Form:1, TagNo:5, 1:1, 0:7, T/binary>>) when TagNo < 31 ->
+ {Form, (Class bsl 16) + TagNo, indefinite, T};
+decode_tag_and_length(<<Class:2, Form:1, TagNo:5, 1:1, LL:7, T/binary>>) when TagNo < 31 ->
+ <<Length:LL/unit:8,RestBuffer/binary>> = T,
+ {Form, (Class bsl 16) + TagNo, Length, RestBuffer};
+decode_tag_and_length(<<Class:2, Form:1, 31:5, 0:1, TagNo:7, 0:1, Length:7, RestBuffer/binary>>) ->
+ {Form, (Class bsl 16) + TagNo, Length, RestBuffer};
+decode_tag_and_length(<<Class:2, Form:1, 31:5, 0:1, TagNo:7, 1:1, 0:7, T/binary>>) ->
+ {Form, (Class bsl 16) + TagNo, indefinite, T};
+decode_tag_and_length(<<Class:2, Form:1, 31:5, 0:1, TagNo:7, 1:1, LL:7, T/binary>>) ->
+ <<Length:LL/unit:8,RestBuffer/binary>> = T,
+ {Form, (Class bsl 16) + TagNo, Length, RestBuffer};
+decode_tag_and_length(<<Class:2, Form:1, 31:5, Buffer/binary>>) ->
+ {TagNo, Buffer1} = decode_tag(Buffer, 0),
+ {Length, RestBuffer} = decode_length(Buffer1),
+ {Form, (Class bsl 16) + TagNo, Length, RestBuffer}.
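+
+%% For illustration: the tag octet 16#A3 (context, constructed, number 3)
+%% followed by the length octet 3 decodes to form 1 and tag value 131075:
+%%	decode_tag_and_length(<<163,3,1,2,3>>) -> {1,131075,3,<<1,2,3>>}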
+
+
+
+%% last partial tag
+decode_tag(<<0:1,PartialTag:7, Buffer/binary>>, TagAck) ->
+ TagNo = (TagAck bsl 7) bor PartialTag,
+ %%<<TagNo>> = <<TagAck:1, PartialTag:7>>,
+ {TagNo, Buffer};
+% more tags
+decode_tag(<<_:1,PartialTag:7, Buffer/binary>>, TagAck) ->
+ TagAck1 = (TagAck bsl 7) bor PartialTag,
+ %%<<TagAck1:16>> = <<TagAck:1, PartialTag:7,0:8>>,
+ decode_tag(Buffer, TagAck1).
+
+
+%%=======================================================================
+%%
+%% Encode all tags in the list Tags and return a possibly deep list of
+%% bytes with tag and length encoded
+%% The taglist must be in reverse order (fixed by the asn1 compiler)
+%% e.g [T1,T2] will result in
+%% {[EncodedT2,EncodedT1|BytesSoFar],LenSoFar+LenT2+LenT1}
+%%
+
+encode_tags([Tag|Trest], BytesSoFar, LenSoFar) ->
+% remove {Bytes1,L1} = encode_one_tag(Tag),
+ {Bytes2,L2} = encode_length(LenSoFar),
+ encode_tags(Trest, [Tag,Bytes2|BytesSoFar],
+ LenSoFar + size(Tag) + L2);
+encode_tags([], BytesSoFar, LenSoFar) ->
+ {BytesSoFar,LenSoFar}.
+
+encode_tags(TagIn, {BytesSoFar,LenSoFar}) ->
+ encode_tags(TagIn, BytesSoFar, LenSoFar).
+
+% encode_one_tag(#tag{class=Class,number=No,type=Type, form = Form}) ->
+% NewForm = case Type of
+% 'EXPLICIT' ->
+% ?CONSTRUCTED;
+% _ ->
+% Form
+% end,
+% Bytes = encode_tag_val({Class,NewForm,No}),
+% {Bytes,size(Bytes)}.
+
+
+%%===============================================================================
+%%
+%% This comment is valid for all the encode/decode functions
+%%
+%% C = Constraint -> typically {'ValueRange',LowerBound,UpperBound}
+%% used for PER-coding but not for BER-coding.
+%%
+%% Val = Value. If Val is an atom then it is a symbolic integer value
+%% (i.e the atom must be one of the names in the NamedNumberList).
+%% The NamedNumberList is used to translate the atom to an integer value
+%% before encoding.
+%%
+%%===============================================================================
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_open_type(Value) -> io_list (i.e nested list with integers, binaries)
+%% Value = list of bytes of an already encoded value (the list must be flat)
+%% | binary
+
+%%
+encode_open_type(Val) when list(Val) ->
+% {Val,length(Val)};
+ encode_open_type(list_to_binary(Val));
+encode_open_type(Val) ->
+ {Val, size(Val)}.
+
+%%
+encode_open_type(Val, T) when list(Val) ->
+ encode_open_type(list_to_binary(Val),T);
+encode_open_type(Val,[]) ->
+ {Val, size(Val)};
+encode_open_type(Val,Tag) ->
+ encode_tags(Tag,Val, size(Val)).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_open_type(Tlv, TagIn) -> Value
+%% Tlv = {Tag,V} | V where V -> binary()
+%% TagIn = [TagVal] where TagVal -> int()
+%% Value = binary with decoded data (which must be decoded again as some type)
+%%
+decode_open_type(Tlv, TagIn) ->
+ case match_tags(Tlv,TagIn) of
+ Bin when binary(Bin) ->
+ {InnerTlv,_} = decode(Bin),
+ InnerTlv;
+ TlvBytes -> TlvBytes
+ end.
+
+
+decode_open_type_as_binary(Tlv,TagIn)->
+ case match_tags(Tlv,TagIn) of
+ V when binary(V) ->
+ V;
+ [Tlv2] -> encode(Tlv2);
+ Tlv2 -> encode(Tlv2)
+ end.
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Boolean, ITU_T X.690 Chapter 8.2
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+%%===============================================================================
+%% encode_boolean(Integer, ReversedTagList) -> {[Octet],Len}
+%%===============================================================================
+
+encode_boolean({Name, Val}, TagIn) when atom(Name) ->
+ encode_boolean(Val, TagIn);
+encode_boolean(true, TagIn) ->
+ encode_tags(TagIn, [16#FF],1);
+encode_boolean(false, TagIn) ->
+ encode_tags(TagIn, [0],1);
+encode_boolean(X,_) ->
+ exit({error,{asn1, {encode_boolean, X}}}).
+
+
+%%===============================================================================
+%% decode_boolean(BuffList, HasTag, TotalLen) -> {true, Remain, RemovedBytes} |
+%% {false, Remain, RemovedBytes}
+%%===============================================================================
+decode_boolean(Tlv,TagIn) ->
+ Val = match_tags(Tlv, TagIn),
+ case Val of
+ <<0:8>> ->
+ false;
+ <<_:8>> ->
+ true;
+ _ ->
+ exit({error,{asn1, {decode_boolean, Val}}})
+ end.
+
+
+%%===========================================================================
+%% Integer, ITU_T X.690 Chapter 8.3
+
+%% encode_integer(Constraint, Value, Tag) -> [octet list]
+%% encode_integer(Constraint, Name, NamedNumberList, Tag) -> [octet list]
+%% Value = INTEGER | {Name,INTEGER}
+%% Tag = tag | notag
+%%===========================================================================
+
+encode_integer(C, Val, Tag) when integer(Val) ->
+ encode_tags(Tag, encode_integer(C, Val));
+encode_integer(C,{Name,Val},Tag) when atom(Name) ->
+ encode_integer(C,Val,Tag);
+encode_integer(_C, Val, _Tag) ->
+ exit({error,{asn1, {encode_integer, Val}}}).
+
+
+
+encode_integer(C, Val, NamedNumberList, Tag) when atom(Val) ->
+ case lists:keysearch(Val, 1, NamedNumberList) of
+ {value,{_, NewVal}} ->
+ encode_tags(Tag, encode_integer(C, NewVal));
+ _ ->
+ exit({error,{asn1, {encode_integer_namednumber, Val}}})
+ end;
+encode_integer(C,{_Name,Val},NamedNumberList,Tag) ->
+ encode_integer(C,Val,NamedNumberList,Tag);
+encode_integer(C, Val, _NamedNumberList, Tag) ->
+ encode_tags(Tag, encode_integer(C, Val)).
+
+
+encode_integer(_, Val) ->
+ Bytes =
+ if
+ Val >= 0 ->
+ encode_integer_pos(Val, []);
+ true ->
+ encode_integer_neg(Val, [])
+ end,
+ {Bytes,length(Bytes)}.
+
+encode_integer_pos(0, L=[B|_Acc]) when B < 128 ->
+ L;
+encode_integer_pos(N, Acc) ->
+ encode_integer_pos((N bsr 8), [N band 16#ff| Acc]).
+
+encode_integer_neg(-1, L=[B1|_T]) when B1 > 127 ->
+ L;
+encode_integer_neg(N, Acc) ->
+ encode_integer_neg(N bsr 8, [N band 16#ff|Acc]).
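+
+%% For illustration, the minimal two's complement octet lists produced by the
+%% helpers above (the leading bit decides the sign):
+%%   encode_integer_pos(256, [])  -> [1,0]        (16#0100)
+%%   encode_integer_neg(-129, []) -> [255,127]    (16#FF7F)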
+
+%%===============================================================================
+%% decode integer
+%% (Buffer, Range, HasTag, TotalLen) -> {Integer, Remain, RemovedBytes}
+%% (Buffer, Range, NamedNumberList, HasTag, TotalLen) -> {Integer, Remain, RemovedBytes}
+%%===============================================================================
+
+decode_integer(Tlv,Range,NamedNumberList,TagIn) ->
+ V = match_tags(Tlv,TagIn),
+ Int = decode_integer(V),
+ range_check_integer(Int,Range),
+ number2name(Int,NamedNumberList).
+
+decode_integer(Tlv,Range,TagIn) ->
+ V = match_tags(Tlv, TagIn),
+ Int = decode_integer(V),
+ range_check_integer(Int,Range),
+ Int.
+
+%% decoding positive integer values.
+decode_integer(Bin = <<0:1,_:7,_/binary>>) ->
+ Len = size(Bin),
+% <<Int:Len/unit:8,Buffer2/binary>> = Bin,
+ <<Int:Len/unit:8>> = Bin,
+ Int;
+%% decoding negative integer values.
+decode_integer(Bin = <<1:1,B2:7,Bs/binary>>) ->
+ Len = size(Bin),
+% <<N:Len/unit:8,Buffer2/binary>> = <<B2,Bs/binary>>,
+ <<N:Len/unit:8>> = <<B2,Bs/binary>>,
+ Int = N - (1 bsl (8 * Len - 1)),
+ Int.
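+
+%% Illustrative decodings matching the encoder above:
+%%   decode_integer(<<1,0>>)     -> 256
+%%   decode_integer(<<255,127>>) -> -129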
+
+range_check_integer(Int,Range) ->
+ case Range of
+ [] -> % no value range constraint
+ Int;
+ {Lb,Ub} when Int >= Lb, Ub >= Int -> % value range constraint
+ Int;
+ Int -> % fixed value constraint
+ Int;
+ {_,_} ->
+ exit({error,{asn1,{integer_range,Range,Int}}});
+ SingleValue when integer(SingleValue) ->
+ exit({error,{asn1,{integer_range,Range,Int}}});
+ _ -> % some strange constraint that we don't support yet
+ Int
+ end.
+
+number2name(Int,[]) ->
+ Int;
+number2name(Int,NamedNumberList) ->
+ case lists:keysearch(Int, 2, NamedNumberList) of
+ {value,{NamedVal, _}} ->
+ NamedVal;
+ _ ->
+ Int
+ end.
+
+
+%%============================================================================
+%% Enumerated value, ITU_T X.690 Chapter 8.4
+
+%% encode enumerated value
+%%============================================================================
+encode_enumerated(Val, TagIn) when integer(Val)->
+ encode_tags(TagIn, encode_integer(false,Val));
+encode_enumerated({Name,Val}, TagIn) when atom(Name) ->
+ encode_enumerated(Val, TagIn).
+
+%% The encode_enumerated functions below this line can be removed when the
+%% new code generation is stable. (the functions might have to be kept here
+%% a while longer for compatibility reasons)
+
+encode_enumerated(C, Val, {NamedNumberList,ExtList}, TagIn) when atom(Val) ->
+ case catch encode_enumerated(C, Val, NamedNumberList, TagIn) of
+ {'EXIT',_} -> encode_enumerated(C, Val, ExtList, TagIn);
+ Result -> Result
+ end;
+
+encode_enumerated(C, Val, NamedNumberList, TagIn) when atom(Val) ->
+ case lists:keysearch(Val, 1, NamedNumberList) of
+ {value, {_, NewVal}} ->
+ encode_tags(TagIn, encode_integer(C, NewVal));
+ _ ->
+ exit({error,{asn1, {enumerated_not_in_range, Val}}})
+ end;
+
+encode_enumerated(C, {asn1_enum, Val}, {_,_}, TagIn) when integer(Val) ->
+ encode_tags(TagIn, encode_integer(C,Val));
+
+encode_enumerated(C, {Name,Val}, NamedNumberList, TagIn) when atom(Name) ->
+ encode_enumerated(C, Val, NamedNumberList, TagIn);
+
+encode_enumerated(_C, Val, _NamedNumberList, _TagIn) ->
+ exit({error,{asn1, {enumerated_not_namednumber, Val}}}).
+
+
+
+%%============================================================================
+%% decode enumerated value
+%% (Buffer, Range, NamedNumberList, HasTag, TotalLen) -> Value
+%%===========================================================================
+decode_enumerated(Tlv, Range, NamedNumberList, Tags) ->
+ Buffer = match_tags(Tlv,Tags),
+ decode_enumerated_notag(Buffer, Range, NamedNumberList, Tags).
+
+decode_enumerated_notag(Buffer, _Range, {NamedNumberList,ExtList}, _Tags) ->
+
+ IVal = decode_integer2(size(Buffer), Buffer),
+ case decode_enumerated1(IVal, NamedNumberList) of
+ {asn1_enum,IVal} ->
+ decode_enumerated1(IVal,ExtList);
+ EVal ->
+ EVal
+ end;
+decode_enumerated_notag(Buffer, _Range, NNList, _Tags) ->
+ IVal = decode_integer2(size(Buffer), Buffer),
+ case decode_enumerated1(IVal, NNList) of
+ {asn1_enum,_} ->
+ exit({error,{asn1, {illegal_enumerated, IVal}}});
+ EVal ->
+ EVal
+ end.
+
+decode_enumerated1(Val, NamedNumberList) ->
+ %% it must be a named integer
+ case lists:keysearch(Val, 2, NamedNumberList) of
+ {value,{NamedVal, _}} ->
+ NamedVal;
+ _ ->
+ {asn1_enum,Val}
+ end.
+
+
+%%============================================================================
+%%
+%% Real value, ITU_T X.690 Chapter 8.5
+%%============================================================================
+%%
+%% encode real value
+%%============================================================================
+
+%% only base 2 internally so far!!
+encode_real(0, TagIn) ->
+ encode_tags(TagIn, {[],0});
+encode_real('PLUS-INFINITY', TagIn) ->
+ encode_tags(TagIn, {[64],1});
+encode_real('MINUS-INFINITY', TagIn) ->
+ encode_tags(TagIn, {[65],1});
+encode_real(Val, TagIn) when tuple(Val)->
+ encode_tags(TagIn, encode_real(Val)).
+
+%%%%%%%%%%%%%%
+%% not optimally efficient..
+%% the mantissa is encoded in base 2 only!
+%% the exponent base is limited to 2 as well!
+encode_real({Man, Base, Exp}) ->
+%% io:format("Mantissa: ~w Base: ~w, Exp: ~w~n",[Man, Base, Exp]),
+
+ OctExp = if Exp >= 0 -> list_to_binary(encode_integer_pos(Exp, []));
+ true -> list_to_binary(encode_integer_neg(Exp, []))
+ end,
+%% ok = io:format("OctExp: ~w~n",[OctExp]),
+ SignBit = if Man > 0 -> 0; % bit 7 is pos or neg, no Zeroval
+ true -> 1
+ end,
+%% ok = io:format("SignBitMask: ~w~n",[SignBitMask]),
+ InBase = if Base =:= 2 -> 0; % bit 6,5: only base 2 this far!
+ true ->
+ exit({error,{asn1, {encode_real_non_supported_encodeing, Base}}})
+ end,
+ SFactor = 0, % bit 4,3: no scaling since only base 2
+ OctExpLen = size(OctExp),
+ if OctExpLen > 255 ->
+ exit({error,{asn1, {to_big_exp_in_encode_real, OctExpLen}}});
+ true -> true %% make real assert later..
+ end,
+ {LenCode, EOctets} = case OctExpLen of % bit 2,1
+ 1 -> {0, OctExp};
+ 2 -> {1, OctExp};
+ 3 -> {2, OctExp};
+ _ -> {3, <<OctExpLen, OctExp/binary>>}
+ end,
+ FirstOctet = <<1:1,SignBit:1,InBase:2,SFactor:2,LenCode:2>>,
+ OctMantissa = if Man > 0 -> list_to_binary(minimum_octets(Man));
+ true -> list_to_binary(minimum_octets(-(Man))) % signbit keeps track of sign
+ end,
+ %% ok = io:format("LenMask: ~w EOctets: ~w~nFirstOctet: ~w OctMantissa: ~w OctExpLen: ~w~n", [LenMask, EOctets, FirstOctet, OctMantissa, OctExpLen]),
+ Bin = <<FirstOctet/binary, EOctets/binary, OctMantissa/binary>>,
+ {Bin, size(Bin)}.
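+
+%% For illustration, encoding the value 3 * 2^1 (= 6.0) with the clause above:
+%%   encode_real({3,2,1}) -> {<<128,1,3>>,3}
+%% i.e. first octet 2#10000000 (binary encoding, positive, base 2, one
+%% exponent octet), exponent octet 1, mantissa octet 3.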
+
+
+%%============================================================================
+%% decode real value
+%%
+%% decode_real([OctetBufferList], tuple|value, tag|notag) ->
+%% {{Mantissa, Base, Exp} | realval | PLUS-INFINITY | MINUS-INFINITY | 0,
+%% RestBuff}
+%%
+%% only for base 2 decoding so far!!
+%%============================================================================
+
+decode_real(Tlv, Form, Tags) ->
+ Buffer = match_tags(Tlv,Tags),
+ decode_real_notag(Buffer, Form).
+
+decode_real_notag(_Buffer, _Form) ->
+ exit({error,{asn1, {unimplemented,real}}}).
+%% decode_real2(Buffer, Form, size(Buffer)).
+
+% decode_real2(Buffer, Form, Len) ->
+% <<First, Buffer2/binary>> = Buffer,
+% if
+% First =:= 2#01000000 -> {'PLUS-INFINITY', Buffer2};
+% First =:= 2#01000001 -> {'MINUS-INFINITY', Buffer2};
+% First =:= 2#00000000 -> {0, Buffer2};
+% true ->
+% %% have some check here to verify only supported bases (2)
+% <<B7:1,B6:1,B5_4:2,B3_2:2,B1_0:2>> = <<First>>,
+% Sign = B6,
+% Base =
+% case B5_4 of
+% 0 -> 2; % base 2, only one so far
+% _ -> exit({error,{asn1, {non_supported_base, First}}})
+% end,
+% ScalingFactor =
+% case B3_2 of
+% 0 -> 0; % no scaling so far
+% _ -> exit({error,{asn1, {non_supported_scaling, First}}})
+% end,
+
+% {FirstLen,Exp,Buffer3} =
+% case B1_0 of
+% 0 ->
+% <<_:1/unit:8,Buffer21/binary>> = Buffer2,
+% {2, decode_integer2(1, Buffer2),Buffer21};
+% 1 ->
+% <<_:2/unit:8,Buffer21/binary>> = Buffer2,
+% {3, decode_integer2(2, Buffer2)};
+% 2 ->
+% <<_:3/unit:8,Buffer21/binary>> = Buffer2,
+% {4, decode_integer2(3, Buffer2)};
+% 3 ->
+% <<ExpLen1,RestBuffer/binary>> = Buffer2,
+% <<_:ExpLen1/unit:8,RestBuffer2/binary>> = RestBuffer,
+% { ExpLen1 + 2,
+% decode_integer2(ExpLen1, RestBuffer, RemBytes1),
+% RestBuffer2}
+% end,
+% Length = Len - FirstLen,
+% <<LongInt:Length/unit:8,RestBuff/binary>> = Buffer3,
+% {Mantissa, Buffer4} =
+% if Sign =:= 0 ->
+
+% {LongInt, RestBuff};% sign plus,
+% true ->
+
+% {-LongInt, RestBuff}% sign minus
+% end,
+% case Form of
+% tuple ->
+% {Val,Buf,RemB} = Exp,
+% {{Mantissa, Base, {Val,Buf}}, Buffer4, RemBytes2+RemBytes3};
+% _value ->
+% comming
+% end
+% end.
+
+
+%%============================================================================
+%% Bitstring value, ITU_T X.690 Chapter 8.6
+%%
+%% encode bitstring value
+%%
+%% bitstring NamedBitList
+%% Val can be one of:
+%% - [identifiers] where only the named identifiers are set to one;
+%% the Constraint must then carry some information about the
+%% bit length.
+%% - [list of ones and zeroes] giving all bits explicitly
+%% - an integer value representing the bit list
+%% C is the constraint length, only valid when identifiers are used
+%%============================================================================
+
+encode_bit_string(C,Bin={Unused,BinBits},NamedBitList,TagIn) when integer(Unused), binary(BinBits) ->
+ encode_bin_bit_string(C,Bin,NamedBitList,TagIn);
+encode_bit_string(C, [FirstVal | RestVal], NamedBitList, TagIn) when atom(FirstVal) ->
+ encode_bit_string_named(C, [FirstVal | RestVal], NamedBitList, TagIn);
+
+encode_bit_string(C, [{bit,X} | RestVal], NamedBitList, TagIn) ->
+ encode_bit_string_named(C, [{bit,X} | RestVal], NamedBitList, TagIn);
+
+encode_bit_string(C, [FirstVal| RestVal], NamedBitList, TagIn) when integer(FirstVal) ->
+ encode_bit_string_bits(C, [FirstVal | RestVal], NamedBitList, TagIn);
+
+encode_bit_string(_C, 0, _NamedBitList, TagIn) ->
+ encode_tags(TagIn, <<0>>,1);
+
+encode_bit_string(_C, [], _NamedBitList, TagIn) ->
+ encode_tags(TagIn, <<0>>,1);
+
+encode_bit_string(C, IntegerVal, NamedBitList, TagIn) when integer(IntegerVal) ->
+ BitListVal = int_to_bitlist(IntegerVal),
+ encode_bit_string_bits(C, BitListVal, NamedBitList, TagIn);
+
+encode_bit_string(C, {Name,BitList}, NamedBitList, TagIn) when atom(Name) ->
+ encode_bit_string(C, BitList, NamedBitList, TagIn).
+
+
+
+int_to_bitlist(0) ->
+ [];
+int_to_bitlist(Int) when integer(Int), Int >= 0 ->
+ [Int band 1 | int_to_bitlist(Int bsr 1)].
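+
+%% For illustration: int_to_bitlist(5) -> [1,0,1], i.e. bit positions 0 and 2
+%% are set (least significant bit first).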
+
+
+%%=================================================================
+%% Encode BIT STRING of the form {Unused,BinBits}.
+%% Unused is the number of unused bits in the last byte in BinBits
+%% and BinBits is a binary representing the BIT STRING.
+%%=================================================================
+encode_bin_bit_string(C,{Unused,BinBits},_NamedBitList,TagIn)->
+ case get_constraint(C,'SizeConstraint') of
+ no ->
+ remove_unused_then_dotag(TagIn, Unused, BinBits);
+ {_Min,Max} ->
+ BBLen = (size(BinBits)*8)-Unused,
+ if
+ BBLen > Max ->
+ exit({error,{asn1,
+ {bitstring_length,
+ {{was,BBLen},{maximum,Max}}}}});
+ true ->
+ remove_unused_then_dotag(TagIn, Unused, BinBits)
+ end;
+ Size ->
+ case ((size(BinBits)*8)-Unused) of
+ BBSize when BBSize =< Size ->
+ remove_unused_then_dotag(TagIn, Unused, BinBits);
+ BBSize ->
+ exit({error,{asn1,
+ {bitstring_length,
+ {{was,BBSize},{should_be,Size}}}}})
+ end
+ end.
+
+remove_unused_then_dotag(TagIn,Unused,BinBits) ->
+ case Unused of
+ 0 when (size(BinBits) == 0) ->
+ encode_tags(TagIn,<<0>>,1);
+ 0 ->
+ Bin = <<Unused,BinBits/binary>>,
+ encode_tags(TagIn,Bin,size(Bin));
+ Num ->
+ N = (size(BinBits)-1),
+ <<BBits:N/binary,LastByte>> = BinBits,
+ encode_tags(TagIn,
+ [Unused,binary_to_list(BBits) ++[(LastByte bsr Num) bsl Num]],
+ 1+size(BinBits))
+ end.
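+
+%% For illustration, with Unused = 4 and BinBits = <<2#10111111>> the clause
+%% above masks the 4 unused bits of the last octet:
+%% (2#10111111 bsr 4) bsl 4 = 2#10110000, so the encoded contents become the
+%% octets 4 and 2#10110000 (the unused-bit count followed by the masked octet).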
+
+
+%%=================================================================
+%% Encode named bits
+%%=================================================================
+
+encode_bit_string_named(C, [FirstVal | RestVal], NamedBitList, TagIn) ->
+ ToSetPos = get_all_bitposes([FirstVal | RestVal], NamedBitList, []),
+ Size =
+ case get_constraint(C,'SizeConstraint') of
+ no ->
+ lists:max(ToSetPos)+1;
+ {_Min,Max} ->
+ Max;
+ TSize ->
+ TSize
+ end,
+ BitList = make_and_set_list(Size, ToSetPos, 0),
+ {Len, Unused, OctetList} = encode_bitstring(BitList),
+ encode_tags(TagIn, [Unused|OctetList],Len+1).
+
+
+%%----------------------------------------
+%% get_all_bitposes([list of named bits to set], named_bit_db, []) ->
+%% [sorted_list_of_bitpositions_to_set]
+%%----------------------------------------
+
+get_all_bitposes([{bit,ValPos}|Rest], NamedBitList, Ack) ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack ]);
+get_all_bitposes([Val | Rest], NamedBitList, Ack) when atom(Val) ->
+ case lists:keysearch(Val, 1, NamedBitList) of
+ {value, {_ValName, ValPos}} ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack]);
+ _ ->
+ exit({error,{asn1, {bitstring_namedbit, Val}}})
+ end;
+get_all_bitposes([], _NamedBitList, Ack) ->
+ lists:sort(Ack).
+
+
+%%----------------------------------------
+%% make_and_set_list(Len of list to return, [positions to set to 1], StartPos) ->
+%% returns a list of length Len with a one at every position in SetPos.
+%% Positions are zero-based: the first element is position 0, the second is 1,
+%% and so on; the result has length Len, not Len + 1.
+%% BitList = make_and_set_list(C, ToSetPos, 0),
+%%----------------------------------------
+
+make_and_set_list(0, [], _) -> [];
+make_and_set_list(0, _, _) ->
+ exit({error,{asn1,bitstring_sizeconstraint}});
+make_and_set_list(Len, [XPos|SetPos], XPos) ->
+ [1 | make_and_set_list(Len - 1, SetPos, XPos + 1)];
+make_and_set_list(Len, [Pos|SetPos], XPos) ->
+ [0 | make_and_set_list(Len - 1, [Pos | SetPos], XPos + 1)];
+make_and_set_list(Len, [], XPos) ->
+ [0 | make_and_set_list(Len - 1, [], XPos + 1)].
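+
+%% For illustration: make_and_set_list(5, [1,3], 0) -> [0,1,0,1,0]
+%% (positions 1 and 3 set in a list of length 5).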
+
+
+
+
+
+
+%%=================================================================
+%% Encode bit string for lists of ones and zeroes
+%%=================================================================
+encode_bit_string_bits(C, BitListVal, _NamedBitList, TagIn) when list(BitListVal) ->
+ case get_constraint(C,'SizeConstraint') of
+ no ->
+ {Len, Unused, OctetList} = encode_bitstring(BitListVal),
+ %%add unused byte to the Len
+ encode_tags(TagIn, [Unused | OctetList], Len+1);
+ Constr={Min,Max} when integer(Min),integer(Max) ->
+ encode_constr_bit_str_bits(Constr,BitListVal,TagIn);
+ {Constr={_,_},[]} ->%Constr={Min,Max}
+ %% constraint with extension mark
+ encode_constr_bit_str_bits(Constr,BitListVal,TagIn);
+ Constr={{_,_},{_,_}} ->%{{Min1,Max1},{Min2,Max2}}
+ %% constraint with extension mark
+ encode_constr_bit_str_bits(Constr,BitListVal,TagIn);
+ Size ->
+ case length(BitListVal) of
+ BitSize when BitSize == Size ->
+ {Len, Unused, OctetList} = encode_bitstring(BitListVal),
+ %%add unused byte to the Len
+ encode_tags(TagIn, [Unused | OctetList], Len+1);
+ BitSize when BitSize < Size ->
+ PaddedList = pad_bit_list(Size-BitSize,BitListVal),
+ {Len, Unused, OctetList} = encode_bitstring(PaddedList),
+ %%add unused byte to the Len
+ encode_tags(TagIn, [Unused | OctetList], Len+1);
+ BitSize ->
+ exit({error,{asn1,
+ {bitstring_length, {{was,BitSize},{should_be,Size}}}}})
+ end
+
+ end.
+
+encode_constr_bit_str_bits({_Min,Max},BitListVal,TagIn) ->
+ BitLen = length(BitListVal),
+ if
+ BitLen > Max ->
+ exit({error,{asn1,{bitstring_length,{{was,BitLen},
+ {maximum,Max}}}}});
+ true ->
+ {Len, Unused, OctetList} = encode_bitstring(BitListVal),
+ %%add unused byte to the Len
+ encode_tags(TagIn, [Unused, OctetList], Len+1)
+ end;
+encode_constr_bit_str_bits({{_Min1,Max1},{Min2,Max2}},BitListVal,TagIn) ->
+ BitLen = length(BitListVal),
+ case BitLen of
+ Len when Len > Max2 ->
+ exit({error,{asn1,{bitstring_length,{{was,BitLen},
+ {maximum,Max2}}}}});
+ Len when Len > Max1, Len < Min2 ->
+ exit({error,{asn1,{bitstring_length,{{was,BitLen},
+ {not_allowed_interval,
+ Max1,Min2}}}}});
+ _ ->
+ {Len, Unused, OctetList} = encode_bitstring(BitListVal),
+ %%add unused byte to the Len
+ encode_tags(TagIn, [Unused, OctetList], Len+1)
+ end.
+
+%% returns a list of length Size + length(BitListVal), with BitListVal
+%% as the most significant elements followed by Size zero elements as padding
+pad_bit_list(Size,BitListVal) ->
+ Tail = lists:duplicate(Size,0),
+ lists:append(BitListVal,Tail).
+
+%%=================================================================
+%% Do the actual encoding
+%% ([bitlist]) -> {ListLen, UnusedBits, OctetList}
+%%=================================================================
+
+encode_bitstring([B8, B7, B6, B5, B4, B3, B2, B1 | Rest]) ->
+ Val = (B8 bsl 7) bor (B7 bsl 6) bor (B6 bsl 5) bor (B5 bsl 4) bor
+ (B4 bsl 3) bor (B3 bsl 2) bor (B2 bsl 1) bor B1,
+ encode_bitstring(Rest, [Val], 1);
+encode_bitstring(Val) ->
+ {Unused, Octet} = unused_bitlist(Val, 7, 0),
+ {1, Unused, [Octet]}.
+
+encode_bitstring([B8, B7, B6, B5, B4, B3, B2, B1 | Rest], Ack, Len) ->
+ Val = (B8 bsl 7) bor (B7 bsl 6) bor (B6 bsl 5) bor (B5 bsl 4) bor
+ (B4 bsl 3) bor (B3 bsl 2) bor (B2 bsl 1) bor B1,
+ encode_bitstring(Rest, [Ack | [Val]], Len + 1);
+%% even multiple of 8 bits..
+encode_bitstring([], Ack, Len) ->
+ {Len, 0, Ack};
+%% unused bits in last octet
+encode_bitstring(Rest, Ack, Len) ->
+% io:format("uneven ~w ~w ~w~n",[Rest, Ack, Len]),
+ {Unused, Val} = unused_bitlist(Rest, 7, 0),
+ {Len + 1, Unused, [Ack | [Val]]}.
+
+%%%%%%%%%%%%%%%%%%
+%% unused_bitlist([list of up to 7 ones and zeros], 7, 0) ->
+%% {Number of unused bits, Last octet with the bits left-aligned}
+unused_bitlist([], Trail, Ack) ->
+ {Trail + 1, Ack};
+unused_bitlist([Bit | Rest], Trail, Ack) ->
+%% io:format("trail Bit: ~w Rest: ~w Trail: ~w Ack:~w~n",[Bit, Rest, Trail, Ack]),
+ unused_bitlist(Rest, Trail - 1, (Bit bsl Trail) bor Ack).
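+
+%% For illustration, encoding four bits with the functions above:
+%%   encode_bitstring([1,0,1,1]) -> {1,4,[2#10110000]}
+%% i.e. one octet, 4 unused (trailing) bits, bits left-aligned in the octet.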
+
+
+%%============================================================================
+%% decode bitstring value
+%% (Buffer, Range, NamedNumberList, HasTag, TotalLen) -> {Integer, Remain, RemovedBytes}
+%%============================================================================
+
+decode_compact_bit_string(Buffer, Range, NamedNumberList, Tags) ->
+% NewTags = new_tags(HasTag,#tag{class=?UNIVERSAL,number=?N_BIT_STRING}),
+ decode_restricted_string(Buffer, Range, ?N_BIT_STRING, Tags,
+ NamedNumberList,bin).
+
+decode_bit_string(Buffer, Range, NamedNumberList, Tags) ->
+% NewTags = new_tags(HasTag,#tag{class=?UNIVERSAL,number=?N_BIT_STRING}),
+ decode_restricted_string(Buffer, Range, ?N_BIT_STRING, Tags,
+ NamedNumberList,old).
+
+
+decode_bit_string2(<<0>>,_NamedNumberList,BinOrOld) ->
+ case BinOrOld of
+ bin ->
+ {0,<<>>};
+ _ ->
+ []
+ end;
+decode_bit_string2(<<Unused,Bits/binary>>,NamedNumberList,BinOrOld) ->
+ case NamedNumberList of
+ [] ->
+ case BinOrOld of
+ bin ->
+ {Unused,Bits};
+ _ ->
+ decode_bitstring2(size(Bits), Unused, Bits)
+ end;
+ _ ->
+ BitString = decode_bitstring2(size(Bits), Unused, Bits),
+ decode_bitstring_NNL(BitString,NamedNumberList)
+ end.
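+
+%% For illustration, decoding the contents <<4,2#10110000>> (4 unused bits):
+%%   decode_bit_string2(<<4,2#10110000>>, [], old) -> [1,0,1,1]
+%%   decode_bit_string2(<<4,2#10110000>>, [], bin) -> {4,<<2#10110000>>}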
+
+%%----------------------------------------
+%% Decode the in buffer to bits
+%%----------------------------------------
+decode_bitstring2(1,Unused,<<B7:1,B6:1,B5:1,B4:1,B3:1,B2:1,B1:1,B0:1,_/binary>>) ->
+ lists:sublist([B7,B6,B5,B4,B3,B2,B1,B0],8-Unused);
+decode_bitstring2(Len, Unused,
+ <<B7:1,B6:1,B5:1,B4:1,B3:1,B2:1,B1:1,B0:1,Buffer/binary>>) ->
+ [B7, B6, B5, B4, B3, B2, B1, B0 |
+ decode_bitstring2(Len - 1, Unused, Buffer)].
+
+%%decode_bitstring2(1, Unused, Buffer) ->
+%% make_bits_of_int(hd(Buffer), 128, 8-Unused);
+%%decode_bitstring2(Len, Unused, [BitVal | Buffer]) ->
+%% [B7, B6, B5, B4, B3, B2, B1, B0] = make_bits_of_int(BitVal, 128, 8),
+%% [B7, B6, B5, B4, B3, B2, B1, B0 |
+%% decode_bitstring2(Len - 1, Unused, Buffer)].
+
+
+%%make_bits_of_int(_, _, 0) ->
+%% [];
+%%make_bits_of_int(BitVal, MaskVal, Unused) when Unused > 0 ->
+%% X = case MaskVal band BitVal of
+%% 0 -> 0 ;
+%% _ -> 1
+%% end,
+%% [X | make_bits_of_int(BitVal, MaskVal bsr 1, Unused - 1)].
+
+
+
+%%----------------------------------------
+%% Decode the bitlist to names
+%%----------------------------------------
+
+
+decode_bitstring_NNL(BitList,NamedNumberList) ->
+ decode_bitstring_NNL(BitList,NamedNumberList,0,[]).
+
+
+decode_bitstring_NNL([],_,_No,Result) ->
+ lists:reverse(Result);
+
+decode_bitstring_NNL([B|BitList],[{Name,No}|NamedNumberList],No,Result) ->
+ if
+ B == 0 ->
+ decode_bitstring_NNL(BitList,NamedNumberList,No+1,Result);
+ true ->
+ decode_bitstring_NNL(BitList,NamedNumberList,No+1,[Name|Result])
+ end;
+decode_bitstring_NNL([1|BitList],NamedNumberList,No,Result) ->
+ decode_bitstring_NNL(BitList,NamedNumberList,No+1,[{bit,No}|Result]);
+decode_bitstring_NNL([0|BitList],NamedNumberList,No,Result) ->
+ decode_bitstring_NNL(BitList,NamedNumberList,No+1,Result).
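+
+%% For illustration, with named bits a(0), b(1) and c(2):
+%%   decode_bitstring_NNL([1,0,1], [{a,0},{b,1},{c,2}]) -> [a,c]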
+
+
+%%============================================================================
+%% Octet string, ITU_T X.690 Chapter 8.7
+%%
+%% encode octet string
+%% The OctetList must be a flat list of integers in the range 0..255
+%% the function does not check this because it takes too much time
+%%============================================================================
+encode_octet_string(_C, OctetList, TagIn) when binary(OctetList) ->
+ encode_tags(TagIn, OctetList, size(OctetList));
+encode_octet_string(_C, OctetList, TagIn) when list(OctetList) ->
+ encode_tags(TagIn, OctetList, length(OctetList));
+encode_octet_string(C, {Name,OctetList}, TagIn) when atom(Name) ->
+ encode_octet_string(C, OctetList, TagIn).
+
+
+%%============================================================================
+%% decode octet string
+%% (Buffer, Range, HasTag, TotalLen) -> {String, Remain, RemovedBytes}
+%%
+%% Octet string is decoded as a restricted string
+%%============================================================================
+decode_octet_string(Buffer, Range, Tags) ->
+% NewTags = new_tags(HasTag,#tag{class=?UNIVERSAL,number=?N_OCTET_STRING}),
+ decode_restricted_string(Buffer, Range, ?N_OCTET_STRING,
+ Tags, [], old).
+
+%%============================================================================
+%% Null value, ITU_T X.690 Chapter 8.8
+%%
+%% encode NULL value
+%%============================================================================
+
+encode_null({Name, _Val}, TagIn) when atom(Name) ->
+ encode_tags(TagIn, [], 0);
+encode_null(_Val, TagIn) ->
+ encode_tags(TagIn, [], 0).
+
+%%============================================================================
+%% decode NULL value
+%% (Buffer, HasTag, TotalLen) -> {NULL, Remain, RemovedBytes}
+%%============================================================================
+
+decode_null(Tlv, Tags) ->
+ Val = match_tags(Tlv, Tags),
+ case Val of
+ <<>> ->
+ 'NULL';
+ _ ->
+ exit({error,{asn1,{decode_null,Val}}})
+ end.
+
+%%============================================================================
+%% Object identifier, ITU_T X.690 Chapter 8.19
+%%
+%% encode Object Identifier value
+%%============================================================================
+
+encode_object_identifier({Name,Val}, TagIn) when atom(Name) ->
+ encode_object_identifier(Val, TagIn);
+encode_object_identifier(Val, TagIn) ->
+ encode_tags(TagIn, e_object_identifier(Val)).
+
+e_object_identifier({'OBJECT IDENTIFIER', V}) ->
+ e_object_identifier(V);
+e_object_identifier({Cname, V}) when atom(Cname), tuple(V) ->
+ e_object_identifier(tuple_to_list(V));
+e_object_identifier({Cname, V}) when atom(Cname), list(V) ->
+ e_object_identifier(V);
+e_object_identifier(V) when tuple(V) ->
+ e_object_identifier(tuple_to_list(V));
+
+%%%%%%%%%%%%%%%
+%% e_object_identifier([List of Object Identifiers]) ->
+%% {[Encoded Octetlist of ObjIds], IntLength}
+%%
+e_object_identifier([E1, E2 | Tail]) ->
+ Head = 40*E1 + E2, % wow!
+ {H,Lh} = mk_object_val(Head),
+ {R,Lr} = enc_obj_id_tail(Tail, [], 0),
+ {[H|R], Lh+Lr}.
+
+enc_obj_id_tail([], Ack, Len) ->
+ {lists:reverse(Ack), Len};
+enc_obj_id_tail([H|T], Ack, Len) ->
+ {B, L} = mk_object_val(H),
+ enc_obj_id_tail(T, [B|Ack], Len+L).
+
+%% e_object_identifier([List of Obect Identifiers]) ->
+%% {[Encoded Octetlist of ObjIds], IntLength}
+%%
+%%e_object_identifier([E1, E2 | Tail]) ->
+%% Head = 40*E1 + E2, % wow!
+%% F = fun(Val, AckLen) ->
+%% {L, Ack} = mk_object_val(Val),
+%% {L, Ack + AckLen}
+%% end,
+%% {Octets, Len} = lists:mapfoldl(F, 0, [Head | Tail]).
+
+%%%%%%%%%%%
+%% mk_object_val(Value) -> {OctetList, Len}
+%% returns Val as a list of octets; the 8th (high) bit is always set to one,
+%% except in the last octet where it is 0
+%%
+
+
+mk_object_val(Val) when Val =< 127 ->
+ {[255 band Val], 1};
+mk_object_val(Val) ->
+ mk_object_val(Val bsr 7, [Val band 127], 1).
+mk_object_val(0, Ack, Len) ->
+ {Ack, Len};
+mk_object_val(Val, Ack, Len) ->
+ mk_object_val(Val bsr 7, [((Val band 127) bor 128) | Ack], Len + 1).
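+
+%% For illustration, the well-known OID 1.2.840.113549 (rsadsi):
+%%   mk_object_val(840)    -> {[16#86,16#48],2}
+%%   mk_object_val(113549) -> {[16#86,16#F7,16#0D],3}
+%% so e_object_identifier({1,2,840,113549}) yields an io-list that flattens to
+%% [16#2A,16#86,16#48,16#86,16#F7,16#0D] with total length 6 (40*1+2 = 16#2A).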
+
+
+
+%%============================================================================
+%% decode Object Identifier value
+%% (Buffer, HasTag, TotalLen) -> {{ObjId}, Remain, RemovedBytes}
+%%============================================================================
+
+decode_object_identifier(Tlv, Tags) ->
+ Val = match_tags(Tlv, Tags),
+ [AddedObjVal|ObjVals] = dec_subidentifiers(Val,0,[]),
+ {Val1, Val2} = if
+ AddedObjVal < 40 ->
+ {0, AddedObjVal};
+ AddedObjVal < 80 ->
+ {1, AddedObjVal - 40};
+ true ->
+ {2, AddedObjVal - 80}
+ end,
+ list_to_tuple([Val1, Val2 | ObjVals]).
+
+dec_subidentifiers(<<>>,_Av,Al) ->
+ lists:reverse(Al);
+dec_subidentifiers(<<1:1,H:7,T/binary>>,Av,Al) ->
+ dec_subidentifiers(T,(Av bsl 7) + H,Al);
+dec_subidentifiers(<<H,T/binary>>,Av,Al) ->
+ dec_subidentifiers(T,0,[((Av bsl 7) + H)|Al]).
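+
+%% For illustration, the inverse of the encoding example above:
+%%   dec_subidentifiers(<<16#2A,16#86,16#48,16#86,16#F7,16#0D>>, 0, []) ->
+%%       [42,840,113549]
+%% and decode_object_identifier/2 then splits 42 into {1,2}, returning
+%% {1,2,840,113549}.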
+
+
+%%============================================================================
+%% Restricted character string types, ITU_T X.690 Chapter 8.20
+%%
+%% encode Numeric Printable Teletex Videotex Visible IA5 Graphic General strings
+%%============================================================================
+%% The StringType arg is kept for future use but might be removed
+encode_restricted_string(_C, OctetList, _StringType, TagIn)
+ when binary(OctetList) ->
+ encode_tags(TagIn, OctetList, size(OctetList));
+encode_restricted_string(_C, OctetList, _StringType, TagIn)
+ when list(OctetList) ->
+ encode_tags(TagIn, OctetList, length(OctetList));
+encode_restricted_string(C,{Name,OctetL}, StringType, TagIn) when atom(Name)->
+ encode_restricted_string(C, OctetL, StringType, TagIn).
+
+%%============================================================================
+%% decode Numeric Printable Teletex Videotex Visible IA5 Graphic General strings
+%% (Buffer, Range, StringType, HasTag, TotalLen) ->
+%% {String, Remain, RemovedBytes}
+%%============================================================================
+
+decode_restricted_string(Buffer, Range, StringType, Tags) ->
+ decode_restricted_string(Buffer, Range, StringType, Tags, [], old).
+
+
+decode_restricted_string(Tlv, Range, StringType, TagsIn,
+ NamedNumberList, BinOrOld) ->
+ Val = match_tags(Tlv, TagsIn),
+ Val2 =
+ case Val of
+ PartList = [_H|_T] -> % constructed val
+ Bin = collect_parts(PartList),
+ decode_restricted(Bin, StringType,
+ NamedNumberList, BinOrOld);
+ Bin ->
+ decode_restricted(Bin, StringType,
+ NamedNumberList, BinOrOld)
+ end,
+ check_and_convert_restricted_string(Val2,StringType,Range,NamedNumberList,BinOrOld).
+
+
+
+% case StringType of
+% ?N_BIT_STRING when BinOrOld == bin ->
+% {concat_bit_binaries(AccVal, Val), AccRb+Rb};
+% _ when binary(Val),binary(AccVal) ->
+% {<<AccVal/binary,Val/binary>>,AccRb+Rb};
+% _ when binary(Val), AccVal==[] ->
+% {Val,AccRb+Rb};
+% _ ->
+% {AccVal++Val, AccRb+Rb}
+% end,
+
+
+
+decode_restricted(Bin, StringType, NamedNumberList,BinOrOld) ->
+ case StringType of
+ ?N_BIT_STRING ->
+ decode_bit_string2(Bin, NamedNumberList, BinOrOld);
+ ?N_UniversalString ->
+ mk_universal_string(binary_to_list(Bin));
+ ?N_BMPString ->
+ mk_BMP_string(binary_to_list(Bin));
+ _ ->
+ Bin
+ end.
+
+
+check_and_convert_restricted_string(Val,StringType,Range,NamedNumberList,_BinOrOld) ->
+ {StrLen,NewVal} = case StringType of
+ ?N_BIT_STRING when NamedNumberList /= [] ->
+ {no_check,Val};
+ ?N_BIT_STRING when list(Val) ->
+ {length(Val),Val};
+ ?N_BIT_STRING when tuple(Val) ->
+ {(size(element(2,Val))*8) - element(1,Val),Val};
+ _ when binary(Val) ->
+ {size(Val),binary_to_list(Val)};
+ _ when list(Val) ->
+ {length(Val), Val}
+ end,
+ case Range of
+ _ when StrLen == no_check ->
+ NewVal;
+ [] -> % No length constraint
+ NewVal;
+ {Lb,Ub} when StrLen >= Lb, Ub >= StrLen -> % variable length constraint
+ NewVal;
+ {{Lb,_Ub},[]} when StrLen >= Lb ->
+ NewVal;
+ {{Lb1,Ub1},{Lb2,Ub2}} when StrLen >= Lb1, StrLen =< Ub1;
+ StrLen =< Ub2, StrLen >= Lb2 ->
+ NewVal;
+ StrLen -> % fixed length constraint
+ NewVal;
+ {_,_} ->
+ exit({error,{asn1,{length,Range,Val}}});
+ _Len when integer(_Len) ->
+ exit({error,{asn1,{length,Range,Val}}});
+ _ -> % some strange constraint that we don't support yet
+ NewVal
+ end.
+
+
+%%============================================================================
+%% encode Universal string
+%%============================================================================
+
+encode_universal_string(C, {Name, Universal}, TagIn) when atom(Name) ->
+ encode_universal_string(C, Universal, TagIn);
+encode_universal_string(_C, Universal, TagIn) ->
+ OctetList = mk_uni_list(Universal),
+ encode_tags(TagIn, OctetList, length(OctetList)).
+
+mk_uni_list(In) ->
+ mk_uni_list(In,[]).
+
+mk_uni_list([],List) ->
+ lists:reverse(List);
+mk_uni_list([{A,B,C,D}|T],List) ->
+ mk_uni_list(T,[D,C,B,A|List]);
+mk_uni_list([H|T],List) ->
+ mk_uni_list(T,[H,0,0,0|List]).
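+
+%% For illustration: mk_uni_list("AB") -> [0,0,0,65,0,0,0,66], i.e. each
+%% character is expanded to four octets (most significant octet first).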
+
+%%===========================================================================
+%% decode Universal strings
+%% (Buffer, Range, StringType, HasTag, LenIn) ->
+%% {String, Remain, RemovedBytes}
+%%===========================================================================
+
+decode_universal_string(Buffer, Range, Tags) ->
+ decode_restricted_string(Buffer, Range, ?N_UniversalString,
+ Tags, [], old).
+
+
+mk_universal_string(In) ->
+ mk_universal_string(In,[]).
+
+mk_universal_string([],Acc) ->
+ lists:reverse(Acc);
+mk_universal_string([0,0,0,D|T],Acc) ->
+ mk_universal_string(T,[D|Acc]);
+mk_universal_string([A,B,C,D|T],Acc) ->
+ mk_universal_string(T,[{A,B,C,D}|Acc]).
+
+
+%%============================================================================
+%% encode BMP string
+%%============================================================================
+
+encode_BMP_string(C, {Name,BMPString}, TagIn) when atom(Name)->
+ encode_BMP_string(C, BMPString, TagIn);
+encode_BMP_string(_C, BMPString, TagIn) ->
+ OctetList = mk_BMP_list(BMPString),
+ encode_tags(TagIn, OctetList, length(OctetList)).
+
+mk_BMP_list(In) ->
+ mk_BMP_list(In,[]).
+
+mk_BMP_list([],List) ->
+ lists:reverse(List);
+mk_BMP_list([{0,0,C,D}|T],List) ->
+ mk_BMP_list(T,[D,C|List]);
+mk_BMP_list([H|T],List) ->
+ mk_BMP_list(T,[H,0|List]).
+
+%%============================================================================
+%% decode (OctetList, Range(ignored), tag|notag) -> {ValList, RestList}
+%% (Buffer, Range, StringType, HasTag, TotalLen) ->
+%% {String, Remain, RemovedBytes}
+%%============================================================================
+decode_BMP_string(Buffer, Range, Tags) ->
+ decode_restricted_string(Buffer, Range, ?N_BMPString,
+ Tags, [], old).
+
+mk_BMP_string(In) ->
+ mk_BMP_string(In,[]).
+
+mk_BMP_string([],US) ->
+ lists:reverse(US);
+mk_BMP_string([0,B|T],US) ->
+ mk_BMP_string(T,[B|US]);
+mk_BMP_string([C,D|T],US) ->
+ mk_BMP_string(T,[{0,0,C,D}|US]).
+
+
+%%============================================================================
+%% Generalized time, ITU_T X.680 Chapter 39
+%%
+%% encode Generalized time
+%%============================================================================
+
+encode_generalized_time(C, {Name,OctetList}, TagIn) when atom(Name) ->
+ encode_generalized_time(C, OctetList, TagIn);
+encode_generalized_time(_C, OctetList, TagIn) ->
+ encode_tags(TagIn, OctetList, length(OctetList)).
+
+%%============================================================================
+%% decode Generalized time
+%% (Buffer, Range, HasTag, TotalLen) -> {String, Remain, RemovedBytes}
+%%============================================================================
+
+decode_generalized_time(Tlv, _Range, Tags) ->
+ Val = match_tags(Tlv, Tags),
+ NewVal = case Val of
+ PartList = [_H|_T] -> % constructed
+ collect_parts(PartList);
+ Bin ->
+ Bin
+ end,
+ binary_to_list(NewVal).
+
+%%============================================================================
+%% Universal time, ITU_T X.680 Chapter 40
+%%
+%% encode UTC time
+%%============================================================================
+
+encode_utc_time(C, {Name,OctetList}, TagIn) when atom(Name) ->
+ encode_utc_time(C, OctetList, TagIn);
+encode_utc_time(_C, OctetList, TagIn) ->
+ encode_tags(TagIn, OctetList, length(OctetList)).
+
+%%============================================================================
+%% decode UTC time
+%% (Buffer, Range, HasTag, TotalLen) -> {String, Remain, RemovedBytes}
+%%============================================================================
+
+decode_utc_time(Tlv, _Range, Tags) ->
+ Val = match_tags(Tlv, Tags),
+ NewVal = case Val of
+ PartList = [_H|_T] -> % constructed
+ collect_parts(PartList);
+ Bin ->
+ Bin
+ end,
+ binary_to_list(NewVal).
+
+
+%%============================================================================
+%% Length handling
+%%
+%% Encode length
+%%
+%% encode_length(Int | indefinite) ->
+%% [<127]| [128 + Int (<127),OctetList] | [16#80]
+%%============================================================================
+
+encode_length(indefinite) ->
+ {[16#80],1}; % 128
+encode_length(L) when L =< 16#7F ->
+ {[L],1};
+encode_length(L) ->
+ Oct = minimum_octets(L),
+ Len = length(Oct),
+ if
+ Len =< 126 ->
+ {[ (16#80+Len) | Oct ],Len+1};
+ true ->
+ exit({error,{asn1, to_long_length_oct, Len}})
+ end.
+
+
+%% Val must be >= 0
+minimum_octets(Val) ->
+ minimum_octets(Val,[]).
+
+minimum_octets(0,Acc) ->
+ Acc;
+minimum_octets(Val, Acc) ->
+ minimum_octets((Val bsr 8),[Val band 16#FF | Acc]).
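+
+%% For illustration of the length forms above:
+%%   encode_length(5)          -> {[5],1}              (short form)
+%%   encode_length(201)        -> {[16#81,16#C9],2}    (long form, one length octet)
+%%   encode_length(indefinite) -> {[16#80],1}
+%%   minimum_octets(258)       -> [1,2]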
+
+
+%%===========================================================================
+%% Decode length
+%%
+%% decode_length(OctetList) -> {{indefinite, RestOctetsL}, NoRemovedBytes} |
+%% {{Length, RestOctetsL}, NoRemovedBytes}
+%%===========================================================================
+
+decode_length(<<1:1,0:7,T/binary>>) ->
+ {indefinite, T};
+decode_length(<<0:1,Length:7,T/binary>>) ->
+ {Length,T};
+decode_length(<<1:1,LL:7,T/binary>>) ->
+ <<Length:LL/unit:8,Rest/binary>> = T,
+ {Length,Rest}.
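+
+%% For illustration:
+%%   decode_length(<<5,1,2,3,4,5>>)     -> {5,<<1,2,3,4,5>>}
+%%   decode_length(<<16#82,1,1>>)       -> {257,<<>>}
+%%   decode_length(<<16#80,16#01,0,0>>) -> {indefinite,<<16#01,0,0>>}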
+
+
+
+%%-------------------------------------------------------------------------
+%% INTERNAL HELPER FUNCTIONS (not exported)
+%%-------------------------------------------------------------------------
+
+
+%% decoding positive integer values.
+decode_integer2(Len,Bin = <<0:1,_:7,_Bs/binary>>) ->
+ <<Int:Len/unit:8>> = Bin,
+ Int;
+%% decoding negative integer values.
+decode_integer2(Len,<<1:1,B2:7,Bs/binary>>) ->
+ <<N:Len/unit:8>> = <<B2,Bs/binary>>,
+ Int = N - (1 bsl (8 * Len - 1)),
+ Int.
+
+get_constraint(C,Key) ->
+ case lists:keysearch(Key,1,C) of
+ false ->
+ no;
+ {value,{_,V}} ->
+ V
+ end.
+
+collect_parts(TlvList) ->
+ collect_parts(TlvList,[]).
+
+collect_parts([{_,L}|Rest],Acc) when list(L) ->
+ collect_parts(Rest,[collect_parts(L)|Acc]);
+collect_parts([{?N_BIT_STRING,<<Unused,Bits/binary>>}|Rest],_Acc) ->
+ collect_parts_bit(Rest,[Bits],Unused);
+collect_parts([{_T,V}|Rest],Acc) ->
+ collect_parts(Rest,[V|Acc]);
+collect_parts([],Acc) ->
+ list_to_binary(lists:reverse(Acc)).
+
+collect_parts_bit([{?N_BIT_STRING,<<Unused,Bits/binary>>}|Rest],Acc,Uacc) ->
+ collect_parts_bit(Rest,[Bits|Acc],Unused+Uacc);
+collect_parts_bit([],Acc,Uacc) ->
+ list_to_binary([Uacc|lists:reverse(Acc)]).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_check.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_check.erl
new file mode 100644
index 0000000000..cfda8a2a88
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_check.erl
@@ -0,0 +1,333 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1rt_check.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+-module(asn1rt_check).
+
+-include("asn1_records.hrl").
+
+-export([check_bool/2,
+ check_int/3,
+ check_bitstring/3,
+ check_octetstring/2,
+ check_null/2,
+ check_objectidentifier/2,
+ check_objectdescriptor/2,
+ check_real/2,
+ check_enum/3,
+ check_restrictedstring/2]).
+
+-export([transform_to_EXTERNAL1990/1,
+ transform_to_EXTERNAL1994/1]).
+
+
+check_bool(_Bool,asn1_DEFAULT) ->
+ true;
+check_bool(Bool,Bool) when Bool == true; Bool == false ->
+ true;
+check_bool(_Bool1,Bool2) ->
+ throw({error,Bool2}).
+
+check_int(_,asn1_DEFAULT,_) ->
+ true;
+check_int(Value,Value,_) when integer(Value) ->
+ true;
+check_int(DefValue,Value,NNL) when atom(Value) ->
+ case lists:keysearch(Value,1,NNL) of
+ {value,{_,DefValue}} ->
+ true;
+ _ ->
+ throw({error,DefValue})
+ end;
+check_int(DefaultValue,_Value,_) ->
+ throw({error,DefaultValue}).
+
+% check_bitstring([H|T],[H|T],_) when integer(H) ->
+% true;
+% check_bitstring(V,V,_) when integer(V) ->
+% true;
+%% Two equal lists or integers
+check_bitstring(_,asn1_DEFAULT,_) ->
+ true;
+check_bitstring(V,V,_) ->
+ true;
+%% Default value as a list of 1 and 0 and user value as an integer
+check_bitstring(L=[H|T],Int,_) when integer(Int),integer(H) ->
+ case bit_list_to_int(L,length(T)) of
+ Int -> true;
+ _ -> throw({error,L,Int})
+ end;
+%% Default value as an integer, val as list
+check_bitstring(Int,Val,NBL) when integer(Int),list(Val) ->
+ BL = int_to_bit_list(Int,[],length(Val)),
+ check_bitstring(BL,Val,NBL);
+%% Default value and user value as lists of ones and zeros
+check_bitstring(L1=[H1|_T1],L2=[H2|_T2],NBL=[_H|_T]) when integer(H1),integer(H2) ->
+ L2new = remove_trailing_zeros(L2),
+ check_bitstring(L1,L2new,NBL);
+%% Default value as a list of 1 and 0 and user value as a list of atoms
+check_bitstring(L1=[H1|_T1],L2=[H2|_T2],NBL) when integer(H1),atom(H2) ->
+ case bit_list_to_nbl(L1,NBL,0,[]) of
+ L3 -> check_bitstring(L3,L2,NBL);
+ _ -> throw({error,L2})
+ end;
+%% Both default value and user value as a list of atoms
+check_bitstring(L1=[H1|T1],L2=[H2|_T2],_) when atom(H1),atom(H2) ->
+ length(L1) == length(L2),
+ case lists:member(H1,L2) of
+ true ->
+ check_bitstring1(T1,L2);
+ false -> throw({error,L2})
+ end;
+%% Default value as a list of atoms and user value as a list of 1 and 0
+check_bitstring(L1=[H1|_T1],L2=[H2|_T2],NBL) when atom(H1),integer(H2) ->
+ case bit_list_to_nbl(L2,NBL,0,[]) of
+ L3 ->
+ check_bitstring(L1,L3,NBL);
+ _ -> throw({error,L2})
+ end;
+%% User value in compact format
+check_bitstring(DefVal,CBS={_,_},NBL) ->
+ NewVal = cbs_to_bit_list(CBS),
+ check_bitstring(DefVal,NewVal,NBL);
+check_bitstring(DV,V,_) ->
+ throw({error,DV,V}).
+
+
+bit_list_to_int([0|Bs],ShL)->
+ bit_list_to_int(Bs,ShL-1) + 0;
+bit_list_to_int([1|Bs],ShL) ->
+ bit_list_to_int(Bs,ShL-1) + (1 bsl ShL);
+bit_list_to_int([],_) ->
+ 0.
+
+int_to_bit_list(0,Acc,0) ->
+ Acc;
+int_to_bit_list(Int,Acc,Len) ->
+ int_to_bit_list(Int bsr 1,[Int band 1|Acc],Len - 1).
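+
+%% For illustration (most significant bit first, ShL = length of the tail):
+%%   bit_list_to_int([1,0,1], 2) -> 5
+%%   int_to_bit_list(5, [], 3)   -> [1,0,1]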
+
+bit_list_to_nbl([0|T],NBL,Pos,Acc) ->
+ bit_list_to_nbl(T,NBL,Pos+1,Acc);
+bit_list_to_nbl([1|T],NBL,Pos,Acc) ->
+ case lists:keysearch(Pos,2,NBL) of
+ {value,{N,_}} ->
+ bit_list_to_nbl(T,NBL,Pos+1,[N|Acc]);
+ _ ->
+ throw({error,{no,named,element,at,pos,Pos}})
+ end;
+bit_list_to_nbl([],_,_,Acc) ->
+ Acc.
+
+remove_trailing_zeros(L2) ->
+ remove_trailing_zeros1(lists:reverse(L2)).
+remove_trailing_zeros1(L) ->
+ lists:reverse(lists:dropwhile(fun(0)->true;
+ (_) ->false
+ end,
+ L)).
+
+check_bitstring1([H|T],NBL) ->
+ case lists:member(H,NBL) of
+ true ->
+ check_bitstring1(T,NBL);
+ V -> throw({error,V})
+ end;
+check_bitstring1([],_) ->
+ true.
+
+cbs_to_bit_list({Unused,<<B7:1,B6:1,B5:1,B4:1,B3:1,B2:1,B1:1,B0:1,Rest/binary>>}) when size(Rest) >= 1 ->
+ [B7,B6,B5,B4,B3,B2,B1,B0|cbs_to_bit_list({Unused,Rest})];
+cbs_to_bit_list({0,<<B7:1,B6:1,B5:1,B4:1,B3:1,B2:1,B1:1,B0:1>>}) ->
+ [B7,B6,B5,B4,B3,B2,B1,B0];
+cbs_to_bit_list({Unused,Bin}) when size(Bin) == 1 ->
+ Used = 8-Unused,
+ <<Int:Used,_:Unused>> = Bin,
+ int_to_bit_list(Int,[],Used).
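+
+%% For illustration, a compact BIT STRING with 4 unused bits:
+%%   cbs_to_bit_list({4,<<2#10110000>>}) -> [1,0,1,1]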
+
+
+check_octetstring(_,asn1_DEFAULT) ->
+ true;
+check_octetstring(L,L) ->
+ true;
+check_octetstring(L,Int) when list(L),integer(Int) ->
+ case integer_to_octetlist(Int) of
+ L -> true;
+ V -> throw({error,V})
+ end;
+check_octetstring(_,V) ->
+ throw({error,V}).
+
+integer_to_octetlist(Int) ->
+ integer_to_octetlist(Int,[]).
+integer_to_octetlist(0,Acc) ->
+ Acc;
+integer_to_octetlist(Int,Acc) ->
+ integer_to_octetlist(Int bsr 8,[(Int band 255)|Acc]).
+
+check_null(_,asn1_DEFAULT) ->
+ true;
+check_null('NULL','NULL') ->
+ true;
+check_null(_,V) ->
+ throw({error,V}).
+
+check_objectidentifier(_,asn1_DEFAULT) ->
+ true;
+check_objectidentifier(OI,OI) ->
+ true;
+check_objectidentifier(DOI,OI) when tuple(DOI),tuple(OI) ->
+ check_objectidentifier1(tuple_to_list(DOI),tuple_to_list(OI));
+check_objectidentifier(_,OI) ->
+ throw({error,OI}).
+
+check_objectidentifier1([V|Rest1],[V|Rest2]) ->
+ check_objectidentifier1(Rest1,Rest2,V);
+check_objectidentifier1([V1|Rest1],[V2|Rest2]) ->
+ case reserved_objectid(V2,[]) of
+ V1 ->
+ check_objectidentifier1(Rest1,Rest2,[V1]);
+ V ->
+ throw({error,V})
+ end.
+check_objectidentifier1([V|Rest1],[V|Rest2],Above) ->
+ check_objectidentifier1(Rest1,Rest2,[V|Above]);
+check_objectidentifier1([V1|Rest1],[V2|Rest2],Above) ->
+ case reserved_objectid(V2,Above) of
+ V1 ->
+ check_objectidentifier1(Rest1,Rest2,[V1|Above]);
+ V ->
+ throw({error,V})
+ end;
+check_objectidentifier1([],[],_) ->
+ true;
+check_objectidentifier1(_,V,_) ->
+ throw({error,object,identifier,V}).
+
+%% ITU-T Rec. X.680 Annex B - D
+reserved_objectid('itu-t',[]) -> 0;
+reserved_objectid('ccitt',[]) -> 0;
+%% arcs below "itu-t"
+reserved_objectid('recommendation',[0]) -> 0;
+reserved_objectid('question',[0]) -> 1;
+reserved_objectid('administration',[0]) -> 2;
+reserved_objectid('network-operator',[0]) -> 3;
+reserved_objectid('identified-organization',[0]) -> 4;
+
+reserved_objectid(iso,[]) -> 1;
+%% arcs below "iso", note that number 1 is not used
+reserved_objectid('standard',[1]) -> 0;
+reserved_objectid('member-body',[1]) -> 2;
+reserved_objectid('identified-organization',[1]) -> 3;
+
+reserved_objectid('joint-iso-itu-t',[]) -> 2;
+reserved_objectid('joint-iso-ccitt',[]) -> 2;
+
+reserved_objectid(_,_) -> false.
+
+
+check_objectdescriptor(_,asn1_DEFAULT) ->
+ true;
+check_objectdescriptor(OD,OD) ->
+ true;
+check_objectdescriptor(OD,OD) ->
+ throw({error,{not_implemented_yet,check_objectdescriptor}}).
+
+check_real(_,asn1_DEFAULT) ->
+ true;
+check_real(R,R) ->
+ true;
+check_real(_,_) ->
+ throw({error,{not_implemented_yet,check_real}}).
+
+check_enum(_,asn1_DEFAULT,_) ->
+ true;
+check_enum(Val,Val,_) ->
+ true;
+check_enum(Int,Atom,Enumerations) when integer(Int),atom(Atom) ->
+ case lists:keysearch(Atom,1,Enumerations) of
+ {value,{_,Int}} -> true;
+ _ -> throw({error,{enumerated,Int,Atom}})
+ end;
+check_enum(DefVal,Val,_) ->
+ throw({error,{enumerated,DefVal,Val}}).
+
+
+check_restrictedstring(_,asn1_DEFAULT) ->
+ true;
+check_restrictedstring(Val,Val) ->
+ true;
+check_restrictedstring([V|Rest1],[V|Rest2]) ->
+ check_restrictedstring(Rest1,Rest2);
+check_restrictedstring([V1|Rest1],[V2|Rest2]) ->
+ check_restrictedstring(V1,V2),
+ check_restrictedstring(Rest1,Rest2);
+%% tuple format of value
+check_restrictedstring({V1,V2},[V1,V2]) ->
+ true;
+check_restrictedstring([V1,V2],{V1,V2}) ->
+ true;
+%% quadruple format of value
+check_restrictedstring({V1,V2,V3,V4},[V1,V2,V3,V4]) ->
+ true;
+check_restrictedstring([V1,V2,V3,V4],{V1,V2,V3,V4}) ->
+ true;
+%% character string list
+check_restrictedstring(V1,V2) when list(V1),tuple(V2) ->
+ check_restrictedstring(V1,tuple_to_list(V2));
+check_restrictedstring(V1,V2) ->
+ throw({error,{restricted,string,V1,V2}}).
+
+transform_to_EXTERNAL1990(Val) when tuple(Val),size(Val) == 4 ->
+ transform_to_EXTERNAL1990(tuple_to_list(Val),[]);
+transform_to_EXTERNAL1990(Val) when tuple(Val) ->
+ %% Data already in ASN1 1990 format
+ Val.
+
+transform_to_EXTERNAL1990(['EXTERNAL'|Rest],Acc) ->
+ transform_to_EXTERNAL1990(Rest,['EXTERNAL'|Acc]);
+transform_to_EXTERNAL1990([{syntax,Syntax}|Rest],Acc) ->
+ transform_to_EXTERNAL1990(Rest,[asn1_NOVALUE,Syntax|Acc]);
+transform_to_EXTERNAL1990([{'presentation-context-id',PCid}|Rest],Acc) ->
+ transform_to_EXTERNAL1990(Rest,[PCid,asn1_NOVALUE|Acc]);
+transform_to_EXTERNAL1990([{'context-negotiation',Context_negot}|Rest],Acc) ->
+ {_,Presentation_Cid,Transfer_syntax} = Context_negot,
+ transform_to_EXTERNAL1990(Rest,[Transfer_syntax,Presentation_Cid|Acc]);
+transform_to_EXTERNAL1990([asn1_NOVALUE|Rest],Acc) ->
+ transform_to_EXTERNAL1990(Rest,[asn1_NOVALUE|Acc]);
+transform_to_EXTERNAL1990([Data_val_desc,Data_value],Acc) when list(Data_value)->
+ list_to_tuple(lists:reverse([{'octet-aligned',Data_value},
+ Data_val_desc|Acc]));
+transform_to_EXTERNAL1990([Data_value],Acc) when list(Data_value)->
+ list_to_tuple(lists:reverse([{'octet-aligned',Data_value}|Acc])).
+
+
+transform_to_EXTERNAL1994(V={'EXTERNAL',DRef,IndRef,Data_v_desc,Encoding}) ->
+ Identification =
+ case {DRef,IndRef} of
+ {DRef,asn1_NOVALUE} ->
+ {syntax,DRef};
+ {asn1_NOVALUE,IndRef} ->
+ {'presentation-context-id',IndRef};
+ _ ->
+ {'context-negotiation',
+ {'EXTERNAL_identification_context-negotiation',IndRef,DRef}}
+ end,
+ case Encoding of
+ {_,Val} when list(Val) ->
+ {'EXTERNAL',Identification,Data_v_desc,Val};
+ _ ->
+ V
+ end.
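+
+%% For illustration (field values chosen arbitrarily, OID {1,2} as syntax),
+%% the two transformations above are inverses for the 'octet-aligned' case:
+%%   transform_to_EXTERNAL1990({'EXTERNAL',{syntax,{1,2}},asn1_NOVALUE,"AB"}) ->
+%%       {'EXTERNAL',{1,2},asn1_NOVALUE,asn1_NOVALUE,{'octet-aligned',"AB"}}
+%% and transform_to_EXTERNAL1994/1 maps that 1990 tuple back to the 1994 form.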
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_driver_handler.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_driver_handler.erl
new file mode 100644
index 0000000000..5200f9d2d9
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_driver_handler.erl
@@ -0,0 +1,108 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1rt_driver_handler.erl,v 1.1 2008/12/17 09:53:30 mikpe Exp $
+%%
+
+-module(asn1rt_driver_handler).
+
+-export([init/1,load_driver/0,unload_driver/0]).
+
+
+load_driver() ->
+ spawn(asn1rt_driver_handler, init, [self()]).
+
+init(From) ->
+ Port=
+ case load_driver("asn1_erl_drv") of
+ ok ->
+ open_named_port(From);
+ already_done ->
+ From ! driver_ready;
+ Error -> % if erl_ddll:load_driver fails
+ erl_ddll:unload_driver("asn1_erl_drv"),
+ From ! Error
+ end,
+ register_and_loop(Port).
+
+load_driver(DriverName) ->
+ case is_driver_loaded(DriverName) of
+ false ->
+ Dir = filename:join([code:priv_dir(asn1),"lib"]),
+ erl_ddll:load_driver(Dir,DriverName);
+ true ->
+ ok
+ end.
+
+
+is_driver_loaded(_Name) ->
+ case whereis(asn1_driver_owner) of
+ undefined ->
+ false;
+ _ ->
+ true
+ end.
+
+open_named_port(From) ->
+ case is_port_open(drv_complete) of
+ false ->
+ case catch open_port({spawn,"asn1_erl_drv"},[]) of
+ {'EXIT',Reason} ->
+ From ! {port_error,Reason};
+ Port ->
+ register(drv_complete,Port),
+ From ! driver_ready,
+ Port
+ end;
+ _ ->
+ From ! driver_ready,
+ ok
+ end.
+
+is_port_open(Name) ->
+ case whereis(Name) of
+ Port when port(Port) ->
+ true;
+ _ -> false
+ end.
+
+register_and_loop(Port) when port(Port) ->
+ register(asn1_driver_owner,self()),
+ loop();
+register_and_loop(_) ->
+ ok.
+
+loop() ->
+ receive
+ unload ->
+ case whereis(drv_complete) of
+ Port when port(Port) ->
+ port_close(Port);
+ _ -> ok
+ end,
+ erl_ddll:unload_driver("asn1_erl_drv"),
+ ok;
+ _ ->
+ loop()
+ end.
+
+unload_driver() ->
+ case whereis(asn1_driver_owner) of
+ Pid when pid(Pid) ->
+ Pid ! unload,
+ ok;
+ _ ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl
new file mode 100644
index 0000000000..4999dde2cc
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl
@@ -0,0 +1,1593 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1rt_per.erl,v 1.1 2008/12/17 09:53:31 mikpe Exp $
+%%
+-module(asn1rt_per).
+
+%% encoding / decoding of PER aligned
+
+-include("asn1_records.hrl").
+
+-export([dec_fixup/3, cindex/3, list_to_record/2]).
+-export([setchoiceext/1, setext/1, fixoptionals/2, fixextensions/2, setoptionals/1,
+ getext/1, getextension/2, skipextensions/3, getbit/1, getchoice/3 ]).
+-export([getoptionals/3, set_choice/3, encode_integer/2, encode_integer/3 ]).
+-export([decode_integer/2, decode_integer/3, encode_boolean/1,
+ decode_boolean/1, encode_length/2, decode_length/1, decode_length/2,
+ encode_small_length/1, decode_small_length/1]).
+-export([encode_enumerated/3, decode_enumerated/3,
+ encode_bit_string/3, decode_bit_string/3 ]).
+-export([encode_octet_string/2, decode_octet_string/2,
+ encode_restricted_string/4, encode_restricted_string/5,
+ decode_restricted_string/4, decode_restricted_string/5,
+ encode_null/1, decode_null/1,
+ encode_object_identifier/1, decode_object_identifier/1,
+ complete/1]).
+
+-export([encode_open_type/2, decode_open_type/2]).
+
+-export([encode_UniversalString/2, decode_UniversalString/2,
+ encode_PrintableString/2, decode_PrintableString/2,
+ encode_GeneralString/2, decode_GeneralString/2,
+ encode_GraphicString/2, decode_GraphicString/2,
+ encode_TeletexString/2, decode_TeletexString/2,
+ encode_VideotexString/2, decode_VideotexString/2,
+ encode_VisibleString/2, decode_VisibleString/2,
+ encode_BMPString/2, decode_BMPString/2,
+ encode_IA5String/2, decode_IA5String/2,
+ encode_NumericString/2, decode_NumericString/2
+ ]).
+
+
+dec_fixup(Terms,Cnames,RemBytes) ->
+ dec_fixup(Terms,Cnames,RemBytes,[]).
+
+dec_fixup([novalue|T],[_Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,Acc);
+dec_fixup([{_Name,novalue}|T],[_Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,Acc);
+dec_fixup([H|T],[Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,[{Hc,H}|Acc]);
+dec_fixup([],_Cnames,RemBytes,Acc) ->
+ {lists:reverse(Acc),RemBytes}.
+
+cindex(Ix,Val,Cname) ->
+ case element(Ix,Val) of
+ {Cname,Val2} -> Val2;
+ X -> X
+ end.
+
+% converts a list to a record if necessary
+list_to_record(Name,List) when list(List) ->
+ list_to_tuple([Name|List]);
+list_to_record(_Name,Tuple) when tuple(Tuple) ->
+ Tuple.
+
+%%--------------------------------------------------------
+%% setchoiceext(InRootSet) -> [{bit,X}]
+%% X is set to 1 when InRootSet==false
+%% X is set to 0 when InRootSet==true
+%%
+setchoiceext(true) ->
+ [{debug,choiceext},{bit,0}];
+setchoiceext(false) ->
+ [{debug,choiceext},{bit,1}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% setext(true|false) -> CompleteList
+%%
+
+setext(true) ->
+ [{debug,ext},{bit,1}];
+setext(false) ->
+ [{debug,ext},{bit,0}].
+
+fixoptionals(OptList,Val) when tuple(Val) ->
+ fixoptionals(OptList,Val,[]);
+
+fixoptionals(OptList,Val) when list(Val) ->
+ fixoptionals(OptList,Val,1,[],[]).
+
+fixoptionals([],Val,Acc) ->
+ % return {Val,Opt}
+ {Val,lists:reverse(Acc)};
+fixoptionals([{_,Pos}|Ot],Val,Acc) ->
+ case element(Pos+1,Val) of
+ asn1_NOVALUE -> fixoptionals(Ot,Val,[0|Acc]);
+ asn1_DEFAULT -> fixoptionals(Ot,Val,[0|Acc]);
+ _ -> fixoptionals(Ot,Val,[1|Acc])
+ end.
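+
+%% For illustration (assuming a record-style value tuple whose first element
+%% is the record name, so field Pos lives at element Pos+1):
+%%   fixoptionals([{b,2},{d,4}], {'Seq',10,asn1_NOVALUE,20,30}) ->
+%%       {{'Seq',10,asn1_NOVALUE,20,30},[0,1]}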
+
+
+%setoptionals(OptList,Val) ->
+% Vlist = tuple_to_list(Val),
+% setoptionals(OptList,Vlist,1,[]).
+
+fixoptionals([{Name,Pos}|Ot],[{Name,Val}|Vt],_Opt,Acc1,Acc2) ->
+ fixoptionals(Ot,Vt,Pos+1,[1|Acc1],[{Name,Val}|Acc2]);
+fixoptionals([{_Name,Pos}|Ot],V,Pos,Acc1,Acc2) ->
+ fixoptionals(Ot,V,Pos+1,[0|Acc1],[asn1_NOVALUE|Acc2]);
+fixoptionals(O,[Vh|Vt],Pos,Acc1,Acc2) ->
+ fixoptionals(O,Vt,Pos+1,Acc1,[Vh|Acc2]);
+fixoptionals([],[Vh|Vt],Pos,Acc1,Acc2) ->
+ fixoptionals([],Vt,Pos+1,Acc1,[Vh|Acc2]);
+fixoptionals([],[],_,Acc1,Acc2) ->
+ % return {Val,Opt}
+ {list_to_tuple([asn1_RECORDNAME|lists:reverse(Acc2)]),lists:reverse(Acc1)}.
+
+setoptionals([H|T]) ->
+ [{bit,H}|setoptionals(T)];
+setoptionals([]) ->
+ [{debug,optionals}].
+
+getext(Bytes) when tuple(Bytes) ->
+ getbit(Bytes);
+getext(Bytes) when list(Bytes) ->
+ getbit({0,Bytes}).
+
+getextension(0, Bytes) ->
+ {{},Bytes};
+getextension(1, Bytes) ->
+ {Len,Bytes2} = decode_small_length(Bytes),
+ {Blist, Bytes3} = getbits_as_list(Len,Bytes2),
+ {list_to_tuple(Blist),Bytes3}.
+
+fixextensions({ext,ExtPos,ExtNum},Val) ->
+ case fixextensions(ExtPos,ExtNum+ExtPos,Val,0) of
+ 0 -> [];
+ ExtBits ->
+ [encode_small_length(ExtNum),{bits,ExtNum,ExtBits}]
+ end.
+
+fixextensions(Pos,MaxPos,_,Acc) when Pos >= MaxPos ->
+ Acc;
+fixextensions(Pos,ExtPos,Val,Acc) ->
+ Bit = case catch(element(Pos+1,Val)) of
+ asn1_NOVALUE ->
+ 0;
+ asn1_NOEXTVALUE ->
+ 0;
+ {'EXIT',_} ->
+ 0;
+ _ ->
+ 1
+ end,
+ fixextensions(Pos+1,ExtPos,Val,(Acc bsl 1)+Bit).
+
+skipextensions(Bytes,Nr,ExtensionBitPattern) ->
+ case (catch element(Nr,ExtensionBitPattern)) of
+ 1 ->
+ {_,Bytes2} = decode_open_type(Bytes,[]),
+ skipextensions(Bytes2, Nr+1, ExtensionBitPattern);
+ 0 ->
+ skipextensions(Bytes, Nr+1, ExtensionBitPattern);
+ {'EXIT',_} -> % badarg, no more extensions
+ Bytes
+ end.
+
+
+getchoice(Bytes,1,0) -> % only 1 alternative is not encoded
+ {0,Bytes};
+getchoice(Bytes,_NumChoices,1) ->
+ decode_small_number(Bytes);
+getchoice(Bytes,NumChoices,0) ->
+ decode_integer(Bytes,[{'ValueRange',{0,NumChoices-1}}]).
+
+getoptionals(Bytes,L,NumComp) when list(L) ->
+ {Blist,Bytes1} = getbits_as_list(length(L),Bytes),
+ {list_to_tuple(comptuple(Blist,L,NumComp,1)),Bytes1}.
+
+comptuple([Bh|Bt],[{_Name,Nr}|T],NumComp,Nr) ->
+ [Bh|comptuple(Bt,T,NumComp-1,Nr+1)];
+comptuple(Bl,[{Name,Tnr}|Tl],NumComp,Nr) ->
+ [0|comptuple(Bl,[{Name,Tnr}|Tl],NumComp-1,Nr+1)];
+comptuple(_B,_L,0,_Nr) ->
+ [];
+comptuple(B,O,N,Nr) ->
+ [0|comptuple(B,O,N-1,Nr+1)].
+
+getbits_as_list(Num,Bytes) ->
+ getbits_as_list(Num,Bytes,[]).
+
+getbits_as_list(0,Bytes,Acc) ->
+ {lists:reverse(Acc),Bytes};
+getbits_as_list(Num,Bytes,Acc) ->
+ {Bit,NewBytes} = getbit(Bytes),
+ getbits_as_list(Num-1,NewBytes,[Bit|Acc]).
+
+getbit(Bytes) ->
+% io:format("getbit:~p~n",[Bytes]),
+ getbit1(Bytes).
+
+getbit1({7,[H|T]}) ->
+ {H band 1,{0,T}};
+getbit1({Pos,[H|T]}) ->
+ {(H bsr (7-Pos)) band 1,{(Pos+1) rem 8,[H|T]}};
+getbit1(Bytes) when list(Bytes) ->
+ getbit1({0,Bytes}).
+
+%% This could be optimized
+getbits(Buffer,Num) ->
+% io:format("getbits:Buffer = ~p~nNum=~p~n",[Buffer,Num]),
+ getbits(Buffer,Num,0).
+
+getbits(Buffer,0,Acc) ->
+ {Acc,Buffer};
+getbits(Buffer,Num,Acc) ->
+ {B,NewBuffer} = getbit(Buffer),
+ getbits(NewBuffer,Num-1,B + (Acc bsl 1)).
+
+
+getoctet(Bytes) when list(Bytes) ->
+ getoctet({0,Bytes});
+getoctet(Bytes) ->
+% io:format("getoctet:Buffer = ~p~n",[Bytes]),
+ getoctet1(Bytes).
+
+getoctet1({0,[H|T]}) ->
+ {H,{0,T}};
+getoctet1({_Pos,[_,H|T]}) ->
+ {H,{0,T}}.
+
+align({0,L}) ->
+ {0,L};
+align({_Pos,[_H|T]}) ->
+ {0,T};
+align(Bytes) ->
+ {0,Bytes}.
+
+getoctets(Buffer,Num) ->
+% io:format("getoctets:Buffer = ~p~nNum = ~p~n",[Buffer,Num]),
+ getoctets(Buffer,Num,0).
+
+getoctets(Buffer,0,Acc) ->
+ {Acc,Buffer};
+getoctets(Buffer,Num,Acc) ->
+ {Oct,NewBuffer} = getoctet(Buffer),
+ getoctets(NewBuffer,Num-1,(Acc bsl 8)+Oct).
+
+getoctets_as_list(Buffer,Num) ->
+ getoctets_as_list(Buffer,Num,[]).
+
+getoctets_as_list(Buffer,0,Acc) ->
+ {lists:reverse(Acc),Buffer};
+getoctets_as_list(Buffer,Num,Acc) ->
+ {Oct,NewBuffer} = getoctet(Buffer),
+ getoctets_as_list(NewBuffer,Num-1,[Oct|Acc]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% set_choice(Alt,Choices,Altnum) -> ListofBitSettings
+%% Alt = atom()
+%% Altnum = integer() | {integer(),integer()}% number of alternatives
+%% Choices = [atom()] | {[atom()],[atom()]}
+%% When Choices is a tuple the first list is the Rootset and the
+%% second is the Extensions and then Altnum must also be a tuple with the
+%% lengths of the 2 lists
+%%
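+%% For example (illustrative values), set_choice(b,[a,b,c],3) finds b at
+%% index 1 and, since there is more than one alternative, returns the
+%% encoding of a constrained integer in the range 0..2.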
+set_choice(Alt,{L1,L2},{Len1,_Len2}) ->
+ case set_choice_tag(Alt,L1) of
+ N when integer(N), Len1 > 1 ->
+ [{bit,0}, % the value is in the root set
+ encode_integer([{'ValueRange',{0,Len1-1}}],N)];
+ N when integer(N) ->
+ [{bit,0}]; % no encoding if only 0 or 1 alternative
+ false ->
+ [{bit,1}, % extension value
+ case set_choice_tag(Alt,L2) of
+ N2 when integer(N2) ->
+ encode_small_number(N2);
+ false ->
+ unknown_choice_alt
+ end]
+ end;
+set_choice(Alt,L,Len) ->
+ case set_choice_tag(Alt,L) of
+ N when integer(N), Len > 1 ->
+ encode_integer([{'ValueRange',{0,Len-1}}],N);
+ N when integer(N) ->
+ []; % no encoding if only 0 or 1 alternative
+ false ->
+ [unknown_choice_alt]
+ end.
+
+set_choice_tag(Alt,Choices) ->
+ set_choice_tag(Alt,Choices,0).
+
+set_choice_tag(Alt,[Alt|_Rest],Tag) ->
+ Tag;
+set_choice_tag(Alt,[_H|Rest],Tag) ->
+ set_choice_tag(Alt,Rest,Tag+1);
+set_choice_tag(_,[],_) ->
+ false.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_open_type(Constraint, Value) -> CompleteList
+%% Value = list of bytes of an already encoded value (the list must be flat)
+%% | binary
+%% Constraint = not used in this version
+%%
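+%% For example, encode_open_type(C,[16#0A]) produces the length determinant
+%% for 1, an align marker and {octets,[16#0A]}; complete/1 then turns this
+%% into the two octets [1,16#0A].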
+encode_open_type(_Constraint, Val) when list(Val) ->
+ [encode_length(undefined,length(Val)),align,
+ {octets,Val}];
+encode_open_type(_Constraint, Val) when binary(Val) ->
+ [encode_length(undefined,size(Val)),align,
+ {octets,binary_to_list(Val)}].
+%% the binary_to_list is not optimal but compatible with the current solution
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_open_type(Buffer,Constraint) -> Value
+%% Constraint is not used in this version
+%% Buffer = [byte] with PER encoded data
+%% Value = [byte] with decoded data (which must be decoded again as some type)
+%%
+decode_open_type(Bytes, _Constraint) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_integer(Constraint,Value,NamedNumberList) -> CompleteList
+%% encode_integer(Constraint,Value) -> CompleteList
+%% encode_integer(Constraint,{Name,Value}) -> CompleteList
+%%
+%%
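+%% For example, with the constraint [{'ValueRange',{0,7}}] the value 5 is
+%% encoded as the constrained number {bits,3,5}, while with an empty
+%% constraint list it is encoded as an unconstrained number (a length
+%% determinant followed by the minimal number of octets).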
+encode_integer(C,V,NamedNumberList) when atom(V) ->
+ case lists:keysearch(V,1,NamedNumberList) of
+ {value,{_,NewV}} ->
+ encode_integer(C,NewV);
+ _ ->
+ exit({error,{asn1,{namednumber,V}}})
+ end;
+encode_integer(C,V,_NamedNumberList) when integer(V) ->
+ encode_integer(C,V).
+
+encode_integer(C,{Name,Val}) when atom(Name) ->
+ encode_integer(C,Val);
+
+encode_integer({Rc,_Ec},Val) ->
+ case (catch encode_integer(Rc,Val)) of
+ {'EXIT',{error,{asn1,_}}} ->
+ [{bit,1},encode_unconstrained_number(Val)];
+ Encoded ->
+ [{bit,0},Encoded]
+ end;
+encode_integer(C,Val ) when list(C) ->
+ case get_constraint(C,'SingleValue') of
+ no ->
+ encode_integer1(C,Val);
+ V when integer(V),V == Val ->
+ []; % a type restricted to a single value encodes to nothing
+ V when list(V) ->
+ case lists:member(Val,V) of
+ true ->
+ encode_integer1(C,Val);
+ _ ->
+ exit({error,{asn1,{illegal_value,Val}}})
+ end;
+ _ ->
+ exit({error,{asn1,{illegal_value,Val}}})
+ end.
+
+encode_integer1(C, Val) ->
+ case VR = get_constraint(C,'ValueRange') of
+ no ->
+ encode_unconstrained_number(Val);
+ {Lb,'MAX'} ->
+ encode_semi_constrained_number(Lb,Val);
+ %% positive with range
+ {Lb,Ub} when Val >= Lb,
+ Ub >= Val ->
+ encode_constrained_number(VR,Val)
+ end.
+
+decode_integer(Buffer,Range,NamedNumberList) ->
+ {Val,Buffer2} = decode_integer(Buffer,Range),
+ case lists:keysearch(Val,2,NamedNumberList) of
+ {value,{NewVal,_}} -> {NewVal,Buffer2};
+ _ -> {Val,Buffer2}
+ end.
+
+decode_integer(Buffer,{Rc,_Ec}) ->
+ {Ext,Buffer2} = getext(Buffer),
+ case Ext of
+ 0 -> decode_integer(Buffer2,Rc);
+ 1 -> decode_unconstrained_number(Buffer2)
+ end;
+decode_integer(Buffer,undefined) ->
+ decode_unconstrained_number(Buffer);
+decode_integer(Buffer,C) ->
+ case get_constraint(C,'SingleValue') of
+ V when integer(V) ->
+ {V,Buffer};
+ V when list(V) ->
+ {Val,Buffer2} = decode_integer1(Buffer,C),
+ case lists:member(Val,V) of
+ true ->
+ {Val,Buffer2};
+ _ ->
+ exit({error,{asn1,{illegal_value,Val}}})
+ end;
+ _ ->
+ decode_integer1(Buffer,C)
+ end.
+
+decode_integer1(Buffer,C) ->
+ case VR = get_constraint(C,'ValueRange') of
+ no ->
+ decode_unconstrained_number(Buffer);
+ {Lb, 'MAX'} ->
+ decode_semi_constrained_number(Buffer,Lb);
+ {_,_} ->
+ decode_constrained_number(Buffer,VR)
+ end.
+
+% X.691:10.6 Encoding of a normally small non-negative whole number
+% Use this for encoding of CHOICE index if there is an extension marker in
+% the CHOICE
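+% For example, encode_small_number(5) gives [{bit,0},{bits,6,5}]: a single
+% 0 bit followed by the value in six bits. Values above 63 are instead
+% prefixed with a 1 bit and encoded as a semi-constrained number.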
+encode_small_number({Name,Val}) when atom(Name) ->
+ encode_small_number(Val);
+encode_small_number(Val) when Val =< 63 ->
+ [{bit,0},{bits,6,Val}];
+encode_small_number(Val) ->
+ [{bit,1},encode_semi_constrained_number(0,Val)].
+
+decode_small_number(Bytes) ->
+ {Bit,Bytes2} = getbit(Bytes),
+ case Bit of
+ 0 ->
+ getbits(Bytes2,6);
+ 1 ->
+ decode_semi_constrained_number(Bytes2,{0,'MAX'})
+ end.
+
+% X.691:10.7 Encoding of a semi-constrained whole number
+%% a clause encode_semi_constrained_number(0,Val) might be added as an optimization
+encode_semi_constrained_number(C,{Name,Val}) when atom(Name) ->
+ encode_semi_constrained_number(C,Val);
+encode_semi_constrained_number({Lb,'MAX'},Val) ->
+ encode_semi_constrained_number(Lb,Val);
+encode_semi_constrained_number(Lb,Val) ->
+ Val2 = Val - Lb,
+ Octs = eint_positive(Val2),
+ [encode_length(undefined,length(Octs)),{octets,Octs}].
+
+decode_semi_constrained_number(Bytes,{Lb,_}) ->
+ decode_semi_constrained_number(Bytes,Lb);
+decode_semi_constrained_number(Bytes,Lb) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {V,Bytes3} = getoctets(Bytes2,Len),
+ {V+Lb,Bytes3}.
+
+encode_constrained_number(Range,{Name,Val}) when atom(Name) ->
+ encode_constrained_number(Range,Val);
+encode_constrained_number({Lb,Ub},Val) when Val >= Lb, Ub >= Val ->
+ Range = Ub - Lb + 1,
+ Val2 = Val - Lb,
+ if
+ Range == 2 ->
+ {bits,1,Val2};
+ Range =< 4 ->
+ {bits,2,Val2};
+ Range =< 8 ->
+ {bits,3,Val2};
+ Range =< 16 ->
+ {bits,4,Val2};
+ Range =< 32 ->
+ {bits,5,Val2};
+ Range =< 64 ->
+ {bits,6,Val2};
+ Range =< 128 ->
+ {bits,7,Val2};
+ Range =< 255 ->
+ {bits,8,Val2};
+ Range =< 256 ->
+ {octets,1,Val2};
+ Range =< 65536 ->
+ {octets,2,Val2};
+ Range =< 16#1000000 ->
+ Octs = eint_positive(Val2),
+ [encode_length({1,3},length(Octs)),{octets,Octs}];
+ Range =< 16#100000000 ->
+ Octs = eint_positive(Val2),
+ [encode_length({1,4},length(Octs)),{octets,Octs}];
+ Range =< 16#10000000000 ->
+ Octs = eint_positive(Val2),
+ [encode_length({1,5},length(Octs)),{octets,Octs}];
+ true ->
+ exit({not_supported,{integer_range,Range}})
+ end.
+
+decode_constrained_number(Buffer,{Lb,Ub}) ->
+ Range = Ub - Lb + 1,
+% Val2 = Val - Lb,
+ {Val,Remain} =
+ if
+ Range == 2 ->
+ getbits(Buffer,1);
+ Range =< 4 ->
+ getbits(Buffer,2);
+ Range =< 8 ->
+ getbits(Buffer,3);
+ Range =< 16 ->
+ getbits(Buffer,4);
+ Range =< 32 ->
+ getbits(Buffer,5);
+ Range =< 64 ->
+ getbits(Buffer,6);
+ Range =< 128 ->
+ getbits(Buffer,7);
+ Range =< 255 ->
+ getbits(Buffer,8);
+ Range =< 256 ->
+ getoctets(Buffer,1);
+ Range =< 65536 ->
+ getoctets(Buffer,2);
+ Range =< 16#1000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,3}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ Range =< 16#100000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,4}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ Range =< 16#10000000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,5}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ true ->
+ exit({not_supported,{integer_range,Range}})
+ end,
+ {Val+Lb,Remain}.
+
+% X.691:10.8 Encoding of an unconstrained whole number
+
+encode_unconstrained_number(Val) when Val >= 0 ->
+ Oct = eint(Val,[]),
+ [{debug,unconstrained_number},
+ encode_length({0,'MAX'},length(Oct)),
+ {octets,Oct}];
+encode_unconstrained_number(Val) -> % negative
+ Oct = enint(Val,[]),
+ [{debug,unconstrained_number},
+ encode_length({0,'MAX'},length(Oct)),
+ {octets,Oct}].
+
+%% used for positive Values which don't need a sign bit
+eint_positive(Val) ->
+ case eint(Val,[]) of
+ [0,B1|T] ->
+ [B1|T];
+ T ->
+ T
+ end.
+
+eint(0, [B|Acc]) when B < 128 ->
+ [B|Acc];
+eint(N, Acc) ->
+ eint(N bsr 8, [N band 16#ff| Acc]).
+
+enint(-1, [B1|T]) when B1 > 127 ->
+ [B1|T];
+enint(N, Acc) ->
+ enint(N bsr 8, [N band 16#ff|Acc]).
+
+%% used for signed positive values
+
+%eint(Val, Ack) ->
+% X = Val band 255,
+% Next = Val bsr 8,
+% if
+% Next == 0, X >= 127 ->
+% [0,X|Ack];
+% Next == 0 ->
+% [X|Ack];
+% true ->
+% eint(Next,[X|Ack])
+% end.
+
+%%% used for signed negative values
+%enint(Val, Acc) ->
+% NumOctets = if
+% -Val < 16#80 -> 1;
+% -Val < 16#8000 ->2;
+% -Val < 16#800000 ->3;
+% -Val < 16#80000000 ->4;
+% -Val < 16#8000000000 ->5;
+% -Val < 16#800000000000 ->6;
+% -Val < 16#80000000000000 ->7;
+% -Val < 16#8000000000000000 ->8;
+% -Val < 16#800000000000000000 ->9
+% end,
+% enint(Val,Acc,NumOctets).
+
+%enint(Val, Acc,0) ->
+% Acc;
+%enint(Val, Acc,NumOctets) ->
+% enint(Val bsr 8,[Val band 255|Acc],NumOctets-1).
+
+
+decode_unconstrained_number(Bytes) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {Ints,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_integer(Ints),Bytes3}.
+
+dec_pos_integer(Ints) ->
+ decpint(Ints, 8 * (length(Ints) - 1)).
+dec_integer(Ints) when hd(Ints) band 255 =< 127 -> %% Positive number
+ decpint(Ints, 8 * (length(Ints) - 1));
+dec_integer(Ints) -> %% Negative
+ decnint(Ints, 8 * (length(Ints) - 1)).
+
+decpint([Byte|Tail], Shift) ->
+ (Byte bsl Shift) bor decpint(Tail, Shift-8);
+decpint([], _) -> 0.
+
+decnint([Byte|Tail], Shift) ->
+ (-128 + (Byte band 127) bsl Shift) bor decpint(Tail, Shift-8).
+
+minimum_octets(Val) ->
+ minimum_octets(Val,[]).
+
+minimum_octets(Val,Acc) when Val > 0 ->
+ minimum_octets((Val bsr 8),[Val band 16#FF|Acc]);
+minimum_octets(0,Acc) ->
+ Acc.
+
+
+%% X.691:10.9 Encoding of a length determinant
+%%encode_small_length(undefined,Len) -> % null means no UpperBound
+%% encode_small_number(Len).
+
+%% X.691:10.9.3.5
+%% X.691:10.9.3.7
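+%% For example, with an unconstrained upper bound encode_length(undefined,3)
+%% gives {octet,3} (one octet, top bit 0), while encode_length(undefined,300)
+%% gives {octets,2,2#1000000000000000 bor 300} (two octets, top bits 10).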
+encode_length(undefined,Len) -> % un-constrained
+ if
+ Len < 128 ->
+ {octet,Len band 16#7F};
+ Len < 16384 ->
+ {octets,2,2#1000000000000000 bor Len};
+ true ->
+ exit({error,{asn1,{encode_length,{nyi,above_16k}}}})
+ end;
+
+encode_length({0,'MAX'},Len) ->
+ encode_length(undefined,Len);
+encode_length({Lb,Ub},Len) when Ub =< 65535 ,Lb >= 0 -> % constrained
+ encode_constrained_number({Lb,Ub},Len);
+encode_length(SingleValue,_Len) when integer(SingleValue) ->
+ [].
+
+encode_small_length(Len) when Len =< 64 ->
+ [{bit,0},{bits,6,Len-1}];
+encode_small_length(Len) ->
+ [{bit,1},encode_length(undefined,Len)].
+
+decode_small_length(Buffer) ->
+ case getbit(Buffer) of
+ {0,Remain} ->
+ {Bits,Remain2} = getbits(Remain,6),
+ {Bits+1,Remain2};
+ {1,Remain} ->
+ decode_length(Remain,undefined)
+ end.
+
+decode_length(Buffer) ->
+ decode_length(Buffer,undefined).
+
+decode_length(Buffer,undefined) -> % un-constrained
+ Buffer2 = align(Buffer),
+ {Bits,_} = getbits(Buffer2,2),
+ case Bits of
+ 2 ->
+ {Val,Bytes3} = getoctets(Buffer2,2),
+ {(Val band 16#3FFF),Bytes3};
+ 3 ->
+ exit({error,{asn1,{decode_length,{nyi,above_16k}}}});
+ _ ->
+ {Val,Bytes3} = getoctet(Buffer2),
+ {Val band 16#7F,Bytes3}
+ end;
+
+decode_length(Buffer,{Lb,Ub}) when Ub =< 65535 ,Lb >= 0 -> % constrained
+ decode_constrained_number(Buffer,{Lb,Ub});
+ % X.691:10.9.3.5
+decode_length(Buffer,{_,_Lb,_Ub}) -> %when Len =< 127 -> % Unconstrained or large Ub
+ case getbit(Buffer) of
+ {0,Remain} ->
+ getbits(Remain,7);
+ {1,_Remain} ->
+ {Val,Remain2} = getoctets(Buffer,2),
+ {Val band 2#0111111111111111, Remain2}
+ end;
+decode_length(Buffer,SingleValue) when integer(SingleValue) ->
+ {SingleValue,Buffer}.
+
+
+% X.691:11
+encode_boolean({Name,Val}) when atom(Name) ->
+ encode_boolean(Val);
+encode_boolean(true) ->
+ {bit,1};
+encode_boolean(false) ->
+ {bit,0};
+encode_boolean(Val) ->
+ exit({error,{asn1,{encode_boolean,Val}}}).
+
+
+decode_boolean(Buffer) -> %when record(Buffer,buffer)
+ case getbit(Buffer) of
+ {1,Remain} -> {true,Remain};
+ {0,Remain} -> {false,Remain}
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% X.691:12
+%% ENUMERATED
+%%
+%% encode_enumerated(C,Value,NamedNumberTup) -> CompleteList
+%%
+%%
+
+encode_enumerated(C,{Name,Value},NamedNumberList) when
+ atom(Name),list(NamedNumberList) ->
+ encode_enumerated(C,Value,NamedNumberList);
+
+%% ENUMERATED with extension mark
+encode_enumerated(_C,{asn1_enum,Value},{_Nlist1,Nlist2}) when Value >= length(Nlist2) ->
+ [{bit,1},encode_small_number(Value)];
+encode_enumerated(C,Value,{Nlist1,Nlist2}) ->
+ case enum_search(Value,Nlist1,0) of
+ NewV when integer(NewV) ->
+ [{bit,0},encode_integer(C,NewV)];
+ false ->
+ case enum_search(Value,Nlist2,0) of
+ ExtV when integer(ExtV) ->
+ [{bit,1},encode_small_number(ExtV)];
+ false ->
+ exit({error,{asn1,{encode_enumerated,Value}}})
+ end
+ end;
+
+encode_enumerated(C,Value,NamedNumberList) when list(NamedNumberList) ->
+ case enum_search(Value,NamedNumberList,0) of
+ NewV when integer(NewV) ->
+ encode_integer(C,NewV);
+ false ->
+ exit({error,{asn1,{encode_enumerated,Value}}})
+ end.
+
+%% returns the ordinal number (0, 1, ...) of Name in the list,
+%% or false if Name is not found
+%%
+enum_search(Name,[Name|_NamedNumberList],Acc) ->
+ Acc;
+enum_search(Name,[_H|T],Acc) ->
+ enum_search(Name,T,Acc+1);
+enum_search(_,[],_) ->
+ false. % name not found !error
+
+%% ENUMERATED with extension marker
+decode_enumerated(Buffer,C,{Ntup1,Ntup2}) when tuple(Ntup1), tuple(Ntup2) ->
+ {Ext,Buffer2} = getext(Buffer),
+ case Ext of
+ 0 -> % not an extension value
+ {Val,Buffer3} = decode_integer(Buffer2,C),
+ case catch (element(Val+1,Ntup1)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer3};
+ _Error -> exit({error,{asn1,{decode_enumerated,{Val,[Ntup1,Ntup2]}}}})
+ end;
+ 1 -> % this an extension value
+ {Val,Buffer3} = decode_small_number(Buffer2),
+ case catch (element(Val+1,Ntup2)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer3};
+ _ -> {{asn1_enum,Val},Buffer3}
+ end
+ end;
+
+decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) ->
+ {Val,Buffer2} = decode_integer(Buffer,C),
+ case catch (element(Val+1,NamedNumberTup)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer2};
+ _Error -> exit({error,{asn1,{decode_enumerated,{Val,NamedNumberTup}}}})
+ end.
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Bitstring value, ITU_T X.690 Chapter 8.5
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+%%===============================================================================
+%% encode bitstring value
+%%===============================================================================
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% bitstring NamedBitList
+%% Val can be one of:
+%% - [identifiers] where only named identifiers are set to one;
+%% the Constraint must then carry some information about the
+%% bit length.
+%% - [list of ones and zeroes] all bits
+%% - integer value representing the bitlist
+%% C is the constraint length, only relevant when identifiers are used
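+%% For example, with NamedBitList [{a,0},{b,1},{c,2}] the value [a,c] is
+%% first expanded to the bit list [1,0,1] and then encoded like any other
+%% list of ones and zeroes.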
+
+%% when the value is a list of named bits
+encode_bit_string(C, [FirstVal | RestVal], NamedBitList) when atom(FirstVal) ->
+ ToSetPos = get_all_bitposes([FirstVal | RestVal], NamedBitList, []),
+ BitList = make_and_set_list(ToSetPos,0),
+ encode_bit_string(C,BitList,NamedBitList);
+
+encode_bit_string(C, [{bit,No} | RestVal], NamedBitList) ->
+ ToSetPos = get_all_bitposes([{bit,No} | RestVal], NamedBitList, []),
+ BitList = make_and_set_list(ToSetPos,0),
+ encode_bit_string(C,BitList,NamedBitList);
+
+%% when the value is a list of ones and zeroes
+
+encode_bit_string(C, BitListValue, _NamedBitList) when list(BitListValue) ->
+ %% first remove any trailing zeroes
+ Bl1 = lists:dropwhile(fun(0)->true;(1)->false end,lists:reverse(BitListValue)),
+ BitList = [{bit,X} || X <- lists:reverse(Bl1)],
+ case get_constraint(C,'SizeConstraint') of
+ 0 -> % fixed length
+ []; % nothing to encode
+ V when integer(V),V=<16 -> % fixed length 16 bits or less
+ pad_list(V,BitList);
+ V when integer(V) -> % fixed length more than 16 bits
+ [align,pad_list(V,BitList)];
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ [encode_length({Lb,Ub},length(BitList)),align,BitList];
+ no ->
+ [encode_length(undefined,length(BitList)),align,BitList]
+ end;
+
+%% when the value is an integer
+encode_bit_string(C, IntegerVal, NamedBitList) ->
+ BitList = int_to_bitlist(IntegerVal),
+ encode_bit_string(C,BitList,NamedBitList).
+
+
+
+
+%%%%%%%%%%%%%%%
+%% The result is presented as a list of named bits (if possible)
+%% else as a list of 0 and 1.
+%%
+decode_bit_string(Buffer, C, NamedNumberList) ->
+ case get_constraint(C,'SizeConstraint') of
+ 0 -> % fixed length
+ {[],Buffer}; % nothing to encode
+ V when integer(V),V=<16 -> % fixed length 16 bits or less
+ bit_list_to_named(Buffer,V,NamedNumberList);
+ V when integer(V) -> % fixed length more than 16 bits
+ Bytes2 = align(Buffer),
+ bit_list_to_named(Bytes2,V,NamedNumberList);
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ {Len,Bytes2} = decode_length(Buffer,{Lb,Ub}),
+ Bytes3 = align(Bytes2),
+ bit_list_to_named(Bytes3,Len,NamedNumberList);
+ no ->
+ {Len,Bytes2} = decode_length(Buffer,undefined),
+ Bytes3 = align(Bytes2),
+ bit_list_to_named(Bytes3,Len,NamedNumberList)
+ end.
+
+%% if no named bits are declared we will return a
+%% BitList = [0 | 1]
+
+bit_list_to_named(Buffer,Len,[]) ->
+ getbits_as_list(Len,Buffer);
+
+%% if there are named bits declared we will return a named
+%% BitList where the names are atoms and unnamed bits represented
+%% as {bit,Pos}
+%% BitList = [atom() | {bit,Pos}]
+%% Pos = integer()
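+%% For example, the bits 1,1,0 with NamedNumberList [{a,0},{c,2}] decode to
+%% [a,{bit,1}]: position 0 maps to the name a, the set but unnamed bit at
+%% position 1 becomes {bit,1}, and the cleared bit at position 2 is dropped.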
+
+bit_list_to_named(Buffer,Len,NamedNumberList) ->
+ {BitList,Rest} = getbits_as_list(Len,Buffer),
+ {bit_list_to_named1(0,BitList,NamedNumberList,[]), Rest}.
+
+bit_list_to_named1(Pos,[0|Bt],Names,Acc) ->
+ bit_list_to_named1(Pos+1,Bt,Names,Acc);
+bit_list_to_named1(Pos,[1|Bt],Names,Acc) ->
+ case lists:keysearch(Pos,2,Names) of
+ {value,{Name,_}} ->
+ bit_list_to_named1(Pos+1,Bt,Names,[Name|Acc]);
+ _ ->
+ bit_list_to_named1(Pos+1,Bt,Names,[{bit,Pos}|Acc])
+ end;
+bit_list_to_named1(_Pos,[],_Names,Acc) ->
+ lists:reverse(Acc).
+
+
+
+%%%%%%%%%%%%%%%
+%%
+
+int_to_bitlist(0) ->
+ [];
+int_to_bitlist(Int) when integer(Int), Int >= 0 ->
+ [Int band 1 | int_to_bitlist(Int bsr 1)].
+
+
+%%%%%%%%%%%%%%%%%%
+%% get_all_bitposes([list of named bits to set], named_bit_db, []) ->
+%% [sorted_list_of_bitpositions_to_set]
+
+get_all_bitposes([{bit,ValPos}|Rest], NamedBitList, Ack) ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack ]);
+
+get_all_bitposes([Val | Rest], NamedBitList, Ack) ->
+ case lists:keysearch(Val, 1, NamedBitList) of
+ {value, {_ValName, ValPos}} ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack]);
+ _ ->
+ exit({error,{asn1, {bitstring_namedbit, Val}}})
+ end;
+get_all_bitposes([], _NamedBitList, Ack) ->
+ lists:sort(Ack).
+
+%%%%%%%%%%%%%%%%%%
+%% make_and_set_list([list of positions to set to 1],StartPos) ->
+%% returns a list with a 1 at every position given in SetPos and 0
+%% elsewhere; positions are counted upwards from StartPos (the callers
+%% start at 0).
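+%% For example, make_and_set_list([1,3],0) returns [0,1,0,1].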
+
+make_and_set_list([XPos|SetPos], XPos) ->
+ [1 | make_and_set_list(SetPos, XPos + 1)];
+make_and_set_list([Pos|SetPos], XPos) ->
+ [0 | make_and_set_list([Pos | SetPos], XPos + 1)];
+make_and_set_list([], _) ->
+ [].
+
+%%%%%%%%%%%%%%%%%
+%% pad_list(N,BitList) -> PaddedList
+%% returns BitList padded with trailing {bit,0} elements to length N;
+%% if BitList contains more than N significant bits an exit with an
+%% asn1 range error is generated
+
+pad_list(0,BitList) ->
+ case BitList of
+ [] -> [];
+ _ -> exit({error,{asn1,{range_error,{bit_string,BitList}}}})
+ end;
+pad_list(N,[Bh|Bt]) ->
+ [Bh|pad_list(N-1,Bt)];
+pad_list(N,[]) ->
+ [{bit,0},pad_list(N-1,[])].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% X.691:16
+%% encode_octet_string(Constraint,ExtensionMarker,Val)
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+encode_octet_string(C,{Name,Val}) when atom(Name) ->
+ encode_octet_string(C,false,Val);
+encode_octet_string(C,Val) ->
+ encode_octet_string(C,false,Val).
+
+encode_octet_string(_C,true,_Val) ->
+ exit({error,{asn1,{'not_supported',extensionmarker}}});
+encode_octet_string(C,false,Val) ->
+ case get_constraint(C,'SizeConstraint') of
+ 0 ->
+ [];
+ 1 ->
+ [V] = Val,
+ {bits,8,V};
+ 2 ->
+ [V1,V2] = Val,
+ [{bits,8,V1},{bits,8,V2}];
+ Sv when Sv =<65535, Sv == length(Val) -> % fixed length
+ [align,{octets,Val}];
+ {Lb,Ub} ->
+ [encode_length({Lb,Ub},length(Val)),align,
+ {octets,Val}];
+ Sv when list(Sv) ->
+ [encode_length({hd(Sv),lists:max(Sv)},length(Val)),align,
+ {octets,Val}];
+ no ->
+ [encode_length(undefined,length(Val)),align,
+ {octets,Val}]
+ end.
+
+decode_octet_string(Bytes,Range) ->
+ decode_octet_string(Bytes,Range,false).
+
+decode_octet_string(Bytes,C,false) ->
+ case get_constraint(C,'SizeConstraint') of
+ 0 ->
+ {[],Bytes};
+ 1 ->
+ {B1,Bytes2} = getbits(Bytes,8),
+ {[B1],Bytes2};
+ 2 ->
+ {B1,Bytes2}= getbits(Bytes,8),
+ {B2,Bytes3}= getbits(Bytes2,8),
+ {[B1,B2],Bytes3};
+ {_,0} ->
+ {[],Bytes};
+ Sv when integer(Sv), Sv =<65535 -> % fixed length
+ Bytes2 = align(Bytes),
+ getoctets_as_list(Bytes2,Sv);
+ {Lb,Ub} ->
+ {Len,Bytes2} = decode_length(Bytes,{Lb,Ub}),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len);
+ Sv when list(Sv) ->
+ {Len,Bytes2} = decode_length(Bytes,{hd(Sv),lists:max(Sv)}),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len);
+ no ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Restricted char string types
+%% (NumericString, PrintableString,VisibleString,IA5String,BMPString,UniversalString)
+%% X.691:26 and X.680:34-36
+%%encode_restricted_string(aligned,'BMPString',Constraints,Extension,Val)
+
+encode_restricted_string(aligned,StringType,C,Val) ->
+ encode_restricted_string(aligned,StringType,C,false,Val).
+
+
+encode_restricted_string(aligned,StringType,C,_Ext,{Name,Val}) when atom(Name) ->
+ encode_restricted_string(aligned,StringType,C,false,Val);
+encode_restricted_string(aligned,StringType,C,_Ext,Val) ->
+ Result = chars_encode(C,StringType,Val),
+ NumBits = get_NumBits(C,StringType),
+ case get_constraint(C,'SizeConstraint') of
+ Ub when integer(Ub), Ub*NumBits =< 16 ->
+ case {StringType,Result} of
+ {'BMPString',{octets,Ol}} ->
+ [{bits,8,Oct}||Oct <- Ol];
+ _ ->
+ Result
+ end;
+ 0 ->
+ [];
+ Ub when integer(Ub),Ub =<65535 -> % fixed length
+ [align,Result];
+ {Lb,Ub} ->
+ [encode_length({Lb,Ub},length(Val)),align,Result];
+ Vl when list(Vl) ->
+ [encode_length({lists:min(Vl),lists:max(Vl)},length(Val)),align,Result];
+ no ->
+ [encode_length(undefined,length(Val)),align,Result]
+ end.
+
+decode_restricted_string(Bytes,aligned,StringType,C) ->
+ decode_restricted_string(Bytes,aligned,StringType,C,false).
+
+decode_restricted_string(Bytes,aligned,StringType,C,_Ext) ->
+ NumBits = get_NumBits(C,StringType),
+ case get_constraint(C,'SizeConstraint') of
+ Ub when integer(Ub), Ub*NumBits =< 16 ->
+ chars_decode(Bytes,NumBits,StringType,C,Ub);
+ Ub when integer(Ub),Ub =<65535 -> % fixed length
+ Bytes1 = align(Bytes),
+ chars_decode(Bytes1,NumBits,StringType,C,Ub);
+ 0 ->
+ {[],Bytes};
+ Vl when list(Vl) ->
+ {Len,Bytes1} = decode_length(Bytes,{hd(Vl),lists:max(Vl)}),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,C,Len);
+ no ->
+ {Len,Bytes1} = decode_length(Bytes,undefined),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,C,Len);
+ {Lb,Ub}->
+ {Len,Bytes1} = decode_length(Bytes,{Lb,Ub}),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,C,Len)
+ end.
+
+
+
+encode_BMPString(C,Val) ->
+ encode_restricted_string(aligned,'BMPString',C,false,Val).
+decode_BMPString(Bytes,C) ->
+ decode_restricted_string(Bytes,aligned,'BMPString',C,false).
+
+encode_GeneralString(C,Val) ->
+ encode_restricted_string(aligned,'GeneralString',C,false,Val).
+decode_GeneralString(Bytes,C) ->
+ decode_restricted_string(Bytes,aligned,'GeneralString',C,false).
+
+encode_GraphicString(C,Val) ->
+ encode_restricted_string(aligned,'GraphicString',C,false,Val).
+decode_GraphicString(Bytes,C) ->
+ decode_restricted_string(Bytes,aligned,'GraphicString',C,false).
+
+encode_IA5String(C,Val) ->
+ encode_restricted_string(aligned,'IA5String',C,false,Val).
+decode_IA5String(Bytes,C) ->
+ decode_restricted_string(Bytes,aligned,'IA5String',C,false).
+
+encode_NumericString(C,Val) ->
+ encode_restricted_string(aligned,'NumericString',C,false,Val).
+decode_NumericString(Bytes,C) ->
+ decode_restricted_string(Bytes,aligned,'NumericString',C,false).
+
+encode_PrintableString(C,Val) ->
+ encode_restricted_string(aligned,'PrintableString',C,false,Val).
+decode_PrintableString(Bytes,C) ->
+ decode_restricted_string(Bytes,aligned,'PrintableString',C,false).
+
+encode_TeletexString(C,Val) -> % equivalent with T61String
+ encode_restricted_string(aligned,'TeletexString',C,false,Val).
+decode_TeletexString(Bytes,C) ->
+ decode_restricted_string(Bytes,aligned,'TeletexString',C,false).
+
+encode_UniversalString(C,Val) ->
+ encode_restricted_string(aligned,'UniversalString',C,false,Val).
+decode_UniversalString(Bytes,C) ->
+ decode_restricted_string(Bytes,aligned,'UniversalString',C,false).
+
+encode_VideotexString(C,Val) ->
+ encode_restricted_string(aligned,'VideotexString',C,false,Val).
+decode_VideotexString(Bytes,C) ->
+ decode_restricted_string(Bytes,aligned,'VideotexString',C,false).
+
+encode_VisibleString(C,Val) -> % equivalent with ISO646String
+ encode_restricted_string(aligned,'VisibleString',C,false,Val).
+decode_VisibleString(Bytes,C) ->
+ decode_restricted_string(Bytes,aligned,'VisibleString',C,false).
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% getBMPChars(Bytes,Len) ->{BMPcharList,RemainingBytes}
+%%
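+%% For example, the four octets 0,$A,3,$B decoded with Len = 2 give
+%% [$A,{3,$B}]: a character with a zero high octet is returned as a plain
+%% byte, any other character as {HighOctet,LowOctet}.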
+getBMPChars(Bytes,1) ->
+ {O1,Bytes2} = getbits(Bytes,8),
+ {O2,Bytes3} = getbits(Bytes2,8),
+ if
+ O1 == 0 ->
+ {[O2],Bytes3};
+ true ->
+ {[{O1,O2}],Bytes3}
+ end;
+getBMPChars(Bytes,Len) ->
+ getBMPChars(Bytes,Len,[]).
+
+getBMPChars(Bytes,0,Acc) ->
+ {lists:reverse(Acc),Bytes};
+getBMPChars(Bytes,Len,Acc) ->
+ {Octs,Bytes1} = getoctets_as_list(Bytes,2),
+ case Octs of
+ [0,O2] ->
+ getBMPChars(Bytes1,Len-1,[O2|Acc]);
+ [O1,O2]->
+ getBMPChars(Bytes1,Len-1,[{O1,O2}|Acc])
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% chars_encode(C,StringType,Value) -> ValueList
+%%
+%% encodes chars according to the PER rules, taking the PermittedAlphabet
+%% constraint into account.
+%% This function only encodes the value part, NOT the length.
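+%% For example, an IA5String without a PermittedAlphabet constraint uses
+%% 8 bits per character and no translation table, so the value "Hi" is
+%% encoded as [{bits,8,$H},{bits,8,$i}].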
+
+chars_encode(C,StringType,Value) ->
+ case {StringType,get_constraint(C,'PermittedAlphabet')} of
+ {'UniversalString',{_,_Sv}} ->
+ exit({error,{asn1,{'not implemented',"UniversalString with PermittedAlphabet constraint"}}});
+ {'BMPString',{_,_Sv}} ->
+ exit({error,{asn1,{'not implemented',"BMPString with PermittedAlphabet constraint"}}});
+ _ ->
+ {NumBits,CharOutTab} = {get_NumBits(C,StringType),get_CharOutTab(C,StringType)},
+ chars_encode2(Value,NumBits,CharOutTab)
+ end.
+
+chars_encode2([H|T],NumBits,{Min,Max,notab}) when H =< Max, H >= Min ->
+ [{bits,NumBits,H-Min}|chars_encode2(T,NumBits,{Min,Max,notab})];
+chars_encode2([H|T],NumBits,{Min,Max,Tab}) when H =< Max, H >= Min ->
+ [{bits,NumBits,element(H-Min+1,Tab)}|chars_encode2(T,NumBits,{Min,Max,Tab})];
+chars_encode2([{A,B,C,D}|T],NumBits,{Min,Max,notab}) ->
+ %% no value range check here (ought to be, but very expensive)
+ [{bits,NumBits,(A*B*C*D)-Min}|chars_encode2(T,NumBits,{Min,Max,notab})];
+chars_encode2([{A,B,C,D}|T],NumBits,{Min,Max,Tab}) ->
+ %% no value range check here (ought to be, but very expensive)
+ [{bits,NumBits,element((A*B*C*D)-Min,Tab)}|chars_encode2(T,NumBits,{Min,Max,notab})];
+chars_encode2([H|_T],_NumBits,{_Min,_Max,_Tab}) ->
+ exit({error,{asn1,{illegal_char_value,H}}});
+chars_encode2([],_,_) ->
+ [].
+
+
+get_NumBits(C,StringType) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ {'SingleValue',Sv} ->
+ charbits(length(Sv),aligned);
+ no ->
+ case StringType of
+ 'GeneralString' ->
+ exit({error,{asn1,{'not implemented','GeneralString'}}});
+ 'GraphicString' ->
+ exit({error,{asn1,{'not implemented','GraphicString'}}});
+ 'TeletexString' ->
+ exit({error,{asn1,{'not implemented','TeletexString'}}});
+ 'VideotexString' ->
+ exit({error,{asn1,{'not implemented','VideotexString'}}});
+ 'IA5String' ->
+ charbits(128,aligned); % 16#00..16#7F
+ 'VisibleString' ->
+ charbits(95,aligned); % 16#20..16#7E
+ 'PrintableString' ->
+ charbits(74,aligned); % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+ 'NumericString' ->
+ charbits(11,aligned); % $ ,"0123456789"
+ 'UniversalString' ->
+ 32;
+ 'BMPString' ->
+ 16
+ end
+ end.
+
+%%Maybe used later
+%%get_MaxChar(C,StringType) ->
+%% case get_constraint(C,'PermittedAlphabet') of
+%% {'SingleValue',Sv} ->
+%% lists:nth(length(Sv),Sv);
+%% no ->
+%% case StringType of
+%% 'IA5String' ->
+%% 16#7F; % 16#00..16#7F
+%% 'VisibleString' ->
+%% 16#7E; % 16#20..16#7E
+%% 'PrintableString' ->
+%% $z; % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+%% 'NumericString' ->
+%% $9; % $ ,"0123456789"
+%% 'UniversalString' ->
+%% 16#ffffffff;
+%% 'BMPString' ->
+%% 16#ffff
+%% end
+%% end.
+
+%%Maybe used later
+%%get_MinChar(C,StringType) ->
+%% case get_constraint(C,'PermittedAlphabet') of
+%% {'SingleValue',Sv} ->
+%% hd(Sv);
+%% no ->
+%% case StringType of
+%% 'IA5String' ->
+%% 16#00; % 16#00..16#7F
+%% 'VisibleString' ->
+%% 16#20; % 16#20..16#7E
+%% 'PrintableString' ->
+%% $\s; % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+%% 'NumericString' ->
+%% $\s; % $ ,"0123456789"
+%% 'UniversalString' ->
+%% 16#00;
+%% 'BMPString' ->
+%% 16#00
+%% end
+%% end.
+
+get_CharOutTab(C,StringType) ->
+ get_CharTab(C,StringType,out).
+
+get_CharInTab(C,StringType) ->
+ get_CharTab(C,StringType,in).
+
+get_CharTab(C,StringType,InOut) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ {'SingleValue',Sv} ->
+ get_CharTab2(C,StringType,hd(Sv),lists:max(Sv),Sv,InOut);
+ no ->
+ case StringType of
+ 'IA5String' ->
+ {0,16#7F,notab};
+ 'VisibleString' ->
+ get_CharTab2(C,StringType,16#20,16#7F,notab,InOut);
+ 'PrintableString' ->
+ Chars = lists:sort(
+ " '()+,-./0123456789:=?ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"),
+ get_CharTab2(C,StringType,hd(Chars),lists:max(Chars),Chars,InOut);
+ 'NumericString' ->
+ get_CharTab2(C,StringType,16#20,$9," 0123456789",InOut);
+ 'UniversalString' ->
+ {0,16#FFFFFFFF,notab};
+ 'BMPString' ->
+ {0,16#FFFF,notab}
+ end
+ end.
+
+get_CharTab2(C,StringType,Min,Max,Chars,InOut) ->
+ BitValMax = (1 bsl get_NumBits(C,StringType))-1,
+ if
+ Max =< BitValMax ->
+ {0,Max,notab};
+ true ->
+ case InOut of
+ out ->
+ {Min,Max,create_char_tab(Min,Chars)};
+ in ->
+ {Min,Max,list_to_tuple(Chars)}
+ end
+ end.
+
+create_char_tab(Min,L) ->
+ list_to_tuple(create_char_tab(Min,L,0)).
+create_char_tab(Min,[Min|T],V) ->
+ [V|create_char_tab(Min+1,T,V+1)];
+create_char_tab(_Min,[],_V) ->
+ [];
+create_char_tab(Min,L,V) ->
+ [false|create_char_tab(Min+1,L,V)].
+
+%% This is very inefficient and should be moved to compile time
+charbits(NumOfChars,aligned) ->
+ case charbits(NumOfChars) of
+ 1 -> 1;
+ 2 -> 2;
+ B when B > 2, B =< 4 -> 4;
+ B when B > 4, B =< 8 -> 8;
+ B when B > 8, B =< 16 -> 16;
+ B when B > 16, B =< 32 -> 32
+ end.
+
+charbits(NumOfChars) when NumOfChars =< 2 -> 1;
+charbits(NumOfChars) when NumOfChars =< 4 -> 2;
+charbits(NumOfChars) when NumOfChars =< 8 -> 3;
+charbits(NumOfChars) when NumOfChars =< 16 -> 4;
+charbits(NumOfChars) when NumOfChars =< 32 -> 5;
+charbits(NumOfChars) when NumOfChars =< 64 -> 6;
+charbits(NumOfChars) when NumOfChars =< 128 -> 7;
+charbits(NumOfChars) when NumOfChars =< 256 -> 8;
+charbits(NumOfChars) when NumOfChars =< 512 -> 9;
+charbits(NumOfChars) when NumOfChars =< 1024 -> 10;
+charbits(NumOfChars) when NumOfChars =< 2048 -> 11;
+charbits(NumOfChars) when NumOfChars =< 4096 -> 12;
+charbits(NumOfChars) when NumOfChars =< 8192 -> 13;
+charbits(NumOfChars) when NumOfChars =< 16384 -> 14;
+charbits(NumOfChars) when NumOfChars =< 32768 -> 15;
+charbits(NumOfChars) when NumOfChars =< 65536 -> 16;
+charbits(NumOfChars) when integer(NumOfChars) ->
+ 16 + charbits1(NumOfChars bsr 16).
+
+charbits1(0) ->
+ 0;
+charbits1(NumOfChars) ->
+ 1 + charbits1(NumOfChars bsr 1).
+
+
+chars_decode(Bytes,_,'BMPString',C,Len) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ no ->
+ getBMPChars(Bytes,Len);
+ _ ->
+ exit({error,{asn1,
+ {'not implemented',
+ "BMPString with PermittedAlphabet constraint"}}})
+ end;
+chars_decode(Bytes,NumBits,StringType,C,Len) ->
+ CharInTab = get_CharInTab(C,StringType),
+ chars_decode2(Bytes,CharInTab,NumBits,Len).
+
+
+chars_decode2(Bytes,CharInTab,NumBits,Len) ->
+ chars_decode2(Bytes,CharInTab,NumBits,Len,[]).
+
+chars_decode2(Bytes,_CharInTab,_NumBits,0,Acc) ->
+ {lists:reverse(Acc),Bytes};
+chars_decode2(Bytes,{Min,Max,notab},NumBits,Len,Acc) when NumBits > 8 ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ Result = case minimum_octets(Char+Min) of
+ [NewChar] -> NewChar;
+ [C1,C2] -> {0,0,C1,C2};
+ [C1,C2,C3] -> {0,C1,C2,C3};
+ [C1,C2,C3,C4] -> {C1,C2,C3,C4}
+ end,
+ chars_decode2(Bytes2,{Min,Max,notab},NumBits,Len -1,[Result|Acc]);
+chars_decode2(Bytes,{Min,Max,notab},NumBits,Len,Acc) ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ chars_decode2(Bytes2,{Min,Max,notab},NumBits,Len -1,[Char+Min|Acc]);
+
+%% BMPString and UniversalString with PermittedAlphabet is currently not supported
+chars_decode2(Bytes,{Min,Max,CharInTab},NumBits,Len,Acc) ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ chars_decode2(Bytes2,{Min,Max,CharInTab},NumBits,Len -1,[element(Char+1,CharInTab)|Acc]).
+
+
+% X.691:17
+encode_null({Name,Val}) when atom(Name) ->
+ encode_null(Val);
+encode_null(_) -> []. % encodes to nothing
+
+decode_null(Bytes) ->
+ {'NULL',Bytes}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_object_identifier(Val) -> CompleteList
+%% encode_object_identifier({Name,Val}) -> CompleteList
+%% Val -> {Int1,Int2,...,IntN} % N >= 2
+%% Name -> atom()
+%% Int1 -> integer(0..2)
+%% Int2 -> integer(0..39) when Int1 (0..1) else integer()
+%% Int3-N -> integer()
+%% CompleteList -> [{bits,8,Val}|{octets,Ol}|align|...]
+%%
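+%% For example, the object identifier {2,5,4,3} packs the first two arcs
+%% into the subidentifier 40*2+5 = 85, giving the octets [85,4,3]; the
+%% complete encoding is a length octet followed by these, i.e. [3,85,4,3].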
+encode_object_identifier(Val) ->
+ Octets = e_object_identifier(Val,notag),
+ [{debug,object_identifier},encode_length(undefined,length(Octets)),{octets,Octets}].
+
+%% This code is copied from asn1_encode.erl (BER) and corrected and modified
+
+e_object_identifier({'OBJECT IDENTIFIER',V},DoTag) ->
+ e_object_identifier(V,DoTag);
+e_object_identifier({Cname,V},DoTag) when atom(Cname),tuple(V) ->
+ e_object_identifier(tuple_to_list(V),DoTag);
+e_object_identifier({Cname,V},DoTag) when atom(Cname),list(V) ->
+ e_object_identifier(V,DoTag);
+e_object_identifier(V,DoTag) when tuple(V) ->
+ e_object_identifier(tuple_to_list(V),DoTag);
+
+% E1 = 0|1|2 and (E2 < 40 when E1 = 0|1)
+e_object_identifier([E1,E2|Tail],_DoTag) when E1 =< 2 ->
+ Head = 40*E1 + E2, % weird
+ Res = e_object_elements([Head|Tail]),
+% dotag(DoTag,[6],elength(length(Res)+1),[Head|Res]),
+ Res.
+
+e_object_elements([]) ->
+ [];
+e_object_elements([H|T]) ->
+ lists:append(e_object_element(H),e_object_elements(T)).
+
+e_object_element(Num) when Num < 128 ->
+ [Num];
+% must be changed to handle more than 2 octets
+e_object_element(Num) -> %% when Num < ???
+ Left = ((Num band 2#11111110000000) bsr 7) bor 2#10000000,
+ Right = Num band 2#1111111 ,
+ [Left,Right].
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_object_identifier(Bytes) -> {ObjId,RemainingBytes}
+%% ObjId -> {integer(),integer(),...} % at least 2 integers
+%% RemainingBytes -> [integer()] when integer() (0..255)
+decode_object_identifier(Bytes) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ [First|Rest] = dec_subidentifiers(Octs,0,[]),
+ Idlist = if
+ First < 40 ->
+ [0,First|Rest];
+ First < 80 ->
+ [1,First - 40|Rest];
+ true ->
+ [2,First - 80|Rest]
+ end,
+ {list_to_tuple(Idlist),Bytes3}.
+
+dec_subidentifiers([H|T],Av,Al) when H >=16#80 ->
+ dec_subidentifiers(T,(Av bsl 7) + (H band 16#7F),Al);
+dec_subidentifiers([H|T],Av,Al) ->
+ dec_subidentifiers(T,0,[(Av bsl 7) + H |Al]);
+dec_subidentifiers([],_Av,Al) ->
+ lists:reverse(Al).
+
+get_constraint(C,Key) ->
+ case lists:keysearch(Key,1,C) of
+ false ->
+ no;
+ {value,{_,V}} ->
+ V
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% complete(InList) -> ByteList
+%% Takes a coded list with bits and bytes and converts it to a list of bytes
+%% Should be applied as the last step at encode of a complete ASN.1 type
+%%
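+%% For example, complete([{bits,2,2#10},{bits,6,2#000001}]) packs both bit
+%% fields into one octet and returns [2#10000001] = [129]; a final partial
+%% octet is padded with zero bits.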
+complete(InList) when list(InList) ->
+ complete(InList,[],0);
+complete(InList) ->
+ complete([InList],[],0).
+
+complete([{debug,_}|T], Acc, Acclen) ->
+ complete(T,Acc,Acclen);
+complete([H|T],Acc,Acclen) when list(H) ->
+ complete(lists:concat([H,T]),Acc,Acclen);
+
+
+complete([{octets,N,Val}|T],Acc,Acclen) when N =< 4 ,integer(Val) ->
+ Newval = case N of
+ 1 ->
+ Val4 = Val band 16#FF,
+ [Val4];
+ 2 ->
+ Val3 = (Val bsr 8) band 16#FF,
+ Val4 = Val band 16#FF,
+ [Val3,Val4];
+ 3 ->
+ Val2 = (Val bsr 16) band 16#FF,
+ Val3 = (Val bsr 8) band 16#FF,
+ Val4 = Val band 16#FF,
+ [Val2,Val3,Val4];
+ 4 ->
+ Val1 = (Val bsr 24) band 16#FF,
+ Val2 = (Val bsr 16) band 16#FF,
+ Val3 = (Val bsr 8) band 16#FF,
+ Val4 = Val band 16#FF,
+ [Val1,Val2,Val3,Val4]
+ end,
+ complete([{octets,Newval}|T],Acc,Acclen);
+
+complete([{octets,Oct}|T],[],_Acclen) when list(Oct) ->
+ complete(T,lists:reverse(Oct),0);
+complete([{octets,Oct}|T],[Hacc|Tacc],Acclen) when list(Oct) ->
+ Rest = 8 - Acclen,
+ if
+ Rest == 8 ->
+ complete(T,lists:concat([lists:reverse(Oct),[Hacc|Tacc]]),0);
+ true ->
+ complete(T,lists:concat([lists:reverse(Oct),[Hacc bsl Rest|Tacc]]),0)
+ end;
+
+complete([{bit,Val}|T], Acc, Acclen) ->
+ complete([{bits,1,Val}|T],Acc,Acclen);
+complete([{octet,Val}|T], Acc, Acclen) ->
+ complete([{octets,1,Val}|T],Acc,Acclen);
+
+complete([{bits,N,Val}|T], Acc, 0) when N =< 8 ->
+ complete(T,[Val|Acc],N);
+complete([{bits,N,Val}|T], [Hacc|Tacc], Acclen) when N =< 8 ->
+ Rest = 8 - Acclen,
+ if
+ Rest >= N ->
+ complete(T,[(Hacc bsl N) + Val|Tacc],(Acclen+N) rem 8);
+ true ->
+ Diff = N - Rest,
+ NewHacc = (Hacc bsl Rest) + (Val bsr Diff),
+ Mask = element(Diff,{1,3,7,15,31,63,127,255}),
+ complete(T,[(Val band Mask),NewHacc|Tacc],(Acclen+N) rem 8)
+ end;
+complete([{bits,N,Val}|T], Acc, Acclen) -> % N > 8
+ complete([{bits,N-8,Val bsr 8},{bits,8,Val band 255}|T],Acc,Acclen);
+
+complete([align|T],Acc,0) ->
+ complete(T,Acc,0);
+complete([align|T],[Hacc|Tacc],Acclen) ->
+ Rest = 8 - Acclen,
+ complete(T,[Hacc bsl Rest|Tacc],0);
+complete([{octets,_N,Val}|T],Acc,Acclen) when list(Val) -> % no security check here
+ complete([{octets,Val}|T],Acc,Acclen);
+complete([],Acc,0) ->
+ lists:reverse(Acc);
+complete([],[Hacc|Tacc],Acclen) when Acclen > 0->
+ Rest = 8 - Acclen,
+ NewHacc = Hacc bsl Rest,
+ lists:reverse([NewHacc|Tacc]).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl
new file mode 100644
index 0000000000..8b4512b58e
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl
@@ -0,0 +1,2176 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1rt_per_bin.erl,v 1.1 2008/12/17 09:53:31 mikpe Exp $
+%%
+-module(asn1rt_per_bin).
+
+%% encoding / decoding of PER aligned
+
+-include("asn1_records.hrl").
+
+-export([dec_fixup/3, cindex/3, list_to_record/2]).
+-export([setchoiceext/1, setext/1, fixoptionals/2, fixoptionals/3,
+ fixextensions/2,
+ getext/1, getextension/2, skipextensions/3, getbit/1, getchoice/3 ]).
+-export([getoptionals/2, getoptionals2/2, set_choice/3, encode_integer/2, encode_integer/3 ]).
+-export([decode_integer/2, decode_integer/3, encode_small_number/1, encode_boolean/1,
+ decode_boolean/1, encode_length/2, decode_length/1, decode_length/2,
+ encode_small_length/1, decode_small_length/1,
+ decode_compact_bit_string/3]).
+-export([decode_enumerated/3,
+ encode_bit_string/3, decode_bit_string/3 ]).
+-export([encode_octet_string/2, decode_octet_string/2,
+ encode_null/1, decode_null/1,
+ encode_object_identifier/1, decode_object_identifier/1,
+ complete/1]).
+
+
+-export([encode_open_type/2, decode_open_type/2]).
+
+-export([encode_UniversalString/2, decode_UniversalString/2,
+ encode_PrintableString/2, decode_PrintableString/2,
+ encode_GeneralString/2, decode_GeneralString/2,
+ encode_GraphicString/2, decode_GraphicString/2,
+ encode_TeletexString/2, decode_TeletexString/2,
+ encode_VideotexString/2, decode_VideotexString/2,
+ encode_VisibleString/2, decode_VisibleString/2,
+ encode_BMPString/2, decode_BMPString/2,
+ encode_IA5String/2, decode_IA5String/2,
+ encode_NumericString/2, decode_NumericString/2,
+ encode_ObjectDescriptor/2, decode_ObjectDescriptor/1
+ ]).
+-export([complete_bytes/1]).
+
+-define('16K',16384).
+-define('32K',32768).
+-define('64K',65536).
+
+dec_fixup(Terms,Cnames,RemBytes) ->
+ dec_fixup(Terms,Cnames,RemBytes,[]).
+
+dec_fixup([novalue|T],[_Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,Acc);
+dec_fixup([{_Name,novalue}|T],[_Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,Acc);
+dec_fixup([H|T],[Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,[{Hc,H}|Acc]);
+dec_fixup([],_Cnames,RemBytes,Acc) ->
+ {lists:reverse(Acc),RemBytes}.
+
+cindex(Ix,Val,Cname) ->
+ case element(Ix,Val) of
+ {Cname,Val2} -> Val2;
+ X -> X
+ end.
+
+%% converts a list to a record if necessary
+list_to_record(_Name,Tuple) when tuple(Tuple) ->
+ Tuple;
+list_to_record(Name,List) when list(List) ->
+ list_to_tuple([Name|List]).
+
+%%--------------------------------------------------------
+%% setchoiceext(InRootSet) -> [{bits,1,X}]
+%% X is set to 1 when InRootSet==false
+%% X is set to 0 when InRootSet==true
+%%
+setchoiceext(true) ->
+ [{debug,choiceext},{bits,1,0}];
+setchoiceext(false) ->
+ [{debug,choiceext},{bits,1,1}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% setext(true|false) -> CompleteList
+%%
+
+setext(false) ->
+ [{debug,ext},{bits,1,0}];
+setext(true) ->
+ [{debug,ext},{bits,1,1}].
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% This version of fixoptionals/2 is left only because of
+%% backward compatibility with older generates
+
+fixoptionals(OptList,Val) when tuple(Val) ->
+ fixoptionals1(OptList,Val,[]);
+
+fixoptionals(OptList,Val) when list(Val) ->
+ fixoptionals1(OptList,Val,1,[],[]).
+
+fixoptionals1([],Val,Acc) ->
+ %% return {Val,Opt}
+ {Val,lists:reverse(Acc)};
+fixoptionals1([{_,Pos}|Ot],Val,Acc) ->
+ case element(Pos+1,Val) of
+ asn1_NOVALUE -> fixoptionals1(Ot,Val,[0|Acc]);
+ asn1_DEFAULT -> fixoptionals1(Ot,Val,[0|Acc]);
+ _ -> fixoptionals1(Ot,Val,[1|Acc])
+ end.
+
+
+fixoptionals1([{Name,Pos}|Ot],[{Name,Val}|Vt],_Opt,Acc1,Acc2) ->
+ fixoptionals1(Ot,Vt,Pos+1,[1|Acc1],[{Name,Val}|Acc2]);
+fixoptionals1([{_Name,Pos}|Ot],V,Pos,Acc1,Acc2) ->
+ fixoptionals1(Ot,V,Pos+1,[0|Acc1],[asn1_NOVALUE|Acc2]);
+fixoptionals1(O,[Vh|Vt],Pos,Acc1,Acc2) ->
+ fixoptionals1(O,Vt,Pos+1,Acc1,[Vh|Acc2]);
+fixoptionals1([],[Vh|Vt],Pos,Acc1,Acc2) ->
+ fixoptionals1([],Vt,Pos+1,Acc1,[Vh|Acc2]);
+fixoptionals1([],[],_,Acc1,Acc2) ->
+ % return {Val,Opt}
+ {list_to_tuple([asn1_RECORDNAME|lists:reverse(Acc2)]),lists:reverse(Acc1)}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% This is the new fixoptionals/3 which is used by the new generates
+%%
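+%% For example, fixoptionals([2,3],2,{'Seq',asn1_NOVALUE,42}) returns
+%% {{'Seq',asn1_NOVALUE,42},{bits,2,1}}: a 0 bit for the absent optional at
+%% position 2 and a 1 bit for the present value at position 3.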
+fixoptionals(OptList,OptLength,Val) when tuple(Val) ->
+ Bits = fixoptionals(OptList,Val,0),
+ {Val,{bits,OptLength,Bits}};
+
+fixoptionals([],_Val,Acc) ->
+ %% Optbits
+ Acc;
+fixoptionals([Pos|Ot],Val,Acc) ->
+ case element(Pos,Val) of
+ asn1_NOVALUE -> fixoptionals(Ot,Val,Acc bsl 1);
+ asn1_DEFAULT -> fixoptionals(Ot,Val,Acc bsl 1);
+ _ -> fixoptionals(Ot,Val,(Acc bsl 1) + 1)
+ end.
+
+
+getext(Bytes) when tuple(Bytes) ->
+ getbit(Bytes);
+getext(Bytes) when binary(Bytes) ->
+ getbit({0,Bytes});
+getext(Bytes) when list(Bytes) ->
+ getbit({0,Bytes}).
+
+getextension(0, Bytes) ->
+ {{},Bytes};
+getextension(1, Bytes) ->
+ {Len,Bytes2} = decode_small_length(Bytes),
+ {Blist, Bytes3} = getbits_as_list(Len,Bytes2),
+ {list_to_tuple(Blist),Bytes3}.
+
+fixextensions({ext,ExtPos,ExtNum},Val) ->
+ case fixextensions(ExtPos,ExtNum+ExtPos,Val,0) of
+ 0 -> [];
+ ExtBits ->
+ [encode_small_length(ExtNum),{bits,ExtNum,ExtBits}]
+ end.
+
+fixextensions(Pos,MaxPos,_,Acc) when Pos >= MaxPos ->
+ Acc;
+fixextensions(Pos,ExtPos,Val,Acc) ->
+ Bit = case catch(element(Pos+1,Val)) of
+ asn1_NOVALUE ->
+ 0;
+ asn1_NOEXTVALUE ->
+ 0;
+ {'EXIT',_} ->
+ 0;
+ _ ->
+ 1
+ end,
+ fixextensions(Pos+1,ExtPos,Val,(Acc bsl 1)+Bit).
+
+skipextensions(Bytes,Nr,ExtensionBitPattern) ->
+ case (catch element(Nr,ExtensionBitPattern)) of
+ 1 ->
+ {_,Bytes2} = decode_open_type(Bytes,[]),
+ skipextensions(Bytes2, Nr+1, ExtensionBitPattern);
+ 0 ->
+ skipextensions(Bytes, Nr+1, ExtensionBitPattern);
+ {'EXIT',_} -> % badarg, no more extensions
+ Bytes
+ end.
+
+
+getchoice(Bytes,1,0) -> % only 1 alternative is not encoded
+ {0,Bytes};
+getchoice(Bytes,_,1) ->
+ decode_small_number(Bytes);
+getchoice(Bytes,NumChoices,0) ->
+ decode_constrained_number(Bytes,{0,NumChoices-1}).
+
+%% old version kept for backward compatibility with generates from R7B
+getoptionals(Bytes,NumOpt) ->
+ {Blist,Bytes1} = getbits_as_list(NumOpt,Bytes),
+ {list_to_tuple(Blist),Bytes1}.
+
+%% new version used in generates from r8b_patch/3 and later
+getoptionals2(Bytes,NumOpt) ->
+ getbits(Bytes,NumOpt).
+
+
+%% getbits_as_binary(Num,Bytes) -> {{Unused,BinBits},RestBytes},
+%% Num = integer(),
+%% Bytes = list() | tuple(),
+%% Unused = integer(),
+%% BinBits = binary(),
+%% RestBytes = tuple()
+getbits_as_binary(Num,Bytes) when binary(Bytes) ->
+ getbits_as_binary(Num,{0,Bytes});
+getbits_as_binary(0,Buffer) ->
+ {{0,<<>>},Buffer};
+getbits_as_binary(Num,{0,Bin}) when Num > 16 ->
+ Used = Num rem 8,
+ Pad = (8 - Used) rem 8,
+% Nbytes = Num div 8,
+ <<Bits:Num,_:Pad,RestBin/binary>> = Bin,
+ {{Pad,<<Bits:Num,0:Pad>>},RestBin};
+getbits_as_binary(Num,Buffer={_Used,_Bin}) -> % Unaligned buffer
+ %% Num =< 16,
+ {Bits2,Buffer2} = getbits(Buffer,Num),
+ Pad = (8 - (Num rem 8)) rem 8,
+ {{Pad,<<Bits2:Num,0:Pad>>},Buffer2}.
+
+
+% integer_from_list(Int,[],BigInt) ->
+% BigInt;
+% integer_from_list(Int,[H|T],BigInt) when Int < 8 ->
+% (BigInt bsl Int) bor (H bsr (8-Int));
+% integer_from_list(Int,[H|T],BigInt) ->
+% integer_from_list(Int-8,T,(BigInt bsl 8) bor H).
+
+getbits_as_list(Num,Bytes) when binary(Bytes) ->
+ getbits_as_list(Num,{0,Bytes},[]);
+getbits_as_list(Num,Bytes) ->
+ getbits_as_list(Num,Bytes,[]).
+
+%% If buffer is empty and nothing more will be picked.
+getbits_as_list(0, B, Acc) ->
+ {lists:reverse(Acc),B};
+%% If first byte in buffer is full and at least one byte will be picked,
+%% then pick one byte.
+getbits_as_list(N,{0,Bin},Acc) when N >= 8 ->
+ <<B7:1,B6:1,B5:1,B4:1,B3:1,B2:1,B1:1,B0:1,Rest/binary>> = Bin,
+ getbits_as_list(N-8,{0,Rest},[B0,B1,B2,B3,B4,B5,B6,B7|Acc]);
+getbits_as_list(N,{Used,Bin},Acc) when N >= 4, Used =< 4 ->
+ NewUsed = Used + 4,
+ Rem = 8 - NewUsed,
+ <<_:Used,B3:1,B2:1,B1:1,B0:1,_:Rem, Rest/binary>> = Bin,
+ NewRest = case Rem of 0 -> Rest; _ -> Bin end,
+ getbits_as_list(N-4,{NewUsed rem 8,NewRest},[B0,B1,B2,B3|Acc]);
+getbits_as_list(N,{Used,Bin},Acc) when N >= 2, Used =< 6 ->
+ NewUsed = Used + 2,
+ Rem = 8 - NewUsed,
+ <<_:Used,B1:1,B0:1,_:Rem, Rest/binary>> = Bin,
+ NewRest = case Rem of 0 -> Rest; _ -> Bin end,
+ getbits_as_list(N-2,{NewUsed rem 8,NewRest},[B0,B1|Acc]);
+getbits_as_list(N,{Used,Bin},Acc) when Used =< 7 ->
+ NewUsed = Used + 1,
+ Rem = 8 - NewUsed,
+ <<_:Used,B0:1,_:Rem, Rest/binary>> = Bin,
+ NewRest = case Rem of 0 -> Rest; _ -> Bin end,
+ getbits_as_list(N-1,{NewUsed rem 8,NewRest},[B0|Acc]).
+
+
+getbit({7,<<_:7,B:1,Rest/binary>>}) ->
+ {B,{0,Rest}};
+getbit({0,Buffer = <<B:1,_:7,_/binary>>}) ->
+ {B,{1,Buffer}};
+getbit({Used,Buffer}) ->
+ Unused = (8 - Used) - 1,
+ <<_:Used,B:1,_:Unused,_/binary>> = Buffer,
+ {B,{Used+1,Buffer}};
+getbit(Buffer) when binary(Buffer) ->
+ getbit({0,Buffer}).
+
+
+getbits({0,Buffer},Num) when (Num rem 8) == 0 ->
+ <<Bits:Num,Rest/binary>> = Buffer,
+ {Bits,{0,Rest}};
+getbits({Used,Bin},Num) ->
+ NumPlusUsed = Num + Used,
+ NewUsed = NumPlusUsed rem 8,
+ Unused = (8-NewUsed) rem 8,
+ case Unused of
+ 0 ->
+ <<_:Used,Bits:Num,Rest/binary>> = Bin,
+ {Bits,{0,Rest}};
+ _ ->
+ Bytes = NumPlusUsed div 8,
+ <<_:Used,Bits:Num,_UBits:Unused,_/binary>> = Bin,
+ <<_:Bytes/binary,Rest/binary>> = Bin,
+ {Bits,{NewUsed,Rest}}
+ end;
+getbits(Bin,Num) when binary(Bin) ->
+ getbits({0,Bin},Num).
+
+
+
+% getoctet(Bytes) when list(Bytes) ->
+% getoctet({0,Bytes});
+% getoctet(Bytes) ->
+% %% io:format("getoctet:Buffer = ~p~n",[Bytes]),
+% getoctet1(Bytes).
+
+% getoctet1({0,[H|T]}) ->
+% {H,{0,T}};
+% getoctet1({Pos,[_,H|T]}) ->
+% {H,{0,T}}.
+
+align({0,L}) ->
+ {0,L};
+align({_Pos,<<_H,T/binary>>}) ->
+ {0,T};
+align(Bytes) ->
+ {0,Bytes}.
+
+%% First align buffer, then pick the first Num octets.
+%% Returns octets as an integer with bit significance as in buffer.
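+%% For example, getoctets({0,<<1,2,3>>},2) returns {16#0102,{0,<<3>>}}.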
+getoctets({0,Buffer},Num) ->
+ <<Val:Num/integer-unit:8,RestBin/binary>> = Buffer,
+ {Val,{0,RestBin}};
+getoctets({U,<<_Padding,Rest/binary>>},Num) when U /= 0 ->
+ getoctets({0,Rest},Num);
+getoctets(Buffer,Num) when binary(Buffer) ->
+ getoctets({0,Buffer},Num).
+% getoctets(Buffer,Num) ->
+% %% io:format("getoctets:Buffer = ~p~nNum = ~p~n",[Buffer,Num]),
+% getoctets(Buffer,Num,0).
+
+% getoctets(Buffer,0,Acc) ->
+% {Acc,Buffer};
+% getoctets(Buffer,Num,Acc) ->
+% {Oct,NewBuffer} = getoctet(Buffer),
+% getoctets(NewBuffer,Num-1,(Acc bsl 8)+Oct).
+
+% getoctets_as_list(Buffer,Num) ->
+% getoctets_as_list(Buffer,Num,[]).
+
+% getoctets_as_list(Buffer,0,Acc) ->
+% {lists:reverse(Acc),Buffer};
+% getoctets_as_list(Buffer,Num,Acc) ->
+% {Oct,NewBuffer} = getoctet(Buffer),
+% getoctets_as_list(NewBuffer,Num-1,[Oct|Acc]).
+
+%% First align buffer, then pick the first Num octets.
+%% Returns octets as a binary
+getoctets_as_bin({0,Bin},Num)->
+ <<Octets:Num/binary,RestBin/binary>> = Bin,
+ {Octets,{0,RestBin}};
+getoctets_as_bin({_U,Bin},Num) ->
+ <<_Padding,Octets:Num/binary,RestBin/binary>> = Bin,
+ {Octets,{0,RestBin}};
+getoctets_as_bin(Bin,Num) when binary(Bin) ->
+ getoctets_as_bin({0,Bin},Num).
+
+%% same as above but returns octets as a List
+getoctets_as_list(Buffer,Num) ->
+ {Bin,Buffer2} = getoctets_as_bin(Buffer,Num),
+ {binary_to_list(Bin),Buffer2}.
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% set_choice(Alt,Choices,Altnum) -> ListofBitSettings
+%% Alt = atom()
+%% Altnum = integer() | {integer(),integer()}% number of alternatives
+%% Choices = [atom()] | {[atom()],[atom()]}
+%% When Choices is a tuple the first list is the Rootset and the
+%% second is the Extensions and then Altnum must also be a tuple with the
+%% lengths of the 2 lists
+%%
+set_choice(Alt,{L1,L2},{Len1,_Len2}) ->
+ case set_choice_tag(Alt,L1) of
+ N when integer(N), Len1 > 1 ->
+ [{bits,1,0}, % the value is in the root set
+ encode_integer([{'ValueRange',{0,Len1-1}}],N)];
+ N when integer(N) ->
+ [{bits,1,0}]; % no encoding if only 0 or 1 alternative
+ false ->
+ [{bits,1,1}, % extension value
+ case set_choice_tag(Alt,L2) of
+ N2 when integer(N2) ->
+ encode_small_number(N2);
+ false ->
+ unknown_choice_alt
+ end]
+ end;
+set_choice(Alt,L,Len) ->
+ case set_choice_tag(Alt,L) of
+ N when integer(N), Len > 1 ->
+ encode_integer([{'ValueRange',{0,Len-1}}],N);
+ N when integer(N) ->
+ []; % no encoding if only 0 or 1 alternative
+ false ->
+ [unknown_choice_alt]
+ end.
+
+set_choice_tag(Alt,Choices) ->
+ set_choice_tag(Alt,Choices,0).
+
+set_choice_tag(Alt,[Alt|_Rest],Tag) ->
+ Tag;
+set_choice_tag(Alt,[_H|Rest],Tag) ->
+ set_choice_tag(Alt,Rest,Tag+1);
+set_choice_tag(_Alt,[],_Tag) ->
+ false.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_fragmented_XXX; decode of values encoded fragmented according
+%% to ITU-T X.691 clause 10.9.3.8. The unit (XXX) is either bits, octets,
+%% characters or number of components (in a choice,sequence or similar).
+%% Buffer is a buffer {Used, Bin}.
+%% C is the constrained length.
+%% If the buffer is not aligned, this function does that.
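+%% For example, a fragment header octet 2#11000001 (top two bits 11)
+%% announces 1*16K units with more fragments to follow, while a header
+%% octet with the top bit 0 introduces the final fragment and carries its
+%% unit count (0..127) directly.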
+decode_fragmented_bits({0,Buffer},C) ->
+ decode_fragmented_bits(Buffer,C,[]);
+decode_fragmented_bits({_N,<<_,Bs/binary>>},C) ->
+ decode_fragmented_bits(Bs,C,[]).
+
+decode_fragmented_bits(<<3:2,Len:6,Bin/binary>>,C,Acc) ->
+ {Value,Bin2} = split_binary(Bin, Len * ?'16K'),
+ decode_fragmented_bits(Bin2,C,[Value,Acc]);
+decode_fragmented_bits(<<0:1,0:7,Bin/binary>>,C,Acc) ->
+ BinBits = list_to_binary(lists:reverse(Acc)),
+ case C of
+ Int when integer(Int),C == size(BinBits) ->
+ {BinBits,{0,Bin}};
+ Int when integer(Int) ->
+ exit({error,{asn1,{illegal_value,C,BinBits}}});
+ _ ->
+ {BinBits,{0,Bin}}
+ end;
+decode_fragmented_bits(<<0:1,Len:7,Bin/binary>>,C,Acc) ->
+ Result = {BinBits,{Used,_Rest}} =
+ case (Len rem 8) of
+ 0 ->
+ <<Value:Len/binary-unit:1,Bin2/binary>> = Bin,
+ {list_to_binary(lists:reverse([Value|Acc])),{0,Bin2}};
+ Rem ->
+ Bytes = Len div 8,
+ U = 8 - Rem,
+ <<Value:Bytes/binary-unit:8,Bits1:Rem,Bits2:U,Bin2/binary>> = Bin,
+ {list_to_binary(lists:reverse([Bits1 bsl U,Value|Acc])),
+ {Rem,<<Bits2,Bin2/binary>>}}
+ end,
+ case C of
+ Int when integer(Int),C == (size(BinBits) - ((8 - Used) rem 8)) ->
+ Result;
+ Int when integer(Int) ->
+ exit({error,{asn1,{illegal_value,C,BinBits}}});
+ _ ->
+ Result
+ end.
+
+
+decode_fragmented_octets({0,Bin},C) ->
+ decode_fragmented_octets(Bin,C,[]);
+decode_fragmented_octets({_N,<<_,Bs/binary>>},C) ->
+ decode_fragmented_octets(Bs,C,[]).
+
+decode_fragmented_octets(<<3:2,Len:6,Bin/binary>>,C,Acc) ->
+ {Value,Bin2} = split_binary(Bin,Len * ?'16K'),
+ decode_fragmented_octets(Bin2,C,[Value,Acc]);
+decode_fragmented_octets(<<0:1,0:7,Bin/binary>>,C,Acc) ->
+ Octets = list_to_binary(lists:reverse(Acc)),
+ case C of
+ Int when integer(Int), C == size(Octets) ->
+ {Octets,{0,Bin}};
+ Int when integer(Int) ->
+ exit({error,{asn1,{illegal_value,C,Octets}}});
+ _ ->
+ {Octets,{0,Bin}}
+ end;
+decode_fragmented_octets(<<0:1,Len:7,Bin/binary>>,C,Acc) ->
+ <<Value:Len/binary-unit:8,Bin2/binary>> = Bin,
+ BinOctets = list_to_binary(lists:reverse([Value|Acc])),
+ case C of
+ Int when integer(Int),size(BinOctets) == Int ->
+ {BinOctets,Bin2};
+ Int when integer(Int) ->
+ exit({error,{asn1,{illegal_value,C,BinOctets}}});
+ _ ->
+ {BinOctets,Bin2}
+ end.
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_open_type(Constraint, Value) -> CompleteList
+%% Value = list of bytes of an already encoded value (the list must be flat)
+%% | binary
+%% Constraint = not used in this version
+%%
+encode_open_type(_C, Val) when list(Val) ->
+ Bin = list_to_binary(Val),
+ [encode_length(undefined,size(Bin)),{octets,Bin}]; % octets implies align
+encode_open_type(_C, Val) when binary(Val) ->
+ [encode_length(undefined,size(Val)),{octets,Val}]. % octets implies align
+%% the binary_to_list is not optimal but compatible with the current solution
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_open_type(Buffer,Constraint) -> Value
+%% Constraint is not used in this version
+%% Buffer = [byte] with PER encoded data
+%% Value = [byte] with decoded data (which must be decoded again as some type)
+%%
+decode_open_type(Bytes, _C) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ getoctets_as_bin(Bytes2,Len).
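+
+%% Small sketch (hypothetical helper): an open type is encoded as an
+%% unconstrained length determinant followed by the opaque, aligned octets of
+%% the already encoded value; decode_open_type/2 returns those octets untouched.
+open_type_example() ->
+    [{octets,[3]},{octets,<<1,2,3>>}] = encode_open_type([],[1,2,3]),
+    ok.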
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_integer(Constraint,Value,NamedNumberList) -> CompleteList
+%% encode_integer(Constraint,Value) -> CompleteList
+%% encode_integer(Constraint,{Name,Value}) -> CompleteList
+%%
+%%
+encode_integer(C,V,NamedNumberList) when atom(V) ->
+ case lists:keysearch(V,1,NamedNumberList) of
+ {value,{_,NewV}} ->
+ encode_integer(C,NewV);
+ _ ->
+ exit({error,{asn1,{namednumber,V}}})
+ end;
+encode_integer(C,V,_NamedNumberList) when integer(V) ->
+ encode_integer(C,V);
+encode_integer(C,{Name,V},NamedNumberList) when atom(Name) ->
+ encode_integer(C,V,NamedNumberList).
+
+encode_integer(C,{Name,Val}) when atom(Name) ->
+ encode_integer(C,Val);
+
+encode_integer([{Rc,_Ec}],Val) when tuple(Rc) -> % extension case: try the root constraint, fall back to unconstrained encoding (XXX: unclear when this is invoked, and it does not work correctly)
+ case (catch encode_integer([Rc],Val)) of
+ {'EXIT',{error,{asn1,_}}} ->
+ [{bits,1,1},encode_unconstrained_number(Val)];
+ Encoded ->
+ [{bits,1,0},Encoded]
+ end;
+encode_integer(C,Val ) when list(C) ->
+ case get_constraint(C,'SingleValue') of
+ no ->
+ encode_integer1(C,Val);
+ V when integer(V),V == Val ->
+ []; % a type restricted to a single value encodes to nothing
+ V when list(V) ->
+ case lists:member(Val,V) of
+ true ->
+ encode_integer1(C,Val);
+ _ ->
+ exit({error,{asn1,{illegal_value,Val}}})
+ end;
+ _ ->
+ exit({error,{asn1,{illegal_value,Val}}})
+ end.
+
+encode_integer1(C, Val) ->
+ case VR = get_constraint(C,'ValueRange') of
+ no ->
+ encode_unconstrained_number(Val);
+ {Lb,'MAX'} ->
+ encode_semi_constrained_number(Lb,Val);
+ %% positive with range
+ {Lb,Ub} when Val >= Lb,
+ Ub >= Val ->
+ encode_constrained_number(VR,Val);
+ _ ->
+ exit({error,{asn1,{illegal_value,VR,Val}}})
+ end.
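+
+%% Worked example (hypothetical helper): a value range constraint selects the
+%% constrained-number form, no constraint the unconstrained form.
+integer_example() ->
+    {bits,3,5} = encode_integer([{'ValueRange',{0,7}}],5),
+    {octets,[1,7]} = encode_integer([],7),
+    ok.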
+
+decode_integer(Buffer,Range,NamedNumberList) ->
+ {Val,Buffer2} = decode_integer(Buffer,Range),
+ case lists:keysearch(Val,2,NamedNumberList) of
+ {value,{NewVal,_}} -> {NewVal,Buffer2};
+ _ -> {Val,Buffer2}
+ end.
+
+decode_integer(Buffer,[{Rc,_Ec}]) when tuple(Rc) ->
+ {Ext,Buffer2} = getext(Buffer),
+ case Ext of
+ 0 -> decode_integer(Buffer2,[Rc]);
+ 1 -> decode_unconstrained_number(Buffer2)
+ end;
+decode_integer(Buffer,undefined) ->
+ decode_unconstrained_number(Buffer);
+decode_integer(Buffer,C) ->
+ case get_constraint(C,'SingleValue') of
+ V when integer(V) ->
+ {V,Buffer};
+ V when list(V) ->
+ {Val,Buffer2} = decode_integer1(Buffer,C),
+ case lists:member(Val,V) of
+ true ->
+ {Val,Buffer2};
+ _ ->
+ exit({error,{asn1,{illegal_value,Val}}})
+ end;
+ _ ->
+ decode_integer1(Buffer,C)
+ end.
+
+decode_integer1(Buffer,C) ->
+ case VR = get_constraint(C,'ValueRange') of
+ no ->
+ decode_unconstrained_number(Buffer);
+ {Lb, 'MAX'} ->
+ decode_semi_constrained_number(Buffer,Lb);
+ {_,_} ->
+ decode_constrained_number(Buffer,VR)
+ end.
+
+ % X.691:10.6 Encoding of a normally small non-negative whole number
+ % Use this for encoding of CHOICE index if there is an extension marker in
+ % the CHOICE
+encode_small_number({Name,Val}) when atom(Name) ->
+ encode_small_number(Val);
+encode_small_number(Val) when Val =< 63 ->
+% [{bits,1,0},{bits,6,Val}];
+ [{bits,7,Val}]; % same as above but more efficient
+encode_small_number(Val) ->
+ [{bits,1,1},encode_semi_constrained_number(0,Val)].
+
+decode_small_number(Bytes) ->
+ {Bit,Bytes2} = getbit(Bytes),
+ case Bit of
+ 0 ->
+ getbits(Bytes2,6);
+ 1 ->
+ decode_semi_constrained_number(Bytes2,0)
+ end.
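+
+%% Worked example (hypothetical helper): values up to 63 fit in a single 7-bit
+%% field, larger values use the extension form with a leading 1 bit.
+small_number_example() ->
+    [{bits,7,5}] = encode_small_number(5),
+    [{bits,1,1}|_] = encode_small_number(100),
+    ok.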
+
+%% X.691:10.7 Encoding of a semi-constrained whole number
+%% a dedicated clause for encode_semi_constrained_number(0,Val) might be an optimization
+encode_semi_constrained_number(C,{Name,Val}) when atom(Name) ->
+ encode_semi_constrained_number(C,Val);
+encode_semi_constrained_number({Lb,'MAX'},Val) ->
+ encode_semi_constrained_number(Lb,Val);
+encode_semi_constrained_number(Lb,Val) ->
+ Val2 = Val - Lb,
+ Oct = eint_positive(Val2),
+ Len = length(Oct),
+ if
+ Len < 128 ->
+ {octets,[Len|Oct]}; % equiv with encode_length(undefined,Len) but faster
+ true ->
+ [encode_length(undefined,Len),{octets,Oct}]
+ end.
+
+decode_semi_constrained_number(Bytes,{Lb,_}) ->
+ decode_semi_constrained_number(Bytes,Lb);
+decode_semi_constrained_number(Bytes,Lb) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {V,Bytes3} = getoctets(Bytes2,Len),
+ {V+Lb,Bytes3}.
+
+encode_constrained_number(Range,{Name,Val}) when atom(Name) ->
+ encode_constrained_number(Range,Val);
+encode_constrained_number({Lb,Ub},Val) when Val >= Lb, Ub >= Val ->
+ Range = Ub - Lb + 1,
+ Val2 = Val - Lb,
+ if
+ Range == 2 ->
+ {bits,1,Val2};
+ Range =< 4 ->
+ {bits,2,Val2};
+ Range =< 8 ->
+ {bits,3,Val2};
+ Range =< 16 ->
+ {bits,4,Val2};
+ Range =< 32 ->
+ {bits,5,Val2};
+ Range =< 64 ->
+ {bits,6,Val2};
+ Range =< 128 ->
+ {bits,7,Val2};
+ Range =< 255 ->
+ {bits,8,Val2};
+ Range =< 256 ->
+ {octets,[Val2]};
+ Range =< 65536 ->
+ {octets,<<Val2:16>>};
+ Range =< 16#1000000 ->
+ Octs = eint_positive(Val2),
+ [{bits,2,length(Octs)-1},{octets,Octs}];
+ Range =< 16#100000000 ->
+ Octs = eint_positive(Val2),
+ [{bits,2,length(Octs)-1},{octets,Octs}];
+ Range =< 16#10000000000 ->
+ Octs = eint_positive(Val2),
+ [{bits,3,length(Octs)-1},{octets,Octs}];
+ true ->
+ exit({not_supported,{integer_range,Range}})
+ end;
+encode_constrained_number(Range,Val) ->
+ exit({error,{asn1,{integer_range,Range,value,Val}}}).
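+
+%% Worked example (hypothetical helper): a constrained whole number is encoded
+%% as Val-Lb in the smallest field that covers the range.
+constrained_number_example() ->
+    {bits,4,3} = encode_constrained_number({10,25},13),        % 16 values -> 4 bits
+    {octets,<<300:16>>} = encode_constrained_number({0,65535},300),
+    ok.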
+
+
+decode_constrained_number(Buffer,{Lb,Ub}) ->
+ Range = Ub - Lb + 1,
+ % Val2 = Val - Lb,
+ {Val,Remain} =
+ if
+ Range == 2 ->
+ getbits(Buffer,1);
+ Range =< 4 ->
+ getbits(Buffer,2);
+ Range =< 8 ->
+ getbits(Buffer,3);
+ Range =< 16 ->
+ getbits(Buffer,4);
+ Range =< 32 ->
+ getbits(Buffer,5);
+ Range =< 64 ->
+ getbits(Buffer,6);
+ Range =< 128 ->
+ getbits(Buffer,7);
+ Range =< 255 ->
+ getbits(Buffer,8);
+ Range =< 256 ->
+ getoctets(Buffer,1);
+ Range =< 65536 ->
+ getoctets(Buffer,2);
+ Range =< 16#1000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,3}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ Range =< 16#100000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,4}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ Range =< 16#10000000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,5}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ true ->
+ exit({not_supported,{integer_range,Range}})
+ end,
+ {Val+Lb,Remain}.
+
+%% X.691:10.8 Encoding of an unconstrained whole number
+
+encode_unconstrained_number(Val) when Val >= 0 ->
+ Oct = eint(Val,[]),
+ Len = length(Oct),
+ if
+ Len < 128 ->
+ {octets,[Len|Oct]}; % equiv with encode_length(undefined,Len) but faster
+ true ->
+ [encode_length(undefined,Len),{octets,Oct}]
+ end;
+encode_unconstrained_number(Val) -> % negative
+ Oct = enint(Val,[]),
+ Len = length(Oct),
+ if
+ Len < 128 ->
+ {octets,[Len|Oct]}; % equiv with encode_length(undefined,Len) but faster
+ true ->
+ [encode_length(undefined,Len),{octets,Oct}]
+ end.
+
+
+%% used for positive values which don't need a sign bit
+%% returns a list of octets
+eint_positive(Val) ->
+ case eint(Val,[]) of
+ [0,B1|T] ->
+ [B1|T];
+ T ->
+ T
+ end.
+
+
+eint(0, [B|Acc]) when B < 128 ->
+ [B|Acc];
+eint(N, Acc) ->
+ eint(N bsr 8, [N band 16#ff| Acc]).
+
+enint(-1, [B1|T]) when B1 > 127 ->
+ [B1|T];
+enint(N, Acc) ->
+ enint(N bsr 8, [N band 16#ff|Acc]).
+
+decode_unconstrained_number(Bytes) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {Ints,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_integer(Ints),Bytes3}.
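+
+%% Worked example (hypothetical helper): an unconstrained number becomes a
+%% length determinant followed by the shortest two's complement octets.
+unconstrained_number_example() ->
+    {octets,[2,1,44]} = encode_unconstrained_number(300),   % 300 = 16#012C
+    {octets,[1,156]}  = encode_unconstrained_number(-100),  % -100 = 16#9C
+    ok.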
+
+dec_pos_integer(Ints) ->
+ decpint(Ints, 8 * (length(Ints) - 1)).
+dec_integer(Ints) when hd(Ints) band 255 =< 127 -> %% Positive number
+ decpint(Ints, 8 * (length(Ints) - 1));
+dec_integer(Ints) -> %% Negative
+ decnint(Ints, 8 * (length(Ints) - 1)).
+
+decpint([Byte|Tail], Shift) ->
+ (Byte bsl Shift) bor decpint(Tail, Shift-8);
+decpint([], _) -> 0.
+
+decnint([Byte|Tail], Shift) ->
+ (-128 + (Byte band 127) bsl Shift) bor decpint(Tail, Shift-8).
+
+% minimum_octets(Val) ->
+% minimum_octets(Val,[]).
+
+% minimum_octets(Val,Acc) when Val > 0 ->
+% minimum_octets((Val bsr 8),[Val band 16#FF|Acc]);
+% minimum_octets(0,Acc) ->
+% Acc.
+
+
+%% X.691:10.9 Encoding of a length determinant
+%%encode_small_length(undefined,Len) -> % null means no UpperBound
+%% encode_small_number(Len).
+
+%% X.691:10.9.3.5
+%% X.691:10.9.3.7
+encode_length(undefined,Len) -> % un-constrained
+ if
+ Len < 128 ->
+ {octets,[Len]};
+ Len < 16384 ->
+ {octets,<<2:2,Len:14>>};
+	true -> % should be able to encode length >= 16384
+ exit({error,{asn1,{encode_length,{nyi,above_16k}}}})
+ end;
+
+encode_length({0,'MAX'},Len) ->
+ encode_length(undefined,Len);
+encode_length(Vr={Lb,Ub},Len) when Ub =< 65535 ,Lb >= 0 -> % constrained
+ encode_constrained_number(Vr,Len);
+encode_length({Lb,_Ub},Len) when integer(Lb), Lb >= 0 -> % Ub > 65535
+ encode_length(undefined,Len);
+encode_length({Vr={Lb,Ub},[]},Len) when Ub =< 65535 ,Lb >= 0 ->
+ %% constrained extensible
+ [{bits,1,0},encode_constrained_number(Vr,Len)];
+encode_length(SingleValue,_Len) when integer(SingleValue) ->
+ [].
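+
+%% Worked example (hypothetical helper): lengths below 128 take one octet,
+%% 128..16383 two octets tagged with the bits 10, and a length that is fixed
+%% by the constraint is not encoded at all.
+length_example() ->
+    {octets,[5]} = encode_length(undefined,5),
+    {octets,<<2:2,300:14>>} = encode_length(undefined,300),
+    [] = encode_length(4,4),
+    ok.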
+
+%% X.691 10.9.3.4 (only used for the length of the bitmap that prefixes
+%% extension additions in a SEQUENCE or SET)
+encode_small_length(Len) when Len =< 64 ->
+%% [{bits,1,0},{bits,6,Len-1}];
+ {bits,7,Len-1}; % the same as above but more efficient
+encode_small_length(Len) ->
+ [{bits,1,1},encode_length(undefined,Len)].
+
+% decode_small_length({Used,<<_:Used,0:1,Num:6,_:((8-Used+1) rem 8),Rest/binary>>}) ->
+% case Buffer of
+% <<_:Used,0:1,Num:6,_:((8-Used+1) rem 8),Rest/binary>> ->
+% {Num,
+% case getbit(Buffer) of
+% {0,Remain} ->
+% {Bits,Remain2} = getbits(Remain,6),
+% {Bits+1,Remain2};
+% {1,Remain} ->
+% decode_length(Remain,undefined)
+% end.
+
+decode_small_length(Buffer) ->
+ case getbit(Buffer) of
+ {0,Remain} ->
+ {Bits,Remain2} = getbits(Remain,6),
+ {Bits+1,Remain2};
+ {1,Remain} ->
+ decode_length(Remain,undefined)
+ end.
+
+decode_length(Buffer) ->
+ decode_length(Buffer,undefined).
+
+decode_length(Buffer,undefined) -> % un-constrained
+ {0,Buffer2} = align(Buffer),
+ case Buffer2 of
+ <<0:1,Oct:7,Rest/binary>> ->
+ {Oct,{0,Rest}};
+ <<2:2,Val:14,Rest/binary>> ->
+ {Val,{0,Rest}};
+ <<3:2,_:14,_Rest/binary>> ->
+ %% this case should be fixed
+ exit({error,{asn1,{decode_length,{nyi,above_16k}}}})
+ end;
+%% {Bits,_} = getbits(Buffer2,2),
+% case Bits of
+% 2 ->
+% {Val,Bytes3} = getoctets(Buffer2,2),
+% {(Val band 16#3FFF),Bytes3};
+% 3 ->
+% exit({error,{asn1,{decode_length,{nyi,above_16k}}}});
+% _ ->
+% {Val,Bytes3} = getoctet(Buffer2),
+% {Val band 16#7F,Bytes3}
+% end;
+
+decode_length(Buffer,{Lb,Ub}) when Ub =< 65535 ,Lb >= 0 -> % constrained
+ decode_constrained_number(Buffer,{Lb,Ub});
+decode_length(_,{Lb,_}) when integer(Lb), Lb >= 0 -> % Ub > 65535
+ exit({error,{asn1,{decode_length,{nyi,above_64K}}}});
+decode_length(Buffer,{{Lb,Ub},[]}) ->
+ case getbit(Buffer) of
+ {0,Buffer2} ->
+ decode_length(Buffer2, {Lb,Ub})
+ end;
+
+
+%When does this case occur with {_,_Lb,Ub} ??
+% X.691:10.9.3.5
+decode_length({Used,Bin},{_,_Lb,_Ub}) -> % unconstrained or large Ub; NOTE: this case does not cover Ub > 65535
+ Unused = (8-Used) rem 8,
+ case Bin of
+ <<_:Used,0:1,Val:7,R:Unused,Rest/binary>> ->
+ {Val,{Used,<<R,Rest/binary>>}};
+ <<_:Used,_:Unused,2:2,Val:14,Rest/binary>> ->
+ {Val, {0,Rest}};
+ <<_:Used,_:Unused,3:2,_:14,_Rest/binary>> ->
+ exit({error,{asn1,{decode_length,{nyi,length_above_64K}}}})
+ end;
+% decode_length(Buffer,{_,_Lb,Ub}) -> %when Len =< 127 -> % Unconstrained or large Ub
+% case getbit(Buffer) of
+% {0,Remain} ->
+% getbits(Remain,7);
+% {1,Remain} ->
+% {Val,Remain2} = getoctets(Buffer,2),
+% {Val band 2#0111111111111111, Remain2}
+% end;
+decode_length(Buffer,SingleValue) when integer(SingleValue) ->
+ {SingleValue,Buffer}.
+
+
+ % X.691:11
+encode_boolean(true) ->
+ {bits,1,1};
+encode_boolean(false) ->
+ {bits,1,0};
+encode_boolean({Name,Val}) when atom(Name) ->
+ encode_boolean(Val);
+encode_boolean(Val) ->
+ exit({error,{asn1,{encode_boolean,Val}}}).
+
+decode_boolean(Buffer) -> %when record(Buffer,buffer)
+ case getbit(Buffer) of
+ {1,Remain} -> {true,Remain};
+ {0,Remain} -> {false,Remain}
+ end.
+
+
+%% ENUMERATED with extension marker
+decode_enumerated(Buffer,C,{Ntup1,Ntup2}) when tuple(Ntup1), tuple(Ntup2) ->
+ {Ext,Buffer2} = getext(Buffer),
+ case Ext of
+ 0 -> % not an extension value
+ {Val,Buffer3} = decode_integer(Buffer2,C),
+ case catch (element(Val+1,Ntup1)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer3};
+ _Error -> exit({error,{asn1,{decode_enumerated,{Val,[Ntup1,Ntup2]}}}})
+ end;
+	1 -> % this is an extension value
+ {Val,Buffer3} = decode_small_number(Buffer2),
+ case catch (element(Val+1,Ntup2)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer3};
+ _ -> {{asn1_enum,Val},Buffer3}
+ end
+ end;
+
+decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) ->
+ {Val,Buffer2} = decode_integer(Buffer,C),
+ case catch (element(Val+1,NamedNumberTup)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer2};
+ _Error -> exit({error,{asn1,{decode_enumerated,{Val,NamedNumberTup}}}})
+ end.
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Bitstring value, ITU_T X.690 Chapter 8.5
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+%%===============================================================================
+%% encode bitstring value
+%%===============================================================================
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% bitstring NamedBitList
+%% Val can be of:
+%% - [identifiers] where only the named identifiers are set to one;
+%%   the constraint must then carry some information about the
+%%   bit length.
+%% - [list of ones and zeroes] all bits
+%% - integer value representing the bit list
+%% C is the constraint length; it is only relevant when Val is a list of identifiers
+
+
+%% when the value is a list of {Unused,BinBits}, where
+%% Unused = integer(),
+%% BinBits = binary().
+
+encode_bit_string(C,Bin={Unused,BinBits},NamedBitList) when integer(Unused),
+ binary(BinBits) ->
+ encode_bin_bit_string(C,Bin,NamedBitList);
+
+%% when the value is a list of named bits
+encode_bit_string(C, LoNB=[FirstVal | _RestVal], NamedBitList) when atom(FirstVal) ->
+ ToSetPos = get_all_bitposes(LoNB, NamedBitList, []),
+ BitList = make_and_set_list(ToSetPos,0),
+ encode_bit_string(C,BitList,NamedBitList);
+
+encode_bit_string(C, BL=[{bit,_No} | _RestVal], NamedBitList) ->
+ ToSetPos = get_all_bitposes(BL, NamedBitList, []),
+ BitList = make_and_set_list(ToSetPos,0),
+ encode_bit_string(C,BitList,NamedBitList);
+
+%% when the value is a list of ones and zeroes
+
+% encode_bit_string(C, BitListValue, NamedBitList) when list(BitListValue) ->
+% Bl1 =
+% case NamedBitList of
+% [] -> % dont remove trailing zeroes
+% BitListValue;
+% _ -> % first remove any trailing zeroes
+% lists:reverse(lists:dropwhile(fun(0)->true;(1)->false end,
+% lists:reverse(BitListValue)))
+% end,
+% BitList = [{bit,X} || X <- Bl1],
+% %% BListLen = length(BitList),
+% case get_constraint(C,'SizeConstraint') of
+% 0 -> % fixed length
+% []; % nothing to encode
+% V when integer(V),V=<16 -> % fixed length 16 bits or less
+% pad_list(V,BitList);
+% V when integer(V) -> % fixed length 16 bits or more
+% [align,pad_list(V,BitList)]; % should be another case for V >= 65537
+% {Lb,Ub} when integer(Lb),integer(Ub) ->
+% [encode_length({Lb,Ub},length(BitList)),align,BitList];
+% no ->
+% [encode_length(undefined,length(BitList)),align,BitList];
+% Sc -> % extension marker
+% [encode_length(Sc,length(BitList)),align,BitList]
+% end;
+encode_bit_string(C, BitListValue, NamedBitList) when list(BitListValue) ->
+ BitListToBinary =
+ %% fun that transforms a list of 1 and 0 to a tuple:
+ %% {UnusedBitsInLastByte, Binary}
+ fun([H|T],Acc,N,Fun) ->
+ Fun(T,(Acc bsl 1)+H,N+1,Fun);
+ ([],Acc,N,_) ->
+ Unused = (8 - (N rem 8)) rem 8,
+ {Unused,<<Acc:N,0:Unused>>}
+ end,
+ UnusedAndBin =
+ case NamedBitList of
+ [] -> % dont remove trailing zeroes
+ BitListToBinary(BitListValue,0,0,BitListToBinary);
+ _ ->
+ BitListToBinary(lists:reverse(
+ lists:dropwhile(fun(0)->true;(1)->false end,
+ lists:reverse(BitListValue))),
+ 0,0,BitListToBinary)
+ end,
+ encode_bin_bit_string(C,UnusedAndBin,NamedBitList);
+
+%% when the value is an integer
+encode_bit_string(C, IntegerVal, NamedBitList) when integer(IntegerVal)->
+ BitList = int_to_bitlist(IntegerVal),
+ encode_bit_string(C,BitList,NamedBitList);
+
+%% when the value is a tuple
+encode_bit_string(C,{Name,Val}, NamedBitList) when atom(Name) ->
+ encode_bit_string(C,Val,NamedBitList).
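+
+%% Worked example (hypothetical helper): with no size constraint a bit list is
+%% packed into an {Unused,Binary} tuple and preceded by its length in bits.
+bit_string_example() ->
+    [{octets,[4]},align,{4,<<2#10110000>>}] =
+        encode_bit_string([],[1,0,1,1],[]),
+    ok.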
+
+
+%% encode_bin_bit_string/3, when the value is a tuple of Unused and BinBits.
+%% Unused = integer(), i.e. the number of unused bits in the least
+%% significant byte of BinBits = binary().
+
+
+encode_bin_bit_string(C,UnusedAndBin={_Unused,_BinBits},NamedBitList) ->
+ Constr = get_constraint(C,'SizeConstraint'),
+ UnusedAndBin1 = {Unused1,Bin1} =
+ remove_trailing_bin(NamedBitList,UnusedAndBin,lower_bound(Constr)),
+ case Constr of
+ 0 ->
+ [];
+ V when integer(V),V=<16 ->
+ {Unused2,Bin2} = pad_list(V,UnusedAndBin1),
+ <<BitVal:V,_:Unused2>> = Bin2,
+ {bits,V,BitVal};
+ V when integer(V) ->
+ [align, pad_list(V, UnusedAndBin1)];
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ [encode_length({Lb,Ub},size(Bin1)*8 - Unused1),
+ align,UnusedAndBin1];
+ no ->
+ [encode_length(undefined,size(Bin1)*8 - Unused1),
+ align,UnusedAndBin1];
+ Sc ->
+ [encode_length(Sc,size(Bin1)*8 - Unused1),
+ align,UnusedAndBin1]
+ end.
+
+remove_trailing_bin([], {Unused,Bin},_) ->
+ {Unused,Bin};
+remove_trailing_bin(NamedNumberList, {_Unused,Bin},C) ->
+ Size = size(Bin)-1,
+ <<Bfront:Size/binary, LastByte:8>> = Bin,
+ %% clear the Unused bits to be sure
+% LastByte1 = LastByte band (((1 bsl Unused) -1) bxor 255),
+ Unused1 = trailingZeroesInNibble(LastByte band 15),
+ Unused2 =
+ case Unused1 of
+ 4 ->
+ 4 + trailingZeroesInNibble(LastByte bsr 4);
+ _ -> Unused1
+ end,
+ case Unused2 of
+ 8 ->
+ remove_trailing_bin(NamedNumberList,{0,Bfront},C);
+ _ ->
+ case C of
+ Int when integer(Int),Int > ((size(Bin)*8)-Unused2) ->
+ %% this padding see OTP-4353
+ pad_list(Int,{Unused2,Bin});
+ _ -> {Unused2,Bin}
+ end
+ end.
+
+
+trailingZeroesInNibble(0) ->
+ 4;
+trailingZeroesInNibble(1) ->
+ 0;
+trailingZeroesInNibble(2) ->
+ 1;
+trailingZeroesInNibble(3) ->
+ 0;
+trailingZeroesInNibble(4) ->
+ 2;
+trailingZeroesInNibble(5) ->
+ 0;
+trailingZeroesInNibble(6) ->
+ 1;
+trailingZeroesInNibble(7) ->
+ 0;
+trailingZeroesInNibble(8) ->
+ 3;
+trailingZeroesInNibble(9) ->
+ 0;
+trailingZeroesInNibble(10) ->
+ 1;
+trailingZeroesInNibble(11) ->
+ 0;
+trailingZeroesInNibble(12) -> %#1100
+ 2;
+trailingZeroesInNibble(13) ->
+ 0;
+trailingZeroesInNibble(14) ->
+ 1;
+trailingZeroesInNibble(15) ->
+ 0.
+
+lower_bound({{Lb,_},_}) when integer(Lb) ->
+ Lb;
+lower_bound({Lb,_}) when integer(Lb) ->
+ Lb;
+lower_bound(C) ->
+ C.
+
+%%%%%%%%%%%%%%%
+%% The result is presented as a list of named bits (if possible)
+%% else as a tuple {Unused,Bits}. Unused is the number of unused
+%% bits, least significant bits in the last byte of Bits. Bits is
+%% the BIT STRING represented as a binary.
+%%
+decode_compact_bit_string(Buffer, C, NamedNumberList) ->
+ case get_constraint(C,'SizeConstraint') of
+ 0 -> % fixed length
+ {{8,0},Buffer};
+ V when integer(V),V=<16 -> %fixed length 16 bits or less
+ compact_bit_string(Buffer,V,NamedNumberList);
+ V when integer(V),V=<65536 -> %fixed length > 16 bits
+ Bytes2 = align(Buffer),
+ compact_bit_string(Bytes2,V,NamedNumberList);
+ V when integer(V) -> % V > 65536 => fragmented value
+ {Bin,Buffer2} = decode_fragmented_bits(Buffer,V),
+ case Buffer2 of
+ {0,_} -> {{0,Bin},Buffer2};
+ {U,_} -> {{8-U,Bin},Buffer2}
+ end;
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ %% This case may demand decoding of fragmented length/value
+ {Len,Bytes2} = decode_length(Buffer,{Lb,Ub}),
+ Bytes3 = align(Bytes2),
+ compact_bit_string(Bytes3,Len,NamedNumberList);
+ no ->
+ %% This case may demand decoding of fragmented length/value
+ {Len,Bytes2} = decode_length(Buffer,undefined),
+ Bytes3 = align(Bytes2),
+ compact_bit_string(Bytes3,Len,NamedNumberList);
+ Sc ->
+ {Len,Bytes2} = decode_length(Buffer,Sc),
+ Bytes3 = align(Bytes2),
+ compact_bit_string(Bytes3,Len,NamedNumberList)
+ end.
+
+
+%%%%%%%%%%%%%%%
+%% The result is presented as a list of named bits (if possible)
+%% else as a list of 0 and 1.
+%%
+decode_bit_string(Buffer, C, NamedNumberList) ->
+ case get_constraint(C,'SizeConstraint') of
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ {Len,Bytes2} = decode_length(Buffer,{Lb,Ub}),
+ Bytes3 = align(Bytes2),
+ bit_list_or_named(Bytes3,Len,NamedNumberList);
+ no ->
+ {Len,Bytes2} = decode_length(Buffer,undefined),
+ Bytes3 = align(Bytes2),
+ bit_list_or_named(Bytes3,Len,NamedNumberList);
+ 0 -> % fixed length
+ {[],Buffer}; % nothing to encode
+ V when integer(V),V=<16 -> % fixed length 16 bits or less
+ bit_list_or_named(Buffer,V,NamedNumberList);
+ V when integer(V),V=<65536 ->
+ Bytes2 = align(Buffer),
+ bit_list_or_named(Bytes2,V,NamedNumberList);
+ V when integer(V) ->
+ Bytes2 = align(Buffer),
+ {BinBits,_} = decode_fragmented_bits(Bytes2,V),
+ bit_list_or_named(BinBits,V,NamedNumberList);
+ Sc -> % extension marker
+ {Len,Bytes2} = decode_length(Buffer,Sc),
+ Bytes3 = align(Bytes2),
+ bit_list_or_named(Bytes3,Len,NamedNumberList)
+ end.
+
+
+%% if no named bits are declared we will return a
+%% {Unused,Bits}. Unused = integer(),
+%% Bits = binary().
+compact_bit_string(Buffer,Len,[]) ->
+ getbits_as_binary(Len,Buffer); % {{Unused,BinBits},NewBuffer}
+compact_bit_string(Buffer,Len,NamedNumberList) ->
+ bit_list_or_named(Buffer,Len,NamedNumberList).
+
+
+%% if no named bits are declared we will return a
+%% BitList = [0 | 1]
+
+bit_list_or_named(Buffer,Len,[]) ->
+ getbits_as_list(Len,Buffer);
+
+%% if there are named bits declared we will return a named
+%% BitList where the names are atoms and unnamed bits represented
+%% as {bit,Pos}
+%% BitList = [atom() | {bit,Pos}]
+%% Pos = integer()
+
+bit_list_or_named(Buffer,Len,NamedNumberList) ->
+ {BitList,Rest} = getbits_as_list(Len,Buffer),
+ {bit_list_or_named1(0,BitList,NamedNumberList,[]), Rest}.
+
+bit_list_or_named1(Pos,[0|Bt],Names,Acc) ->
+ bit_list_or_named1(Pos+1,Bt,Names,Acc);
+bit_list_or_named1(Pos,[1|Bt],Names,Acc) ->
+ case lists:keysearch(Pos,2,Names) of
+ {value,{Name,_}} ->
+ bit_list_or_named1(Pos+1,Bt,Names,[Name|Acc]);
+ _ ->
+ bit_list_or_named1(Pos+1,Bt,Names,[{bit,Pos}|Acc])
+ end;
+bit_list_or_named1(_,[],_,Acc) ->
+ lists:reverse(Acc).
+
+
+
+%%%%%%%%%%%%%%%
+%%
+
+int_to_bitlist(Int) when integer(Int), Int > 0 ->
+ [Int band 1 | int_to_bitlist(Int bsr 1)];
+int_to_bitlist(0) ->
+ [].
+
+
+%%%%%%%%%%%%%%%%%%
+%% get_all_bitposes([list of named bits to set], named_bit_db, []) ->
+%% [sorted_list_of_bitpositions_to_set]
+
+get_all_bitposes([{bit,ValPos}|Rest], NamedBitList, Ack) ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack ]);
+
+get_all_bitposes([Val | Rest], NamedBitList, Ack) ->
+ case lists:keysearch(Val, 1, NamedBitList) of
+ {value, {_ValName, ValPos}} ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack]);
+ _ ->
+ exit({error,{asn1, {bitstring_namedbit, Val}}})
+ end;
+get_all_bitposes([], _NamedBitList, Ack) ->
+ lists:sort(Ack).
+
+%%%%%%%%%%%%%%%%%%
+%% make_and_set_list([list of positions to set to 1],StartPos) ->
+%% returns a list with a 1 at every position in SetPos.
+%% Positions are zero based: the first element is 0, the second is 1, etc.
+%%
+
+make_and_set_list([XPos|SetPos], XPos) ->
+ [1 | make_and_set_list(SetPos, XPos + 1)];
+make_and_set_list([Pos|SetPos], XPos) ->
+ [0 | make_and_set_list([Pos | SetPos], XPos + 1)];
+make_and_set_list([], _) ->
+ [].
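+
+%% Worked example (hypothetical helper): bit positions [1,3] give the bit list
+%% [0,1,0,1].
+named_bits_example() ->
+    [0,1,0,1] = make_and_set_list([1,3],0),
+    ok.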
+
+%%%%%%%%%%%%%%%%%
+%% pad_list(N,{Unused,Bin}) -> {PaddedUnused,PaddedBin}
+%% pads the bit string value with trailing zero bits up to a length of N bits.
+%% If the value contains more than N significant bits an asn1 error exit
+%% is generated.
+
+pad_list(N,In={Unused,Bin}) ->
+ pad_list(N, size(Bin)*8 - Unused, In).
+
+pad_list(N,Size,In={_,_}) when N < Size ->
+ exit({error,{asn1,{range_error,{bit_string,In}}}});
+pad_list(N,Size,{Unused,Bin}) when N > Size, Unused > 0 ->
+ pad_list(N,Size+1,{Unused-1,Bin});
+pad_list(N,Size,{_Unused,Bin}) when N > Size ->
+ pad_list(N,Size+1,{7,<<Bin/binary,0>>});
+pad_list(N,N,In={_,_}) ->
+ In.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% X.691:16
+%% encode_octet_string(Constraint,ExtensionMarker,Val)
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+encode_octet_string(C,Val) ->
+ encode_octet_string(C,false,Val).
+
+encode_octet_string(C,Bool,{_Name,Val}) ->
+ encode_octet_string(C,Bool,Val);
+encode_octet_string(_,true,_) ->
+ exit({error,{asn1,{'not_supported',extensionmarker}}});
+encode_octet_string(C,false,Val) ->
+ case get_constraint(C,'SizeConstraint') of
+ 0 ->
+ [];
+ 1 ->
+ [V] = Val,
+ {bits,8,V};
+ 2 ->
+ [V1,V2] = Val,
+ [{bits,8,V1},{bits,8,V2}];
+ Sv when Sv =<65535, Sv == length(Val) -> % fixed length
+ {octets,Val};
+ {Lb,Ub} ->
+ [encode_length({Lb,Ub},length(Val)),{octets,Val}];
+ Sv when list(Sv) ->
+ [encode_length({hd(Sv),lists:max(Sv)},length(Val)),{octets,Val}];
+ no ->
+ [encode_length(undefined,length(Val)),{octets,Val}]
+ end.
+
+decode_octet_string(Bytes,Range) ->
+ decode_octet_string(Bytes,Range,false).
+
+decode_octet_string(Bytes,C,false) ->
+ case get_constraint(C,'SizeConstraint') of
+ 0 ->
+ {[],Bytes};
+ 1 ->
+ {B1,Bytes2} = getbits(Bytes,8),
+ {[B1],Bytes2};
+ 2 ->
+ {Bs,Bytes2}= getbits(Bytes,16),
+ {binary_to_list(<<Bs:16>>),Bytes2};
+ {_,0} ->
+ {[],Bytes};
+ Sv when integer(Sv), Sv =<65535 -> % fixed length
+ getoctets_as_list(Bytes,Sv);
+ Sv when integer(Sv) -> % fragmented encoding
+ Bytes2 = align(Bytes),
+ decode_fragmented_octets(Bytes2,Sv);
+ {Lb,Ub} ->
+ {Len,Bytes2} = decode_length(Bytes,{Lb,Ub}),
+ getoctets_as_list(Bytes2,Len);
+ Sv when list(Sv) ->
+ {Len,Bytes2} = decode_length(Bytes,{hd(Sv),lists:max(Sv)}),
+ getoctets_as_list(Bytes2,Len);
+ no ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ getoctets_as_list(Bytes2,Len)
+ end.
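+
+%% Worked example (hypothetical helper): a fixed size of one or two octets is
+%% encoded as bare bit fields, otherwise a length determinant precedes the
+%% octets.
+octet_string_example() ->
+    {bits,8,16#AB} = encode_octet_string([{'SizeConstraint',1}],[16#AB]),
+    [{octets,[3]},{octets,"abc"}] = encode_octet_string([],"abc"),
+    ok.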
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Restricted char string types
+%% (NumericString, PrintableString,VisibleString,IA5String,BMPString,UniversalString)
+%% X.691:26 and X.680:34-36
+%%encode_restricted_string(aligned,'BMPString',Constraints,Extension,Val)
+
+
+encode_restricted_string(aligned,{Name,Val}) when atom(Name) ->
+ encode_restricted_string(aligned,Val);
+
+encode_restricted_string(aligned,Val) when list(Val)->
+ [encode_length(undefined,length(Val)),{octets,Val}].
+
+encode_known_multiplier_string(aligned,StringType,C,_Ext,{Name,Val}) when atom(Name) ->
+ encode_known_multiplier_string(aligned,StringType,C,false,Val);
+
+encode_known_multiplier_string(aligned,StringType,C,_Ext,Val) ->
+ Result = chars_encode(C,StringType,Val),
+ NumBits = get_NumBits(C,StringType),
+ case get_constraint(C,'SizeConstraint') of
+ Ub when integer(Ub), Ub*NumBits =< 16 ->
+ case {StringType,Result} of
+ {'BMPString',{octets,Ol}} ->
+ [{bits,8,Oct}||Oct <- Ol];
+ _ ->
+ Result
+ end;
+ 0 ->
+ [];
+ Ub when integer(Ub),Ub =<65535 -> % fixed length
+ [align,Result];
+ {Ub,Lb} ->
+ [encode_length({Ub,Lb},length(Val)),align,Result];
+ Vl when list(Vl) ->
+ [encode_length({lists:min(Vl),lists:max(Vl)},length(Val)),align,Result];
+ no ->
+ [encode_length(undefined,length(Val)),align,Result]
+ end.
+
+decode_restricted_string(Bytes,aligned) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ getoctets_as_list(Bytes2,Len).
+
+decode_known_multiplier_string(Bytes,aligned,StringType,C,_Ext) ->
+ NumBits = get_NumBits(C,StringType),
+ case get_constraint(C,'SizeConstraint') of
+ Ub when integer(Ub), Ub*NumBits =< 16 ->
+ chars_decode(Bytes,NumBits,StringType,C,Ub);
+ Ub when integer(Ub),Ub =<65535 -> % fixed length
+ Bytes1 = align(Bytes),
+ chars_decode(Bytes1,NumBits,StringType,C,Ub);
+ 0 ->
+ {[],Bytes};
+ Vl when list(Vl) ->
+ {Len,Bytes1} = decode_length(Bytes,{hd(Vl),lists:max(Vl)}),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,C,Len);
+ no ->
+ {Len,Bytes1} = decode_length(Bytes,undefined),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,C,Len);
+ {Lb,Ub}->
+ {Len,Bytes1} = decode_length(Bytes,{Lb,Ub}),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,C,Len)
+ end.
+
+
+encode_NumericString(C,Val) ->
+ encode_known_multiplier_string(aligned,'NumericString',C,false,Val).
+decode_NumericString(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'NumericString',C,false).
+
+encode_PrintableString(C,Val) ->
+ encode_known_multiplier_string(aligned,'PrintableString',C,false,Val).
+decode_PrintableString(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'PrintableString',C,false).
+
+encode_VisibleString(C,Val) -> % equivalent with ISO646String
+ encode_known_multiplier_string(aligned,'VisibleString',C,false,Val).
+decode_VisibleString(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'VisibleString',C,false).
+
+encode_IA5String(C,Val) ->
+ encode_known_multiplier_string(aligned,'IA5String',C,false,Val).
+decode_IA5String(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'IA5String',C,false).
+
+encode_BMPString(C,Val) ->
+ encode_known_multiplier_string(aligned,'BMPString',C,false,Val).
+decode_BMPString(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'BMPString',C,false).
+
+encode_UniversalString(C,Val) ->
+ encode_known_multiplier_string(aligned,'UniversalString',C,false,Val).
+decode_UniversalString(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'UniversalString',C,false).
+
+%% end of known-multiplier strings for which PER visible constraints are
+%% applied
+
+encode_GeneralString(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_GeneralString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_GraphicString(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_GraphicString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_ObjectDescriptor(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_ObjectDescriptor(Bytes) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_TeletexString(_C,Val) -> % equivalent with T61String
+ encode_restricted_string(aligned,Val).
+decode_TeletexString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_VideotexString(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_VideotexString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% getBMPChars(Bytes,Len) ->{BMPcharList,RemainingBytes}
+%%
+getBMPChars(Bytes,1) ->
+ {O1,Bytes2} = getbits(Bytes,8),
+ {O2,Bytes3} = getbits(Bytes2,8),
+ if
+ O1 == 0 ->
+ {[O2],Bytes3};
+ true ->
+ {[{0,0,O1,O2}],Bytes3}
+ end;
+getBMPChars(Bytes,Len) ->
+ getBMPChars(Bytes,Len,[]).
+
+getBMPChars(Bytes,0,Acc) ->
+ {lists:reverse(Acc),Bytes};
+getBMPChars(Bytes,Len,Acc) ->
+ {Octs,Bytes1} = getoctets_as_list(Bytes,2),
+ case Octs of
+ [0,O2] ->
+ getBMPChars(Bytes1,Len-1,[O2|Acc]);
+ [O1,O2]->
+ getBMPChars(Bytes1,Len-1,[{0,0,O1,O2}|Acc])
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% chars_encode(C,StringType,Value) -> ValueList
+%%
+%% encodes chars according to the PER rules, taking the PermittedAlphabet
+%% constraint into account.
+%% This function only encodes the value part, NOT the length.
+
+chars_encode(C,StringType,Value) ->
+ case {StringType,get_constraint(C,'PermittedAlphabet')} of
+ {'UniversalString',{_,_Sv}} ->
+ exit({error,{asn1,{'not implemented',"UniversalString with PermittedAlphabet constraint"}}});
+ {'BMPString',{_,_Sv}} ->
+ exit({error,{asn1,{'not implemented',"BMPString with PermittedAlphabet constraint"}}});
+ _ ->
+ {NumBits,CharOutTab} = {get_NumBits(C,StringType),get_CharOutTab(C,StringType)},
+ chars_encode2(Value,NumBits,CharOutTab)
+ end.
+
+chars_encode2([H|T],NumBits,{Min,Max,notab}) when H =< Max, H >= Min ->
+ [{bits,NumBits,H-Min}|chars_encode2(T,NumBits,{Min,Max,notab})];
+chars_encode2([H|T],NumBits,{Min,Max,Tab}) when H =< Max, H >= Min ->
+ [{bits,NumBits,exit_if_false(H,element(H-Min+1,Tab))}|chars_encode2(T,NumBits,{Min,Max,Tab})];
+chars_encode2([{A,B,C,D}|T],NumBits,{Min,Max,notab}) ->
+ %% no value range check here (ought to be, but very expensive)
+% [{bits,NumBits,(A*B*C*D)-Min}|chars_encode2(T,NumBits,{Min,Max,notab})];
+ [{bits,NumBits,((((((A bsl 8)+B) bsl 8)+C) bsl 8)+D)-Min}|chars_encode2(T,NumBits,{Min,Max,notab})];
+chars_encode2([{A,B,C,D}|T],NumBits,{Min,Max,Tab}) ->
+ %% no value range check here (ought to be, but very expensive)
+% [{bits,NumBits,element((A*B*C*D)-Min,Tab)}|chars_encode2(T,NumBits,{Min,Max,notab})];
+ [{bits,NumBits,exit_if_false({A,B,C,D},element(((((((A bsl 8)+B) bsl 8)+C) bsl 8)+D)-Min,Tab))}|chars_encode2(T,NumBits,{Min,Max,notab})];
+chars_encode2([H|_T],_,{_,_,_}) ->
+ exit({error,{asn1,{illegal_char_value,H}}});
+chars_encode2([],_,_) ->
+ [].
+
+exit_if_false(V,false)->
+ exit({error,{asn1,{"illegal value according to Permitted alphabet constraint",V}}});
+exit_if_false(_,V) ->V.
+
+
+get_NumBits(C,StringType) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ {'SingleValue',Sv} ->
+ charbits(length(Sv),aligned);
+ no ->
+ case StringType of
+ 'IA5String' ->
+ charbits(128,aligned); % 16#00..16#7F
+ 'VisibleString' ->
+ charbits(95,aligned); % 16#20..16#7E
+ 'PrintableString' ->
+ charbits(74,aligned); % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+ 'NumericString' ->
+ charbits(11,aligned); % $ ,"0123456789"
+ 'UniversalString' ->
+ 32;
+ 'BMPString' ->
+ 16
+ end
+ end.
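+
+%% Worked example (hypothetical helper): in the aligned variant the number of
+%% bits per character is rounded up to a power of two, so NumericString uses
+%% 4 bits and IA5String 8 bits per character.
+numbits_example() ->
+    4 = get_NumBits([],'NumericString'),
+    8 = get_NumBits([],'IA5String'),
+    ok.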
+
+%%Maybe used later
+%%get_MaxChar(C,StringType) ->
+%% case get_constraint(C,'PermittedAlphabet') of
+%% {'SingleValue',Sv} ->
+%% lists:nth(length(Sv),Sv);
+%% no ->
+%% case StringType of
+%% 'IA5String' ->
+%% 16#7F; % 16#00..16#7F
+%% 'VisibleString' ->
+%% 16#7E; % 16#20..16#7E
+%% 'PrintableString' ->
+%% $z; % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+%% 'NumericString' ->
+%% $9; % $ ,"0123456789"
+%% 'UniversalString' ->
+%% 16#ffffffff;
+%% 'BMPString' ->
+%% 16#ffff
+%% end
+%% end.
+
+%%Maybe used later
+%%get_MinChar(C,StringType) ->
+%% case get_constraint(C,'PermittedAlphabet') of
+%% {'SingleValue',Sv} ->
+%% hd(Sv);
+%% no ->
+%% case StringType of
+%% 'IA5String' ->
+%% 16#00; % 16#00..16#7F
+%% 'VisibleString' ->
+%% 16#20; % 16#20..16#7E
+%% 'PrintableString' ->
+%% $\s; % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+%% 'NumericString' ->
+%% $\s; % $ ,"0123456789"
+%% 'UniversalString' ->
+%% 16#00;
+%% 'BMPString' ->
+%% 16#00
+%% end
+%% end.
+
+get_CharOutTab(C,StringType) ->
+ get_CharTab(C,StringType,out).
+
+get_CharInTab(C,StringType) ->
+ get_CharTab(C,StringType,in).
+
+get_CharTab(C,StringType,InOut) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ {'SingleValue',Sv} ->
+ get_CharTab2(C,StringType,hd(Sv),lists:max(Sv),Sv,InOut);
+ no ->
+ case StringType of
+ 'IA5String' ->
+ {0,16#7F,notab};
+ 'VisibleString' ->
+ get_CharTab2(C,StringType,16#20,16#7F,notab,InOut);
+ 'PrintableString' ->
+ Chars = lists:sort(
+ " '()+,-./0123456789:=?ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"),
+ get_CharTab2(C,StringType,hd(Chars),lists:max(Chars),Chars,InOut);
+ 'NumericString' ->
+ get_CharTab2(C,StringType,16#20,$9," 0123456789",InOut);
+ 'UniversalString' ->
+ {0,16#FFFFFFFF,notab};
+ 'BMPString' ->
+ {0,16#FFFF,notab}
+ end
+ end.
+
+get_CharTab2(C,StringType,Min,Max,Chars,InOut) ->
+ BitValMax = (1 bsl get_NumBits(C,StringType))-1,
+ if
+ Max =< BitValMax ->
+ {0,Max,notab};
+ true ->
+ case InOut of
+ out ->
+ {Min,Max,create_char_tab(Min,Chars)};
+ in ->
+ {Min,Max,list_to_tuple(Chars)}
+ end
+ end.
+
+create_char_tab(Min,L) ->
+ list_to_tuple(create_char_tab(Min,L,0)).
+create_char_tab(Min,[Min|T],V) ->
+ [V|create_char_tab(Min+1,T,V+1)];
+create_char_tab(_Min,[],_V) ->
+ [];
+create_char_tab(Min,L,V) ->
+ [false|create_char_tab(Min+1,L,V)].
+
+%% This is very inefficient and should be moved to compile time
+charbits(NumOfChars,aligned) ->
+ case charbits(NumOfChars) of
+ 1 -> 1;
+ 2 -> 2;
+ B when B =< 4 -> 4;
+ B when B =< 8 -> 8;
+ B when B =< 16 -> 16;
+ B when B =< 32 -> 32
+ end.
+
+charbits(NumOfChars) when NumOfChars =< 2 -> 1;
+charbits(NumOfChars) when NumOfChars =< 4 -> 2;
+charbits(NumOfChars) when NumOfChars =< 8 -> 3;
+charbits(NumOfChars) when NumOfChars =< 16 -> 4;
+charbits(NumOfChars) when NumOfChars =< 32 -> 5;
+charbits(NumOfChars) when NumOfChars =< 64 -> 6;
+charbits(NumOfChars) when NumOfChars =< 128 -> 7;
+charbits(NumOfChars) when NumOfChars =< 256 -> 8;
+charbits(NumOfChars) when NumOfChars =< 512 -> 9;
+charbits(NumOfChars) when NumOfChars =< 1024 -> 10;
+charbits(NumOfChars) when NumOfChars =< 2048 -> 11;
+charbits(NumOfChars) when NumOfChars =< 4096 -> 12;
+charbits(NumOfChars) when NumOfChars =< 8192 -> 13;
+charbits(NumOfChars) when NumOfChars =< 16384 -> 14;
+charbits(NumOfChars) when NumOfChars =< 32768 -> 15;
+charbits(NumOfChars) when NumOfChars =< 65536 -> 16;
+charbits(NumOfChars) when integer(NumOfChars) ->
+ 16 + charbits1(NumOfChars bsr 16).
+
+charbits1(0) ->
+ 0;
+charbits1(NumOfChars) ->
+ 1 + charbits1(NumOfChars bsr 1).
+
+
+chars_decode(Bytes,_,'BMPString',C,Len) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ no ->
+ getBMPChars(Bytes,Len);
+ _ ->
+ exit({error,{asn1,
+ {'not implemented',
+ "BMPString with PermittedAlphabet constraint"}}})
+ end;
+chars_decode(Bytes,NumBits,StringType,C,Len) ->
+ CharInTab = get_CharInTab(C,StringType),
+ chars_decode2(Bytes,CharInTab,NumBits,Len).
+
+
+chars_decode2(Bytes,CharInTab,NumBits,Len) ->
+ chars_decode2(Bytes,CharInTab,NumBits,Len,[]).
+
+chars_decode2(Bytes,_CharInTab,_NumBits,0,Acc) ->
+ {lists:reverse(Acc),Bytes};
+chars_decode2(Bytes,{Min,Max,notab},NumBits,Len,Acc) when NumBits > 8 ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ Result =
+ if
+ Char < 256 -> Char;
+ true ->
+ list_to_tuple(binary_to_list(<<Char:32>>))
+ end,
+ chars_decode2(Bytes2,{Min,Max,notab},NumBits,Len -1,[Result|Acc]);
+% chars_decode2(Bytes,{Min,Max,notab},NumBits,Len,Acc) when NumBits > 8 ->
+% {Char,Bytes2} = getbits(Bytes,NumBits),
+% Result = case minimum_octets(Char+Min) of
+% [NewChar] -> NewChar;
+% [C1,C2] -> {0,0,C1,C2};
+% [C1,C2,C3] -> {0,C1,C2,C3};
+% [C1,C2,C3,C4] -> {C1,C2,C3,C4}
+% end,
+% chars_decode2(Bytes2,{Min,Max,notab},NumBits,Len -1,[Result|Acc]);
+chars_decode2(Bytes,{Min,Max,notab},NumBits,Len,Acc) ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ chars_decode2(Bytes2,{Min,Max,notab},NumBits,Len -1,[Char+Min|Acc]);
+
+%% BMPString and UniversalString with PermittedAlphabet are currently not supported
+chars_decode2(Bytes,{Min,Max,CharInTab},NumBits,Len,Acc) ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ chars_decode2(Bytes2,{Min,Max,CharInTab},NumBits,Len -1,[element(Char+1,CharInTab)|Acc]).
+
+
+ % X.691:17
+encode_null(_) -> []; % encodes to nothing
+encode_null({Name,Val}) when atom(Name) ->
+ encode_null(Val).
+
+decode_null(Bytes) ->
+ {'NULL',Bytes}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_object_identifier(Val) -> CompleteList
+%% encode_object_identifier({Name,Val}) -> CompleteList
+%% Val -> {Int1,Int2,...,IntN} % N >= 2
+%% Name -> atom()
+%% Int1 -> integer(0..2)
+%% Int2 -> integer(0..39) when Int1 (0..1) else integer()
+%% Int3-N -> integer()
+%% CompleteList -> [{bits,8,Val}|{octets,Ol}|align|...]
+%%
+encode_object_identifier({Name,Val}) when atom(Name) ->
+ encode_object_identifier(Val);
+encode_object_identifier(Val) ->
+ OctetList = e_object_identifier(Val),
+ Octets = list_to_binary(OctetList), % performs a flatten at the same time
+ [{debug,object_identifier},encode_length(undefined,size(Octets)),{octets,Octets}].
+
+%% This code is copied from asn1_encode.erl (BER) and corrected and modified
+
+e_object_identifier({'OBJECT IDENTIFIER',V}) ->
+ e_object_identifier(V);
+e_object_identifier({Cname,V}) when atom(Cname),tuple(V) ->
+ e_object_identifier(tuple_to_list(V));
+e_object_identifier({Cname,V}) when atom(Cname),list(V) ->
+ e_object_identifier(V);
+e_object_identifier(V) when tuple(V) ->
+ e_object_identifier(tuple_to_list(V));
+
+%% E1 = 0|1|2 and (E2 < 40 when E1 = 0|1)
+e_object_identifier([E1,E2|Tail]) when E1 >= 0, E1 < 2, E2 < 40 ; E1==2 ->
+ Head = 40*E1 + E2, % weird
+ e_object_elements([Head|Tail],[]);
+e_object_identifier(Oid=[_,_|_Tail]) ->
+ exit({error,{asn1,{'illegal_value',Oid}}}).
+
+e_object_elements([],Acc) ->
+ lists:reverse(Acc);
+e_object_elements([H|T],Acc) ->
+ e_object_elements(T,[e_object_element(H)|Acc]).
+
+e_object_element(Num) when Num < 128 ->
+ Num;
+%% must be changed to handle more than 2 octets
+e_object_element(Num) -> %% when Num < ???
+ Left = ((Num band 2#11111110000000) bsr 7) bor 2#10000000,
+ Right = Num band 2#1111111 ,
+ [Left,Right].
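+
+%% Worked example (hypothetical helper): the first two arcs of the OID are
+%% folded into the single subidentifier 40*E1+E2 before the octets are
+%% emitted.
+object_identifier_example() ->
+    [43,6,1] = e_object_identifier({1,3,6,1}),
+    ok.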
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_object_identifier(Bytes) -> {ObjId,RemainingBytes}
+%% ObjId -> {integer(),integer(),...} % at least 2 integers
+%% RemainingBytes -> [integer()], each integer in the range 0..255
+decode_object_identifier(Bytes) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ [First|Rest] = dec_subidentifiers(Octs,0,[]),
+ Idlist = if
+ First < 40 ->
+ [0,First|Rest];
+ First < 80 ->
+ [1,First - 40|Rest];
+ true ->
+ [2,First - 80|Rest]
+ end,
+ {list_to_tuple(Idlist),Bytes3}.
+
+dec_subidentifiers([H|T],Av,Al) when H >=16#80 ->
+ dec_subidentifiers(T,(Av bsl 7) + (H band 16#7F),Al);
+dec_subidentifiers([H|T],Av,Al) ->
+ dec_subidentifiers(T,0,[(Av bsl 7) + H |Al]);
+dec_subidentifiers([],_Av,Al) ->
+ lists:reverse(Al).
+
+get_constraint([{Key,V}],Key) ->
+ V;
+get_constraint([],_Key) ->
+ no;
+get_constraint(C,Key) ->
+ case lists:keysearch(Key,1,C) of
+ false ->
+ no;
+ {value,{_,V}} ->
+ V
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% complete(InList) -> ByteList
+%% Takes a coded list with bits and bytes and converts it to a list of bytes.
+%% Should be applied as the last step when encoding a complete ASN.1 type.
+%%
+
+% complete(L) ->
+% case complete1(L) of
+% {[],0} ->
+% <<0>>;
+% {Acc,0} ->
+% lists:reverse(Acc);
+% {[Hacc|Tacc],Acclen} -> % Acclen >0
+% Rest = 8 - Acclen,
+% NewHacc = Hacc bsl Rest,
+% lists:reverse([NewHacc|Tacc])
+% end.
+
+
+% complete1(InList) when list(InList) ->
+% complete1(InList,[]);
+% complete1(InList) ->
+% complete1([InList],[]).
+
+% complete1([{debug,_}|T], Acc) ->
+% complete1(T,Acc);
+% complete1([H|T],Acc) when list(H) ->
+% {NewH,NewAcclen} = complete1(H,Acc),
+% complete1(T,NewH,NewAcclen);
+
+% complete1([{0,Bin}|T],Acc,0) when binary(Bin) ->
+% complete1(T,[Bin|Acc],0);
+% complete1([{Unused,Bin}|T],Acc,0) when integer(Unused),binary(Bin) ->
+% Size = size(Bin)-1,
+% <<Bs:Size/binary,B>> = Bin,
+% complete1(T,[(B bsr Unused),Bs|Acc],8-Unused);
+% complete1([{Unused,Bin}|T],[Hacc|Tacc],Acclen) when integer(Unused),binary(Bin) ->
+% Rest = 8 - Acclen,
+% Used = 8 - Unused,
+% case size(Bin) of
+% 1 ->
+% if
+% Rest >= Used ->
+% <<B:Used,_:Unused>> = Bin,
+% complete1(T,[(Hacc bsl Used) + B|Tacc],
+% (Acclen+Used) rem 8);
+% true ->
+% LeftOver = 8 - Rest - Unused,
+% <<Val2:Rest,Val1:LeftOver,_:Unused>> = Bin,
+% complete1(T,[Val1,(Hacc bsl Rest) + Val2|Tacc],
+% (Acclen+Used) rem 8)
+% end;
+% N ->
+% if
+% Rest == Used ->
+% N1 = N - 1,
+% <<B:Rest,Bs:N1/binary,_:Unused>> = Bin,
+% complete1(T,[Bs,(Hacc bsl Rest) + B|Tacc],0);
+% Rest > Used ->
+% N1 = N - 2,
+% N2 = (8 - Rest) + Used,
+% <<B1:Rest,Bytes:N1/binary,B2:N2,_:Unused>> = Bin,
+% complete1(T,[B2,Bytes,(Hacc bsl Rest) + B1|Tacc],
+% (Acclen + Used) rem 8);
+% true -> % Rest < Used
+% N1 = N - 1,
+% N2 = Used - Rest,
+% <<B1:Rest,Bytes:N1/binary,B2:N2,_:Unused>> = Bin,
+% complete1(T,[B2,Bytes,(Hacc bsl Rest) + B1|Tacc],
+% (Acclen + Used) rem 8)
+% end
+% end;
+
+% %complete1([{octets,N,Val}|T],Acc,Acclen) when N =< 4 ,integer(Val) ->
+% % complete1([{octets,<<Val:N/unit:8>>}|T],Acc,Acclen);
+% complete1([{octets,N,Val}|T],Acc,Acclen) when N =< 4 ,integer(Val) ->
+% Newval = case N of
+% 1 ->
+% Val4 = Val band 16#FF,
+% [Val4];
+% 2 ->
+% Val3 = (Val bsr 8) band 16#FF,
+% Val4 = Val band 16#FF,
+% [Val3,Val4];
+% 3 ->
+% Val2 = (Val bsr 16) band 16#FF,
+% Val3 = (Val bsr 8) band 16#FF,
+% Val4 = Val band 16#FF,
+% [Val2,Val3,Val4];
+% 4 ->
+% Val1 = (Val bsr 24) band 16#FF,
+% Val2 = (Val bsr 16) band 16#FF,
+% Val3 = (Val bsr 8) band 16#FF,
+% Val4 = Val band 16#FF,
+% [Val1,Val2,Val3,Val4]
+% end,
+% complete1([{octets,Newval}|T],Acc,Acclen);
+
+% complete1([{octets,Bin}|T],Acc,Acclen) when binary(Bin) ->
+% Rest = 8 - Acclen,
+% if
+% Rest == 8 ->
+% complete1(T,[Bin|Acc],0);
+% true ->
+% [Hacc|Tacc]=Acc,
+% complete1(T,[Bin, Hacc bsl Rest|Tacc],0)
+% end;
+
+% complete1([{octets,Oct}|T],Acc,Acclen) when list(Oct) ->
+% Rest = 8 - Acclen,
+% if
+% Rest == 8 ->
+% complete1(T,[list_to_binary(Oct)|Acc],0);
+% true ->
+% [Hacc|Tacc]=Acc,
+% complete1(T,[list_to_binary(Oct), Hacc bsl Rest|Tacc],0)
+% end;
+
+% complete1([{bit,Val}|T], Acc, Acclen) ->
+% complete1([{bits,1,Val}|T],Acc,Acclen);
+% complete1([{octet,Val}|T], Acc, Acclen) ->
+% complete1([{octets,1,Val}|T],Acc,Acclen);
+
+% complete1([{bits,N,Val}|T], Acc, 0) when N =< 8 ->
+% complete1(T,[Val|Acc],N);
+% complete1([{bits,N,Val}|T], [Hacc|Tacc], Acclen) when N =< 8 ->
+% Rest = 8 - Acclen,
+% if
+% Rest >= N ->
+% complete1(T,[(Hacc bsl N) + Val|Tacc],(Acclen+N) rem 8);
+% true ->
+% Diff = N - Rest,
+% NewHacc = (Hacc bsl Rest) + (Val bsr Diff),
+% Mask = element(Diff,{1,3,7,15,31,63,127,255}),
+% complete1(T,[(Val band Mask),NewHacc|Tacc],(Acclen+N) rem 8)
+% end;
+% complete1([{bits,N,Val}|T], Acc, Acclen) -> % N > 8
+% complete1([{bits,N-8,Val bsr 8},{bits,8,Val band 255}|T],Acc,Acclen);
+
+% complete1([align|T],Acc,0) ->
+% complete1(T,Acc,0);
+% complete1([align|T],[Hacc|Tacc],Acclen) ->
+% Rest = 8 - Acclen,
+% complete1(T,[Hacc bsl Rest|Tacc],0);
+% complete1([{octets,N,Val}|T],Acc,Acclen) when list(Val) -> % no security check here
+% complete1([{octets,Val}|T],Acc,Acclen);
+
+% complete1([],Acc,Acclen) ->
+% {Acc,Acclen}.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% complete(InList) -> ByteList
+%% Takes a coded list with bits and bytes and converts it to a list of bytes.
+%% Should be applied as the last step when encoding a complete ASN.1 type.
+%%
+
+complete(L) ->
+ case complete1(L) of
+ {[],[]} ->
+ <<0>>;
+ {Acc,[]} ->
+ Acc;
+ {Acc,Bacc} ->
+ [Acc|complete_bytes(Bacc)]
+ end.
+
+%% This function builds the "ugly" improper list form [E1|E2] to avoid having
+%% to reverse it at the end. This is efficient because the result will always
+%% be sent to a port or converted by means of list_to_binary/1.
+complete1(InList) when list(InList) ->
+ complete1(InList,[],[]);
+complete1(InList) ->
+ complete1([InList],[],[]).
+
+complete1([],Acc,Bacc) ->
+ {Acc,Bacc};
+complete1([H|T],Acc,Bacc) when list(H) ->
+ {NewH,NewBacc} = complete1(H,Acc,Bacc),
+ complete1(T,NewH,NewBacc);
+
+complete1([{octets,Bin}|T],Acc,[]) ->
+ complete1(T,[Acc|Bin],[]);
+
+complete1([{octets,Bin}|T],Acc,Bacc) ->
+ complete1(T,[Acc|[complete_bytes(Bacc),Bin]],[]);
+
+complete1([{debug,_}|T], Acc,Bacc) ->
+ complete1(T,Acc,Bacc);
+
+complete1([{bits,N,Val}|T],Acc,Bacc) ->
+ complete1(T,Acc,complete_update_byte(Bacc,Val,N));
+
+complete1([{bit,Val}|T],Acc,Bacc) ->
+ complete1(T,Acc,complete_update_byte(Bacc,Val,1));
+
+complete1([align|T],Acc,[]) ->
+ complete1(T,Acc,[]);
+complete1([align|T],Acc,Bacc) ->
+ complete1(T,[Acc|complete_bytes(Bacc)],[]);
+complete1([{0,Bin}|T],Acc,[]) when binary(Bin) ->
+ complete1(T,[Acc|Bin],[]);
+complete1([{Unused,Bin}|T],Acc,[]) when integer(Unused),binary(Bin) ->
+ Size = size(Bin)-1,
+ <<Bs:Size/binary,B>> = Bin,
+ NumBits = 8-Unused,
+ complete1(T,[Acc|Bs],[[B bsr Unused]|NumBits]);
+complete1([{Unused,Bin}|T],Acc,Bacc) when integer(Unused),binary(Bin) ->
+ Size = size(Bin)-1,
+ <<Bs:Size/binary,B>> = Bin,
+ NumBits = 8 - Unused,
+ Bf = complete_bytes(Bacc),
+ complete1(T,[Acc|[Bf,Bs]],[[B bsr Unused]|NumBits]).
+
+
+complete_update_byte([],Val,Len) ->
+ complete_update_byte([[0]|0],Val,Len);
+complete_update_byte([[Byte|Bacc]|NumBits],Val,Len) when NumBits + Len == 8 ->
+ [[0,((Byte bsl Len) + Val) band 255|Bacc]|0];
+complete_update_byte([[Byte|Bacc]|NumBits],Val,Len) when NumBits + Len > 8 ->
+ Rem = 8 - NumBits,
+ Rest = Len - Rem,
+ complete_update_byte([[0,((Byte bsl Rem) + (Val bsr Rest)) band 255 |Bacc]|0],Val,Rest);
+complete_update_byte([[Byte|Bacc]|NumBits],Val,Len) ->
+ [[((Byte bsl Len) + Val) band 255|Bacc]|NumBits+Len].
+
+
+complete_bytes([[_Byte|Bacc]|0]) ->
+ lists:reverse(Bacc);
+complete_bytes([[Byte|Bacc]|NumBytes]) ->
+ lists:reverse([(Byte bsl (8-NumBytes)) band 255|Bacc]);
+complete_bytes([]) ->
+ [].
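+
+%% Worked example (hypothetical helper): complete/1 packs the intermediate
+%% {bits,_,_}/{octets,_}/align format into an iolist of whole octets, zero
+%% padding the last partial byte at an alignment point.
+complete_example() ->
+    <<16#25,16#20,16#2C>> =
+        list_to_binary([complete([{bits,3,1},{bits,5,5},
+                                  {bits,4,2},align,{octets,[16#2C]}])]),
+    ok.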
+
+% complete_bytes(L) ->
+% complete_bytes1(lists:reverse(L),[],[],0,0).
+
+% complete_bytes1([H={V,B}|T],Acc,ReplyAcc,NumBits,NumFields) when ((NumBits+B) rem 8) == 0 ->
+% NewReplyAcc = [complete_bytes2([H|Acc],0)|ReplyAcc],
+% complete_bytes1(T,[],NewReplyAcc,0,0);
+% complete_bytes1([H={V,B}|T],Acc,ReplyAcc,NumBits,NumFields) when NumFields == 7; (NumBits+B) div 8 > 0 ->
+% Rem = (NumBits+B) rem 8,
+% NewReplyAcc = [complete_bytes2([{V bsr Rem,B - Rem}|Acc],0)|ReplyAcc],
+% complete_bytes1([{V,Rem}|T],[],NewReplyAcc,0,0);
+% complete_bytes1([H={V,B}|T],Acc,ReplyAcc,NumBits,NumFields) ->
+% complete_bytes1(T,[H|Acc],ReplyAcc,NumBits+B,NumFields+1);
+% complete_bytes1([],[],ReplyAcc,_,_) ->
+% lists:reverse(ReplyAcc);
+% complete_bytes1([],Acc,ReplyAcc,NumBits,_) ->
+% PadBits = case NumBits rem 8 of
+% 0 -> 0;
+% Rem -> 8 - Rem
+% end,
+% lists:reverse([complete_bytes2(Acc,PadBits)|ReplyAcc]).
+
+
+% complete_bytes2([{V1,B1}],PadBits) ->
+% <<V1:B1,0:PadBits>>;
+% complete_bytes2([{V2,B2},{V1,B1}],PadBits) ->
+% <<V1:B1,V2:B2,0:PadBits>>;
+% complete_bytes2([{V3,B3},{V2,B2},{V1,B1}],PadBits) ->
+% <<V1:B1,V2:B2,V3:B3,0:PadBits>>;
+% complete_bytes2([{V4,B4},{V3,B3},{V2,B2},{V1,B1}],PadBits) ->
+% <<V1:B1,V2:B2,V3:B3,V4:B4,0:PadBits>>;
+% complete_bytes2([{V5,B5},{V4,B4},{V3,B3},{V2,B2},{V1,B1}],PadBits) ->
+% <<V1:B1,V2:B2,V3:B3,V4:B4,V5:B5,0:PadBits>>;
+% complete_bytes2([{V6,B6},{V5,B5},{V4,B4},{V3,B3},{V2,B2},{V1,B1}],PadBits) ->
+% <<V1:B1,V2:B2,V3:B3,V4:B4,V5:B5,V6:B6,0:PadBits>>;
+% complete_bytes2([{V7,B7},{V6,B6},{V5,B5},{V4,B4},{V3,B3},{V2,B2},{V1,B1}],PadBits) ->
+% <<V1:B1,V2:B2,V3:B3,V4:B4,V5:B5,V6:B6,V7:B7,0:PadBits>>;
+% complete_bytes2([{V8,B8},{V7,B7},{V6,B6},{V5,B5},{V4,B4},{V3,B3},{V2,B2},{V1,B1}],PadBits) ->
+% <<V1:B1,V2:B2,V3:B3,V4:B4,V5:B5,V6:B6,V7:B7,V8:B8,0:PadBits>>.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl
new file mode 100644
index 0000000000..9f02ad4466
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl
@@ -0,0 +1,2102 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1rt_per_bin_rt2ct.erl,v 1.1 2008/12/17 09:53:31 mikpe Exp $
+%%
+-module(asn1rt_per_bin_rt2ct).
+
+%% encoding / decoding of PER aligned
+
+-include("asn1_records.hrl").
+
+-export([dec_fixup/3, cindex/3, list_to_record/2]).
+-export([setchoiceext/1, setext/1, fixoptionals/3, fixextensions/2,
+ getext/1, getextension/2, skipextensions/3, getbit/1, getchoice/3 ]).
+-export([getoptionals/2, getoptionals2/2,
+ set_choice/3, encode_integer/2, encode_integer/3 ]).
+-export([decode_integer/2, decode_integer/3, encode_small_number/1,
+ decode_boolean/1, encode_length/2, decode_length/1, decode_length/2,
+ encode_small_length/1, decode_small_length/1,
+ decode_compact_bit_string/3]).
+-export([decode_enumerated/3,
+ encode_bit_string/3, decode_bit_string/3 ]).
+-export([encode_octet_string/2, decode_octet_string/2,
+ encode_null/1, decode_null/1,
+ encode_object_identifier/1, decode_object_identifier/1,
+ complete/1]).
+
+
+-export([encode_open_type/2, decode_open_type/2]).
+
+-export([%encode_UniversalString/2, decode_UniversalString/2,
+ %encode_PrintableString/2, decode_PrintableString/2,
+ encode_GeneralString/2, decode_GeneralString/2,
+ encode_GraphicString/2, decode_GraphicString/2,
+ encode_TeletexString/2, decode_TeletexString/2,
+ encode_VideotexString/2, decode_VideotexString/2,
+ %encode_VisibleString/2, decode_VisibleString/2,
+ %encode_BMPString/2, decode_BMPString/2,
+ %encode_IA5String/2, decode_IA5String/2,
+ %encode_NumericString/2, decode_NumericString/2,
+ encode_ObjectDescriptor/2, decode_ObjectDescriptor/1
+ ]).
+
+-export([decode_constrained_number/2,
+ decode_constrained_number/3,
+ decode_unconstrained_number/1,
+ decode_semi_constrained_number/2,
+ encode_unconstrained_number/1,
+ decode_constrained_number/4,
+ encode_octet_string/3,
+ decode_octet_string/3,
+ encode_known_multiplier_string/5,
+ decode_known_multiplier_string/5,
+ getoctets/2, getbits/2
+% start_drv/1,start_drv2/1,init_drv/1
+ ]).
+
+
+-export([eint_positive/1]).
+-export([pre_complete_bits/2]).
+
+-define('16K',16384).
+-define('32K',32768).
+-define('64K',65536).
+
+%%-define(nodriver,true).
+
+dec_fixup(Terms,Cnames,RemBytes) ->
+ dec_fixup(Terms,Cnames,RemBytes,[]).
+
+dec_fixup([novalue|T],[_Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,Acc);
+dec_fixup([{_Name,novalue}|T],[_Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,Acc);
+dec_fixup([H|T],[Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,[{Hc,H}|Acc]);
+dec_fixup([],_Cnames,RemBytes,Acc) ->
+ {lists:reverse(Acc),RemBytes}.
+
+cindex(Ix,Val,Cname) ->
+ case element(Ix,Val) of
+ {Cname,Val2} -> Val2;
+ X -> X
+ end.
+
+%% converts a list to a record if necessary
+list_to_record(_,Tuple) when tuple(Tuple) ->
+ Tuple;
+list_to_record(Name,List) when list(List) ->
+ list_to_tuple([Name|List]).
+
+%%--------------------------------------------------------
+%% setchoiceext(InRootSet) -> [{bit,X}]
+%% X is set to 1 when InRootSet==false
+%% X is set to 0 when InRootSet==true
+%%
+setchoiceext(true) ->
+% [{debug,choiceext},{bits,1,0}];
+ [0];
+setchoiceext(false) ->
+% [{debug,choiceext},{bits,1,1}].
+ [1].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% setext(true|false) -> CompleteList
+%%
+
+setext(false) ->
+% [{debug,ext},{bits,1,0}];
+ [0];
+setext(true) ->
+% [{debug,ext},{bits,1,1}];
+ [1].
+
+fixoptionals(OptList,_OptLength,Val) when tuple(Val) ->
+% Bits = fixoptionals(OptList,Val,0),
+% {Val,{bits,OptLength,Bits}};
+% {Val,[10,OptLength,Bits]};
+ {Val,fixoptionals(OptList,Val,[])};
+
+fixoptionals([],_,Acc) ->
+ %% Optbits
+ lists:reverse(Acc);
+fixoptionals([Pos|Ot],Val,Acc) ->
+ case element(Pos,Val) of
+% asn1_NOVALUE -> fixoptionals(Ot,Val,Acc bsl 1);
+% asn1_DEFAULT -> fixoptionals(Ot,Val,Acc bsl 1);
+% _ -> fixoptionals(Ot,Val,(Acc bsl 1) + 1)
+ asn1_NOVALUE -> fixoptionals(Ot,Val,[0|Acc]);
+ asn1_DEFAULT -> fixoptionals(Ot,Val,[0|Acc]);
+ _ -> fixoptionals(Ot,Val,[1|Acc])
+ end.
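+
+%% A hand-worked example (illustrative only; the record 'Seq' and the optional
+%% field positions 2 and 4 are made up, not taken from any generated module):
+%%   fixoptionals([2,4], 2, {'Seq',asn1_NOVALUE,17,true}) ->
+%%       {{'Seq',asn1_NOVALUE,17,true},[0,1]}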
+
+
+getext(Bytes) when tuple(Bytes) ->
+ getbit(Bytes);
+getext(Bytes) when binary(Bytes) ->
+ getbit({0,Bytes});
+getext(Bytes) when list(Bytes) ->
+ getbit({0,Bytes}).
+
+getextension(0, Bytes) ->
+ {{},Bytes};
+getextension(1, Bytes) ->
+ {Len,Bytes2} = decode_small_length(Bytes),
+ {Blist, Bytes3} = getbits_as_list(Len,Bytes2),
+ {list_to_tuple(Blist),Bytes3}.
+
+fixextensions({ext,ExtPos,ExtNum},Val) ->
+ case fixextensions(ExtPos,ExtNum+ExtPos,Val,0) of
+ 0 -> [];
+ ExtBits ->
+% [encode_small_length(ExtNum),{bits,ExtNum,ExtBits}]
+% [encode_small_length(ExtNum),[10,ExtNum,ExtBits]]
+ [encode_small_length(ExtNum),pre_complete_bits(ExtNum,ExtBits)]
+ end.
+
+fixextensions(Pos,MaxPos,_,Acc) when Pos >= MaxPos ->
+ Acc;
+fixextensions(Pos,ExtPos,Val,Acc) ->
+ Bit = case catch(element(Pos+1,Val)) of
+ asn1_NOVALUE ->
+ 0;
+ asn1_NOEXTVALUE ->
+ 0;
+ {'EXIT',_} ->
+ 0;
+ _ ->
+ 1
+ end,
+ fixextensions(Pos+1,ExtPos,Val,(Acc bsl 1)+Bit).
+
+skipextensions(Bytes,Nr,ExtensionBitPattern) ->
+ case (catch element(Nr,ExtensionBitPattern)) of
+ 1 ->
+ {_,Bytes2} = decode_open_type(Bytes,[]),
+ skipextensions(Bytes2, Nr+1, ExtensionBitPattern);
+ 0 ->
+ skipextensions(Bytes, Nr+1, ExtensionBitPattern);
+ {'EXIT',_} -> % badarg, no more extensions
+ Bytes
+ end.
+
+
+getchoice(Bytes,1,0) -> % only 1 alternative is not encoded
+ {0,Bytes};
+getchoice(Bytes,_,1) ->
+ decode_small_number(Bytes);
+getchoice(Bytes,NumChoices,0) ->
+ decode_constrained_number(Bytes,{0,NumChoices-1}).
+
+%% old version kept for backward compatibility with code generated by R7B01
+getoptionals(Bytes,NumOpt) ->
+ {Blist,Bytes1} = getbits_as_list(NumOpt,Bytes),
+ {list_to_tuple(Blist),Bytes1}.
+
+%% new version used in code generated from r8b_patch/3 and later
+getoptionals2(Bytes,NumOpt) ->
+ {_,_} = getbits(Bytes,NumOpt).
+
+
+%% getbits_as_binary(Num,Bytes) -> {{Unused,BinBits},RestBytes},
+%% Num = integer(),
+%% Bytes = list() | tuple(),
+%% Unused = integer(),
+%% BinBits = binary(),
+%% RestBytes = tuple()
+getbits_as_binary(Num,Bytes) when binary(Bytes) ->
+ getbits_as_binary(Num,{0,Bytes});
+getbits_as_binary(0,Buffer) ->
+ {{0,<<>>},Buffer};
+getbits_as_binary(Num,{0,Bin}) when Num > 16 ->
+ Used = Num rem 8,
+ Pad = (8 - Used) rem 8,
+%% Nbytes = Num div 8,
+ <<Bits:Num,_:Pad,RestBin/binary>> = Bin,
+ {{Pad,<<Bits:Num,0:Pad>>},RestBin};
+getbits_as_binary(Num,Buffer={_Used,_Bin}) -> % Unaligned buffer
+ %% Num =< 16,
+ {Bits2,Buffer2} = getbits(Buffer,Num),
+ Pad = (8 - (Num rem 8)) rem 8,
+ {{Pad,<<Bits2:Num,0:Pad>>},Buffer2}.
+
+
+% integer_from_list(Int,[],BigInt) ->
+% BigInt;
+% integer_from_list(Int,[H|T],BigInt) when Int < 8 ->
+% (BigInt bsl Int) bor (H bsr (8-Int));
+% integer_from_list(Int,[H|T],BigInt) ->
+% integer_from_list(Int-8,T,(BigInt bsl 8) bor H).
+
+getbits_as_list(Num,Bytes) when binary(Bytes) ->
+ getbits_as_list(Num,{0,Bytes},[]);
+getbits_as_list(Num,Bytes) ->
+ getbits_as_list(Num,Bytes,[]).
+
+%% When no more bits are to be picked, return the accumulated bits and the buffer.
+getbits_as_list(0, B, Acc) ->
+ {lists:reverse(Acc),B};
+%% If the first byte in the buffer is completely unused and at least one
+%% whole byte will be picked, then pick one byte.
+getbits_as_list(N,{0,Bin},Acc) when N >= 8 ->
+ <<B7:1,B6:1,B5:1,B4:1,B3:1,B2:1,B1:1,B0:1,Rest/binary>> = Bin,
+ getbits_as_list(N-8,{0,Rest},[B0,B1,B2,B3,B4,B5,B6,B7|Acc]);
+getbits_as_list(N,{Used,Bin},Acc) when N >= 4, Used =< 4 ->
+ NewUsed = Used + 4,
+ Rem = 8 - NewUsed,
+ <<_:Used,B3:1,B2:1,B1:1,B0:1,_:Rem, Rest/binary>> = Bin,
+ NewRest = case Rem of 0 -> Rest; _ -> Bin end,
+ getbits_as_list(N-4,{NewUsed rem 8,NewRest},[B0,B1,B2,B3|Acc]);
+getbits_as_list(N,{Used,Bin},Acc) when N >= 2, Used =< 6 ->
+ NewUsed = Used + 2,
+ Rem = 8 - NewUsed,
+ <<_:Used,B1:1,B0:1,_:Rem, Rest/binary>> = Bin,
+ NewRest = case Rem of 0 -> Rest; _ -> Bin end,
+ getbits_as_list(N-2,{NewUsed rem 8,NewRest},[B0,B1|Acc]);
+getbits_as_list(N,{Used,Bin},Acc) when Used =< 7 ->
+ NewUsed = Used + 1,
+ Rem = 8 - NewUsed,
+ <<_:Used,B0:1,_:Rem, Rest/binary>> = Bin,
+ NewRest = case Rem of 0 -> Rest; _ -> Bin end,
+ getbits_as_list(N-1,{NewUsed rem 8,NewRest},[B0|Acc]).
+
+
+getbit({7,<<_:7,B:1,Rest/binary>>}) ->
+ {B,{0,Rest}};
+getbit({0,Buffer = <<B:1,_:7,_/binary>>}) ->
+ {B,{1,Buffer}};
+getbit({Used,Buffer}) ->
+ Unused = (8 - Used) - 1,
+ <<_:Used,B:1,_:Unused,_/binary>> = Buffer,
+ {B,{Used+1,Buffer}};
+getbit(Buffer) when binary(Buffer) ->
+ getbit({0,Buffer}).
+
+
+getbits({0,Buffer},Num) when (Num rem 8) == 0 ->
+ <<Bits:Num,Rest/binary>> = Buffer,
+ {Bits,{0,Rest}};
+getbits({Used,Bin},Num) ->
+ NumPlusUsed = Num + Used,
+ NewUsed = NumPlusUsed rem 8,
+ Unused = (8-NewUsed) rem 8,
+ case Unused of
+ 0 ->
+ <<_:Used,Bits:Num,Rest/binary>> = Bin,
+ {Bits,{0,Rest}};
+ _ ->
+ Bytes = NumPlusUsed div 8,
+ <<_:Used,Bits:Num,_:Unused,_/binary>> = Bin,
+ <<_:Bytes/binary,Rest/binary>> = Bin,
+ {Bits,{NewUsed,Rest}}
+ end;
+getbits(Bin,Num) when binary(Bin) ->
+ getbits({0,Bin},Num).
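+
+%% Hand-worked examples of getbits/2 on an aligned and an unaligned buffer
+%% (illustrative only):
+%%   getbits({0,<<16#B7,16#FF>>}, 3) -> {2#101,   {3,<<16#B7,16#FF>>}}
+%%   getbits({3,<<16#B7,16#FF>>}, 5) -> {2#10111, {0,<<16#FF>>}}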
+
+
+
+% getoctet(Bytes) when list(Bytes) ->
+% getoctet({0,Bytes});
+% getoctet(Bytes) ->
+% %% io:format("getoctet:Buffer = ~p~n",[Bytes]),
+% getoctet1(Bytes).
+
+% getoctet1({0,[H|T]}) ->
+% {H,{0,T}};
+% getoctet1({Pos,[_,H|T]}) ->
+% {H,{0,T}}.
+
+align({0,L}) ->
+ {0,L};
+align({_Pos,<<_H,T/binary>>}) ->
+ {0,T};
+align(Bytes) ->
+ {0,Bytes}.
+
+%% First align buffer, then pick the first Num octets.
+%% Returns octets as an integer with bit significance as in buffer.
+getoctets({0,Buffer},Num) ->
+ <<Val:Num/integer-unit:8,RestBin/binary>> = Buffer,
+ {Val,{0,RestBin}};
+getoctets({U,<<_Padding,Rest/binary>>},Num) when U /= 0 ->
+ getoctets({0,Rest},Num);
+getoctets(Buffer,Num) when binary(Buffer) ->
+ getoctets({0,Buffer},Num).
+% getoctets(Buffer,Num) ->
+% %% io:format("getoctets:Buffer = ~p~nNum = ~p~n",[Buffer,Num]),
+% getoctets(Buffer,Num,0).
+
+% getoctets(Buffer,0,Acc) ->
+% {Acc,Buffer};
+% getoctets(Buffer,Num,Acc) ->
+% {Oct,NewBuffer} = getoctet(Buffer),
+% getoctets(NewBuffer,Num-1,(Acc bsl 8)+Oct).
+
+% getoctets_as_list(Buffer,Num) ->
+% getoctets_as_list(Buffer,Num,[]).
+
+% getoctets_as_list(Buffer,0,Acc) ->
+% {lists:reverse(Acc),Buffer};
+% getoctets_as_list(Buffer,Num,Acc) ->
+% {Oct,NewBuffer} = getoctet(Buffer),
+% getoctets_as_list(NewBuffer,Num-1,[Oct|Acc]).
+
+%% First align buffer, then pick the first Num octets.
+%% Returns octets as a binary
+getoctets_as_bin({0,Bin},Num)->
+ <<Octets:Num/binary,RestBin/binary>> = Bin,
+ {Octets,{0,RestBin}};
+getoctets_as_bin({_U,Bin},Num) ->
+ <<_Padding,Octets:Num/binary,RestBin/binary>> = Bin,
+ {Octets,{0,RestBin}};
+getoctets_as_bin(Bin,Num) when binary(Bin) ->
+ getoctets_as_bin({0,Bin},Num).
+
+%% same as above but returns octets as a List
+getoctets_as_list(Buffer,Num) ->
+ {Bin,Buffer2} = getoctets_as_bin(Buffer,Num),
+ {binary_to_list(Bin),Buffer2}.
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% set_choice(Alt,Choices,Altnum) -> ListofBitSettings
+%% Alt = atom()
+%% Altnum = integer() | {integer(),integer()}% number of alternatives
+%% Choices = [atom()] | {[atom()],[atom()]}
+%% When Choices is a tuple, the first list is the root set and the
+%% second is the extensions; Altnum must then also be a tuple with the
+%% lengths of the two lists
+%%
+set_choice(Alt,{L1,L2},{Len1,_Len2}) ->
+ case set_choice_tag(Alt,L1) of
+ N when integer(N), Len1 > 1 ->
+% [{bits,1,0}, % the value is in the root set
+% encode_constrained_number({0,Len1-1},N)];
+ [0, % the value is in the root set
+ encode_constrained_number({0,Len1-1},N)];
+ N when integer(N) ->
+% [{bits,1,0}]; % no encoding if only 0 or 1 alternative
+ [0]; % no encoding if only 0 or 1 alternative
+ false ->
+% [{bits,1,1}, % extension value
+ [1, % extension value
+ case set_choice_tag(Alt,L2) of
+ N2 when integer(N2) ->
+ encode_small_number(N2);
+ false ->
+ unknown_choice_alt
+ end]
+ end;
+set_choice(Alt,L,Len) ->
+ case set_choice_tag(Alt,L) of
+ N when integer(N), Len > 1 ->
+ encode_constrained_number({0,Len-1},N);
+ N when integer(N) ->
+ []; % no encoding if only 0 or 1 alternative
+ false ->
+ [unknown_choice_alt]
+ end.
+
+set_choice_tag(Alt,Choices) ->
+ set_choice_tag(Alt,Choices,0).
+
+set_choice_tag(Alt,[Alt|_Rest],Tag) ->
+ Tag;
+set_choice_tag(Alt,[_H|Rest],Tag) ->
+ set_choice_tag(Alt,Rest,Tag+1);
+set_choice_tag(_Alt,[],_Tag) ->
+ false.
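+
+%% Hand-worked examples of set_choice/3 (illustrative only):
+%%   set_choice(b, [a,b,c], 3)         -> [10,2,1]     % root index 1 in 2 bits
+%%   set_choice(b, {[a],[b,c]}, {1,2}) -> [1,[10,7,0]] % extension alternative 0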
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_fragmented_XXX; decode of values encoded fragmented according
+%% to ITU-T X.691 clause 10.9.3.8. The unit (XXX) is either bits, octets,
+%% characters or number of components (in a choice, sequence or similar).
+%% Buffer is a buffer {Used, Bin}.
+%% C is the constrained length.
+%% If the buffer is not aligned, this function does that.
+decode_fragmented_bits({0,Buffer},C) ->
+ decode_fragmented_bits(Buffer,C,[]);
+decode_fragmented_bits({_N,<<_B,Bs/binary>>},C) ->
+ decode_fragmented_bits(Bs,C,[]).
+
+decode_fragmented_bits(<<3:2,Len:6,Bin/binary>>,C,Acc) ->
+ {Value,Bin2} = split_binary(Bin, Len * ?'16K'),
+ decode_fragmented_bits(Bin2,C,[Value,Acc]);
+decode_fragmented_bits(<<0:1,0:7,Bin/binary>>,C,Acc) ->
+ BinBits = list_to_binary(lists:reverse(Acc)),
+ case C of
+ Int when integer(Int),C == size(BinBits) ->
+ {BinBits,{0,Bin}};
+ Int when integer(Int) ->
+ exit({error,{asn1,{illegal_value,C,BinBits}}});
+ _ ->
+ {BinBits,{0,Bin}}
+ end;
+decode_fragmented_bits(<<0:1,Len:7,Bin/binary>>,C,Acc) ->
+ Result = {BinBits,{Used,_Rest}} =
+ case (Len rem 8) of
+ 0 ->
+ <<Value:Len/binary-unit:1,Bin2/binary>> = Bin,
+ {list_to_binary(lists:reverse([Value|Acc])),{0,Bin2}};
+ Rem ->
+ Bytes = Len div 8,
+ U = 8 - Rem,
+ <<Value:Bytes/binary-unit:8,Bits1:Rem,Bits2:U,Bin2/binary>> = Bin,
+ {list_to_binary(lists:reverse([Bits1 bsl U,Value|Acc])),
+ {Rem,<<Bits2,Bin2/binary>>}}
+ end,
+ case C of
+ Int when integer(Int),C == (size(BinBits) - ((8 - Used) rem 8)) ->
+ Result;
+ Int when integer(Int) ->
+ exit({error,{asn1,{illegal_value,C,BinBits}}});
+ _ ->
+ Result
+ end.
+
+
+decode_fragmented_octets({0,Bin},C) ->
+ decode_fragmented_octets(Bin,C,[]);
+decode_fragmented_octets({_N,<<_B,Bs/binary>>},C) ->
+ decode_fragmented_octets(Bs,C,[]).
+
+decode_fragmented_octets(<<3:2,Len:6,Bin/binary>>,C,Acc) ->
+ {Value,Bin2} = split_binary(Bin,Len * ?'16K'),
+ decode_fragmented_octets(Bin2,C,[Value,Acc]);
+decode_fragmented_octets(<<0:1,0:7,Bin/binary>>,C,Acc) ->
+ Octets = list_to_binary(lists:reverse(Acc)),
+ case C of
+ Int when integer(Int), C == size(Octets) ->
+ {Octets,{0,Bin}};
+ Int when integer(Int) ->
+ exit({error,{asn1,{illegal_value,C,Octets}}});
+ _ ->
+ {Octets,{0,Bin}}
+ end;
+decode_fragmented_octets(<<0:1,Len:7,Bin/binary>>,C,Acc) ->
+ <<Value:Len/binary-unit:8,Bin2/binary>> = Bin,
+ BinOctets = list_to_binary(lists:reverse([Value|Acc])),
+ case C of
+ Int when integer(Int),size(BinOctets) == Int ->
+ {BinOctets,Bin2};
+ Int when integer(Int) ->
+ exit({error,{asn1,{illegal_value,C,BinOctets}}});
+ _ ->
+ {BinOctets,Bin2}
+ end.
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_open_type(Constraint, Value) -> CompleteList
+%% Value = list of bytes of an already encoded value (the list must be flat)
+%% | binary
+%% Constraint = not used in this version
+%%
+encode_open_type(_Constraint, Val) when list(Val) ->
+ Bin = list_to_binary(Val),
+ case size(Bin) of
+ Size when Size>255 ->
+ [encode_length(undefined,Size),[21,<<Size:16>>,Bin]];
+ Size ->
+ [encode_length(undefined,Size),[20,Size,Bin]]
+ end;
+% [encode_length(undefined,size(Bin)),{octets,Bin}]; % octets implies align
+encode_open_type(_Constraint, Val) when binary(Val) ->
+% [encode_length(undefined,size(Val)),{octets,Val}]. % octets implies align
+ case size(Val) of
+ Size when Size>255 ->
+ [encode_length(undefined,size(Val)),[21,<<Size:16>>,Val]]; % octets implies align
+ Size ->
+ [encode_length(undefined,Size),[20,Size,Val]]
+ end.
+%% the binary_to_list is not optimal but compatible with the current solution
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_open_type(Buffer,Constraint) -> Value
+%% Constraint is not used in this version
+%% Buffer = [byte] with PER encoded data
+%% Value = [byte] with decoded data (which must be decoded again as some type)
+%%
+decode_open_type(Bytes, _Constraint) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ getoctets_as_bin(Bytes2,Len).
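+
+%% A hand-worked example (illustrative only): a 3-octet already-encoded value
+%% is wrapped in a length determinant followed by the octets themselves:
+%%   encode_open_type([], <<1,2,3>>) -> [[20,1,3],[20,3,<<1,2,3>>]]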
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_integer(Constraint,Value,NamedNumberList) -> CompleteList
+%% encode_integer(Constraint,Value) -> CompleteList
+%% encode_integer(Constraint,{Name,Value}) -> CompleteList
+%%
+%%
+encode_integer(C,V,NamedNumberList) when atom(V) ->
+ case lists:keysearch(V,1,NamedNumberList) of
+ {value,{_,NewV}} ->
+ encode_integer(C,NewV);
+ _ ->
+ exit({error,{asn1,{namednumber,V}}})
+ end;
+encode_integer(C,V,_NamedNumberList) when integer(V) ->
+ encode_integer(C,V);
+encode_integer(C,{Name,V},NamedNumberList) when atom(Name) ->
+ encode_integer(C,V,NamedNumberList).
+
+encode_integer(C,{Name,Val}) when atom(Name) ->
+ encode_integer(C,Val);
+
+encode_integer([{Rc,_Ec}],Val) when tuple(Rc) -> % XXX when is this invoked? First argument most often a list,...Ok this is the extension case...but it doesn't work.
+ case (catch encode_integer([Rc],Val)) of
+ {'EXIT',{error,{asn1,_}}} ->
+% [{bits,1,1},encode_unconstrained_number(Val)];
+ [1,encode_unconstrained_number(Val)];
+ Encoded ->
+% [{bits,1,0},Encoded]
+ [0,Encoded]
+ end;
+
+encode_integer([],Val) ->
+ encode_unconstrained_number(Val);
+%% The constraint is the effective constraint, and in this case is a number
+encode_integer([{'SingleValue',V}],V) ->
+ [];
+encode_integer([{'ValueRange',VR={Lb,Ub},Range,PreEnc}],Val) when Val >= Lb,
+ Ub >= Val ->
+ %% this case when NamedNumberList
+ encode_constrained_number(VR,Range,PreEnc,Val);
+encode_integer([{'ValueRange',{Lb,'MAX'}}],Val) ->
+ encode_semi_constrained_number(Lb,Val);
+encode_integer([{'ValueRange',{'MIN',_}}],Val) ->
+ encode_unconstrained_number(Val);
+encode_integer([{'ValueRange',VR={_Lb,_Ub}}],Val) ->
+ encode_constrained_number(VR,Val);
+encode_integer(_,Val) ->
+ exit({error,{asn1,{illegal_value,Val}}}).
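+
+%% Hand-worked examples (illustrative only):
+%%   encode_integer([{'ValueRange',{0,7}}], 5) -> [10,3,5]        % 3-bit field
+%%   encode_integer([], 300)                   -> [20,3,[2,1,44]] % length octet + 2 value octets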
+
+
+
+decode_integer(Buffer,Range,NamedNumberList) ->
+ {Val,Buffer2} = decode_integer(Buffer,Range),
+ case lists:keysearch(Val,2,NamedNumberList) of
+ {value,{NewVal,_}} -> {NewVal,Buffer2};
+ _ -> {Val,Buffer2}
+ end.
+
+decode_integer(Buffer,[{Rc,_Ec}]) when tuple(Rc) ->
+ {Ext,Buffer2} = getext(Buffer),
+ case Ext of
+ 0 -> decode_integer(Buffer2,[Rc]);
+ 1 -> decode_unconstrained_number(Buffer2)
+ end;
+decode_integer(Buffer,undefined) ->
+ decode_unconstrained_number(Buffer);
+decode_integer(Buffer,C) ->
+ case get_constraint(C,'SingleValue') of
+ V when integer(V) ->
+ {V,Buffer};
+ _ ->
+ decode_integer1(Buffer,C)
+ end.
+
+decode_integer1(Buffer,C) ->
+ case VR = get_constraint(C,'ValueRange') of
+ no ->
+ decode_unconstrained_number(Buffer);
+ {Lb, 'MAX'} ->
+ decode_semi_constrained_number(Buffer,Lb);
+ {_Lb,_Ub} ->
+ decode_constrained_number(Buffer,VR)
+ end.
+
+%% X.691:10.6 Encoding of a normally small non-negative whole number
+%% Use this for encoding of CHOICE index if there is an extension marker in
+%% the CHOICE
+encode_small_number({Name,Val}) when atom(Name) ->
+ encode_small_number(Val);
+encode_small_number(Val) when Val =< 63 ->
+% [{bits,1,0},{bits,6,Val}];
+% [{bits,7,Val}]; % same as above but more efficient
+ [10,7,Val]; % same as above but more efficient
+encode_small_number(Val) ->
+% [{bits,1,1},encode_semi_constrained_number(0,Val)].
+ [1,encode_semi_constrained_number(0,Val)].
+
+decode_small_number(Bytes) ->
+ {Bit,Bytes2} = getbit(Bytes),
+ case Bit of
+ 0 ->
+ getbits(Bytes2,6);
+ 1 ->
+ decode_semi_constrained_number(Bytes2,0)
+ end.
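+
+%% Hand-worked examples (illustrative only):
+%%   encode_small_number(30)  -> [10,7,30]          % one 7-bit field
+%%   encode_small_number(100) -> [1,[20,2,[1,100]]] % extension bit + semi-constrained number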
+
+%% X.691:10.7 Encoding of a semi-constrained whole number
+%% a dedicated clause encode_semi_constrained_number(0,Val) might be an optimization
+encode_semi_constrained_number(C,{Name,Val}) when atom(Name) ->
+ encode_semi_constrained_number(C,Val);
+encode_semi_constrained_number({Lb,'MAX'},Val) ->
+ encode_semi_constrained_number(Lb,Val);
+encode_semi_constrained_number(Lb,Val) ->
+ Val2 = Val - Lb,
+ Oct = eint_positive(Val2),
+ Len = length(Oct),
+ if
+ Len < 128 ->
+ %{octets,[Len|Oct]}; % equiv with encode_length(undefined,Len) but faster
+ [20,Len+1,[Len|Oct]];
+ Len < 256 ->
+ [encode_length(undefined,Len),[20,Len,Oct]];
+ true ->
+ [encode_length(undefined,Len),[21,<<Len:16>>,Oct]]
+ end.
+
+decode_semi_constrained_number(Bytes,{Lb,_}) ->
+ decode_semi_constrained_number(Bytes,Lb);
+decode_semi_constrained_number(Bytes,Lb) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {V,Bytes3} = getoctets(Bytes2,Len),
+ {V+Lb,Bytes3}.
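+
+%% A hand-worked example (illustrative only): with lower bound 1,
+%%   encode_semi_constrained_number(1, 6) -> [20,2,[1,5]]  % length octet 1, offset 5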
+
+encode_constrained_number({Lb,_Ub},_Range,{bits,N},Val) ->
+ Val2 = Val-Lb,
+% {bits,N,Val2};
+ [10,N,Val2];
+encode_constrained_number({Lb,_Ub},_Range,{octets,N},Val) when N < 256->
+ %% N is 8 or 16 (1 or 2 octets)
+ Val2 = Val-Lb,
+% {octets,<<Val2:N/unit:8>>};
+ [20,N,Val2];
+encode_constrained_number({Lb,_Ub},_Range,{octets,N},Val) -> % N>255
+ %% N is 8 or 16 (1 or 2 octets)
+ Val2 = Val-Lb,
+% {octets,<<Val2:N/unit:8>>};
+ [21,<<N:16>>,Val2];
+encode_constrained_number({Lb,_Ub},Range,_,Val) ->
+ Val2 = Val-Lb,
+ if
+ Range =< 16#1000000 -> % max 3 octets
+ Octs = eint_positive(Val2),
+% [encode_length({1,3},size(Octs)),{octets,Octs}];
+ L = length(Octs),
+ [encode_length({1,3},L),[20,L,Octs]];
+ Range =< 16#100000000 -> % max 4 octets
+ Octs = eint_positive(Val2),
+% [encode_length({1,4},size(Octs)),{octets,Octs}];
+ L = length(Octs),
+ [encode_length({1,4},L),[20,L,Octs]];
+ Range =< 16#10000000000 -> % max 5 octets
+ Octs = eint_positive(Val2),
+% [encode_length({1,5},size(Octs)),{octets,Octs}];
+ L = length(Octs),
+ [encode_length({1,5},L),[20,L,Octs]];
+ true ->
+ exit({not_supported,{integer_range,Range}})
+ end.
+
+encode_constrained_number(Range,{Name,Val}) when atom(Name) ->
+ encode_constrained_number(Range,Val);
+encode_constrained_number({Lb,Ub},Val) when Val >= Lb, Ub >= Val ->
+ Range = Ub - Lb + 1,
+ Val2 = Val - Lb,
+ if
+ Range == 2 ->
+% Size = {bits,1,Val2};
+ [Val2];
+ Range =< 4 ->
+% Size = {bits,2,Val2};
+ [10,2,Val2];
+ Range =< 8 ->
+ [10,3,Val2];
+ Range =< 16 ->
+ [10,4,Val2];
+ Range =< 32 ->
+ [10,5,Val2];
+ Range =< 64 ->
+ [10,6,Val2];
+ Range =< 128 ->
+ [10,7,Val2];
+ Range =< 255 ->
+ [10,8,Val2];
+ Range =< 256 ->
+% Size = {octets,[Val2]};
+ [20,1,Val2];
+ Range =< 65536 ->
+% Size = {octets,<<Val2:16>>};
+ [20,2,<<Val2:16>>];
+ Range =< 16#1000000 ->
+ Octs = eint_positive(Val2),
+% [{bits,2,length(Octs)-1},{octets,Octs}];
+ Len = length(Octs),
+ [10,2,Len-1,20,Len,Octs];
+ Range =< 16#100000000 ->
+ Octs = eint_positive(Val2),
+ Len = length(Octs),
+ [10,2,Len-1,20,Len,Octs];
+ Range =< 16#10000000000 ->
+ Octs = eint_positive(Val2),
+ Len = length(Octs),
+ [10,3,Len-1,20,Len,Octs];
+ true ->
+ exit({not_supported,{integer_range,Range}})
+ end;
+encode_constrained_number({_,_},Val) ->
+ exit({error,{asn1,{illegal_value,Val}}}).
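+
+%% Hand-worked examples of encode_constrained_number/2 (illustrative only):
+%%   encode_constrained_number({0,255}, 200)   -> [20,1,200]
+%%   encode_constrained_number({1,65536}, 300) -> [20,2,<<299:16>>]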
+
+decode_constrained_number(Buffer,VR={Lb,Ub}) ->
+ Range = Ub - Lb + 1,
+ decode_constrained_number(Buffer,VR,Range).
+
+decode_constrained_number(Buffer,{Lb,_Ub},_Range,{bits,N}) ->
+ {Val,Remain} = getbits(Buffer,N),
+ {Val+Lb,Remain};
+decode_constrained_number(Buffer,{Lb,_Ub},_Range,{octets,N}) ->
+ {Val,Remain} = getoctets(Buffer,N),
+ {Val+Lb,Remain}.
+
+decode_constrained_number(Buffer,{Lb,_Ub},Range) ->
+ % Val2 = Val - Lb,
+ {Val,Remain} =
+ if
+ Range == 2 ->
+ getbits(Buffer,1);
+ Range =< 4 ->
+ getbits(Buffer,2);
+ Range =< 8 ->
+ getbits(Buffer,3);
+ Range =< 16 ->
+ getbits(Buffer,4);
+ Range =< 32 ->
+ getbits(Buffer,5);
+ Range =< 64 ->
+ getbits(Buffer,6);
+ Range =< 128 ->
+ getbits(Buffer,7);
+ Range =< 255 ->
+ getbits(Buffer,8);
+ Range =< 256 ->
+ getoctets(Buffer,1);
+ Range =< 65536 ->
+ getoctets(Buffer,2);
+ Range =< 16#1000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,3}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ Range =< 16#100000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,4}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ Range =< 16#10000000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,5}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ true ->
+ exit({not_supported,{integer_range,Range}})
+ end,
+ {Val+Lb,Remain}.
+
+%% X.691:10.8 Encoding of an unconstrained whole number
+
+encode_unconstrained_number(Val) when Val >= 0 ->
+ Oct = eint(Val,[]),
+ Len = length(Oct),
+ if
+ Len < 128 ->
+ %{octets,[Len|Oct]}; % equiv with encode_length(undefined,Len) but faster
+ [20,Len+1,[Len|Oct]];
+ Len < 256 ->
+% [encode_length(undefined,Len),20,Len,Oct];
+ [20,Len+2,<<2:2,Len:14>>,Oct];% equiv with encode_length(undefined,Len) but faster
+ true ->
+% [encode_length(undefined,Len),{octets,Oct}]
+ [encode_length(undefined,Len),[21,<<Len:16>>,Oct]]
+ end;
+encode_unconstrained_number(Val) -> % negative
+ Oct = enint(Val,[]),
+ Len = length(Oct),
+ if
+ Len < 128 ->
+% {octets,[Len|Oct]}; % equiv with encode_length(undefined,Len) but faster
+ [20,Len+1,[Len|Oct]];% equiv with encode_length(undefined,Len) but faster
+ Len < 256 ->
+% [encode_length(undefined,Len),20,Len,Oct];
+ [20,Len+2,<<2:2,Len:14>>,Oct];% equiv with encode_length(undefined,Len) but faster
+ true ->
+ %[encode_length(undefined,Len),{octets,Oct}]
+ [encode_length(undefined,Len),[21,<<Len:16>>,Oct]]
+ end.
+
+
+%% used for positive Values which don't need a sign bit
+%% returns a list
+eint_positive(Val) ->
+ case eint(Val,[]) of
+ [0,B1|T] ->
+ [B1|T];
+ T ->
+ T
+ end.
+
+
+eint(0, [B|Acc]) when B < 128 ->
+ [B|Acc];
+eint(N, Acc) ->
+ eint(N bsr 8, [N band 16#ff| Acc]).
+
+enint(-1, [B1|T]) when B1 > 127 ->
+ [B1|T];
+enint(N, Acc) ->
+ enint(N bsr 8, [N band 16#ff|Acc]).
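+
+%% Hand-worked examples of the octet helpers (illustrative only):
+%%   eint(256, [])      -> [1,0]     % minimal two's complement octets
+%%   enint(-129, [])    -> [255,127]
+%%   eint_positive(128) -> [128]     % leading sign octet stripped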
+
+decode_unconstrained_number(Bytes) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {Ints,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_integer(Ints),Bytes3}.
+
+dec_pos_integer(Ints) ->
+ decpint(Ints, 8 * (length(Ints) - 1)).
+dec_integer(Ints) when hd(Ints) band 255 =< 127 -> %% Positive number
+ decpint(Ints, 8 * (length(Ints) - 1));
+dec_integer(Ints) -> %% Negative
+ decnint(Ints, 8 * (length(Ints) - 1)).
+
+decpint([Byte|Tail], Shift) ->
+ (Byte bsl Shift) bor decpint(Tail, Shift-8);
+decpint([], _) -> 0.
+
+decnint([Byte|Tail], Shift) ->
+ (-128 + (Byte band 127) bsl Shift) bor decpint(Tail, Shift-8).
+
+% minimum_octets(Val) ->
+% minimum_octets(Val,[]).
+
+% minimum_octets(Val,Acc) when Val > 0 ->
+% minimum_octets((Val bsr 8),[Val band 16#FF|Acc]);
+% minimum_octets(0,Acc) ->
+% Acc.
+
+
+%% X.691:10.9 Encoding of a length determinant
+%%encode_small_length(undefined,Len) -> % null means no UpperBound
+%% encode_small_number(Len).
+
+%% X.691:10.9.3.5
+%% X.691:10.9.3.7
+encode_length(undefined,Len) -> % un-constrained
+ if
+ Len < 128 ->
+% {octets,[Len]};
+ [20,1,Len];
+ Len < 16384 ->
+ %{octets,<<2:2,Len:14>>};
+ [20,2,<<2:2,Len:14>>];
+	true -> % should be able to encode length >= 16384, i.e. fragmented length
+ exit({error,{asn1,{encode_length,{nyi,above_16k}}}})
+ end;
+
+encode_length({0,'MAX'},Len) ->
+ encode_length(undefined,Len);
+encode_length(Vr={Lb,Ub},Len) when Ub =< 65535 ,Lb >= 0 -> % constrained
+ encode_constrained_number(Vr,Len);
+encode_length({Lb,_Ub},Len) when integer(Lb), Lb >= 0 -> % Ub > 65535
+ encode_length(undefined,Len);
+encode_length({Vr={Lb,Ub},[]},Len) when Ub =< 65535 ,Lb >= 0,Len=<Ub ->
+ %% constrained extensible
+% [{bits,1,0},encode_constrained_number(Vr,Len)];
+ [0,encode_constrained_number(Vr,Len)];
+encode_length({{Lb,_},[]},Len) ->
+ [1,encode_semi_constrained_number(Lb,Len)];
+encode_length(SingleValue,_Len) when integer(SingleValue) ->
+ [].
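+
+%% Hand-worked examples of encode_length/2 (illustrative only):
+%%   encode_length(undefined, 5)   -> [20,1,5]
+%%   encode_length(undefined, 300) -> [20,2,<<2:2,300:14>>]
+%%   encode_length({1,10}, 5)      -> [10,4,4]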
+
+%% X.691 10.9.3.4 (only used for the length of the bitmap that prefixes
+%% extension additions in a sequence or set)
+encode_small_length(Len) when Len =< 64 ->
+%% [{bits,1,0},{bits,6,Len-1}];
+% {bits,7,Len-1}; % the same as above but more efficient
+ [10,7,Len-1];
+encode_small_length(Len) ->
+% [{bits,1,1},encode_length(undefined,Len)].
+ [1,encode_length(undefined,Len)].
+
+% decode_small_length({Used,<<_:Used,0:1,Num:6,_:((8-Used+1) rem 8),Rest/binary>>}) ->
+% case Buffer of
+% <<_:Used,0:1,Num:6,_:((8-Used+1) rem 8),Rest/binary>> ->
+% {Num,
+% case getbit(Buffer) of
+% {0,Remain} ->
+% {Bits,Remain2} = getbits(Remain,6),
+% {Bits+1,Remain2};
+% {1,Remain} ->
+% decode_length(Remain,undefined)
+% end.
+
+decode_small_length(Buffer) ->
+ case getbit(Buffer) of
+ {0,Remain} ->
+ {Bits,Remain2} = getbits(Remain,6),
+ {Bits+1,Remain2};
+ {1,Remain} ->
+ decode_length(Remain,undefined)
+ end.
+
+decode_length(Buffer) ->
+ decode_length(Buffer,undefined).
+
+decode_length(Buffer,undefined) -> % un-constrained
+ {0,Buffer2} = align(Buffer),
+ case Buffer2 of
+ <<0:1,Oct:7,Rest/binary>> ->
+ {Oct,{0,Rest}};
+ <<2:2,Val:14,Rest/binary>> ->
+ {Val,{0,Rest}};
+ <<3:2,_Val:14,_Rest/binary>> ->
+ %% this case should be fixed
+ exit({error,{asn1,{decode_length,{nyi,above_16k}}}})
+ end;
+%% {Bits,_} = getbits(Buffer2,2),
+% case Bits of
+% 2 ->
+% {Val,Bytes3} = getoctets(Buffer2,2),
+% {(Val band 16#3FFF),Bytes3};
+% 3 ->
+% exit({error,{asn1,{decode_length,{nyi,above_16k}}}});
+% _ ->
+% {Val,Bytes3} = getoctet(Buffer2),
+% {Val band 16#7F,Bytes3}
+% end;
+
+decode_length(Buffer,{Lb,Ub}) when Ub =< 65535 ,Lb >= 0 -> % constrained
+ decode_constrained_number(Buffer,{Lb,Ub});
+decode_length(_Buffer,{Lb,_Ub}) when integer(Lb), Lb >= 0 -> % Ub > 65535
+ exit({error,{asn1,{decode_length,{nyi,above_64K}}}});
+decode_length(Buffer,{{Lb,Ub},[]}) ->
+ case getbit(Buffer) of
+ {0,Buffer2} ->
+ decode_length(Buffer2, {Lb,Ub})
+ end;
+
+
+%When does this case occur with {_,_Lb,Ub} ??
+% X.691:10.9.3.5
+decode_length({Used,Bin},{_,_Lb,_Ub}) -> %when Len =< 127 -> % Unconstrained or large Ub NOTE! this case does not cover case when Ub > 65535
+ Unused = (8-Used) rem 8,
+ case Bin of
+ <<_:Used,0:1,Val:7,R:Unused,Rest/binary>> ->
+ {Val,{Used,<<R,Rest/binary>>}};
+ <<_:Used,_:Unused,2:2,Val:14,Rest/binary>> ->
+ {Val, {0,Rest}};
+ <<_:Used,_:Unused,3:2,_:14,_Rest/binary>> ->
+ exit({error,{asn1,{decode_length,{nyi,length_above_64K}}}})
+ end;
+% decode_length(Buffer,{_,_Lb,Ub}) -> %when Len =< 127 -> % Unconstrained or large Ub
+% case getbit(Buffer) of
+% {0,Remain} ->
+% getbits(Remain,7);
+% {1,Remain} ->
+% {Val,Remain2} = getoctets(Buffer,2),
+% {Val band 2#0111111111111111, Remain2}
+% end;
+decode_length(Buffer,SingleValue) when integer(SingleValue) ->
+ {SingleValue,Buffer}.
+
+
+ % X.691:11
+decode_boolean(Buffer) -> %when record(Buffer,buffer)
+ case getbit(Buffer) of
+ {1,Remain} -> {true,Remain};
+ {0,Remain} -> {false,Remain}
+ end.
+
+
+%% ENUMERATED with extension marker
+decode_enumerated(Buffer,C,{Ntup1,Ntup2}) when tuple(Ntup1), tuple(Ntup2) ->
+ {Ext,Buffer2} = getext(Buffer),
+ case Ext of
+ 0 -> % not an extension value
+ {Val,Buffer3} = decode_integer(Buffer2,C),
+ case catch (element(Val+1,Ntup1)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer3};
+ _Error -> exit({error,{asn1,{decode_enumerated,{Val,[Ntup1,Ntup2]}}}})
+ end;
+	1 -> % this is an extension value
+ {Val,Buffer3} = decode_small_number(Buffer2),
+ case catch (element(Val+1,Ntup2)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer3};
+ _ -> {{asn1_enum,Val},Buffer3}
+ end
+ end;
+
+decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) ->
+ {Val,Buffer2} = decode_integer(Buffer,C),
+ case catch (element(Val+1,NamedNumberTup)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer2};
+ _Error -> exit({error,{asn1,{decode_enumerated,{Val,NamedNumberTup}}}})
+ end.
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Bitstring value, ITU_T X.690 Chapter 8.5
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+%%===============================================================================
+%% encode bitstring value
+%%===============================================================================
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% bitstring NamedBitList
+%% Val can be of:
+%% - [identifiers] where only named identifiers are set to one;
+%%   the Constraint must then carry some information about the
+%%   bit length.
+%% - [list of ones and zeroes] all bits
+%% - integer value representing the bit list
+%% C is the constraint length; it is only valid when identifiers are used
+
+
+%% when the value is a list of {Unused,BinBits}, where
+%% Unused = integer(),
+%% BinBits = binary().
+
+encode_bit_string(C,Bin={Unused,BinBits},NamedBitList) when integer(Unused),
+ binary(BinBits) ->
+ encode_bin_bit_string(C,Bin,NamedBitList);
+
+%% when the value is a list of named bits
+
+encode_bit_string(C, LoNB=[FirstVal | _RestVal], NamedBitList) when atom(FirstVal) ->
+ ToSetPos = get_all_bitposes(LoNB, NamedBitList, []),
+ BitList = make_and_set_list(ToSetPos,0),
+ encode_bit_string(C,BitList,NamedBitList);% consider the constraint
+
+encode_bit_string(C, BL=[{bit,_} | _RestVal], NamedBitList) ->
+ ToSetPos = get_all_bitposes(BL, NamedBitList, []),
+ BitList = make_and_set_list(ToSetPos,0),
+ encode_bit_string(C,BitList,NamedBitList);
+
+%% when the value is a list of ones and zeroes
+encode_bit_string(Int, BitListValue, _)
+ when list(BitListValue),integer(Int) ->
+ %% The type is constrained by a single value size constraint
+ [40,Int,length(BitListValue),BitListValue];
+% encode_bit_string(C, BitListValue,NamedBitList)
+% when list(BitListValue) ->
+% [encode_bit_str_length(C,BitListValue),
+% 2,45,BitListValue];
+encode_bit_string(no, BitListValue,[])
+ when list(BitListValue) ->
+ [encode_length(undefined,length(BitListValue)),
+ 2,BitListValue];
+encode_bit_string(C, BitListValue,[])
+ when list(BitListValue) ->
+ [encode_length(C,length(BitListValue)),
+ 2,BitListValue];
+encode_bit_string(no, BitListValue,_NamedBitList)
+ when list(BitListValue) ->
+ %% this case with an unconstrained BIT STRING can be made more efficient
+ %% if the complete driver can take a special code so the length field
+ %% is encoded there.
+ NewBitLVal = lists:reverse(lists:dropwhile(fun(0)->true;(1)->false end,
+ lists:reverse(BitListValue))),
+ [encode_length(undefined,length(NewBitLVal)),
+ 2,NewBitLVal];
+encode_bit_string(C,BitListValue,_NamedBitList)
+ when list(BitListValue) ->% C = {_,'MAX'}
+% NewBitLVal = lists:reverse(lists:dropwhile(fun(0)->true;(1)->false end,
+% lists:reverse(BitListValue))),
+ NewBitLVal = bit_string_trailing_zeros(BitListValue,C),
+ [encode_length(C,length(NewBitLVal)),
+ 2,NewBitLVal];
+
+% encode_bit_string(C, BitListValue, NamedBitList) when list(BitListValue) ->
+% BitListToBinary =
+% %% fun that transforms a list of 1 and 0 to a tuple:
+% %% {UnusedBitsInLastByte, Binary}
+% fun([H|T],Acc,N,Fun) ->
+% Fun(T,(Acc bsl 1)+H,N+1,Fun);
+% ([],Acc,N,_) -> % length fits in one byte
+% Unused = (8 - (N rem 8)) rem 8,
+% % case N/8 of
+% % _Len =< 255 ->
+% % [30,Unused,(Unused+N)/8,<<Acc:N,0:Unused>>];
+% % _Len ->
+% % Len = (Unused+N)/8,
+% % [31,Unused,<<Len:16>>,<<Acc:N,0:Unused>>]
+% % end
+% {Unused,<<Acc:N,0:Unused>>}
+% end,
+% UnusedAndBin =
+% case NamedBitList of
+% [] -> % dont remove trailing zeroes
+% BitListToBinary(BitListValue,0,0,BitListToBinary);
+% _ ->
+% BitListToBinary(lists:reverse(
+% lists:dropwhile(fun(0)->true;(1)->false end,
+% lists:reverse(BitListValue))),
+% 0,0,BitListToBinary)
+% end,
+% encode_bin_bit_string(C,UnusedAndBin,NamedBitList);
+
+%% when the value is an integer
+encode_bit_string(C, IntegerVal, NamedBitList) when integer(IntegerVal)->
+ BitList = int_to_bitlist(IntegerVal),
+ encode_bit_string(C,BitList,NamedBitList);
+
+%% when the value is a tuple
+encode_bit_string(C,{Name,Val}, NamedBitList) when atom(Name) ->
+ encode_bit_string(C,Val,NamedBitList).
+
+bit_string_trailing_zeros(BitList,C) when integer(C) ->
+ bit_string_trailing_zeros1(BitList,C,C);
+bit_string_trailing_zeros(BitList,{Lb,Ub}) when integer(Lb) ->
+ bit_string_trailing_zeros1(BitList,Lb,Ub);
+bit_string_trailing_zeros(BitList,{{Lb,Ub},_}) when integer(Lb) ->
+ bit_string_trailing_zeros1(BitList,Lb,Ub);
+bit_string_trailing_zeros(BitList,_) ->
+ BitList.
+
+bit_string_trailing_zeros1(BitList,Lb,Ub) ->
+ case length(BitList) of
+ Lb -> BitList;
+ B when B<Lb -> BitList++lists:duplicate(Lb-B,0);
+ D -> F = fun(L,LB,LB,_,_)->lists:reverse(L);
+ ([0|R],L1,LB,UB,Fun)->Fun(R,L1-1,LB,UB,Fun);
+ (L,L1,_,UB,_)when L1 =< UB -> lists:reverse(L);
+ (_,_L1,_,_,_) ->exit({error,{list_length_BIT_STRING,
+ BitList}}) end,
+ F(lists:reverse(BitList),D,Lb,Ub,F)
+ end.
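+
+%% Hand-worked examples (illustrative only):
+%%   bit_string_trailing_zeros([1,0,1,0,0], {2,8}) -> [1,0,1] % trailing zeros removed
+%%   bit_string_trailing_zeros([1], {3,8})         -> [1,0,0] % padded up to the lower bound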
+
+%% encode_bin_bit_string/3, when value is a tuple of Unused and BinBits.
+%% Unused = integer(), i.e. the number of unused bits in the least significant byte of
+%% BinBits = binary().
+encode_bin_bit_string(C,{_,BinBits},_NamedBitList)
+ when integer(C),C=<16 ->
+ [45,C,size(BinBits),BinBits];
+encode_bin_bit_string(C,{_Unused,BinBits},_NamedBitList)
+ when integer(C) ->
+ [2,45,C,size(BinBits),BinBits];
+encode_bin_bit_string(C,UnusedAndBin={_,_},NamedBitList) ->
+% UnusedAndBin1 = {Unused1,Bin1} =
+ {Unused1,Bin1} =
+ %% removes all trailing bits if NamedBitList is not empty
+ remove_trailing_bin(NamedBitList,UnusedAndBin),
+ case C of
+% case get_constraint(C,'SizeConstraint') of
+
+% 0 ->
+% []; % should be decided at compile time
+% V when integer(V),V=<16 ->
+% {Unused2,Bin2} = pad_list(V,UnusedAndBin1),
+% <<BitVal:V,_:Unused2>> = Bin2,
+% % {bits,V,BitVal};
+% [10,V,BitVal];
+% V when integer(V) ->
+% %[align, pad_list(V, UnusedAndBin1)];
+% {Unused2,Bin2} = pad_list(V, UnusedAndBin1),
+% <<BitVal:V,_:Unused2>> = Bin2,
+% [2,octets_unused_to_complete(Unused2,size(Bin2),Bin2)];
+
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+% [encode_length({Lb,Ub},size(Bin1)*8 - Unused1),
+% align,UnusedAndBin1];
+ Size=size(Bin1),
+ [encode_length({Lb,Ub},Size*8 - Unused1),
+ 2,octets_unused_to_complete(Unused1,Size,Bin1)];
+ no ->
+ Size=size(Bin1),
+ [encode_length(undefined,Size*8 - Unused1),
+ 2,octets_unused_to_complete(Unused1,Size,Bin1)];
+ Sc ->
+ Size=size(Bin1),
+ [encode_length(Sc,Size*8 - Unused1),
+ 2,octets_unused_to_complete(Unused1,Size,Bin1)]
+ end.
+
+remove_trailing_bin([], {Unused,Bin}) ->
+ {Unused,Bin};
+remove_trailing_bin(NamedNumberList, {_Unused,Bin}) ->
+ Size = size(Bin)-1,
+ <<Bfront:Size/binary, LastByte:8>> = Bin,
+ %% clear the Unused bits to be sure
+% LastByte1 = LastByte band (((1 bsl Unused) -1) bxor 255),% why this???
+ Unused1 = trailingZeroesInNibble(LastByte band 15),
+ Unused2 =
+ case Unused1 of
+ 4 ->
+ 4 + trailingZeroesInNibble(LastByte bsr 4);
+ _ -> Unused1
+ end,
+ case Unused2 of
+ 8 ->
+ remove_trailing_bin(NamedNumberList,{0,Bfront});
+ _ ->
+ {Unused2,Bin}
+ end.
+
+
+trailingZeroesInNibble(0) ->
+ 4;
+trailingZeroesInNibble(1) ->
+ 0;
+trailingZeroesInNibble(2) ->
+ 1;
+trailingZeroesInNibble(3) ->
+ 0;
+trailingZeroesInNibble(4) ->
+ 2;
+trailingZeroesInNibble(5) ->
+ 0;
+trailingZeroesInNibble(6) ->
+ 1;
+trailingZeroesInNibble(7) ->
+ 0;
+trailingZeroesInNibble(8) ->
+ 3;
+trailingZeroesInNibble(9) ->
+ 0;
+trailingZeroesInNibble(10) ->
+ 1;
+trailingZeroesInNibble(11) ->
+ 0;
+trailingZeroesInNibble(12) -> %#1100
+ 2;
+trailingZeroesInNibble(13) ->
+ 0;
+trailingZeroesInNibble(14) ->
+ 1;
+trailingZeroesInNibble(15) ->
+ 0.
+
+%%%%%%%%%%%%%%%
+%% The result is presented as a list of named bits (if possible)
+%% else as a tuple {Unused,Bits}. Unused is the number of unused
+%% bits, least significant bits in the last byte of Bits. Bits is
+%% the BIT STRING represented as a binary.
+%%
+decode_compact_bit_string(Buffer, C, NamedNumberList) ->
+ case get_constraint(C,'SizeConstraint') of
+ 0 -> % fixed length
+ {{8,0},Buffer};
+ V when integer(V),V=<16 -> %fixed length 16 bits or less
+ compact_bit_string(Buffer,V,NamedNumberList);
+ V when integer(V),V=<65536 -> %fixed length > 16 bits
+ Bytes2 = align(Buffer),
+ compact_bit_string(Bytes2,V,NamedNumberList);
+ V when integer(V) -> % V > 65536 => fragmented value
+ {Bin,Buffer2} = decode_fragmented_bits(Buffer,V),
+ case Buffer2 of
+ {0,_} -> {{0,Bin},Buffer2};
+ {U,_} -> {{8-U,Bin},Buffer2}
+ end;
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ %% This case may demand decoding of fragmented length/value
+ {Len,Bytes2} = decode_length(Buffer,{Lb,Ub}),
+ Bytes3 = align(Bytes2),
+ compact_bit_string(Bytes3,Len,NamedNumberList);
+ no ->
+ %% This case may demand decoding of fragmented length/value
+ {Len,Bytes2} = decode_length(Buffer,undefined),
+ Bytes3 = align(Bytes2),
+ compact_bit_string(Bytes3,Len,NamedNumberList);
+ Sc ->
+ {Len,Bytes2} = decode_length(Buffer,Sc),
+ Bytes3 = align(Bytes2),
+ compact_bit_string(Bytes3,Len,NamedNumberList)
+ end.
+
+
+%%%%%%%%%%%%%%%
+%% The result is presented as a list of named bits (if possible)
+%% else as a list of 0 and 1.
+%%
+decode_bit_string(Buffer, C, NamedNumberList) ->
+ case get_constraint(C,'SizeConstraint') of
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ {Len,Bytes2} = decode_length(Buffer,{Lb,Ub}),
+ Bytes3 = align(Bytes2),
+ bit_list_or_named(Bytes3,Len,NamedNumberList);
+ no ->
+ {Len,Bytes2} = decode_length(Buffer,undefined),
+ Bytes3 = align(Bytes2),
+ bit_list_or_named(Bytes3,Len,NamedNumberList);
+ 0 -> % fixed length
+	    {[],Buffer}; % nothing to decode
+ V when integer(V),V=<16 -> % fixed length 16 bits or less
+ bit_list_or_named(Buffer,V,NamedNumberList);
+ V when integer(V),V=<65536 ->
+ Bytes2 = align(Buffer),
+ bit_list_or_named(Bytes2,V,NamedNumberList);
+ V when integer(V) ->
+ Bytes2 = align(Buffer),
+ {BinBits,_Bytes3} = decode_fragmented_bits(Bytes2,V),
+ bit_list_or_named(BinBits,V,NamedNumberList);
+ Sc -> % extension marker
+ {Len,Bytes2} = decode_length(Buffer,Sc),
+ Bytes3 = align(Bytes2),
+ bit_list_or_named(Bytes3,Len,NamedNumberList)
+ end.
+
+
+%% if no named bits are declared we will return a
+%% {Unused,Bits}. Unused = integer(),
+%% Bits = binary().
+compact_bit_string(Buffer,Len,[]) ->
+ getbits_as_binary(Len,Buffer); % {{Unused,BinBits},NewBuffer}
+compact_bit_string(Buffer,Len,NamedNumberList) ->
+ bit_list_or_named(Buffer,Len,NamedNumberList).
+
+
+%% if no named bits are declared we will return a
+%% BitList = [0 | 1]
+
+bit_list_or_named(Buffer,Len,[]) ->
+ getbits_as_list(Len,Buffer);
+
+%% if there are named bits declared we will return a named
+%% BitList where the names are atoms and unnamed bits represented
+%% as {bit,Pos}
+%% BitList = [atom() | {bit,Pos}]
+%% Pos = integer()
+
+bit_list_or_named(Buffer,Len,NamedNumberList) ->
+ {BitList,Rest} = getbits_as_list(Len,Buffer),
+ {bit_list_or_named1(0,BitList,NamedNumberList,[]), Rest}.
+
+bit_list_or_named1(Pos,[0|Bt],Names,Acc) ->
+ bit_list_or_named1(Pos+1,Bt,Names,Acc);
+bit_list_or_named1(Pos,[1|Bt],Names,Acc) ->
+ case lists:keysearch(Pos,2,Names) of
+ {value,{Name,_}} ->
+ bit_list_or_named1(Pos+1,Bt,Names,[Name|Acc]);
+ _ ->
+ bit_list_or_named1(Pos+1,Bt,Names,[{bit,Pos}|Acc])
+ end;
+bit_list_or_named1(_Pos,[],_Names,Acc) ->
+ lists:reverse(Acc).
+
+
+
+%%%%%%%%%%%%%%%
+%%
+
+int_to_bitlist(Int) when integer(Int), Int > 0 ->
+ [Int band 1 | int_to_bitlist(Int bsr 1)];
+int_to_bitlist(0) ->
+ [].
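+
+%% e.g. int_to_bitlist(5) -> [1,0,1]  (least significant bit first; hand-worked example)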
+
+
+%%%%%%%%%%%%%%%%%%
+%% get_all_bitposes([list of named bits to set], named_bit_db, []) ->
+%% [sorted_list_of_bitpositions_to_set]
+
+get_all_bitposes([{bit,ValPos}|Rest], NamedBitList, Ack) ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack ]);
+
+get_all_bitposes([Val | Rest], NamedBitList, Ack) ->
+ case lists:keysearch(Val, 1, NamedBitList) of
+ {value, {_ValName, ValPos}} ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack]);
+ _ ->
+ exit({error,{asn1, {bitstring_namedbit, Val}}})
+ end;
+get_all_bitposes([], _NamedBitList, Ack) ->
+ lists:sort(Ack).
+
+%%%%%%%%%%%%%%%%%%
+%% make_and_set_list([list of positions to set to 1])->
+%% returns a list with every position in SetPos set to 1.
+%% When positioning in the list, the first element is position 0, the second is 1, and so on.
+%%
+
+make_and_set_list([XPos|SetPos], XPos) ->
+ [1 | make_and_set_list(SetPos, XPos + 1)];
+make_and_set_list([Pos|SetPos], XPos) ->
+ [0 | make_and_set_list([Pos | SetPos], XPos + 1)];
+make_and_set_list([], _) ->
+ [].
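+
+%% Hand-worked examples (illustrative only):
+%%   get_all_bitposes([b,d], [{a,0},{b,1},{c,2},{d,3}], []) -> [1,3]
+%%   make_and_set_list([1,3], 0)                            -> [0,1,0,1]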
+
+%%%%%%%%%%%%%%%%%
+%% pad_list(N,BitList) -> PaddedList
+%% returns a padded (with trailing {bit,0} elements) list of length N
+%% if BitList contains more than N significant bits, an asn1 error exit
+%% is generated
+
+% pad_list(N,In={Unused,Bin}) ->
+% pad_list(N, size(Bin)*8 - Unused, In).
+
+% pad_list(N,Size,In={Unused,Bin}) when N < Size ->
+% exit({error,{asn1,{range_error,{bit_string,In}}}});
+% pad_list(N,Size,{Unused,Bin}) when N > Size, Unused > 0 ->
+% pad_list(N,Size+1,{Unused-1,Bin});
+% pad_list(N,Size,{Unused,Bin}) when N > Size ->
+% pad_list(N,Size+1,{7,<<Bin/binary,0>>});
+% pad_list(N,N,In={Unused,Bin}) ->
+% In.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% X.691:16
+%% encode_octet_string(Constraint,ExtensionMarker,Val)
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+encode_octet_string(C,Val) ->
+ encode_octet_string(C,false,Val).
+
+encode_octet_string(C,Bool,{_Name,Val}) ->
+ encode_octet_string(C,Bool,Val);
+encode_octet_string(_C,true,_Val) ->
+ exit({error,{asn1,{'not_supported',extensionmarker}}});
+encode_octet_string(SZ={_,_},false,Val) ->
+% [encode_length(SZ,length(Val)),align,
+% {octets,Val}];
+ Len = length(Val),
+ [encode_length(SZ,Len),2,
+ octets_to_complete(Len,Val)];
+encode_octet_string(SZ,false,Val) when list(SZ) ->
+ Len = length(Val),
+ [encode_length({hd(SZ),lists:max(SZ)},Len),2,
+ octets_to_complete(Len,Val)];
+encode_octet_string(no,false,Val) ->
+ Len = length(Val),
+ [encode_length(undefined,Len),2,
+ octets_to_complete(Len,Val)];
+encode_octet_string(C,_,_) ->
+ exit({error,{not_implemented,C}}).
+
+
+decode_octet_string(Bytes,Range) ->
+ decode_octet_string(Bytes,Range,false).
+
+decode_octet_string(Bytes,1,false) ->
+ {B1,Bytes2} = getbits(Bytes,8),
+ {[B1],Bytes2};
+decode_octet_string(Bytes,2,false) ->
+ {Bs,Bytes2}= getbits(Bytes,16),
+ {binary_to_list(<<Bs:16>>),Bytes2};
+decode_octet_string(Bytes,Sv,false) when integer(Sv),Sv=<65535 ->
+ Bytes2 = align(Bytes),
+ getoctets_as_list(Bytes2,Sv);
+decode_octet_string(Bytes,Sv,false) when integer(Sv) ->
+ Bytes2 = align(Bytes),
+ decode_fragmented_octets(Bytes2,Sv);
+decode_octet_string(Bytes,{Lb,Ub},false) ->
+ {Len,Bytes2} = decode_length(Bytes,{Lb,Ub}),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len);
+decode_octet_string(Bytes,Sv,false) when list(Sv) ->
+ {Len,Bytes2} = decode_length(Bytes,{hd(Sv),lists:max(Sv)}),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len);
+decode_octet_string(Bytes,no,false) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Restricted char string types
+%% (NumericString, PrintableString,VisibleString,IA5String,BMPString,UniversalString)
+%% X.691:26 and X.680:34-36
+%%encode_restricted_string(aligned,'BMPString',Constraints,Extension,Val)
+
+
+encode_restricted_string(aligned,{Name,Val}) when atom(Name) ->
+ encode_restricted_string(aligned,Val);
+
+encode_restricted_string(aligned,Val) when list(Val)->
+ Len = length(Val),
+% [encode_length(undefined,length(Val)),{octets,Val}].
+ [encode_length(undefined,Len),octets_to_complete(Len,Val)].
+
+
+encode_known_multiplier_string(StringType,SizeC,NumBits,CharOutTab,{Name,Val}) when atom(Name) ->
+ encode_known_multiplier_string(StringType,SizeC,NumBits,CharOutTab,Val);
+encode_known_multiplier_string(StringType,SizeC,NumBits,CharOutTab,Val) ->
+ Result = chars_encode2(Val,NumBits,CharOutTab),
+ case SizeC of
+ Ub when integer(Ub), Ub*NumBits =< 16 ->
+ case {StringType,Result} of
+ {'BMPString',{octets,Ol}} -> %% this case cannot happen !!??
+ [{bits,8,Oct}||Oct <- Ol];
+ _ ->
+ Result
+ end;
+ Ub when integer(Ub),Ub =<65535 -> % fixed length
+%% [align,Result];
+ [2,Result];
+ {Ub,Lb} ->
+% [encode_length({Ub,Lb},length(Val)),align,Result];
+ [encode_length({Ub,Lb},length(Val)),2,Result];
+ no ->
+% [encode_length(undefined,length(Val)),align,Result]
+ [encode_length(undefined,length(Val)),2,Result]
+ end.
+
+decode_restricted_string(Bytes,aligned) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ getoctets_as_list(Bytes2,Len).
+
+decode_known_multiplier_string(StringType,SizeC,NumBits,CharInTab,Bytes) ->
+ case SizeC of
+ Ub when integer(Ub), Ub*NumBits =< 16 ->
+ chars_decode(Bytes,NumBits,StringType,CharInTab,Ub);
+ Ub when integer(Ub),Ub =<65535 -> % fixed length
+ Bytes1 = align(Bytes),
+ chars_decode(Bytes1,NumBits,StringType,CharInTab,Ub);
+ Vl when list(Vl) ->
+ {Len,Bytes1} = decode_length(Bytes,{hd(Vl),lists:max(Vl)}),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,CharInTab,Len);
+ no ->
+ {Len,Bytes1} = decode_length(Bytes,undefined),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,CharInTab,Len);
+ {Lb,Ub}->
+ {Len,Bytes1} = decode_length(Bytes,{Lb,Ub}),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,CharInTab,Len)
+ end.
+
+encode_GeneralString(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_GeneralString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_GraphicString(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_GraphicString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_ObjectDescriptor(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_ObjectDescriptor(Bytes) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_TeletexString(_C,Val) -> % equivalent with T61String
+ encode_restricted_string(aligned,Val).
+decode_TeletexString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_VideotexString(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_VideotexString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% getBMPChars(Bytes,Len) ->{BMPcharList,RemainingBytes}
+%%
+getBMPChars(Bytes,1) ->
+ {O1,Bytes2} = getbits(Bytes,8),
+ {O2,Bytes3} = getbits(Bytes2,8),
+ if
+ O1 == 0 ->
+ {[O2],Bytes3};
+ true ->
+ {[{0,0,O1,O2}],Bytes3}
+ end;
+getBMPChars(Bytes,Len) ->
+ getBMPChars(Bytes,Len,[]).
+
+getBMPChars(Bytes,0,Acc) ->
+ {lists:reverse(Acc),Bytes};
+getBMPChars(Bytes,Len,Acc) ->
+ {Octs,Bytes1} = getoctets_as_list(Bytes,2),
+ case Octs of
+ [0,O2] ->
+ getBMPChars(Bytes1,Len-1,[O2|Acc]);
+ [O1,O2]->
+ getBMPChars(Bytes1,Len-1,[{0,0,O1,O2}|Acc])
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% chars_encode(C,StringType,Value) -> ValueList
+%%
+%% encodes chars according to the PER rules, taking the constraint PermittedAlphabet
+%% into account.
+%% This function only encodes the value part and NOT the length
+
+% chars_encode(C,StringType,Value) ->
+% case {StringType,get_constraint(C,'PermittedAlphabet')} of
+% {'UniversalString',{_,Sv}} ->
+% exit({error,{asn1,{'not implemented',"UniversalString with PermittedAlphabet constraint"}}});
+% {'BMPString',{_,Sv}} ->
+% exit({error,{asn1,{'not implemented',"BMPString with PermittedAlphabet constraint"}}});
+% _ ->
+% {NumBits,CharOutTab} = {get_NumBits(C,StringType),get_CharOutTab(C,StringType)},
+% chars_encode2(Value,NumBits,CharOutTab)
+% end.
+
+
+chars_encode2([H|T],NumBits,T1={Min,Max,notab}) when H =< Max, H >= Min ->
+% [[10,NumBits,H-Min]|chars_encode2(T,NumBits,T1)];
+ [pre_complete_bits(NumBits,H-Min)|chars_encode2(T,NumBits,T1)];
+chars_encode2([H|T],NumBits,T1={Min,Max,Tab}) when H =< Max, H >= Min ->
+% [[10,NumBits,element(H-Min+1,Tab)]|chars_encode2(T,NumBits,T1)];
+ [pre_complete_bits(NumBits,exit_if_false(H,element(H-Min+1,Tab)))|
+ chars_encode2(T,NumBits,T1)];
+chars_encode2([{A,B,C,D}|T],NumBits,T1={Min,_Max,notab}) ->
+ %% no value range check here (ought to be, but very expensive)
+% [{bits,NumBits,(A*B*C*D)-Min}|chars_encode2(T,NumBits,{Min,Max,notab})];
+% [[10,NumBits,((((((A bsl 8)+B) bsl 8)+C) bsl 8)+D)-Min]|chars_encode2(T,NumBits,T1)];
+ [pre_complete_bits(NumBits,
+ ((((((A bsl 8)+B) bsl 8)+C) bsl 8)+D)-Min)|
+ chars_encode2(T,NumBits,T1)];
+chars_encode2([H={A,B,C,D}|T],NumBits,{Min,Max,Tab}) ->
+ %% no value range check here (ought to be, but very expensive)
+ [pre_complete_bits(NumBits,exit_if_false(H,element(((((((A bsl 8)+B) bsl 8)+C) bsl 8)+D)-Min,Tab)))|chars_encode2(T,NumBits,{Min,Max,notab})];
+chars_encode2([H|_T],_NumBits,{_Min,_Max,_Tab}) ->
+ exit({error,{asn1,{illegal_char_value,H}}});
+chars_encode2([],_,_) ->
+ [].
+
+exit_if_false(V,false)->
+ exit({error,{asn1,{"illegal value according to Permitted alphabet constraint",V}}});
+exit_if_false(_,V) ->V.
+
+pre_complete_bits(NumBits,Val) when NumBits =< 8 ->
+ [10,NumBits,Val];
+pre_complete_bits(NumBits,Val) when NumBits =< 16 ->
+ [10,NumBits-8,Val bsr 8,10,8,(Val band 255)];
+pre_complete_bits(NumBits,Val) when NumBits =< 2040 -> % 255 * 8
+% LBUsed = NumBits rem 8,
+% {Unused,Len} = case (8 - LBUsed) of
+% 8 -> {0,NumBits div 8};
+% U -> {U,(NumBits div 8) + 1}
+% end,
+% NewVal = Val bsr LBUsed,
+% [30,Unused,Len,<<NewVal:Len/unit:8,Val:LBUsed,0:Unused>>].
+ Unused = (8 - (NumBits rem 8)) rem 8,
+ Len = NumBits + Unused,
+ [30,Unused,Len div 8,<<(Val bsl Unused):Len>>].
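+
+%% Hand-worked examples of pre_complete_bits/2 (illustrative only):
+%%   pre_complete_bits(4, 5)         -> [10,4,5]
+%%   pre_complete_bits(12, 16#ABC)   -> [10,4,16#A,10,8,16#BC]
+%%   pre_complete_bits(20, 16#12345) -> [30,4,3,<<16#12,16#34,16#50>>]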
+
+% get_NumBits(C,StringType) ->
+% case get_constraint(C,'PermittedAlphabet') of
+% {'SingleValue',Sv} ->
+% charbits(length(Sv),aligned);
+% no ->
+% case StringType of
+% 'IA5String' ->
+% charbits(128,aligned); % 16#00..16#7F
+% 'VisibleString' ->
+% charbits(95,aligned); % 16#20..16#7E
+% 'PrintableString' ->
+% charbits(74,aligned); % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+% 'NumericString' ->
+% charbits(11,aligned); % $ ,"0123456789"
+% 'UniversalString' ->
+% 32;
+% 'BMPString' ->
+% 16
+% end
+% end.
+
+%%Maybe used later
+%%get_MaxChar(C,StringType) ->
+%% case get_constraint(C,'PermittedAlphabet') of
+%% {'SingleValue',Sv} ->
+%% lists:nth(length(Sv),Sv);
+%% no ->
+%% case StringType of
+%% 'IA5String' ->
+%% 16#7F; % 16#00..16#7F
+%% 'VisibleString' ->
+%% 16#7E; % 16#20..16#7E
+%% 'PrintableString' ->
+%% $z; % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+%% 'NumericString' ->
+%% $9; % $ ,"0123456789"
+%% 'UniversalString' ->
+%% 16#ffffffff;
+%% 'BMPString' ->
+%% 16#ffff
+%% end
+%% end.
+
+%%Maybe used later
+%%get_MinChar(C,StringType) ->
+%% case get_constraint(C,'PermittedAlphabet') of
+%% {'SingleValue',Sv} ->
+%% hd(Sv);
+%% no ->
+%% case StringType of
+%% 'IA5String' ->
+%% 16#00; % 16#00..16#7F
+%% 'VisibleString' ->
+%% 16#20; % 16#20..16#7E
+%% 'PrintableString' ->
+%% $\s; % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+%% 'NumericString' ->
+%% $\s; % $ ,"0123456789"
+%% 'UniversalString' ->
+%% 16#00;
+%% 'BMPString' ->
+%% 16#00
+%% end
+%% end.
+
+% get_CharOutTab(C,StringType) ->
+% get_CharTab(C,StringType,out).
+
+% get_CharInTab(C,StringType) ->
+% get_CharTab(C,StringType,in).
+
+% get_CharTab(C,StringType,InOut) ->
+% case get_constraint(C,'PermittedAlphabet') of
+% {'SingleValue',Sv} ->
+% get_CharTab2(C,StringType,hd(Sv),lists:max(Sv),Sv,InOut);
+% no ->
+% case StringType of
+% 'IA5String' ->
+% {0,16#7F,notab};
+% 'VisibleString' ->
+% get_CharTab2(C,StringType,16#20,16#7F,notab,InOut);
+% 'PrintableString' ->
+% Chars = lists:sort(
+% " '()+,-./0123456789:=?ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"),
+% get_CharTab2(C,StringType,hd(Chars),lists:max(Chars),Chars,InOut);
+% 'NumericString' ->
+% get_CharTab2(C,StringType,16#20,$9," 0123456789",InOut);
+% 'UniversalString' ->
+% {0,16#FFFFFFFF,notab};
+% 'BMPString' ->
+% {0,16#FFFF,notab}
+% end
+% end.
+
+% get_CharTab2(C,StringType,Min,Max,Chars,InOut) ->
+% BitValMax = (1 bsl get_NumBits(C,StringType))-1,
+% if
+% Max =< BitValMax ->
+% {0,Max,notab};
+% true ->
+% case InOut of
+% out ->
+% {Min,Max,create_char_tab(Min,Chars)};
+% in ->
+% {Min,Max,list_to_tuple(Chars)}
+% end
+% end.
+
+% create_char_tab(Min,L) ->
+% list_to_tuple(create_char_tab(Min,L,0)).
+% create_char_tab(Min,[Min|T],V) ->
+% [V|create_char_tab(Min+1,T,V+1)];
+% create_char_tab(_Min,[],_V) ->
+% [];
+% create_char_tab(Min,L,V) ->
+% [false|create_char_tab(Min+1,L,V)].
+
+%% This is very inefficient and should be moved to compile time
+% charbits(NumOfChars,aligned) ->
+% case charbits(NumOfChars) of
+% 1 -> 1;
+% 2 -> 2;
+% B when B =< 4 -> 4;
+% B when B =< 8 -> 8;
+% B when B =< 16 -> 16;
+% B when B =< 32 -> 32
+% end.
+
+% charbits(NumOfChars) when NumOfChars =< 2 -> 1;
+% charbits(NumOfChars) when NumOfChars =< 4 -> 2;
+% charbits(NumOfChars) when NumOfChars =< 8 -> 3;
+% charbits(NumOfChars) when NumOfChars =< 16 -> 4;
+% charbits(NumOfChars) when NumOfChars =< 32 -> 5;
+% charbits(NumOfChars) when NumOfChars =< 64 -> 6;
+% charbits(NumOfChars) when NumOfChars =< 128 -> 7;
+% charbits(NumOfChars) when NumOfChars =< 256 -> 8;
+% charbits(NumOfChars) when NumOfChars =< 512 -> 9;
+% charbits(NumOfChars) when NumOfChars =< 1024 -> 10;
+% charbits(NumOfChars) when NumOfChars =< 2048 -> 11;
+% charbits(NumOfChars) when NumOfChars =< 4096 -> 12;
+% charbits(NumOfChars) when NumOfChars =< 8192 -> 13;
+% charbits(NumOfChars) when NumOfChars =< 16384 -> 14;
+% charbits(NumOfChars) when NumOfChars =< 32768 -> 15;
+% charbits(NumOfChars) when NumOfChars =< 65536 -> 16;
+% charbits(NumOfChars) when integer(NumOfChars) ->
+% 16 + charbits1(NumOfChars bsr 16).
+
+% charbits1(0) ->
+% 0;
+% charbits1(NumOfChars) ->
+% 1 + charbits1(NumOfChars bsr 1).
+
+
+chars_decode(Bytes,_,'BMPString',_,Len) ->
+ getBMPChars(Bytes,Len);
+chars_decode(Bytes,NumBits,_StringType,CharInTab,Len) ->
+ chars_decode2(Bytes,CharInTab,NumBits,Len).
+
+
+chars_decode2(Bytes,CharInTab,NumBits,Len) ->
+ chars_decode2(Bytes,CharInTab,NumBits,Len,[]).
+
+chars_decode2(Bytes,_CharInTab,_NumBits,0,Acc) ->
+ {lists:reverse(Acc),Bytes};
+chars_decode2(Bytes,{Min,Max,notab},NumBits,Len,Acc) when NumBits > 8 ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ Result =
+ if
+ Char < 256 -> Char;
+ true ->
+ list_to_tuple(binary_to_list(<<Char:32>>))
+ end,
+ chars_decode2(Bytes2,{Min,Max,notab},NumBits,Len -1,[Result|Acc]);
+chars_decode2(Bytes,{Min,Max,notab},NumBits,Len,Acc) ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ chars_decode2(Bytes2,{Min,Max,notab},NumBits,Len -1,[Char+Min|Acc]);
+
+%% BMPString and UniversalString with PermittedAlphabet are currently not supported
+chars_decode2(Bytes,{Min,Max,CharInTab},NumBits,Len,Acc) ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ chars_decode2(Bytes2,{Min,Max,CharInTab},NumBits,Len -1,[element(Char+1,CharInTab)|Acc]).
+
+
+ % X.691:17
+encode_null(_Val) -> []; % encodes to nothing
+encode_null({Name,Val}) when atom(Name) ->
+ encode_null(Val).
+
+decode_null(Bytes) ->
+ {'NULL',Bytes}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_object_identifier(Val) -> CompleteList
+%% encode_object_identifier({Name,Val}) -> CompleteList
+%% Val -> {Int1,Int2,...,IntN} % N >= 2
+%% Name -> atom()
+%% Int1 -> integer(0..2)
+%% Int2 -> integer(0..39) when Int1 (0..1) else integer()
+%% Int3-N -> integer()
+%% CompleteList -> [{bits,8,Val}|{octets,Ol}|align|...]
+%%
+encode_object_identifier({Name,Val}) when atom(Name) ->
+ encode_object_identifier(Val);
+encode_object_identifier(Val) ->
+ OctetList = e_object_identifier(Val),
+ Octets = list_to_binary(OctetList), % performs a flatten at the same time
+% [{debug,object_identifier},encode_length(undefined,size(Octets)),{octets,Octets}].
+ [encode_length(undefined,size(Octets)),
+ octets_to_complete(size(Octets),Octets)].
+
+%% This code is copied from asn1_encode.erl (BER) and corrected and modified
+
+e_object_identifier({'OBJECT IDENTIFIER',V}) ->
+ e_object_identifier(V);
+e_object_identifier({Cname,V}) when atom(Cname),tuple(V) ->
+ e_object_identifier(tuple_to_list(V));
+e_object_identifier({Cname,V}) when atom(Cname),list(V) ->
+ e_object_identifier(V);
+e_object_identifier(V) when tuple(V) ->
+ e_object_identifier(tuple_to_list(V));
+
+%% E1 = 0|1|2 and (E2 < 40 when E1 = 0|1)
+e_object_identifier([E1,E2|Tail]) when E1 >= 0, E1 < 2, E2 < 40 ; E1==2 ->
+ Head = 40*E1 + E2, % weird
+ e_object_elements([Head|Tail],[]);
+e_object_identifier(Oid=[_,_|_Tail]) ->
+ exit({error,{asn1,{'illegal_value',Oid}}}).
+
+e_object_elements([],Acc) ->
+ lists:reverse(Acc);
+e_object_elements([H|T],Acc) ->
+ e_object_elements(T,[e_object_element(H)|Acc]).
+
+e_object_element(Num) when Num < 128 ->
+ Num;
+%% must be changed to handle more than 2 octets
+e_object_element(Num) -> %% when Num < ???
+ Left = ((Num band 2#11111110000000) bsr 7) bor 2#10000000,
+ Right = Num band 2#1111111 ,
+ [Left,Right].
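+
+%% For example, a hypothetical OID value {1,2,840} should pass through the
+%% helpers above as follows: the first two arcs collapse to 40*1+2 = 42 and
+%% the arc 840 is encoded as the two subidentifier octets [134,72], so
+%% e_object_identifier({1,2,840}) gives [42,[134,72]], which list_to_binary/1
+%% flattens to <<42,134,72>> in encode_object_identifier/1 above.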
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_object_identifier(Bytes) -> {ObjId,RemainingBytes}
+%% ObjId -> {integer(),integer(),...} % at least 2 integers
+%% RemainingBytes -> [integer()] when integer() (0..255)
+decode_object_identifier(Bytes) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ [First|Rest] = dec_subidentifiers(Octs,0,[]),
+ Idlist = if
+ First < 40 ->
+ [0,First|Rest];
+ First < 80 ->
+ [1,First - 40|Rest];
+ true ->
+ [2,First - 80|Rest]
+ end,
+ {list_to_tuple(Idlist),Bytes3}.
+
+dec_subidentifiers([H|T],Av,Al) when H >=16#80 ->
+ dec_subidentifiers(T,(Av bsl 7) + (H band 16#7F),Al);
+dec_subidentifiers([H|T],Av,Al) ->
+ dec_subidentifiers(T,0,[(Av bsl 7) + H |Al]);
+dec_subidentifiers([],_Av,Al) ->
+ lists:reverse(Al).
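+
+%% Decoding reverses the example above: given the content octets [42,134,72],
+%% dec_subidentifiers/3 yields [42,840], and since 40 =< 42 < 80 the first
+%% subidentifier is split back into the arcs 1 and 2, so the decoded value
+%% becomes {1,2,840}.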
+
+get_constraint([{Key,V}],Key) ->
+ V;
+get_constraint([],_) ->
+ no;
+get_constraint(C,Key) ->
+ case lists:keysearch(Key,1,C) of
+ false ->
+ no;
+ {value,{_,V}} ->
+ V
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% complete(InList) -> ByteList
+%% Takes a coded list with bits and bytes and converts it to a list of bytes
+%% Should be applied as the last step at encode of a complete ASN.1 type
+%%
+
+-ifdef(nodriver).
+
+complete(L) ->
+ case complete1(L) of
+ {[],[]} ->
+ <<0>>;
+ {Acc,[]} ->
+ Acc;
+ {Acc,Bacc} ->
+ [Acc|complete_bytes(Bacc)]
+ end.
+
+
+% This function builds the ugly form of lists [E1|E2] to avoid having to reverse it at the end.
+% This is done because it is efficient and because the result will always be sent on a port or
+% converted by means of list_to_binary/1.
+ complete1(InList) when list(InList) ->
+ complete1(InList,[],[]);
+ complete1(InList) ->
+ complete1([InList],[],[]).
+
+ complete1([],Acc,Bacc) ->
+ {Acc,Bacc};
+ complete1([H|T],Acc,Bacc) when list(H) ->
+ {NewH,NewBacc} = complete1(H,Acc,Bacc),
+ complete1(T,NewH,NewBacc);
+
+ complete1([{octets,Bin}|T],Acc,[]) ->
+ complete1(T,[Acc|Bin],[]);
+
+ complete1([{octets,Bin}|T],Acc,Bacc) ->
+ complete1(T,[Acc|[complete_bytes(Bacc),Bin]],[]);
+
+ complete1([{debug,_}|T], Acc,Bacc) ->
+ complete1(T,Acc,Bacc);
+
+ complete1([{bits,N,Val}|T],Acc,Bacc) ->
+ complete1(T,Acc,complete_update_byte(Bacc,Val,N));
+
+ complete1([{bit,Val}|T],Acc,Bacc) ->
+ complete1(T,Acc,complete_update_byte(Bacc,Val,1));
+
+ complete1([align|T],Acc,[]) ->
+ complete1(T,Acc,[]);
+ complete1([align|T],Acc,Bacc) ->
+ complete1(T,[Acc|complete_bytes(Bacc)],[]);
+ complete1([{0,Bin}|T],Acc,[]) when binary(Bin) ->
+ complete1(T,[Acc|Bin],[]);
+ complete1([{Unused,Bin}|T],Acc,[]) when integer(Unused),binary(Bin) ->
+ Size = size(Bin)-1,
+ <<Bs:Size/binary,B>> = Bin,
+ NumBits = 8-Unused,
+ complete1(T,[Acc|Bs],[[B bsr Unused]|NumBits]);
+ complete1([{Unused,Bin}|T],Acc,Bacc) when integer(Unused),binary(Bin) ->
+ Size = size(Bin)-1,
+ <<Bs:Size/binary,B>> = Bin,
+ NumBits = 8 - Unused,
+ Bf = complete_bytes(Bacc),
+ complete1(T,[Acc|[Bf,Bs]],[[B bsr Unused]|NumBits]).
+
+
+ complete_update_byte([],Val,Len) ->
+ complete_update_byte([[0]|0],Val,Len);
+ complete_update_byte([[Byte|Bacc]|NumBits],Val,Len) when NumBits + Len == 8 ->
+ [[0,((Byte bsl Len) + Val) band 255|Bacc]|0];
+ complete_update_byte([[Byte|Bacc]|NumBits],Val,Len) when NumBits + Len > 8 ->
+ Rem = 8 - NumBits,
+ Rest = Len - Rem,
+ complete_update_byte([[0,((Byte bsl Rem) + (Val bsr Rest)) band 255 |Bacc]|0],Val,Rest);
+ complete_update_byte([[Byte|Bacc]|NumBits],Val,Len) ->
+ [[((Byte bsl Len) + Val) band 255|Bacc]|NumBits+Len].
+
+
+ complete_bytes([[Byte|Bacc]|0]) ->
+ lists:reverse(Bacc);
+ complete_bytes([[Byte|Bacc]|NumBytes]) ->
+ lists:reverse([(Byte bsl (8-NumBytes)) band 255|Bacc]);
+ complete_bytes([]) ->
+ [].
+
+-else.
+
+
+ complete(L) ->
+ case catch port_control(drv_complete,1,L) of
+ Bin when binary(Bin) ->
+ Bin;
+ List when list(List) -> handle_error(List,L);
+ {'EXIT',{badarg,Reason}} ->
+ asn1rt_driver_handler:load_driver(),
+ receive
+ driver_ready ->
+ case catch port_control(drv_complete,1,L) of
+ Bin2 when binary(Bin2) -> Bin2;
+ List when list(List) -> handle_error(List,L);
+ Error -> exit(Error)
+ end;
+ {error,Error} -> % error when loading driver
+ %% the driver could not be loaded
+ exit(Error);
+ Error={port_error,Reason} ->
+ exit(Error)
+ end;
+ {'EXIT',Reason} ->
+ exit(Reason)
+ end.
+
+handle_error([],_)->
+ exit({error,{"memory allocation problem"}});
+handle_error("1",L) -> % error in complete in driver
+ exit({error,{asn1_error,L}});
+handle_error(ErrL,L) ->
+ exit({error,{unknown_error,ErrL,L}}).
+
+-endif.
+
+
+octets_to_complete(Len,Val) when Len < 256 ->
+ [20,Len,Val];
+octets_to_complete(Len,Val) ->
+ [21,<<Len:16>>,Val].
+
+octets_unused_to_complete(Unused,Len,Val) when Len < 256 ->
+ [30,Unused,Len,Val];
+octets_unused_to_complete(Unused,Len,Val) ->
+ [31,Unused,<<Len:16>>,Val].
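+
+%% For example, octets_to_complete(3,<<42,134,72>>) gives [20,3,<<42,134,72>>],
+%% while lengths of 256 or more take the [21,<<Len:16>>,Val] form instead.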
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_v1.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_v1.erl
new file mode 100644
index 0000000000..90ffb0cb1c
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_v1.erl
@@ -0,0 +1,1827 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: asn1rt_per_v1.erl,v 1.1 2008/12/17 09:53:31 mikpe Exp $
+%%
+-module(asn1rt_per_v1).
+
+%% encoding / decoding of PER aligned
+
+-include("asn1_records.hrl").
+
+-export([dec_fixup/3, cindex/3, list_to_record/2]).
+-export([setchoiceext/1, setext/1, fixoptionals/2, fixextensions/2,
+ setoptionals/1, fixoptionals2/3, getext/1, getextension/2,
+ skipextensions/3, getbit/1, getchoice/3 ]).
+-export([getoptionals/2, getoptionals/3, set_choice/3,
+ getoptionals2/2,
+ encode_integer/2, encode_integer/3 ]).
+-export([decode_integer/2, decode_integer/3, encode_small_number/1,
+ encode_boolean/1, decode_boolean/1, encode_length/2,
+ decode_length/1, decode_length/2,
+ encode_small_length/1, decode_small_length/1,
+ decode_compact_bit_string/3]).
+-export([encode_enumerated/3, decode_enumerated/3,
+ encode_bit_string/3, decode_bit_string/3 ]).
+-export([encode_octet_string/2, decode_octet_string/2,
+ encode_null/1, decode_null/1,
+ encode_object_identifier/1, decode_object_identifier/1,
+ complete/1]).
+
+-export([encode_open_type/2, decode_open_type/2]).
+
+-export([encode_UniversalString/2, decode_UniversalString/2,
+ encode_PrintableString/2, decode_PrintableString/2,
+ encode_GeneralString/2, decode_GeneralString/2,
+ encode_GraphicString/2, decode_GraphicString/2,
+ encode_TeletexString/2, decode_TeletexString/2,
+ encode_VideotexString/2, decode_VideotexString/2,
+ encode_VisibleString/2, decode_VisibleString/2,
+ encode_BMPString/2, decode_BMPString/2,
+ encode_IA5String/2, decode_IA5String/2,
+ encode_NumericString/2, decode_NumericString/2,
+ encode_ObjectDescriptor/2, decode_ObjectDescriptor/1
+ ]).
+
+
+dec_fixup(Terms,Cnames,RemBytes) ->
+ dec_fixup(Terms,Cnames,RemBytes,[]).
+
+dec_fixup([novalue|T],[_Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,Acc);
+dec_fixup([{_Name,novalue}|T],[_Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,Acc);
+dec_fixup([H|T],[Hc|Tc],RemBytes,Acc) ->
+ dec_fixup(T,Tc,RemBytes,[{Hc,H}|Acc]);
+dec_fixup([],_Cnames,RemBytes,Acc) ->
+ {lists:reverse(Acc),RemBytes}.
+
+cindex(Ix,Val,Cname) ->
+ case element(Ix,Val) of
+ {Cname,Val2} -> Val2;
+ X -> X
+ end.
+
+% converts a list to a record if necessary
+list_to_record(Name,List) when list(List) ->
+ list_to_tuple([Name|List]);
+list_to_record(_Name,Tuple) when tuple(Tuple) ->
+ Tuple.
+
+%%--------------------------------------------------------
+%% setchoiceext(InRootSet) -> [{bit,X}]
+%% X is set to 1 when InRootSet==false
+%% X is set to 0 when InRootSet==true
+%%
+setchoiceext(true) ->
+ [{debug,choiceext},{bit,0}];
+setchoiceext(false) ->
+ [{debug,choiceext},{bit,1}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% setext(true|false) -> CompleteList
+%%
+
+setext(true) ->
+ [{debug,ext},{bit,1}];
+setext(false) ->
+ [{debug,ext},{bit,0}].
+
+%%
+
+fixoptionals2(OptList,OptLength,Val) when tuple(Val) ->
+ Bits = fixoptionals2(OptList,Val,0),
+ {Val,{bits,OptLength,Bits}};
+
+fixoptionals2([],_Val,Acc) ->
+ %% Optbits
+ Acc;
+fixoptionals2([Pos|Ot],Val,Acc) ->
+ case element(Pos,Val) of
+ asn1_NOVALUE -> fixoptionals2(Ot,Val,Acc bsl 1);
+ asn1_DEFAULT -> fixoptionals2(Ot,Val,Acc bsl 1);
+ _ -> fixoptionals2(Ot,Val,(Acc bsl 1) + 1)
+ end.
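+
+%% For example, with a hypothetical record {'Seq',10,asn1_NOVALUE,20} whose
+%% optional components sit at positions 2, 3 and 4,
+%% fixoptionals2([2,3,4],3,{'Seq',10,asn1_NOVALUE,20}) should give
+%% {{'Seq',10,asn1_NOVALUE,20},{bits,3,2#101}}: present components map to 1,
+%% absent ones to 0, with the first optional in the most significant bit.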
+
+
+%%
+%% fixoptionals remains only for backward compatibility purposes
+fixoptionals(OptList,Val) when tuple(Val) ->
+ fixoptionals(OptList,Val,[]);
+
+fixoptionals(OptList,Val) when list(Val) ->
+ fixoptionals(OptList,Val,1,[],[]).
+
+fixoptionals([],Val,Acc) ->
+ % return {Val,Opt}
+ {Val,lists:reverse(Acc)};
+fixoptionals([{_,Pos}|Ot],Val,Acc) ->
+ case element(Pos+1,Val) of
+ asn1_NOVALUE -> fixoptionals(Ot,Val,[0|Acc]);
+ asn1_DEFAULT -> fixoptionals(Ot,Val,[0|Acc]);
+ _ -> fixoptionals(Ot,Val,[1|Acc])
+ end.
+
+
+%setoptionals(OptList,Val) ->
+% Vlist = tuple_to_list(Val),
+% setoptionals(OptList,Vlist,1,[]).
+
+fixoptionals([{Name,Pos}|Ot],[{Name,Val}|Vt],_Opt,Acc1,Acc2) ->
+ fixoptionals(Ot,Vt,Pos+1,[1|Acc1],[{Name,Val}|Acc2]);
+fixoptionals([{_Name,Pos}|Ot],V,Pos,Acc1,Acc2) ->
+ fixoptionals(Ot,V,Pos+1,[0|Acc1],[asn1_NOVALUE|Acc2]);
+fixoptionals(O,[Vh|Vt],Pos,Acc1,Acc2) ->
+ fixoptionals(O,Vt,Pos+1,Acc1,[Vh|Acc2]);
+fixoptionals([],[Vh|Vt],Pos,Acc1,Acc2) ->
+ fixoptionals([],Vt,Pos+1,Acc1,[Vh|Acc2]);
+fixoptionals([],[],_,Acc1,Acc2) ->
+ % return {Val,Opt}
+ {list_to_tuple([asn1_RECORDNAME|lists:reverse(Acc2)]),lists:reverse(Acc1)}.
+
+setoptionals([H|T]) ->
+ [{bit,H}|setoptionals(T)];
+setoptionals([]) ->
+ [{debug,optionals}].
+
+getext(Bytes) when tuple(Bytes) ->
+ getbit(Bytes);
+getext(Bytes) when list(Bytes) ->
+ getbit({0,Bytes}).
+
+getextension(0, Bytes) ->
+ {{},Bytes};
+getextension(1, Bytes) ->
+ {Len,Bytes2} = decode_small_length(Bytes),
+ {Blist, Bytes3} = getbits_as_list(Len,Bytes2),
+ {list_to_tuple(Blist),Bytes3}.
+
+fixextensions({ext,ExtPos,ExtNum},Val) ->
+ case fixextensions(ExtPos,ExtNum+ExtPos,Val,0) of
+ 0 -> [];
+ ExtBits ->
+ [encode_small_length(ExtNum),{bits,ExtNum,ExtBits}]
+ end.
+
+fixextensions(Pos,MaxPos,_,Acc) when Pos >= MaxPos ->
+ Acc;
+fixextensions(Pos,ExtPos,Val,Acc) ->
+ Bit = case catch(element(Pos+1,Val)) of
+ asn1_NOVALUE ->
+ 0;
+ asn1_NOEXTVALUE ->
+ 0;
+ {'EXIT',_} ->
+ 0;
+ _ ->
+ 1
+ end,
+ fixextensions(Pos+1,ExtPos,Val,(Acc bsl 1)+Bit).
+
+skipextensions(Bytes,Nr,ExtensionBitPattern) ->
+ case (catch element(Nr,ExtensionBitPattern)) of
+ 1 ->
+ {_,Bytes2} = decode_open_type(Bytes,[]),
+ skipextensions(Bytes2, Nr+1, ExtensionBitPattern);
+ 0 ->
+ skipextensions(Bytes, Nr+1, ExtensionBitPattern);
+ {'EXIT',_} -> % badarg, no more extensions
+ Bytes
+ end.
+
+
+getchoice(Bytes,1,0) -> % a choice with only 1 alternative is not encoded
+ {0,Bytes};
+getchoice(Bytes,_NumChoices,1) ->
+ decode_small_number(Bytes);
+getchoice(Bytes,NumChoices,0) ->
+ decode_integer(Bytes,[{'ValueRange',{0,NumChoices-1}}]).
+
+getoptionals2(Bytes,NumOpt) ->
+ getbits(Bytes,NumOpt).
+
+%% getoptionals is kept only for backwards compatibility
+getoptionals(Bytes,NumOpt) ->
+ {Blist,Bytes1} = getbits_as_list(NumOpt,Bytes),
+ {list_to_tuple(Blist),Bytes1}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% getoptionals/3 is only here for compatibility from 1.3.2;
+%% the code generator uses getoptionals/2
+
+getoptionals(Bytes,L,NumComp) when list(L) ->
+ {Blist,Bytes1} = getbits_as_list(length(L),Bytes),
+ {list_to_tuple(comptuple(Blist,L,NumComp,1)),Bytes1}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% comptuple is only here for compatibility; not used from 1.3.2
+comptuple([Bh|Bt],[{_Name,Nr}|T],NumComp,Nr) ->
+ [Bh|comptuple(Bt,T,NumComp-1,Nr+1)];
+comptuple(Bl,[{Name,Tnr}|Tl],NumComp,Nr) ->
+ [0|comptuple(Bl,[{Name,Tnr}|Tl],NumComp-1,Nr+1)];
+comptuple(_B,_L,0,_Nr) ->
+ [];
+comptuple(B,O,N,Nr) ->
+ [0|comptuple(B,O,N-1,Nr+1)].
+
+%% getbits_as_binary(Num,Bytes) -> {{Unused,BinBits},RestBytes},
+%% Num = integer(),
+%% Bytes = list() | tuple(),
+%% Unused = integer(),
+%% BinBits = binary(),
+%% RestBytes = tuple()
+getbits_as_binary(Num,Bytes) when list(Bytes) ->
+ getbits_as_binary(Num,{0,Bytes});
+getbits_as_binary(_Num,{Used,[]}) ->
+ {{0,<<>>},{Used,[]}};
+getbits_as_binary(Num,{Used,Bits=[H|T]}) ->
+ B1 = case (Num+Used) =< 8 of
+ true -> Num;
+ _ -> 8-Used
+ end,
+ B2 = Num - B1,
+ Pad = (8 - ((B1+B2) rem 8)) rem 8,% Pad /= 8
+ RestBits = lists:nthtail((B1+B2) div 8,Bits),
+ Int = integer_from_list(B2,T,0),
+ NewUsed = (Used + Num) rem 8,
+ {{Pad,<<(H bsr (8-(Used+B1))):B1,Int:B2,0:Pad>>},{NewUsed,RestBits}}.
+
+integer_from_list(_Int,[],BigInt) ->
+ BigInt;
+integer_from_list(Int,[H|_T],BigInt) when Int < 8 ->
+ (BigInt bsl Int) bor (H bsr (8-Int));
+integer_from_list(Int,[H|T],BigInt) ->
+ integer_from_list(Int-8,T,(BigInt bsl 8) bor H).
+
+getbits_as_list(Num,Bytes) ->
+ getbits_as_list(Num,Bytes,[]).
+
+getbits_as_list(0,Bytes,Acc) ->
+ {lists:reverse(Acc),Bytes};
+getbits_as_list(Num,Bytes,Acc) ->
+ {Bit,NewBytes} = getbit(Bytes),
+ getbits_as_list(Num-1,NewBytes,[Bit|Acc]).
+
+getbit(Bytes) ->
+% io:format("getbit:~p~n",[Bytes]),
+ getbit1(Bytes).
+
+getbit1({7,[H|T]}) ->
+ {H band 1,{0,T}};
+getbit1({Pos,[H|T]}) ->
+ {(H bsr (7-Pos)) band 1,{(Pos+1) rem 8,[H|T]}};
+getbit1(Bytes) when list(Bytes) ->
+ getbit1({0,Bytes}).
+
+%% This could be optimized
+getbits(Buffer,Num) ->
+% io:format("getbits:Buffer = ~p~nNum=~p~n",[Buffer,Num]),
+ getbits(Buffer,Num,0).
+
+getbits(Buffer,0,Acc) ->
+ {Acc,Buffer};
+getbits(Buffer,Num,Acc) ->
+ {B,NewBuffer} = getbit(Buffer),
+ getbits(NewBuffer,Num-1,B + (Acc bsl 1)).
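+
+%% For example, with the buffer representation {BitsUsed,Bytes} used above,
+%% getbits({0,[2#10110000]},3) should give {5,{3,[2#10110000]}}: the three
+%% most significant bits 101 are consumed and the bit position advances to 3.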
+
+
+getoctet(Bytes) when list(Bytes) ->
+ getoctet({0,Bytes});
+getoctet(Bytes) ->
+% io:format("getoctet:Buffer = ~p~n",[Bytes]),
+ getoctet1(Bytes).
+
+getoctet1({0,[H|T]}) ->
+ {H,{0,T}};
+getoctet1({_Pos,[_,H|T]}) ->
+ {H,{0,T}}.
+
+align({0,L}) ->
+ {0,L};
+align({_Pos,[_H|T]}) ->
+ {0,T};
+align(Bytes) ->
+ {0,Bytes}.
+
+getoctets(Buffer,Num) ->
+% io:format("getoctets:Buffer = ~p~nNum = ~p~n",[Buffer,Num]),
+ getoctets(Buffer,Num,0).
+
+getoctets(Buffer,0,Acc) ->
+ {Acc,Buffer};
+getoctets(Buffer,Num,Acc) ->
+ {Oct,NewBuffer} = getoctet(Buffer),
+ getoctets(NewBuffer,Num-1,(Acc bsl 8)+Oct).
+
+getoctets_as_list(Buffer,Num) ->
+ getoctets_as_list(Buffer,Num,[]).
+
+getoctets_as_list(Buffer,0,Acc) ->
+ {lists:reverse(Acc),Buffer};
+getoctets_as_list(Buffer,Num,Acc) ->
+ {Oct,NewBuffer} = getoctet(Buffer),
+ getoctets_as_list(NewBuffer,Num-1,[Oct|Acc]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% set_choice(Alt,Choices,Altnum) -> ListofBitSettings
+%% Alt = atom()
+%% Altnum = integer() | {integer(),integer()}% number of alternatives
+%% Choices = [atom()] | {[atom()],[atom()]}
+%% When Choices is a tuple, the first list is the Rootset and the
+%% second is the Extensions; Altnum must then also be a tuple with the
+%% lengths of the 2 lists
+%%
+set_choice(Alt,{L1,L2},{Len1,_Len2}) ->
+ case set_choice_tag(Alt,L1) of
+ N when integer(N), Len1 > 1 ->
+ [{bit,0}, % the value is in the root set
+ encode_integer([{'ValueRange',{0,Len1-1}}],N)];
+ N when integer(N) ->
+ [{bit,0}]; % no encoding if only 0 or 1 alternative
+ false ->
+ [{bit,1}, % extension value
+ case set_choice_tag(Alt,L2) of
+ N2 when integer(N2) ->
+ encode_small_number(N2);
+ false ->
+ unknown_choice_alt
+ end]
+ end;
+set_choice(Alt,L,Len) ->
+ case set_choice_tag(Alt,L) of
+ N when integer(N), Len > 1 ->
+ encode_integer([{'ValueRange',{0,Len-1}}],N);
+ N when integer(N) ->
+ []; % no encoding if only 0 or 1 alternative
+ false ->
+ [unknown_choice_alt]
+ end.
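+
+%% For example, for a non-extensible choice with the alternatives [a,b,c],
+%% set_choice(b,[a,b,c],3) encodes the index 1 as a constrained integer and
+%% should give {bits,2,1}.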
+
+set_choice_tag(Alt,Choices) ->
+ set_choice_tag(Alt,Choices,0).
+
+set_choice_tag(Alt,[Alt|_Rest],Tag) ->
+ Tag;
+set_choice_tag(Alt,[_H|Rest],Tag) ->
+ set_choice_tag(Alt,Rest,Tag+1);
+set_choice_tag(_Alt,[],_Tag) ->
+ false.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_open_type(Constraint, Value) -> CompleteList
+%% Value = list of bytes of an already encoded value (the list must be flat)
+%% | binary
+%% Constraint = not used in this version
+%%
+encode_open_type(_Constraint, Val) when list(Val) ->
+ [encode_length(undefined,length(Val)),align,
+ {octets,Val}];
+encode_open_type(_Constraint, Val) when binary(Val) ->
+ [encode_length(undefined,size(Val)),align,
+ {octets,binary_to_list(Val)}].
+%% the binary_to_list is not optimal but compatible with the current solution
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_open_type(Buffer,Constraint) -> Value
+%% Constraint is not used in this version
+%% Buffer = [byte] with PER encoded data
+%% Value = [byte] with decoded data (which must be decoded again as some type)
+%%
+decode_open_type(Bytes, _Constraint) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_integer(Constraint,Value,NamedNumberList) -> CompleteList
+%% encode_integer(Constraint,Value) -> CompleteList
+%% encode_integer(Constraint,{Name,Value}) -> CompleteList
+%%
+%%
+encode_integer(C,V,NamedNumberList) when atom(V) ->
+ case lists:keysearch(V,1,NamedNumberList) of
+ {value,{_,NewV}} ->
+ encode_integer(C,NewV);
+ _ ->
+ exit({error,{asn1,{namednumber,V}}})
+ end;
+encode_integer(C,V,_) when integer(V) ->
+ encode_integer(C,V);
+encode_integer(C,{Name,V},NamedNumberList) when atom(Name) ->
+ encode_integer(C,V,NamedNumberList).
+
+encode_integer(C,{Name,Val}) when atom(Name) ->
+ encode_integer(C,Val);
+
+encode_integer({Rc,_Ec},Val) ->
+ case (catch encode_integer(Rc,Val)) of
+ {'EXIT',{error,{asn1,_}}} ->
+ [{bit,1},encode_unconstrained_number(Val)];
+ Encoded ->
+ [{bit,0},Encoded]
+ end;
+encode_integer(C,Val ) when list(C) ->
+ case get_constraint(C,'SingleValue') of
+ no ->
+ encode_integer1(C,Val);
+ V when integer(V),V == Val ->
+ []; % a type restricted to a single value encodes to nothing
+ V when list(V) ->
+ case lists:member(Val,V) of
+ true ->
+ encode_integer1(C,Val);
+ _ ->
+ exit({error,{asn1,{illegal_value,Val}}})
+ end;
+ _ ->
+ exit({error,{asn1,{illegal_value,Val}}})
+ end.
+
+encode_integer1(C, Val) ->
+ case VR = get_constraint(C,'ValueRange') of
+ no ->
+ encode_unconstrained_number(Val);
+ {Lb,'MAX'} ->
+ encode_semi_constrained_number(Lb,Val);
+ %% positive with range
+ {Lb,Ub} when Val >= Lb,
+ Ub >= Val ->
+ encode_constrained_number(VR,Val);
+ _ ->
+ exit({error,{asn1,{illegal_value,VR,Val}}})
+ end.
+
+decode_integer(Buffer,Range,NamedNumberList) ->
+ {Val,Buffer2} = decode_integer(Buffer,Range),
+ case lists:keysearch(Val,2,NamedNumberList) of
+ {value,{NewVal,_}} -> {NewVal,Buffer2};
+ _ -> {Val,Buffer2}
+ end.
+
+decode_integer(Buffer,{Rc,_Ec}) ->
+ {Ext,Buffer2} = getext(Buffer),
+ case Ext of
+ 0 -> decode_integer(Buffer2,Rc);
+ 1 -> decode_unconstrained_number(Buffer2)
+ end;
+decode_integer(Buffer,undefined) ->
+ decode_unconstrained_number(Buffer);
+decode_integer(Buffer,C) ->
+ case get_constraint(C,'SingleValue') of
+ V when integer(V) ->
+ {V,Buffer};
+ V when list(V) ->
+ {Val,Buffer2} = decode_integer1(Buffer,C),
+ case lists:member(Val,V) of
+ true ->
+ {Val,Buffer2};
+ _ ->
+ exit({error,{asn1,{illegal_value,Val}}})
+ end;
+ _ ->
+ decode_integer1(Buffer,C)
+ end.
+
+decode_integer1(Buffer,C) ->
+ case VR = get_constraint(C,'ValueRange') of
+ no ->
+ decode_unconstrained_number(Buffer);
+ {Lb, 'MAX'} ->
+ decode_semi_constrained_number(Buffer,Lb);
+ {_,_} ->
+ decode_constrained_number(Buffer,VR)
+ end.
+
+% X.691:10.6 Encoding of a normally small non-negative whole number
+% Use this for encoding of CHOICE index if there is an extension marker in
+% the CHOICE
+encode_small_number({Name,Val}) when atom(Name) ->
+ encode_small_number(Val);
+encode_small_number(Val) when Val =< 63 ->
+ [{bit,0},{bits,6,Val}];
+encode_small_number(Val) ->
+ [{bit,1},encode_semi_constrained_number(0,Val)].
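+
+%% For example, encode_small_number(5) gives [{bit,0},{bits,6,5}], while
+%% values above 63 are instead length-prefixed via
+%% encode_semi_constrained_number/2 behind a leading {bit,1}.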
+
+decode_small_number(Bytes) ->
+ {Bit,Bytes2} = getbit(Bytes),
+ case Bit of
+ 0 ->
+ getbits(Bytes2,6);
+ 1 ->
+ decode_semi_constrained_number(Bytes2,{0,'MAX'})
+ end.
+
+% X.691:10.7 Encoding of a semi-constrained whole number
+%% might be an optimization encode_semi_constrained_number(0,Val) ->
+encode_semi_constrained_number(C,{Name,Val}) when atom(Name) ->
+ encode_semi_constrained_number(C,Val);
+encode_semi_constrained_number({Lb,'MAX'},Val) ->
+ encode_semi_constrained_number(Lb,Val);
+encode_semi_constrained_number(Lb,Val) ->
+ Val2 = Val - Lb,
+ Octs = eint_positive(Val2),
+ [encode_length(undefined,length(Octs)),{octets,Octs}].
+
+decode_semi_constrained_number(Bytes,{Lb,_}) ->
+ decode_semi_constrained_number(Bytes,Lb);
+decode_semi_constrained_number(Bytes,Lb) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {V,Bytes3} = getoctets(Bytes2,Len),
+ {V+Lb,Bytes3}.
+
+encode_constrained_number(Range,{Name,Val}) when atom(Name) ->
+ encode_constrained_number(Range,Val);
+encode_constrained_number({Lb,Ub},Val) when Val >= Lb, Ub >= Val ->
+ Range = Ub - Lb + 1,
+ Val2 = Val - Lb,
+ if
+ Range == 2 ->
+ {bits,1,Val2};
+ Range =< 4 ->
+ {bits,2,Val2};
+ Range =< 8 ->
+ {bits,3,Val2};
+ Range =< 16 ->
+ {bits,4,Val2};
+ Range =< 32 ->
+ {bits,5,Val2};
+ Range =< 64 ->
+ {bits,6,Val2};
+ Range =< 128 ->
+ {bits,7,Val2};
+ Range =< 255 ->
+ {bits,8,Val2};
+ Range =< 256 ->
+ {octets,1,Val2};
+ Range =< 65536 ->
+ {octets,2,Val2};
+ Range =< 16#1000000 ->
+ Octs = eint_positive(Val2),
+ [encode_length({1,3},length(Octs)),{octets,Octs}];
+ Range =< 16#100000000 ->
+ Octs = eint_positive(Val2),
+ [encode_length({1,4},length(Octs)),{octets,Octs}];
+ Range =< 16#10000000000 ->
+ Octs = eint_positive(Val2),
+ [encode_length({1,5},length(Octs)),{octets,Octs}];
+ true ->
+ exit({not_supported,{integer_range,Range}})
+ end.
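+
+%% For example, encode_constrained_number({0,7},5) has a range of 8 and gives
+%% {bits,3,5}, while encode_constrained_number({0,255},200) has a range of 256
+%% and gives the whole-octet form {octets,1,200}.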
+
+decode_constrained_number(Buffer,{Lb,Ub}) ->
+ Range = Ub - Lb + 1,
+% Val2 = Val - Lb,
+ {Val,Remain} =
+ if
+ Range == 2 ->
+ getbits(Buffer,1);
+ Range =< 4 ->
+ getbits(Buffer,2);
+ Range =< 8 ->
+ getbits(Buffer,3);
+ Range =< 16 ->
+ getbits(Buffer,4);
+ Range =< 32 ->
+ getbits(Buffer,5);
+ Range =< 64 ->
+ getbits(Buffer,6);
+ Range =< 128 ->
+ getbits(Buffer,7);
+ Range =< 255 ->
+ getbits(Buffer,8);
+ Range =< 256 ->
+ getoctets(Buffer,1);
+ Range =< 65536 ->
+ getoctets(Buffer,2);
+ Range =< 16#1000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,3}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ Range =< 16#100000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,4}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ Range =< 16#10000000000 ->
+ {Len,Bytes2} = decode_length(Buffer,{1,5}),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_pos_integer(Octs),Bytes3};
+ true ->
+ exit({not_supported,{integer_range,Range}})
+ end,
+ {Val+Lb,Remain}.
+
+% X.691:10.8 Encoding of an unconstrained whole number
+
+encode_unconstrained_number(Val) when Val >= 0 ->
+ Oct = eint(Val,[]),
+ [{debug,unconstrained_number},
+ encode_length({0,'MAX'},length(Oct)),
+ {octets,Oct}];
+encode_unconstrained_number(Val) -> % negative
+ Oct = enint(Val,[]),
+ [{debug,unconstrained_number},
+ encode_length({0,'MAX'},length(Oct)),
+ {octets,Oct}].
+
+%% used for positive Values which don't need a sign bit
+eint_positive(Val) ->
+ case eint(Val,[]) of
+ [0,B1|T] ->
+ [B1|T];
+ T ->
+ T
+ end.
+
+eint(0, [B|Acc]) when B < 128 ->
+ [B|Acc];
+eint(N, Acc) ->
+ eint(N bsr 8, [N band 16#ff| Acc]).
+
+enint(-1, [B1|T]) when B1 > 127 ->
+ [B1|T];
+enint(N, Acc) ->
+ enint(N bsr 8, [N band 16#ff|Acc]).
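+
+%% For example, encode_unconstrained_number(300) builds the two's complement
+%% octets [1,44] via eint/2 and should give
+%% [{debug,unconstrained_number},{octet,2},{octets,[1,44]}], where {octet,2}
+%% is the unconstrained length determinant produced by encode_length/2.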
+
+%% used for signed positive values
+
+%eint(Val, Ack) ->
+% X = Val band 255,
+% Next = Val bsr 8,
+% if
+% Next == 0, X >= 127 ->
+% [0,X|Ack];
+% Next == 0 ->
+% [X|Ack];
+% true ->
+% eint(Next,[X|Ack])
+% end.
+
+%%% used for signed negative values
+%enint(Val, Acc) ->
+% NumOctets = if
+% -Val < 16#80 -> 1;
+% -Val < 16#8000 ->2;
+% -Val < 16#800000 ->3;
+% -Val < 16#80000000 ->4;
+% -Val < 16#8000000000 ->5;
+% -Val < 16#800000000000 ->6;
+% -Val < 16#80000000000000 ->7;
+% -Val < 16#8000000000000000 ->8;
+% -Val < 16#800000000000000000 ->9
+% end,
+% enint(Val,Acc,NumOctets).
+
+%enint(Val, Acc,0) ->
+% Acc;
+%enint(Val, Acc,NumOctets) ->
+% enint(Val bsr 8,[Val band 255|Acc],NumOctets-1).
+
+
+decode_unconstrained_number(Bytes) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {Ints,Bytes3} = getoctets_as_list(Bytes2,Len),
+ {dec_integer(Ints),Bytes3}.
+
+dec_pos_integer(Ints) ->
+ decpint(Ints, 8 * (length(Ints) - 1)).
+dec_integer(Ints) when hd(Ints) band 255 =< 127 -> %% Positive number
+ decpint(Ints, 8 * (length(Ints) - 1));
+dec_integer(Ints) -> %% Negative
+ decnint(Ints, 8 * (length(Ints) - 1)).
+
+decpint([Byte|Tail], Shift) ->
+ (Byte bsl Shift) bor decpint(Tail, Shift-8);
+decpint([], _) -> 0.
+
+decnint([Byte|Tail], Shift) ->
+ (-128 + (Byte band 127) bsl Shift) bor decpint(Tail, Shift-8).
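+
+%% For example, dec_integer([1,44]) treats the octets as a positive two's
+%% complement number and gives 300, while dec_integer([255]) has a set sign
+%% bit and gives -1.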
+
+minimum_octets(Val) ->
+ minimum_octets(Val,[]).
+
+minimum_octets(Val,Acc) when Val > 0 ->
+ minimum_octets((Val bsr 8),[Val band 16#FF|Acc]);
+minimum_octets(0,Acc) ->
+ Acc.
+
+
+%% X.691:10.9 Encoding of a length determinant
+%%encode_small_length(undefined,Len) -> % null means no UpperBound
+%% encode_small_number(Len).
+
+%% X.691:10.9.3.5
+%% X.691:10.9.3.7
+encode_length(undefined,Len) -> % un-constrained
+ if
+ Len < 128 ->
+ {octet,Len band 16#7F};
+ Len < 16384 ->
+ {octets,2,2#1000000000000000 bor Len};
+ true ->
+ exit({error,{asn1,{encode_length,{nyi,above_16k}}}})
+ end;
+
+encode_length({0,'MAX'},Len) ->
+ encode_length(undefined,Len);
+encode_length({Lb,Ub},Len) when Ub =< 65535 ,Lb >= 0 -> % constrained
+ encode_constrained_number({Lb,Ub},Len);
+encode_length({{Lb,Ub},[]},Len) when Ub =< 65535 ,Lb >= 0 ->
+ %% constrained extensible
+ [{bit,0},encode_constrained_number({Lb,Ub},Len)];
+encode_length(SingleValue,_) when integer(SingleValue) ->
+ [].
+
+encode_small_length(Len) when Len =< 64 ->
+ [{bit,0},{bits,6,Len-1}];
+encode_small_length(Len) ->
+ [{bit,1},encode_length(undefined,Len)].
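+
+%% For example, encode_length(undefined,3) gives {octet,3},
+%% encode_length(undefined,300) gives {octets,2,16#8000 bor 300} and
+%% encode_length({1,7},3) gives the constrained form {bits,3,2};
+%% encode_small_length(1) gives [{bit,0},{bits,6,0}].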
+
+decode_small_length(Buffer) ->
+ case getbit(Buffer) of
+ {0,Remain} ->
+ {Bits,Remain2} = getbits(Remain,6),
+ {Bits+1,Remain2};
+ {1,Remain} ->
+ decode_length(Remain,undefined)
+ end.
+
+decode_length(Buffer) ->
+ decode_length(Buffer,undefined).
+
+decode_length(Buffer,undefined) -> % un-constrained
+ Buffer2 = align(Buffer),
+ {Bits,_} = getbits(Buffer2,2),
+ case Bits of
+ 2 ->
+ {Val,Bytes3} = getoctets(Buffer2,2),
+ {(Val band 16#3FFF),Bytes3};
+ 3 ->
+ exit({error,{asn1,{decode_length,{nyi,above_16k}}}});
+ _ ->
+ {Val,Bytes3} = getoctet(Buffer2),
+ {Val band 16#7F,Bytes3}
+ end;
+
+decode_length(Buffer,{Lb,Ub}) when Ub =< 65535 ,Lb >= 0 -> % constrained
+ decode_constrained_number(Buffer,{Lb,Ub});
+
+decode_length(Buffer,{{Lb,Ub},[]}) ->
+ case getbit(Buffer) of
+ {0,Buffer2} ->
+ decode_length(Buffer2, {Lb,Ub})
+ end;
+ % X.691:10.9.3.5
+decode_length(Buffer,{_,_Lb,_Ub}) -> %when Len =< 127 -> % Unconstrained or large Ub
+ case getbit(Buffer) of
+ {0,Remain} ->
+ getbits(Remain,7);
+ {1,_Remain} ->
+ {Val,Remain2} = getoctets(Buffer,2),
+ {Val band 2#0111111111111111, Remain2}
+ end;
+decode_length(Buffer,SingleValue) when integer(SingleValue) ->
+ {SingleValue,Buffer}.
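+
+%% For example, decoding the length encoded as {octets,2,16#8000 bor 300}
+%% above, decode_length({0,[16#81,16#2C]},undefined) should give {300,{0,[]}},
+%% and the short form decode_length({0,[5]},undefined) gives {5,{0,[]}}.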
+
+
+% X.691:11
+encode_boolean({Name,Val}) when atom(Name) ->
+ encode_boolean(Val);
+encode_boolean(true) ->
+ {bit,1};
+encode_boolean(false) ->
+ {bit,0};
+encode_boolean(Val) ->
+ exit({error,{asn1,{encode_boolean,Val}}}).
+
+
+decode_boolean(Buffer) -> %when record(Buffer,buffer)
+ case getbit(Buffer) of
+ {1,Remain} -> {true,Remain};
+ {0,Remain} -> {false,Remain}
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% X.691:12
+%% ENUMERATED
+%%
+%% encode_enumerated(C,Value,NamedNumberTup) -> CompleteList
+%%
+%%
+
+encode_enumerated(C,{Name,Value},NamedNumberList) when
+ atom(Name),list(NamedNumberList) ->
+ encode_enumerated(C,Value,NamedNumberList);
+
+%% ENUMERATED with extension mark
+encode_enumerated(_C,{asn1_enum,Value},{_Nlist1,Nlist2}) when Value >= length(Nlist2) ->
+ [{bit,1},encode_small_number(Value)];
+encode_enumerated(C,Value,{Nlist1,Nlist2}) ->
+ case enum_search(Value,Nlist1,0) of
+ NewV when integer(NewV) ->
+ [{bit,0},encode_integer(C,NewV)];
+ false ->
+ case enum_search(Value,Nlist2,0) of
+ ExtV when integer(ExtV) ->
+ [{bit,1},encode_small_number(ExtV)];
+ false ->
+ exit({error,{asn1,{encode_enumerated,Value}}})
+ end
+ end;
+
+encode_enumerated(C,Value,NamedNumberList) when list(NamedNumberList) ->
+ case enum_search(Value,NamedNumberList,0) of
+ NewV when integer(NewV) ->
+ encode_integer(C,NewV);
+ false ->
+ exit({error,{asn1,{encode_enumerated,Value}}})
+ end.
+
+%% returns the ordinal number from 0, 1, ... in the list where Name is found
+%% or false if not found
+%%
+enum_search(Name,[Name|_NamedNumberList],Acc) ->
+ Acc;
+enum_search(Name,[_H|T],Acc) ->
+ enum_search(Name,T,Acc+1);
+enum_search(_,[],_) ->
+ false. % name not found !error
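+
+%% For example, assuming a plain name list, encode_enumerated with the
+%% constraint [{'ValueRange',{0,2}}], the value green and the name list
+%% [red,green,blue] finds the ordinal 1 via enum_search/3 and should give
+%% {bits,2,1}.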
+
+%% ENUMERATED with extension marker
+decode_enumerated(Buffer,C,{Ntup1,Ntup2}) when tuple(Ntup1), tuple(Ntup2) ->
+ {Ext,Buffer2} = getext(Buffer),
+ case Ext of
+ 0 -> % not an extension value
+ {Val,Buffer3} = decode_integer(Buffer2,C),
+ case catch (element(Val+1,Ntup1)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer3};
+ _Error -> exit({error,{asn1,{decode_enumerated,{Val,[Ntup1,Ntup2]}}}})
+ end;
+	1 -> % this is an extension value
+ {Val,Buffer3} = decode_small_number(Buffer2),
+ case catch (element(Val+1,Ntup2)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer3};
+ _ -> {{asn1_enum,Val},Buffer3}
+ end
+ end;
+
+decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) ->
+ {Val,Buffer2} = decode_integer(Buffer,C),
+ case catch (element(Val+1,NamedNumberTup)) of
+ NewVal when atom(NewVal) -> {NewVal,Buffer2};
+ _Error -> exit({error,{asn1,{decode_enumerated,{Val,NamedNumberTup}}}})
+ end.
+
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+%% Bitstring value, ITU_T X.690 Chapter 8.5
+%%===============================================================================
+%%===============================================================================
+%%===============================================================================
+
+%%===============================================================================
+%% encode bitstring value
+%%===============================================================================
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% bitstring NamedBitList
+%% Val can be of:
+%% - [identifiers] where only named identifiers are set to one;
+%%   the Constraint must then carry some information about the
+%%   bit length.
+%% - [list of ones and zeroes] all bits
+%% - integer value representing the bitlist
+%% C is the size constraint (Len), only valid when identifiers are used
+
+
+%% when the value is a list of {Unused,BinBits}, where
+%% Unused = integer(),
+%% BinBits = binary().
+encode_bit_string(C,Bin={Unused,BinBits},NamedBitList) when integer(Unused),
+ binary(BinBits) ->
+ encode_bin_bit_string(C,Bin,NamedBitList);
+
+%% when the value is a list of named bits
+encode_bit_string(C, [FirstVal | RestVal], NamedBitList) when atom(FirstVal) ->
+ ToSetPos = get_all_bitposes([FirstVal | RestVal], NamedBitList, []),
+ BitList = make_and_set_list(ToSetPos,0),
+ encode_bit_string(C,BitList,NamedBitList);
+
+encode_bit_string(C, [{bit,No} | RestVal], NamedBitList) ->
+ ToSetPos = get_all_bitposes([{bit,No} | RestVal], NamedBitList, []),
+ BitList = make_and_set_list(ToSetPos,0),
+ encode_bit_string(C,BitList,NamedBitList);
+
+%% when the value is a list of ones and zeroes
+
+encode_bit_string(C, BitListValue, NamedBitList) when list(BitListValue) ->
+ Bl1 =
+ case NamedBitList of
+	    [] -> % don't remove trailing zeroes
+ BitListValue;
+ _ -> % first remove any trailing zeroes
+ lists:reverse(lists:dropwhile(fun(0)->true;(1)->false end,
+ lists:reverse(BitListValue)))
+ end,
+ BitList = [{bit,X} || X <- Bl1],
+ BListLen = length(BitList),
+ case get_constraint(C,'SizeConstraint') of
+ 0 -> % fixed length
+ []; % nothing to encode
+ V when integer(V),V=<16 -> % fixed length 16 bits or less
+ pad_list(V,BitList);
+	V when integer(V) -> % fixed length, more than 16 bits
+ [align,pad_list(V,BitList)];
+ {Lb,Ub} when integer(Lb),integer(Ub),BListLen<Lb ->
+ %% padding due to OTP-4353
+ [encode_length({Lb,Ub},Lb),align,pad_list(Lb,BitList)];
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ [encode_length({Lb,Ub},length(BitList)),align,BitList];
+ no ->
+ [encode_length(undefined,length(BitList)),align,BitList];
+ Sc={{Lb,Ub},_} when integer(Lb),integer(Ub),BListLen<Lb ->
+ %% padding due to OTP-4353
+ [encode_length(Sc,Lb),align,pad_list(Lb,BitList)];
+ Sc -> % extension marker
+ [encode_length(Sc,length(BitList)),align,BitList]
+ end;
+
+%% when the value is an integer
+encode_bit_string(C, IntegerVal, NamedBitList) when integer(IntegerVal)->
+ BitList = int_to_bitlist(IntegerVal),
+ encode_bit_string(C,BitList,NamedBitList);
+
+%% when the value is a tuple
+encode_bit_string(C,{Name,Val}, NamedBitList) when atom(Name) ->
+ encode_bit_string(C,Val,NamedBitList).
+
+
+%% encode_bin_bit_string/3, when value is a tuple of Unused and BinBits.
+%% Unused = integer(),
+%% BinBits = binary().
+
+encode_bin_bit_string(C,{Unused,BinBits},NamedBitList) ->
+ RemoveZerosIfNNL =
+ fun({NNL,BitList}) ->
+ case NNL of
+ [] -> BitList;
+ _ ->
+ lists:reverse(
+ lists:dropwhile(fun(0)->true;(1)->false end,
+ lists:reverse(BitList)))
+ end
+ end,
+ {OctetList,OLSize,LastBits} =
+ case size(BinBits) of
+ N when N>1 ->
+ IntList = binary_to_list(BinBits),
+ [H|T] = lists:reverse(IntList),
+ Bl1 = RemoveZerosIfNNL({NamedBitList,lists:reverse(int_to_bitlist(H,8-Unused))}),% lists:sublist obsolete if trailing bits are zero !
+ {[{octet,X} || X <- lists:reverse(T)],size(BinBits)-1,
+ [{bit,X} || X <- Bl1]};
+ 1 ->
+ <<B7:1,B6:1,B5:1,B4:1,B3:1,B2:1,B1:1,B0:1>> = BinBits,
+ {[],0,[{bit,X} || X <- lists:sublist([B7,B6,B5,B4,B3,B2,B1,B0],8-Unused)]};
+ _ ->
+ {[],0,[]}
+ end,
+ case get_constraint(C,'SizeConstraint') of
+ 0 ->
+ [];
+ V when integer(V),V=<16 ->
+ [OctetList, pad_list(V,LastBits)];
+ V when integer(V) ->
+% [OctetList, align, pad_list(V rem 8,LastBits)];
+ [align,OctetList, pad_list(V rem 8,LastBits)];
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ NewLastBits = maybe_pad(Lb,length(LastBits)+(OLSize*8),LastBits,NamedBitList),
+ [encode_length({Lb,Ub},length(NewLastBits)+(OLSize*8)),
+% OctetList,align,LastBits];
+ align,OctetList,NewLastBits];
+ no ->
+ [encode_length(undefined,length(LastBits)+(OLSize*8)),
+% OctetList,align,LastBits];
+ align,OctetList,LastBits];
+ Sc={{Lb,_},_} when integer(Lb) ->
+ NewLastBits = maybe_pad(Lb,length(LastBits)+(OLSize*8),LastBits,NamedBitList),
+ [encode_length(Sc,length(NewLastBits)+(OLSize*8)),
+ align,OctetList,NewLastBits];
+ Sc ->
+ [encode_length(Sc,length(LastBits)+(OLSize*8)),
+% OctetList,align,LastBits]
+ align,OctetList,LastBits]
+ end.
+
+maybe_pad(_,_,Bits,[]) ->
+ Bits;
+maybe_pad(Lb,LenBits,Bits,_) when Lb>LenBits ->
+ pad_list(Lb,Bits);
+maybe_pad(_,_,Bits,_) ->
+ Bits.
+
+%%%%%%%%%%%%%%%
+%% The result is presented as a list of named bits (if possible)
+%% else as a tuple {Unused,Bits}. Unused is the number of unused
+%% bits, least significant bits in the last byte of Bits. Bits is
+%% the BIT STRING represented as a binary.
+%%
+decode_compact_bit_string(Buffer, C, NamedNumberList) ->
+ case get_constraint(C,'SizeConstraint') of
+ 0 -> % fixed length
+ {{0,<<>>},Buffer};
+ V when integer(V),V=<16 -> %fixed length 16 bits or less
+ compact_bit_string(Buffer,V,NamedNumberList);
+ V when integer(V) -> %fixed length > 16 bits
+ Bytes2 = align(Buffer),
+ compact_bit_string(Bytes2,V,NamedNumberList);
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ {Len,Bytes2} = decode_length(Buffer,{Lb,Ub}),
+ Bytes3 = align(Bytes2),
+ compact_bit_string(Bytes3,Len,NamedNumberList);
+ no ->
+ {Len,Bytes2} = decode_length(Buffer,undefined),
+ Bytes3 = align(Bytes2),
+ compact_bit_string(Bytes3,Len,NamedNumberList);
+ Sc ->
+ {Len,Bytes2} = decode_length(Buffer,Sc),
+ Bytes3 = align(Bytes2),
+ compact_bit_string(Bytes3,Len,NamedNumberList)
+ end.
+
+
+%%%%%%%%%%%%%%%
+%% The result is presented as a list of named bits (if possible)
+%% else as a list of 0 and 1.
+%%
+decode_bit_string(Buffer, C, NamedNumberList) ->
+ case get_constraint(C,'SizeConstraint') of
+ 0 -> % fixed length
+	    {[],Buffer}; % nothing to decode
+ V when integer(V),V=<16 -> % fixed length 16 bits or less
+ bit_list_to_named(Buffer,V,NamedNumberList);
+	V when integer(V) -> % fixed length, more than 16 bits
+ Bytes2 = align(Buffer),
+ bit_list_to_named(Bytes2,V,NamedNumberList);
+ {Lb,Ub} when integer(Lb),integer(Ub) ->
+ {Len,Bytes2} = decode_length(Buffer,{Lb,Ub}),
+ Bytes3 = align(Bytes2),
+ bit_list_to_named(Bytes3,Len,NamedNumberList);
+ no ->
+ {Len,Bytes2} = decode_length(Buffer,undefined),
+ Bytes3 = align(Bytes2),
+ bit_list_to_named(Bytes3,Len,NamedNumberList);
+ Sc -> % extension marker
+ {Len,Bytes2} = decode_length(Buffer,Sc),
+ Bytes3 = align(Bytes2),
+ bit_list_to_named(Bytes3,Len,NamedNumberList)
+ end.
+
+
+%% if no named bits are declared we will return a
+%% {Unused,Bits}. Unused = integer(),
+%% Bits = binary().
+compact_bit_string(Buffer,Len,[]) ->
+ getbits_as_binary(Len,Buffer); % {{Unused,BinBits},NewBuffer}
+compact_bit_string(Buffer,Len,NamedNumberList) ->
+ bit_list_to_named(Buffer,Len,NamedNumberList).
+
+
+%% if no named bits are declared we will return a
+%% BitList = [0 | 1]
+
+bit_list_to_named(Buffer,Len,[]) ->
+ getbits_as_list(Len,Buffer);
+
+%% if there are named bits declared we will return a named
+%% BitList where the names are atoms and unnamed bits represented
+%% as {bit,Pos}
+%% BitList = [atom() | {bit,Pos}]
+%% Pos = integer()
+
+bit_list_to_named(Buffer,Len,NamedNumberList) ->
+ {BitList,Rest} = getbits_as_list(Len,Buffer),
+ {bit_list_to_named1(0,BitList,NamedNumberList,[]), Rest}.
+
+bit_list_to_named1(Pos,[0|Bt],Names,Acc) ->
+ bit_list_to_named1(Pos+1,Bt,Names,Acc);
+bit_list_to_named1(Pos,[1|Bt],Names,Acc) ->
+ case lists:keysearch(Pos,2,Names) of
+ {value,{Name,_}} ->
+ bit_list_to_named1(Pos+1,Bt,Names,[Name|Acc]);
+ _ ->
+ bit_list_to_named1(Pos+1,Bt,Names,[{bit,Pos}|Acc])
+ end;
+bit_list_to_named1(_Pos,[],_Names,Acc) ->
+ lists:reverse(Acc).
+
+
+
+%%%%%%%%%%%%%%%
+%%
+
+int_to_bitlist(0) ->
+ [];
+int_to_bitlist(Int) when integer(Int), Int >= 0 ->
+ [Int band 1 | int_to_bitlist(Int bsr 1)].
+
+int_to_bitlist(_Int,0) ->
+ [];
+int_to_bitlist(0,N) ->
+ [0|int_to_bitlist(0,N-1)];
+int_to_bitlist(Int,N) ->
+ [Int band 1 | int_to_bitlist(Int bsr 1, N-1)].
+
+
+%%%%%%%%%%%%%%%%%%
+%% get_all_bitposes([list of named bits to set], named_bit_db, []) ->
+%% [sorted_list_of_bitpositions_to_set]
+
+get_all_bitposes([{bit,ValPos}|Rest], NamedBitList, Ack) ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack ]);
+
+get_all_bitposes([Val | Rest], NamedBitList, Ack) ->
+ case lists:keysearch(Val, 1, NamedBitList) of
+ {value, {_ValName, ValPos}} ->
+ get_all_bitposes(Rest, NamedBitList, [ValPos | Ack]);
+ _ ->
+ exit({error,{asn1, {bitstring_namedbit, Val}}})
+ end;
+get_all_bitposes([], _NamedBitList, Ack) ->
+ lists:sort(Ack).
+
+%%%%%%%%%%%%%%%%%%
+%% make_and_set_list([list of positions to set to 1])->
+%% returns list with all in SetPos set.
+%% in positioning in list the first element is 0, the second 1 etc.., but
+%%
+
+make_and_set_list([XPos|SetPos], XPos) ->
+ [1 | make_and_set_list(SetPos, XPos + 1)];
+make_and_set_list([Pos|SetPos], XPos) ->
+ [0 | make_and_set_list([Pos | SetPos], XPos + 1)];
+make_and_set_list([], _XPos) ->
+ [].
+
+%%%%%%%%%%%%%%%%%
+%% pad_list(N,BitList) -> PaddedList
+%% returns a padded (with trailing {bit,0} elements) list of length N;
+%% if BitList contains more than N significant bits, an asn1 range_error
+%% exit is generated
+
+pad_list(0,BitList) ->
+ case BitList of
+ [] -> [];
+ _ -> exit({error,{asn1,{range_error,{bit_string,BitList}}}})
+ end;
+pad_list(N,[Bh|Bt]) ->
+ [Bh|pad_list(N-1,Bt)];
+pad_list(N,[]) ->
+ [{bit,0},pad_list(N-1,[])].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% X.691:16
+%% encode_octet_string(Constraint,ExtensionMarker,Val)
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+encode_octet_string(C,{Name,Val}) when atom(Name) ->
+ encode_octet_string(C,false,Val);
+encode_octet_string(C,Val) ->
+ encode_octet_string(C,false,Val).
+
+encode_octet_string(C,Bool,{_Name,Val}) ->
+ encode_octet_string(C,Bool,Val);
+encode_octet_string(_,true,_) ->
+ exit({error,{asn1,{'not_supported',extensionmarker}}});
+encode_octet_string(C,false,Val) ->
+ case get_constraint(C,'SizeConstraint') of
+ 0 ->
+ [];
+ 1 ->
+ [V] = Val,
+ {bits,8,V};
+ 2 ->
+ [V1,V2] = Val,
+ [{bits,8,V1},{bits,8,V2}];
+ Sv when Sv =<65535, Sv == length(Val) -> % fixed length
+ [align,{octets,Val}];
+ {Lb,Ub} ->
+ [encode_length({Lb,Ub},length(Val)),align,
+ {octets,Val}];
+ Sv when list(Sv) ->
+ [encode_length({hd(Sv),lists:max(Sv)},length(Val)),align,
+ {octets,Val}];
+ no ->
+ [encode_length(undefined,length(Val)),align,
+ {octets,Val}]
+ end.
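+
+%% For example, encode_octet_string([{'SizeConstraint',2}],"AB") gives
+%% [{bits,8,65},{bits,8,66}], while the unconstrained
+%% encode_octet_string([],"ABC") gives [{octet,3},align,{octets,"ABC"}].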
+
+decode_octet_string(Bytes,Range) ->
+ decode_octet_string(Bytes,Range,false).
+
+decode_octet_string(Bytes,C,false) ->
+ case get_constraint(C,'SizeConstraint') of
+ 0 ->
+ {[],Bytes};
+ 1 ->
+ {B1,Bytes2} = getbits(Bytes,8),
+ {[B1],Bytes2};
+ 2 ->
+ {B1,Bytes2}= getbits(Bytes,8),
+ {B2,Bytes3}= getbits(Bytes2,8),
+ {[B1,B2],Bytes3};
+ {_,0} ->
+ {[],Bytes};
+ Sv when integer(Sv), Sv =<65535 -> % fixed length
+ Bytes2 = align(Bytes),
+ getoctets_as_list(Bytes2,Sv);
+ {Lb,Ub} ->
+ {Len,Bytes2} = decode_length(Bytes,{Lb,Ub}),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len);
+ Sv when list(Sv) ->
+ {Len,Bytes2} = decode_length(Bytes,{hd(Sv),lists:max(Sv)}),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len);
+ no ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Restricted char string types
+%% (NumericString, PrintableString,VisibleString,IA5String,BMPString,UniversalString)
+%% X.691:26 and X.680:34-36
+%%encode_restricted_string(aligned,'BMPString',Constraints,Extension,Val)
+
+
+encode_restricted_string(aligned,{Name,Val}) when atom(Name) ->
+ encode_restricted_string(aligned,Val);
+
+encode_restricted_string(aligned,Val) when list(Val)->
+ [encode_length(undefined,length(Val)),align,
+ {octets,Val}].
+
+encode_known_multiplier_string(aligned,StringType,C,_Ext,{Name,Val}) when atom(Name) ->
+ encode_known_multiplier_string(aligned,StringType,C,false,Val);
+
+encode_known_multiplier_string(aligned,StringType,C,_Ext,Val) ->
+ Result = chars_encode(C,StringType,Val),
+ NumBits = get_NumBits(C,StringType),
+ case get_constraint(C,'SizeConstraint') of
+ Ub when integer(Ub), Ub*NumBits =< 16 ->
+ case {StringType,Result} of
+ {'BMPString',{octets,Ol}} ->
+ [{bits,8,Oct}||Oct <- Ol];
+ _ ->
+ Result
+ end;
+ 0 ->
+ [];
+ Ub when integer(Ub),Ub =<65535 -> % fixed length
+ [align,Result];
+ {Ub,Lb} ->
+ [encode_length({Ub,Lb},length(Val)),align,Result];
+ Vl when list(Vl) ->
+ [encode_length({lists:min(Vl),lists:max(Vl)},length(Val)),align,Result];
+ no ->
+ [encode_length(undefined,length(Val)),align,Result]
+ end.
+
+decode_restricted_string(Bytes,aligned) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ Bytes3 = align(Bytes2),
+ getoctets_as_list(Bytes3,Len).
+
+decode_known_multiplier_string(Bytes,aligned,StringType,C,_Ext) ->
+ NumBits = get_NumBits(C,StringType),
+ case get_constraint(C,'SizeConstraint') of
+ Ub when integer(Ub), Ub*NumBits =< 16 ->
+ chars_decode(Bytes,NumBits,StringType,C,Ub);
+ Ub when integer(Ub),Ub =<65535 -> % fixed length
+ Bytes1 = align(Bytes),
+ chars_decode(Bytes1,NumBits,StringType,C,Ub);
+ 0 ->
+ {[],Bytes};
+ Vl when list(Vl) ->
+ {Len,Bytes1} = decode_length(Bytes,{hd(Vl),lists:max(Vl)}),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,C,Len);
+ no ->
+ {Len,Bytes1} = decode_length(Bytes,undefined),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,C,Len);
+ {Lb,Ub}->
+ {Len,Bytes1} = decode_length(Bytes,{Lb,Ub}),
+ Bytes2 = align(Bytes1),
+ chars_decode(Bytes2,NumBits,StringType,C,Len)
+ end.
+
+
+encode_NumericString(C,Val) ->
+ encode_known_multiplier_string(aligned,'NumericString',C,false,Val).
+decode_NumericString(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'NumericString',C,false).
+
+encode_PrintableString(C,Val) ->
+ encode_known_multiplier_string(aligned,'PrintableString',C,false,Val).
+decode_PrintableString(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'PrintableString',C,false).
+
+encode_VisibleString(C,Val) -> % equivalent to ISO646String
+ encode_known_multiplier_string(aligned,'VisibleString',C,false,Val).
+decode_VisibleString(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'VisibleString',C,false).
+
+encode_IA5String(C,Val) ->
+ encode_known_multiplier_string(aligned,'IA5String',C,false,Val).
+decode_IA5String(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'IA5String',C,false).
+
+encode_BMPString(C,Val) ->
+ encode_known_multiplier_string(aligned,'BMPString',C,false,Val).
+decode_BMPString(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'BMPString',C,false).
+
+encode_UniversalString(C,Val) ->
+ encode_known_multiplier_string(aligned,'UniversalString',C,false,Val).
+decode_UniversalString(Bytes,C) ->
+ decode_known_multiplier_string(Bytes,aligned,'UniversalString',C,false).
+
+%% end of known-multiplier strings for which PER visible constraints are
+%% applied
+
+encode_GeneralString(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_GeneralString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_GraphicString(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_GraphicString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_ObjectDescriptor(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_ObjectDescriptor(Bytes) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_TeletexString(_C,Val) -> % equivalent to T61String
+ encode_restricted_string(aligned,Val).
+decode_TeletexString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+encode_VideotexString(_C,Val) ->
+ encode_restricted_string(aligned,Val).
+decode_VideotexString(Bytes,_C) ->
+ decode_restricted_string(Bytes,aligned).
+
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% getBMPChars(Bytes,Len) ->{BMPcharList,RemainingBytes}
+%%
+getBMPChars(Bytes,1) ->
+ {O1,Bytes2} = getbits(Bytes,8),
+ {O2,Bytes3} = getbits(Bytes2,8),
+ if
+ O1 == 0 ->
+ {[O2],Bytes3};
+ true ->
+ {[{0,0,O1,O2}],Bytes3}
+ end;
+getBMPChars(Bytes,Len) ->
+ getBMPChars(Bytes,Len,[]).
+
+getBMPChars(Bytes,0,Acc) ->
+ {lists:reverse(Acc),Bytes};
+getBMPChars(Bytes,Len,Acc) ->
+ {Octs,Bytes1} = getoctets_as_list(Bytes,2),
+ case Octs of
+ [0,O2] ->
+ getBMPChars(Bytes1,Len-1,[O2|Acc]);
+ [O1,O2]->
+ getBMPChars(Bytes1,Len-1,[{0,0,O1,O2}|Acc])
+ end.
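+
+%% For example, getBMPChars({0,[0,$A,0,$B]},2) gives {"AB",{0,[]}}; characters
+%% whose high octet is non-zero come back as {0,0,O1,O2} quadruples instead.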
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% chars_encode(C,StringType,Value) -> ValueList
+%%
+%% encodes chars according to the PER rules, taking the PermittedAlphabet
+%% constraint into account.
+%% This function only encodes the value part and NOT the length
+
+chars_encode(C,StringType,Value) ->
+ case {StringType,get_constraint(C,'PermittedAlphabet')} of
+ {'UniversalString',{_,_Sv}} ->
+ exit({error,{asn1,{'not implemented',"UniversalString with PermittedAlphabet constraint"}}});
+ {'BMPString',{_,_Sv}} ->
+ exit({error,{asn1,{'not implemented',"BMPString with PermittedAlphabet constraint"}}});
+ _ ->
+ {NumBits,CharOutTab} = {get_NumBits(C,StringType),get_CharOutTab(C,StringType)},
+ chars_encode2(Value,NumBits,CharOutTab)
+ end.
+
+chars_encode2([H|T],NumBits,{Min,Max,notab}) when H =< Max, H >= Min ->
+ [{bits,NumBits,H-Min}|chars_encode2(T,NumBits,{Min,Max,notab})];
+chars_encode2([H|T],NumBits,{Min,Max,Tab}) when H =< Max, H >= Min ->
+ [{bits,NumBits,element(H-Min+1,Tab)}|chars_encode2(T,NumBits,{Min,Max,Tab})];
+chars_encode2([{A,B,C,D}|T],NumBits,{Min,Max,notab}) ->
+ %% no value range check here (ought to be, but very expensive)
+% [{bits,NumBits,(A*B*C*D)-Min}|chars_encode2(T,NumBits,{Min,Max,notab})];
+ [{bits,NumBits,((((((A bsl 8) + B) bsl 8) + C) bsl 8) + D)-Min}|chars_encode2(T,NumBits,{Min,Max,notab})];
+chars_encode2([{A,B,C,D}|T],NumBits,{Min,Max,Tab}) ->
+ %% no value range check here (ought to be, but very expensive)
+% [{bits,NumBits,element((A*B*C*D)-Min,Tab)}|chars_encode2(T,NumBits,{Min,Max,notab})];
+ [{bits,NumBits,element(((((((A bsl 8)+B) bsl 8)+C) bsl 8)+D)-Min,Tab)}|chars_encode2(T,NumBits,{Min,Max,notab})];
+chars_encode2([H|_T],_,{_,_,_}) ->
+ exit({error,{asn1,{illegal_char_value,H}}});
+chars_encode2([],_,_) ->
+ [].
+
+
+get_NumBits(C,StringType) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ {'SingleValue',Sv} ->
+ charbits(length(Sv),aligned);
+ no ->
+ case StringType of
+ 'IA5String' ->
+ charbits(128,aligned); % 16#00..16#7F
+ 'VisibleString' ->
+ charbits(95,aligned); % 16#20..16#7E
+ 'PrintableString' ->
+ charbits(74,aligned); % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+ 'NumericString' ->
+ charbits(11,aligned); % $ ,"0123456789"
+ 'UniversalString' ->
+ 32;
+ 'BMPString' ->
+ 16
+ end
+ end.
+
+%%Maybe used later
+%%get_MaxChar(C,StringType) ->
+%% case get_constraint(C,'PermittedAlphabet') of
+%% {'SingleValue',Sv} ->
+%% lists:nth(length(Sv),Sv);
+%% no ->
+%% case StringType of
+%% 'IA5String' ->
+%% 16#7F; % 16#00..16#7F
+%% 'VisibleString' ->
+%% 16#7E; % 16#20..16#7E
+%% 'PrintableString' ->
+%% $z; % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+%% 'NumericString' ->
+%% $9; % $ ,"0123456789"
+%% 'UniversalString' ->
+%% 16#ffffffff;
+%% 'BMPString' ->
+%% 16#ffff
+%% end
+%% end.
+
+%%Maybe used later
+%%get_MinChar(C,StringType) ->
+%% case get_constraint(C,'PermittedAlphabet') of
+%% {'SingleValue',Sv} ->
+%% hd(Sv);
+%% no ->
+%% case StringType of
+%% 'IA5String' ->
+%% 16#00; % 16#00..16#7F
+%% 'VisibleString' ->
+%% 16#20; % 16#20..16#7E
+%% 'PrintableString' ->
+%% $\s; % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
+%% 'NumericString' ->
+%% $\s; % $ ,"0123456789"
+%% 'UniversalString' ->
+%% 16#00;
+%% 'BMPString' ->
+%% 16#00
+%% end
+%% end.
+
+get_CharOutTab(C,StringType) ->
+ get_CharTab(C,StringType,out).
+
+get_CharInTab(C,StringType) ->
+ get_CharTab(C,StringType,in).
+
+get_CharTab(C,StringType,InOut) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ {'SingleValue',Sv} ->
+ get_CharTab2(C,StringType,hd(Sv),lists:max(Sv),Sv,InOut);
+ no ->
+ case StringType of
+ 'IA5String' ->
+ {0,16#7F,notab};
+ 'VisibleString' ->
+ get_CharTab2(C,StringType,16#20,16#7F,notab,InOut);
+ 'PrintableString' ->
+ Chars = lists:sort(
+ " '()+,-./0123456789:=?ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"),
+ get_CharTab2(C,StringType,hd(Chars),lists:max(Chars),Chars,InOut);
+ 'NumericString' ->
+ get_CharTab2(C,StringType,16#20,$9," 0123456789",InOut);
+ 'UniversalString' ->
+ {0,16#FFFFFFFF,notab};
+ 'BMPString' ->
+ {0,16#FFFF,notab}
+ end
+ end.
+
+get_CharTab2(C,StringType,Min,Max,Chars,InOut) ->
+ BitValMax = (1 bsl get_NumBits(C,StringType))-1,
+ if
+ Max =< BitValMax ->
+ {0,Max,notab};
+ true ->
+ case InOut of
+ out ->
+ {Min,Max,create_char_tab(Min,Chars)};
+ in ->
+ {Min,Max,list_to_tuple(Chars)}
+ end
+ end.
+
+create_char_tab(Min,L) ->
+ list_to_tuple(create_char_tab(Min,L,0)).
+create_char_tab(Min,[Min|T],V) ->
+ [V|create_char_tab(Min+1,T,V+1)];
+create_char_tab(_Min,[],_V) ->
+ [];
+create_char_tab(Min,L,V) ->
+ [false|create_char_tab(Min+1,L,V)].
+
+%% This is very inefficient and should be moved to compile time
+charbits(NumOfChars,aligned) ->
+ case charbits(NumOfChars) of
+ 1 -> 1;
+ 2 -> 2;
+ B when B > 2, B =< 4 -> 4;
+ B when B > 4, B =< 8 -> 8;
+ B when B > 8, B =< 16 -> 16;
+ B when B > 16, B =< 32 -> 32
+ end.
+
+charbits(NumOfChars) when NumOfChars =< 2 -> 1;
+charbits(NumOfChars) when NumOfChars =< 4 -> 2;
+charbits(NumOfChars) when NumOfChars =< 8 -> 3;
+charbits(NumOfChars) when NumOfChars =< 16 -> 4;
+charbits(NumOfChars) when NumOfChars =< 32 -> 5;
+charbits(NumOfChars) when NumOfChars =< 64 -> 6;
+charbits(NumOfChars) when NumOfChars =< 128 -> 7;
+charbits(NumOfChars) when NumOfChars =< 256 -> 8;
+charbits(NumOfChars) when NumOfChars =< 512 -> 9;
+charbits(NumOfChars) when NumOfChars =< 1024 -> 10;
+charbits(NumOfChars) when NumOfChars =< 2048 -> 11;
+charbits(NumOfChars) when NumOfChars =< 4096 -> 12;
+charbits(NumOfChars) when NumOfChars =< 8192 -> 13;
+charbits(NumOfChars) when NumOfChars =< 16384 -> 14;
+charbits(NumOfChars) when NumOfChars =< 32768 -> 15;
+charbits(NumOfChars) when NumOfChars =< 65536 -> 16;
+charbits(NumOfChars) when integer(NumOfChars) ->
+ 16 + charbits1(NumOfChars bsr 16).
+
+charbits1(0) ->
+ 0;
+charbits1(NumOfChars) ->
+ 1 + charbits1(NumOfChars bsr 1).
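+
+%% For example, a VisibleString with no PermittedAlphabet has 95 possible
+%% characters, so charbits(95) is 7 and the aligned variant rounds it up to
+%% 8 bits per character in get_NumBits/2 above.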
+
+
+chars_decode(Bytes,_,'BMPString',C,Len) ->
+ case get_constraint(C,'PermittedAlphabet') of
+ no ->
+ getBMPChars(Bytes,Len);
+ _ ->
+ exit({error,{asn1,
+ {'not implemented',
+ "BMPString with PermittedAlphabet constraint"}}})
+ end;
+chars_decode(Bytes,NumBits,StringType,C,Len) ->
+ CharInTab = get_CharInTab(C,StringType),
+ chars_decode2(Bytes,CharInTab,NumBits,Len).
+
+
+chars_decode2(Bytes,CharInTab,NumBits,Len) ->
+ chars_decode2(Bytes,CharInTab,NumBits,Len,[]).
+
+chars_decode2(Bytes,_CharInTab,_NumBits,0,Acc) ->
+ {lists:reverse(Acc),Bytes};
+chars_decode2(Bytes,{Min,Max,notab},NumBits,Len,Acc) when NumBits > 8 ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ Result = case minimum_octets(Char+Min) of
+ [NewChar] -> NewChar;
+ [C1,C2] -> {0,0,C1,C2};
+ [C1,C2,C3] -> {0,C1,C2,C3};
+ [C1,C2,C3,C4] -> {C1,C2,C3,C4}
+ end,
+ chars_decode2(Bytes2,{Min,Max,notab},NumBits,Len -1,[Result|Acc]);
+chars_decode2(Bytes,{Min,Max,notab},NumBits,Len,Acc) ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ chars_decode2(Bytes2,{Min,Max,notab},NumBits,Len -1,[Char+Min|Acc]);
+
+%% BMPString and UniversalString with PermittedAlphabet are currently not supported
+chars_decode2(Bytes,{Min,Max,CharInTab},NumBits,Len,Acc) ->
+ {Char,Bytes2} = getbits(Bytes,NumBits),
+ chars_decode2(Bytes2,{Min,Max,CharInTab},NumBits,Len -1,[element(Char+1,CharInTab)|Acc]).
+
+
+ % X.691:17
+encode_null({Name,Val}) when atom(Name) ->
+ encode_null(Val);
+encode_null(_) -> []. % encodes to nothing
+
+decode_null(Bytes) ->
+ {'NULL',Bytes}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% encode_object_identifier(Val) -> CompleteList
+%% encode_object_identifier({Name,Val}) -> CompleteList
+%% Val -> {Int1,Int2,...,IntN} % N >= 2
+%% Name -> atom()
+%% Int1 -> integer(0..2)
+%% Int2 -> integer(0..39) when Int1 (0..1) else integer()
+%% Int3-N -> integer()
+%% CompleteList -> [{bits,8,Val}|{octets,Ol}|align|...]
+%%
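+%% Example (sketch): for the value {2,100,3} the two leading arcs collapse to
+%% 40*2+100 = 180, which e_object_element/1 encodes as [129,52], so the
+%% complete subidentifier octet list is [129,52,3].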
+encode_object_identifier({Name,Val}) when atom(Name) ->
+ encode_object_identifier(Val);
+encode_object_identifier(Val) ->
+ Octets = e_object_identifier(Val,notag),
+ [{debug,object_identifier},encode_length(undefined,length(Octets)),{octets,Octets}].
+
+%% This code is copied from asn1_encode.erl (BER) and corrected and modified
+
+e_object_identifier({'OBJECT IDENTIFIER',V},DoTag) ->
+ e_object_identifier(V,DoTag);
+e_object_identifier({Cname,V},DoTag) when atom(Cname),tuple(V) ->
+ e_object_identifier(tuple_to_list(V),DoTag);
+e_object_identifier({Cname,V},DoTag) when atom(Cname),list(V) ->
+ e_object_identifier(V,DoTag);
+e_object_identifier(V,DoTag) when tuple(V) ->
+ e_object_identifier(tuple_to_list(V),DoTag);
+
+% E1 = 0|1|2 and (E2 < 40 when E1 = 0|1)
+e_object_identifier([E1,E2|Tail],_DoTag) when E1 =< 2 ->
+ Head = 40*E1 + E2, % weird
+ Res = e_object_elements([Head|Tail]),
+% dotag(DoTag,[6],elength(length(Res)+1),[Head|Res]),
+ Res.
+
+e_object_elements([]) ->
+ [];
+e_object_elements([H|T]) ->
+ lists:append(e_object_element(H),e_object_elements(T)).
+
+e_object_element(Num) when Num < 128 ->
+ [Num];
+% must be changed to handle more than 2 octets
+e_object_element(Num) -> %% when Num < ???
+ Left = ((Num band 2#11111110000000) bsr 7) bor 2#10000000,
+ Right = Num band 2#1111111 ,
+ [Left,Right].
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% decode_object_identifier(Bytes) -> {ObjId,RemainingBytes}
+%% ObjId -> {integer(),integer(),...} % at least 2 integers
+%% RemainingBytes -> [integer()] when integer() (0..255)
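+%% Example (sketch): dec_subidentifiers([129,52,3],0,[]) yields [180,3]; since
+%% 180 >= 80 the leading arcs are 2 and 100, giving the value {2,100,3}.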
+decode_object_identifier(Bytes) ->
+ {Len,Bytes2} = decode_length(Bytes,undefined),
+ {Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
+ [First|Rest] = dec_subidentifiers(Octs,0,[]),
+ Idlist = if
+ First < 40 ->
+ [0,First|Rest];
+ First < 80 ->
+ [1,First - 40|Rest];
+ true ->
+ [2,First - 80|Rest]
+ end,
+ {list_to_tuple(Idlist),Bytes3}.
+
+dec_subidentifiers([H|T],Av,Al) when H >=16#80 ->
+ dec_subidentifiers(T,(Av bsl 7) + (H band 16#7F),Al);
+dec_subidentifiers([H|T],Av,Al) ->
+ dec_subidentifiers(T,0,[(Av bsl 7) + H |Al]);
+dec_subidentifiers([],_Av,Al) ->
+ lists:reverse(Al).
+
+get_constraint(C,Key) ->
+ case lists:keysearch(Key,1,C) of
+ false ->
+ no;
+ {value,{_,V}} ->
+ V
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% complete(InList) -> ByteList
+%% Takes a coded list with bits and bytes and converts it to a list of bytes
+%% Should be applied as the last step at encode of a complete ASN.1 type
+%%
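+%% Example (sketch): complete([{bits,2,2#10},align,{octets,[16#FF]}]) pads the
+%% two bits up to a whole octet and returns [2#10000000,16#FF].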
+complete(InList) when list(InList) ->
+ complete(InList,[],0);
+complete(InList) ->
+ complete([InList],[],0).
+
+complete([{debug,_}|T], Acc, Acclen) ->
+ complete(T,Acc,Acclen);
+complete([H|T],Acc,Acclen) when list(H) ->
+ complete(lists:concat([H,T]),Acc,Acclen);
+
+
+complete([{octets,N,Val}|T],Acc,Acclen) when N =< 4 ,integer(Val) ->
+ Newval = case N of
+ 1 ->
+ Val4 = Val band 16#FF,
+ [Val4];
+ 2 ->
+ Val3 = (Val bsr 8) band 16#FF,
+ Val4 = Val band 16#FF,
+ [Val3,Val4];
+ 3 ->
+ Val2 = (Val bsr 16) band 16#FF,
+ Val3 = (Val bsr 8) band 16#FF,
+ Val4 = Val band 16#FF,
+ [Val2,Val3,Val4];
+ 4 ->
+ Val1 = (Val bsr 24) band 16#FF,
+ Val2 = (Val bsr 16) band 16#FF,
+ Val3 = (Val bsr 8) band 16#FF,
+ Val4 = Val band 16#FF,
+ [Val1,Val2,Val3,Val4]
+ end,
+ complete([{octets,Newval}|T],Acc,Acclen);
+
+complete([{octets,Oct}|T],[],_Acclen) when list(Oct) ->
+ complete(T,lists:reverse(Oct),0);
+complete([{octets,Oct}|T],[Hacc|Tacc],Acclen) when list(Oct) ->
+ Rest = 8 - Acclen,
+ if
+ Rest == 8 ->
+ complete(T,lists:concat([lists:reverse(Oct),[Hacc|Tacc]]),0);
+ true ->
+ complete(T,lists:concat([lists:reverse(Oct),[Hacc bsl Rest|Tacc]]),0)
+ end;
+
+complete([{bit,Val}|T], Acc, Acclen) ->
+ complete([{bits,1,Val}|T],Acc,Acclen);
+complete([{octet,Val}|T], Acc, Acclen) ->
+ complete([{octets,1,Val}|T],Acc,Acclen);
+
+complete([{bits,N,Val}|T], Acc, 0) when N =< 8 ->
+ complete(T,[Val|Acc],N);
+complete([{bits,N,Val}|T], [Hacc|Tacc], Acclen) when N =< 8 ->
+ Rest = 8 - Acclen,
+ if
+ Rest >= N ->
+ complete(T,[(Hacc bsl N) + Val|Tacc],(Acclen+N) rem 8);
+ true ->
+ Diff = N - Rest,
+ NewHacc = (Hacc bsl Rest) + (Val bsr Diff),
+ Mask = element(Diff,{1,3,7,15,31,63,127,255}),
+ complete(T,[(Val band Mask),NewHacc|Tacc],(Acclen+N) rem 8)
+ end;
+complete([{bits,N,Val}|T], Acc, Acclen) -> % N > 8
+ complete([{bits,N-8,Val bsr 8},{bits,8,Val band 255}|T],Acc,Acclen);
+
+complete([align|T],Acc,0) ->
+ complete(T,Acc,0);
+complete([align|T],[Hacc|Tacc],Acclen) ->
+ Rest = 8 - Acclen,
+ complete(T,[Hacc bsl Rest|Tacc],0);
+complete([{octets,_N,Val}|T],Acc,Acclen) when list(Val) -> % no security check here
+ complete([{octets,Val}|T],Acc,Acclen);
+
+complete([],[],0) ->
+ [0]; % a complete encoding must always be at least 1 byte
+complete([],Acc,0) ->
+ lists:reverse(Acc);
+complete([],[Hacc|Tacc],Acclen) when Acclen > 0->
+ Rest = 8 - Acclen,
+ NewHacc = Hacc bsl Rest,
+ lists:reverse([NewHacc|Tacc]).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/notes_history.sgml b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/notes_history.sgml
new file mode 100644
index 0000000000..3b50c1b73f
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/notes_history.sgml
@@ -0,0 +1,97 @@
+<!doctype chapter PUBLIC "-//Stork//DTD chapter//EN">
+<!--
+ ``The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved via the world wide web at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+ Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+ AB. All Rights Reserved.''
+
+ $Id: notes_history.sgml,v 1.1 2008/12/17 09:53:31 mikpe Exp $
+-->
+<chapter>
+ <header>
+ <title>ASN1 Release Notes (Old)</title>
+ <prepared>Kenneth Lundin</prepared>
+ <responsible>Kenneth Lundin</responsible>
+ <docno></docno>
+ <approved>Kenneth Lundin</approved>
+ <checked>Kenneth Lundin</checked>
+ <date>98-02-02</date>
+ <rev>A</rev>
+ <file>notes_history.sgml</file>
+ </header>
+
+ <p>This document describes the changes made to old versions of the <c>asn1</c> application.
+
+ <section>
+ <title>ASN1 0.8.1</title>
+ <p>This is the first release of the ASN1 application. This version is released
+ for beta-testing. Some functionality will be added before the 1.0 version is
+ released. A list of missing features and restrictions can be found in the
+ chapter below.
+
+ <section>
+ <title>Missing features and other restrictions</title>
+ <list>
+ <item>
+ <p>The encoding rules BER and PER (aligned) are supported. <em>PER (unaligned)
+ IS NOT SUPPORTED</em>.
+ <item>
+ <p>NOT SUPPORTED types <c>ANY</c> and <c>ANY DEFINED BY</c>
+ (is not in the standard any more).
+ <item>
+ <p>NOT SUPPORTED types <c>EXTERNAL</c> and <c>EMBEDDED-PDV</c>.
+ <item>
+ <p>NOT SUPPORTED type <c>REAL</c> (planned to be implemented).
+ <item>
+ <p>The code generation support for value definitions in the ASN.1 notation is very limited
+ (planned to be enhanced).
+ <item>
+ <p>The support for constraints is limited to:
+ <list>
+ <item><p>
+ SizeConstraint SIZE(X)
+ <item><p>
+ SingleValue (1)
+ <item><p>
+ ValueRange (X..Y)
+ <item><p>
+ PermittedAlphabet FROM (but not for BMPString and UniversalString when generating PER).
+ </list>
+ <p>Complex expressions in constraints are not supported (planned to be extended).
+ <item>
+ <p>The current version of the compiler has very limited error checking:
+ <list>
+ <item><p>Stops at first syntax error.
+ <item><p>Does not stop when a reference to an undefined type is found,
+ but prints an error message. Compilation of the generated
+ Erlang module will then fail.
+ <item><p>A number of other semantic checks are currently missing. This
+ means that the compiler gives little help in detecting what is wrong
+ with an ASN.1 specification, but will mostly work very well when the
+ ASN.1 specification is correct.
+ </list>
+ <item>
+ <p>The maximum INTEGER supported in this version is a signed 64 bit integer. This
+ limitation is probably quite reasonable. (Planned to be extended).
+ <item>
+ <p>Only AUTOMATIC TAGS supported for PER.
+ <item>
+ <p>Only EXPLICIT and IMPLICIT TAGS supported for BER.
+ <item>
+ <p>The compiler supports decoding of BER-data with indefinite length but it is
+ not possible to produce data with indefinite length with the encoder.
+ </list>
+ </section>
+
+ </section>
+</chapter>
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/notes_latest.sgml b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/notes_latest.sgml
new file mode 100644
index 0000000000..ff1f5adfa2
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/notes_latest.sgml
@@ -0,0 +1,97 @@
+<!doctype chapter PUBLIC "-//Stork//DTD chapter//EN">
+<!--
+ ``The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved via the world wide web at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+ Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+ AB. All Rights Reserved.''
+
+ $Id: notes_latest.sgml,v 1.1 2008/12/17 09:53:31 mikpe Exp $
+-->
+<chapter>
+ <header>
+ <title>ASN1 Release Notes</title>
+ <prepared>Kenneth Lundin</prepared>
+ <responsible>Kenneth Lundin</responsible>
+ <docno></docno>
+ <approved>Kenneth Lundin</approved>
+ <checked>Kenneth Lundin</checked>
+ <date>97-10-07</date>
+ <rev>A</rev>
+ <file>notes_latest.sgml</file>
+ </header>
+
+ <p>This document describes the changes made to the asn1 application.
+
+ <section>
+ <title>ASN1 0.8.1</title>
+ <p>This is the first release of the ASN1 application. This version is released
+ for beta-testing. Some functionality will be added before the 1.0 version is
+ released. A list of missing features and restrictions can be found in the
+ chapter below.
+
+ <section>
+ <title>Missing features and other restrictions</title>
+ <list>
+ <item>
+ <p>The encoding rules BER and PER (aligned) are supported. <em>PER (unaligned)
+ IS NOT SUPPORTED</em>.
+ <item>
+ <p>NOT SUPPORTED types <c>ANY</c> and <c>ANY DEFINED BY</c>
+ (is not in the standard any more).
+ <item>
+ <p>NOT SUPPORTED types <c>EXTERNAL</c> and <c>EMBEDDED-PDV</c>.
+ <item>
+ <p>NOT SUPPORTED type <c>REAL</c> (planned to be implemented).
+ <item>
+ <p>The code generation support for value definitions in the ASN.1 notation is very limited
+ (planned to be enhanced).
+ <item>
+ <p>The support for constraints is limited to:
+ <list>
+ <item><p>
+ SizeConstraint SIZE(X)
+ <item><p>
+ SingleValue (1)
+ <item><p>
+ ValueRange (X..Y)
+ <item><p>
+ PermittedAlphabet FROM (but not for BMPString and UniversalString when generating PER).
+ </list>
+ <p>Complex expressions in constraints are not supported (planned to be extended).
+ <item>
+ <p>The current version of the compiler has very limited error checking:
+ <list>
+ <item><p>Stops at first syntax error.
+ <item><p>Does not stop when a reference to an undefined type is found,
+ but prints an error message. Compilation of the generated
+ Erlang module will then fail.
+ <item><p>A number of other semantic checks are currently missing. This
+ means that the compiler gives little help in detecting what is wrong
+ with an ASN.1 specification, but will mostly work very well when the
+ ASN.1 specification is correct.
+ </list>
+ <item>
+ <p>The maximum INTEGER supported in this version is a signed 64 bit integer. This
+ limitation is probably quite reasonable. (Planned to be extended).
+ <item>
+ <p>Only AUTOMATIC TAGS supported for PER.
+ <item>
+ <p>Only EXPLICIT and IMPLICIT TAGS supported for BER.
+ <item>
+ <p>The compiler supports decoding of BER-data with indefinite length but it is
+ not possible to produce data with indefinite length with the encoder.
+ </list>
+ </section>
+
+ </section>
+</chapter>
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/Makefile b/lib/dialyzer/test/r9c_SUITE_data/src/inets/Makefile
new file mode 100644
index 0000000000..be63eb73b2
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/Makefile
@@ -0,0 +1,178 @@
+# ``The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved via the world wide web at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+# Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+# AB. All Rights Reserved.''
+#
+# $Id: Makefile,v 1.1 2008/12/17 09:53:33 mikpe Exp $
+#
+include $(ERL_TOP)/make/target.mk
+include $(ERL_TOP)/make/$(TARGET)/otp.mk
+
+# ----------------------------------------------------
+# Application version
+# ----------------------------------------------------
+include ../vsn.mk
+
+VSN = $(INETS_VSN)
+APP_VSN = "inets-$(VSN)"
+
+
+# ----------------------------------------------------
+# Release directory specification
+# ----------------------------------------------------
+RELSYSDIR = $(RELEASE_PATH)/lib/inets-$(VSN)
+
+# ----------------------------------------------------
+# Target Specs
+# ----------------------------------------------------
+MODULES = \
+ ftp \
+ http \
+ http_lib \
+ httpc_handler \
+ httpc_manager \
+ uri \
+ httpd \
+ httpd_acceptor \
+ httpd_acceptor_sup \
+ httpd_conf \
+ httpd_example \
+ httpd_manager \
+ httpd_misc_sup \
+ httpd_parse \
+ httpd_request_handler \
+ httpd_response \
+ httpd_socket \
+ httpd_sup \
+ httpd_util \
+ httpd_verbosity \
+ inets_sup \
+ mod_actions \
+ mod_alias \
+ mod_auth \
+ mod_auth_plain \
+ mod_auth_dets \
+ mod_auth_mnesia \
+ mod_auth_server \
+ mod_browser \
+ mod_cgi \
+ mod_dir \
+ mod_disk_log \
+ mod_esi \
+ mod_get \
+ mod_head \
+ mod_htaccess \
+ mod_include \
+ mod_log \
+ mod_range \
+ mod_responsecontrol \
+ mod_trace \
+ mod_security \
+ mod_security_server
+
+HRL_FILES = httpd.hrl httpd_verbosity.hrl mod_auth.hrl \
+ http.hrl jnets_httpd.hrl
+
+ERL_FILES = $(MODULES:%=%.erl)
+
+TARGET_FILES= $(MODULES:%=$(EBIN)/%.$(EMULATOR)) $(APP_TARGET) $(APPUP_TARGET)
+
+APP_FILE= inets.app
+APPUP_FILE= inets.appup
+
+APP_SRC= $(APP_FILE).src
+APP_TARGET= $(EBIN)/$(APP_FILE)
+
+APPUP_SRC= $(APPUP_FILE).src
+APPUP_TARGET= $(EBIN)/$(APPUP_FILE)
+
+# ----------------------------------------------------
+# INETS FLAGS
+# ----------------------------------------------------
+# DONT_USE_VERBOSITY = -Ddont_use_verbosity=true
+INETS_FLAGS = -D'SERVER_SOFTWARE="inets/$(VSN)"' \
+ -Ddefault_verbosity=silence \
+ $(DONT_USE_VERBOSITY)
+
+# INETS_DEBUG_DEFAULT = d
+ifeq ($(INETS_DEBUG),)
+ INETS_DEBUG = $(INETS_DEBUG_DEFAULT)
+endif
+
+ifeq ($(INETS_DEBUG),c)
+ INETS_FLAGS += -Dinets_cdebug -Dinets_debug -Dinets_log -Dinets_error
+endif
+ifeq ($(INETS_DEBUG),d)
+ INETS_FLAGS += -Dinets_debug -Dinets_log -Dinets_error
+endif
+ifeq ($(INETS_DEBUG),l)
+ INETS_FLAGS += -Dinets_log -Dinets_error
+endif
+ifeq ($(INETS_DEBUG),e)
+ INETS_FLAGS += -Dinets_error
+endif
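+
+# Example (assumption): "make INETS_DEBUG=d" adds the inets_debug, inets_log
+# and inets_error defines above; with INETS_DEBUG unset none of them is added.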
+
+
+# ----------------------------------------------------
+# FLAGS
+# ----------------------------------------------------
+ERL_FLAGS +=
+
+ifeq ($(WARN_UNUSED_WARS),true)
+ERL_COMPILE_FLAGS += +warn_unused_vars
+endif
+
+ERL_COMPILE_FLAGS += $(INETS_FLAGS) \
+ +'{parse_transform,sys_pre_attributes}' \
+ +'{attribute,insert,app_vsn,$(APP_VSN)}'
+
+
+# ----------------------------------------------------
+# Targets
+# ----------------------------------------------------
+
+debug opt: $(TARGET_FILES)
+
+clean:
+ rm -f $(TARGET_FILES)
+ rm -f core
+
+docs:
+
+# ----------------------------------------------------
+# Special Build Targets
+# ----------------------------------------------------
+
+$(APP_TARGET): $(APP_SRC) ../vsn.mk
+ sed -e 's;%VSN%;$(VSN);' $< > $@
+
+$(APPUP_TARGET): $(APPUP_SRC) ../vsn.mk
+ sed -e 's;%VSN%;$(VSN);' $< > $@
+
+# ----------------------------------------------------
+# Release Target
+# ----------------------------------------------------
+include $(ERL_TOP)/make/otp_release_targets.mk
+
+release_spec: opt
+ $(INSTALL_DIR) $(RELSYSDIR)/src
+ $(INSTALL_DATA) $(HRL_FILES) $(ERL_FILES) $(RELSYSDIR)/src
+ $(INSTALL_DIR) $(RELSYSDIR)/ebin
+ $(INSTALL_DATA) $(TARGET_FILES) $(RELSYSDIR)/ebin
+
+release_docs_spec:
+
+info:
+ @echo "INETS_DEBUG = $(INETS_DEBUG)"
+ @echo "INETS_FLAGS = $(INETS_FLAGS)"
+ @echo "ERL_COMPILE_FLAGS = $(ERL_COMPILE_FLAGS)"
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl
new file mode 100644
index 0000000000..312bb3a5c8
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl
@@ -0,0 +1,1582 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: ftp.erl,v 1.2 2009/03/03 01:55:01 kostis Exp $
+%%
+-module(ftp).
+
+-behaviour(gen_server).
+
+%% This module implements an ftp client based on socket(3)/gen_tcp(3),
+%% file(3) and filename(3).
+%%
+
+
+-define(OPEN_TIMEOUT, 60*1000).
+-define(BYTE_TIMEOUT, 1000). % Timeout for _ONE_ byte to arrive. (ms)
+-define(OPER_TIMEOUT, 300). % Operation timeout (seconds)
+-define(FTP_PORT, 21).
+
+%% Client interface
+-export([cd/2, close/1, delete/2, formaterror/1, help/0,
+ lcd/2, lpwd/1, ls/1, ls/2,
+ mkdir/2, nlist/1, nlist/2,
+ open/1, open/2, open/3,
+ pwd/1,
+ recv/2, recv/3, recv_bin/2,
+ recv_chunk_start/2, recv_chunk/1,
+ rename/3, rmdir/2,
+ send/2, send/3, send_bin/3,
+ send_chunk_start/2, send_chunk/2, send_chunk_end/1,
+ type/2, user/3,user/4,account/2,
+ append/3, append/2, append_bin/3,
+ append_chunk/2, append_chunk_end/1, append_chunk_start/2]).
+
+%% Internal
+-export([init/1, handle_call/3, handle_cast/2,
+ handle_info/2, terminate/2,code_change/3]).
+
+
+%%
+%% CLIENT FUNCTIONS
+%%
+
+%% open(Host)
+%% open(Host, Flags)
+%%
+%% Purpose: Start an ftp client and connect to a host.
+%% Args: Host = string(),
+%% Port = integer(),
+%% Flags = [Flag],
+%% Flag = verbose | debug
+%% Returns: {ok, Pid} | {error, ehost}
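+%%
+%% Example (hypothetical session; host and credentials are assumptions):
+%%   {ok, Pid} = ftp:open("ftp.example.com"),
+%%   ok = ftp:user(Pid, "anonymous", "[email protected]"),
+%%   {ok, Listing} = ftp:ls(Pid),
+%%   ok = ftp:close(Pid).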
+
+%%The argument is an option list that may contain host, port, timeout and flags
+open({option_list,Option_list})->
+ %% Dbg = {debug,[trace,log,statistics]},
+ %% Options = [Dbg],
+ Options = [],
+ {ok,Pid1}=case lists:keysearch(flags,1,Option_list) of
+ {value,{flags,Flags}}->
+ {ok, Pid} = gen_server:start_link(?MODULE, [Flags], Options);
+ false ->
+ {ok, Pid} = gen_server:start_link(?MODULE, [], Options)
+ end,
+ gen_server:call(Pid1, {open, ip_comm,Option_list}, infinity);
+
+
+%%The only option was the tuple form of the ip-number
+open(Host)when tuple(Host) ->
+ open(Host, ?FTP_PORT, []);
+
+%%Host is the string form of the hostname
+open(Host)->
+ open(Host,?FTP_PORT,[]).
+
+
+
+open(Host, Port) when integer(Port) ->
+ open(Host,Port,[]);
+
+open(Host, Flags) when list(Flags) ->
+ open(Host,?FTP_PORT, Flags).
+
+open(Host,Port,Flags) when integer(Port), list(Flags) ->
+ %% Dbg = {debug,[trace,log,statistics]},
+ %% Options = [Dbg],
+ Options = [],
+ {ok, Pid} = gen_server:start_link(?MODULE, [Flags], Options),
+ gen_server:call(Pid, {open, ip_comm, Host, Port}, infinity).
+
+%% user(Pid, User, Pass)
+%% Purpose: Login.
+%% Args: Pid = pid(), User = Pass = string()
+%% Returns: ok | {error, euser} | {error, econn}
+user(Pid, User, Pass) ->
+ gen_server:call(Pid, {user, User, Pass}, infinity).
+
+%% user(Pid, User, Pass,Acc)
+%% Purpose: Login with a supplied account name
+%% Args: Pid = pid(), User = Pass = Acc = string()
+%% Returns: ok | {error, euser} | {error, econn} | {error, eacct}
+user(Pid, User, Pass,Acc) ->
+ gen_server:call(Pid, {user, User, Pass,Acc}, infinity).
+
+%% account(Pid,Acc)
+%% Purpose: Set a user Account.
+%% Args: Pid = pid(), Acc= string()
+%% Returns: ok | {error, eacct}
+account(Pid,Acc) ->
+ gen_server:call(Pid, {account,Acc}, infinity).
+
+%% pwd(Pid)
+%%
+%% Purpose: Get the current working directory at remote server.
+%% Args: Pid = pid()
+%% Returns: {ok, Dir} | {error, elogin} | {error, econn}
+pwd(Pid) ->
+ gen_server:call(Pid, pwd, infinity).
+
+%% lpwd(Pid)
+%%
+%% Purpose: Get the current working directory at local server.
+%% Args: Pid = pid()
+%% Returns: {ok, Dir} | {error, elogin}
+lpwd(Pid) ->
+ gen_server:call(Pid, lpwd, infinity).
+
+%% cd(Pid, Dir)
+%%
+%% Purpose: Change current working directory at remote server.
+%% Args: Pid = pid(), Dir = string()
+%% Returns: ok | {error, epath} | {error, elogin} | {error, econn}
+cd(Pid, Dir) ->
+ gen_server:call(Pid, {cd, Dir}, infinity).
+
+%% lcd(Pid, Dir)
+%%
+%% Purpose: Change current working directory for the local client.
+%% Args: Pid = pid(), Dir = string()
+%% Returns: ok | {error, epath}
+lcd(Pid, Dir) ->
+ gen_server:call(Pid, {lcd, Dir}, infinity).
+
+%% ls(Pid)
+%% ls(Pid, Dir)
+%%
+%% Purpose: List the contents of current directory (ls/1) or directory
+%% Dir (ls/2) at remote server.
+%% Args: Pid = pid(), Dir = string()
+%% Returns: {ok, Listing} | {error, epath} | {error, elogin} | {error, econn}
+ls(Pid) ->
+ ls(Pid, "").
+ls(Pid, Dir) ->
+ gen_server:call(Pid, {dir, long, Dir}, infinity).
+
+%% nlist(Pid)
+%% nlist(Pid, Dir)
+%%
+%% Purpose: List the contents of current directory (ls/1) or directory
+%% Dir (ls/2) at remote server. The returned list is a stream
+%% of file names.
+%% Args: Pid = pid(), Dir = string()
+%% Returns: {ok, Listing} | {error, epath} | {error, elogin} | {error, econn}
+nlist(Pid) ->
+ nlist(Pid, "").
+nlist(Pid, Dir) ->
+ gen_server:call(Pid, {dir, short, Dir}, infinity).
+
+%% rename(Pid, CurrFile, NewFile)
+%%
+%% Purpose: Rename a file at remote server.
+%% Args: Pid = pid(), CurrFile = NewFile = string()
+%% Returns: ok | {error, epath} | {error, elogin} | {error, econn}
+rename(Pid, CurrFile, NewFile) ->
+ gen_server:call(Pid, {rename, CurrFile, NewFile}, infinity).
+
+%% delete(Pid, File)
+%%
+%% Purpose: Remove file at remote server.
+%% Args: Pid = pid(), File = string()
+%% Returns: ok | {error, epath} | {error, elogin} | {error, econn}
+delete(Pid, File) ->
+ gen_server:call(Pid, {delete, File}, infinity).
+
+%% mkdir(Pid, Dir)
+%%
+%% Purpose: Make directory at remote server.
+%% Args: Pid = pid(), Dir = string()
+%% Returns: ok | {error, epath} | {error, elogin} | {error, econn}
+mkdir(Pid, Dir) ->
+ gen_server:call(Pid, {mkdir, Dir}, infinity).
+
+%% rmdir(Pid, Dir)
+%%
+%% Purpose: Remove directory at remote server.
+%% Args: Pid = pid(), Dir = string()
+%% Returns: ok | {error, epath} | {error, elogin} | {error, econn}
+rmdir(Pid, Dir) ->
+ gen_server:call(Pid, {rmdir, Dir}, infinity).
+
+%% type(Pid, Type)
+%%
+%% Purpose: Set transfer type.
+%% Args: Pid = pid(), Type = ascii | binary
+%% Returns: ok | {error, etype} | {error, elogin} | {error, econn}
+type(Pid, Type) ->
+ gen_server:call(Pid, {type, Type}, infinity).
+
+%% recv(Pid, RFile [, LFile])
+%%
+%% Purpose: Transfer file from remote server.
+%% Args: Pid = pid(), RFile = LFile = string()
+%% Returns: ok | {error, epath} | {error, elogin} | {error, econn}
+recv(Pid, RFile) ->
+ recv(Pid, RFile, "").
+
+recv(Pid, RFile, LFile) ->
+ gen_server:call(Pid, {recv, RFile, LFile}, infinity).
+
+%% recv_bin(Pid, RFile)
+%%
+%% Purpose: Transfer file from remote server into binary.
+%% Args: Pid = pid(), RFile = string()
+%% Returns: {ok, Bin} | {error, epath} | {error, elogin} | {error, econn}
+recv_bin(Pid, RFile) ->
+ gen_server:call(Pid, {recv_bin, RFile}, infinity).
+
+%% recv_chunk_start(Pid, RFile)
+%%
+%% Purpose: Start receive of chunks of remote file.
+%% Args: Pid = pid(), RFile = string().
+%% Returns: ok | {error, elogin} | {error, epath} | {error, econn}
+recv_chunk_start(Pid, RFile) ->
+ gen_server:call(Pid, {recv_chunk_start, RFile}, infinity).
+
+
+%% recv_chunk(Pid, RFile)
+%%
+%% Purpose: Transfer file from remote server into binary in chunks
+%% Args: Pid = pid(), RFile = string()
+%% Returns: Reference
+recv_chunk(Pid) ->
+ gen_server:call(Pid, recv_chunk, infinity).
+
+%% send(Pid, LFile [, RFile])
+%%
+%% Purpose: Transfer file to remote server.
+%% Args: Pid = pid(), LFile = RFile = string()
+%% Returns: ok | {error, epath} | {error, elogin} | {error, econn}
+send(Pid, LFile) ->
+ send(Pid, LFile, "").
+
+send(Pid, LFile, RFile) ->
+ gen_server:call(Pid, {send, LFile, RFile}, infinity).
+
+%% send_bin(Pid, Bin, RFile)
+%%
+%% Purpose: Transfer a binary to a remote file.
+%% Args: Pid = pid(), Bin = binary(), RFile = string()
+%% Returns: ok | {error, epath} | {error, elogin} | {error, enotbinary}
+%% | {error, econn}
+send_bin(Pid, Bin, RFile) when binary(Bin) ->
+ gen_server:call(Pid, {send_bin, Bin, RFile}, infinity);
+send_bin(Pid, Bin, RFile) ->
+ {error, enotbinary}.
+
+%% send_chunk_start(Pid, RFile)
+%%
+%% Purpose: Start transfer of chunks to remote file.
+%% Args: Pid = pid(), RFile = string().
+%% Returns: ok | {error, elogin} | {error, epath} | {error, econn}
+send_chunk_start(Pid, RFile) ->
+ gen_server:call(Pid, {send_chunk_start, RFile}, infinity).
+
+
+%% append_chunk_start(Pid, RFile)
+%%
+%% Purpose: Start append chunks of data to remote file.
+%% Args: Pid = pid(), RFile = string().
+%% Returns: ok | {error, elogin} | {error, epath} | {error, econn}
+append_chunk_start(Pid, RFile) ->
+ gen_server:call(Pid, {append_chunk_start, RFile}, infinity).
+
+
+%% send_chunk(Pid, Bin)
+%%
+%% Purpose: Send chunk to remote file.
+%% Args: Pid = pid(), Bin = binary().
+%% Returns: ok | {error, elogin} | {error, enotbinary} | {error, echunk}
+%% | {error, econn}
+send_chunk(Pid, Bin) when binary(Bin) ->
+ gen_server:call(Pid, {send_chunk, Bin}, infinity);
+send_chunk(Pid, Bin) ->
+ {error, enotbinary}.
+
+%%append_chunk(Pid, Bin)
+%%
+%% Purpose: Append chunk to remote file.
+%% Args: Pid = pid(), Bin = binary().
+%% Returns: ok | {error, elogin} | {error, enotbinary} | {error, echunk}
+%% | {error, econn}
+append_chunk(Pid, Bin) when binary(Bin) ->
+ gen_server:call(Pid, {append_chunk, Bin}, infinity);
+append_chunk(Pid, Bin) ->
+ {error, enotbinary}.
+
+%% send_chunk_end(Pid)
+%%
+%% Purpose: End sending of chunks to remote file.
+%% Args: Pid = pid().
+%% Returns: ok | {error, elogin} | {error, echunk} | {error, econn}
+send_chunk_end(Pid) ->
+ gen_server:call(Pid, send_chunk_end, infinity).
+
+%% append_chunk_end(Pid)
+%%
+%% Purpose: End appending of chunks to remote file.
+%% Args: Pid = pid().
+%% Returns: ok | {error, elogin} | {error, echunk} | {error, econn}
+append_chunk_end(Pid) ->
+ gen_server:call(Pid, append_chunk_end, infinity).
+
+%% append(Pid, LFile,RFile)
+%%
+%% Purpose: Append the local file to the remote file
+%% Args: Pid = pid(), LFile = RFile = string()
+%% Returns: ok | {error, epath} | {error, elogin} | {error, econn}
+append(Pid, LFile) ->
+ append(Pid, LFile, "").
+
+append(Pid, LFile, RFile) ->
+ gen_server:call(Pid, {append, LFile, RFile}, infinity).
+
+%% append_bin(Pid, Bin, RFile)
+%%
+%% Purpose: Append a binary to a remote file.
+%% Args: Pid = pid(), Bin = binary(), RFile = string()
+%% Returns: ok | {error, epath} | {error, elogin} | {error, enotbinary}
+%% | {error, econn}
+append_bin(Pid, Bin, RFile) when binary(Bin) ->
+ gen_server:call(Pid, {append_bin, Bin, RFile}, infinity);
+append_bin(Pid, Bin, RFile) ->
+ {error, enotbinary}.
+
+
+%% close(Pid)
+%%
+%% Purpose: End the ftp session.
+%% Args: Pid = pid()
+%% Returns: ok
+close(Pid) ->
+ case (catch gen_server:call(Pid, close, 30000)) of
+ ok ->
+ ok;
+ {'EXIT',{noproc,_}} ->
+ %% Already gone...
+ ok;
+ Res ->
+ Res
+ end.
+
+%% formaterror(Tag)
+%%
+%% Purpose: Return diagnostics.
+%% Args: Tag = atom() | {error, atom()}
+%% Returns: string().
+formaterror(Tag) ->
+ errstr(Tag).
+
+%% help()
+%%
+%% Purpose: Print list of valid commands.
+%%
+%% Undocumented.
+%%
+help() ->
+ io:format("\n Commands:\n"
+ " ---------\n"
+ " cd(Pid, Dir)\n"
+ " close(Pid)\n"
+ " delete(Pid, File)\n"
+ " formaterror(Tag)\n"
+ " help()\n"
+ " lcd(Pid, Dir)\n"
+ " lpwd(Pid)\n"
+ " ls(Pid [, Dir])\n"
+ " mkdir(Pid, Dir)\n"
+ " nlist(Pid [, Dir])\n"
+ " open(Host [Port, Flags])\n"
+ " pwd(Pid)\n"
+ " recv(Pid, RFile [, LFile])\n"
+ " recv_bin(Pid, RFile)\n"
+ " recv_chunk_start(Pid, RFile)\n"
+ " recv_chunk(Pid)\n"
+ " rename(Pid, CurrFile, NewFile)\n"
+ " rmdir(Pid, Dir)\n"
+ " send(Pid, LFile [, RFile])\n"
+ " send_chunk(Pid, Bin)\n"
+ " send_chunk_start(Pid, RFile)\n"
+ " send_chunk_end(Pid)\n"
+ " send_bin(Pid, Bin, RFile)\n"
+ " append(Pid, LFile [, RFile])\n"
+ " append_chunk(Pid, Bin)\n"
+ " append_chunk_start(Pid, RFile)\n"
+ " append_chunk_end(Pid)\n"
+ " append_bin(Pid, Bin, RFile)\n"
+ " type(Pid, Type)\n"
+ " account(Pid,Account)\n"
+ " user(Pid, User, Pass)\n"
+ " user(Pid, User, Pass,Account)\n").
+
+%%
+%% INIT
+%%
+
+-record(state, {csock = undefined, dsock = undefined, flags = undefined,
+ ldir = undefined, type = undefined, chunk = false,
+ pending = undefined}).
+
+init([Flags]) ->
+ sock_start(),
+ put(debug,get_debug(Flags)),
+ put(verbose,get_verbose(Flags)),
+ process_flag(priority, low),
+ {ok, LDir} = file:get_cwd(),
+ {ok, #state{flags = Flags, ldir = LDir}}.
+
+%%
+%% HANDLERS
+%%
+
+%% First group of reply code digits
+-define(POS_PREL, 1).
+-define(POS_COMPL, 2).
+-define(POS_INTERM, 3).
+-define(TRANS_NEG_COMPL, 4).
+-define(PERM_NEG_COMPL, 5).
+
+%% Second group of reply code digits
+-define(SYNTAX,0).
+-define(INFORMATION,1).
+-define(CONNECTION,2).
+-define(AUTH_ACC,3).
+-define(UNSPEC,4).
+-define(FILE_SYSTEM,5).
+
+
+-define(STOP_RET(E),{stop, normal, {error, E},
+ State#state{csock = undefined}}).
+
+
+rescode(?POS_PREL,_,_) -> pos_prel; %%Positive Preliminary Reply
+rescode(?POS_COMPL,_,_) -> pos_compl; %%Positive Completion Reply
+rescode(?POS_INTERM,?AUTH_ACC,2) -> pos_interm_acct; %%Positive Intermediate Reply, needs account
+rescode(?POS_INTERM,_,_) -> pos_interm; %%Positive Intermediate Reply
+rescode(?TRANS_NEG_COMPL,?FILE_SYSTEM,2) -> trans_no_space; %%No storage area no action taken
+rescode(?TRANS_NEG_COMPL,_,_) -> trans_neg_compl;%%Temporary Error, no action taken
+rescode(?PERM_NEG_COMPL,?FILE_SYSTEM,2) -> perm_no_space; %%Permanent disk space error, the user shall not try again
+rescode(?PERM_NEG_COMPL,?FILE_SYSTEM,3) -> perm_fname_not_allowed;
+rescode(?PERM_NEG_COMPL,_,_) -> perm_neg_compl.
+
+retcode(trans_no_space,_) -> etnospc;
+retcode(perm_no_space,_) -> epnospc;
+retcode(perm_fname_not_allowed,_) -> efnamena;
+retcode(_,Otherwise) -> Otherwise.
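+
+%% Example (sketch): a "250" reply maps via rescode(2,5,0) to pos_compl, while
+%% a "452" (insufficient storage) reply maps to trans_no_space, which
+%% retcode/2 turns into the error code etnospc.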
+
+handle_call({open,ip_comm,Conn_data},From,State) ->
+ case lists:keysearch(host,1,Conn_data) of
+ {value,{host,Host}}->
+ Port=get_key1(port,Conn_data,?FTP_PORT),
+ Timeout=get_key1(timeout,Conn_data,?OPEN_TIMEOUT),
+ open(Host,Port,Timeout,State);
+ false ->
+ ehost
+ end;
+
+handle_call({open,ip_comm,Host,Port},From,State) ->
+ open(Host,Port,?OPEN_TIMEOUT,State);
+
+handle_call({user, User, Pass}, _From, State) ->
+ #state{csock = CSock} = State,
+ case ctrl_cmd(CSock, "USER ~s", [User]) of
+ pos_interm ->
+ case ctrl_cmd(CSock, "PASS ~s", [Pass]) of
+ pos_compl ->
+ set_type(binary, CSock),
+ {reply, ok, State#state{type = binary}};
+ {error,enotconn} ->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, euser}, State}
+ end;
+ pos_compl ->
+ set_type(binary, CSock),
+ {reply, ok, State#state{type = binary}};
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, euser}, State}
+ end;
+
+handle_call({user, User, Pass,Acc}, _From, State) ->
+ #state{csock = CSock} = State,
+ case ctrl_cmd(CSock, "USER ~s", [User]) of
+ pos_interm ->
+ case ctrl_cmd(CSock, "PASS ~s", [Pass]) of
+ pos_compl ->
+ set_type(binary, CSock),
+ {reply, ok, State#state{type = binary}};
+ pos_interm_acct->
+ case ctrl_cmd(CSock,"ACCT ~s",[Acc]) of
+ pos_compl->
+ set_type(binary, CSock),
+ {reply, ok, State#state{type = binary}};
+ {error,enotconn}->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, eacct}, State}
+ end;
+ {error,enotconn} ->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, euser}, State}
+ end;
+ pos_compl ->
+ set_type(binary, CSock),
+ {reply, ok, State#state{type = binary}};
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, euser}, State}
+ end;
+
+%%set_account(Acc,State)->Reply
+%%Reply={reply, {error, euser}, State} | {error,enotconn}->
+handle_call({account,Acc},_From,State)->
+ #state{csock = CSock} = State,
+ case ctrl_cmd(CSock,"ACCT ~s",[Acc]) of
+ pos_compl->
+ {reply, ok,State};
+ {error,enotconn}->
+ ?STOP_RET(econn);
+ Error ->
+ debug(" error: ~p",[Error]),
+ {reply, {error, eacct}, State}
+ end;
+
+handle_call(pwd, _From, State) when State#state.chunk == false ->
+ #state{csock = CSock} = State,
+ %%
+ %% NOTE: The directory string comes over the control connection.
+ case sock_write(CSock, mk_cmd("PWD", [])) of
+ ok ->
+ {_, Line} = result_line(CSock),
+ {_, Cs} = split($", Line), % XXX Ugly
+ {Dir0, _} = split($", Cs),
+ Dir = lists:delete($", Dir0),
+ {reply, {ok, Dir}, State};
+ {error, enotconn} ->
+ ?STOP_RET(econn)
+ end;
+
+handle_call(lpwd, _From, State) ->
+ #state{csock = CSock, ldir = LDir} = State,
+ {reply, {ok, LDir}, State};
+
+handle_call({cd, Dir}, _From, State) when State#state.chunk == false ->
+ #state{csock = CSock} = State,
+ case ctrl_cmd(CSock, "CWD ~s", [Dir]) of
+ pos_compl ->
+ {reply, ok, State};
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, epath}, State}
+ end;
+
+handle_call({lcd, Dir}, _From, State) ->
+ #state{csock = CSock, ldir = LDir0} = State,
+ LDir = absname(LDir0, Dir),
+ case file:read_file_info(LDir) of
+ {ok, _ } ->
+ {reply, ok, State#state{ldir = LDir}};
+ _ ->
+ {reply, {error, epath}, State}
+ end;
+
+handle_call({dir, Len, Dir}, _From, State) when State#state.chunk == false ->
+ debug(" dir : ~p: ~s~n",[Len,Dir]),
+ #state{csock = CSock, type = Type} = State,
+ set_type(ascii, Type, CSock),
+ LSock = listen_data(CSock, raw),
+ Cmd = case Len of
+ short -> "NLST";
+ long -> "LIST"
+ end,
+ Result = case Dir of
+ "" ->
+ ctrl_cmd(CSock, Cmd, "");
+ _ ->
+ ctrl_cmd(CSock, Cmd ++ " ~s", [Dir])
+ end,
+ debug(" ctrl : command result: ~p~n",[Result]),
+ case Result of
+ pos_prel ->
+ debug(" dbg : await the data connection", []),
+ DSock = accept_data(LSock),
+ debug(" dbg : await the data", []),
+ Reply0 =
+ case recv_data(DSock) of
+ {ok, DirData} ->
+ debug(" data : DirData: ~p~n",[DirData]),
+ case result(CSock) of
+ pos_compl ->
+ {ok, DirData};
+ _ ->
+ {error, epath}
+ end;
+ {error, Reason} ->
+ sock_close(DSock),
+ verbose(" data : error: ~p, ~p~n",[Reason, result(CSock)]),
+ {error, epath}
+ end,
+
+ debug(" ctrl : reply: ~p~n",[Reply0]),
+ reset_type(ascii, Type, CSock),
+ {reply, Reply0, State};
+ {closed, _Why} ->
+ ?STOP_RET(econn);
+ _ ->
+ sock_close(LSock),
+ {reply, {error, epath}, State}
+ end;
+
+
+handle_call({rename, CurrFile, NewFile}, _From, State) when State#state.chunk == false ->
+ #state{csock = CSock} = State,
+ case ctrl_cmd(CSock, "RNFR ~s", [CurrFile]) of
+ pos_interm ->
+ case ctrl_cmd(CSock, "RNTO ~s", [NewFile]) of
+ pos_compl ->
+ {reply, ok, State};
+ _ ->
+ {reply, {error, epath}, State}
+ end;
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, epath}, State}
+ end;
+
+handle_call({delete, File}, _From, State) when State#state.chunk == false ->
+ #state{csock = CSock} = State,
+ case ctrl_cmd(CSock, "DELE ~s", [File]) of
+ pos_compl ->
+ {reply, ok, State};
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, epath}, State}
+ end;
+
+handle_call({mkdir, Dir}, _From, State) when State#state.chunk == false ->
+ #state{csock = CSock} = State,
+ case ctrl_cmd(CSock, "MKD ~s", [Dir]) of
+ pos_compl ->
+ {reply, ok, State};
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, epath}, State}
+ end;
+
+handle_call({rmdir, Dir}, _From, State) when State#state.chunk == false ->
+ #state{csock = CSock} = State,
+ case ctrl_cmd(CSock, "RMD ~s", [Dir]) of
+ pos_compl ->
+ {reply, ok, State};
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, epath}, State}
+ end;
+
+handle_call({type, Type}, _From, State) when State#state.chunk == false ->
+ #state{csock = CSock} = State,
+ case Type of
+ ascii ->
+ set_type(ascii, CSock),
+ {reply, ok, State#state{type = ascii}};
+ binary ->
+ set_type(binary, CSock),
+ {reply, ok, State#state{type = binary}};
+ _ ->
+ {reply, {error, etype}, State}
+ end;
+
+handle_call({recv, RFile, LFile}, _From, State) when State#state.chunk == false ->
+ #state{csock = CSock, ldir = LDir} = State,
+ ALFile = case LFile of
+ "" ->
+ absname(LDir, RFile);
+ _ ->
+ absname(LDir, LFile)
+ end,
+ case file_open(ALFile, write) of
+ {ok, Fd} ->
+ LSock = listen_data(CSock, binary),
+ Ret = case ctrl_cmd(CSock, "RETR ~s", [RFile]) of
+ pos_prel ->
+ DSock = accept_data(LSock),
+ recv_file(DSock, Fd),
+ Reply0 = case result(CSock) of
+ pos_compl ->
+ ok;
+ _ ->
+ {error, epath}
+ end,
+ sock_close(DSock),
+ {reply, Reply0, State};
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, epath}, State}
+ end,
+ file_close(Fd),
+ Ret;
+ {error, _What} ->
+ {reply, {error, epath}, State}
+ end;
+
+handle_call({recv_bin, RFile}, _From, State) when State#state.chunk == false ->
+ #state{csock = CSock, ldir = LDir} = State,
+ LSock = listen_data(CSock, binary),
+ case ctrl_cmd(CSock, "RETR ~s", [RFile]) of
+ pos_prel ->
+ DSock = accept_data(LSock),
+ Reply = recv_binary(DSock,CSock),
+ sock_close(DSock),
+ {reply, Reply, State};
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+ _ ->
+ {reply, {error, epath}, State}
+ end;
+
+
+handle_call({recv_chunk_start, RFile}, _From, State)
+ when State#state.chunk == false ->
+ start_chunk_transfer("RETR",RFile,State);
+
+handle_call(recv_chunk, _From, State)
+ when State#state.chunk == true ->
+ do_recv_chunk(State);
+
+
+handle_call({send, LFile, RFile}, _From, State)
+ when State#state.chunk == false ->
+ transfer_file("STOR",LFile,RFile,State);
+
+handle_call({append, LFile, RFile}, _From, State)
+ when State#state.chunk == false ->
+ transfer_file("APPE",LFile,RFile,State);
+
+
+handle_call({send_bin, Bin, RFile}, _From, State)
+ when State#state.chunk == false ->
+ transfer_data("STOR",Bin,RFile,State);
+
+handle_call({append_bin, Bin, RFile}, _From, State)
+ when State#state.chunk == false ->
+ transfer_data("APPE",Bin,RFile,State);
+
+
+
+handle_call({send_chunk_start, RFile}, _From, State)
+ when State#state.chunk == false ->
+ start_chunk_transfer("STOR",RFile,State);
+
+handle_call({append_chunk_start,RFile},_From,State)
+ when State#state.chunk==false->
+ start_chunk_transfer("APPE",RFile,State);
+
+handle_call({send_chunk, Bin}, _From, State)
+ when State#state.chunk == true ->
+ chunk_transfer(Bin,State);
+
+handle_call({append_chunk, Bin}, _From, State)
+ when State#state.chunk == true ->
+ chunk_transfer(Bin,State);
+
+handle_call(append_chunk_end, _From, State)
+ when State#state.chunk == true ->
+ end_chunk_transfer(State);
+
+handle_call(send_chunk_end, _From, State)
+ when State#state.chunk == true ->
+ end_chunk_transfer(State);
+
+
+
+handle_call(close, _From, State) when State#state.chunk == false ->
+ #state{csock = CSock} = State,
+ ctrl_cmd(CSock, "QUIT", []),
+ sock_close(CSock),
+ {stop, normal, ok, State};
+
+handle_call(_, _From, State) when State#state.chunk == true ->
+ {reply, {error, echunk}, State}.
+
+
+handle_cast(Msg, State) ->
+ {noreply, State}.
+
+
+handle_info({Sock, {fromsocket, Bytes}}, State) when Sock == State#state.csock ->
+ put(leftovers, Bytes ++ leftovers()),
+ {noreply, State};
+
+%% Data connection closed (during chunk sending)
+handle_info({Sock, {socket_closed, _Reason}}, State) when Sock == State#state.dsock ->
+ {noreply, State#state{dsock = undefined}};
+
+%% Control connection closed.
+handle_info({Sock, {socket_closed, _Reason}}, State) when Sock == State#state.csock ->
+ debug(" sc : ~s~n",[leftovers()]),
+ {stop, ftp_server_close, State#state{csock = undefined}};
+
+handle_info(Info, State) ->
+ error_logger:info_msg("ftp : ~w : Unexpected message: ~w\n", [self(),Info]),
+ {noreply, State}.
+
+code_change(OldVsn,State,Extra)->
+ {ok,State}.
+
+terminate(Reason, State) ->
+ ok.
+%%
+%% OPEN CONNECTION
+%%
+open(Host,Port,Timeout,State)->
+ case sock_connect(Host,Port,Timeout) of
+ {error, What} ->
+ {stop, normal, {error, What}, State};
+ CSock ->
+ case result(CSock, State#state.flags) of
+ {error,Reason} ->
+ sock_close(CSock),
+ {stop,normal,{error,Reason},State};
+ _ -> % We should really check this...
+ {reply, {ok, self()}, State#state{csock = CSock}}
+ end
+ end.
+
+
+
+%%
+%% CONTROL CONNECTION
+%%
+
+ctrl_cmd(CSock, Fmt, Args) ->
+ Cmd = mk_cmd(Fmt, Args),
+ case sock_write(CSock, Cmd) of
+ ok ->
+ debug(" cmd : ~s",[Cmd]),
+ result(CSock);
+ {error, enotconn} ->
+ {error, enotconn};
+ Other ->
+ Other
+ end.
+
+mk_cmd(Fmt, Args) ->
+ [io_lib:format(Fmt, Args)| "\r\n"]. % Deep list ok.
+
+%%
+%% TRANSFER TYPE
+%%
+
+%%
+%% set_type(NewType, CurrType, CSock)
+%% reset_type(NewType, CurrType, CSock)
+%%
+set_type(Type, Type, CSock) ->
+ ok;
+set_type(NewType, _OldType, CSock) ->
+ set_type(NewType, CSock).
+
+reset_type(Type, Type, CSock) ->
+ ok;
+reset_type(_NewType, OldType, CSock) ->
+ set_type(OldType, CSock).
+
+set_type(ascii, CSock) ->
+ ctrl_cmd(CSock, "TYPE A", []);
+set_type(binary, CSock) ->
+ ctrl_cmd(CSock, "TYPE I", []).
+
+%%
+%% DATA CONNECTION
+%%
+
+%% Create a listen socket for a data connection and send a PORT command
+%% containing the IP address and port number. Mode is binary or raw.
+%%
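+%% Example (sketch): for local address {127,0,0,1} and listen port 20000 the
+%% command sent is "PORT 127,0,0,1,78,32", since 20000 = 78*256 + 32.
+%%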
+listen_data(CSock, Mode) ->
+ {IP, _} = sock_name(CSock), % IP address of control conn.
+ LSock = sock_listen(Mode, IP),
+ Port = sock_listen_port(LSock),
+ {A1, A2, A3, A4} = IP,
+ {P1, P2} = {Port div 256, Port rem 256},
+ ctrl_cmd(CSock, "PORT ~w,~w,~w,~w,~w,~w", [A1, A2, A3, A4, P1, P2]),
+ LSock.
+
+%%
+%% Accept the data connection and close the listen socket.
+%%
+accept_data(LSock) ->
+ Sock = sock_accept(LSock),
+ sock_close(LSock),
+ Sock.
+
+%%
+%% DATA COLLECTION (ls, dir)
+%%
+%% Socket is a byte stream in ASCII mode.
+%%
+
+%% Receive data (from data connection).
+recv_data(Sock) ->
+ recv_data(Sock, [], 0).
+recv_data(Sock, Sofar, ?OPER_TIMEOUT) ->
+ sock_close(Sock),
+ {ok, lists:flatten(lists:reverse(Sofar))};
+recv_data(Sock, Sofar, Retry) ->
+ case sock_read(Sock) of
+ {ok, Data} ->
+ debug(" dbg : received some data: ~n~s", [Data]),
+ recv_data(Sock, [Data| Sofar], 0);
+ {error, timeout} ->
+ %% Retry..
+ recv_data(Sock, Sofar, Retry+1);
+ {error, Reason} ->
+ SoFar1 = lists:flatten(lists:reverse(Sofar)),
+ {error, {socket_error, Reason, SoFar1, Retry}};
+ {closed, _} ->
+ {ok, lists:flatten(lists:reverse(Sofar))}
+ end.
+
+%%
+%% BINARY TRANSFER
+%%
+
+%% --------------------------------------------------
+
+%% recv_binary(DSock,CSock) = {ok,Bin} | {error,Reason}
+%%
+recv_binary(DSock,CSock) ->
+ recv_binary1(recv_binary2(DSock,[],0),CSock).
+
+recv_binary1(Reply,Sock) ->
+ case result(Sock) of
+ pos_compl -> Reply;
+ _ -> {error, epath}
+ end.
+
+recv_binary2(Sock, _Bs, ?OPER_TIMEOUT) ->
+ sock_close(Sock),
+ {error,eclosed};
+recv_binary2(Sock, Bs, Retry) ->
+ case sock_read(Sock) of
+ {ok, Bin} ->
+ recv_binary2(Sock, [Bs, Bin], 0);
+ {error, timeout} ->
+ recv_binary2(Sock, Bs, Retry+1);
+ {closed, _Why} ->
+ {ok,list_to_binary(Bs)}
+ end.
+
+%% --------------------------------------------------
+
+%%
+%% recv_chunk
+%%
+
+do_recv_chunk(#state{dsock = undefined} = State) ->
+ {reply, {error,econn}, State};
+do_recv_chunk(State) ->
+ recv_chunk1(recv_chunk2(State, 0), State).
+
+recv_chunk1({ok, _Bin} = Reply, State) ->
+ {reply, Reply, State};
+%% Reply = ok | {error, Reason}
+recv_chunk1(Reply, #state{csock = CSock} = State) ->
+ State1 = State#state{dsock = undefined, chunk = false},
+ case result(CSock) of
+ pos_compl ->
+ {reply, Reply, State1};
+ _ ->
+ {reply, {error, epath}, State1}
+ end.
+
+recv_chunk2(#state{dsock = DSock} = State, ?OPER_TIMEOUT) ->
+ sock_close(DSock),
+ {error, eclosed};
+recv_chunk2(#state{dsock = DSock} = State, Retry) ->
+ case sock_read(DSock) of
+ {ok, Bin} ->
+ {ok, Bin};
+ {error, timeout} ->
+ recv_chunk2(State, Retry+1);
+ {closed, Reason} ->
+ debug(" dbg : socket closed: ~p", [Reason]),
+ ok
+ end.
+
+
+%% --------------------------------------------------
+
+%%
+%% FILE TRANSFER
+%%
+
+recv_file(Sock, Fd) ->
+ recv_file(Sock, Fd, 0).
+
+recv_file(Sock, Fd, ?OPER_TIMEOUT) ->
+ sock_close(Sock),
+ {closed, timeout};
+recv_file(Sock, Fd, Retry) ->
+ case sock_read(Sock) of
+ {ok, Bin} ->
+ file_write(Fd, Bin),
+ recv_file(Sock, Fd);
+ {error, timeout} ->
+ recv_file(Sock, Fd, Retry+1);
+% {error, Reason} ->
+% SoFar1 = lists:flatten(lists:reverse(Sofar)),
+% exit({socket_error, Reason, Sock, SoFar1, Retry});
+ {closed, How} ->
+ {closed, How}
+ end.
+
+%%
+%% send_file(Fd, Sock) = ok | {error, Why}
+%%
+
+send_file(Fd, Sock) ->
+ {N, Bin} = file_read(Fd),
+ if
+ N > 0 ->
+ case sock_write(Sock, Bin) of
+ ok ->
+ send_file(Fd, Sock);
+ {error, Reason} ->
+ {error, Reason}
+ end;
+ true ->
+ ok
+ end.
+
+
+
+%%
+%% PARSING OF RESULT LINES
+%%
+
+%% Excerpt from RFC 959:
+%%
+%% "A reply is defined to contain the 3-digit code, followed by Space
+%% <SP>, followed by one line of text (where some maximum line length
+%% has been specified), and terminated by the Telnet end-of-line
+%% code. There will be cases however, where the text is longer than
+%% a single line. In these cases the complete text must be bracketed
+%% so the User-process knows when it may stop reading the reply (i.e.
+%% stop processing input on the control connection) and go do other
+%% things. This requires a special format on the first line to
+%% indicate that more than one line is coming, and another on the
+%% last line to designate it as the last. At least one of these must
+%% contain the appropriate reply code to indicate the state of the
+%% transaction. To satisfy all factions, it was decided that both
+%% the first and last line codes should be the same.
+%%
+%% Thus the format for multi-line replies is that the first line
+%% will begin with the exact required reply code, followed
+%% immediately by a Hyphen, "-" (also known as Minus), followed by
+%% text. The last line will begin with the same code, followed
+%% immediately by Space <SP>, optionally some text, and the Telnet
+%% end-of-line code.
+%%
+%% For example:
+%% 123-First line
+%% Second line
+%% 234 A line beginning with numbers
+%% 123 The last line
+%%
+%% The user-process then simply needs to search for the second
+%% occurrence of the same reply code, followed by <SP> (Space), at
+%% the beginning of a line, and ignore all intermediary lines. If
+%% an intermediary line begins with a 3-digit number, the Server
+%% must pad the front to avoid confusion.
+%%
+%% This scheme allows standard system routines to be used for
+%% reply information (such as for the STAT reply), with
+%% "artificial" first and last lines tacked on. In rare cases
+%% where these routines are able to generate three digits and a
+%% Space at the beginning of any line, the beginning of each
+%% text line should be offset by some neutral text, like Space.
+%%
+%% This scheme assumes that multi-line replies may not be nested."
+
+%% We have to collect the stream of result characters into lines (ending
+%% in "\r\n"; we check for "\n"). When a line is assembled, left-over
+%% characters are saved in the process dictionary.
+%%
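+%% Example (sketch): with leftovers "220 Service ready\r\n230 Logg" pending,
+%% getline/1 returns "220 Service ready\r\n" and stores "230 Logg" back in the
+%% process dictionary until more bytes arrive on the control connection.
+%%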
+
+%% result(Sock) = rescode()
+%%
+result(Sock) ->
+ result(Sock, false).
+
+result_line(Sock) ->
+ result(Sock, true).
+
+%% result(Sock, Bool) = {error,Reason} | rescode() | {rescode(), Lines}
+%% Printout if Bool = true.
+%%
+result(Sock, RetForm) ->
+ case getline(Sock) of
+ Line when length(Line) > 3 ->
+ [D1, D2, D3| Tail] = Line,
+ case Tail of
+ [$-| _] ->
+ parse_to_end(Sock, [D1, D2, D3, $ ]); % 3 digits + space
+ _ ->
+ ok
+ end,
+ result(D1,D2,D3,Line,RetForm);
+ _ ->
+ retform(rescode(?PERM_NEG_COMPL,-1,-1),[],RetForm)
+ end.
+
+result(D1,_D2,_D3,Line,_RetForm) when D1 - $0 > 10 ->
+ {error,{invalid_server_response,Line}};
+result(D1,_D2,_D3,Line,_RetForm) when D1 - $0 < 0 ->
+ {error,{invalid_server_response,Line}};
+result(D1,D2,D3,Line,RetForm) ->
+ Res1 = D1 - $0,
+ Res2 = D2 - $0,
+ Res3 = D3 - $0,
+ verbose(" ~w : ~s", [Res1, Line]),
+ retform(rescode(Res1,Res2,Res3),Line,RetForm).
+
+retform(ResCode,Line,true) ->
+ {ResCode,Line};
+retform(ResCode,_,_) ->
+ ResCode.
+
+leftovers() ->
+ case get(leftovers) of
+ undefined -> [];
+ X -> X
+ end.
+
+%% getline(Sock) = Line
+%%
+getline(Sock) ->
+ getline(Sock, leftovers()).
+
+getline(Sock, Rest) ->
+ getline1(Sock, split($\n, Rest), 0).
+
+getline1(Sock, {[], Rest}, ?OPER_TIMEOUT) ->
+ sock_close(Sock),
+ put(leftovers, Rest),
+ [];
+getline1(Sock, {[], Rest}, Retry) ->
+ case sock_read(Sock) of
+ {ok, More} ->
+ debug(" read : ~s~n",[More]),
+ getline(Sock, Rest ++ More);
+ {error, timeout} ->
+ %% Retry..
+ getline1(Sock, {[], Rest}, Retry+1);
+ Error ->
+ put(leftovers, Rest),
+ []
+ end;
+getline1(Sock, {Line, Rest}, Retry) ->
+ put(leftovers, Rest),
+ Line.
+
+parse_to_end(Sock, Prefix) ->
+ Line = getline(Sock),
+ case lists:prefix(Prefix, Line) of
+ false ->
+ parse_to_end(Sock, Prefix);
+ true ->
+ ok
+ end.
+
+
+%% Split list after first occurrence of S.
+%% Returns {Prefix, Suffix} ({[], Cs} if S not found).
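+%% Example (sketch): split($\n, "250 OK\r\nmore") -> {"250 OK\r\n", "more"}.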
+split(S, Cs) ->
+ split(S, Cs, []).
+
+split(S, [S| Cs], As) ->
+ {lists:reverse([S|As]), Cs};
+split(S, [C| Cs], As) ->
+ split(S, Cs, [C| As]);
+split(_, [], As) ->
+ {[], lists:reverse(As)}.
+
+%%
+%% FILE INTERFACE
+%%
+%% All files are opened raw in binary mode.
+%%
+-define(BUFSIZE, 4096).
+
+file_open(File, Option) ->
+ file:open(File, [raw, binary, Option]).
+
+file_close(Fd) ->
+ file:close(Fd).
+
+
+file_read(Fd) -> % Compatible with pre R2A.
+ case file:read(Fd, ?BUFSIZE) of
+ {ok, {N, Bytes}} ->
+ {N, Bytes};
+ {ok, Bytes} ->
+ {size(Bytes), Bytes};
+ eof ->
+ {0, []}
+ end.
+
+file_write(Fd, Bytes) ->
+ file:write(Fd, Bytes).
+
+absname(Dir, File) -> % Args swapped.
+ filename:absname(File, Dir).
+
+
+
+%% sock_start()
+%%
+
+%%
+%% USE GEN_TCP
+%%
+
+sock_start() ->
+ inet_db:start().
+
+%%
+%% Connect to FTP server at Host (default is TCP port 21) in raw mode,
+%% in order to establish a control connection.
+%%
+
+sock_connect(Host,Port,TimeOut) ->
+ debug(" info : connect to server on ~p:~p~n",[Host,Port]),
+ Opts = [{packet, 0}, {active, false}],
+ case (catch gen_tcp:connect(Host, Port, Opts,TimeOut)) of
+ {'EXIT', R1} -> % XXX Probably no longer needed.
+ debug(" error: socket connectionn failed with exit reason:"
+ "~n ~p",[R1]),
+ {error, ehost};
+ {error, R2} ->
+ debug(" error: socket connectionn failed with exit reason:"
+ "~n ~p",[R2]),
+ {error, ehost};
+ {ok, Sock} ->
+ Sock
+ end.
+
+%%
+%% Create a listen socket (any port) in binary or raw non-packet mode for
+%% data connection.
+%%
+sock_listen(Mode, IP) ->
+ Opts = case Mode of
+ binary ->
+ [binary, {packet, 0}];
+ raw ->
+ [{packet, 0}]
+ end,
+ {ok, Sock} = gen_tcp:listen(0, [{ip, IP}, {active, false} | Opts]),
+ Sock.
+
+sock_accept(LSock) ->
+ {ok, Sock} = gen_tcp:accept(LSock),
+ Sock.
+
+sock_close(undefined) ->
+ ok;
+sock_close(Sock) ->
+ gen_tcp:close(Sock).
+
+sock_read(Sock) ->
+ case gen_tcp:recv(Sock, 0, ?BYTE_TIMEOUT) of
+ {ok, Bytes} ->
+ {ok, Bytes};
+
+ {error, closed} ->
+ {closed, closed}; % Yes
+
+ %% --- OTP-4770 begin ---
+ %%
+ %% This seems to happen on windows
+ %% "Someone" tried to close an already closed socket...
+ %%
+
+ {error, enotsock} ->
+ {closed, enotsock};
+
+ %%
+ %% --- OTP-4770 end ---
+
+ {error, etimedout} ->
+ {error, timeout};
+
+ Other ->
+ Other
+ end.
+
+%% receive
+%% {tcp, Sock, Bytes} ->
+%% {ok, Bytes};
+%% {tcp_closed, Sock} ->
+%% {closed, closed}
+%% end.
+
+sock_write(Sock, Bytes) ->
+ gen_tcp:send(Sock, Bytes).
+
+sock_name(Sock) ->
+ {ok, {IP, Port}} = inet:sockname(Sock),
+ {IP, Port}.
+
+sock_listen_port(LSock) ->
+ {ok, Port} = inet:port(LSock),
+ Port.
+
+
+%%
+%% ERROR STRINGS
+%%
+errstr({error, Reason}) ->
+ errstr(Reason);
+
+errstr(echunk) -> "Synchronisation error during chunk sending.";
+errstr(eclosed) -> "Session has been closed.";
+errstr(econn) -> "Connection to remote server prematurely closed.";
+errstr(eexists) ->"File or directory already exists.";
+errstr(ehost) -> "Host not found, FTP server not found, "
+"or connection rejected.";
+errstr(elogin) -> "User not logged in.";
+errstr(enotbinary) -> "Term is not a binary.";
+errstr(epath) -> "No such file or directory, already exists, "
+"or permission denied.";
+errstr(etype) -> "No such type.";
+errstr(euser) -> "User name or password not valid.";
+errstr(etnospc) -> "Insufficient storage space in system.";
+errstr(epnospc) -> "Exceeded storage allocation "
+"(for current directory or dataset).";
+errstr(efnamena) -> "File name not allowed.";
+errstr(Reason) ->
+ lists:flatten(io_lib:format("Unknown error: ~w", [Reason])).
+
+
+
+%% ----------------------------------------------------------
+
+get_verbose(Params) -> check_param(verbose,Params).
+
+get_debug(Flags) -> check_param(debug,Flags).
+
+check_param(P,Ps) -> lists:member(P,Ps).
+
+
+%% verbose -> ok
+%%
+%% Prints the string if the verbose flag is set
+%%
+%% Params: F Format string
+%% A Arguments to the format string
+%%
+verbose(F,A) -> verbose(get(verbose),F,A).
+
+verbose(true,F,A) -> print(F,A);
+verbose(_,_F,_A) -> ok.
+
+
+
+
+%% debug -> ok
+%%
+%% Prints the string if debug enabled
+%%
+%% Params: F Format string
+%% A Arguments to the format string
+%%
+debug(F,A) -> debug(get(debug),F,A).
+
+debug(true,F,A) -> print(F,A);
+debug(_,_F,_A) -> ok.
+
+
+print(F,A) -> io:format(F,A).
+
+
+
+transfer_file(Cmd,LFile,RFile,State)->
+ #state{csock = CSock, ldir = LDir} = State,
+ ARFile = case RFile of
+ "" ->
+ LFile;
+ _ ->
+ RFile
+ end,
+ ALFile = absname(LDir, LFile),
+ case file_open(ALFile, read) of
+ {ok, Fd} ->
+ LSock = listen_data(CSock, binary),
+ case ctrl_cmd(CSock, "~s ~s", [Cmd,ARFile]) of
+ pos_prel ->
+ DSock = accept_data(LSock),
+ SFreply = send_file(Fd, DSock),
+ file_close(Fd),
+ sock_close(DSock),
+ case {SFreply,result(CSock)} of
+ {ok,pos_compl} ->
+ {reply, ok, State};
+ {ok,Other} ->
+ debug(" error: unknown reply: ~p~n",[Other]),
+ {reply, {error, epath}, State};
+ {{error,Why},Result} ->
+ ?STOP_RET(retcode(Result,econn))
+ end;
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+ Other ->
+ debug(" error: ctrl failed: ~p~n",[Other]),
+ {reply, {error, epath}, State}
+ end;
+ {error, Reason} ->
+ debug(" error: file open: ~p~n",[Reason]),
+ {reply, {error, epath}, State}
+ end.
+
+transfer_data(Cmd,Bin,RFile,State)->
+ #state{csock = CSock, ldir = LDir} = State,
+ LSock = listen_data(CSock, binary),
+ case ctrl_cmd(CSock, "~s ~s", [Cmd,RFile]) of
+ pos_prel ->
+ DSock = accept_data(LSock),
+ SReply = sock_write(DSock, Bin),
+ sock_close(DSock),
+ case {SReply,result(CSock)} of
+ {ok,pos_compl} ->
+ {reply, ok, State};
+ {ok,trans_no_space} ->
+ ?STOP_RET(etnospc);
+ {ok,perm_no_space} ->
+ ?STOP_RET(epnospc);
+ {ok,perm_fname_not_allowed} ->
+ ?STOP_RET(efnamena);
+ {ok,Other} ->
+ debug(" error: unknown reply: ~p~n",[Other]),
+ {reply, {error, epath}, State};
+ {{error,Why},Result} ->
+ ?STOP_RET(retcode(Result,econn))
+ %% {{error,_Why},_Result} ->
+ %% ?STOP_RET(econn)
+ end;
+
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+
+ Other ->
+ debug(" error: ctrl failed: ~p~n",[Other]),
+ {reply, {error, epath}, State}
+ end.
+
+
+start_chunk_transfer(Cmd, RFile, #state{csock = CSock} = State) ->
+ LSock = listen_data(CSock, binary),
+ case ctrl_cmd(CSock, "~s ~s", [Cmd,RFile]) of
+ pos_prel ->
+ DSock = accept_data(LSock),
+ {reply, ok, State#state{dsock = DSock, chunk = true}};
+ {error, enotconn} ->
+ ?STOP_RET(econn);
+ Otherwise ->
+ debug(" error: ctrl failed: ~p~n",[Otherwise]),
+ {reply, {error, epath}, State}
+ end.
+
+
+chunk_transfer(Bin,State)->
+ #state{dsock = DSock, csock = CSock} = State,
+ case DSock of
+ undefined ->
+ {reply,{error,econn},State};
+ _ ->
+ case sock_write(DSock, Bin) of
+ ok ->
+ {reply, ok, State};
+ Other ->
+ debug(" error: chunk write error: ~p~n",[Other]),
+ {reply, {error, econn}, State#state{dsock = undefined}}
+ end
+ end.
+
+
+
+end_chunk_transfer(State)->
+ #state{csock = CSock, dsock = DSock} = State,
+ case DSock of
+ undefined ->
+ Result = result(CSock),
+ case Result of
+ pos_compl ->
+ {reply,ok,State#state{dsock = undefined,
+ chunk = false}};
+ trans_no_space ->
+ ?STOP_RET(etnospc);
+ perm_no_space ->
+ ?STOP_RET(epnospc);
+ perm_fname_not_allowed ->
+ ?STOP_RET(efnamena);
+ Result ->
+ debug(" error: send chunk end (1): ~p~n",
+ [Result]),
+ {reply,{error,epath},State#state{dsock = undefined,
+ chunk = false}}
+ end;
+ _ ->
+ sock_close(DSock),
+ Result = result(CSock),
+ case Result of
+ pos_compl ->
+ {reply,ok,State#state{dsock = undefined,
+ chunk = false}};
+ trans_no_space ->
+ sock_close(CSock),
+ ?STOP_RET(etnospc);
+ perm_no_space ->
+ sock_close(CSock),
+ ?STOP_RET(epnospc);
+ perm_fname_not_allowed ->
+ sock_close(CSock),
+ ?STOP_RET(efnamena);
+ Result ->
+ debug(" error: send chunk end (2): ~p~n",
+ [Result]),
+ {reply,{error,epath},State#state{dsock = undefined,
+ chunk = false}}
+ end
+ end.
+
+get_key1(Key,List,Default)->
+ case lists:keysearch(Key,1,List)of
+ {value,{_,Val}}->
+ Val;
+ false->
+ Default
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl
new file mode 100644
index 0000000000..a732f23aec
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl
@@ -0,0 +1,260 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Mobile Arts AB
+%% Portions created by Mobile Arts are Copyright 2002, Mobile Arts AB
+%% All Rights Reserved.''
+%%
+%%
+
+%%% This version of the HTTP/1.1 client implements:
+%%% - RFC 2616 HTTP 1.1 client part
+%%% - RFC 2817 Upgrading to TLS Within HTTP/1.1 (not yet!)
+%%% - RFC 2818 HTTP Over TLS
+%%% - RFC 3229 Delta encoding in HTTP (not yet!)
+%%% - RFC 3230 Instance Digests in HTTP (not yet!)
+%%% - RFC 3310 Authentication and Key Agreement (AKA) (not yet!)
+%%% - HTTP/1.1 Specification Errata found at
+%%% http://world.std.com/~lawrence/http_errata.html
+%%% Additionally, it follows these recommendations:
+%%% - RFC 3143 Known HTTP Proxy/Caching Problems (not yet!)
+%%% - draft-nottingham-hdrreg-http-00.txt (not yet!)
+%%%
+%%% Depends on
+%%% - uri.erl for all URL parsing (except what is handled by the C driver)
+%%% - http_lib.erl for all parsing of body and headers
+%%%
+%%% Supported Settings are:
+%%% http_timeout % (int) Milliseconds before a request times out
+%%% http_useproxy % (bool) True if a proxy should be used
+%%% http_proxy % (string) Proxy
+%%% http_noproxylist % (list) List with hosts not requiring proxy
+%%% http_autoredirect % (bool) True if automatic redirection on 30X responses
+%%% http_ssl % (list) SSL settings. A non-empty list enables SSL/TLS
+%%% support in the HTTP client
+%%% http_pipelinesize % (int) Length of pipeline. 1 means no pipeline.
+%%% Only has effect when initiating a new session.
+%%% http_sessions % (int) Max number of open sessions for {Addr,Port}
+%%%
+%%% TODO: (Known bugs!)
+%% - Cache handling
+%% - Doesn't handle a bunch of entity headers properly
+%% - Better handling of status codes different from 200,30X and 50X
+%% - Many of the settings above are not implemented!
+%% - close_session/2 and cancel_request/1 don't work
+%% - Variable pipe size.
+%% - Because inet_drv only has a single timer, the timeouts given
+%% for pipelined requests are not correct (too long)
+%%
+%% Note:
+%% - Some servers (e.g. Microsoft-IIS/5.0) may sometimes not return a proper
+%% 'Location' header on a redirect.
+%% The client will fail with {error,no_scheme} in these cases.
+
+-module(http).
+-author("[email protected]").
+
+-export([start/0,
+ request/3,request/4,cancel_request/1,
+ request_sync/2,request_sync/3]).
+
+-include("http.hrl").
+-include("jnets_httpd.hrl").
+
+-define(START_OPTIONS,[]).
+
+%%% HTTP Client manager. Used to store open connections.
+%%% Will be started automatically unless started explicitly.
+start() ->
+ application:start(ssl),
+ httpc_manager:start().
+
+%%% Asynchronous HTTP request that spawns a handler.
+%%% Method HTTPReq
+%%% options,get,head,delete,trace = {Url,Headers}
+%%% post,put = {Url,Headers,ContentType,Body}
+%%% where Url is a {Scheme,Host,Port,PathQuery} tuple, as returned by uri.erl
+%%%
+%%% Returns: {ok,ReqId} |
+%%% {error,Reason}
+%%% If {ok,ReqId} is returned, the handler will reply with
+%%% gen_server:cast(From,{Ref,ReqId,{error,Reason}}) |
+%%% gen_server:cast(From,{Ref,ReqId,{Status,Headers,Body}})
+%%% where Reason is an atom and Headers a #res_headers{} record
+%%% http:format_error(Reason) gives a more informative description.
+%%%
+%%% Note:
+%%% - Always try to find an open connection to a given host and port, and use
+%%% the associated socket.
+%%% - Unless a 'Connection: close' header is provided don't close the socket
+%%% after a response is given
+%%% - A given Pid, found in the database, might be terminated before the
+%%% message is sent to the Pid. This will happen e.g., if the connection is
+%%% closed by the other party and there are no pending requests.
+%%% - The HTTP connection process is spawned, if necessary, in
+%%% httpc_manager:add_connection/4
+request(Ref,Method,HTTPReqCont) ->
+ request(Ref,Method,HTTPReqCont,[],self()).
+
+request(Ref,Method,HTTPReqCont,Settings) ->
+ request(Ref,Method,HTTPReqCont,Settings,self()).
+
+request(Ref,Method,{{Scheme,Host,Port,PathQuery},
+ Headers,ContentType,Body},Settings,From) ->
+ case create_settings(Settings,#client_settings{}) of
+ {error,Reason} ->
+ {error,Reason};
+ CS ->
+ case create_headers(Headers,#req_headers{}) of
+ {error,Reason} ->
+ {error,Reason};
+ H ->
+ Req=#request{ref=Ref,from=From,
+ scheme=Scheme,address={Host,Port},
+ pathquery=PathQuery,method=Method,
+ headers=H,content={ContentType,Body},
+ settings=CS},
+ httpc_manager:request(Req)
+ end
+ end;
+request(Ref,Method,{Url,Headers},Settings,From) ->
+ request(Ref,Method,{Url,Headers,[],[]},Settings,From).
+
+%%% Cancels requests identified with ReqId.
+%%% FIXME! Doesn't work...
+cancel_request(ReqId) ->
+ httpc_manager:cancel_request(ReqId).
+
+%%% Close all sessions currently open to Host:Port
+%%% FIXME! Doesn't work...
+close_session(Host,Port) ->
+ httpc_manager:close_session(Host,Port).
+
+
+%%% Synchronous HTTP request that waits until a response is created
+%%% (e.g. a successful reply or a timeout)
+%%% Method HTTPReq
+%%% options,get,head,delete,trace = {Url,Headers}
+%%% post,put = {Url,Headers,ContentType,Body}
+%%% where Url is a string() or a {Scheme,Host,Port,PathQuery} tuple
+%%%
+%%% Returns: {Status,Headers,Body} |
+%%% {error,Reason}
+%%% where Reason is an atom.
+%%% http:format_error(Reason) gives a more informative description.
+request_sync(Method,HTTPReqCont) ->
+ request_sync(Method,HTTPReqCont,[]).
+
+request_sync(Method,{Url,Headers},Settings)
+ when Method==options;Method==get;Method==head;Method==delete;Method==trace ->
+ case uri:parse(Url) of
+ {error,Reason} ->
+ {error,Reason};
+ ParsedUrl ->
+ request_sync(Method,{ParsedUrl,Headers,[],[]},Settings,0)
+ end;
+request_sync(Method,{Url,Headers,ContentType,Body},Settings)
+ when Method==post;Method==put ->
+ case uri:parse(Url) of
+ {error,Reason} ->
+ {error,Reason};
+ ParsedUrl ->
+ request_sync(Method,{ParsedUrl,Headers,ContentType,Body},Settings,0)
+ end;
+request_sync(Method,Request,Settings) ->
+ {error,bad_request}.
+
+request_sync(Method,HTTPCont,Settings,_Redirects) ->
+ case request(request_sync,Method,HTTPCont,Settings,self()) of
+ {ok,_ReqId} ->
+ receive
+ {'$gen_cast',{request_sync,_ReqId2,{Status,Headers,Body}}} ->
+ {Status,pp_headers(Headers),binary_to_list(Body)};
+ {'$gen_cast',{request_sync,_ReqId2,{error,Reason}}} ->
+ {error,Reason};
+ Error ->
+ Error
+ end;
+ Error ->
+ Error
+ end.
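+
+%%% A rough usage sketch of request_sync/3 (the URL and timeout below are
+%%% made-up examples; assumes http:start/0 has been called first):
+%%%
+%%% case http:request_sync(get,{"http://www.erlang.org/",[]},
+%%% [{http_timeout,10000}]) of
+%%% {error,Reason} -> {error,Reason};
+%%% {Status,Headers,Body} -> {Status,Headers,Body}
+%%% end.
+%%%
+%%% Status is the numeric status code, Headers the key/value list built by
+%%% pp_headers/1 below, and Body the response body as a string.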
+
+
+create_settings([],Out) ->
+ Out;
+create_settings([{http_timeout,Val}|Settings],Out) ->
+ create_settings(Settings,Out#client_settings{timeout=Val});
+create_settings([{http_useproxy,Val}|Settings],Out) ->
+ create_settings(Settings,Out#client_settings{useproxy=Val});
+create_settings([{http_proxy,Val}|Settings],Out) ->
+ create_settings(Settings,Out#client_settings{proxy=Val});
+create_settings([{http_noproxylist,Val}|Settings],Out) ->
+ create_settings(Settings,Out#client_settings{noproxylist=Val});
+create_settings([{http_autoredirect,Val}|Settings],Out) ->
+ create_settings(Settings,Out#client_settings{autoredirect=Val});
+create_settings([{http_ssl,Val}|Settings],Out) ->
+ create_settings(Settings,Out#client_settings{ssl=Val});
+create_settings([{http_pipelinesize,Val}|Settings],Out)
+ when integer(Val),Val>0 ->
+ create_settings(Settings,Out#client_settings{max_quelength=Val});
+create_settings([{http_sessions,Val}|Settings],Out)
+ when integer(Val),Val>0 ->
+ create_settings(Settings,Out#client_settings{max_sessions=Val});
+create_settings([{Key,_Val}|_Settings],_Out) ->
+ io:format("ERROR bad settings, got ~p~n",[Key]),
+ {error,bad_settings}.
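+
+%%% For illustration only, a Settings list as accepted by create_settings/2
+%%% above (the values are made-up examples), with the record field each
+%%% tuple sets:
+%%%
+%%% [{http_timeout,10000}, % #client_settings.timeout
+%%% {http_autoredirect,true}, % #client_settings.autoredirect
+%%% {http_pipelinesize,1}, % #client_settings.max_quelength
+%%% {http_sessions,10}] % #client_settings.max_sessions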
+
+
+create_headers([],Req) ->
+ Req;
+create_headers([{Key,Val}|Rest],Req) ->
+ case httpd_util:to_lower(Key) of
+ "expect" ->
+ create_headers(Rest,Req#req_headers{expect=Val});
+ OtherKey ->
+ create_headers(Rest,
+ Req#req_headers{other=[{OtherKey,Val}|
+ Req#req_headers.other]})
+ end.
+
+
+pp_headers(#res_headers{connection=Connection,
+ transfer_encoding=Transfer_encoding,
+ retry_after=Retry_after,
+ content_length=Content_length,
+ content_type=Content_type,
+ location=Location,
+ other=Other}) ->
+ H1=case Connection of
+ undefined -> [];
+ _ -> [{'Connection',Connection}]
+ end,
+ H2=case Transfer_encoding of
+ undefined -> [];
+ _ -> [{'Transfer-Encoding',Transfer_encoding}]
+ end,
+ H3=case Retry_after of
+ undefined -> [];
+ _ -> [{'Retry-After',Retry_after}]
+ end,
+ H4=case Location of
+ undefined -> [];
+ _ -> [{'Location',Location}]
+ end,
+ HCL=case Content_length of
+ "0" -> [];
+ _ -> [{'Content-Length',Content_length}]
+ end,
+ HCT=case Content_type of
+ undefined -> [];
+ _ -> [{'Content-Type',Content_type}]
+ end,
+ H1++H2++H3++H4++HCL++HCT++Other.
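+
+%%% For illustration (made-up field values): pp_headers/1 skips undefined
+%%% fields and a "0" content length, so
+%%%
+%%% pp_headers(#res_headers{content_length="1024",
+%%% content_type="text/html",
+%%% other=[{"Server","inets"}]})
+%%%
+%%% gives [{'Content-Length',"1024"},{'Content-Type',"text/html"},
+%%% {"Server","inets"}].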
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.hrl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.hrl
new file mode 100644
index 0000000000..6904a2379f
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.hrl
@@ -0,0 +1,127 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Mobile Arts AB
+%% Portions created by Mobile Arts are Copyright 2002, Mobile Arts AB
+%% All Rights Reserved.''
+%%
+%%
+
+-define(HTTP_REQUEST_TIMEOUT, 5000).
+-define(PIPELINE_LENGTH,3).
+-define(OPEN_SESSIONS,400).
+
+
+%%% FIXME! It should probably be possible to define these values via
+%%% user settings
+-define(MAX_REDIRECTS, 4).
+
+
+%%% Note that if not persistent, the connection can be closed immediately on a
+%%% response, because new requests are not sent to this connection process.
+%%% address, % ({Host,Port}) Destination Host and Port
+-record(session,{
+ id, % (int) Session Id identifies session in http_manager
+ clientclose, % (bool) true if client requested "close" connection
+ scheme, % (atom) http (HTTP/TCP) or https (TCP/SSL/TCP)
+ socket, % (socket) Open socket, used by connection
+ pipeline=[], % (list) Sent requests, not yet taken care of by the
+ % associated http_responder.
+ quelength=1, % (int) Current length of pipeline (1 when created)
+ max_quelength% (int) Max pipeline length
+ }).
+
+%%% [{Pid,RequestQue,QueLength},...] list where
+%%% - RequestQue (implemented with a list) contains sent requests that
+%%% have not yet received a response (pipelined) AND are not currently
+%%% handled (awaiting data) by the session process.
+%%% - QueLength is the length of this queue.
+
+%%% Response headers
+-record(res_headers,{
+%%% --- Standard "General" headers
+% cache_control,
+ connection,
+% date,
+% pragma,
+% trailer,
+ transfer_encoding,
+% upgrade,
+% via,
+% warning,
+%%% --- Standard "Request" headers
+% accept_ranges,
+% age,
+% etag,
+ location,
+% proxy_authenticate,
+ retry_after,
+% server,
+% vary,
+% www_authenticate,
+%%% --- Standard "Entity" headers
+% allow,
+% content_encoding,
+% content_language,
+ content_length="0",
+% content_location,
+% content_md5,
+% content_range,
+ content_type,
+% expires,
+% last_modified,
+ other=[] % (list) Key/Value list with other headers
+ }).
+
+%%% All data associated to a specific HTTP request
+-record(request,{
+ id, % (int) Request Id
+ ref, % Caller specific
+ from, % (pid) Caller
+ redircount=0,% (int) Number of redirects made for this request
+ scheme, % (http|https) (HTTP/TCP) or (TCP/SSL/TCP) connection
+ address, % ({Host,Port}) Destination Host and Port
+ pathquery, % (string) Rest of parsed URL
+ method, % (atom) HTTP request Method
+ headers, % (list) Key/Value list with Headers
+ content, % ({ContentType,Body}) Current HTTP request
+ settings % (#client_settings{}) User defined settings
+ }).
+
+-record(response,{
+ scheme, % (atom) http (HTTP/TCP) or https (TCP/SSL/TCP)
+ socket, % (socket) Open socket, used by connection
+ status,
+ http_version,
+ headers=#res_headers{},
+ body = <<>>
+ }).
+
+
+
+
+%%% HTTP Client settings
+-record(client_settings,{
+ timeout=?HTTP_REQUEST_TIMEOUT,
+ % (int) Milliseconds before a request times out
+ useproxy=false, % (bool) True if the proxy should be used
+ proxy=undefined, % (tuple) Parsed Proxy URL
+ noproxylist=[], % (list) List with hosts not requiring proxy
+ autoredirect=true, % (bool) True if automatic redirection on 30X
+ % responses.
+ max_sessions=?OPEN_SESSIONS,% (int) Max open sessions for any Adr,Port
+ max_quelength=?PIPELINE_LENGTH, % (int) Max pipeline length
+% ssl=[{certfile,"/jb/server_root/ssl/ssl_client.pem"},
+% {keyfile,"/jb/server_root/ssl/ssl_client.pem"},
+% {verify,0}]
+ ssl=false % (list) SSL settings. A non-empty list enables SSL/TLS
+ % support in the HTTP client
+ }).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl
new file mode 100644
index 0000000000..4f6c43710b
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl
@@ -0,0 +1,745 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Mobile Arts AB
+%% Portions created by Mobile Arts are Copyright 2002, Mobile Arts AB
+%% All Rights Reserved.''
+%%
+%%
+%%% File : http_lib.erl
+%%% Author : Johan Blom <[email protected]>
+%%% Description : Generic, HTTP specific helper functions
+%%% Created : 4 Mar 2002 by Johan Blom
+
+%%% TODO
+%%% - Check if I need to do anything special when parsing
+%%% "Content-Type:multipart/form-data"
+
+-module(http_lib).
+-author("[email protected]").
+
+-include("http.hrl").
+-include("jnets_httpd.hrl").
+
+-export([connection_close/1,
+ accept/3,deliver/3,recv/4,recv0/3,
+ connect/1,send/3,close/2,controlling_process/3,setopts/3,
+ getParameterValue/2,
+% get_var/2,
+ create_request_line/3]).
+
+-export([read_client_headers/2,read_server_headers/2,
+ get_auth_data/1,create_header_list/1,
+ read_client_body/2,read_client_multipartrange_body/3,
+ read_server_body/2]).
+
+
+%%% Server response:
+%%% Check "Connection" header if server requests session to be closed.
+%%% No 'close' means returns false
+%%% Client Request:
+%%% Check if 'close' in request headers
+%%% Only care about HTTP 1.1 clients!
+connection_close(Headers) when record(Headers,req_headers) ->
+ case Headers#req_headers.connection of
+ "close" ->
+ true;
+ "keep-alive" ->
+ false;
+ Value when list(Value) ->
+ true;
+ _ ->
+ false
+ end;
+connection_close(Headers) when record(Headers,res_headers) ->
+ case Headers#res_headers.connection of
+ "close" ->
+ true;
+ "keep-alive" ->
+ false;
+ Value when list(Value) ->
+ true;
+ _ ->
+ false
+ end.
+
+
+%% =============================================================================
+%%% Debugging:
+
+% format_time(TS) ->
+% {_,_,MicroSecs}=TS,
+% {{Y,Mon,D},{H,M,S}}=calendar:now_to_universal_time(TS),
+% lists:flatten(io_lib:format("~4.4.0w-~2.2.0w-~2.2.0w,~2.2.0w:~2.2.0w:~6.3.0f",
+% [Y,Mon,D,H,M,S+(MicroSecs/1000000)])).
+
+%% Time in milli seconds
+% t() ->
+% {A,B,C} = erlang:now(),
+% A*1000000000+B*1000+(C div 1000).
+
+% sz(L) when list(L) ->
+% length(L);
+% sz(B) when binary(B) ->
+% size(B);
+% sz(O) ->
+% {unknown_size,O}.
+
+
+%% =============================================================================
+
+getHeaderValue(_Attr,[]) ->
+ [];
+getHeaderValue(Attr,[{Attr,Value}|_Rest]) ->
+ Value;
+getHeaderValue(Attr,[_|Rest]) ->
+ getHeaderValue(Attr,Rest).
+
+getParameterValue(_Attr,undefined) ->
+ undefined;
+getParameterValue(Attr,List) ->
+ case lists:keysearch(Attr,1,List) of
+ {value,{Attr,Val}} ->
+ Val;
+ _ ->
+ undefined
+ end.
+
+create_request_line(Method,Path,{Major,Minor}) ->
+ [atom_to_list(Method)," ",Path,
+ " HTTP/",integer_to_list(Major),".",integer_to_list(Minor)];
+create_request_line(Method,Path,Minor) ->
+ [atom_to_list(Method)," ",Path," HTTP/1.",integer_to_list(Minor)].
+
+
+%%% ============================================================================
+read_client_headers(Info,Timeout) ->
+ Headers=read_response_h(Info#response.scheme,Info#response.socket,Timeout,
+ Info#response.headers),
+ Info#response{headers=Headers}.
+
+read_server_headers(Info,Timeout) ->
+ Headers=read_request_h(Info#mod.socket_type,Info#mod.socket,Timeout,
+ Info#mod.headers),
+ Info#mod{headers=Headers}.
+
+
+%% Parses the header of an HTTP request and returns a key,value tuple
+%% list containing the Name and Value of each header directive, as in:
+%%
+%% Content-Type: multipart/mixed -> {"Content-Type", "multipart/mixed"}
+%%
+%% But in http/1.1 the field names are case insensitive, so it must be
+%% Content-Type: multipart/mixed -> {"content-type", "multipart/mixed"}
+%% The standard furthermore says that leading and trailing white space
+%% is not a part of the field value and shall therefore be removed.
+read_request_h(SType,S,Timeout,H) ->
+ case recv0(SType,S,Timeout) of
+ {ok,{http_header,_,'Connection',_,Value}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{connection=Value});
+ {ok,{http_header,_,'Content-Type',_,Val}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{content_type=Val});
+ {ok,{http_header,_,'Host',_,Value}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{host=Value});
+ {ok,{http_header,_,'Content-Length',_,Value}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{content_length=Value});
+% {ok,{http_header,_,'Expect',_,Value}} -> % FIXME! Update inet_drv.c!!
+% read_request_h(SType,S,Timeout,H#req_headers{expect=Value});
+ {ok,{http_header,_,'Transfer-Encoding',_,V}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{transfer_encoding=V});
+ {ok,{http_header,_,'Authorization',_,Value}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{authorization=Value});
+ {ok,{http_header,_,'User-Agent',_,Value}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{user_agent=Value});
+ {ok,{http_header,_,'Range',_,Value}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{range=Value});
+ {ok,{http_header,_,'If-Range',_,Value}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{if_range=Value});
+ {ok,{http_header,_,'If-Match',_,Value}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{if_match=Value});
+ {ok,{http_header,_,'If-None-Match',_,Value}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{if_none_match=Value});
+ {ok,{http_header,_,'If-Modified-Since',_,V}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{if_modified_since=V});
+ {ok,{http_header,_,'If-Unmodified-Since',_,V}} ->
+ read_request_h(SType,S,Timeout,H#req_headers{if_unmodified_since=V});
+ {ok,{http_header,_,K,_,V}} ->
+ read_request_h(SType,S,Timeout,
+ H#req_headers{other=H#req_headers.other++[{K,V}]});
+ {ok,http_eoh} ->
+ H;
+ {error, timeout} when SType==http ->
+ throw({error, session_local_timeout});
+ {error, etimedout} when SType==https ->
+ throw({error, session_local_timeout});
+ {error, Reason} when Reason==closed;Reason==enotconn ->
+ throw({error, session_remotely_closed});
+ {error, Reason} ->
+ throw({error,Reason})
+ end.
+
+
+read_response_h(SType,S,Timeout,H) ->
+ case recv0(SType,S,Timeout) of
+ {ok,{http_header,_,'Connection',_,Val}} ->
+ read_response_h(SType,S,Timeout,H#res_headers{connection=Val});
+ {ok,{http_header,_,'Content-Length',_,Val}} ->
+ read_response_h(SType,S,Timeout,H#res_headers{content_length=Val});
+ {ok,{http_header,_,'Content-Type',_,Val}} ->
+ read_response_h(SType,S,Timeout,H#res_headers{content_type=Val});
+ {ok,{http_header,_,'Transfer-Encoding',_,V}} ->
+ read_response_h(SType,S,Timeout,H#res_headers{transfer_encoding=V});
+ {ok,{http_header,_,'Location',_,V}} ->
+ read_response_h(SType,S,Timeout,H#res_headers{location=V});
+ {ok,{http_header,_,'Retry-After',_,V}} ->
+ read_response_h(SType,S,Timeout,H#res_headers{retry_after=V});
+ {ok,{http_header,_,K,_,V}} ->
+ read_response_h(SType,S,Timeout,
+ H#res_headers{other=H#res_headers.other++[{K,V}]});
+ {ok,http_eoh} ->
+ H;
+ {error, timeout} when SType==http ->
+ throw({error, session_local_timeout});
+ {error, etimedout} when SType==https ->
+ throw({error, session_local_timeout});
+ {error, Reason} when Reason==closed;Reason==enotconn ->
+ throw({error, session_remotely_closed});
+ {error, Reason} ->
+ throw({error,Reason})
+ end.
+
+
+%%% Got the headers, and maybe a part of the body, now read in the rest
+%%% Note:
+%%% - No need to check for Expect header if client
+%%% - Currently no support for setting MaxHeaderSize in client, set to
+%%% unlimited.
+%%% - Move to raw packet mode as we are finished with HTTP parsing
+read_client_body(Info,Timeout) ->
+ Headers=Info#response.headers,
+ case Headers#res_headers.transfer_encoding of
+ "chunked" ->
+ ?DEBUG("read_entity_body2()->"
+ "Transfer-encoding:Chunked Data:",[]),
+ read_client_chunked_body(Info,Timeout,?MAXBODYSIZE);
+ Encoding when list(Encoding) ->
+ ?DEBUG("read_entity_body2()->"
+ "Transfer-encoding:Unknown",[]),
+ throw({error,unknown_coding});
+ _ ->
+ ContLen=list_to_integer(Headers#res_headers.content_length),
+ if
+ ContLen>?MAXBODYSIZE ->
+ throw({error,body_too_big});
+ true ->
+ ?DEBUG("read_entity_body2()->"
+ "Transfer-encoding:none ",[]),
+ Info#response{body=read_plain_body(Info#response.scheme,
+ Info#response.socket,
+ ContLen,
+ Info#response.body,
+ Timeout)}
+ end
+ end.
+
+
+%%% ----------------------------------------------------------------------
+read_server_body(Info,Timeout) ->
+ MaxBodySz=httpd_util:lookup(Info#mod.config_db,max_body_size,?MAXBODYSIZE),
+ ContLen=list_to_integer((Info#mod.headers)#req_headers.content_length),
+ %% ?vtrace("ContentLength: ~p", [ContLen]),
+ if
+ integer(ContLen),integer(MaxBodySz),ContLen>MaxBodySz ->
+ throw({error,body_too_big});
+ true ->
+ read_server_body2(Info,Timeout,ContLen,MaxBodySz)
+ end.
+
+
+%%----------------------------------------------------------------------
+%% Control if the body is transfer encoded, if so decode it.
+%% Note:
+%% - MaxBodySz has an integer value or 'nolimit'
+%% - ContLen has an integer value or 'undefined'
+%% All applications MUST be able to receive and decode the "chunked"
+%% transfer-coding, see RFC 2616 Section 3.6.1
+read_server_body2(Info,Timeout,ContLen,MaxBodySz) ->
+ ?DEBUG("read_entity_body2()->Max: ~p ~nLength:~p ~nSocket: ~p ~n",
+ [MaxBodySz,ContLen,Info#mod.socket]),
+ case (Info#mod.headers)#req_headers.transfer_encoding of
+ "chunked" ->
+ ?DEBUG("read_entity_body2()->"
+ "Transfer-encoding:Chunked Data:",[]),
+ read_server_chunked_body(Info,Timeout,MaxBodySz);
+ Encoding when list(Encoding) ->
+ ?DEBUG("read_entity_body2()->"
+ "Transfer-encoding:Unknown",[]),
+ httpd_response:send_status(Info,501,"Unknown Transfer-Encoding"),
+ http_lib:close(Info#mod.socket_type,Info#mod.socket),
+ throw({error,{status_sent,"Unknown Transfer-Encoding "++Encoding}});
+ _ when integer(ContLen),integer(MaxBodySz),ContLen>MaxBodySz ->
+ throw({error,body_too_big});
+ _ when integer(ContLen) ->
+ ?DEBUG("read_entity_body2()->"
+ "Transfer-encoding:none ",[]),
+ Info#mod{entity_body=read_plain_body(Info#mod.socket_type,
+ Info#mod.socket,
+ ContLen,Info#mod.entity_body,
+ Timeout)}
+ end.
+
+
+%%% ----------------------------------------------------------------------------
+%%% The body was plain, just read it from the socket.
+read_plain_body(_SocketType,Socket,0,Cont,_Timeout) ->
+ Cont;
+read_plain_body(SocketType,Socket,ContLen,Cont,Timeout) ->
+ Body=read_more_data(SocketType,Socket,ContLen,Timeout),
+ <<Cont/binary,Body/binary>>.
+
+%%% ----------------------------------------------------------------------------
+%%% The body was chunked, decode it.
+%%% From RFC2616, Section 3.6.1
+%% Chunked-Body = *chunk
+%% last-chunk
+%% trailer
+%% CRLF
+%%
+%% chunk = chunk-size [ chunk-extension ] CRLF
+%% chunk-data CRLF
+%% chunk-size = 1*HEX
+%% last-chunk = 1*("0") [ chunk-extension ] CRLF
+%%
+%% chunk-extension= *( ";" chunk-ext-name [ "=" chunk-ext-val ] )
+%% chunk-ext-name = token
+%% chunk-ext-val = token | quoted-string
+%% chunk-data = chunk-size(OCTET)
+%% trailer = *(entity-header CRLF)
+%%
+%%% "All applications MUST ignore chunk-extension extensions they do not
+%%% understand.", see RFC 2616 Section 3.6.1
+%%% We don't understand any extension...
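+%%%
+%%% A worked example (illustrative bytes only): the raw stream
+%%% "5\r\nHello\r\n0\r\n\r\n"
+%%% is read as chunk-size 5, the chunk-data <<"Hello">> followed by CRLF,
+%%% then the last-chunk "0" and an empty trailer ending in CRLF; the decoded
+%%% body is <<"Hello">> and no trailer headers are added.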
+read_client_chunked_body(Info,Timeout,MaxChunkSz) ->
+ case read_chunk(Info#response.scheme,Info#response.socket,
+ Timeout,0,MaxChunkSz) of
+ {last_chunk,_ExtensionList} -> % Ignore extension
+ TrailH=read_headers_old(Info#response.scheme,Info#response.socket,
+ Timeout),
+ H=Info#response.headers,
+ OtherHeaders=H#res_headers.other++TrailH,
+ Info#response{headers=H#res_headers{other=OtherHeaders}};
+ {Chunk,ChunkSize,_ExtensionList} -> % Ignore extension
+ Info1=Info#response{body= <<(Info#response.body)/binary,
+ Chunk/binary>>},
+ read_client_chunked_body(Info1,Timeout,MaxChunkSz-ChunkSize);
+ {error,Reason} ->
+ throw({error,Reason})
+ end.
+
+
+read_server_chunked_body(Info,Timeout,MaxChunkSz) ->
+ case read_chunk(Info#mod.socket_type,Info#mod.socket,
+ Timeout,0,MaxChunkSz) of
+ {last_chunk,_ExtensionList} -> % Ignore extension
+ TrailH=read_headers_old(Info#mod.socket_type,Info#mod.socket,
+ Timeout),
+ H=Info#mod.headers,
+ OtherHeaders=H#req_headers.other++TrailH,
+ Info#mod{headers=H#req_headers{other=OtherHeaders}};
+ {Chunk,ChunkSize,_ExtensionList} -> % Ignore extension
+ Info1=Info#mod{entity_body= <<(Info#mod.entity_body)/binary,
+ Chunk/binary>>},
+ read_server_chunked_body(Info1,Timeout,MaxChunkSz-ChunkSize);
+ {error,Reason} ->
+ throw({error,Reason})
+ end.
+
+
+read_chunk(Scheme,Socket,Timeout,Int,MaxChunkSz) when MaxChunkSz>Int ->
+ case read_more_data(Scheme,Socket,1,Timeout) of
+ <<C>> when $0=<C,C=<$9 ->
+ read_chunk(Scheme,Socket,Timeout,16*Int+(C-$0),MaxChunkSz);
+ <<C>> when $a=<C,C=<$f ->
+ read_chunk(Scheme,Socket,Timeout,16*Int+10+(C-$a),MaxChunkSz);
+ <<C>> when $A=<C,C=<$F ->
+ read_chunk(Scheme,Socket,Timeout,16*Int+10+(C-$A),MaxChunkSz);
+ <<$;>> when Int>0 ->
+ ExtensionList=read_chunk_ext_name(Scheme,Socket,Timeout,[],[]),
+ read_chunk_data(Scheme,Socket,Int+1,ExtensionList,Timeout);
+ <<$;>> when Int==0 ->
+ ExtensionList=read_chunk_ext_name(Scheme,Socket,Timeout,[],[]),
+ read_data_lf(Scheme,Socket,Timeout),
+ {last_chunk,ExtensionList};
+ <<?CR>> when Int>0 ->
+ read_chunk_data(Scheme,Socket,Int+1,[],Timeout);
+ <<?CR>> when Int==0 ->
+ read_data_lf(Scheme,Socket,Timeout),
+ {last_chunk,[]};
+ <<C>> when C==$ -> % Some servers (e.g., Apache 1.3.6) throw in
+ % additional whitespace...
+ read_chunk(Scheme,Socket,Timeout,Int,MaxChunkSz);
+ _Other ->
+ {error,unexpected_chunkdata}
+ end;
+read_chunk(_Scheme,_Socket,_Timeout,_Int,_MaxChunkSz) ->
+ {error,body_too_big}.
+
+
+%%% Note:
+%%% - Got the initial ?CR already!
+%%% - Bit syntax does not allow matching of ?CR,?LF at the end of the first read
+read_chunk_data(Scheme,Socket,Int,ExtensionList,Timeout) ->
+ case read_more_data(Scheme,Socket,Int,Timeout) of
+ <<?LF,Chunk/binary>> ->
+ case read_more_data(Scheme,Socket,2,Timeout) of
+ <<?CR,?LF>> ->
+ {Chunk,size(Chunk),ExtensionList};
+ _ ->
+ {error,bad_chunkdata}
+ end;
+ _ ->
+ {error,bad_chunkdata}
+ end.
+
+read_chunk_ext_name(Scheme,Socket,Timeout,Name,Acc) ->
+ Len=length(Name),
+ case read_more_data(Scheme,Socket,1,Timeout) of
+ $= when Len>0 ->
+ read_chunk_ext_val(Scheme,Socket,Timeout,Name,[],Acc);
+ $; when Len>0 ->
+ read_chunk_ext_name(Scheme,Socket,Timeout,[],
+ [{lists:reverse(Name),""}|Acc]);
+ ?CR when Len>0 ->
+ lists:reverse([{lists:reverse(Name,"")}|Acc]);
+ Token -> % FIXME Check that it is "token"
+ read_chunk_ext_name(Scheme,Socket,Timeout,[Token|Name],Acc);
+ _ ->
+ {error,bad_chunk_extension_name}
+ end.
+
+read_chunk_ext_val(Scheme,Socket,Timeout,Name,Val,Acc) ->
+ Len=length(Val),
+ case read_more_data(Scheme,Socket,1,Timeout) of
+ $; when Len>0 ->
+ read_chunk_ext_name(Scheme,Socket,Timeout,[],
+ [{Name,lists:reverse(Val)}|Acc]);
+ ?CR when Len>0 ->
+ lists:reverse([{Name,lists:reverse(Val)}|Acc]);
+ Token -> % FIXME Check that it is "token" or "quoted-string"
+ read_chunk_ext_val(Scheme,Socket,Timeout,Name,[Token|Val],Acc);
+ _ ->
+ {error,bad_chunk_extension_value}
+ end.
+
+read_data_lf(Scheme,Socket,Timeout) ->
+ case read_more_data(Scheme,Socket,1,Timeout) of
+ ?LF ->
+ ok;
+ _ ->
+ {error,bad_chunkdata}
+ end.
+
+%%% ----------------------------------------------------------------------------
+%%% The body was "multipart/byteranges", decode it.
+%%% Example from RFC 2616, Appendix 19.2
+%%% HTTP/1.1 206 Partial Content
+%%% Date: Wed, 15 Nov 1995 06:25:24 GMT
+%%% Last-Modified: Wed, 15 Nov 1995 04:58:08 GMT
+%%% Content-type: multipart/byteranges; boundary=THIS_STRING_SEPARATES
+%%%
+%%% --THIS_STRING_SEPARATES
+%%% Content-type: application/pdf
+%%% Content-range: bytes 500-999/8000
+%%%
+%%% ...the first range...
+%%% --THIS_STRING_SEPARATES
+%%% Content-type: application/pdf
+%%% Content-range: bytes 7000-7999/8000
+%%%
+%%% ...the second range
+%%% --THIS_STRING_SEPARATES--
+%%%
+%%% Notes:
+%%%
+%%% 1) Additional CRLFs may precede the first boundary string in the
+%%% entity.
+%%% FIXME!!
+read_client_multipartrange_body(Info,Parstr,Timeout) ->
+ Boundary=get_boundary(Parstr),
+ scan_boundary(Info,Boundary),
+ Info#response{body=read_multipart_body(Info,Boundary,Timeout)}.
+
+read_multipart_body(Info,Boundary,Timeout) ->
+ Info.
+
+% Headers=read_headers_old(Info#response.scheme,Info#response.socket,Timeout),
+% H=Info#response.headers,
+% OtherHeaders=H#res_headers.other++TrailH,
+% Info#response{headers=H#res_headers{other=OtherHeaders}}.
+
+
+scan_boundary(Info,Boundary) ->
+ Info.
+
+
+get_boundary(Parstr) ->
+ case skip_lwsp(Parstr) of
+ [] ->
+ throw({error,missing_range_boundary_parameter});
+ Val ->
+ get_boundary2(string:tokens(Val, ";"))
+ end.
+
+get_boundary2([]) ->
+ undefined;
+get_boundary2([Param|Rest]) ->
+ case string:tokens(skip_lwsp(Param), "=") of
+ ["boundary"++Attribute,Value] ->
+ Value;
+ _ ->
+ get_boundary2(Rest)
+ end.
+
+
+%% skip space & tab
+skip_lwsp([$ | Cs]) -> skip_lwsp(Cs);
+skip_lwsp([$\t | Cs]) -> skip_lwsp(Cs);
+skip_lwsp(Cs) -> Cs.
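+
+%% For illustration (parameter string borrowed from the RFC example above):
+%% get_boundary("; boundary=THIS_STRING_SEPARATES") returns
+%% "THIS_STRING_SEPARATES". Note that the multipart body itself is not yet
+%% decoded, see read_multipart_body/3 above.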
+
+%%% ----------------------------------------------------------------------------
+
+%%% Read the incoming data from the open socket.
+read_more_data(http,Socket,Len,Timeout) ->
+ case gen_tcp:recv(Socket,Len,Timeout) of
+ {ok,Val} ->
+ Val;
+ {error, timeout} ->
+ throw({error, session_local_timeout});
+ {error, Reason} when Reason==closed;Reason==enotconn ->
+ throw({error, session_remotely_closed});
+ {error, Reason} ->
+% httpd_response:send_status(Info,400,none),
+ throw({error, Reason})
+ end;
+read_more_data(https,Socket,Len,Timeout) ->
+ case ssl:recv(Socket,Len,Timeout) of
+ {ok,Val} ->
+ Val;
+ {error, etimedout} ->
+ throw({error, session_local_timeout});
+ {error, Reason} when Reason==closed;Reason==enotconn ->
+ throw({error, session_remotely_closed});
+ {error, Reason} ->
+% httpd_response:send_status(Info,400,none),
+ throw({error, Reason})
+ end.
+
+
+%% =============================================================================
+%%% Socket handling
+
+accept(http,ListenSocket, Timeout) ->
+ gen_tcp:accept(ListenSocket, Timeout);
+accept(https,ListenSocket, Timeout) ->
+ ssl:accept(ListenSocket, Timeout).
+
+
+close(http,Socket) ->
+ gen_tcp:close(Socket);
+close(https,Socket) ->
+ ssl:close(Socket).
+
+
+connect(#request{scheme=http,settings=Settings,address=Addr}) ->
+ case proxyusage(Addr,Settings) of
+ {error,Reason} ->
+ {error,Reason};
+ {Host,Port} ->
+ Opts=[binary,{active,false},{reuseaddr,true}],
+ gen_tcp:connect(Host,Port,Opts)
+ end;
+connect(#request{scheme=https,settings=Settings,address=Addr}) ->
+ case proxyusage(Addr,Settings) of
+ {error,Reason} ->
+ {error,Reason};
+ {Host,Port} ->
+ Opts=case Settings#client_settings.ssl of
+ false ->
+ [binary,{active,false}];
+ SSLSettings ->
+ [binary,{active,false}]++SSLSettings
+ end,
+ ssl:connect(Host,Port,Opts)
+ end.
+
+
+%%% Check to see if the given {Host,Port} tuple is in the NoProxyList
+%%% Returns a possibly updated {Host,Port} tuple: the proxy address when a
+%%% proxy is to be used, otherwise the original tuple
+proxyusage(HostPort,Settings) ->
+ case Settings#client_settings.useproxy of
+ true ->
+ case noProxy(HostPort,Settings#client_settings.noproxylist) of
+ true ->
+ HostPort;
+ _ ->
+ case Settings#client_settings.proxy of
+ undefined ->
+ {error,no_proxy_defined};
+ ProxyHostPort ->
+ ProxyHostPort
+ end
+ end;
+ _ ->
+ HostPort
+ end.
+
+noProxy(_HostPort,[]) ->
+ false;
+noProxy({Host,Port},[{Host,Port}|Rest]) ->
+ true;
+noProxy(HostPort,[_|Rest]) ->
+ noProxy(HostPort,Rest).
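+
+%% For illustration (made-up addresses): with useproxy=true,
+%% proxy={"proxy.example",8080} and noproxylist=[{"localhost",8080}],
+%% proxyusage({"localhost",8080},Settings) returns {"localhost",8080}
+%% (direct connection), while proxyusage({"www.example",80},Settings)
+%% returns {"proxy.example",8080}.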
+
+
+controlling_process(http,Socket,Pid) ->
+ gen_tcp:controlling_process(Socket,Pid);
+controlling_process(https,Socket,Pid) ->
+ ssl:controlling_process(Socket,Pid).
+
+
+deliver(SocketType, Socket, Message) ->
+ case send(SocketType, Socket, Message) of
+ {error, einval} ->
+ close(SocketType, Socket),
+ socket_closed;
+ {error, _Reason} ->
+% ?vlog("deliver(~p) failed for reason:"
+% "~n Reason: ~p",[SocketType,_Reason]),
+ close(SocketType, Socket),
+ socket_closed;
+ _Other ->
+ ok
+ end.
+
+
+recv0(http,Socket,Timeout) ->
+ gen_tcp:recv(Socket,0,Timeout);
+recv0(https,Socket,Timeout) ->
+ ssl:recv(Socket,0,Timeout).
+
+recv(http,Socket,Len,Timeout) ->
+ gen_tcp:recv(Socket,Len,Timeout);
+recv(https,Socket,Len,Timeout) ->
+ ssl:recv(Socket,Len,Timeout).
+
+
+setopts(http,Socket,Options) ->
+ inet:setopts(Socket,Options);
+setopts(https,Socket,Options) ->
+ ssl:setopts(Socket,Options).
+
+
+send(http,Socket,Message) ->
+ gen_tcp:send(Socket,Message);
+send(https,Socket,Message) ->
+ ssl:send(Socket,Message).
+
+
+%%% ============================================================================
+%%% HTTP Server only
+
+%%% Returns the authentication data from the HTTP request
+get_auth_data("Basic "++EncodedString) ->
+ UnCodedString=httpd_util:decode_base64(EncodedString),
+ case catch string:tokens(UnCodedString,":") of
+ [User,PassWord] ->
+ {User,PassWord};
+ {error,Error}->
+ {error,Error}
+ end;
+get_auth_data(BadCredentials) when list(BadCredentials) ->
+ {error,BadCredentials};
+get_auth_data(_) ->
+ {error,nouser}.
+
+
+create_header_list(H) ->
+ lookup(connection,H#req_headers.connection)++
+ lookup(host,H#req_headers.host)++
+ lookup(content_length,H#req_headers.content_length)++
+ lookup(transfer_encoding,H#req_headers.transfer_encoding)++
+ lookup(authorization,H#req_headers.authorization)++
+ lookup(user_agent,H#req_headers.user_agent)++
+ lookup(user_agent,H#req_headers.range)++
+ lookup(user_agent,H#req_headers.if_range)++
+ lookup(user_agent,H#req_headers.if_match)++
+ lookup(user_agent,H#req_headers.if_none_match)++
+ lookup(user_agent,H#req_headers.if_modified_since)++
+ lookup(user_agent,H#req_headers.if_unmodified_since)++
+ H#req_headers.other.
+
+lookup(_Key,undefined) ->
+ [];
+lookup(Key,Val) ->
+ [{Key,Val}].
+
+
+
+%%% ============================================================================
+%%% This code is for parsing trailer headers in chunked messages.
+%%% Will be deprecated whenever I have found an alternative working solution!
+%%% Note:
+%%% - The header names are returned slightly differently from what
+%%% inet_drv returns
+read_headers_old(Scheme,Socket,Timeout) ->
+ read_headers_old(<<>>,Scheme,Socket,Timeout,[],[]).
+
+read_headers_old(<<>>,Scheme,Socket,Timeout,Acc,AccHdrs) ->
+ read_headers_old(read_more_data(Scheme,Socket,1,Timeout),
+ Scheme,Socket,Timeout,Acc,AccHdrs);
+read_headers_old(<<$\r>>,Scheme,Socket,Timeout,Acc,AccHdrs) ->
+ read_headers_old(<<$\r,(read_more_data(Scheme,Socket,1,Timeout))/binary>>,
+ Scheme,Socket,Timeout,Acc,AccHdrs);
+read_headers_old(<<$\r,$\n>>,Scheme,Socket,Timeout,Acc,AccHdrs) ->
+ if
+ Acc==[] -> % Done!
+ tagup_header(lists:reverse(AccHdrs));
+ true ->
+ read_headers_old(read_more_data(Scheme,Socket,1,Timeout),
+ Scheme,Socket,
+ Timeout,[],[lists:reverse(Acc)|AccHdrs])
+ end;
+read_headers_old(<<C>>,Scheme,Socket,Timeout,Acc,AccHdrs) ->
+ read_headers_old(read_more_data(Scheme,Socket,1,Timeout),
+ Scheme,Socket,Timeout,[C|Acc],AccHdrs);
+read_headers_old(Bin,_Scheme,_Socket,_Timeout,_Acc,_AccHdrs) ->
+ io:format("ERROR: Unexpected data from inet driver: ~p",[Bin]),
+ throw({error,this_is_a_bug}).
+
+
+%% Parses the header of an HTTP request and returns a key,value tuple
+%% list containing the Name and Value of each header directive, as in:
+%%
+%% Content-Type: multipart/mixed -> {"Content-Type", "multipart/mixed"}
+%%
+%% But in http/1.1 the field names are case insensitive, so it must be
+%% Content-Type: multipart/mixed -> {"content-type", "multipart/mixed"}
+%% The standard furthermore says that leading and trailing white space
+%% is not a part of the field value and shall therefore be removed.
+tagup_header([]) -> [];
+tagup_header([Line|Rest]) -> [tag(Line, [])|tagup_header(Rest)].
+
+tag([], Tag) ->
+ {httpd_util:to_lower(lists:reverse(Tag)), ""};
+tag([$:|Rest], Tag) ->
+ {httpd_util:to_lower(lists:reverse(Tag)), httpd_util:strip(Rest)};
+tag([Chr|Rest], Tag) ->
+ tag(Rest, [Chr|Tag]).
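+
+%% For illustration (made-up header line): tag("Host: www.erlang.org",[])
+%% gives {"host","www.erlang.org"} (lower-cased name, value with surrounding
+%% whitespace removed), and tagup_header(["Host: www.erlang.org"]) gives
+%% [{"host","www.erlang.org"}].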
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_handler.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_handler.erl
new file mode 100644
index 0000000000..8e5e1c709a
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_handler.erl
@@ -0,0 +1,724 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Mobile Arts AB
+%% Portions created by Mobile Arts are Copyright 2002, Mobile Arts AB
+%% All Rights Reserved.''
+%%
+%%
+
+%%% TODO:
+%%% - If an error is returned when sending a request, don't use this
+%%% session anymore.
+%%% - Closing of sessions not properly implemented for some cases
+
+%%% File : httpc_handler.erl
+%%% Author : Johan Blom <[email protected]>
+%%% Description : Handles HTTP client responses, for a single TCP session
+%%% Created : 4 Mar 2002 by Johan Blom
+
+-module(httpc_handler).
+
+-include("http.hrl").
+-include("jnets_httpd.hrl").
+
+-export([init_connection/2,http_request/2]).
+
+%%% ==========================================================================
+%%% "Main" function in the spawned process for the session.
+init_connection(Req,Session) when record(Req,request) ->
+ case catch http_lib:connect(Req) of
+ {ok,Socket} ->
+ case catch http_request(Req,Socket) of
+ ok ->
+ case Session#session.clientclose of
+ true ->
+ ok;
+ false ->
+ httpc_manager:register_socket(Req#request.address,
+ Session#session.id,
+ Socket)
+ end,
+ next_response_with_request(Req,
+ Session#session{socket=Socket});
+ {error,Reason} -> % Not possible to use new session
+ gen_server:cast(Req#request.from,
+ {Req#request.ref,Req#request.id,{error,Reason}}),
+ exit_session_ok(Req#request.address,
+ Session#session{socket=Socket})
+ end;
+ {error,Reason} -> % Not possible to set up new session
+ gen_server:cast(Req#request.from,
+ {Req#request.ref,Req#request.id,{error,Reason}}),
+ exit_session_ok2(Req#request.address,
+ Session#session.clientclose,Session#session.id)
+ end.
+
+next_response_with_request(Req,Session) ->
+ Timeout=(Req#request.settings)#client_settings.timeout,
+ case catch read(Timeout,Session#session.scheme,Session#session.socket) of
+ {Status,Headers,Body} ->
+ NewReq=handle_response({Status,Headers,Body},Timeout,Req,Session),
+ next_response_with_request(NewReq,Session);
+ {error,Reason} ->
+ gen_server:cast(Req#request.from,
+ {Req#request.ref,Req#request.id,{error,Reason}}),
+ exit_session(Req#request.address,Session,aborted_request);
+ {'EXIT',Reason} ->
+ gen_server:cast(Req#request.from,
+ {Req#request.ref,Req#request.id,{error,Reason}}),
+ exit_session(Req#request.address,Session,aborted_request)
+ end.
+
+handle_response(Response,Timeout,Req,Session) ->
+ case http_response(Response,Req,Session) of
+ ok ->
+ next_response(Timeout,Req#request.address,Session);
+ stop ->
+ exit(normal);
+ {error,Reason} ->
+ gen_server:cast(Req#request.from,
+ {Req#request.ref,Req#request.id,{error,Reason}}),
+ exit_session(Req#request.address,Session,aborted_request)
+ end.
+
+
+
+%%% Wait for the next response until
+%%% - the session is closed by the other side
+%%% => set up a new session, if there are pending requests in the queue
+%%% - a "Connection:close" header is received
+%%% => close the connection (release socket), then
+%%% set up a new session, if there are pending requests in the queue
+%%%
+%%% Note:
+%%% - When invoked there are no pending responses on received requests.
+%%% - Never close the session explicitly, let it timeout instead!
+next_response(Timeout,Address,Session) ->
+ case httpc_manager:next_request(Address,Session#session.id) of
+ no_more_requests ->
+ %% There are no more pending responses, now just wait for
+ %% timeout or a new response.
+ case catch read(Timeout,
+ Session#session.scheme,Session#session.socket) of
+ {error,Reason} when Reason==session_remotely_closed;
+ Reason==session_local_timeout ->
+ exit_session_ok(Address,Session);
+ {error,Reason} ->
+ exit_session(Address,Session,aborted_request);
+ {'EXIT',Reason} ->
+ exit_session(Address,Session,aborted_request);
+ {Status2,Headers2,Body2} ->
+ case httpc_manager:next_request(Address,
+ Session#session.id) of
+ no_more_requests -> % Should not happen!
+ exit_session(Address,Session,aborted_request);
+ {error,Reason} -> % Should not happen!
+ exit_session(Address,Session,aborted_request);
+ NewReq ->
+ handle_response({Status2,Headers2,Body2},
+ Timeout,NewReq,Session)
+ end
+ end;
+ {error,Reason} -> % The connection has been closed by httpc_manager
+ exit_session(Address,Session,aborted_request);
+ NewReq ->
+ NewReq
+ end.
+
+%% ===========================================================================
+%% Internals
+
+%%% Read in and parse response data from the socket
+read(Timeout,SockType,Socket) ->
+ Info=#response{scheme=SockType,socket=Socket},
+ http_lib:setopts(SockType,Socket,[{packet, http}]),
+ Info1=read_response(SockType,Socket,Info,Timeout),
+ http_lib:setopts(SockType,Socket,[binary,{packet, raw}]),
+ case (Info1#response.headers)#res_headers.content_type of
+ "multipart/byteranges"++Param ->
+ range_response_body(Info1,Timeout,Param);
+ _ ->
+ #response{status=Status2,headers=Headers2,body=Body2}=
+ http_lib:read_client_body(Info1,Timeout),
+ {Status2,Headers2,Body2}
+ end.
+
+
+%%% From RFC 2616:
+%%% Status-Line = HTTP-Version SP Status-Code SP Reason-Phrase CRLF
+%%% HTTP-Version = "HTTP" "/" 1*DIGIT "." 1*DIGIT
+%%% Status-Code = 3DIGIT
+%%% Reason-Phrase = *<TEXT, excluding CR, LF>
+read_response(SockType,Socket,Info,Timeout) ->
+ case http_lib:recv0(SockType,Socket,Timeout) of
+ {ok,{http_response,{1,VerMin}, Status, _Phrase}} when VerMin==0;
+ VerMin==1 ->
+ Info1=Info#response{status=Status,http_version=VerMin},
+ http_lib:read_client_headers(Info1,Timeout);
+ {ok,{http_response,_Version, _Status, _Phrase}} ->
+ throw({error,bad_status_line});
+ {error, timeout} ->
+ throw({error,session_local_timeout});
+ {error, Reason} when Reason==closed;Reason==enotconn ->
+ throw({error,session_remotely_closed});
+ {error, Reason} ->
+ throw({error,Reason})
+ end.
+
+%%% From RFC 2616, Section 4.4, Page 34
+%% 4.If the message uses the media type "multipart/byteranges", and the
+%% transfer-length is not otherwise specified, then this self-
+%% delimiting media type defines the transfer-length. This media type
+%% MUST NOT be used unless the sender knows that the recipient can parse
+%% it; the presence in a request of a Range header with multiple byte-
+%% range specifiers from a 1.1 client implies that the client can parse
+%% multipart/byteranges responses.
+%%% FIXME !!
+range_response_body(Info,Timeout,Param) ->
+ Headers=Info#response.headers,
+ case {Headers#res_headers.content_length,
+ Headers#res_headers.transfer_encoding} of
+ {undefined,undefined} ->
+ #response{status=Status2,headers=Headers2,body=Body2}=
+ http_lib:read_client_multipartrange_body(Info,Param,Timeout),
+ {Status2,Headers2,Body2};
+ _ ->
+ #response{status=Status2,headers=Headers2,body=Body2}=
+ http_lib:read_client_body(Info,Timeout),
+ {Status2,Headers2,Body2}
+ end.
+
+
+%%% ----------------------------------------------------------------------------
+%%% Host: field is required when addressing multi-homed sites ...
+%%% It must not be present when the request is being made to a proxy.
+http_request(#request{method=Method,id=Id,
+ scheme=Scheme,address={Host,Port},pathquery=PathQuery,
+ headers=Headers, content={ContentType,Body},
+ settings=Settings},
+ Socket) ->
+ PostData=
+ if
+ Method==post;Method==put ->
+ case Headers#req_headers.expect of
+ "100-continue" ->
+ content_type_header(ContentType) ++
+ content_length_header(length(Body)) ++
+ "\r\n";
+ _ ->
+ content_type_header(ContentType) ++
+ content_length_header(length(Body)) ++
+ "\r\n" ++ Body
+ end;
+ true ->
+ "\r\n"
+ end,
+ Message=
+ case useProxy(Settings#client_settings.useproxy,
+ {Scheme,Host,Port,PathQuery}) of
+ false ->
+ method(Method)++" "++PathQuery++" HTTP/1.1\r\n"++
+ host_header(Host)++te_header()++
+ headers(Headers) ++ PostData;
+ AbsURI ->
+ method(Method)++" "++AbsURI++" HTTP/1.1\r\n"++
+ te_header()++
+ headers(Headers)++PostData
+ end,
+ http_lib:send(Scheme,Socket,Message).
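+
+%%% For illustration (made-up host and path): for a GET request with empty
+%%% #req_headers{} and no proxy, the Message built above is
+%%%
+%%% "GET /index.html HTTP/1.1\r\n"
+%%% "Host: www.example.com\r\n"
+%%% "TE: \r\n"
+%%% "\r\n"
+%%%
+%%% i.e. the request line, Host and TE headers, and an empty line ending
+%%% the body-less request.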
+
+useProxy(false,_) ->
+ false;
+useProxy(true,{Scheme,Host,Port,PathQuery}) ->
+ [atom_to_list(Scheme),"://",Host,":",integer_to_list(Port),PathQuery].
+
+
+
+headers(#req_headers{expect=Expect,
+ other=Other}) ->
+ H1=case Expect of
+ undefined ->[];
+ _ -> "Expect: "++Expect++"\r\n"
+ end,
+ H1++headers_other(Other).
+
+
+headers_other([]) ->
+ [];
+headers_other([{Key,Value}|Rest]) when atom(Key) ->
+ Head = atom_to_list(Key)++": "++Value++"\r\n",
+ Head ++ headers_other(Rest);
+headers_other([{Key,Value}|Rest]) ->
+ Head = Key++": "++Value++"\r\n",
+ Head ++ headers_other(Rest).
+
+host_header(Host) ->
+ "Host: "++lists:concat([Host])++"\r\n".
+content_type_header(ContentType) ->
+ "Content-Type: " ++ ContentType ++ "\r\n".
+content_length_header(ContentLength) ->
+ "Content-Length: "++integer_to_list(ContentLength) ++ "\r\n".
+te_header() ->
+ "TE: \r\n".
+
+method(Method) ->
+ httpd_util:to_upper(atom_to_list(Method)).
+
+
+%%% ----------------------------------------------------------------------------
+http_response({Status,Headers,Body},Req,Session) ->
+ case Status of
+ 100 ->
+ status_continue(Req,Session);
+ 200 ->
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {Status,Headers,Body}}),
+ ServerClose=http_lib:connection_close(Headers),
+ handle_connection(Session#session.clientclose,ServerClose,
+ Req,Session);
+ 300 -> status_multiple_choices(Headers,Body,Req,Session);
+ 301 -> status_moved_permanently(Req#request.method,
+ Headers,Body,Req,Session);
+ 302 -> status_found(Headers,Body,Req,Session);
+ 303 -> status_see_other(Headers,Body,Req,Session);
+ 304 -> status_not_modified(Headers,Body,Req,Session);
+ 305 -> status_use_proxy(Headers,Body,Req,Session);
+ %% 306 This Status code is not used in HTTP 1.1
+ 307 -> status_temporary_redirect(Headers,Body,Req,Session);
+ 503 -> status_service_unavailable({Status,Headers,Body},Req,Session);
+ Status50x when Status50x==500;Status50x==501;Status50x==502;
+ Status50x==504;Status50x==505 ->
+ status_server_error_50x({Status,Headers,Body},Req,Session);
+ _ -> % FIXME May want to take some action on other Status codes as well
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {Status,Headers,Body}}),
+ ServerClose=http_lib:connection_close(Headers),
+ handle_connection(Session#session.clientclose,ServerClose,
+ Req,Session)
+ end.
+
+
+%%% Status code dependent functions.
+
+%%% Received a 100 Status code ("Continue")
+%%% From RFC2616
+%%% The client SHOULD continue with its request. This interim response is
+%%% used to inform the client that the initial part of the request has
+%%% been received and has not yet been rejected by the server. The client
+%%% SHOULD continue by sending the remainder of the request or, if the
+%%% request has already been completed, ignore this response. The server
+%%% MUST send a final response after the request has been completed. See
+%%% section 8.2.3 for detailed discussion of the use and handling of this
+%%% status code.
+status_continue(Req,Session) ->
+ {_,Body}=Req#request.content,
+ http_lib:send(Session#session.scheme,Session#session.socket,Body),
+ next_response_with_request(Req,Session).
+
+
+%%% Received a 300 Status code ("Multiple Choices")
+%%% The resource is located in any one of a set of locations
+%%% - If a 'Location' header is present (preserved server choice), use that
+%%% to automatically redirect to the given URL
+%%% - else if the Content-Type/Body both are non-empty let the user agent make
+%%% the choice and thus return a response with status 300
+%%% Note:
+%%% - If response to a HEAD request, the Content-Type/Body both should be empty.
+%%% - The behaviour on an empty Content-Type or Body is unspecified.
+%%% However, e.g. "Apache/1.3" servers returns both empty if the header
+%%% 'if-modified-since: Date' was sent in the request and the content is
+%%% "not modified" (instead of 304). Thus implicitly giving the cache as the
+%%% only choice.
+status_multiple_choices(Headers,Body,Req,Session)
+ when ((Req#request.settings)#client_settings.autoredirect)==true ->
+ ServerClose=http_lib:connection_close(Headers),
+ case Headers#res_headers.location of
+ undefined ->
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {300,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,
+ Req,Session);
+ RedirUrl ->
+ Scheme=Session#session.scheme,
+ case uri:parse(RedirUrl) of
+ {error,Reason} ->
+ {error,Reason};
+ {Scheme,Host,Port,PathQuery} -> % Automatic redirection
+ NewReq=Req#request{redircount=Req#request.redircount+1,
+ address={Host,Port},pathquery=PathQuery},
+ handle_redirect(Session#session.clientclose,ServerClose,
+ NewReq,Session)
+ end
+ end;
+status_multiple_choices(Headers,Body,Req,Session) ->
+ ServerClose=http_lib:connection_close(Headers),
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {300,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,Req,Session).
+
+
+%%% Received a 301 Status code ("Moved Permanently")
+%%% The resource has been assigned a new permanent URI
+%%% - If a 'Location' header is present, use that to automatically redirect to
+%%% the given URL if GET or HEAD request
+%%% - else return
+%%% Note:
+%%% - The Body should contain a short hypertext note with a hyperlink to the
+%%% new URI. Return this if the Content-Type is acceptable (some HTTP servers don't
+%%% deal properly with Accept headers)
+status_moved_permanently(Method,Headers,Body,Req,Session)
+ when (((Req#request.settings)#client_settings.autoredirect)==true) and
+ (Method==get) or (Method==head) ->
+ ServerClose=http_lib:connection_close(Headers),
+ case Headers#res_headers.location of
+ undefined ->
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {301,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,
+ Req,Session);
+ RedirUrl ->
+ Scheme=Session#session.scheme,
+ case uri:parse(RedirUrl) of
+ {error,Reason} ->
+ {error,Reason};
+ {Scheme,Host,Port,PathQuery} -> % Automatic redirection
+ NewReq=Req#request{redircount=Req#request.redircount+1,
+ address={Host,Port},pathquery=PathQuery},
+ handle_redirect(Session#session.clientclose,ServerClose,
+ NewReq,Session)
+ end
+ end;
+status_moved_permanently(_Method,Headers,Body,Req,Session) ->
+ ServerClose=http_lib:connection_close(Headers),
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {301,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,Req,Session).
+
+
+%%% Received a 302 Status code ("Found")
+%%% The requested resource resides temporarily under a different URI.
+%%% Note:
+%%% - Only cacheable if indicated by a Cache-Control or Expires header
+status_found(Headers,Body,Req,Session)
+ when ((Req#request.settings)#client_settings.autoredirect)==true ->
+ ServerClose=http_lib:connection_close(Headers),
+ case Headers#res_headers.location of
+ undefined ->
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {302,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,
+ Req,Session);
+ RedirUrl ->
+ Scheme=Session#session.scheme,
+ case uri:parse(RedirUrl) of
+ {error,Reason} ->
+ {error,Reason};
+ {Scheme,Host,Port,PathQuery} -> % Automatic redirection
+ NewReq=Req#request{redircount=Req#request.redircount+1,
+ address={Host,Port},pathquery=PathQuery},
+ handle_redirect(Session#session.clientclose,ServerClose,
+ NewReq,Session)
+ end
+ end;
+status_found(Headers,Body,Req,Session) ->
+ ServerClose=http_lib:connection_close(Headers),
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {302,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,Req,Session).
+
+%%% Received a 303 Status code ("See Other")
+%%% The response to the request can be found under a different URI and should
+%%% be retrieved using GET
+%%% Note:
+%%% - Must not be cached
+status_see_other(Headers,Body,Req,Session)
+ when ((Req#request.settings)#client_settings.autoredirect)==true ->
+ ServerClose=http_lib:connection_close(Headers),
+ case Headers#res_headers.location of
+ undefined ->
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {303,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,
+ Req,Session);
+ RedirUrl ->
+ Scheme=Session#session.scheme,
+ case uri:parse(RedirUrl) of
+ {error,Reason} ->
+ {error,Reason};
+ {Scheme,Host,Port,PathQuery} -> % Automatic redirection
+ NewReq=Req#request{redircount=Req#request.redircount+1,
+ method=get,
+ address={Host,Port},pathquery=PathQuery},
+ handle_redirect(Session#session.clientclose,ServerClose,
+ NewReq,Session)
+ end
+ end;
+status_see_other(Headers,Body,Req,Session) ->
+ ServerClose=http_lib:connection_close(Headers),
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {303,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,Req,Session).
+
+
+%%% Received a 304 Status code ("Not Modified")
+%%% Note:
+%%% - The response MUST NOT contain a body.
+%%% - The response MUST include the following header fields:
+%%% - Date, unless its omission is required
+%%% - ETag and/or Content-Location, if the header would have been sent
+%%% in a 200 response to the same request
+%%% - Expires, Cache-Control, and/or Vary, if the field-value might
+%%% differ from that sent in any previous response for the same
+%%% variant
+status_not_modified(Headers,Body,Req,Session)
+ when ((Req#request.settings)#client_settings.autoredirect)==true ->
+ ServerClose=http_lib:connection_close(Headers),
+ case Headers#res_headers.location of
+ undefined ->
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {304,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,
+ Req,Session);
+ RedirUrl ->
+ Scheme=Session#session.scheme,
+ case uri:parse(RedirUrl) of
+ {error,Reason} ->
+ {error,Reason};
+ {Scheme,Host,Port,PathQuery} -> % Automatic redirection
+ NewReq=Req#request{redircount=Req#request.redircount+1,
+ address={Host,Port},pathquery=PathQuery},
+ handle_redirect(Session#session.clientclose,ServerClose,
+ NewReq,Session)
+ end
+ end;
+status_not_modified(Headers,Body,Req,Session) ->
+ ServerClose=http_lib:connection_close(Headers),
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {304,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,Req,Session).
+
+
+
+%%% Received a 305 Status code ("Use Proxy")
+%%% The requested resource MUST be accessed through the proxy given by the
+%%% Location field
+status_use_proxy(Headers,Body,Req,Session)
+ when ((Req#request.settings)#client_settings.autoredirect)==true ->
+ ServerClose=http_lib:connection_close(Headers),
+ case Headers#res_headers.location of
+ undefined ->
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {305,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,
+ Req,Session);
+ RedirUrl ->
+ Scheme=Session#session.scheme,
+ case uri:parse(RedirUrl) of
+ {error,Reason} ->
+ {error,Reason};
+ {Scheme,Host,Port,PathQuery} -> % Automatic redirection
+ NewReq=Req#request{redircount=Req#request.redircount+1,
+ address={Host,Port},pathquery=PathQuery},
+ handle_redirect(Session#session.clientclose,ServerClose,
+ NewReq,Session)
+ end
+ end;
+status_use_proxy(Headers,Body,Req,Session) ->
+ ServerClose=http_lib:connection_close(Headers),
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {305,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,Req,Session).
+
+
+%%% Received a 307 Status code ("Temporary Redirect")
+status_temporary_redirect(Headers,Body,Req,Session)
+ when ((Req#request.settings)#client_settings.autoredirect)==true ->
+ ServerClose=http_lib:connection_close(Headers),
+ case Headers#res_headers.location of
+ undefined ->
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {307,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,
+ Req,Session);
+ RedirUrl ->
+ Scheme=Session#session.scheme,
+ case uri:parse(RedirUrl) of
+ {error,Reason} ->
+ {error,Reason};
+ {Scheme,Host,Port,PathQuery} -> % Automatic redirection
+ NewReq=Req#request{redircount=Req#request.redircount+1,
+ address={Host,Port},pathquery=PathQuery},
+ handle_redirect(Session#session.clientclose,ServerClose,
+ NewReq,Session)
+ end
+ end;
+status_temporary_redirect(Headers,Body,Req,Session) ->
+ ServerClose=http_lib:connection_close(Headers),
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {307,Headers,Body}}),
+ handle_connection(Session#session.clientclose,ServerClose,Req,Session).
+
+
+
+%%% Received a 503 Status code ("Service Unavailable")
+%%% The server is currently unable to handle the request due to a
+%%% temporary overloading or maintenance of the server. The implication
+%%% is that this is a temporary condition which will be alleviated after
+%%% some delay. If known, the length of the delay MAY be indicated in a
+%%% Retry-After header. If no Retry-After is given, the client SHOULD
+%%% handle the response as it would for a 500 response.
+%%% Note:
+%%% - This session is now considered busy, thus cancel any requests in the
+%%% pipeline and close the session.
+%%% FIXME! Implement a user option to automatically retry if the 'Retry-After'
+%%% header is given.
+status_service_unavailable(Resp,Req,Session) ->
+% RetryAfter=Headers#res_headers.retry_after,
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,Resp}),
+ close_session(server_connection_close,Req,Session).
+
+
+%%% Received a 50x Status code (~ "Service Error")
+%%% Response status codes beginning with the digit "5" indicate cases in
+%%% which the server is aware that it has erred or is incapable of
+%%% performing the request.
+status_server_error_50x(Resp,Req,Session) ->
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,Resp}),
+ close_session(server_connection_close,Req,Session).
+
+
+%%% Handles requests for redirects
+%%% The redirected request might be:
+%%% - FIXME! on another TCP session, another scheme
+%%% - on the same TCP session, same scheme
+%%% - on another TCP session, same scheme
+%%% However, in all cases treat it as a new request, with redircount updated.
+%%%
+%%% The redirect may fail, but this is not a reason to close this session.
+%%% Instead, return an error for this request and continue as normal.
+handle_redirect(ClientClose,ServerClose,Req,Session) ->
+ case httpc_manager:request(Req) of
+ {ok,_ReqId} -> % FIXME Should I perhaps reuse the Reqid?
+ handle_connection(ClientClose,ServerClose,Req,Session);
+ {error,Reason} ->
+ gen_server:cast(Req#request.from,{Req#request.ref,Req#request.id,
+ {error,Reason}}),
+ handle_connection(ClientClose,ServerClose,Req,Session)
+ end.
+
+%%% Check if the persistent connection flag is false (i.e. the client requested a
+%%% non-persistent connection), or if the server requires the connection to be
+%%% closed (by sending a "Connection: close" header). If a non-persistent
+%%% connection is required, we may close the connection immediately.
+handle_connection(ClientClose,ServerClose,Req,Session) ->
+ case {ClientClose,ServerClose} of
+ {false,false} ->
+ ok;
+ {false,true} -> % The server requests this session to be closed.
+ close_session(server_connection_close,Req,Session);
+ {true,_} -> % The client requested a non-persistent connection
+ close_session(client_connection_close,Req,Session)
+ end.
+
+
+%%% Close the session.
+%%% We now have three cases:
+%%% - The client requested a non-persistent connection when initiating the
+%%% request. Session info is not stored in httpc_manager.
+%%% - Server requests a non-persistent connection when answering a request.
+%%% No need to resend request, but there might be a pipeline.
+%%% - Some kind of error
+%%% Close the session; we may then try resending all requests in the pipeline,
+%%% including the current one, depending on the error.
+%%% FIXME! Should not always abort the session (see close_session in
+%%% httpc_manager for more details)
+close_session(client_connection_close,_Req,Session) ->
+ http_lib:close(Session#session.scheme,Session#session.socket),
+ stop;
+close_session(server_connection_close,Req,Session) ->
+ http_lib:close(Session#session.scheme,Session#session.socket),
+ httpc_manager:abort_session(Req#request.address,Session#session.id,
+ aborted_request),
+ stop.
+
+exit_session(Address,Session,Reason) ->
+ http_lib:close(Session#session.scheme,Session#session.socket),
+ httpc_manager:abort_session(Address,Session#session.id,Reason),
+ exit(normal).
+
+%%% This is the "normal" way to close a persistent connection, i.e. there are
+%%% no more requests waiting and the session was closed by the client, or by the
+%%% server because of a timeout or a user request.
+exit_session_ok(Address,Session) ->
+ http_lib:close(Session#session.scheme,Session#session.socket),
+ exit_session_ok2(Address,Session#session.clientclose,Session#session.id).
+
+exit_session_ok2(Address,ClientClose,Sid) ->
+ case ClientClose of
+ false ->
+ httpc_manager:close_session(Address,Sid);
+ true ->
+ ok
+ end,
+ exit(normal).
+
+%%% ============================================================================
+%%% This is deprecated code, to be removed
+
+format_time() ->
+ {_,_,MicroSecs}=TS=now(),
+ {{Y,Mon,D},{H,M,S}}=calendar:now_to_universal_time(TS),
+ lists:flatten(io_lib:format("~4.4.0w-~2.2.0w-~2.2.0w,~2.2.0w:~2.2.0w:~6.3.0f",
+ [Y,Mon,D,H,M,S+(MicroSecs/1000000)])).
+
+%%% Read more data from the open socket.
+%%% Two different read functions are used because the {active, once} socket
+%%% option is (currently) not available for SSL...
+%%% FIXME
+% read_more_data(http,Socket,Timeout) ->
+% io:format("read_more_data(ip_comm) -> "
+% "~n set active = 'once' and "
+% "await a chunk data", []),
+% http_lib:setopts(Socket, [{active,once}]),
+% read_more_data_ipcomm(Socket,Timeout);
+% read_more_data(https,Socket,Timeout) ->
+% case ssl:recv(Socket,0,Timeout) of
+% {ok,MoreData} ->
+% MoreData;
+% {error,closed} ->
+% throw({error, session_remotely_closed});
+% {error,etimedout} ->
+% throw({error, session_local_timeout});
+% {error,Reason} ->
+% throw({error, Reason});
+% Other ->
+% throw({error, Other})
+% end.
+
+% %%% Send any incoming requests on the open session immediately
+% read_more_data_ipcomm(Socket,Timeout) ->
+% receive
+% {tcp,Socket,MoreData} ->
+% % ?vtrace("read_more_data(ip_comm) -> got some data:~p",
+% % [MoreData]),
+% MoreData;
+% {tcp_closed,Socket} ->
+% % ?vtrace("read_more_data(ip_comm) -> socket closed",[]),
+% throw({error,session_remotely_closed});
+% {tcp_error,Socket,Reason} ->
+% % ?vtrace("read_more_data(ip_comm) -> ~p socket error: ~p",
+% % [self(),Reason]),
+% throw({error, Reason});
+% stop ->
+% throw({error, user_req})
+% after Timeout ->
+% throw({error, session_local_timeout})
+% end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl
new file mode 100644
index 0000000000..29659ce1ce
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl
@@ -0,0 +1,542 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Mobile Arts AB
+%% Portions created by Mobile Arts are Copyright 2002, Mobile Arts AB
+%% All Rights Reserved.''
+%%
+%%
+%% Created : 18 Dec 2001 by Johan Blom <[email protected]>
+%%
+
+-module(httpc_manager).
+
+-behaviour(gen_server).
+
+-include("http.hrl").
+
+-define(HMACALL, ?MODULE).
+-define(HMANAME, ?MODULE).
+
+%%--------------------------------------------------------------------
+%% External exports
+-export([start_link/0,start/0,
+ request/1,cancel_request/1,
+ next_request/2,
+ register_socket/3,
+ abort_session/3,close_session/2,close_session/3
+ ]).
+
+%% Debugging only
+-export([status/0]).
+
+%% gen_server callbacks
+-export([init/1,handle_call/3,handle_cast/2,handle_info/2,terminate/2,
+ code_change/3]).
+
+%%% address_db - ets() Contains mappings from a tuple {Host,Port} to a tuple
+%%% {LastSID,OpenSessions,ets()} where
+%%% LastSid is the last allocated session id,
+%%% OpenSessions is the number of currently open sessions and
+%%% ets() contains mappings from Session Id to #session{}.
+%%%
+%%% Note:
+%%% - Only persistent connections are stored in address_db
+%%% - When automatically redirecting, multiple requests are performed.
+-record(state,{
+ address_db, % ets()
+ reqid % int() Next Request id to use (identifies request).
+ }).
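+
+%%% Illustrative sketch (not part of the original module) of the table layout
+%%% described above; the host, port and counters are made-up values:
+%%%
+%%% AddrDb = ets:new(address_db,[private]),
+%%% STab = ets:new(session_db,[private,{keypos,2},set]),
+%%% %% One open session towards {"www.example.com",80}, last allocated id 1:
+%%% ets:insert(AddrDb,{{"www.example.com",80},{1,1,STab}}),
+%%% [{_,{_LastSid,_OpenSessions,STab}}] =
+%%%     ets:lookup(AddrDb,{"www.example.com",80}).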
+
+%%====================================================================
+%% External functions
+%%====================================================================
+%%--------------------------------------------------------------------
+%% Function: start_link/0
+%% Description: Starts the server
+%%--------------------------------------------------------------------
+start() ->
+ ensure_started().
+
+start_link() ->
+ gen_server:start_link({local,?HMACALL}, ?HMANAME, [], []).
+
+
+%% Find an available session process and store it in address_db. If none is
+%% available, start a new handler process.
+request(Req) ->
+ ensure_started(),
+ ClientClose=http_lib:connection_close(Req#request.headers),
+ gen_server:call(?HMACALL,{request,ClientClose,Req},infinity).
+
+cancel_request(ReqId) ->
+ gen_server:call(?HMACALL,{cancel_request,ReqId},infinity).
+
+
+%%% Close Session
+close_session(Addr,Sid) ->
+ gen_server:call(?HMACALL,{close_session,Addr,Sid},infinity).
+close_session(Req,Addr,Sid) ->
+ gen_server:call(?HMACALL,{close_session,Req,Addr,Sid},infinity).
+
+abort_session(Addr,Sid,Msg) ->
+ gen_server:call(?HMACALL,{abort_session,Addr,Sid,Msg},infinity).
+
+
+%%% Pick the next request in the request queue
+next_request(Addr,Sid) ->
+ gen_server:call(?HMACALL,{next_request,Addr,Sid},infinity).
+
+%%% The session handler has succeeded in setting up a new session; now register
+%%% the socket
+register_socket(Addr,Sid,Socket) ->
+ gen_server:cast(?HMACALL,{register_socket,Addr,Sid,Socket}).
+
+
+%%% Debugging
+status() ->
+ gen_server:cast(?HMACALL,status).
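+
+%%% A sketch of the calling protocol (not part of the original module), as can
+%%% be inferred from the casts in httpc_handler: request/1 returns {ok,ReqId},
+%%% and the result is later delivered asynchronously to the gen_server named in
+%%% #request.from as a message of the form {Ref,ReqId,Result}, e.g.:
+%%%
+%%% %% {ok,ReqId} = httpc_manager:request(Req), % Req :: #request{}
+%%% handle_cast({_Ref,_ReqId,{StatusCode,_Headers,_Body}},State) ->
+%%%     io:format("got response: ~p~n",[StatusCode]),
+%%%     {noreply,State};
+%%% handle_cast({_Ref,_ReqId,{error,Reason}},State) ->
+%%%     io:format("request failed: ~p~n",[Reason]),
+%%%     {noreply,State}.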
+
+
+%%--------------------------------------------------------------------
+%% Function: init/1
+%% Description: Initiates the server
+%% Returns: {ok, State} |
+%% {ok, State, Timeout} |
+%% ignore |
+%% {stop, Reason}
+%%--------------------------------------------------------------------
+init([]) ->
+ process_flag(trap_exit, true),
+ {ok,#state{address_db=ets:new(address_db,[private]),
+ reqid=0}}.
+
+
+%%--------------------------------------------------------------------
+%% Function: handle_call/3
+%% Description: Handling call messages
+%% Returns: {reply, Reply, State} |
+%% {reply, Reply, State, Timeout} |
+%% {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, Reply, State} | (terminate/2 is called)
+%% {stop, Reason, State} (terminate/2 is called)
+%%--------------------------------------------------------------------
+%%% Note:
+%%% - We may have multiple non-persistent connections; each is handled in a
+%%% separate process, thus don't add such connections to address_db
+handle_call({request,false,Req},_From,State) ->
+ case ets:lookup(State#state.address_db,Req#request.address) of
+ [] ->
+ STab=ets:new(session_db,[private,{keypos,2},set]),
+ case persistent_new_session_request(0,Req,STab,State) of
+ {Reply,LastSid,State2} ->
+ ets:insert(State2#state.address_db,
+ {Req#request.address,{LastSid,1,STab}}),
+ {reply,Reply,State2};
+ {ErrorReply,State2} ->
+ {reply,ErrorReply,State2}
+ end;
+ [{_,{LastSid,OpenS,STab}}] ->
+ case lookup_session_entry(STab) of
+ {ok,Session} ->
+ old_session_request(Session,Req,STab,State);
+ need_new_session when OpenS<(Req#request.settings)#client_settings.max_sessions ->
+ case persistent_new_session_request(LastSid,Req,
+ STab,State) of
+ {Reply,LastSid2,State2} ->
+ ets:insert(State2#state.address_db,
+ {Req#request.address,
+ {LastSid2,OpenS+1,STab}}),
+ {reply,Reply,State2};
+ {ErrorReply,State2} ->
+ {reply,ErrorReply,State2}
+ end;
+ need_new_session ->
+ {reply,{error,too_many_sessions},State}
+ end
+ end;
+handle_call({request,true,Req},_From,State) ->
+ {Reply,State2}=not_persistent_new_session_request(Req,State),
+ {reply,Reply,State2};
+handle_call({cancel_request,_ReqId},_From,State) ->
+%% FIXME Should be possible to scan through all requests made, but perhaps
+%% better to give some more hints (such as Addr etc)
+ Reply=ok,
+ {reply,Reply,State};
+handle_call({next_request,Addr,Sid},_From,State) ->
+ case ets:lookup(State#state.address_db,Addr) of
+ [] ->
+ {reply,{error,no_connection},State};
+ [{_,{_,_,STab}}] ->
+ case ets:lookup(STab,Sid) of
+ [] ->
+ {reply,{error,session_not_registered},State};
+ [S=#session{pipeline=[],quelength=QueLen}] ->
+ if
+ QueLen==1 ->
+ ets:insert(STab,S#session{quelength=0});
+ true ->
+ ok
+ end,
+ {reply,no_more_requests,State};
+ [S=#session{pipeline=Que}] ->
+ [Req|RevQue]=lists:reverse(Que),
+ ets:insert(STab,S#session{pipeline=lists:reverse(RevQue),
+ quelength=S#session.quelength-1}),
+ {reply,Req,State}
+ end
+ end;
+handle_call({close_session,Addr,Sid},_From,State) ->
+ case ets:lookup(State#state.address_db,Addr) of
+ [] ->
+ {reply,{error,no_connection},State};
+ [{_,{LastSid,OpenS,STab}}] ->
+ case ets:lookup(STab,Sid) of
+ [#session{pipeline=Que}] ->
+ R=handle_close_session(lists:reverse(Que),STab,Sid,State),
+ ets:insert(State#state.address_db,
+ {Addr,{LastSid,OpenS-1,STab}}),
+ {reply,R,State};
+ [] ->
+ {reply,{error,session_not_registered},State}
+ end
+ end;
+handle_call({close_session,Req,Addr,Sid},_From,State) ->
+ case ets:lookup(State#state.address_db,Addr) of
+ [] ->
+ {reply,{error,no_connection},State};
+ [{_,{LastSid,OpenS,STab}}] ->
+ case ets:lookup(STab,Sid) of
+ [#session{pipeline=Que}] ->
+ R=handle_close_session([Req|lists:reverse(Que)],
+ STab,Sid,State),
+ ets:insert(State#state.address_db,
+ {Addr,{LastSid,OpenS-1,STab}}),
+ {reply,R,State};
+ [] ->
+ {reply,{error,session_not_registered},State}
+ end
+ end;
+handle_call({abort_session,Addr,Sid,Msg},_From,State) ->
+ case ets:lookup(State#state.address_db,Addr) of
+ [] ->
+ {reply,{error,no_connection},State};
+ [{_,{LastSid,OpenS,STab}}] ->
+ case ets:lookup(STab,Sid) of
+ [#session{pipeline=Que}] ->
+ R=abort_request_que(Que,{error,Msg}),
+ ets:delete(STab,Sid),
+ ets:insert(State#state.address_db,
+ {Addr,{LastSid,OpenS-1,STab}}),
+ {reply,R,State};
+ [] ->
+ {reply,{error,session_not_registered},State}
+ end
+ end.
+
+
+%%--------------------------------------------------------------------
+%% Function: handle_cast/2
+%% Description: Handling cast messages
+%% Returns: {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State} (terminate/2 is called)
+%%--------------------------------------------------------------------
+handle_cast(status, State) ->
+ io:format("Status:~n"),
+ print_all(lists:sort(ets:tab2list(State#state.address_db))),
+ {noreply, State};
+handle_cast({register_socket,Addr,Sid,Socket},State) ->
+ case ets:lookup(State#state.address_db,Addr) of
+ [] ->
+ {noreply,State};
+ [{_,{_,_,STab}}] ->
+ case ets:lookup(STab,Sid) of
+ [Session] ->
+ ets:insert(STab,Session#session{socket=Socket}),
+ {noreply,State};
+ [] ->
+ {noreply,State}
+ end
+ end.
+
+print_all([]) ->
+ ok;
+print_all([{Addr,{LastSid,OpenSessions,STab}}|Rest]) ->
+ io:format(" Address:~p LastSid=~p OpenSessions=~p~n",[Addr,LastSid,OpenSessions]),
+ SortedList=lists:sort(fun(A,B) ->
+ if
+ A#session.id<B#session.id ->
+ true;
+ true ->
+ false
+ end
+ end,ets:tab2list(STab)),
+ print_all2(SortedList),
+ print_all(Rest).
+
+print_all2([]) ->
+ ok;
+print_all2([Session|Rest]) ->
+ io:format(" Session:~p~n",[Session#session.id]),
+ io:format(" Client close:~p~n",[Session#session.clientclose]),
+ io:format(" Socket:~p~n",[Session#session.socket]),
+ io:format(" Pipe: length=~p Que=~p~n",[Session#session.quelength,Session#session.pipeline]),
+ print_all2(Rest).
+
+%%--------------------------------------------------------------------
+%% Function: handle_info/2
+%% Description: Handling all non call/cast messages
+%% Returns: {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State} (terminate/2 is called)
+%%--------------------------------------------------------------------
+handle_info({'EXIT',_Pid,normal}, State) ->
+ {noreply, State};
+handle_info(Info, State) ->
+ io:format("ERROR httpc_manager:handle_info ~p~n",[Info]),
+ {noreply, State}.
+
+%%--------------------------------------------------------------------
+%% Function: terminate/2
+%% Description: Shutdown the server
+%% Returns: any (ignored by gen_server)
+%%--------------------------------------------------------------------
+terminate(_Reason, State) ->
+ ets:delete(State#state.address_db).
+
+%%--------------------------------------------------------------------
+%% Func: code_change/3
+%% Purpose: Convert process state when code is changed
+%% Returns: {ok, NewState}
+%%--------------------------------------------------------------------
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+%%--------------------------------------------------------------------
+%%% Internal functions
+%%--------------------------------------------------------------------
+
+%%% From RFC 2616, Section 8.1.4
+%%% A client, server, or proxy MAY close the transport connection at any
+%%% time. For example, a client might have started to send a new request
+%%% at the same time that the server has decided to close the "idle"
+%%% connection. From the server's point of view, the connection is being
+%%% closed while it was idle, but from the client's point of view, a
+%%% request is in progress.
+%%%
+%%% This means that clients, servers, and proxies MUST be able to recover
+%%% from asynchronous close events. Client software SHOULD reopen the
+%%% transport connection and retransmit the aborted sequence of requests
+%%% without user interaction so long as the request sequence is
+%%% idempotent (see section 9.1.2). Non-idempotent methods or sequences
+%%%
+%%% FIXME
+%%% Note:
+%%% - If this happens (the server closed an idle connection) there can't be any
+%%% requests in the queue.
+%%% - This is the main function for closing sessions
+handle_close_session([],STab,Sid,_State) ->
+ ets:delete(STab,Sid);
+handle_close_session(Que,STab,Sid,_State) ->
+ ets:delete(STab,Sid),
+ abort_request_que(Que,{error,aborted_request}).
+
+
+%%% From RFC 2616, Section 8.1.2.2
+%%% Clients which assume persistent connections and pipeline immediately
+%%% after connection establishment SHOULD be prepared to retry their
+%%% connection if the first pipelined attempt fails. If a client does
+%%% such a retry, it MUST NOT pipeline before it knows the connection is
+%%% persistent. Clients MUST also be prepared to resend their requests if
+%%% the server closes the connection before sending all of the
+%%% corresponding responses.
+%%% FIXME! I'm currently not checking if this is the first attempt on the session
+%%% FIXME! Pipeline size must be dynamically variable (e.g. 0 if resend, 2 else)
+%%% The queue contains requests that were previously sent successfully, but the
+%%% session was closed prematurely while reading the response.
+%%% Try to set up a new session and resend these requests.
+%%% Note:
+%%% - This MUST be a persistent session
+% handle_closed_pipelined_session_que([],_State) ->
+% ok;
+% handle_closed_pipelined_session_que(_Que,_State) ->
+% ok.
+
+
+%%% From RFC 2616, Section 8.2.4
+%%% If an HTTP/1.1 client sends a request which includes a request body,
+%%% but which does not include an Expect request-header field with the
+%%% "100-continue" expectation, and if the client is not directly
+%%% connected to an HTTP/1.1 origin server, and if the client sees the
+%%% connection close before receiving any status from the server, the
+%%% client SHOULD retry the request. If the client does retry this
+%%% request, it MAY use the following "binary exponential backoff"
+%%% algorithm to be assured of obtaining a reliable response:
+%%% ...
+%%% FIXME! I'm currently not checking if a "Expect: 100-continue" has been sent.
+% handle_remotely_closed_session_que([],_State) ->
+% ok;
+% handle_remotely_closed_session_que(_Que,_State) ->
+% % resend_que(Que,Socket),
+% ok.
+
+%%% Resend all requests in the request queue
+% resend_que([],_) ->
+% ok;
+% resend_que([Req|Que],Socket) ->
+% case catch httpc_handler:http_request(Req,Socket) of
+% ok ->
+% resend_que(Que,Socket);
+% {error,Reason} ->
+% {error,Reason}
+% end.
+
+
+%%% From RFC 2616,
+%%% Section 8.1.2.2:
+%%% Clients SHOULD NOT pipeline requests using non-idempotent methods or
+%%% non-idempotent sequences of methods (see section 9.1.2). Otherwise, a
+%%% premature termination of the transport connection could lead to
+%%% indeterminate results. A client wishing to send a non-idempotent
+%%% request SHOULD wait to send that request until it has received the
+%%% response status for the previous request.
+%%% Section 9.1.2:
+%%% Methods can also have the property of "idempotence" in that (aside
+%%% from error or expiration issues) the side-effects of N > 0 identical
+%%% requests is the same as for a single request. The methods GET, HEAD,
+%%% PUT and DELETE share this property. Also, the methods OPTIONS and
+%%% TRACE SHOULD NOT have side effects, and so are inherently idempotent.
+%%%
+%%% Note that POST and CONNECT are not idempotent methods.
+%%%
+%%% Tries to find an open, free session in STab. Such a session has a quelength
+%%% less than its max_quelength.
+%%% Don't care about non-standard, user-defined methods.
+%%%
+%%% Returns {ok,Session} or need_new_session where
+%%% Session is the session that may be used
+lookup_session_entry(STab) ->
+ MS=[{#session{quelength='$1',max_quelength='$2',
+ id='_',clientclose='_',socket='$3',scheme='_',pipeline='_'},
+ [{'<','$1','$2'},{is_port,'$3'}],
+ ['$_']}],
+ case ets:select(STab,MS) of
+ [] ->
+ need_new_session;
+ SessionList -> % Now check if any of these has an empty pipeline.
+ case lists:keysearch(0,2,SessionList) of
+ {value,Session} ->
+ {ok,Session};
+ false ->
+ {ok,hd(SessionList)}
+ end
+ end.
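+
+%%% Commented-out sketch (not in the original module) of what the match
+%%% specification above selects, checked with ets:test_ms/2; the socket value
+%%% is assumed to be any live port taken from erlang:ports/0:
+%%%
+%%% test_lookup_ms() ->
+%%%     MS=[{#session{quelength='$1',max_quelength='$2',
+%%%                   id='_',clientclose='_',socket='$3',scheme='_',pipeline='_'},
+%%%          [{'<','$1','$2'},{is_port,'$3'}],
+%%%          ['$_']}],
+%%%     Free=#session{id=1,clientclose=false,socket=hd(erlang:ports()),
+%%%                   scheme=http,pipeline=[],quelength=0,max_quelength=2},
+%%%     Full=Free#session{quelength=2},
+%%%     {ok,Free}=ets:test_ms(Free,MS),  % selected: quelength < max_quelength
+%%%     {ok,false}=ets:test_ms(Full,MS). % rejected: pipeline already full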
+
+
+%%% Returns a tuple {Reply,State} where
+%%% Reply is the response sent back to the application
+%%%
+%%% Note:
+%%% - An {error,einval} from a send should sometimes rather be {error,closed}
+%%% - Don't close the session from here, let httpc_handler take care of that.
+%old_session_request(Session,Req,STab,State)
+% when (Req#request.settings)#client_settings.max_quelength==0 ->
+% Session1=Session#session{pipeline=[Req]},
+% ets:insert(STab,Session1),
+% {reply,{ok,ReqId},State#state{reqid=ReqId+1}};
+old_session_request(Session,Req,STab,State) ->
+ ReqId=State#state.reqid,
+ Req1=Req#request{id=ReqId},
+ case catch httpc_handler:http_request(Req1,Session#session.socket) of
+ ok ->
+ Session1=Session#session{pipeline=[Req1|Session#session.pipeline],
+ quelength=Session#session.quelength+1},
+ ets:insert(STab,Session1),
+ {reply,{ok,ReqId},State#state{reqid=ReqId+1}};
+ {error,Reason} ->
+ ets:insert(STab,Session#session{socket=undefined}),
+% http_lib:close(Session#session.sockettype,Session#session.socket),
+ {reply,{error,Reason},State#state{reqid=ReqId+1}}
+ end.
+
+%%% Returns a tuple {Reply,Sid,State} where
+%%% Reply is the response sent back to the application, and
+%%% Sid is the last used Session Id
+persistent_new_session_request(Sid,Req,STab,State) ->
+ ReqId=State#state.reqid,
+ case setup_new_session(Req#request{id=ReqId},false,Sid) of
+ {error,Reason} ->
+ {{error,Reason},State#state{reqid=ReqId+1}};
+ {NewSid,Session} ->
+ ets:insert(STab,Session),
+ {{ok,ReqId},NewSid,State#state{reqid=ReqId+1}}
+ end.
+
+%%% Returns a tuple {Reply,State} where
+%%% Reply is the response sent back to the application
+not_persistent_new_session_request(Req,State) ->
+ ReqId=State#state.reqid,
+ case setup_new_session(Req#request{id=ReqId},true,undefined) of
+ {error,Reason} ->
+ {{error,Reason},State#state{reqid=ReqId+1}};
+ ok ->
+ {{ok,ReqId},State#state{reqid=ReqId+1}}
+ end.
+
+%%% As there are no sessions available, set up a new session and send the request
+%%% on it.
+setup_new_session(Req,ClientClose,Sid) ->
+ S=#session{id=Sid,clientclose=ClientClose,
+ scheme=Req#request.scheme,
+ max_quelength=(Req#request.settings)#client_settings.max_quelength},
+ spawn_link(httpc_handler,init_connection,[Req,S]),
+ case ClientClose of
+ false ->
+ {Sid+1,S};
+ true ->
+ ok
+ end.
+
+
+%%% ----------------------------------------------------------------------------
+%%% Abort all requests in the request queue.
+abort_request_que([],_Msg) ->
+ ok;
+abort_request_que([#request{from=From,ref=Ref,id=Id}|Que],Msg) ->
+ gen_server:cast(From,{Ref,Id,Msg}),
+ abort_request_que(Que,Msg);
+abort_request_que(#request{from=From,ref=Ref,id=Id},Msg) ->
+ gen_server:cast(From,{Ref,Id,Msg}).
+
+
+%%% --------------------------------
+% C={httpc_manager,{?MODULE,start_link,[]},permanent,1000,
+% worker,[?MODULE]},
+% supervisor:start_child(inets_sup, C),
+ensure_started() ->
+ case whereis(?HMANAME) of
+ undefined ->
+ start_link();
+ _ ->
+ ok
+ end.
+
+
+%%% ============================================================================
+%%% This is deprecated code, to be removed
+
+% format_time() ->
+% {_,_,MicroSecs}=TS=now(),
+% {{Y,Mon,D},{H,M,S}}=calendar:now_to_universal_time(TS),
+% lists:flatten(io_lib:format("~4.4.0w-~2.2.0w-~2.2.0w,~2.2.0w:~2.2.0w:~6.3.0f",
+% [Y,Mon,D,H,M,S+(MicroSecs/1000000)])).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd.erl
new file mode 100644
index 0000000000..3199e4430d
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd.erl
@@ -0,0 +1,594 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd.erl,v 1.1 2008/12/17 09:53:33 mikpe Exp $
+%%
+-module(httpd).
+-export([multi_start/1, multi_start_link/1,
+ start/0, start/1, start/2,
+ start_link/0, start_link/1, start_link/2,
+ start_child/0,start_child/1,
+ multi_stop/1,
+ stop/0,stop/1,stop/2,
+ stop_child/0,stop_child/1,stop_child/2,
+ multi_restart/1,
+ restart/0,restart/1,restart/2,
+ parse_query/1]).
+
+%% Optional start related stuff...
+-export([load/1, load_mime_types/1,
+ start2/1, start2/2,
+ start_link2/1, start_link2/2,
+ stop2/1]).
+
+%% Management stuff
+-export([block/0,block/1,block/2,block/3,block/4,
+ unblock/0,unblock/1,unblock/2]).
+
+%% Debugging and status info stuff...
+-export([verbosity/3,verbosity/4]).
+-export([get_status/1,get_status/2,get_status/3,
+ get_admin_state/0,get_admin_state/1,get_admin_state/2,
+ get_usage_state/0,get_usage_state/1,get_usage_state/2]).
+
+-include("httpd.hrl").
+
+-define(D(F, A), io:format("~p:" ++ F ++ "~n", [?MODULE|A])).
+
+
+%% start
+
+start() ->
+ start("/var/tmp/server_root/conf/8888.conf").
+
+start(ConfigFile) ->
+ %% ?D("start(~s) -> entry", [ConfigFile]),
+ start(ConfigFile, []).
+
+start(ConfigFile, Verbosity) when list(ConfigFile), list(Verbosity) ->
+ httpd_sup:start(ConfigFile, Verbosity).
+
+
+%% start_link
+
+start_link() ->
+ start_link("/var/tmp/server_root/conf/8888.conf").
+
+start_link(ConfigFile) ->
+ start_link(ConfigFile, []).
+
+start_link(ConfigFile, Verbosity) when list(ConfigFile), list(Verbosity) ->
+ httpd_sup:start_link(ConfigFile, Verbosity).
+
+
+%% start2 & start_link2
+
+start2(Config) ->
+ start2(Config, []).
+
+start2(Config, Verbosity) when list(Config), list(Verbosity) ->
+ httpd_sup:start2(Config, Verbosity).
+
+start_link2(Config) ->
+ start_link2(Config, []).
+
+start_link2(Config, Verbosity) when list(Config), list(Verbosity) ->
+ httpd_sup:start_link2(Config, Verbosity).
+
+
+%% stop
+
+stop() ->
+ stop(8888).
+
+stop(Port) when integer(Port) ->
+ stop(undefined, Port);
+stop(Pid) when pid(Pid) ->
+ httpd_sup:stop(Pid);
+stop(ConfigFile) when list(ConfigFile) ->
+ %% ?D("stop(~s) -> entry", [ConfigFile]),
+ httpd_sup:stop(ConfigFile).
+
+stop(Addr, Port) when integer(Port) ->
+ httpd_sup:stop(Addr, Port).
+
+stop2(Config) when list(Config) ->
+ httpd_sup:stop2(Config).
+
+%% start_child
+
+start_child() ->
+ start_child("/var/tmp/server_root/conf/8888.conf").
+
+start_child(ConfigFile) ->
+ start_child(ConfigFile, []).
+
+start_child(ConfigFile, Verbosity) ->
+ inets_sup:start_child(ConfigFile, Verbosity).
+
+
+%% stop_child
+
+stop_child() ->
+ stop_child(8888).
+
+stop_child(Port) ->
+ stop_child(undefined,Port).
+
+stop_child(Addr, Port) when integer(Port) ->
+ inets_sup:stop_child(Addr, Port).
+
+
+%% multi_start
+
+multi_start(MultiConfigFile) ->
+ case read_multi_file(MultiConfigFile) of
+ {ok,ConfigFiles} ->
+ mstart(ConfigFiles);
+ Error ->
+ Error
+ end.
+
+mstart(ConfigFiles) ->
+ mstart(ConfigFiles,[]).
+mstart([],Results) ->
+ {ok,lists:reverse(Results)};
+mstart([H|T],Results) ->
+ Res = start(H),
+ mstart(T,[Res|Results]).
+
+
+%% multi_start_link
+
+multi_start_link(MultiConfigFile) ->
+ case read_multi_file(MultiConfigFile) of
+ {ok,ConfigFiles} ->
+ mstart_link(ConfigFiles);
+ Error ->
+ Error
+ end.
+
+mstart_link(ConfigFiles) ->
+ mstart_link(ConfigFiles,[]).
+mstart_link([],Results) ->
+ {ok,lists:reverse(Results)};
+mstart_link([H|T],Results) ->
+ Res = start_link(H),
+ mstart_link(T,[Res|Results]).
+
+
+%% multi_stop
+
+multi_stop(MultiConfigFile) ->
+ case read_multi_file(MultiConfigFile) of
+ {ok,ConfigFiles} ->
+ mstop(ConfigFiles);
+ Error ->
+ Error
+ end.
+
+mstop(ConfigFiles) ->
+ mstop(ConfigFiles,[]).
+mstop([],Results) ->
+ {ok,lists:reverse(Results)};
+mstop([H|T],Results) ->
+ Res = stop(H),
+ mstop(T,[Res|Results]).
+
+
+%% multi_restart
+
+multi_restart(MultiConfigFile) ->
+ case read_multi_file(MultiConfigFile) of
+ {ok,ConfigFiles} ->
+ mrestart(ConfigFiles);
+ Error ->
+ Error
+ end.
+
+mrestart(ConfigFiles) ->
+ mrestart(ConfigFiles,[]).
+mrestart([],Results) ->
+ {ok,lists:reverse(Results)};
+mrestart([H|T],Results) ->
+ Res = restart(H),
+ mrestart(T,[Res|Results]).
+
+
+%% restart
+
+restart() -> restart(undefined,8888).
+
+restart(Port) when integer(Port) ->
+ restart(undefined,Port);
+restart(ConfigFile) when list(ConfigFile) ->
+ case get_addr_and_port(ConfigFile) of
+ {ok,Addr,Port} ->
+ restart(Addr,Port);
+ Error ->
+ Error
+ end.
+
+
+restart(Addr,Port) when integer(Port) ->
+ do_restart(Addr,Port).
+
+do_restart(Addr,Port) when integer(Port) ->
+ Name = make_name(Addr,Port),
+ case whereis(Name) of
+ Pid when pid(Pid) ->
+ httpd_manager:restart(Pid);
+ _ ->
+ {error,not_started}
+ end.
+
+
+%%% =========================================================
+%%% Function: block/0, block/1, block/2, block/3, block/4
+%%% block()
+%%% block(Port)
+%%% block(ConfigFile)
+%%% block(Addr,Port)
+%%% block(Port,Mode)
+%%% block(ConfigFile,Mode)
+%%% block(Addr,Port,Mode)
+%%% block(ConfigFile,Mode,Timeout)
+%%% block(Addr,Port,Mode,Timeout)
+%%%
+%%% Returns: ok | {error,Reason}
+%%%
+%%% Description: This function is used to block an HTTP server.
+%%% The blocking can be done in two ways,
+%%% disturbing or non-disturbing. Default is disturbing.
+%%% When an HTTP server is blocked, all requests are rejected
+%%% (status code 503).
+%%%
+%%% disturbing:
+%%% By performing a disturbing block, the server
+%%% is blocked forcefully and all ongoing requests
+%%% are terminated. No new connections are accepted.
+%%% If a timeout is given, ongoing requests are given
+%%% this much time to complete before the server is
+%%% forcefully blocked. In this case no new
+%%% connections are accepted.
+%%%
+%%% non-disturbing:
+%%% A non-disturbing block is more graceful. No
+%%% new connections are accepted, but the ongoing
+%%% requests are allowed to complete.
+%%% If a timeout is given, it waits this long before
+%%% giving up (the block operation is aborted and the
+%%% server state is once more not-blocked).
+%%%
+%%% Types: Port -> integer()
+%%% Addr -> {A,B,C,D} | string() | undefined
+%%% ConfigFile -> string()
+%%% Mode -> disturbing | non_disturbing
+%%% Timeout -> integer()
+%%%
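+%%% A hypothetical usage sketch (not part of this module): block the server
+%%% on port 8888 non-disturbingly, give ongoing requests five seconds to
+%%% finish, and later unblock it again.
+%%%
+%%% block_example() ->
+%%%     ok = httpd:block(undefined,8888,non_disturbing,5000),
+%%%     %% ... perform maintenance here ...
+%%%     ok = httpd:unblock(undefined,8888).
+%%%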
+block() -> block(undefined,8888,disturbing).
+
+block(Port) when integer(Port) ->
+ block(undefined,Port,disturbing);
+
+block(ConfigFile) when list(ConfigFile) ->
+ case get_addr_and_port(ConfigFile) of
+ {ok,Addr,Port} ->
+ block(Addr,Port,disturbing);
+ Error ->
+ Error
+ end.
+
+block(Addr,Port) when integer(Port) ->
+ block(Addr,Port,disturbing);
+
+block(Port,Mode) when integer(Port), atom(Mode) ->
+ block(undefined,Port,Mode);
+
+block(ConfigFile,Mode) when list(ConfigFile), atom(Mode) ->
+ case get_addr_and_port(ConfigFile) of
+ {ok,Addr,Port} ->
+ block(Addr,Port,Mode);
+ Error ->
+ Error
+ end.
+
+
+block(Addr,Port,disturbing) when integer(Port) ->
+ do_block(Addr,Port,disturbing);
+block(Addr,Port,non_disturbing) when integer(Port) ->
+ do_block(Addr,Port,non_disturbing);
+
+block(ConfigFile,Mode,Timeout) when list(ConfigFile), atom(Mode), integer(Timeout) ->
+ case get_addr_and_port(ConfigFile) of
+ {ok,Addr,Port} ->
+ block(Addr,Port,Mode,Timeout);
+ Error ->
+ Error
+ end.
+
+
+block(Addr,Port,non_disturbing,Timeout) when integer(Port), integer(Timeout) ->
+ do_block(Addr,Port,non_disturbing,Timeout);
+block(Addr,Port,disturbing,Timeout) when integer(Port), integer(Timeout) ->
+ do_block(Addr,Port,disturbing,Timeout).
+
+do_block(Addr,Port,Mode) when integer(Port), atom(Mode) ->
+ Name = make_name(Addr,Port),
+ case whereis(Name) of
+ Pid when pid(Pid) ->
+ httpd_manager:block(Pid,Mode);
+ _ ->
+ {error,not_started}
+ end.
+
+
+do_block(Addr,Port,Mode,Timeout) when integer(Port), atom(Mode) ->
+ Name = make_name(Addr,Port),
+ case whereis(Name) of
+ Pid when pid(Pid) ->
+ httpd_manager:block(Pid,Mode,Timeout);
+ _ ->
+ {error,not_started}
+ end.
+
+
+%%% =========================================================
+%%% Function: unblock/0, unblock/1, unblock/2
+%%% unblock()
+%%% unblock(Port)
+%%% unblock(ConfigFile)
+%%% unblock(Addr,Port)
+%%%
+%%% Description: This function is used to reverse a previous block
+%%% operation on the HTTP server.
+%%%
+%%% Types: Port -> integer()
+%%% Addr -> {A,B,C,D} | string() | undefined
+%%% ConfigFile -> string()
+%%%
+unblock() -> unblock(undefined,8888).
+unblock(Port) when integer(Port) -> unblock(undefined,Port);
+
+unblock(ConfigFile) when list(ConfigFile) ->
+ case get_addr_and_port(ConfigFile) of
+ {ok,Addr,Port} ->
+ unblock(Addr,Port);
+ Error ->
+ Error
+ end.
+
+unblock(Addr,Port) when integer(Port) ->
+ Name = make_name(Addr,Port),
+ case whereis(Name) of
+ Pid when pid(Pid) ->
+ httpd_manager:unblock(Pid);
+ _ ->
+ {error,not_started}
+ end.
+
+
+verbosity(Port,Who,Verbosity) ->
+ verbosity(undefined,Port,Who,Verbosity).
+
+verbosity(Addr,Port,Who,Verbosity) ->
+ Name = make_name(Addr,Port),
+ case whereis(Name) of
+ Pid when pid(Pid) ->
+ httpd_manager:verbosity(Pid,Who,Verbosity);
+ _ ->
+ not_started
+ end.
+
+
+%%% =========================================================
+%%% Function: get_admin_state/0, get_admin_state/1, get_admin_state/2
+%%% get_admin_state()
+%%% get_admin_state(Port)
+%%% get_admin_state(Addr,Port)
+%%%
+%%% Returns: {ok,State} | {error,Reason}
+%%%
+%%% Description: This function is used to retrieve the administrative
+%%% state of the HTTP server.
+%%%
+%%% Types: Port -> integer()
+%%% Addr -> {A,B,C,D} | string() | undefined
+%%% State -> unblocked | shutting_down | blocked
+%%% Reason -> term()
+%%%
+get_admin_state() -> get_admin_state(undefined,8888).
+get_admin_state(Port) when integer(Port) -> get_admin_state(undefined,Port);
+
+get_admin_state(ConfigFile) when list(ConfigFile) ->
+ case get_addr_and_port(ConfigFile) of
+ {ok,Addr,Port} ->
+ get_admin_state(Addr,Port);
+ Error ->
+ Error
+ end.
+
+get_admin_state(Addr,Port) when integer(Port) ->
+ Name = make_name(Addr,Port),
+ case whereis(Name) of
+ Pid when pid(Pid) ->
+ httpd_manager:get_admin_state(Pid);
+ _ ->
+ {error,not_started}
+ end.
+
+
+
+%%% =========================================================
+%%% Function: get_usage_state/0, get_usage_state/1, get_usage_state/2
+%%% get_usage_state()
+%%% get_usage_state(Port)
+%%% get_usage_state(Addr,Port)
+%%%
+%%% Returns: {ok,State} | {error,Reason}
+%%%
+%%% Description: This function is used to retrieve the usage
+%%% state of the HTTP server.
+%%%
+%%% Types: Port -> integer()
+%%% Addr -> {A,B,C,D} | string() | undefined
+%%% State -> idle | active | busy
+%%% Reason -> term()
+%%%
+get_usage_state() -> get_usage_state(undefined,8888).
+get_usage_state(Port) when integer(Port) -> get_usage_state(undefined,Port);
+
+get_usage_state(ConfigFile) when list(ConfigFile) ->
+ case get_addr_and_port(ConfigFile) of
+ {ok,Addr,Port} ->
+ get_usage_state(Addr,Port);
+ Error ->
+ Error
+ end.
+
+get_usage_state(Addr,Port) when integer(Port) ->
+ Name = make_name(Addr,Port),
+ case whereis(Name) of
+ Pid when pid(Pid) ->
+ httpd_manager:get_usage_state(Pid);
+ _ ->
+ {error,not_started}
+ end.
+
+
+
+%%% =========================================================
+%% Function: get_status(ConfigFile) -> Status
+%% get_status(Port) -> Status
+%% get_status(Addr,Port) -> Status
+%% get_status(Port,Timeout) -> Status
+%% get_status(Addr,Port,Timeout) -> Status
+%%
+%% Arguments: ConfigFile -> string()
+%% Configuration file from which Port and
+%% BindAddress will be extracted.
+%% Addr -> {A,B,C,D} | string()
+%% Bind Address of the http server
+%% Port -> integer()
+%% Port number of the http server
+%% Timeout -> integer()
+%% Timeout time for the call
+%%
+%% Returns: Status -> list()
+%%
+%% Description: This function is used when the caller runs in the
+%% same node as the http server or if calling with a
+%% program such as erl_call (see erl_interface).
+%%
+
+get_status(ConfigFile) when list(ConfigFile) ->
+ case get_addr_and_port(ConfigFile) of
+ {ok,Addr,Port} ->
+ get_status(Addr,Port);
+ Error ->
+ Error
+ end;
+
+get_status(Port) when integer(Port) ->
+ get_status(undefined,Port,5000).
+
+get_status(Port,Timeout) when integer(Port), integer(Timeout) ->
+ get_status(undefined,Port,Timeout);
+
+get_status(Addr,Port) when list(Addr), integer(Port) ->
+ get_status(Addr,Port,5000).
+
+get_status(Addr,Port,Timeout) when integer(Port) ->
+ Name = make_name(Addr,Port),
+ case whereis(Name) of
+ Pid when pid(Pid) ->
+ httpd_manager:get_status(Pid,Timeout);
+ _ ->
+ not_started
+ end.
+
+
+%% load config
+
+load(ConfigFile) ->
+ httpd_conf:load(ConfigFile).
+
+load_mime_types(MimeTypesFile) ->
+ httpd_conf:load_mime_types(MimeTypesFile).
+
+
+%% parse_query
+
+parse_query(String) ->
+ {ok, SplitString} = regexp:split(String,"[&;]"),
+ foreach(SplitString).
+
+foreach([]) ->
+ [];
+foreach([KeyValue|Rest]) ->
+ {ok, Plus2Space, _} = regexp:gsub(KeyValue,"[\+]"," "),
+ case regexp:split(Plus2Space,"=") of
+ {ok,[Key|Value]} ->
+ [{httpd_util:decode_hex(Key),
+ httpd_util:decode_hex(lists:flatten(Value))}|foreach(Rest)];
+ {ok,_} ->
+ foreach(Rest)
+ end.
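+
+%% Example (illustrative, not from the original module): the query string is
+%% split on "&" or ";", '+' is turned into a space and %-escapes are decoded:
+%%
+%% parse_query("name=J%20Doe&lang=erlang;x=1+2") ->
+%%     [{"name","J Doe"},{"lang","erlang"},{"x","1 2"}]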
+
+
+%% get_addr_and_port
+
+get_addr_and_port(ConfigFile) ->
+ case httpd_conf:load(ConfigFile) of
+ {ok,ConfigList} ->
+ Port = httpd_util:key1search(ConfigList,port,80),
+ Addr = httpd_util:key1search(ConfigList,bind_address),
+ {ok,Addr,Port};
+ Error ->
+ Error
+ end.
+
+
+%% make_name
+
+make_name(Addr,Port) ->
+ httpd_util:make_name("httpd",Addr,Port).
+
+
+%% Multi stuff
+%%
+
+read_multi_file(File) ->
+ read_mfile(file:open(File,[read])).
+
+read_mfile({ok,Fd}) ->
+ read_mfile(read_line(Fd),Fd,[]);
+read_mfile(Error) ->
+ Error.
+
+read_mfile(eof,_Fd,SoFar) ->
+ {ok,lists:reverse(SoFar)};
+read_mfile({error,Reason},_Fd,SoFar) ->
+ {error,Reason};
+read_mfile([$#|Comment],Fd,SoFar) ->
+ read_mfile(read_line(Fd),Fd,SoFar);
+read_mfile([],Fd,SoFar) ->
+ read_mfile(read_line(Fd),Fd,SoFar);
+read_mfile(Line,Fd,SoFar) ->
+ read_mfile(read_line(Fd),Fd,[Line|SoFar]).
+
+read_line(Fd) -> read_line1(io:get_line(Fd,[])).
+read_line1(eof) -> eof;
+read_line1(String) -> httpd_conf:clean(String).
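+
+%% Example (illustrative) of a multi-config file as read by read_multi_file/1:
+%% one configuration file per line, '#' starts a comment line and blank lines
+%% are ignored.
+%%
+%% # servers started by multi_start/1
+%% /var/tmp/server_root/conf/8888.conf
+%% /var/tmp/server_root/conf/8080.conf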
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd.hrl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd.hrl
new file mode 100644
index 0000000000..015c1b1e2d
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd.hrl
@@ -0,0 +1,77 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd.hrl,v 1.1 2008/12/17 09:53:33 mikpe Exp $
+%%
+
+-include_lib("kernel/include/file.hrl").
+
+-ifndef(SERVER_SOFTWARE).
+-define(SERVER_SOFTWARE,"inets/develop"). % Define in Makefile!
+-endif.
+-define(SERVER_PROTOCOL,"HTTP/1.1").
+-define(SOCKET_CHUNK_SIZE,8192).
+-define(SOCKET_MAX_POLL,25).
+-define(FILE_CHUNK_SIZE,64*1024).
+-define(NICE(Reason),lists:flatten(atom_to_list(?MODULE)++": "++Reason)).
+-define(DEFAULT_CONTEXT,
+ [{errmsg,"[an error occurred while processing this directive]"},
+ {timefmt,"%A, %d-%b-%y %T %Z"},
+ {sizefmt,"abbrev"}]).
+
+
+-ifdef(inets_error).
+-define(ERROR(Format, Args), io:format("E(~p:~p:~p) : "++Format++"~n",
+ [self(),?MODULE,?LINE]++Args)).
+-else.
+-define(ERROR(F,A),[]).
+-endif.
+
+-ifdef(inets_log).
+-define(LOG(Format, Args), io:format("L(~p:~p:~p) : "++Format++"~n",
+ [self(),?MODULE,?LINE]++Args)).
+-else.
+-define(LOG(F,A),[]).
+-endif.
+
+-ifdef(inets_debug).
+-define(DEBUG(Format, Args), io:format("D(~p:~p:~p) : "++Format++"~n",
+ [self(),?MODULE,?LINE]++Args)).
+-else.
+-define(DEBUG(F,A),[]).
+-endif.
+
+-ifdef(inets_cdebug).
+-define(CDEBUG(Format, Args), io:format("C(~p:~p:~p) : "++Format++"~n",
+ [self(),?MODULE,?LINE]++Args)).
+-else.
+-define(CDEBUG(F,A),[]).
+-endif.
+
+
+-record(init_data,{peername,resolve}).
+-record(mod,{init_data,
+ data=[],
+ socket_type=ip_comm,
+ socket,
+ config_db,
+ method,
+ absolute_uri=[],
+ request_uri,
+ http_version,
+ request_line,
+ parsed_header=[],
+ entity_body,
+ connection}).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_acceptor.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_acceptor.erl
new file mode 100644
index 0000000000..7bf2d5d868
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_acceptor.erl
@@ -0,0 +1,174 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_acceptor.erl,v 1.1 2008/12/17 09:53:33 mikpe Exp $
+%%
+-module(httpd_acceptor).
+
+-include("httpd.hrl").
+-include("httpd_verbosity.hrl").
+
+
+%% External API
+-export([start_link/6]).
+
+%% Other exports (for spawn's etc.)
+-export([acceptor/4, acceptor/7]).
+
+
+%%
+%% External API
+%%
+
+%% start_link
+
+start_link(Manager, SocketType, Addr, Port, ConfigDb, Verbosity) ->
+ Args = [self(), Manager, SocketType, Addr, Port, ConfigDb, Verbosity],
+ proc_lib:start_link(?MODULE, acceptor, Args).
+
+
+acceptor(Parent, Manager, SocketType, Addr, Port, ConfigDb, Verbosity) ->
+ put(sname,acc),
+ put(verbosity,Verbosity),
+ ?vlog("starting",[]),
+ case (catch do_init(SocketType, Addr, Port)) of
+ {ok, ListenSocket} ->
+ proc_lib:init_ack(Parent, {ok, self()}),
+ acceptor(Manager, SocketType, ListenSocket, ConfigDb);
+ Error ->
+ proc_lib:init_ack(Parent, Error),
+ error
+ end.
+
+do_init(SocketType, Addr, Port) ->
+ do_socket_start(SocketType),
+ ListenSocket = do_socket_listen(SocketType, Addr, Port),
+ {ok, ListenSocket}.
+
+
+do_socket_start(SocketType) ->
+ case httpd_socket:start(SocketType) of
+ ok ->
+ ok;
+ {error, Reason} ->
+ ?vinfo("failed socket start: ~p",[Reason]),
+ throw({error, {socket_start_failed, Reason}})
+ end.
+
+
+do_socket_listen(SocketType, Addr, Port) ->
+ case httpd_socket:listen(SocketType, Addr, Port) of
+ {error, Reason} ->
+ ?vinfo("failed socket listen operation: ~p", [Reason]),
+ throw({error, {listen, Reason}});
+ ListenSocket ->
+ ListenSocket
+ end.
+
+
+%% acceptor
+
+acceptor(Manager, SocketType, ListenSocket, ConfigDb) ->
+ ?vdebug("await connection",[]),
+ case (catch httpd_socket:accept(SocketType, ListenSocket, 30000)) of
+ {error, Reason} ->
+ handle_error(Reason, ConfigDb, SocketType),
+ ?MODULE:acceptor(Manager, SocketType, ListenSocket, ConfigDb);
+
+ {'EXIT', Reason} ->
+ handle_error({'EXIT', Reason}, ConfigDb, SocketType),
+ ?MODULE:acceptor(Manager, SocketType, ListenSocket, ConfigDb);
+
+ Socket ->
+ handle_connection(Manager, ConfigDb, SocketType, Socket),
+ ?MODULE:acceptor(Manager, SocketType, ListenSocket, ConfigDb)
+ end.
+
+
+handle_connection(Manager, ConfigDb, SocketType, Socket) ->
+ case httpd_request_handler:start_link(Manager, ConfigDb) of
+ {ok, Pid} ->
+ httpd_socket:controlling_process(SocketType, Socket, Pid),
+ httpd_request_handler:synchronize(Pid, SocketType, Socket);
+ {error, Reason} ->
+ handle_connection_err(SocketType, Socket, ConfigDb, Reason)
+ end.
+
+
+handle_connection_err(SocketType, Socket, ConfigDb, Reason) ->
+ String =
+ lists:flatten(
+ io_lib:format("failed starting request handler:~n ~p", [Reason])),
+ report_error(ConfigDb, String),
+ httpd_socket:close(SocketType, Socket).
+
+
+handle_error(timeout, _, _) ->
+ ?vtrace("Accept timeout",[]),
+ ok;
+
+handle_error({enfile, _}, _, _) ->
+ ?vinfo("Accept error: enfile",[]),
+ %% Out of sockets...
+ sleep(200);
+
+handle_error(emfile, _, _) ->
+ ?vinfo("Accept error: emfile",[]),
+ %% Too many open files -> Out of sockets...
+ sleep(200);
+
+handle_error(closed, _, _) ->
+ ?vlog("Accept error: closed",[]),
+ %% This probably only means that the application is stopping,
+ %% but just in case
+ exit(closed);
+
+handle_error(econnaborted, _, _) ->
+ ?vlog("Accept aborted",[]),
+ ok;
+
+handle_error(esslaccept, _, _) ->
+ %% The user has chosen to cancel the installation of
+ %% the certificate. This is not a real error, so we do
+ %% not write an error message.
+ ok;
+
+handle_error({'EXIT', Reason}, ConfigDb, SocketType) ->
+ ?vinfo("Accept exit:~n ~p",[Reason]),
+ String = lists:flatten(io_lib:format("Accept exit: ~p", [Reason])),
+ accept_failed(SocketType, ConfigDb, String);
+
+handle_error(Reason, ConfigDb, SocketType) ->
+ ?vinfo("Accept error:~n ~p",[Reason]),
+ String = lists:flatten(io_lib:format("Accept error: ~p", [Reason])),
+ accept_failed(SocketType, ConfigDb, String).
+
+
+accept_failed(SocketType, ConfigDb, String) ->
+ error_logger:error_report(String),
+ mod_log:error_log(SocketType, undefined, ConfigDb,
+ {0, "unknown"}, String),
+ mod_disk_log:error_log(SocketType, undefined, ConfigDb,
+ {0, "unknown"}, String),
+ exit({accept_failed, String}).
+
+
+report_error(Db, String) ->
+ error_logger:error_report(String),
+ mod_log:report_error(Db, String),
+ mod_disk_log:report_error(Db, String).
+
+
+sleep(T) -> receive after T -> ok end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_acceptor_sup.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_acceptor_sup.erl
new file mode 100644
index 0000000000..86c31ad5df
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_acceptor_sup.erl
@@ -0,0 +1,116 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_acceptor_sup.erl,v 1.1 2008/12/17 09:53:33 mikpe Exp $
+%%
+%%----------------------------------------------------------------------
+%% Purpose: Supervisor for the acceptor processes of the httpd application
+%%----------------------------------------------------------------------
+
+-module(httpd_acceptor_sup).
+
+-behaviour(supervisor).
+
+-include("httpd_verbosity.hrl").
+
+%% public
+-export([start/3, stop/1, init/1]).
+
+-export([start_acceptor/4, stop_acceptor/2]).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% supervisor callback functions
+
+
+start(Addr, Port, AccSupVerbosity) ->
+ SupName = make_name(Addr, Port),
+ supervisor:start_link({local, SupName}, ?MODULE, [AccSupVerbosity]).
+
+stop(StartArgs) ->
+ ok.
+
+init([Verbosity]) -> % Supervisor
+ do_init(Verbosity);
+init(BadArg) ->
+ {error, {badarg, BadArg}}.
+
+do_init(Verbosity) ->
+ put(verbosity,?vvalidate(Verbosity)),
+ put(sname,acc_sup),
+ ?vlog("starting", []),
+ Flags = {one_for_one, 500, 100},
+ KillAfter = timer:seconds(1),
+ Workers = [],
+ {ok, {Flags, Workers}}.
+
+
+%%----------------------------------------------------------------------
+%% Function: start_acceptor/4, stop_acceptor/2
+%% Description: Starts and stops an acceptor worker (child) process
+%%----------------------------------------------------------------------
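+%%
+%% A hypothetical usage sketch (Addr, the port number 8080 and ConfigDb
+%% are made-up values; ConfigDb is the ETS table created by
+%% httpd_conf:store/1):
+%%
+%%   {ok, Pid} = httpd_acceptor_sup:start_acceptor(ip_comm, Addr, 8080, ConfigDb),
+%%   ...
+%%   httpd_acceptor_sup:stop_acceptor(Addr, 8080).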
+
+start_acceptor(SocketType, Addr, Port, ConfigDb) ->
+ Verbosity = get_acc_verbosity(),
+ start_worker(httpd_acceptor, SocketType, Addr, Port,
+ ConfigDb, Verbosity, self(), []).
+
+stop_acceptor(Addr, Port) ->
+ stop_worker(httpd_acceptor, Addr, Port).
+
+
+%%----------------------------------------------------------------------
+%% Function: start_worker/8
+%% Description: Starts a (permanent) worker (child) process
+%%----------------------------------------------------------------------
+
+start_worker(M, SocketType, Addr, Port, ConfigDB, Verbosity, Manager,
+ Modules) ->
+ SupName = make_name(Addr, Port),
+ Args = [Manager, SocketType, Addr, Port, ConfigDB, Verbosity],
+ Spec = {{M, Addr, Port},
+ {M, start_link, Args},
+ permanent, timer:seconds(1), worker, [M] ++ Modules},
+ supervisor:start_child(SupName, Spec).
+
+
+%%----------------------------------------------------------------------
+%% Function: stop_worker/3
+%% Description: Stops a permanent worker (child) process
+%%----------------------------------------------------------------------
+
+stop_worker(M, Addr, Port) ->
+ SupName = make_name(Addr, Port),
+ Name = {M, Addr, Port},
+ case supervisor:terminate_child(SupName, Name) of
+ ok ->
+ supervisor:delete_child(SupName, Name);
+ Error ->
+ Error
+ end.
+
+
+make_name(Addr,Port) ->
+ httpd_util:make_name("httpd_acc_sup",Addr,Port).
+
+
+
+get_acc_verbosity() ->
+ get_verbosity(get(acceptor_verbosity)).
+
+get_verbosity(undefined) ->
+ ?default_verbosity;
+get_verbosity(V) ->
+ ?vvalidate(V).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_conf.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_conf.erl
new file mode 100644
index 0000000000..69419b1eb3
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_conf.erl
@@ -0,0 +1,688 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_conf.erl,v 1.1 2008/12/17 09:53:33 mikpe Exp $
+%%
+-module(httpd_conf).
+-export([load/1, load_mime_types/1,
+ load/2, store/1, store/2,
+ remove_all/1, remove/1,
+ is_directory/1, is_file/1,
+ make_integer/1, clean/1, custom_clean/3, check_enum/2]).
+
+
+-define(VMODULE,"CONF").
+-include("httpd_verbosity.hrl").
+
+%% The configuration data is handled in three (3) phases:
+%% 1. Parse the config file and put all directives into a key-value
+%% tuple list (load/1).
+%% 2. Traverse the key-value tuple list and store it in an ETS table.
+%% Directives depending on other directives are taken care of here
+%% (store/1).
+%% 3. Traverse the ETS table and do a complete clean-up (remove/1).
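+%%
+%% A minimal sketch of the three phases (the file name and directive
+%% values below are hypothetical):
+%%
+%%   {ok, Config} = httpd_conf:load("/tmp/httpd.conf"),
+%%   %% Config is now a key-value tuple list, e.g.
+%%   %% [{port,8888},{server_name,"my.server"},...]
+%%   {ok, ConfigDB} = httpd_conf:store(Config), %% ETS table
+%%   ...
+%%   httpd_conf:remove_all(ConfigDB). %% complete clean-up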
+
+-include("httpd.hrl").
+
+%%
+%% Phase 1: Load
+%%
+
+%% load
+
+load(ConfigFile) ->
+ ?CDEBUG("load -> ConfigFile: ~p",[ConfigFile]),
+ case read_config_file(ConfigFile) of
+ {ok, Config} ->
+ case bootstrap(Config) of
+ {error, Reason} ->
+ {error, Reason};
+ {ok, Modules} ->
+ load_config(Config, lists:append(Modules, [?MODULE]))
+ end;
+ {error, Reason} ->
+ {error, ?NICE("Error while reading config file: "++Reason)}
+ end.
+
+
+bootstrap([]) ->
+ {error, ?NICE("Modules must be specified in the config file")};
+bootstrap([Line|Config]) ->
+ case Line of
+ [$M,$o,$d,$u,$l,$e,$s,$ |Modules] ->
+ {ok, ModuleList} = regexp:split(Modules," "),
+ TheMods = [list_to_atom(X) || X <- ModuleList],
+ case verify_modules(TheMods) of
+ ok ->
+ {ok, TheMods};
+ {error, Reason} ->
+ ?ERROR("bootstrap -> : validation failed: ~p",[Reason]),
+ {error, Reason}
+ end;
+ _ ->
+ bootstrap(Config)
+ end.
+
+
+%%
+%% verify_modules/1 -> ok | {error, Reason}
+%%
+%% Verifies that all specified modules are available.
+%%
+verify_modules([]) ->
+ ok;
+verify_modules([Mod|Rest]) ->
+ case code:which(Mod) of
+ non_existing ->
+ {error, ?NICE(atom_to_list(Mod)++" does not exist")};
+ Path ->
+ verify_modules(Rest)
+ end.
+
+%%
+%% read_config_file/1 -> {ok, [line(), line()..]} | {error, Reason}
+%%
+%% Reads the entire configuration file and returns a list of strings
+%% or an error.
+%%
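+%% For example (hypothetical content), a file containing
+%%
+%%   # my test server
+%%   Port 8888
+%%
+%%   ServerName my.server
+%%
+%% would yield {ok, ["Port 8888", "ServerName my.server"]}: comment
+%% lines and empty lines are dropped and each remaining line is
+%% stripped of surrounding whitespace.
+%%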
+
+
+read_config_file(FileName) ->
+ case file:open(FileName, [read]) of
+ {ok, Stream} ->
+ read_config_file(Stream, []);
+ {error, Reason} ->
+ {error, ?NICE("Cannot open "++FileName)}
+ end.
+
+read_config_file(Stream, SoFar) ->
+ case io:get_line(Stream, []) of
+ eof ->
+ {ok, lists:reverse(SoFar)};
+ {error, Reason} ->
+ {error, Reason};
+ [$#|Rest] ->
+ %% Ignore commented lines for efficiency later ..
+ read_config_file(Stream, SoFar);
+ Line ->
+ {ok, NewLine, _}=regexp:sub(clean(Line),"[\t\r\f ]"," "),
+ case NewLine of
+ [] ->
+ %% Also ignore empty lines ..
+ read_config_file(Stream, SoFar);
+ Other ->
+ read_config_file(Stream, [NewLine|SoFar])
+ end
+ end.
+
+is_exported(Module, ToFind) ->
+ Exports = Module:module_info(exports),
+ lists:member(ToFind, Exports).
+
+%%
+%% load/4 -> {ok, ConfigList} | {error, Reason}
+%%
+%% This loads the config file into each module specified by Modules.
+%% Each module has its own context that is passed to and (optionally)
+%% returned by the module's load function. The module can also return
+%% a ConfigEntry, which will be added to the global configuration
+%% list.
+%% All configuration directives are guaranteed to be passed to all
+%% modules. Each module only implements the function clauses of
+%% the load function for the configuration directives it supports,
+%% so it is ok if an apply returns {'EXIT', {function_clause, ..}}.
+%%
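+%% As an illustration only (FooBar is not a directive handled by this
+%% module), a module could implement a clause such as
+%%
+%%   load([$F,$o,$o,$B,$a,$r,$ |Value], Context) ->
+%%       {ok, Context, {foo_bar, httpd_conf:clean(Value)}};
+%%
+%% and any directive it does not recognize simply falls through as a
+%% function_clause exit, which is ignored below.
+%%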
+load_config(Config, Modules) ->
+ %% Create default contexts for all modules
+ Contexts = lists:duplicate(length(Modules), []),
+ load_config(Config, Modules, Contexts, []).
+
+
+load_config([], _Modules, _Contexts, ConfigList) ->
+ case a_must(ConfigList, [server_name,port,server_root,document_root]) of
+ ok ->
+ {ok, ConfigList};
+ {missing, Directive} ->
+ {error, ?NICE(atom_to_list(Directive)++
+ " must be specified in the config file")}
+ end;
+
+load_config([Line|Config], Modules, Contexts, ConfigList) ->
+ ?CDEBUG("load_config -> Line: ~p",[Line]),
+ case load_traverse(Line, Contexts, Modules, [], ConfigList, no) of
+ {ok, NewContexts, NewConfigList} ->
+ load_config(Config, Modules, NewContexts, NewConfigList);
+ {error, Reason} ->
+ ?ERROR("load_config -> traverse failed: ~p",[Reason]),
+ {error, Reason}
+ end.
+
+
+load_traverse(Line, [], [], NewContexts, ConfigList, no) ->
+ ?CDEBUG("load_traverse/no -> ~n"
+ " Line: ~p~n"
+ " NewContexts: ~p~n"
+ " ConfigList: ~p",
+ [Line,NewContexts,ConfigList]),
+ {error, ?NICE("Configuration directive not recognized: "++Line)};
+load_traverse(Line, [], [], NewContexts, ConfigList, yes) ->
+ ?CDEBUG("load_traverse/yes -> ~n"
+ " Line: ~p~n"
+ " NewContexts: ~p~n"
+ " ConfigList: ~p",
+ [Line,NewContexts,ConfigList]),
+ {ok, lists:reverse(NewContexts), ConfigList};
+load_traverse(Line, [Context|Contexts], [Module|Modules], NewContexts, ConfigList, State) ->
+ ?CDEBUG("load_traverse/~p -> ~n"
+ " Line: ~p~n"
+ " Module: ~p~n"
+ " Context: ~p~n"
+ " Contexts: ~p~n"
+ " NewContexts: ~p",
+ [State,Line,Module,Context,Contexts,NewContexts]),
+ case is_exported(Module, {load, 2}) of
+ true ->
+ ?CDEBUG("load_traverse -> ~p:load/2 exported",[Module]),
+ case catch apply(Module, load, [Line, Context]) of
+ {'EXIT', {function_clause, _}} ->
+ ?CDEBUG("load_traverse -> exit: function_clause"
+ "~n Module: ~p"
+ "~n Line: ~s",[Module,Line]),
+ load_traverse(Line, Contexts, Modules, [Context|NewContexts], ConfigList, State);
+ {'EXIT', Reason} ->
+ ?CDEBUG("load_traverse -> exit: ~p",[Reason]),
+ error_logger:error_report({'EXIT', Reason}),
+ load_traverse(Line, Contexts, Modules, [Context|NewContexts], ConfigList, State);
+ {ok, NewContext} ->
+ ?CDEBUG("load_traverse -> ~n"
+ " NewContext: ~p",[NewContext]),
+ load_traverse(Line, Contexts, Modules, [NewContext|NewContexts], ConfigList,yes);
+ {ok, NewContext, ConfigEntry} when tuple(ConfigEntry) ->
+ ?CDEBUG("load_traverse (tuple) -> ~n"
+ " NewContext: ~p~n"
+ " ConfigEntry: ~p",[NewContext,ConfigEntry]),
+ load_traverse(Line, Contexts, Modules, [NewContext|NewContexts],
+ [ConfigEntry|ConfigList], yes);
+ {ok, NewContext, ConfigEntry} when list(ConfigEntry) ->
+ ?CDEBUG("load_traverse (list) -> ~n"
+ " NewContext: ~p~n"
+ " ConfigEntry: ~p",[NewContext,ConfigEntry]),
+ load_traverse(Line, Contexts, Modules, [NewContext|NewContexts],
+ lists:append(ConfigEntry, ConfigList), yes);
+ {error, Reason} ->
+ ?CDEBUG("load_traverse -> error: ~p",[Reason]),
+ {error, Reason}
+ end;
+ false ->
+ ?CDEBUG("load_traverse -> ~p:load/2 not exported",[Module]),
+ load_traverse(Line, Contexts, Modules, [Context|NewContexts],
+ ConfigList,yes)
+ end.
+
+
+load(eof, []) ->
+ eof;
+
+load([$M,$a,$x,$H,$e,$a,$d,$e,$r,$S,$i,$z,$e,$ |MaxHeaderSize], []) ->
+ ?DEBUG("load -> MaxHeaderSize: ~p",[MaxHeaderSize]),
+ case make_integer(MaxHeaderSize) of
+ {ok, Integer} ->
+ {ok, [], {max_header_size,Integer}};
+ {error, _} ->
+ {error, ?NICE(clean(MaxHeaderSize)++
+ " is an invalid number of MaxHeaderSize")}
+ end;
+load([$M,$a,$x,$H,$e,$a,$d,$e,$r,$A,$c,$t,$i,$o,$n,$ |Action], []) ->
+ ?DEBUG("load -> MaxHeaderAction: ~p",[Action]),
+ {ok, [], {max_header_action,list_to_atom(clean(Action))}};
+load([$M,$a,$x,$B,$o,$d,$y,$S,$i,$z,$e,$ |MaxBodySize], []) ->
+ ?DEBUG("load -> MaxBodySize: ~p",[MaxBodySize]),
+ case make_integer(MaxBodySize) of
+ {ok, Integer} ->
+ {ok, [], {max_body_size,Integer}};
+ {error, _} ->
+ {error, ?NICE(clean(MaxBodySize)++
+ " is an invalid number of MaxBodySize")}
+ end;
+load([$M,$a,$x,$B,$o,$d,$y,$A,$c,$t,$i,$o,$n,$ |Action], []) ->
+ ?DEBUG("load -> MaxBodyAction: ~p",[Action]),
+ {ok, [], {max_body_action,list_to_atom(clean(Action))}};
+load([$S,$e,$r,$v,$e,$r,$N,$a,$m,$e,$ |ServerName], []) ->
+ ?DEBUG("load -> ServerName: ~p",[ServerName]),
+ {ok,[],{server_name,clean(ServerName)}};
+load([$S,$o,$c,$k,$e,$t,$T,$y,$p,$e,$ |SocketType], []) ->
+ ?DEBUG("load -> SocketType: ~p",[SocketType]),
+ case check_enum(clean(SocketType),["ssl","ip_comm"]) of
+ {ok, ValidSocketType} ->
+ {ok, [], {com_type,ValidSocketType}};
+ {error,_} ->
+ {error, ?NICE(clean(SocketType) ++ " is an invalid SocketType")}
+ end;
+load([$P,$o,$r,$t,$ |Port], []) ->
+ ?DEBUG("load -> Port: ~p",[Port]),
+ case make_integer(Port) of
+ {ok, Integer} ->
+ {ok, [], {port,Integer}};
+ {error, _} ->
+ {error, ?NICE(clean(Port)++" is an invalid Port")}
+ end;
+load([$B,$i,$n,$d,$A,$d,$d,$r,$e,$s,$s,$ |Address], []) ->
+ ?DEBUG("load -> Address: ~p",[Address]),
+ case clean(Address) of
+ "*" ->
+ {ok, [], {bind_address,any}};
+ CAddress ->
+ ?CDEBUG("load -> CAddress: ~p",[CAddress]),
+ case inet:getaddr(CAddress,inet) of
+ {ok, IPAddr} ->
+ ?CDEBUG("load -> IPAddr: ~p",[IPAddr]),
+ {ok, [], {bind_address,IPAddr}};
+ {error, _} ->
+ {error, ?NICE(CAddress++" is an invalid address")}
+ end
+ end;
+load([$K,$e,$e,$p,$A,$l,$i,$v,$e,$ |OnorOff], []) ->
+ case list_to_atom(clean(OnorOff)) of
+ off ->
+ {ok, [], {persistent_conn, false}};
+ _ ->
+ {ok, [], {persistent_conn, true}}
+ end;
+load([$M,$a,$x,$K,$e,$e,$p,$A,$l,$i,$v,$e,$R,$e,$q,$u,$e,$s,$t,$ |MaxRequests], []) ->
+ case make_integer(MaxRequests) of
+ {ok, Integer} ->
+ {ok, [], {max_keep_alive_request, Integer}};
+ {error, _} ->
+ {error, ?NICE(clean(MaxRequests)++" is an invalid MaxKeepAliveRequest")}
+ end;
+load([$K,$e,$e,$p,$A,$l,$i,$v,$e,$T,$i,$m,$e,$o,$u,$t,$ |Timeout], []) ->
+ case make_integer(Timeout) of
+ {ok, Integer} ->
+ {ok, [], {keep_alive_timeout, Integer*1000}};
+ {error, _} ->
+ {error, ?NICE(clean(Timeout)++" is an invalid KeepAliveTimeout")}
+ end;
+load([$M,$o,$d,$u,$l,$e,$s,$ |Modules], []) ->
+ {ok, ModuleList} = regexp:split(Modules," "),
+ {ok, [], {modules,[list_to_atom(X) || X <- ModuleList]}};
+load([$S,$e,$r,$v,$e,$r,$A,$d,$m,$i,$n,$ |ServerAdmin], []) ->
+ {ok, [], {server_admin,clean(ServerAdmin)}};
+load([$S,$e,$r,$v,$e,$r,$R,$o,$o,$t,$ |ServerRoot], []) ->
+ case is_directory(clean(ServerRoot)) of
+ {ok, Directory} ->
+ MimeTypesFile =
+ filename:join([clean(ServerRoot),"conf", "mime.types"]),
+ case load_mime_types(MimeTypesFile) of
+ {ok, MimeTypesList} ->
+ {ok, [], [{server_root,string:strip(Directory,right,$/)},
+ {mime_types,MimeTypesList}]};
+ {error, Reason} ->
+ {error, Reason}
+ end;
+ {error, _} ->
+ {error, ?NICE(clean(ServerRoot)++" is an invalid ServerRoot")}
+ end;
+load([$M,$a,$x,$C,$l,$i,$e,$n,$t,$s,$ |MaxClients], []) ->
+ ?DEBUG("load -> MaxClients: ~p",[MaxClients]),
+ case make_integer(MaxClients) of
+ {ok, Integer} ->
+ {ok, [], {max_clients,Integer}};
+ {error, _} ->
+ {error, ?NICE(clean(MaxClients)++" is an invalid number of MaxClients")}
+ end;
+load([$D,$o,$c,$u,$m,$e,$n,$t,$R,$o,$o,$t,$ |DocumentRoot],[]) ->
+ case is_directory(clean(DocumentRoot)) of
+ {ok, Directory} ->
+ {ok, [], {document_root,string:strip(Directory,right,$/)}};
+ {error, _} ->
+ {error, ?NICE(clean(DocumentRoot)++" is an invalid DocumentRoot")}
+ end;
+load([$D,$e,$f,$a,$u,$l,$t,$T,$y,$p,$e,$ |DefaultType], []) ->
+ {ok, [], {default_type,clean(DefaultType)}};
+load([$S,$S,$L,$C,$e,$r,$t,$i,$f,$i,$c,$a,$t,$e,$F,$i,$l,$e,$ | SSLCertificateFile], []) ->
+ ?DEBUG("load -> SSLCertificateFile: ~p",[SSLCertificateFile]),
+ case is_file(clean(SSLCertificateFile)) of
+ {ok, File} ->
+ {ok, [], {ssl_certificate_file,File}};
+ {error, _} ->
+ {error, ?NICE(clean(SSLCertificateFile)++
+ " is an invalid SSLCertificateFile")}
+ end;
+load([$S,$S,$L,$C,$e,$r,$t,$i,$f,$i,$c,$a,$t,$e,$K,$e,$y,$F,$i,$l,$e,$ |
+ SSLCertificateKeyFile], []) ->
+ ?DEBUG("load -> SSLCertificateKeyFile: ~p",[SSLCertificateKeyFile]),
+ case is_file(clean(SSLCertificateKeyFile)) of
+ {ok, File} ->
+ {ok, [], {ssl_certificate_key_file,File}};
+ {error, _} ->
+ {error, ?NICE(clean(SSLCertificateKeyFile)++
+ " is an invalid SSLCertificateKeyFile")}
+ end;
+load([$S,$S,$L,$V,$e,$r,$i,$f,$y,$C,$l,$i,$e,$n,$t,$ |SSLVerifyClient], []) ->
+ ?DEBUG("load -> SSLVerifyClient: ~p",[SSLVerifyClient]),
+ case make_integer(clean(SSLVerifyClient)) of
+ {ok, Integer} when Integer >=0,Integer =< 2 ->
+ {ok, [], {ssl_verify_client,Integer}};
+ {ok, Integer} ->
+ {error,?NICE(clean(SSLVerifyClient)++" is an invalid SSLVerifyClient")};
+ {error, nomatch} ->
+ {error,?NICE(clean(SSLVerifyClient)++" is an invalid SSLVerifyClient")}
+ end;
+load([$S,$S,$L,$V,$e,$r,$i,$f,$y,$D,$e,$p,$t,$h,$ |
+ SSLVerifyDepth], []) ->
+ ?DEBUG("load -> SSLVerifyDepth: ~p",[SSLVerifyDepth]),
+ case make_integer(clean(SSLVerifyDepth)) of
+ {ok, Integer} when Integer > 0 ->
+ {ok, [], {ssl_verify_client_depth,Integer}};
+ {ok, Integer} ->
+ {error,?NICE(clean(SSLVerifyDepth) ++
+ " is an invalid SSLVerifyDepth")};
+ {error, nomatch} ->
+ {error,?NICE(clean(SSLVerifyDepth) ++
+ " is an invalid SSLVerifyDepth")}
+ end;
+load([$S,$S,$L,$C,$i,$p,$h,$e,$r,$s,$ | SSLCiphers], []) ->
+ ?DEBUG("load -> SSLCiphers: ~p",[SSLCiphers]),
+ {ok, [], {ssl_ciphers, clean(SSLCiphers)}};
+load([$S,$S,$L,$C,$A,$C,$e,$r,$t,$i,$f,$i,$c,$a,$t,$e,$F,$i,$l,$e,$ |
+ SSLCACertificateFile], []) ->
+ case is_file(clean(SSLCACertificateFile)) of
+ {ok, File} ->
+ {ok, [], {ssl_ca_certificate_file,File}};
+ {error, _} ->
+ {error, ?NICE(clean(SSLCACertificateFile)++
+ " is an invalid SSLCACertificateFile")}
+ end;
+load([$S,$S,$L,$P,$a,$s,$s,$w,$o,$r,$d,$C,$a,$l,$l,$b,$a,$c,$k,$M,$o,$d,$u,$l,$e,$ | SSLPasswordCallbackModule], []) ->
+ ?DEBUG("load -> SSLPasswordCallbackModule: ~p",
+ [SSLPasswordCallbackModule]),
+ {ok, [], {ssl_password_callback_module,
+ list_to_atom(clean(SSLPasswordCallbackModule))}};
+load([$S,$S,$L,$P,$a,$s,$s,$w,$o,$r,$d,$C,$a,$l,$l,$b,$a,$c,$k,$F,$u,$n,$c,$t,$i,$o,$n,$ | SSLPasswordCallbackFunction], []) ->
+ ?DEBUG("load -> SSLPasswordCallbackFunction: ~p",
+ [SSLPasswordCallbackFunction]),
+ {ok, [], {ssl_password_callback_function,
+ list_to_atom(clean(SSLPasswordCallbackFunction))}}.
+
+
+%%
+%% load_mime_types/1 -> {ok, MimeTypes} | {error, Reason}
+%%
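+%% The file is expected to contain one MIME type followed by its
+%% suffixes per line, for example (hypothetical content):
+%%
+%%   # comment lines are skipped
+%%   text/html html htm
+%%   image/png png
+%%
+%% which would be returned as a list of {Suffix, MimeType} tuples, e.g.
+%%   {ok, [{"png","image/png"},{"html","text/html"},{"htm","text/html"}]}
+%%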
+load_mime_types(MimeTypesFile) ->
+ case file:open(MimeTypesFile, [read]) of
+ {ok, Stream} ->
+ parse_mime_types(Stream, []);
+ {error, _} ->
+ {error, ?NICE("Can't open " ++ MimeTypesFile)}
+ end.
+
+parse_mime_types(Stream,MimeTypesList) ->
+ Line=
+ case io:get_line(Stream,'') of
+ eof ->
+ eof;
+ String ->
+ clean(String)
+ end,
+ parse_mime_types(Stream, MimeTypesList, Line).
+
+parse_mime_types(Stream, MimeTypesList, eof) ->
+ file:close(Stream),
+ {ok, MimeTypesList};
+parse_mime_types(Stream, MimeTypesList, "") ->
+ parse_mime_types(Stream, MimeTypesList);
+parse_mime_types(Stream, MimeTypesList, [$#|_]) ->
+ parse_mime_types(Stream, MimeTypesList);
+parse_mime_types(Stream, MimeTypesList, Line) ->
+ case regexp:split(Line, " ") of
+ {ok, [NewMimeType|Suffixes]} ->
+ parse_mime_types(Stream,lists:append(suffixes(NewMimeType,Suffixes),
+ MimeTypesList));
+ {ok, _} ->
+ {error, ?NICE(Line)}
+ end.
+
+suffixes(MimeType,[]) ->
+ [];
+suffixes(MimeType,[Suffix|Rest]) ->
+ [{Suffix,MimeType}|suffixes(MimeType,Rest)].
+
+%%
+%% Phase 2: Store
+%%
+
+%% store
+
+store(ConfigList) ->
+ Modules = httpd_util:key1search(ConfigList, modules, []),
+ Port = httpd_util:key1search(ConfigList, port),
+ Addr = httpd_util:key1search(ConfigList,bind_address),
+ Name = httpd_util:make_name("httpd_conf",Addr,Port),
+ ?CDEBUG("store -> Name = ~p",[Name]),
+ ConfigDB = ets:new(Name, [named_table, bag, protected]),
+ ?CDEBUG("store -> ConfigDB = ~p",[ConfigDB]),
+ store(ConfigDB, ConfigList, lists:append(Modules,[?MODULE]),ConfigList).
+
+store(ConfigDB, ConfigList, Modules,[]) ->
+ ?vtrace("store -> done",[]),
+ ?CDEBUG("store -> done",[]),
+ {ok, ConfigDB};
+store(ConfigDB, ConfigList, Modules, [ConfigListEntry|Rest]) ->
+ ?vtrace("store -> entry with"
+ "~n ConfigListEntry: ~p",[ConfigListEntry]),
+ ?CDEBUG("store -> "
+ "~n ConfigListEntry: ~p",[ConfigListEntry]),
+ case store_traverse(ConfigListEntry,ConfigList,Modules) of
+ {ok, ConfigDBEntry} when tuple(ConfigDBEntry) ->
+ ?vtrace("store -> ConfigDBEntry(tuple): "
+ "~n ~p",[ConfigDBEntry]),
+ ?CDEBUG("store -> ConfigDBEntry(tuple): "
+ "~n ~p",[ConfigDBEntry]),
+ ets:insert(ConfigDB,ConfigDBEntry),
+ store(ConfigDB,ConfigList,Modules,Rest);
+ {ok, ConfigDBEntry} when list(ConfigDBEntry) ->
+ ?vtrace("store -> ConfigDBEntry(list): "
+ "~n ~p",[ConfigDBEntry]),
+ ?CDEBUG("store -> ConfigDBEntry(list): "
+ "~n ~p",[ConfigDBEntry]),
+ lists:foreach(fun(Entry) ->
+ ets:insert(ConfigDB,Entry)
+ end,ConfigDBEntry),
+ store(ConfigDB,ConfigList,Modules,Rest);
+ {error, Reason} ->
+ ?vlog("store -> error: ~p",[Reason]),
+ ?ERROR("store -> error: ~p",[Reason]),
+ {error,Reason}
+ end.
+
+store_traverse(ConfigListEntry,ConfigList,[]) ->
+ {error,?NICE("Unable to store configuration...")};
+store_traverse(ConfigListEntry, ConfigList, [Module|Rest]) ->
+ case is_exported(Module, {store, 2}) of
+ true ->
+ ?CDEBUG("store_traverse -> call ~p:store/2",[Module]),
+ case catch apply(Module,store,[ConfigListEntry, ConfigList]) of
+ {'EXIT',{function_clause,_}} ->
+ ?CDEBUG("store_traverse -> exit: function_clause",[]),
+ store_traverse(ConfigListEntry,ConfigList,Rest);
+ {'EXIT',Reason} ->
+ ?ERROR("store_traverse -> exit: ~p",[Reason]),
+ error_logger:error_report({'EXIT',Reason}),
+ store_traverse(ConfigListEntry,ConfigList,Rest);
+ Result ->
+ ?CDEBUG("store_traverse -> ~n"
+ " Result: ~p",[Result]),
+ Result
+ end;
+ false ->
+ store_traverse(ConfigListEntry,ConfigList,Rest)
+ end.
+
+store({mime_types,MimeTypesList},ConfigList) ->
+ Port = httpd_util:key1search(ConfigList, port),
+ Addr = httpd_util:key1search(ConfigList, bind_address),
+ Name = httpd_util:make_name("httpd_mime",Addr,Port),
+ ?CDEBUG("store(mime_types) -> Name: ~p",[Name]),
+ {ok, MimeTypesDB} = store_mime_types(Name,MimeTypesList),
+ ?CDEBUG("store(mime_types) -> ~n"
+ " MimeTypesDB: ~p~n"
+ " MimeTypesDB info: ~p",
+ [MimeTypesDB,ets:info(MimeTypesDB)]),
+ {ok, {mime_types,MimeTypesDB}};
+store(ConfigListEntry,ConfigList) ->
+ ?CDEBUG("store/2 -> ~n"
+ " ConfigListEntry: ~p~n"
+ " ConfigList: ~p",
+ [ConfigListEntry,ConfigList]),
+ {ok, ConfigListEntry}.
+
+
+%% store_mime_types
+store_mime_types(Name,MimeTypesList) ->
+ ?CDEBUG("store_mime_types -> Name: ~p",[Name]),
+ MimeTypesDB = ets:new(Name, [set, protected]),
+ ?CDEBUG("store_mime_types -> MimeTypesDB: ~p",[MimeTypesDB]),
+ store_mime_types1(MimeTypesDB, MimeTypesList).
+
+store_mime_types1(MimeTypesDB,[]) ->
+ {ok, MimeTypesDB};
+store_mime_types1(MimeTypesDB,[Type|Rest]) ->
+ ?CDEBUG("store_mime_types1 -> Type: ~p",[Type]),
+ ets:insert(MimeTypesDB, Type),
+ store_mime_types1(MimeTypesDB, Rest).
+
+
+%%
+%% Phase 3: Remove
+%%
+
+remove_all(ConfigDB) ->
+ Modules = httpd_util:lookup(ConfigDB,modules,[]),
+ remove_traverse(ConfigDB, lists:append(Modules,[?MODULE])).
+
+remove_traverse(ConfigDB,[]) ->
+ ?vtrace("remove_traverse -> done", []),
+ ok;
+remove_traverse(ConfigDB,[Module|Rest]) ->
+ ?vtrace("remove_traverse -> call ~p:remove", [Module]),
+ case (catch apply(Module,remove,[ConfigDB])) of
+ {'EXIT',{undef,_}} ->
+ ?vtrace("remove_traverse -> undef", []),
+ remove_traverse(ConfigDB,Rest);
+ {'EXIT',{function_clause,_}} ->
+ ?vtrace("remove_traverse -> function_clause", []),
+ remove_traverse(ConfigDB,Rest);
+ {'EXIT',Reason} ->
+ ?vtrace("remove_traverse -> exit: ~p", [Reason]),
+ error_logger:error_report({'EXIT',Reason}),
+ remove_traverse(ConfigDB,Rest);
+ {error,Reason} ->
+ ?vtrace("remove_traverse -> error: ~p", [Reason]),
+ error_logger:error_report(Reason),
+ remove_traverse(ConfigDB,Rest);
+ _ ->
+ remove_traverse(ConfigDB,Rest)
+ end.
+
+remove(ConfigDB) ->
+ ets:delete(ConfigDB),
+ ok.
+
+
+%%
+%% Utility functions
+%%
+
+%% is_directory
+
+is_directory(Directory) ->
+ case file:read_file_info(Directory) of
+ {ok,FileInfo} ->
+ #file_info{type = Type, access = Access} = FileInfo,
+ is_directory(Type,Access,FileInfo,Directory);
+ {error,Reason} ->
+ {error,Reason}
+ end.
+
+is_directory(directory,read,_FileInfo,Directory) ->
+ {ok,Directory};
+is_directory(directory,read_write,_FileInfo,Directory) ->
+ {ok,Directory};
+is_directory(_Type,_Access,FileInfo,_Directory) ->
+ {error,FileInfo}.
+
+
+%% is_file
+
+is_file(File) ->
+ case file:read_file_info(File) of
+ {ok,FileInfo} ->
+ #file_info{type = Type, access = Access} = FileInfo,
+ is_file(Type,Access,FileInfo,File);
+ {error,Reason} ->
+ {error,Reason}
+ end.
+
+is_file(regular,read,_FileInfo,File) ->
+ {ok,File};
+is_file(regular,read_write,_FileInfo,File) ->
+ {ok,File};
+is_file(_Type,_Access,FileInfo,_File) ->
+ {error,FileInfo}.
+
+%% make_integer
+
+make_integer(String) ->
+ case regexp:match(clean(String),"[0-9]+") of
+ {match, _, _} ->
+ {ok, list_to_integer(clean(String))};
+ nomatch ->
+ {error, nomatch}
+ end.
+
+
+%% clean
+
+clean(String) ->
+ {ok,CleanedString,_} = regexp:gsub(String, "^[ \t\n\r\f]*|[ \t\n\r\f]*\$",""),
+ CleanedString.
+
+%% custom_clean
+
+custom_clean(String,MoreBefore,MoreAfter) ->
+ {ok,CleanedString,_}=regexp:gsub(String,"^[ \t\n\r\f"++MoreBefore++
+ "]*|[ \t\n\r\f"++MoreAfter++"]*\$",""),
+ CleanedString.
+
+%% check_enum
+
+check_enum(Enum,[]) ->
+ {error, not_valid};
+check_enum(Enum,[Enum|Rest]) ->
+ {ok, list_to_atom(Enum)};
+check_enum(Enum, [NotValid|Rest]) ->
+ check_enum(Enum, Rest).
+
+%% a_must
+
+a_must(ConfigList,[]) ->
+ ok;
+a_must(ConfigList,[Directive|Rest]) ->
+ case httpd_util:key1search(ConfigList,Directive) of
+ undefined ->
+ {missing,Directive};
+ _ ->
+ a_must(ConfigList,Rest)
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_example.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_example.erl
new file mode 100644
index 0000000000..4aec440db3
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_example.erl
@@ -0,0 +1,134 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_example.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+-module(httpd_example).
+-export([print/1]).
+-export([get/2, post/2, yahoo/2, test1/2]).
+
+-export([newformat/3]).
+%% These are used by the inets test-suite
+-export([delay/1]).
+
+
+print(String) ->
+ [header(),
+ top("Print"),
+ String++"\n",
+ footer()].
+
+
+test1(Env, []) ->
+ io:format("Env:~p~n",[Env]),
+ ["<html>",
+ "<head>",
+ "<title>Test1</title>",
+ "</head>",
+ "<body>",
+ "<h1>Erlang Body</h1>",
+ "<h2>Stuff</h2>",
+ "</body>",
+ "</html>"].
+
+
+get(Env,[]) ->
+ [header(),
+ top("GET Example"),
+ "<FORM ACTION=\"/cgi-bin/erl/httpd_example:get\" METHOD=GET>
+<B>Input:</B> <INPUT TYPE=\"text\" NAME=\"input1\">
+<INPUT TYPE=\"text\" NAME=\"input2\">
+<INPUT TYPE=\"submit\"><BR>
+</FORM>" ++ "\n",
+ footer()];
+
+get(Env,Input) ->
+ default(Env,Input).
+
+post(Env,[]) ->
+ [header(),
+ top("POST Example"),
+ "<FORM ACTION=\"/cgi-bin/erl/httpd_example:post\" METHOD=POST>
+<B>Input:</B> <INPUT TYPE=\"text\" NAME=\"input1\">
+<INPUT TYPE=\"text\" NAME=\"input2\">
+<INPUT TYPE=\"submit\"><BR>
+</FORM>" ++ "\n",
+ footer()];
+
+post(Env,Input) ->
+ default(Env,Input).
+
+yahoo(Env,Input) ->
+ "Location: http://www.yahoo.com\r\n\r\n".
+
+default(Env,Input) ->
+ [header(),
+ top("Default Example"),
+ "<B>Environment:</B> ",io_lib:format("~p",[Env]),"<BR>\n",
+ "<B>Input:</B> ",Input,"<BR>\n",
+ "<B>Parsed Input:</B> ",
+ io_lib:format("~p",[httpd:parse_query(Input)]),"\n",
+ footer()].
+
+header() ->
+ header("text/html").
+header(MimeType) ->
+ "Content-type: " ++ MimeType ++ "\r\n\r\n".
+
+top(Title) ->
+ "<HTML>
+<HEAD>
+<TITLE>" ++ Title ++ "</TITLE>
+</HEAD>
+<BODY>\n".
+
+footer() ->
+ "</BODY>
+</HTML>\n".
+
+
+newformat(SessionID,Env,Input)->
+ mod_esi:deliver(SessionID,"Content-Type:text/html\r\n\r\n"),
+ mod_esi:deliver(SessionID,top("new esi format test")),
+ mod_esi:deliver(SessionID,"This new format is nice<BR>"),
+ mod_esi:deliver(SessionID,"This new format is nice<BR>"),
+ mod_esi:deliver(SessionID,"This new format is nice<BR>"),
+ mod_esi:deliver(SessionID,footer()).
+
+%% ------------------------------------------------------
+
+delay(Time) when integer(Time) ->
+ i("httpd_example:delay(~p) -> do the delay",[Time]),
+ sleep(Time),
+ i("httpd_example:delay(~p) -> done, now reply",[Time]),
+ delay_reply("delay ok");
+delay(Time) when list(Time) ->
+ delay(httpd_conf:make_integer(Time));
+delay({ok,Time}) when integer(Time) ->
+ delay(Time);
+delay({error,_Reason}) ->
+ i("delay -> called with invalid time"),
+ delay_reply("delay failed: invalid delay time").
+
+delay_reply(Reply) ->
+ [header(),
+ top("delay"),
+ Reply,
+ footer()].
+
+i(F) -> i(F,[]).
+i(F,A) -> io:format(F ++ "~n",A).
+
+sleep(T) -> receive after T -> ok end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl
new file mode 100644
index 0000000000..704cb1f319
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl
@@ -0,0 +1,1029 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_manager.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+
+-module(httpd_manager).
+
+-include("httpd.hrl").
+-include("httpd_verbosity.hrl").
+
+-behaviour(gen_server).
+
+%% External API
+-export([start/2, start/3, start_link/2, start_link/3, stop/1, restart/1]).
+
+%% Internal API
+-export([new_connection/1, done_connection/1]).
+
+%% Module API
+-export([config_lookup/2, config_lookup/3,
+ config_multi_lookup/2, config_multi_lookup/3,
+ config_match/2, config_match/3]).
+
+%% gen_server exports
+-export([init/1,
+ handle_call/3, handle_cast/2, handle_info/2,
+ terminate/2,
+ code_change/3]).
+
+
+%% Management exports
+-export([block/2, block/3, unblock/1]).
+-export([get_admin_state/1, get_usage_state/1]).
+-export([is_busy/1,is_busy/2,is_busy_or_blocked/1,is_blocked/1]). %% ???????
+-export([get_status/1, get_status/2]).
+-export([verbosity/2, verbosity/3]).
+
+
+-export([c/1]).
+
+-record(state,{socket_type = ip_comm,
+ config_file,
+ config_db = null,
+ connections, %% Current request handlers
+ admin_state = unblocked,
+ blocker_ref = undefined,
+ blocking_tmr = undefined,
+ status = []}).
+
+
+c(Port) ->
+ Ref = httpd_util:make_name("httpd",undefined,Port),
+ gen_server:call(Ref, fake_close).
+
+
+%%
+%% External API
+%%
+
+start(ConfigFile, ConfigList) ->
+ start(ConfigFile, ConfigList, []).
+
+start(ConfigFile, ConfigList, Verbosity) ->
+ Port = httpd_util:key1search(ConfigList,port,80),
+ Addr = httpd_util:key1search(ConfigList,bind_address),
+ Name = make_name(Addr,Port),
+ ?LOG("start -> Name = ~p",[Name]),
+ gen_server:start({local,Name},?MODULE,
+ [ConfigFile, ConfigList, Addr, Port, Verbosity],[]).
+
+start_link(ConfigFile, ConfigList) ->
+ start_link(ConfigFile, ConfigList, []).
+
+start_link(ConfigFile, ConfigList, Verbosity) ->
+ Port = httpd_util:key1search(ConfigList,port,80),
+ Addr = httpd_util:key1search(ConfigList,bind_address),
+ Name = make_name(Addr,Port),
+ ?LOG("start_link -> Name = ~p",[Name]),
+ gen_server:start_link({local, Name},?MODULE,
+ [ConfigFile, ConfigList, Addr, Port, Verbosity],[]).
+
+%% stop
+
+stop(ServerRef) ->
+ gen_server:call(ServerRef, stop).
+
+%% restart
+
+restart(ServerRef) ->
+ gen_server:call(ServerRef, restart).
+
+
+%%%----------------------------------------------------------------
+
+block(ServerRef, disturbing) ->
+ call(ServerRef,block);
+
+block(ServerRef, non_disturbing) ->
+ do_block(ServerRef, non_disturbing, infinity).
+
+block(ServerRef, Method, Timeout) ->
+ do_block(ServerRef, Method, Timeout).
+
+
+%% The reason for not using call here is that the manager cannot
+%% _wait_ for completion of the requests. It must be able to do
+%% other things at the same time as the blocking goes on.
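+%%
+%% A hypothetical caller would typically use the exported wrappers:
+%%
+%%   ok = httpd_manager:block(Server, non_disturbing),
+%%   %% ... perform maintenance ...
+%%   ok = httpd_manager:unblock(Server).
+%%
+%% A bounded non-disturbing block, block(Server, non_disturbing, 10000),
+%% replies {error, timeout} if the request handlers have not finished
+%% within ten seconds.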
+do_block(ServerRef, Method, infinity) ->
+ Ref = make_ref(),
+ cast(ServerRef, {block, Method, infinity, self(), Ref}),
+ receive
+ {block_reply, Reply, Ref} ->
+ Reply
+ end;
+do_block(ServerRef,Method,Timeout) when Timeout > 0 ->
+ Ref = make_ref(),
+ cast(ServerRef,{block,Method,Timeout,self(),Ref}),
+ receive
+ {block_reply,Reply,Ref} ->
+ Reply
+ end.
+
+
+%%%----------------------------------------------------------------
+
+%% unblock
+
+unblock(ServerRef) ->
+ call(ServerRef,unblock).
+
+%% get admin/usage state
+
+get_admin_state(ServerRef) ->
+ call(ServerRef,get_admin_state).
+
+get_usage_state(ServerRef) ->
+ call(ServerRef,get_usage_state).
+
+
+%% get_status
+
+get_status(ServerRef) ->
+ gen_server:call(ServerRef,get_status).
+
+get_status(ServerRef,Timeout) ->
+ gen_server:call(ServerRef,get_status,Timeout).
+
+
+verbosity(ServerRef,Verbosity) ->
+ verbosity(ServerRef,all,Verbosity).
+
+verbosity(ServerRef,all,Verbosity) ->
+ gen_server:call(ServerRef,{verbosity,all,Verbosity});
+verbosity(ServerRef,manager,Verbosity) ->
+ gen_server:call(ServerRef,{verbosity,manager,Verbosity});
+verbosity(ServerRef,request,Verbosity) ->
+ gen_server:call(ServerRef,{verbosity,request,Verbosity});
+verbosity(ServerRef,acceptor,Verbosity) ->
+ gen_server:call(ServerRef,{verbosity,acceptor,Verbosity});
+verbosity(ServerRef,security,Verbosity) ->
+ gen_server:call(ServerRef,{verbosity,security,Verbosity});
+verbosity(ServerRef,auth,Verbosity) ->
+ gen_server:call(ServerRef,{verbosity,auth,Verbosity}).
+
+%%
+%% Internal API
+%%
+
+
+%% new_connection
+
+new_connection(Manager) ->
+ gen_server:call(Manager, {new_connection, self()}).
+
+%% done
+
+done_connection(Manager) ->
+ gen_server:cast(Manager, {done_connection, self()}).
+
+
+%% is_busy(ServerRef) -> true | false
+%%
+%% Tests if the server is (in usage state) busy,
+%% i.e. has reached the heavy load limit.
+%%
+
+is_busy(ServerRef) ->
+ gen_server:call(ServerRef,is_busy).
+
+is_busy(ServerRef,Timeout) ->
+ gen_server:call(ServerRef,is_busy,Timeout).
+
+
+%% is_busy_or_blocked(ServerRef) -> busy | blocked | false
+%%
+%% Tests if the server is busy (usage state), i.e. has reached
+%% the heavy load limit, or blocked (admin state).
+%%
+
+is_busy_or_blocked(ServerRef) ->
+ gen_server:call(ServerRef,is_busy_or_blocked).
+
+
+%% is_blocked(ServerRef) -> true | false
+%%
+%% Tests if the server is blocked (admin state).
+%%
+
+is_blocked(ServerRef) ->
+ gen_server:call(ServerRef,is_blocked).
+
+
+%%
+%% Module API. These functions are intended for use from modules only.
+%%
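+%% For example, a module running in the server on Addr/Port could read
+%% single directives like this (illustration only; the port value is
+%% hypothetical):
+%%
+%%   Root = httpd_manager:config_lookup(8888, server_root),
+%%   Mods = httpd_manager:config_lookup(undefined, 8888, modules),
+%%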
+
+config_lookup(Port, Query) ->
+ config_lookup(undefined, Port, Query).
+config_lookup(Addr, Port, Query) ->
+ Name = httpd_util:make_name("httpd",Addr,Port),
+ gen_server:call(whereis(Name), {config_lookup, Query}).
+
+config_multi_lookup(Port, Query) ->
+ config_multi_lookup(undefined,Port,Query).
+config_multi_lookup(Addr,Port, Query) ->
+ Name = httpd_util:make_name("httpd",Addr,Port),
+ gen_server:call(whereis(Name), {config_multi_lookup, Query}).
+
+config_match(Port, Pattern) ->
+ config_match(undefined,Port,Pattern).
+config_match(Addr, Port, Pattern) ->
+ Name = httpd_util:make_name("httpd",Addr,Port),
+ gen_server:call(whereis(Name), {config_match, Pattern}).
+
+
+%%
+%% Server call-back functions
+%%
+
+%% init
+
+init([ConfigFile, ConfigList, Addr, Port, Verbosity]) ->
+ process_flag(trap_exit, true),
+ case (catch do_init(ConfigFile, ConfigList, Addr, Port, Verbosity)) of
+ {error, Reason} ->
+ ?vlog("failed starting server: ~p", [Reason]),
+ {stop, Reason};
+ {ok, State} ->
+ {ok, State}
+ end.
+
+
+do_init(ConfigFile, ConfigList, Addr, Port, Verbosity) ->
+ put(sname,man),
+ set_verbosity(Verbosity),
+ ?vlog("starting",[]),
+ ConfigDB = do_initial_store(ConfigList),
+ ?vtrace("config db: ~p", [ConfigDB]),
+ SocketType = httpd_socket:config(ConfigDB),
+ ?vtrace("socket type: ~p, now start acceptor", [SocketType]),
+ case httpd_acceptor_sup:start_acceptor(SocketType, Addr, Port, ConfigDB) of
+ {ok, Pid} ->
+ ?vtrace("acceptor started: ~p", [Pid]),
+ Status = [{max_conn,0}, {last_heavy_load,never},
+ {last_connection,never}],
+ State = #state{socket_type = SocketType,
+ config_file = ConfigFile,
+ config_db = ConfigDB,
+ connections = [],
+ status = Status},
+ ?vdebug("started",[]),
+ {ok, State};
+ Else ->
+ Else
+ end.
+
+
+do_initial_store(ConfigList) ->
+ case httpd_conf:store(ConfigList) of
+ {ok, ConfigDB} ->
+ ConfigDB;
+ {error, Reason} ->
+ ?vinfo("failed storing configuration: ~p",[Reason]),
+ throw({error, Reason})
+ end.
+
+
+
+%% handle_call
+
+handle_call(stop, _From, State) ->
+ ?vlog("stop",[]),
+ {stop, normal, ok, State};
+
+handle_call({config_lookup, Query}, _From, State) ->
+ ?vlog("config lookup: Query = ~p",[Query]),
+ Res = httpd_util:lookup(State#state.config_db, Query),
+ ?vdebug("config lookup result: ~p",[Res]),
+ {reply, Res, State};
+
+handle_call({config_multi_lookup, Query}, _From, State) ->
+ ?vlog("multi config lookup: Query = ~p",[Query]),
+ Res = httpd_util:multi_lookup(State#state.config_db, Query),
+ ?vdebug("multi config lookup result: ~p",[Res]),
+ {reply, Res, State};
+
+handle_call({config_match, Query}, _From, State) ->
+ ?vlog("config match: Query = ~p",[Query]),
+ Res = ets:match_object(State#state.config_db, Query),
+ ?vdebug("config match result: ~p",[Res]),
+ {reply, Res, State};
+
+handle_call(get_status, _From, State) ->
+ ?vdebug("get status",[]),
+ ManagerStatus = manager_status(self()),
+ %% AuthStatus = auth_status(get(auth_server)),
+ %% SecStatus = sec_status(get(sec_server)),
+ %% AccStatus = sec_status(get(acceptor_server)),
+ S1 = [{current_conn,length(State#state.connections)}|State#state.status]++
+ [ManagerStatus],
+ ?vtrace("status = ~p",[S1]),
+ {reply,S1,State};
+
+handle_call(is_busy, From, State) ->
+ Reply = case get_ustate(State) of
+ busy ->
+ true;
+ _ ->
+ false
+ end,
+ ?vlog("is busy: ~p",[Reply]),
+ {reply,Reply,State};
+
+handle_call(is_busy_or_blocked, From, State) ->
+ Reply =
+ case get_astate(State) of
+ unblocked ->
+ case get_ustate(State) of
+ busy ->
+ busy;
+ _ ->
+ false
+ end;
+ _ ->
+ blocked
+ end,
+ ?vlog("is busy or blocked: ~p",[Reply]),
+ {reply,Reply,State};
+
+handle_call(is_blocked, From, State) ->
+ Reply =
+ case get_astate(State) of
+ unblocked ->
+ false;
+ _ ->
+ true
+ end,
+ ?vlog("is blocked: ~p",[Reply]),
+ {reply,Reply,State};
+
+handle_call(get_admin_state, From, State) ->
+ Reply = get_astate(State),
+ ?vlog("admin state: ~p",[Reply]),
+ {reply,Reply,State};
+
+handle_call(get_usage_state, From, State) ->
+ Reply = get_ustate(State),
+ ?vlog("usage state: ~p",[Reply]),
+ {reply,Reply,State};
+
+handle_call({verbosity,Who,Verbosity}, From, State) ->
+ V = ?vvalidate(Verbosity),
+ ?vlog("~n Set new verbosity to ~p for ~p",[V,Who]),
+ Reply = set_verbosity(Who,V,State),
+ {reply,Reply,State};
+
+handle_call(restart, From, State) when State#state.admin_state == blocked ->
+ ?vlog("restart",[]),
+ case handle_restart(State) of
+ {stop, Reply,S1} ->
+ {stop, Reply, S1};
+ {_, Reply, S1} ->
+ {reply,Reply,S1}
+ end;
+
+handle_call(restart, From, State) ->
+ ?vlog("restart(~p)",[State#state.admin_state]),
+ {reply,{error,{invalid_admin_state,State#state.admin_state}},State};
+
+handle_call(block, From, State) ->
+ ?vlog("block(disturbing)",[]),
+ {Reply,S1} = handle_block(State),
+ {reply,Reply,S1};
+
+handle_call(unblock, {From,_Tag}, State) ->
+ ?vlog("unblock",[]),
+ {Reply,S1} = handle_unblock(State,From),
+ {reply, Reply, S1};
+
+handle_call({new_connection, Pid}, From, State) ->
+ ?vlog("~n New connection (~p) when connection count = ~p",
+ [Pid,length(State#state.connections)]),
+ {S, S1} = handle_new_connection(State, Pid),
+ Reply = {S, get(request_handler_verbosity)},
+ {reply, Reply, S1};
+
+handle_call(Request, From, State) ->
+ ?vinfo("~n unknown request '~p' from ~p", [Request,From]),
+ String =
+ lists:flatten(
+ io_lib:format("Unknown request "
+ "~n ~p"
+ "~nto manager (~p)"
+ "~nfrom ~p",
+ [Request, self(), From])),
+ report_error(State,String),
+ {reply, ok, State}.
+
+
+%% handle_cast
+
+handle_cast({done_connection, Pid}, State) ->
+ ?vlog("~n Done connection (~p)", [Pid]),
+ S1 = handle_done_connection(State, Pid),
+ {noreply, S1};
+
+handle_cast({block, disturbing, Timeout, From, Ref}, State) ->
+ ?vlog("block(disturbing,~p)",[Timeout]),
+ S1 = handle_block(State, Timeout, From, Ref),
+ {noreply,S1};
+
+handle_cast({block, non_disturbing, Timeout, From, Ref}, State) ->
+ ?vlog("block(non-disturbing,~p)",[Timeout]),
+ S1 = handle_nd_block(State, Timeout, From, Ref),
+ {noreply,S1};
+
+handle_cast(Message, State) ->
+ ?vinfo("~n received unknown message '~p'",[Message]),
+ String =
+ lists:flatten(
+ io_lib:format("Unknown message "
+ "~n ~p"
+ "~nto manager (~p)",
+ [Message, self()])),
+ report_error(State, String),
+ {noreply, State}.
+
+%% handle_info
+
+handle_info({block_timeout, Method}, State) ->
+ ?vlog("received block_timeout event",[]),
+ S1 = handle_block_timeout(State,Method),
+ {noreply, S1};
+
+handle_info({'DOWN', Ref, process, _Object, Info}, State) ->
+ ?vlog("~n down message for ~p",[Ref]),
+ S1 =
+ case State#state.blocker_ref of
+ Ref ->
+ handle_blocker_exit(State);
+ _ ->
+ %% Not our blocker, so ignore
+ State
+ end,
+ {noreply, S1};
+
+handle_info({'EXIT', Pid, normal}, State) ->
+ ?vdebug("~n Normal exit message from ~p", [Pid]),
+ {noreply, State};
+
+handle_info({'EXIT', Pid, blocked}, S) ->
+ ?vdebug("blocked exit signal from request handler (~p)", [Pid]),
+ {noreply, S};
+
+handle_info({'EXIT', Pid, Reason}, State) ->
+ ?vlog("~n Exit message from ~p for reason ~p",[Pid, Reason]),
+ S1 = check_connections(State, Pid, Reason),
+ {noreply, S1};
+
+handle_info(Info, State) ->
+ ?vinfo("~n received unknown info '~p'",[Info]),
+ String =
+ lists:flatten(
+ io_lib:format("Unknown info "
+ "~n ~p"
+ "~nto manager (~p)",
+ [Info, self()])),
+ report_error(State, String),
+ {noreply, State}.
+
+
+%% terminate
+
+terminate(R, #state{config_db = Db}) ->
+ ?vlog("Terminating for reason: ~n ~p", [R]),
+ httpd_conf:remove_all(Db),
+ ok.
+
+
+%% code_change({down,ToVsn}, State, Extra)
+%%
+%% NOTE:
+%% Upgrade from 2.5.1 to 2.5.3 and downgrade from
+%% 2.5.3 to 2.5.1 are actually done with an application restart, so
+%% these functions are never used. The code is kept
+%% only for possible future use.
+%%
+code_change({down,ToVsn},State,Extra) ->
+ {ok,State};
+
+%% code_change(FromVsn, State, Extra)
+%%
+code_change(FromVsn,State,Extra) ->
+ {ok,State}.
+
+
+
+%% -------------------------------------------------------------------------
+%% check_connection
+%%
+%%
+%%
+%%
+
+check_connections(#state{connections = []} = State, _Pid, _Reason) ->
+ State;
+check_connections(#state{admin_state = shutting_down,
+ connections = Connections} = State, Pid, Reason) ->
+ %% Could be a crashing request handler
+ case lists:delete(Pid, Connections) of
+ [] -> % Crashing request handler => block complete
+ String =
+ lists:flatten(
+ io_lib:format("request handler (~p) crashed:"
+ "~n ~p", [Pid, Reason])),
+ report_error(State, String),
+ ?vlog("block complete",[]),
+ demonitor_blocker(State#state.blocker_ref),
+ {Tmr,From,Ref} = State#state.blocking_tmr,
+ ?vlog("(possibly) stop block timer",[]),
+ stop_block_tmr(Tmr),
+ ?vlog("and send the reply",[]),
+ From ! {block_reply,ok,Ref},
+ State#state{admin_state = blocked, connections = [],
+ blocker_ref = undefined};
+ Connections1 ->
+ State#state{connections = Connections1}
+ end;
+check_connections(#state{connections = Connections} = State, Pid, Reason) ->
+ case lists:delete(Pid, Connections) of
+ Connections -> % Not a request handler, so ignore
+ State;
+ Connections1 ->
+ String =
+ lists:flatten(
+ io_lib:format("request handler (~p) crashed:"
+ "~n ~p", [Pid, Reason])),
+ report_error(State, String),
+ State#state{connections = lists:delete(Pid, Connections)}
+ end.
+
+
+%% -------------------------------------------------------------------------
+%% handle_[new | done]_connection
+%%
+%%
+%%
+%%
+
+handle_new_connection(State, Handler) ->
+ UsageState = get_ustate(State),
+ AdminState = get_astate(State),
+ handle_new_connection(UsageState, AdminState, State, Handler).
+
+handle_new_connection(busy, unblocked, State, Handler) ->
+ Status = update_heavy_load_status(State#state.status),
+ {{reject, busy},
+ State#state{status = Status}};
+
+handle_new_connection(_UsageState, unblocked, State, Handler) ->
+ Connections = State#state.connections,
+ Status = update_connection_status(State#state.status,
+ length(Connections)+1),
+ link(Handler),
+ {accept,
+ State#state{connections = [Handler|Connections], status = Status}};
+
+handle_new_connection(_UsageState, _AdminState, State, _Handler) ->
+ {{reject, blocked},
+ State}.
+
+
+handle_done_connection(#state{admin_state = shutting_down,
+ connections = Connections} = State, Handler) ->
+ unlink(Handler),
+ case lists:delete(Handler, Connections) of
+ [] -> % Ok, block complete
+ ?vlog("block complete",[]),
+ demonitor_blocker(State#state.blocker_ref),
+ {Tmr,From,Ref} = State#state.blocking_tmr,
+ ?vlog("(possibly) stop block timer",[]),
+ stop_block_tmr(Tmr),
+ ?vlog("and send the reply",[]),
+ From ! {block_reply,ok,Ref},
+ State#state{admin_state = blocked, connections = [],
+ blocker_ref = undefined};
+ Connections1 ->
+ State#state{connections = Connections1}
+ end;
+
+handle_done_connection(#state{connections = Connections} = State, Handler) ->
+ State#state{connections = lists:delete(Handler, Connections)}.
+
+
+%% -------------------------------------------------------------------------
+%% handle_block
+%%
+%%
+%%
+%%
+handle_block(#state{admin_state = AdminState} = S) ->
+ handle_block(S, AdminState).
+
+handle_block(S,unblocked) ->
+ %% Kill all connections
+ ?vtrace("handle_block(unblocked) -> kill all request handlers",[]),
+%% [exit(Pid,blocked) || Pid <- S#state.connections],
+ [kill_handler(Pid) || Pid <- S#state.connections],
+ {ok,S#state{connections = [], admin_state = blocked}};
+handle_block(S,blocked) ->
+ ?vtrace("handle_block(blocked) -> already blocked",[]),
+ {ok,S};
+handle_block(S,shutting_down) ->
+ ?vtrace("handle_block(shutting_down) -> ongoing...",[]),
+ {{error,shutting_down},S}.
+
+
+kill_handler(Pid) ->
+ ?vtrace("kill request handler: ~p",[Pid]),
+ exit(Pid, blocked).
+%% exit(Pid, kill).
+
+handle_block(S,Timeout,From,Ref) when Timeout >= 0 ->
+ do_block(S,Timeout,From,Ref);
+
+handle_block(S,Timeout,From,Ref) ->
+ Reply = {error,{invalid_block_request,Timeout}},
+ From ! {block_reply,Reply,Ref},
+ S.
+
+do_block(S,Timeout,From,Ref) ->
+ case S#state.connections of
+ [] ->
+ %% Already in idle usage state => go directly to blocked
+ ?vdebug("do_block -> already in idle usage state",[]),
+ From ! {block_reply,ok,Ref},
+ S#state{admin_state = blocked};
+ _ ->
+ %% Active or Busy usage state => go to shutting_down
+ ?vdebug("do_block -> active or busy usage state",[]),
+ %% Make sure we get to know if blocker dies...
+ ?vtrace("do_block -> create blocker monitor",[]),
+ MonitorRef = monitor_blocker(From),
+ ?vtrace("do_block -> (possibly) start block timer",[]),
+ Tmr = {start_block_tmr(Timeout,disturbing),From,Ref},
+ S#state{admin_state = shutting_down,
+ blocker_ref = MonitorRef, blocking_tmr = Tmr}
+ end.
+
+handle_nd_block(S,infinity,From,Ref) ->
+ do_nd_block(S,infinity,From,Ref);
+
+handle_nd_block(S,Timeout,From,Ref) when Timeout >= 0 ->
+ do_nd_block(S,Timeout,From,Ref);
+
+handle_nd_block(S,Timeout,From,Ref) ->
+ Reply = {error,{invalid_block_request,Timeout}},
+ From ! {block_reply,Reply,Ref},
+ S.
+
+do_nd_block(S,Timeout,From,Ref) ->
+ case S#state.connections of
+ [] ->
+ %% Already in idle usage state => go directly to blocked
+ ?vdebug("do_nd_block -> already in idle usage state",[]),
+ From ! {block_reply,ok,Ref},
+ S#state{admin_state = blocked};
+ _ ->
+ %% Active or Busy usage state => go to shutting_down
+ ?vdebug("do_nd_block -> active or busy usage state",[]),
+ %% Make sure we get to know if blocker dies...
+ ?vtrace("do_nd_block -> create blocker monitor",[]),
+ MonitorRef = monitor_blocker(From),
+ ?vtrace("do_nd_block -> (possibly) start block timer",[]),
+ Tmr = {start_block_tmr(Timeout,non_disturbing),From,Ref},
+ S#state{admin_state = shutting_down,
+ blocker_ref = MonitorRef, blocking_tmr = Tmr}
+ end.
+
+handle_block_timeout(S,Method) ->
+ %% Time to take this to the road...
+ demonitor_blocker(S#state.blocker_ref),
+ handle_block_timeout1(S,Method,S#state.blocking_tmr).
+
+handle_block_timeout1(S,non_disturbing,{_,From,Ref}) ->
+ ?vdebug("handle_block_timeout1(non-disturbing) -> send reply: timeout",[]),
+ From ! {block_reply,{error,timeout},Ref},
+ S#state{admin_state = unblocked,
+ blocker_ref = undefined, blocking_tmr = undefined};
+
+handle_block_timeout1(S,disturbing,{_,From,Ref}) ->
+ ?vdebug("handle_block_timeout1(disturbing) -> kill all connections",[]),
+ [exit(Pid,blocked) || Pid <- S#state.connections],
+
+ ?vdebug("handle_block_timeout1 -> send reply: ok",[]),
+ From ! {block_reply,ok,Ref},
+ S#state{admin_state = blocked, connections = [],
+ blocker_ref = undefined, blocking_tmr = undefined};
+
+handle_block_timeout1(S,Method,{_,From,Ref}) ->
+ ?vinfo("received block timeout with unknown block method:"
+ "~n Method: ~p",[Method]),
+ From ! {block_reply,{error,{unknown_block_method,Method}},Ref},
+ S#state{admin_state = blocked, connections = [],
+ blocker_ref = undefined, blocking_tmr = undefined};
+
+handle_block_timeout1(S,Method,TmrInfo) ->
+ ?vinfo("received block timeout with erroneous timer info:"
+ "~n Method: ~p"
+ "~n TmrInfo: ~p",[Method,TmrInfo]),
+ S#state{admin_state = unblocked,
+ blocker_ref = undefined, blocking_tmr = undefined}.
+
+handle_unblock(S,FromA) ->
+ handle_unblock(S,FromA,S#state.admin_state).
+
+handle_unblock(S,_FromA,unblocked) ->
+ {ok,S};
+handle_unblock(S,FromA,_AdminState) ->
+ ?vtrace("handle_unblock -> (possibly) stop block timer",[]),
+ stop_block_tmr(S#state.blocking_tmr),
+ case S#state.blocking_tmr of
+ {Tmr,FromB,Ref} ->
+ %% Another process is trying to unblock
+ %% Inform the blocker
+ FromB ! {block_reply, {error,{unblocked,FromA}},Ref};
+ _ ->
+ ok
+ end,
+ {ok,S#state{admin_state = unblocked, blocking_tmr = undefined}}.
+
+%% The blocker died so we give up on the block.
+handle_blocker_exit(S) ->
+ {Tmr,_From,_Ref} = S#state.blocking_tmr,
+ ?vtrace("handle_blocker_exit -> (possibly) stop block timer",[]),
+ stop_block_tmr(Tmr),
+ S#state{admin_state = unblocked,
+ blocker_ref = undefined, blocking_tmr = undefined}.
+
+
+
+%% -------------------------------------------------------------------------
+%% handle_restart
+%%
+%%
+%%
+%%
+handle_restart(#state{config_file = undefined} = State) ->
+ {continue, {error, undefined_config_file}, State};
+handle_restart(#state{config_db = Db, config_file = ConfigFile} = State) ->
+ ?vtrace("load new configuration",[]),
+ {ok, Config} = httpd_conf:load(ConfigFile),
+ ?vtrace("check for illegal changes (addr, port and socket-type)",[]),
+ case (catch check_constant_values(Db, Config)) of
+ ok ->
+ %% If something goes wrong between the remove
+ %% and the store, we are in trouble
+ ?vtrace("remove old configuration, now hold you breath...",[]),
+ httpd_conf:remove_all(Db),
+ ?vtrace("store new configuration",[]),
+ case httpd_conf:store(Config) of
+ {ok, NewConfigDB} ->
+ ?vlog("restart done, puh!",[]),
+ {continue, ok, State#state{config_db = NewConfigDB}};
+ Error ->
+ ?vlog("failed store new config: ~n ~p",[Error]),
+ {stop, Error, State}
+ end;
+ Error ->
+ ?vlog("restart NOT performed due to:"
+ "~n ~p",[Error]),
+ {continue, Error, State}
+ end.
+
+
+check_constant_values(Db, Config) ->
+ %% Check port number
+ ?vtrace("check_constant_values -> check port number",[]),
+ Port = httpd_util:lookup(Db,port),
+ case httpd_util:key1search(Config,port) of %% MUST be equal
+ Port ->
+ ok;
+ OtherPort ->
+ throw({error,{port_number_changed,Port,OtherPort}})
+ end,
+
+ %% Check bind address
+ ?vtrace("check_constant_values -> check bind address",[]),
+ Addr = httpd_util:lookup(Db,bind_address),
+ case httpd_util:key1search(Config,bind_address) of %% MUST be equal
+ Addr ->
+ ok;
+ OtherAddr ->
+ throw({error,{addr_changed,Addr,OtherAddr}})
+ end,
+
+ %% Check socket type
+ ?vtrace("check_constant_values -> check socket type",[]),
+ SockType = httpd_util:lookup(Db, com_type),
+ case httpd_util:key1search(Config, com_type) of %% MUST be equal
+ SockType ->
+ ok;
+ OtherSockType ->
+ throw({error,{sock_type_changed,SockType,OtherSockType}})
+ end,
+ ?vtrace("check_constant_values -> done",[]),
+ ok.
+
+
+%% get_ustate(State) -> idle | active | busy
+%%
+%% Retrieve the usage state of the HTTP server:
+%% 0 active connections -> idle
+%% max_clients active connections -> busy
+%% Otherwise -> active
+%%
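+%% With the default max_clients of 150 this means, for example, that a
+%% server with no connections is idle, one with 37 connections is
+%% active, and one with exactly 150 connections is busy (in which case
+%% handle_new_connection/2 rejects further connections).
+%%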
+get_ustate(State) ->
+ get_ustate(length(State#state.connections),State).
+
+get_ustate(0,_State) ->
+ idle;
+get_ustate(ConnectionCnt,State) ->
+ ConfigDB = State#state.config_db,
+ case httpd_util:lookup(ConfigDB, max_clients, 150) of
+ ConnectionCnt ->
+ busy;
+ _ ->
+ active
+ end.
+
+
+get_astate(S) -> S#state.admin_state.
+
+
+%% Timer handling functions
+start_block_tmr(infinity,_) ->
+ undefined;
+start_block_tmr(T,M) ->
+ erlang:send_after(T,self(),{block_timeout,M}).
+
+stop_block_tmr(undefined) ->
+ ok;
+stop_block_tmr(Ref) ->
+ erlang:cancel_timer(Ref).
+
+
+%% Monitor blocker functions
+monitor_blocker(Pid) when pid(Pid) ->
+ case (catch erlang:monitor(process,Pid)) of
+ MonitorRef ->
+ MonitorRef;
+ {'EXIT',Reason} ->
+ undefined
+ end;
+monitor_blocker(_) ->
+ undefined.
+
+demonitor_blocker(undefined) ->
+ ok;
+demonitor_blocker(Ref) ->
+ (catch erlang:demonitor(Ref)).
+
+
+%% Some status utility functions
+
+update_heavy_load_status(Status) ->
+ update_status_with_time(Status,last_heavy_load).
+
+update_connection_status(Status,ConnCount) ->
+ S1 = case lists:keysearch(max_conn,1,Status) of
+ {value,{max_conn,C1}} when ConnCount > C1 ->
+ lists:keyreplace(max_conn,1,Status,{max_conn,ConnCount});
+ {value,{max_conn,C2}} ->
+ Status;
+ false ->
+ [{max_conn,ConnCount}|Status]
+ end,
+ update_status_with_time(S1,last_connection).
+
+update_status_with_time(Status,Key) ->
+ lists:keyreplace(Key,1,Status,{Key,universal_time()}).
+
+universal_time() -> calendar:universal_time().
+
+
+auth_status(P) when pid(P) ->
+ Items = [status, message_queue_len, reductions,
+ heap_size, stack_size, current_function],
+ {auth_status, process_status(P,Items,[])};
+auth_status(_) ->
+ {auth_status, undefined}.
+
+sec_status(P) when pid(P) ->
+ Items = [status, message_queue_len, reductions,
+ heap_size, stack_size, current_function],
+ {security_status, process_status(P,Items,[])};
+sec_status(_) ->
+ {security_status, undefined}.
+
+acceptor_status(P) when pid(P) ->
+ Items = [status, message_queue_len, reductions,
+ heap_size, stack_size, current_function],
+ {acceptor_status, process_status(P,Items,[])};
+acceptor_status(_) ->
+ {acceptor_status, undefined}.
+
+
+manager_status(P) ->
+ Items = [status, message_queue_len, reductions,
+ heap_size, stack_size],
+ {manager_status, process_status(P,Items,[])}.
+
+
+process_status(P,[],L) ->
+ [{pid,P}|lists:reverse(L)];
+process_status(P,[H|T],L) ->
+ case (catch process_info(P,H)) of
+ {H, Value} ->
+ process_status(P,T,[{H,Value}|L]);
+ _ ->
+ process_status(P,T,[{H,undefined}|L])
+ end.
+
+make_name(Addr,Port) ->
+ httpd_util:make_name("httpd",Addr,Port).
+
+
+report_error(State,String) ->
+ Cdb = State#state.config_db,
+ error_logger:error_report(String),
+ mod_log:report_error(Cdb,String),
+ mod_disk_log:report_error(Cdb,String).
+
+
+set_verbosity(V) ->
+ Units = [manager_verbosity,
+ acceptor_verbosity, request_handler_verbosity,
+ security_verbosity, auth_verbosity],
+ case httpd_util:key1search(V, all) of
+ undefined ->
+ set_verbosity(V, Units);
+ Verbosity when atom(Verbosity) ->
+ V1 = [{Unit, Verbosity} || Unit <- Units],
+ set_verbosity(V1, Units)
+ end.
+
+set_verbosity(_V, []) ->
+ ok;
+set_verbosity(V, [manager_verbosity = Unit|Units]) ->
+ Verbosity = httpd_util:key1search(V, Unit, ?default_verbosity),
+ put(verbosity, ?vvalidate(Verbosity)),
+ set_verbosity(V, Units);
+set_verbosity(V, [Unit|Units]) ->
+ Verbosity = httpd_util:key1search(V, Unit, ?default_verbosity),
+ put(Unit, ?vvalidate(Verbosity)),
+ set_verbosity(V, Units).
+
+
+set_verbosity(manager,V,_S) ->
+ put(verbosity,V);
+set_verbosity(acceptor,V,_S) ->
+ put(acceptor_verbosity,V);
+set_verbosity(request,V,_S) ->
+ put(request_handler_verbosity,V);
+set_verbosity(security,V,S) ->
+ OldVerbosity = put(security_verbosity,V),
+ Addr = httpd_util:lookup(S#state.config_db, bind_address),
+ Port = httpd_util:lookup(S#state.config_db, port),
+ mod_security_server:verbosity(Addr,Port,V),
+ OldVerbosity;
+set_verbosity(auth,V,S) ->
+ OldVerbosity = put(auth_verbosity,V),
+ Addr = httpd_util:lookup(S#state.config_db, bind_address),
+ Port = httpd_util:lookup(S#state.config_db, port),
+ mod_auth_server:verbosity(Addr,Port,V),
+ OldVerbosity;
+
+set_verbosity(all,V,S) ->
+ OldMv = put(verbosity,V),
+    _OldAcceptorV = put(acceptor_verbosity,V),
+ OldRv = put(request_handler_verbosity,V),
+ OldSv = put(security_verbosity,V),
+ OldAv = put(auth_verbosity,V),
+ Addr = httpd_util:lookup(S#state.config_db, bind_address),
+ Port = httpd_util:lookup(S#state.config_db, port),
+ mod_security_server:verbosity(Addr,Port,V),
+ mod_auth_server:verbosity(Addr,Port,V),
+ [{manager,OldMv}, {request,OldRv}, {security,OldSv}, {auth, OldAv}].
+
+
+%%
+call(ServerRef,Request) ->
+ gen_server:call(ServerRef,Request).
+
+cast(ServerRef,Message) ->
+ gen_server:cast(ServerRef,Message).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_misc_sup.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_misc_sup.erl
new file mode 100644
index 0000000000..e671f05206
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_misc_sup.erl
@@ -0,0 +1,113 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_misc_sup.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+%%----------------------------------------------------------------------
+%% Purpose: The supervisor for the httpd auxiliary (auth and security) workers
+%%----------------------------------------------------------------------
+
+-module(httpd_misc_sup).
+
+-behaviour(supervisor).
+
+-include("httpd_verbosity.hrl").
+
+%% public
+-export([start/3, stop/1, init/1]).
+
+-export([start_auth_server/3, stop_auth_server/2,
+ start_sec_server/3, stop_sec_server/2]).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% supervisor callback functions
+
+
+start(Addr, Port, MiscSupVerbosity) ->
+ SupName = make_name(Addr, Port),
+ supervisor:start_link({local, SupName}, ?MODULE, [MiscSupVerbosity]).
+
+stop(StartArgs) ->
+ ok.
+
+init([Verbosity]) -> % Supervisor
+ do_init(Verbosity);
+init(BadArg) ->
+ {error, {badarg, BadArg}}.
+
+do_init(Verbosity) ->
+ put(verbosity,?vvalidate(Verbosity)),
+ put(sname,misc_sup),
+ ?vlog("starting", []),
+ Flags = {one_for_one, 0, 1},
+ KillAfter = timer:seconds(1),
+ Workers = [],
+ {ok, {Flags, Workers}}.
+
+
+%%----------------------------------------------------------------------
+%% Function: [start|stop]_[auth|sec]_server/3
+%% Description: Starts a [auth | security] worker (child) process
+%%----------------------------------------------------------------------
+
+start_auth_server(Addr, Port, Verbosity) ->
+ start_permanent_worker(mod_auth_server, Addr, Port,
+ Verbosity, [gen_server]).
+
+stop_auth_server(Addr, Port) ->
+ stop_permanent_worker(mod_auth_server, Addr, Port).
+
+
+start_sec_server(Addr, Port, Verbosity) ->
+ start_permanent_worker(mod_security_server, Addr, Port,
+ Verbosity, [gen_server]).
+
+stop_sec_server(Addr, Port) ->
+ stop_permanent_worker(mod_security_server, Addr, Port).
+
+
+
+%%----------------------------------------------------------------------
+%% Function: start_permanent_worker/5
+%% Description: Starts a permanent worker (child) process
+%%----------------------------------------------------------------------
+
+start_permanent_worker(Mod, Addr, Port, Verbosity, Modules) ->
+ SupName = make_name(Addr, Port),
+ Spec = {{Mod, Addr, Port},
+ {Mod, start_link, [Addr, Port, Verbosity]},
+ permanent, timer:seconds(1), worker, [Mod] ++ Modules},
+ supervisor:start_child(SupName, Spec).
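+
+%% Illustrative sketch only (not part of the original module): the child
+%% spec built above, spelled out for the auth server on {127,0,0,1}:8080.
+%% The function name, address and port are hypothetical.
+child_spec_example(Verbosity) ->
+    {{mod_auth_server, {127,0,0,1}, 8080},
+     {mod_auth_server, start_link, [{127,0,0,1}, 8080, Verbosity]},
+     permanent, timer:seconds(1), worker, [mod_auth_server, gen_server]}.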
+
+
+%%----------------------------------------------------------------------
+%% Function: stop_permanent_worker/3
+%% Description: Stops a permanent worker (child) process
+%%----------------------------------------------------------------------
+
+stop_permanent_worker(Mod, Addr, Port) ->
+ SupName = make_name(Addr, Port),
+ Name = {Mod, Addr, Port},
+ case supervisor:terminate_child(SupName, Name) of
+ ok ->
+ supervisor:delete_child(SupName, Name);
+ Error ->
+ Error
+ end.
+
+
+make_name(Addr,Port) ->
+ httpd_util:make_name("httpd_misc_sup",Addr,Port).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl
new file mode 100644
index 0000000000..2f4163de00
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl
@@ -0,0 +1,344 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_parse.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+-module(httpd_parse).
+-export([
+ request_header/1,
+ hsplit/2,
+ get_request_record/10,
+ split_lines/1,
+ tagup_header/1]).
+-include("httpd.hrl").
+
+
+%%----------------------------------------------------------------------
+%% request_header
+%%
+%% Input: The request as sent from the client (list of characters)
+%% (may include part of the entity body)
+%%
+%% Returns:
+%%   {ok, [Method, RequestURI, HTTPVersion, RequestLine, ParsedHeader]}
+%%   {not_implemented, RequestLine, Method, RequestURI, ParsedHeader, HTTPVersion}
+%% {bad_request, Reason}
+%%----------------------------------------------------------------------
+
+request_header(Header)->
+ [RequestLine|HeaderFields] = split_lines(Header),
+ ?DEBUG("request ->"
+ "~n RequestLine: ~p"
+ "~n Header: ~p",[RequestLine,Header]),
+ ParsedHeader = tagup_header(HeaderFields),
+ ?DEBUG("request ->"
+ "~n ParseHeader: ~p",[ParsedHeader]),
+ case verify_request(string:tokens(RequestLine," ")) of
+ ["HEAD", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N]] ->
+ {ok, ["HEAD", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N], RequestLine,
+ ParsedHeader]};
+ ["GET", RequestURI, "HTTP/0.9"] ->
+ {ok, ["GET", RequestURI, "HTTP/0.9", RequestLine, ParsedHeader]};
+ ["GET", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N]] ->
+ {ok, ["GET", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N], RequestLine,
+ ParsedHeader]};
+ ["POST", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N]] ->
+ {ok, ["POST", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N], RequestLine,
+ ParsedHeader]};
+ %%HTTP must be 1.1 or higher
+ ["TRACE", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N]] when N>48->
+ {ok, ["TRACE", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N], RequestLine,
+ ParsedHeader]};
+ [Method, RequestURI] ->
+ {not_implemented, RequestLine, Method, RequestURI,ParsedHeader,"HTTP/0.9"};
+ [Method, RequestURI, HTTPVersion] ->
+ {not_implemented, RequestLine, Method, RequestURI,ParsedHeader, HTTPVersion};
+ {bad_request, Reason} ->
+ {bad_request, Reason};
+ Reason ->
+ {bad_request, "Unknown request method"}
+ end.
+
+
+
+
+
+
+%%----------------------------------------------------------------------
+%% The request is passed through the server as a record of type mod; build it here
+%% ----------------------------------------------------------------------
+
+get_request_record(Socket,SocketType,ConfigDB,Method,RequestURI,
+ HTTPVersion,RequestLine,ParsedHeader,EntityBody,InitData)->
+ PersistentConn=get_persistens(HTTPVersion,ParsedHeader,ConfigDB),
+ Info=#mod{init_data=InitData,
+ data=[],
+ socket_type=SocketType,
+ socket=Socket,
+ config_db=ConfigDB,
+ method=Method,
+ absolute_uri=formatAbsoluteURI(RequestURI,ParsedHeader),
+ request_uri=formatRequestUri(RequestURI),
+ http_version=HTTPVersion,
+ request_line=RequestLine,
+ parsed_header=ParsedHeader,
+ entity_body=maybe_remove_nl(ParsedHeader,EntityBody),
+ connection=PersistentConn},
+ {ok,Info}.
+
+%%----------------------------------------------------------------------
+%% Control whether we shall maintain a persistent connection or not
+%%----------------------------------------------------------------------
+get_persistens(HTTPVersion,ParsedHeader,ConfigDB)->
+ case httpd_util:lookup(ConfigDB,persistent_conn,true) of
+ true->
+ case HTTPVersion of
+		%% If the version is prior to 1.1, kill the connection
+		[$H, $T, $T, $P, $\/, $1, $.,N] ->
+		    case httpd_util:key1search(ParsedHeader,"connection","keep-alive") of
+			%% If the connection is not explicitly ordered to close,
+			%% let it live. The keep-alive value comes from older
+			%% HTTP/1.1 usage and older clients that still send it.
+ "keep-alive" when N >= 49 ->
+ ?DEBUG("CONNECTION MODE: ~p",[true]),
+ true;
+ "close" ->
+ ?DEBUG("CONNECTION MODE: ~p",[false]),
+ false;
+ Connect ->
+ ?DEBUG("CONNECTION MODE: ~p VALUE: ~p",[false,Connect]),
+ false
+ end;
+ _ ->
+ ?DEBUG("CONNECTION MODE: ~p VERSION: ~p",[false,HTTPVersion]),
+ false
+
+ end;
+ _ ->
+ false
+ end.
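+
+%% Illustrative sketch only (not part of the original module), assuming
+%% persistent_conn is enabled in ConfigDB (which is the lookup default).
+%% The function name is hypothetical.
+get_persistens_example(ConfigDB) ->
+    true  = get_persistens("HTTP/1.1", [], ConfigDB),
+    false = get_persistens("HTTP/1.0", [], ConfigDB),
+    false = get_persistens("HTTP/1.1", [{"connection","close"}], ConfigDB).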
+
+
+
+
+%%----------------------------------------------------------------------
+%% Decide whether the trailing newline of the body is part of the message
+%% or part of a multipart message.
+%%----------------------------------------------------------------------
+maybe_remove_nl(Header,Rest) ->
+ case find_content_type(Header) of
+ false ->
+ {ok,EntityBody,_}=regexp:sub(Rest,"\r\n\$",""),
+ EntityBody;
+ {ok, Value} ->
+ case string:str(Value, "multipart/form-data") of
+ 0 ->
+ {ok,EntityBody,_}=regexp:sub(Rest,"\r\n\$",""),
+ EntityBody;
+ _ ->
+ Rest
+ end
+ end.
+
+%%----------------------------------------------------------------------
+%% Get the content type of the incoming request
+%%----------------------------------------------------------------------
+
+
+find_content_type([]) ->
+ false;
+find_content_type([{Name,Value}|Tail]) ->
+ case httpd_util:to_lower(Name) of
+ "content-type" ->
+ {ok, Value};
+ _ ->
+ find_content_type(Tail)
+ end.
+
+%%----------------------------------------------------------------------
+%% Split the header into a list of strings where each string represents an
+%% HTTP header field
+%%----------------------------------------------------------------------
+split_lines(Request) ->
+ split_lines(Request, [], []).
+split_lines([], CAcc, Acc) ->
+ lists:reverse([lists:reverse(CAcc)|Acc]);
+
+%% Whitespace in header fields is allowed, but a continuation line must
+%% begin with LWS; see RFC 2616, chapter 4.2.
+split_lines([$\r, $\n, $\t |Rest], CAcc, Acc) ->
+ split_lines(Rest, [$\r, $\n |CAcc], Acc);
+
+split_lines([$\r, $\n, $\s |Rest], CAcc, Acc) ->
+ split_lines(Rest, [$\r, $\n |CAcc], Acc);
+
+split_lines([$\r, $\n|Rest], CAcc, Acc) ->
+ split_lines(Rest, [], [lists:reverse(CAcc)|Acc]);
+split_lines([Chr|Rest], CAcc, Acc) ->
+ split_lines(Rest, [Chr|CAcc], Acc).
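+
+%% Illustrative sketch only (not part of the original module): splitting a
+%% request head into its lines. The function name is hypothetical.
+split_lines_example() ->
+    ["GET / HTTP/1.0", "Host: www.example.org"] =
+        split_lines("GET / HTTP/1.0\r\nHost: www.example.org").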
+
+
+%%----------------------------------------------------------------------
+%% This is a 'hack' to stop people from trying to access directories/files
+%% relative to the ServerRoot.
+%%----------------------------------------------------------------------
+
+
+verify_request([Request, RequestURI]) ->
+ verify_request([Request, RequestURI, "HTTP/0.9"]);
+
+verify_request([Request, RequestURI, Protocol]) ->
+ NewRequestURI =
+ case string:str(RequestURI, "?") of
+ 0 ->
+ RequestURI;
+ Ndx ->
+ string:left(RequestURI, Ndx)
+ end,
+ case string:str(NewRequestURI, "..") of
+ 0 ->
+ [Request, RequestURI, Protocol];
+ _ ->
+ {bad_request, {forbidden, RequestURI}}
+ end;
+verify_request(Request) ->
+ Request.
+
+%%----------------------------------------------------------------------
+%% tagup_header
+%%
+%% Parses the header of an HTTP request and returns a {Key,Value} tuple
+%% list containing the name and value of each header directive, as in:
+%%
+%% Content-Type: multipart/mixed -> {"Content-Type", "multipart/mixed"}
+%%
+%% In HTTP/1.1 the field names are case insensitive, so it becomes
+%% Content-Type: multipart/mixed -> {"content-type", "multipart/mixed"}
+%% The standard furthermore says that leading and trailing white space
+%% is not part of the field value and shall therefore be removed.
+%%----------------------------------------------------------------------
+
+tagup_header([]) -> [];
+tagup_header([Line|Rest]) -> [tag(Line, [])|tagup_header(Rest)].
+
+tag([], Tag) ->
+ {httpd_util:to_lower(lists:reverse(Tag)), ""};
+tag([$:|Rest], Tag) ->
+ {httpd_util:to_lower(lists:reverse(Tag)), httpd_util:strip(Rest)};
+tag([Chr|Rest], Tag) ->
+ tag(Rest, [Chr|Tag]).
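+
+%% Illustrative sketch only (not part of the original module), assuming
+%% httpd_util:strip/1 removes leading and trailing whitespace from the
+%% value. The function name is hypothetical.
+tagup_header_example() ->
+    [{"content-type", "multipart/mixed"},
+     {"host", "www.example.org"}] =
+        tagup_header(["Content-Type: multipart/mixed",
+                      "Host: www.example.org"]).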
+
+
+%%----------------------------------------------------------------------
+%% There are 3 possible forms of the request URI
+%%
+%%  1. * - the request is not for a particular asset but for the
+%%     server itself
+%%
+%%  2. absoluteURI - the whole server name, port and asset is in the request
+%%
+%%  3. abs_path - the most common form, used since HTTP/1.0, which is a
+%%     path to the requested asset
+%%----------------------------------------------------------------------
+formatRequestUri("*")->
+ "*";
+formatRequestUri([$h,$t,$t,$p,$:,$\/,$\/|ServerAndPath]) ->
+ removeServer(ServerAndPath);
+
+formatRequestUri([$H,$T,$T,$P,$:,$\/,$\/|ServerAndPath]) ->
+ removeServer(ServerAndPath);
+
+formatRequestUri(ABSPath) ->
+ ABSPath.
+
+removeServer([$\/|Url])->
+ case Url of
+ []->
+ "/";
+ _->
+ [$\/|Url]
+ end;
+removeServer([N|Url]) ->
+ removeServer(Url).
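+
+%% Illustrative sketch only (not part of the original module): what
+%% formatRequestUri/1 leaves of the three request-URI forms above.
+%% The function name and URLs are hypothetical.
+formatRequestUri_example() ->
+    "*" = formatRequestUri("*"),
+    "/index.html" = formatRequestUri("http://www.example.org/index.html"),
+    "/index.html" = formatRequestUri("/index.html").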
+
+
+formatAbsoluteURI([$h,$t,$t,$p,$:,$\/,$\/|Uri],ParsedHeader)->
+ [$H,$T,$T,$P,$:,$\/,$\/|Uri];
+
+formatAbsoluteURI([$H,$T,$T,$P,$:,$\/,$\/|Uri],ParsedHeader)->
+ [$H,$T,$T,$P,$:,$\/,$\/|Uri];
+
+formatAbsoluteURI(Uri,ParsedHeader)->
+ case httpd_util:key1search(ParsedHeader,"host") of
+ undefined ->
+ nohost;
+ Host ->
+ Host++Uri
+ end.
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% The code below is left over from an older version and shall be removed
+%% when the transformation to HTTP/1.1 is finished
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+
+
+%request(Request) ->
+% ?DEBUG("request -> entry with:"
+% "~n Request: ~s",[Request]),
+ % {BeforeEntityBody, Rest} = hsplit([], Request),
+ % ?DEBUG("request ->"
+% "~n BeforeEntityBody: ~p"
+% "~n Rest: ~p",[BeforeEntityBody, Rest]),
+% [RequestLine|Header] = split_lines(BeforeEntityBody),
+% ?DEBUG("request ->"
+% "~n RequestLine: ~p"
+% "~n Header: ~p",[RequestLine,Header]),
+% ParsedHeader = tagup_header(Header),
+% ?DEBUG("request ->"
+% "~n ParseHeader: ~p",[ParsedHeader]),
+% EntityBody = maybe_remove_nl(ParsedHeader,Rest),
+% ?DEBUG("request ->"
+% "~n EntityBody: ~p",[EntityBody]),
+% case verify_request(string:tokens(RequestLine," ")) of
+% ["HEAD", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N]] ->
+% {ok, ["HEAD", formatRequestUri(RequestURI), [$H,$T,$T,$P,$/,$1,$.,N], RequestLine,
+% ParsedHeader, EntityBody]};
+% ["GET", RequestURI, "HTTP/0.9"] ->
+% {ok, ["GET", RequestURI, "HTTP/0.9", RequestLine, ParsedHeader,
+% EntityBody]};
+% ["GET", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N]] ->
+% {ok, ["GET", formatRequestUri(RequestURI), [$H,$T,$T,$P,$/,$1,$.,N], RequestLine,
+% ParsedHeader,EntityBody]};
+%% ["POST", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N]] ->
+% {ok, ["POST", formatRequestUri(RequestURI), [$H,$T,$T,$P,$/,$1,$.,N], RequestLine,
+% ParsedHeader, EntityBody]};
+% [Method, RequestURI] ->
+% {not_implemented, RequestLine, Method, RequestURI,ParsedHeader,"HTTP/0.9"};
+% [Method, RequestURI, HTTPVersion] ->
+% {not_implemented, RequestLine, Method, RequestURI,ParsedHeader, HTTPVersion};
+% {bad_request, Reason} ->
+% {bad_request, Reason};
+% Reason ->
+% {bad_request, "Unknown request method"}
+% end.
+
+hsplit(Accu,[]) ->
+ {lists:reverse(Accu), []};
+hsplit(Accu, [ $\r, $\n, $\r, $\n | Tail]) ->
+ {lists:reverse(Accu), Tail};
+hsplit(Accu, [H|T]) ->
+ hsplit([H|Accu],T).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_request_handler.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_request_handler.erl
new file mode 100644
index 0000000000..b2d375ceff
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_request_handler.erl
@@ -0,0 +1,994 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_request_handler.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+-module(httpd_request_handler).
+
+%% app internal api
+-export([start_link/2, synchronize/3]).
+
+%% module internal api
+-export([connection/2, do_next_connection/6, read_header/7]).
+-export([parse_trailers/1, newline/1]).
+
+-include("httpd.hrl").
+-include("httpd_verbosity.hrl").
+
+
+%% start_link
+
+start_link(Manager, ConfigDB) ->
+ Pid = proc_lib:spawn(?MODULE, connection, [Manager, ConfigDB]),
+ {ok, Pid}.
+
+
+%% synchronize
+
+synchronize(Pid, SocketType, Socket) ->
+ Pid ! {synchronize, SocketType, Socket}.
+
+% connection
+
+connection(Manager, ConfigDB) ->
+ {SocketType, Socket, {Status, Verbosity}} = await_synchronize(Manager),
+ put(sname,self()),
+ put(verbosity,?vvalidate(Verbosity)),
+ connection1(Status, Manager, ConfigDB, SocketType, Socket).
+
+
+connection1({reject, busy}, Manager, ConfigDB, SocketType, Socket) ->
+ handle_busy(Manager, ConfigDB, SocketType, Socket);
+
+connection1({reject, blocked}, Manager, ConfigDB, SocketType, Socket) ->
+ handle_blocked(Manager, ConfigDB, SocketType, Socket);
+
+connection1(accept, Manager, ConfigDB, SocketType, Socket) ->
+ handle_connection(Manager, ConfigDB, SocketType, Socket).
+
+
+%% await_synchronize
+
+await_synchronize(Manager) ->
+ receive
+ {synchronize, SocketType, Socket} ->
+	    ?vlog("received synchronize: "
+ "~n SocketType: ~p"
+ "~n Socket: ~p", [SocketType, Socket]),
+ {SocketType, Socket, httpd_manager:new_connection(Manager)}
+ after 5000 ->
+ exit(synchronize_timeout)
+ end.
+
+
+% handle_busy
+
+handle_busy(Manager, ConfigDB, SocketType, Socket) ->
+ ?vlog("handle busy: ~p", [Socket]),
+ MaxClients = httpd_util:lookup(ConfigDB, max_clients, 150),
+ String = io_lib:format("heavy load (>~w processes)", [MaxClients]),
+ reject_connection(Manager, ConfigDB, SocketType, Socket, String).
+
+
+% handle_blocked
+
+handle_blocked(Manager, ConfigDB, SocketType, Socket) ->
+ ?vlog("handle blocked: ~p", [Socket]),
+ String = "Server maintenance performed, try again later",
+ reject_connection(Manager, ConfigDB, SocketType, Socket, String).
+
+
+% reject_connection
+
+reject_connection(Manager, ConfigDB, SocketType, Socket, Info) ->
+ String = lists:flatten(Info),
+ ?vtrace("send status (503) message", []),
+ httpd_response:send_status(SocketType, Socket, 503, String, ConfigDB),
+ %% This ugly thing is to make ssl deliver the message, before the close...
+ close_sleep(SocketType, 1000),
+ ?vtrace("close the socket", []),
+ close(SocketType, Socket, ConfigDB).
+
+
+% handle_connection
+
+handle_connection(Manager, ConfigDB, SocketType, Socket) ->
+ ?vlog("handle connection: ~p", [Socket]),
+ Resolve = httpd_socket:resolve(SocketType),
+ Peername = httpd_socket:peername(SocketType, Socket),
+ InitData = #init_data{peername=Peername, resolve=Resolve},
+ TimeOut = httpd_util:lookup(ConfigDB, keep_alive_timeout, 150000),
+ NrOfRequest = httpd_util:lookup(ConfigDB, max_keep_alive_request, forever),
+ ?MODULE:do_next_connection(ConfigDB, InitData,
+ SocketType, Socket,NrOfRequest,TimeOut),
+ ?vlog("handle connection: done", []),
+ httpd_manager:done_connection(Manager),
+ ?vlog("handle connection: close socket", []),
+ close(SocketType, Socket, ConfigDB).
+
+
+% do_next_connection
+do_next_connection(_ConfigDB, _InitData, _SocketType, _Socket, NrOfRequests,
+ _Timeout) when NrOfRequests < 1 ->
+ ?vtrace("do_next_connection: done", []),
+ ok;
+do_next_connection(ConfigDB, InitData, SocketType, Socket, NrOfRequests,
+ Timeout) ->
+ Peername = InitData#init_data.peername,
+ case (catch read(ConfigDB, SocketType, Socket, InitData, Timeout)) of
+ {'EXIT', Reason} ->
+ ?vlog("exit reading from socket: ~p",[Reason]),
+ error_logger:error_report({'EXIT',Reason}),
+ String =
+ lists:flatten(
+ io_lib:format("exit reading from socket: ~p => ~n~p~n",
+ [Socket, Reason])),
+ error_log(mod_log,
+ SocketType, Socket, ConfigDB, Peername, String),
+ error_log(mod_disk_log,
+ SocketType, Socket, ConfigDB, Peername, String);
+ {error, Reason} ->
+ handle_read_error(Reason,SocketType,Socket,ConfigDB,Peername);
+ Info when record(Info, mod) ->
+ case Info#mod.connection of
+ true ->
+ ReqTimeout = httpd_util:lookup(ConfigDB,
+ keep_alive_timeout, 150000),
+ ?MODULE:do_next_connection(ConfigDB, InitData,
+ SocketType, Socket,
+ dec(NrOfRequests), ReqTimeout);
+ _ ->
+ ok
+ end;
+ _ ->
+ ok
+ end.
+
+
+
+%% read
+read(ConfigDB, SocketType, Socket, InitData, Timeout) ->
+ ?vdebug("read from socket ~p with Timeout ~p",[Socket, Timeout]),
+ MaxHdrSz = httpd_util:lookup(ConfigDB, max_header_size, 10240),
+ case ?MODULE:read_header(SocketType, Socket, Timeout, MaxHdrSz,
+ ConfigDB, InitData, []) of
+ {socket_closed, Reason} ->
+ ?vlog("Socket closed while reading request header: "
+ "~n ~p", [Reason]),
+ socket_close;
+ {error, Error} ->
+ {error, Error};
+ {ok, Info, EntityBodyPart} ->
+ read1(SocketType, Socket, ConfigDB, InitData, Timeout, Info,
+ EntityBodyPart)
+ end.
+
+%% Got the head and maybe a part of the body: read in the rest
+read1(SocketType, Socket, ConfigDB, InitData, Timeout, Info, BodyPart)->
+ MaxBodySz = httpd_util:lookup(ConfigDB, max_body_size, nolimit),
+ ContentLength = content_length(Info),
+ ?vtrace("ContentLength: ~p", [ContentLength]),
+ case read_entity_body(SocketType, Socket, Timeout, MaxBodySz,
+ ContentLength, BodyPart, Info, ConfigDB) of
+ {socket_closed, Reason} ->
+ ?vlog("Socket closed while reading request body: "
+ "~n ~p", [Reason]),
+ socket_close;
+ {ok, EntityBody} ->
+ finish_request(EntityBody, [], Info);
+ {ok, ExtraHeader, EntityBody} ->
+ finish_request(EntityBody, ExtraHeader, Info);
+	_Response ->
+	    %% Catch all other (bad) return values
+	    httpd_socket:close(SocketType, Socket),
+	    socket_closed
+ end.
+
+
+%% The request has been read in; send it forward to the module that
+%% generates the response
+
+finish_request(EntityBody, ExtraHeader,
+ #mod{parsed_header = ParsedHeader} = Info)->
+ ?DEBUG("finish_request -> ~n"
+ " EntityBody: ~p~n"
+ " ExtraHeader: ~p~n"
+ " ParsedHeader: ~p~n",
+ [EntityBody, ExtraHeader, ParsedHeader]),
+ httpd_response:send(Info#mod{parsed_header = ParsedHeader ++ ExtraHeader,
+ entity_body = EntityBody}).
+
+
+%% read_header
+
+%% This algorithm relies on the buffer size of the inet driver together
+%% with the {active, once} socket option. At most one message of this
+%% size will be received at a given time. When a full header has been
+%% read, the body is read with the recv function (the body size is known).
+%%
+read_header(SocketType, Socket, Timeout, MaxHdrSz, ConfigDB,
+ InitData, SoFar0) ->
+ T = t(),
+    %% Remove any newlines at the beginning; they might be left-over garbage
+ SoFar = remove_newline(SoFar0),
+
+ case terminated_header(MaxHdrSz, SoFar) of
+ {true, Header, EntityBodyPart} ->
+ ?vdebug("read_header -> done reading header: "
+ "~n length(Header): ~p"
+ "~n length(EntityBodyPart): ~p",
+ [length(Header), length(EntityBodyPart)]),
+ transform_header(SocketType, Socket, Header, ConfigDB, InitData,
+ EntityBodyPart);
+ false ->
+ ?vtrace("read_header -> "
+ "~n set active = 'once' and "
+ "await a chunk of the header", []),
+
+ case httpd_socket:active_once(SocketType, Socket) of
+ ok ->
+ receive
+ %%
+ %% TCP
+ %%
+ {tcp, Socket, Data} ->
+ ?vtrace("read_header(ip) -> got some data: ~p",
+ [sz(Data)]),
+ ?MODULE:read_header(SocketType, Socket,
+ Timeout - (t()-T),
+ MaxHdrSz, ConfigDB,
+ InitData, SoFar ++ Data);
+ {tcp_closed, Socket} ->
+ ?vtrace("read_header(ip) -> socket closed",[]),
+ {socket_closed,normal};
+ {tcp_error, Socket, Reason} ->
+ ?vtrace("read_header(ip) -> socket error: ~p",
+ [Reason]),
+ {socket_closed, Reason};
+
+ %%
+ %% SSL
+ %%
+ {ssl, Socket, Data} ->
+ ?vtrace("read_header(ssl) -> got some data: ~p",
+ [sz(Data)]),
+ ?MODULE:read_header(SocketType, Socket,
+ Timeout - (t()-T),
+ MaxHdrSz, ConfigDB,
+ InitData, SoFar ++ Data);
+ {ssl_closed, Socket} ->
+ ?vtrace("read_header(ssl) -> socket closed", []),
+ {socket_closed, normal};
+ {ssl_error, Socket, Reason} ->
+ ?vtrace("read_header(ssl) -> socket error: ~p",
+ [Reason]),
+ {socket_closed, Reason}
+
+ after Timeout ->
+ ?vlog("read_header -> timeout", []),
+ {socket_closed, timeout}
+ end;
+
+ Error ->
+ httpd_response:send_status(SocketType, Socket,
+ 500, none, ConfigDB),
+ Error
+ end
+ end.
+
+
+terminated_header(MaxHdrSz, Data) ->
+ D1 = lists:flatten(Data),
+ ?vtrace("terminated_header -> Data size: ~p",[sz(D1)]),
+ case hsplit(MaxHdrSz,[],D1) of
+ not_terminated ->
+ false;
+ [Header, EntityBodyPart] ->
+ {true, Header++"\r\n\r\n",EntityBodyPart}
+ end.
+
+
+transform_header(SocketType, Socket, Request, ConfigDB, InitData, BodyPart) ->
+ case httpd_parse:request_header(Request) of
+ {not_implemented, RequestLine, Method, RequestURI, ParsedHeader,
+ HTTPVersion} ->
+ httpd_response:send_status(SocketType, Socket, 501,
+ {Method, RequestURI, HTTPVersion},
+ ConfigDB),
+ {error,"Not Implemented"};
+ {bad_request, {forbidden, URI}} ->
+ httpd_response:send_status(SocketType, Socket, 403, URI, ConfigDB),
+ {error,"Forbidden Request"};
+ {bad_request, Reason} ->
+ httpd_response:send_status(SocketType, Socket, 400, none,
+ ConfigDB),
+ {error,"Malformed request"};
+ {ok,[Method, RequestURI, HTTPVersion, RequestLine, ParsedHeader]} ->
+ ?DEBUG("send -> ~n"
+ " Method: ~p~n"
+ " RequestURI: ~p~n"
+ " HTTPVersion: ~p~n"
+ " RequestLine: ~p~n",
+ [Method, RequestURI, HTTPVersion, RequestLine]),
+ {ok, Info} =
+ httpd_parse:get_request_record(Socket, SocketType, ConfigDB,
+ Method, RequestURI, HTTPVersion,
+ RequestLine, ParsedHeader,
+ [], InitData),
+ %% Control that the Host header field is provided
+ case Info#mod.absolute_uri of
+ nohost ->
+ case Info#mod.http_version of
+ "HTTP/1.1" ->
+ httpd_response:send_status(Info, 400, none),
+ {error,"No host specified"};
+ _ ->
+ {ok, Info, BodyPart}
+ end;
+ _ ->
+ {ok, Info, BodyPart}
+ end
+ end.
+
+
+hsplit(_MaxHdrSz, Accu,[]) ->
+ not_terminated;
+hsplit(_MaxHdrSz, Accu, [ $\r, $\n, $\r, $\n | Tail]) ->
+ [lists:reverse(Accu), Tail];
+hsplit(nolimit, Accu, [H|T]) ->
+ hsplit(nolimit,[H|Accu],T);
+hsplit(MaxHdrSz, Accu, [H|T]) when length(Accu) < MaxHdrSz ->
+ hsplit(MaxHdrSz,[H|Accu],T);
+hsplit(MaxHdrSz, Accu, D) ->
+ throw({error,{header_too_long,length(Accu),length(D)}}).
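+
+%% Illustrative sketch only (not part of the original module): hsplit/3
+%% separates the head from any prefetched body bytes at the first CRLFCRLF.
+%% The function name is hypothetical.
+hsplit_example() ->
+    ["GET / HTTP/1.1\r\nHost: x", "body"] =
+        hsplit(nolimit, [], "GET / HTTP/1.1\r\nHost: x\r\n\r\nbody").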
+
+
+
+%%----------------------------------------------------------------------
+%% The HTTP/1.1 standard, chapter 8.2.3, says that a request containing
+%% an Expect header field must be responded to with 100 (Continue) by
+%% the server before the client sends the body.
+%%----------------------------------------------------------------------
+
+read_entity_body(SocketType, Socket, Timeout, Max, Length, BodyPart, Info,
+ ConfigDB) when integer(Max) ->
+ case expect(Info#mod.http_version, Info#mod.parsed_header, ConfigDB) of
+ continue when Max > Length ->
+ ?DEBUG("read_entity_body()->100 Continue ~n", []),
+ httpd_response:send_status(Info, 100, ""),
+ read_entity_body2(SocketType, Socket, Timeout, Max, Length,
+ BodyPart, Info, ConfigDB);
+ continue when Max < Length ->
+	    httpd_response:send_status(Info, 417, "Body too big"),
+ httpd_socket:close(SocketType, Socket),
+ {socket_closed,"Expect denied according to size"};
+ break ->
+ httpd_response:send_status(Info, 417, "Method not allowed"),
+ httpd_socket:close(SocketType, Socket),
+	    {socket_closed,"Expect conditions were not fulfilled"};
+ no_expect_header ->
+ read_entity_body2(SocketType, Socket, Timeout, Max, Length,
+ BodyPart, Info, ConfigDB);
+ http_1_0_expect_header ->
+ httpd_response:send_status(Info, 400,
+ "Only HTTP/1.1 Clients "
+ "may use the Expect Header"),
+ httpd_socket:close(SocketType, Socket),
+ {socket_closed,"Due to a HTTP/1.0 expect header"}
+ end;
+
+read_entity_body(SocketType, Socket, Timeout, Max, Length, BodyPart,
+ Info, ConfigDB) ->
+ case expect(Info#mod.http_version, Info#mod.parsed_header, ConfigDB) of
+ continue ->
+ ?DEBUG("read_entity_body() -> 100 Continue ~n", []),
+ httpd_response:send_status(Info, 100, ""),
+ read_entity_body2(SocketType, Socket, Timeout, Max, Length,
+ BodyPart, Info, ConfigDB);
+ break->
+ httpd_response:send_status(Info, 417, "Method not allowed"),
+ httpd_socket:close(SocketType, Socket),
+	    {socket_closed,"Expect conditions were not fulfilled"};
+ no_expect_header ->
+ read_entity_body2(SocketType, Socket, Timeout, Max, Length,
+ BodyPart, Info, ConfigDB);
+ http_1_0_expect_header ->
+ httpd_response:send_status(Info, 400,
+ "HTTP/1.0 Clients are not allowed "
+ "to use the Expect Header"),
+ httpd_socket:close(SocketType, Socket),
+ {socket_closed,"Expect header field in an HTTP/1.0 request"}
+ end.
+
+%%----------------------------------------------------------------------
+%% Check whether the body is transfer-encoded
+%%----------------------------------------------------------------------
+read_entity_body2(SocketType, Socket, Timeout, Max, Length, BodyPart,
+ Info, ConfigDB) ->
+ ?DEBUG("read_entity_body2() -> "
+ "~n Max: ~p"
+ "~n Length: ~p"
+ "~n Socket: ~p", [Max, Length, Socket]),
+
+ case transfer_coding(Info) of
+ {chunked, ChunkedData} ->
+ ?DEBUG("read_entity_body2() -> "
+ "Transfer-encoding: Chunked Data: BodyPart ~s", [BodyPart]),
+ read_chunked_entity(Info, Timeout, Max, Length, ChunkedData, [],
+ BodyPart);
+ unknown_coding ->
+ ?DEBUG("read_entity_body2() -> Transfer-encoding: Unknown",[]),
+ httpd_response:send_status(Info, 501, "Unknown Transfer-Encoding"),
+ httpd_socket:close(SocketType, Socket),
+	    {socket_closed,"Expect conditions were not fulfilled"};
+ none ->
+ ?DEBUG("read_entity_body2() -> Transfer-encoding: none",[]),
+ read_entity_body(SocketType, Socket, Timeout, Max, Length,
+ BodyPart)
+ end.
+
+
+%%----------------------------------------------------------------------
+%% The body was plain; read it from the socket
+%% ----------------------------------------------------------------------
+read_entity_body(_SocketType, _Socket, _Timeout, _Max, 0, _BodyPart) ->
+ {ok, []};
+
+read_entity_body(_SocketType, _Socket, _Timeout, Max, Len, _BodyPart)
+ when Max < Len ->
+    ?vlog("body too long: "
+ "~n Max: ~p"
+ "~n Len: ~p", [Max,Len]),
+ throw({error,{body_too_long,Max,Len}});
+
+%% OTP-4409: Fixing POST problem
+read_entity_body(_,_,_,_, Len, BodyPart) when Len == length(BodyPart) ->
+ ?vtrace("read_entity_body -> done when"
+ "~n Len = length(BodyPart): ~p", [Len]),
+ {ok, BodyPart};
+
+%% OTP-4550: Fix problem with trailing garbage produced by some clients.
+read_entity_body(_, _, _, _, Len, BodyPart) when Len < length(BodyPart) ->
+ ?vtrace("read_entity_body -> done when"
+ "~n Len: ~p"
+ "~n length(BodyPart): ~p", [Len, length(BodyPart)]),
+ {ok, lists:sublist(BodyPart,Len)};
+
+read_entity_body(SocketType, Socket, Timeout, Max, Len, BodyPart) ->
+ ?vtrace("read_entity_body -> entry when"
+ "~n Len: ~p"
+ "~n length(BodyPart): ~p", [Len, length(BodyPart)]),
+ %% OTP-4548:
+ %% The length calculation was previously (inets-2.*) done in the
+ %% read function. As of 3.0 it was removed from read but not
+ %% included here.
+ L = Len - length(BodyPart),
+ case httpd_socket:recv(SocketType, Socket, L, Timeout) of
+ {ok, Body} ->
+ ?vtrace("read_entity_body -> received some data:"
+ "~n length(Body): ~p", [length(Body)]),
+ {ok, BodyPart ++ Body};
+ {error,closed} ->
+ {socket_closed,normal};
+ {error,etimedout} ->
+ {socket_closed, timeout};
+ {error,Reason} ->
+ {socket_closed, Reason};
+ Other ->
+ {socket_closed, Other}
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% If the body of the message is encoded using the chunked transfer encoding,
+%% it looks something like this:
+%% METHOD URI HTTP/VSN
+%% Transfer-Encoding: chunked
+%% CRLF
+%% ChunkSize
+%% Chunk
+%% ChunkSize
+%% Chunk
+%% 0
+%% Trailer
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
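+
+%% Illustrative sketch only (not part of the original module): decoding a
+%% fully buffered chunked body of the shape described above, ignoring chunk
+%% extensions and trailers. The function names are hypothetical.
+decode_chunked_example() ->
+    "Wiki" = decode_chunked("3\r\nWik\r\n1\r\ni\r\n0\r\n\r\n").
+
+decode_chunked("0\r\n" ++ _Trailer) ->
+    [];
+decode_chunked(Body) ->
+    {ok, [SizeHex, Rest]} = httpd_util:split(Body, "\r\n", 2),
+    Size = httpd_util:hexlist_to_integer(SizeHex),
+    Chunk = string:substr(Rest, 1, Size),
+    %% Skip the chunk data and the CRLF that terminates it
+    Chunk ++ decode_chunked(string:substr(Rest, Size + 3)).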
+
+read_chunked_entity(Info, Timeout, Max, Length, ChunkedData, Body, []) ->
+ ?DEBUG("read_chunked_entity()->:no_chunks ~n", []),
+ read_chunked_entity(Info#mod.socket_type, Info#mod.socket,
+ Timeout, Max, Length, ChunkedData, Body,
+ Info#mod.config_db, Info);
+
+read_chunked_entity(Info, Timeout, Max, Length, ChunkedData, Body, BodyPart) ->
+ %% Get the size
+ ?DEBUG("read_chunked_entity() -> PrefetchedBodyPart: ~p ~n",[BodyPart]),
+ case parse_chunk_size(Info, Timeout, BodyPart) of
+ {ok, Size, NewBodyPart} when Size > 0 ->
+ ?DEBUG("read_chunked_entity() -> Size: ~p ~n", [Size]),
+ case parse_chunked_entity_body(Info, Timeout, Max, length(Body),
+ Size, NewBodyPart) of
+ {ok, Chunk, NewBodyPart1} ->
+ ?DEBUG("read_chunked_entity()->Size: ~p ~n", [Size]),
+ read_chunked_entity(Info, Timeout, Max, Length,
+ ChunkedData, Body ++ Chunk,
+ NewBodyPart1);
+ OK ->
+ httpd_socket:close(Info#mod.socket_type, Info#mod.socket),
+ {socket_closed, error}
+ end;
+ {ok, 0, Trailers} ->
+ ?DEBUG("read_chunked_entity()->Size: 0, Trailers: ~s Body: ~s ~n",
+ [Trailers, Body]),
+ case parse_chunk_trailer(Info, Timeout, Info#mod.config_db,
+ Trailers) of
+ {ok, TrailerFields} ->
+ {ok, TrailerFields, Body};
+ _->
+ {ok, []}
+ end;
+ Error ->
+ Error
+ end.
+
+
+parse_chunk_size(Info, Timeout, BodyPart) ->
+ case httpd_util:split(remove_newline(BodyPart), "\r\n", 2) of
+ {ok, [Size, Body]} ->
+ ?DEBUG("parse_chunk_size()->Size: ~p ~n", [Size]),
+ {ok, httpd_util:hexlist_to_integer(Size), Body};
+ {ok, [Size]} ->
+ ?DEBUG("parse_chunk_size()->Size: ~p ~n", [Size]),
+ Sz = get_chunk_size(Info#mod.socket_type,
+ Info#mod.socket, Timeout,
+ lists:reverse(Size)),
+ {ok, Sz, []}
+ end.
+
+%%----------------------------------------------------------------------
+%% We got the chunk size; get the chunk
+%%
+%% Max:    maximum number of bytes to read (may also be undefined)
+%% Length: number of bytes already read
+%% Size:   number of bytes to read for the chunk
+%%----------------------------------------------------------------------
+
+%% body too big
+parse_chunked_entity_body(Info, Timeout, Max, Length, Size, BodyPart)
+ when Max =< (Length + Size) ->
+ {error, body_to_big};
+
+%% Prefetched body part is bigger than the current chunk
+%% (i.e. BodyPart includes more than one chunk)
+parse_chunked_entity_body(Info, Timeout, Max, Length, Size, BodyPart)
+ when (Size+2) =< length(BodyPart) ->
+ Chunk = string:substr(BodyPart, 1, Size),
+ Rest = string:substr(BodyPart, Size+3),
+ ?DEBUG("parse_chunked_entity_body() -> ~nChunk: ~s ~nRest: ~s ~n",
+ [Chunk, Rest]),
+ {ok, Chunk, Rest};
+
+
+%% We just got a part of the current chunk
+parse_chunked_entity_body(Info, Timeout, Max, Length, Size, BodyPart) ->
+ %% OTP-4551:
+ %% Subtracting BodyPart from Size does not produce an integer
+ %% when BodyPart is a list...
+ Remaining = Size - length(BodyPart),
+ LastPartOfChunk = read_chunked_entity_body(Info#mod.socket_type,
+ Info#mod.socket,
+ Timeout, Max,
+ Length, Remaining),
+ %% Remove newline
+ httpd_socket:recv(Info#mod.socket_type, Info#mod.socket, 2, Timeout),
+ ?DEBUG("parse_chunked_entity_body() -> "
+ "~nBodyPart: ~s"
+ "~nLastPartOfChunk: ~s ~n",
+ [BodyPart, LastPartOfChunk]),
+ {ok, BodyPart ++ LastPartOfChunk, []}.
+
+
+%%----------------------------------------------------------------------
+%% If the data we got along with the header contained the whole chunked body,
+%% it may as well contain the trailer :-(
+%%----------------------------------------------------------------------
+%% Either the trailer begins with \r\n and then all data is there, or
+%% the trailer has data and then we read up to \r\n\r\n
+parse_chunk_trailer(Info,Timeout,ConfigDB,"\r\n")->
+ {ok,[]};
+parse_chunk_trailer(Info,Timeout,ConfigDB,Trailers) ->
+ ?DEBUG("parse_chunk_trailer()->Trailers: ~s ~n", [Trailers]),
+ case string:rstr(Trailers,"\r\n\r\n") of
+ 0 ->
+ MaxHdrSz=httpd_util:lookup(ConfigDB, max_header_size, 10240),
+ read_trailer_end(Info,Timeout,MaxHdrSz,Trailers);
+ _->
+ %%We got the whole header parse it up
+ parse_trailers(Trailers)
+ end.
+
+parse_trailers(Trailer)->
+ ?DEBUG("parse_trailer()->Trailer: ~s",[Trailer]),
+ {ok,[Fields0|Crap]}=httpd_util:split(Trailer,"\r\n\r\n",2),
+ Fields=string:tokens(Fields0,"\r\n"),
+ [getTrailerField(X)||X<-Fields,lists:member($:,X)].
+
+
+read_trailer_end(Info,Timeout,MaxHdrSz,[])->
+ ?DEBUG("read_trailer_end()->[]",[]),
+ case read_trailer(Info#mod.socket_type,Info#mod.socket,
+ Timeout,MaxHdrSz,[],[],
+ httpd_util:key1search(Info#mod.parsed_header,"trailer",[])) of
+ {ok,Trailers}->
+ Trailers;
+ _->
+ []
+ end;
+read_trailer_end(Info,Timeout,MaxHdrSz,Trailers)->
+ ?DEBUG("read_trailer_end()->Trailers: ~s ~n ",[Trailers]),
+    %% Get the last part of the last header field
+    End = lists:reverse(
+            lists:takewhile(fun(X) ->
+                                    case X of
+                                        $\n -> false;
+                                        $\r -> false;
+                                        _   -> true
+                                    end
+                            end,
+                            lists:reverse(Trailers))),
+ Fields0=regexp:split(Trailers,"\r\n"),
+ %%Get rid of the last header field
+ [_Last|Fields]=lists:reverse(Fields0),
+ Headers=[getTrailerField(X)||X<-Fields,lists:member($:,X)],
+ case read_trailer(Info#mod.socket_type,Info#mod.socket,
+ Timeout,MaxHdrSz,Headers,End,
+ httpd_util:key1search(Info#mod.parsed_header,"trailer",[])) of
+ {ok,Trailers}->
+ Trailers;
+ _->
+ []
+ end.
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% The code below is a good way to read in chunked encoding, but it
+%% requires that the encoding comes from a stream and not from a list
+%%&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
+
+%%----------------------------------------------------------------------
+%% The body is encoded with chunked encoding; read it in
+%% ChunkedData: chunk extensions
+%% Body:        the chunked body read so far
+%% Max:         maximum number of bytes to read
+%% Length:      number of bytes already read
+%% Size:        number of bytes to read for the chunk
+%%----------------------------------------------------------------------
+
+
+
+read_chunked_entity(SocketType, Socket, Timeout, Max, Length, ChunkedData,
+ Body, ConfigDB, Info) ->
+ T = t(),
+ case get_chunk_size(SocketType,Socket,Timeout,[]) of
+ Size when integer(Size), Size>0 ->
+ case read_chunked_entity_body(SocketType, Socket,
+ Timeout-(t()-T),
+ Max, length(Body), Size) of
+ {ok,Chunk} ->
+ ?DEBUG("read_chunked_entity/9 Got a chunk: ~p " ,[Chunk]),
+ %% Two bytes are left of the chunk, that is the CRLF
+ %% at the end that is not a part of the message
+ %% So we read it and do nothing with it.
+ httpd_socket:recv(SocketType,Socket,2,Timeout-(t()-T)),
+ read_chunked_entity(SocketType, Socket, Timeout-(t()-T),
+ Max, Length, ChunkedData, Body++Chunk,
+ ConfigDB, Info);
+ Error ->
+ ?DEBUG("read_chunked_entity/9 Error: ~p " ,[Error]),
+ httpd_socket:close(SocketType,Socket),
+ {socket_closed,error}
+ end;
+ Size when integer(Size), Size == 0 ->
+ %% Must read in any trailer fields here
+ read_chunk_trailer(SocketType, Socket, Timeout,
+ Max, Info, ChunkedData, Body, ConfigDB);
+ Error ->
+ Error
+ end.
+
+
+%% If a user wants to send header data after the chunked data we
+%% must pick it out
+read_chunk_trailer(SocketType, Socket, Timeout, Max, Info, ChunkedData,
+ Body, ConfigDB) ->
+ ?DEBUG("read_chunk_trailer/8: ~p " ,[Body]),
+ MaxHdrSz = httpd_util:lookup(ConfigDB,max_header_size,10240),
+ case httpd_util:key1search(Info#mod.parsed_header,"trailer")of
+ undefined ->
+ {ok,Body};
+ Fields ->
+ case read_trailer(SocketType, Socket, Timeout,
+ MaxHdrSz, [], [],
+ string:tokens(
+ httpd_util:to_lower(Fields),",")) of
+ {ok,[]} ->
+ {ok,Body};
+ {ok,HeaderFields} ->
+ % ParsedExtraHeaders =
+ % httpd_parse:tagup_header(httpd_parse:split_lines(HeaderFields)),
+ {ok,HeaderFields,Body};
+ Error ->
+ Error
+ end
+ end.
+
+read_chunked_entity_body(SocketType, Socket, Timeout, Max, Length, Size)
+ when integer(Max) ->
+ read_entity_body(SocketType, Socket, Timeout, Max-Length, Size, []);
+
+read_chunked_entity_body(SocketType, Socket, Timeout, Max, _Length, Size) ->
+ read_entity_body(SocketType, Socket, Timeout, Max, Size, []).
+
+%% If we read in the \r\n, httpd_util:hexlist_to_integer will remove it
+%% and we get rid of it immediately :-)
+get_chunk_size(SocketType, Socket, Timeout, Size) ->
+ T = t(),
+ ?DEBUG("get_chunk_size: ~p " ,[Size]),
+ case httpd_socket:recv(SocketType,Socket,1,Timeout) of
+ {ok,[Digit]} when Digit==$\n ->
+ httpd_util:hexlist_to_integer(lists:reverse(Size));
+ {ok,[Digit]} ->
+ get_chunk_size(SocketType,Socket,Timeout-(t()-T),[Digit|Size]);
+ {error,closed} ->
+ {socket_closed,normal};
+ {error,etimedout} ->
+ {socket_closed, timeout};
+ {error,Reason} ->
+ {socket_closed, Reason};
+ Other ->
+ {socket_closed,Other}
+ end.
+
+
+
+
+%%----------------------------------------------------------------------
+%% Reads the HTTP trailer
+%% It would be easy to tweak read_header to do this, but this way the
+%% chunked encoding handling can be updated more easily.
+%%----------------------------------------------------------------------
+
+
+%% When end is reached
+%% read_trailer(SocketType,Socket,Timeout,MaxHdrSz,Headers,Last,[]) ->
+%% {ok,Headers};
+
+%% When the header is too big
+read_trailer(_,_,_,MaxHdrSz,Headers,Bs,_Fields)
+  when MaxHdrSz < length(Headers) ->
+    ?vlog("header too long: "
+ "~n MaxHdrSz: ~p"
+ "~n length(Bs): ~p", [MaxHdrSz,length(Bs)]),
+ throw({error,{header_too_long,MaxHdrSz,length(Bs)}});
+
+%% The last Crlf is there
+read_trailer(_, _, _, _, Headers, [$\n, $\r], _) ->
+ {ok,Headers};
+
+read_trailer(SocketType, Socket, Timeout, MaxHdrSz, Headers,
+ [$\n, $\r|Rest], Fields) ->
+ case getTrailerField(lists:reverse(Rest))of
+ {error,Reason}->
+ {error,"Bad trailer"};
+ {HeaderField,Value}->
+ case lists:member(HeaderField,Fields) of
+ true ->
+ read_trailer(SocketType,Socket,Timeout,MaxHdrSz,
+ [{HeaderField,Value} |Headers],[],
+ lists:delete(HeaderField,Fields));
+ false ->
+ read_trailer(SocketType,Socket,Timeout,MaxHdrSz,
+ Headers,[],Fields)
+ end
+ end;
+
+% read_trailer(SocketType,Socket,Timeout,MaxHdrSz,Headers,[$\n, $\r|Rest],Fields) ->
+% case Rest of
+% [] ->
+% read_trailer(SocketType,Socket,Timeout,MaxHdrSz,Headers,Rest,Fields);
+% Field ->
+% case getTrailerField(lists:reverse(Rest))of
+% {error,Reason}->
+% {error,"Bad trailer"};
+% {HeaderField,Value}->
+% case lists:member(HeaderField,Fields) of
+% true ->
+% read_trailer(SocketType,Socket,Timeout,MaxHdrSz,
+% [{HeaderField,Value} |Headers],[],
+% lists:delete(HeaderField,Fields));
+% false ->
+% read_trailer(SocketType,Socket,Timeout,MaxHdrSz,
+% Headers,[],Fields)
+% end
+% end
+% end;
+
+read_trailer(SocketType,Socket,Timeout,MaxHdrSz,Headers,Bs,Fields) ->
+ %% ?vlog("read_header -> entry with Timeout: ~p",[Timeout]),
+ T = t(),
+ case (catch httpd_socket:recv(SocketType,Socket,1,Timeout)) of
+ {ok,[B]} ->
+ read_trailer(SocketType, Socket, Timeout-(t()-T),
+ MaxHdrSz, Headers, [B|Bs], Fields);
+ {error,closed} ->
+ {socket_closed,normal};
+ {error,etimedout} ->
+ {socket_closed, timeout};
+ {error,Reason} ->
+ {socket_closed, Reason};
+ Other ->
+ {socket_closed,Other}
+ end.
+
+getTrailerField(HeaderField)->
+ case string:str(HeaderField,":") of
+ 0->
+ {error,"badheaderfield"};
+ Number ->
+ {httpd_util:to_lower(string:substr(HeaderField,1,Number-1)),
+ httpd_util:to_lower(string:substr(HeaderField,Number+1))}
+ end.
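+
+%% Illustrative sketch only (not part of the original module): splitting a
+%% trailer line into a lowercased {Field,Value} pair; note that the value
+%% is not stripped of its leading space. The function name is hypothetical.
+getTrailerField_example() ->
+    {"content-md5", " xyz"} = getTrailerField("Content-MD5: xyz"),
+    {error, "badheaderfield"} = getTrailerField("no colon here").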
+
+
+
+
+%% Time in milliseconds
+t() ->
+ {A,B,C} = erlang:now(),
+ A*1000000000+B*1000+(C div 1000).
+
+%%----------------------------------------------------------------------
+%% If the user sends an Expect header field with the value 100-continue,
+%% we must send a 100 status message if it is an HTTP/1.1 client.
+
+%% If it is an HTTP/1.0 client it is a little more difficult. If Expect
+%% is not defined it is easy, but otherwise shall we break the
+%% transmission or let it continue? The standard is not clear on whether
+%% to break the connection or wait for data.
+%%----------------------------------------------------------------------
+expect(HTTPVersion,ParsedHeader,ConfigDB)->
+ case HTTPVersion of
+ [$H,$T,$T,$P,$\/,$1,$.,N|_Whatever]when N>=1->
+ case httpd_util:key1search(ParsedHeader,"expect") of
+ "100-continue" ->
+ continue;
+ undefined ->
+ no_expect_header;
+ NewValue ->
+ break
+ end;
+ _OldVersion ->
+ case httpd_util:key1search(ParsedHeader,"expect") of
+ undefined ->
+ no_expect_header;
+ NewValue ->
+ case httpd_util:lookup(ConfigDB,expect,continue) of
+ continue->
+ no_expect_header;
+ _ ->
+ http_1_0_expect_header
+ end
+ end
+ end.
+
+
+%%----------------------------------------------------------------------
+%% According to the http/1.1 standard all applications must understand
+%% Chunked encoded data. (Last line chapter 3.6.1).
+transfer_coding(#mod{parsed_header = Ph}) ->
+ case httpd_util:key1search(Ph, "transfer-encoding", none) of
+ none ->
+ none;
+ [$c,$h,$u,$n,$k,$e,$d|Data]->
+ {chunked,Data};
+ _ ->
+ unknown_coding
+ end.
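+
+%% Illustrative sketch only (not part of the original module): how the
+%% transfer-encoding header field maps to the return values above.
+%% The function name and the header list are hypothetical.
+transfer_coding_example() ->
+    {chunked, []} =
+        transfer_coding(#mod{parsed_header =
+                             [{"transfer-encoding", "chunked"}]}),
+    none = transfer_coding(#mod{parsed_header = []}).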
+
+
+
+handle_read_error({header_too_long,Max,Rem},
+ SocketType,Socket,ConfigDB,Peername) ->
+ String = io_lib:format("header too long: ~p : ~p",[Max,Rem]),
+ handle_read_error(ConfigDB,String,SocketType,Socket,Peername,
+ max_header_action,close);
+handle_read_error({body_too_long,Max,Actual},
+ SocketType,Socket,ConfigDB,Peername) ->
+ String = io_lib:format("body too long: ~p : ~p",[Max,Actual]),
+ handle_read_error(ConfigDB,String,SocketType,Socket,Peername,
+ max_body_action,close);
+handle_read_error(Error,SocketType,Socket,ConfigDB,Peername) ->
+ ok.
+
+
+handle_read_error(ConfigDB, ReasonString, SocketType, Socket, Peername,
+ Item, Default) ->
+ ?vlog("error reading request: ~s",[ReasonString]),
+ E = lists:flatten(
+ io_lib:format("Error reading request: ~s",[ReasonString])),
+ error_log(mod_log, SocketType, Socket, ConfigDB, Peername, E),
+ error_log(mod_disk_log, SocketType, Socket, ConfigDB, Peername, E),
+ case httpd_util:lookup(ConfigDB,Item,Default) of
+ reply414 ->
+ send_read_status(SocketType, Socket, 414, ReasonString, ConfigDB);
+ _ ->
+ ok
+ end.
+
+send_read_status(SocketType, Socket, Code, ReasonString, ConfigDB) ->
+ httpd_response:send_status(SocketType, Socket, Code, ReasonString,
+ ConfigDB).
+
+
+error_log(Mod, SocketType, Socket, ConfigDB, Peername, String) ->
+ Modules = httpd_util:lookup(ConfigDB, modules,
+ [mod_get, mod_head, mod_log]),
+ case lists:member(Mod, Modules) of
+ true ->
+ Mod:error_log(SocketType, Socket, ConfigDB, Peername, String);
+ _ ->
+ ok
+ end.
+
+
+sz(L) when list(L) ->
+ length(L);
+sz(B) when binary(B) ->
+ size(B);
+sz(O) ->
+ {unknown_size,O}.
+
+
+%% Socket utility functions:
+
+close(SocketType, Socket, ConfigDB) ->
+ case httpd_socket:close(SocketType, Socket) of
+ ok ->
+ ok;
+ {error, Reason} ->
+ ?vlog("error while closing socket: ~p",[Reason]),
+ ok
+ end.
+
+close_sleep({ssl, _}, Time) ->
+ sleep(Time);
+close_sleep(_, _) ->
+ ok.
+
+
+sleep(T) -> receive after T -> ok end.
+
+
+dec(N) when integer(N) ->
+ N-1;
+dec(N) ->
+ N.
+
+
+content_length(#mod{parsed_header = Ph}) ->
+ list_to_integer(httpd_util:key1search(Ph, "content-length","0")).
+
+
+remove_newline(List)->
+ lists:dropwhile(fun newline/1,List).
+
+newline($\r) ->
+ true;
+newline($\n) ->
+ true;
+newline(_Sign) ->
+ false.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_response.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_response.erl
new file mode 100644
index 0000000000..1685cbc129
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_response.erl
@@ -0,0 +1,437 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_response.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+-module(httpd_response).
+-export([send/1, send_status/3, send_status/5]).
+
+%% 'code' is the key for the status code, e.g. 200, 404, ...
+-define(HTTP11HEADERFIELDS,[content_length, accept_ranges, cache_control, date,
+ pragma, trailer, transfer_encoding, etag, location,
+ retry_after, server, allow,
+ content_encoding, content_language,
+ content_location, content_MD5, content_range,
+ content_type, expires, last_modified]).
+
+-define(HTTP10HEADERFIELDS,[content_length, date, pragma, transfer_encoding,
+ location, server, allow, content_encoding,
+ content_type, last_modified]).
+
+-define(PROCEED_RESPONSE(StatusCode, Info),
+ {proceed,
+ [{response,{already_sent, StatusCode,
+ httpd_util:key1search(Info#mod.data,content_lenght)}}]}).
+
+
+-include("httpd.hrl").
+
+-define(VMODULE,"RESPONSE").
+-include("httpd_verbosity.hrl").
+
+%% send
+
+send(#mod{config_db = ConfigDB} = Info) ->
+ ?vtrace("send -> Request line: ~p", [Info#mod.request_line]),
+ Modules = httpd_util:lookup(ConfigDB,modules,[mod_get, mod_head, mod_log]),
+ case traverse_modules(Info, Modules) of
+ done ->
+ Info;
+ {proceed, Data} ->
+ case httpd_util:key1search(Data, status) of
+ {StatusCode, PhraseArgs, Reason} ->
+ ?vdebug("send -> proceed/status: ~n"
+ "~n StatusCode: ~p"
+ "~n PhraseArgs: ~p"
+ "~n Reason: ~p",
+ [StatusCode, PhraseArgs, Reason]),
+ send_status(Info, StatusCode, PhraseArgs),
+ Info;
+
+ undefined ->
+ case httpd_util:key1search(Data, response) of
+ {already_sent, StatusCode, Size} ->
+ ?vtrace("send -> already sent: "
+ "~n StatusCode: ~p"
+ "~n Size: ~p",
+ [StatusCode, Size]),
+ Info;
+ {response, Header, Body} -> %% New way
+ send_response(Info, Header, Body),
+ Info;
+ {StatusCode, Response} -> %% Old way
+ send_response_old(Info, StatusCode, Response),
+ Info;
+ undefined ->
+ ?vtrace("send -> undefined response", []),
+ send_status(Info, 500, none),
+ Info
+ end
+ end
+ end.
+
+
+%% traverse_modules
+
+traverse_modules(Info,[]) ->
+ {proceed,Info#mod.data};
+traverse_modules(Info,[Module|Rest]) ->
+ case (catch apply(Module,do,[Info])) of
+ {'EXIT', Reason} ->
+ ?vlog("traverse_modules -> exit reason: ~p",[Reason]),
+ String =
+ lists:flatten(
+ io_lib:format("traverse exit from apply: ~p:do => ~n~p",
+ [Module, Reason])),
+ report_error(mod_log, Info#mod.config_db, String),
+ report_error(mod_disk_log, Info#mod.config_db, String),
+ done;
+ done ->
+ done;
+ {break,NewData} ->
+ {proceed,NewData};
+ {proceed,NewData} ->
+ traverse_modules(Info#mod{data=NewData},Rest)
+ end.
+
+%% send_status %%
+
+
+send_status(#mod{socket_type = SocketType,
+ socket = Socket,
+ connection = Conn} = Info, 100, _PhraseArgs) ->
+ ?DEBUG("send_status -> StatusCode: ~p~n",[100]),
+ Header = httpd_util:header(100, Conn),
+ httpd_socket:deliver(SocketType, Socket,
+ [Header, "Content-Length:0\r\n\r\n"]);
+
+send_status(#mod{socket_type = SocketType,
+ socket = Socket,
+ config_db = ConfigDB} = Info, StatusCode, PhraseArgs) ->
+ send_status(SocketType, Socket, StatusCode, PhraseArgs, ConfigDB).
+
+send_status(SocketType, Socket, StatusCode, PhraseArgs, ConfigDB) ->
+ ?DEBUG("send_status -> ~n"
+ " StatusCode: ~p~n"
+ " PhraseArgs: ~p",
+ [StatusCode, PhraseArgs]),
+ Header = httpd_util:header(StatusCode, "text/html", false),
+ ReasonPhrase = httpd_util:reason_phrase(StatusCode),
+ Message = httpd_util:message(StatusCode, PhraseArgs, ConfigDB),
+ Body = get_body(ReasonPhrase, Message),
+ Header1 =
+ Header ++
+ "Content-Length:" ++
+ integer_to_list(length(Body)) ++
+ "\r\n\r\n",
+ httpd_socket:deliver(SocketType, Socket, [Header1, Body]).
+
+
+get_body(ReasonPhrase, Message)->
+ "<HTML>
+ <HEAD>
+ <TITLE>"++ReasonPhrase++"</TITLE>
+ </HEAD>
+ <BODY>
+ <H1>"++ReasonPhrase++"</H1>\n"++Message++"\n</BODY>
+ </HTML>\n".
+
+
+%%% Create a response from the Key/Val tuples In the Head List
+%%% Body is a tuple {body,Fun(),Args}
+
+%% send_response
+%% Allowed Fields
+
+% HTTP-Version StatusCode Reason-Phrase
+% *((general-headers
+% response-headers
+% entity-headers)CRLF)
+% CRLF
+% ?(BODY)
+
+% General Header fields
+% ======================
+% Cache-Control cache_control
+%     Connection              %% Is set depending on the request
+%     Date
+%     Pragma
+% Trailer
+% Transfer-Encoding
+
+% Response Header field
+% =====================
+% Accept-Ranges
+%     (Age)                   Mostly for proxies
+% Etag
+% Location
+% (Proxy-Authenticate) Only for proxies
+% Retry-After
+% Server
+% Vary
+% WWW-Authenticate
+%
+% Entity Header Fields
+% ====================
+% Allow
+% Content-Encoding
+% Content-Language
+% Content-Length
+% Content-Location
+% Content-MD5
+% Content-Range
+% Content-Type
+% Expires
+% Last-Modified
+
+
+send_response(Info, Header, Body) ->
+ ?vtrace("send_response -> (new) entry with"
+ "~n Header: ~p", [Header]),
+ case httpd_util:key1search(Header, code) of
+ undefined ->
+ %% No status code
+	    %% Oops, this must be very bad:
+	    %% generate a 404 content not available
+	    send_status(Info, 404, "The file is not available");
+ StatusCode ->
+ case send_header(Info, StatusCode, Header) of
+ ok ->
+ send_body(Info, StatusCode, Body);
+ Error ->
+ ?vlog("head delivery failure: ~p", [Error]),
+ done
+ end
+ end.
+
+
+send_header(#mod{socket_type = Type, socket = Sock,
+ http_version = Ver, connection = Conn} = Info,
+ StatusCode, Head0) ->
+    ?vtrace("send_header -> entry with"
+ "~n Ver: ~p"
+ "~n Conn: ~p", [Ver, Conn]),
+ Head1 = create_header(Ver, Head0),
+ StatusLine = [Ver, " ",
+ io_lib:write(StatusCode), " ",
+ httpd_util:reason_phrase(StatusCode), "\r\n"],
+ Connection = get_connection(Conn, Ver),
+ Head = list_to_binary([StatusLine, Head1, Connection,"\r\n"]),
+ ?vtrace("deliver head", []),
+ httpd_socket:deliver(Type, Sock, Head).
+
+
+send_body(_, _, nobody) ->
+ ?vtrace("send_body -> no body", []),
+ ok;
+
+send_body(#mod{socket_type = Type, socket = Sock},
+ StatusCode, Body) when list(Body) ->
+ ?vtrace("deliver body of size ~p", [length(Body)]),
+ httpd_socket:deliver(Type, Sock, Body);
+
+send_body(#mod{socket_type = Type, socket = Sock} = Info,
+ StatusCode, {Fun, Args}) ->
+ case (catch apply(Fun, Args)) of
+ close ->
+ httpd_socket:close(Type, Sock),
+ done;
+
+ sent ->
+ ?PROCEED_RESPONSE(StatusCode, Info);
+
+ {ok, Body} ->
+ ?vtrace("deliver body", []),
+ case httpd_socket:deliver(Type, Sock, Body) of
+ ok ->
+ ?PROCEED_RESPONSE(StatusCode, Info);
+ Error ->
+ ?vlog("body delivery failure: ~p", [Error]),
+ done
+ end;
+
+ Error ->
+ ?vlog("failure of apply(~p,~p): ~p", [Fun, Args, Error]),
+ done
+ end;
+send_body(I, S, B) ->
+ ?vinfo("BAD ARGS: "
+ "~n I: ~p"
+ "~n S: ~p"
+ "~n B: ~p", [I, S, B]),
+ exit({bad_args, {I, S, B}}).
+
+
+%% Return an HTTP header field that indicates that the
+%% connection will not be persistent
+get_connection(true,"HTTP/1.0")->
+ "Connection:close\r\n";
+get_connection(false,"HTTP/1.1") ->
+ "Connection:close\r\n";
+get_connection(_,_) ->
+ "".
+
+
+create_header("HTTP/1.1", Data) ->
+ create_header1(?HTTP11HEADERFIELDS, Data);
+create_header(_, Data) ->
+ create_header1(?HTTP10HEADERFIELDS, Data).
+
+create_header1(Fields, Data) ->
+ ?DEBUG("create_header() -> "
+ "~n Fields :~p~n Data: ~p ~n", [Fields, Data]),
+ mapfilter(fun(Field)->
+ transform({Field, httpd_util:key1search(Data, Field)})
+ end, Fields, undefined).
+
+
+%% Map Fun over the list and remove the results that evaluate to RemoveVal
+mapfilter(Fun,List,RemoveVal)->
+ mapfilter(Fun,List,[],RemoveVal).
+
+mapfilter(Fun,[],[RemoveVal|Acc],RemoveVal)->
+ Acc;
+mapfilter(Fun,[],Acc,_RemoveVal)->
+ Acc;
+
+mapfilter(Fun,[Elem|Rest],[RemoveVal|Acc],RemoveVal)->
+ mapfilter(Fun,Rest,[Fun(Elem)|Acc],RemoveVal);
+mapfilter(Fun,[Elem|Rest],Acc,RemoveVal)->
+ mapfilter(Fun,Rest,[Fun(Elem)|Acc],RemoveVal).
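+
+%% Usage sketch (hypothetical values): results equal to RemoveVal are dropped
+%% and the remaining results come back in reverse order, e.g.
+%%   mapfilter(fun(X) -> X*2 end, [1,2,3], 4) =:= [6,2]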
+
+
+transform({content_type,undefined})->
+ ["Content-Type:text/plain\r\n"];
+
+transform({date,undefined})->
+ ["Date:",httpd_util:rfc1123_date(),"\r\n"];
+
+transform({date,RFCDate})->
+ ["Date:",RFCDate,"\r\n"];
+
+
+transform({_Key,undefined})->
+ undefined;
+transform({accept_ranges,Value})->
+ ["Accept-Ranges:",Value,"\r\n"];
+transform({cache_control,Value})->
+ ["Cache-Control:",Value,"\r\n"];
+transform({pragma,Value})->
+ ["Pragma:",Value,"\r\n"];
+transform({trailer,Value})->
+ ["Trailer:",Value,"\r\n"];
+transform({transfer_encoding,Value})->
+ ["Pragma:",Value,"\r\n"];
+transform({etag,Value})->
+ ["ETag:",Value,"\r\n"];
+transform({location,Value})->
+ ["Retry-After:",Value,"\r\n"];
+transform({server,Value})->
+ ["Server:",Value,"\r\n"];
+transform({allow,Value})->
+ ["Allow:",Value,"\r\n"];
+transform({content_encoding,Value})->
+ ["Content-Encoding:",Value,"\r\n"];
+transform({content_language,Value})->
+ ["Content-Language:",Value,"\r\n"];
+transform({retry_after,Value})->
+ ["Retry-After:",Value,"\r\n"];
+transform({content_location,Value})->
+ ["Content-Location:",Value,"\r\n"];
+transform({content_length,Value})->
+ ["Content-Length:",Value,"\r\n"];
+transform({content_MD5,Value})->
+ ["Content-MD5:",Value,"\r\n"];
+transform({content_range,Value})->
+ ["Content-Range:",Value,"\r\n"];
+transform({content_type,Value})->
+ ["Content-Type:",Value,"\r\n"];
+transform({expires,Value})->
+ ["Expires:",Value,"\r\n"];
+transform({last_modified,Value})->
+ ["Last-Modified:",Value,"\r\n"].
+
+
+
+%%----------------------------------------------------------------------
+%% This is the old way of sending data. It is strongly encouraged to
+%% leave this method and move on to the newer form of response
+%% OTP-4408
+%%----------------------------------------------------------------------
+
+send_response_old(#mod{socket_type = Type,
+ socket = Sock,
+ method = "HEAD"} = Info,
+ StatusCode, Response) ->
+ ?vtrace("send_response_old(HEAD) -> entry with"
+ "~n StatusCode: ~p"
+ "~n Response: ~p",
+ [StatusCode,Response]),
+ case httpd_util:split(lists:flatten(Response),"\r\n\r\n|\n\n",2) of
+ {ok, [Head, Body]} ->
+ Header =
+ httpd_util:header(StatusCode,Info#mod.connection) ++
+ "Content-Length:" ++ content_length(Body),
+ httpd_socket:deliver(Type, Sock, [Header,Head,"\r\n"]);
+
+ Error ->
+ send_status(Info, 500, "Internal Server Error")
+ end;
+
+send_response_old(#mod{socket_type = Type,
+ socket = Sock} = Info,
+ StatusCode, Response) ->
+ ?vtrace("send_response_old -> entry with"
+ "~n StatusCode: ~p"
+ "~n Response: ~p",
+ [StatusCode,Response]),
+ case httpd_util:split(lists:flatten(Response),"\r\n\r\n|\n\n",2) of
+ {ok, [_Head, Body]} ->
+ Header =
+ httpd_util:header(StatusCode,Info#mod.connection) ++
+ "Content-Length:" ++ content_length(Body),
+ httpd_socket:deliver(Type, Sock, [Header, Response]);
+
+ {ok, Body} ->
+ Header =
+ httpd_util:header(StatusCode,Info#mod.connection) ++
+ "Content-Length:" ++ content_length(Body) ++ "\r\n",
+ httpd_socket:deliver(Type, Sock, [Header, Response]);
+
+ {error, Reason} ->
+ send_status(Info, 500, "Internal Server Error")
+ end.
+
+content_length(Body)->
+ integer_to_list(httpd_util:flatlength(Body))++"\r\n".
+
+
+report_error(Mod, ConfigDB, Error) ->
+ Modules = httpd_util:lookup(ConfigDB, modules,
+ [mod_get, mod_head, mod_log]),
+ case lists:member(Mod, Modules) of
+ true ->
+ Mod:report_error(ConfigDB, Error);
+ _ ->
+ ok
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_socket.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_socket.erl
new file mode 100644
index 0000000000..375b43784b
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_socket.erl
@@ -0,0 +1,381 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_socket.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+-module(httpd_socket).
+-export([start/1,
+ listen/2, listen/3, accept/2, accept/3,
+ deliver/3, send/3, recv/4,
+ close/2,
+ peername/2, resolve/1, config/1,
+ controlling_process/3,
+ active_once/2]).
+
+-include("httpd.hrl").
+
+-define(VMODULE,"SOCKET").
+-include("httpd_verbosity.hrl").
+
+-include_lib("kernel/include/inet.hrl").
+
+%% start -> ok | {error,Reason}
+
+start(ip_comm) ->
+ case inet_db:start() of
+ {ok,_Pid} ->
+ ok;
+ {error,{already_started,_Pid}} ->
+ ok;
+ Error ->
+ Error
+ end;
+start({ssl,_SSLConfig}) ->
+ case ssl:start() of
+ ok ->
+ ok;
+ {ok, _} ->
+ ok;
+ {error,{already_started,_}} ->
+ ok;
+ Error ->
+ Error
+ end.
+
+%% listen
+
+listen(SocketType,Port) ->
+ listen(SocketType,undefined,Port).
+
+listen(ip_comm,Addr,Port) ->
+ ?DEBUG("listening(ip_comm) to port ~p", [Port]),
+ Opt = sock_opt(Addr,[{backlog,128},{reuseaddr,true}]),
+ case gen_tcp:listen(Port,Opt) of
+ {ok,ListenSocket} ->
+ ListenSocket;
+ Error ->
+ Error
+ end;
+listen({ssl,SSLConfig},Addr,Port) ->
+ ?DEBUG("listening(ssl) to port ~p"
+ "~n SSLConfig: ~p", [Port,SSLConfig]),
+ Opt = sock_opt(Addr,SSLConfig),
+ case ssl:listen(Port, Opt) of
+ {ok,ListenSocket} ->
+ ListenSocket;
+ Error ->
+ Error
+ end.
+
+
+sock_opt(undefined,Opt) -> [{packet,0},{active,false}|Opt];
+sock_opt(Addr,Opt) -> [{ip, Addr},{packet,0},{active,false}|Opt].
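+
+%% Usage sketch (hypothetical address): sock_opt/2 prepends the common listen
+%% options, e.g.
+%%   sock_opt({127,0,0,1}, [{backlog,128},{reuseaddr,true}]) =:=
+%%       [{ip,{127,0,0,1}},{packet,0},{active,false},
+%%        {backlog,128},{reuseaddr,true}]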
+
+%% -define(packet_type_http,true).
+%% -define(packet_type_httph,true).
+
+%% -ifdef(packet_type_http).
+%% sock_opt(undefined,Opt) -> [{packet,http},{active,false}|Opt];
+%% sock_opt(Addr,Opt) -> [{ip, Addr},{packet,http},{active,false}|Opt].
+%% -elif(packet_type_httph).
+%% sock_opt(undefined,Opt) -> [{packet,httph},{active,false}|Opt];
+%% sock_opt(Addr,Opt) -> [{ip, Addr},{packet,httph},{active,false}|Opt].
+%% -else.
+%% sock_opt(undefined,Opt) -> [{packet,0},{active,false}|Opt];
+%% sock_opt(Addr,Opt) -> [{ip, Addr},{packet,0},{active,false}|Opt].
+%% -endif.
+
+
+%% active_once
+
+active_once(Type, Sock) ->
+ active(Type, Sock, once).
+
+active(ip_comm, Sock, Active) ->
+ inet:setopts(Sock, [{active, Active}]);
+active({ssl, _SSLConfig}, Sock, Active) ->
+ ssl:setopts(Sock, [{active, Active}]).
+
+%% accept
+
+accept(A, B) ->
+ accept(A, B, infinity).
+
+
+accept(ip_comm,ListenSocket, T) ->
+ ?DEBUG("accept(ip_comm) on socket ~p", [ListenSocket]),
+ case gen_tcp:accept(ListenSocket, T) of
+ {ok,Socket} ->
+ Socket;
+ Error ->
+ ?vtrace("accept(ip_comm) failed for reason:"
+ "~n Error: ~p",[Error]),
+ Error
+ end;
+accept({ssl,_SSLConfig},ListenSocket, T) ->
+ ?DEBUG("accept(ssl) on socket ~p", [ListenSocket]),
+ case ssl:accept(ListenSocket, T) of
+ {ok,Socket} ->
+ Socket;
+ Error ->
+ ?vtrace("accept(ssl) failed for reason:"
+ "~n Error: ~p",[Error]),
+ Error
+ end.
+
+
+%% controlling_process
+
+controlling_process(ip_comm, Socket, Pid) ->
+ gen_tcp:controlling_process(Socket, Pid);
+controlling_process({ssl, _}, Socket, Pid) ->
+ ssl:controlling_process(Socket, Pid).
+
+
+%% deliver
+
+deliver(SocketType, Socket, IOListOrBinary) ->
+ case send(SocketType, Socket, IOListOrBinary) of
+% {error, einval} ->
+% ?vlog("deliver failed for reason: einval"
+% "~n SocketType: ~p"
+% "~n Socket: ~p"
+% "~n Data: ~p",
+% [SocketType, Socket, type(IOListOrBinary)]),
+% (catch close(SocketType, Socket)),
+% socket_closed;
+ {error, _Reason} ->
+ ?vlog("deliver(~p) failed for reason:"
+ "~n Reason: ~p",[SocketType,_Reason]),
+ (catch close(SocketType, Socket)),
+ socket_closed;
+ _ ->
+ ok
+ end.
+
+% type(L) when list(L) ->
+% {list, L};
+% type(B) when binary(B) ->
+% Decoded =
+% case (catch binary_to_term(B)) of
+% {'EXIT', _} ->
+% %% Oups, not a term, try list
+% case (catch binary_to_list(B)) of
+% %% Oups, not a list either, give up
+% {'EXIT', _} ->
+% {size, size(B)};
+% L ->
+% {list, L}
+% end;
+
+% T ->
+% {term, T}
+% end,
+% {binary, Decoded};
+% type(T) when tuple(T) ->
+% {tuple, T};
+% type(I) when integer(I) ->
+% {integer, I};
+% type(F) when float(F) ->
+% {float, F};
+% type(P) when pid(P) ->
+% {pid, P};
+% type(P) when port(P) ->
+% {port, P};
+% type(R) when reference(R) ->
+% {reference, R};
+% type(T) ->
+% {term, T}.
+
+
+
+send(ip_comm,Socket,Data) ->
+ ?DEBUG("send(ip_comm) -> ~p bytes on socket ~p",[data_size(Data),Socket]),
+ gen_tcp:send(Socket,Data);
+send({ssl,SSLConfig},Socket,Data) ->
+ ?DEBUG("send(ssl) -> ~p bytes on socket ~p",[data_size(Data),Socket]),
+ ssl:send(Socket, Data).
+
+recv(ip_comm,Socket,Length,Timeout) ->
+ ?DEBUG("recv(ip_comm) -> read from socket ~p",[Socket]),
+ gen_tcp:recv(Socket,Length,Timeout);
+recv({ssl,SSLConfig},Socket,Length,Timeout) ->
+ ?DEBUG("recv(ssl) -> read from socket ~p",[Socket]),
+ ssl:recv(Socket,Length,Timeout).
+
+-ifdef(inets_debug).
+data_size(L) when list(L) ->
+ httpd_util:flatlength(L);
+data_size(B) when binary(B) ->
+ size(B);
+data_size(O) ->
+ {unknown_size,O}.
+-endif.
+
+
+%% peername
+
+peername(ip_comm, Socket) ->
+ case inet:peername(Socket) of
+ {ok,{{A,B,C,D},Port}} ->
+ PeerName = integer_to_list(A)++"."++integer_to_list(B)++"."++
+ integer_to_list(C)++"."++integer_to_list(D),
+ ?DEBUG("peername(ip_comm) on socket ~p: ~p",
+ [Socket,{Port,PeerName}]),
+ {Port,PeerName};
+ {error,Reason} ->
+ ?vlog("failed getting peername:"
+ "~n Reason: ~p"
+ "~n Socket: ~p",
+ [Reason,Socket]),
+ {-1,"unknown"}
+ end;
+peername({ssl,_SSLConfig},Socket) ->
+ case ssl:peername(Socket) of
+ {ok,{{A,B,C,D},Port}} ->
+ PeerName = integer_to_list(A)++"."++integer_to_list(B)++"."++
+ integer_to_list(C)++"."++integer_to_list(D),
+ ?DEBUG("peername(ssl) on socket ~p: ~p",
+ [Socket, {Port,PeerName}]),
+ {Port,PeerName};
+ {error,_Reason} ->
+ {-1,"unknown"}
+ end.
+
+%% resolve
+
+resolve(_) ->
+ {ok,Name} = inet:gethostname(),
+ Name.
+
+%% close
+
+close(ip_comm,Socket) ->
+ Res =
+ case (catch gen_tcp:close(Socket)) of
+ ok -> ok;
+ {error,Reason} -> {error,Reason};
+ {'EXIT',{noproc,_}} -> {error,closed};
+ {'EXIT',Reason} -> {error,Reason};
+ Otherwise -> {error,Otherwise}
+ end,
+ ?vtrace("close(ip_comm) result: ~p",[Res]),
+ Res;
+close({ssl,_SSLConfig},Socket) ->
+ Res =
+ case (catch ssl:close(Socket)) of
+ ok -> ok;
+ {error,Reason} -> {error,Reason};
+ {'EXIT',{noproc,_}} -> {error,closed};
+ {'EXIT',Reason} -> {error,Reason};
+ Otherwise -> {error,Otherwise}
+ end,
+ ?vtrace("close(ssl) result: ~p",[Res]),
+ Res.
+
+%% config (debug: {certfile, "/var/tmp/server_root/conf/ssl_server.pem"})
+
+config(ConfigDB) ->
+ case httpd_util:lookup(ConfigDB,com_type,ip_comm) of
+ ssl ->
+ case ssl_certificate_file(ConfigDB) of
+ undefined ->
+ {error,
+ ?NICE("Directive SSLCertificateFile "
+ "not found in the config file")};
+ SSLCertificateFile ->
+ {ssl,
+ SSLCertificateFile++
+ ssl_certificate_key_file(ConfigDB)++
+ ssl_verify_client(ConfigDB)++
+ ssl_ciphers(ConfigDB)++
+ ssl_password(ConfigDB)++
+ ssl_verify_depth(ConfigDB)++
+ ssl_ca_certificate_file(ConfigDB)}
+ end;
+ ip_comm ->
+ ip_comm
+ end.
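+
+%% Usage sketch (hypothetical paths): with com_type ssl and only the
+%% certificate and key files set in ConfigDB, config/1 returns something like
+%%   {ssl,[{certfile,"/etc/ssl/server.pem"},{keyfile,"/etc/ssl/server.key"}]}
+%% while with com_type ip_comm it simply returns the atom ip_comm.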
+
+ssl_certificate_file(ConfigDB) ->
+ case httpd_util:lookup(ConfigDB,ssl_certificate_file) of
+ undefined ->
+ undefined;
+ SSLCertificateFile ->
+ [{certfile,SSLCertificateFile}]
+ end.
+
+ssl_certificate_key_file(ConfigDB) ->
+ case httpd_util:lookup(ConfigDB,ssl_certificate_key_file) of
+ undefined ->
+ [];
+ SSLCertificateKeyFile ->
+ [{keyfile,SSLCertificateKeyFile}]
+ end.
+
+ssl_verify_client(ConfigDB) ->
+ case httpd_util:lookup(ConfigDB,ssl_verify_client) of
+ undefined ->
+ [];
+ SSLVerifyClient ->
+ [{verify,SSLVerifyClient}]
+ end.
+
+ssl_ciphers(ConfigDB) ->
+ case httpd_util:lookup(ConfigDB,ssl_ciphers) of
+ undefined ->
+ [];
+ Ciphers ->
+ [{ciphers, Ciphers}]
+ end.
+
+ssl_password(ConfigDB) ->
+ case httpd_util:lookup(ConfigDB,ssl_password_callback_module) of
+ undefined ->
+ [];
+ Module ->
+ case httpd_util:lookup(ConfigDB, ssl_password_callback_function) of
+ undefined ->
+ [];
+ Function ->
+ case catch apply(Module, Function, []) of
+ Password when list(Password) ->
+ [{password, Password}];
+ Error ->
+ error_report(ssl_password,Module,Function,Error),
+ []
+ end
+ end
+ end.
+
+ssl_verify_depth(ConfigDB) ->
+ case httpd_util:lookup(ConfigDB, ssl_verify_client_depth) of
+ undefined ->
+ [];
+ Depth ->
+ [{depth, Depth}]
+ end.
+
+ssl_ca_certificate_file(ConfigDB) ->
+ case httpd_util:lookup(ConfigDB, ssl_ca_certificate_file) of
+ undefined ->
+ [];
+ File ->
+ [{cacertfile, File}]
+ end.
+
+
+error_report(Where,M,F,Error) ->
+ error_logger:error_report([{?MODULE, Where}, {apply, {M, F, []}}, Error]).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_sup.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_sup.erl
new file mode 100644
index 0000000000..e7a3557c9d
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_sup.erl
@@ -0,0 +1,202 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_sup.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+%%----------------------------------------------------------------------
+%% Purpose: The top supervisor for the inets application
+%%----------------------------------------------------------------------
+
+-module(httpd_sup).
+
+-behaviour(supervisor).
+
+-include("httpd_verbosity.hrl").
+
+%% public
+-export([start/2, start_link/2, start2/2, start_link2/2, stop/1, stop/2, stop2/1]).
+-export([init/1]).
+
+
+-define(D(F, A), io:format("~p:" ++ F ++ "~n", [?MODULE|A])).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% supervisor callback functions
+
+start(ConfigFile, Verbosity) ->
+ case start_link(ConfigFile, Verbosity) of
+ {ok, Pid} ->
+ unlink(Pid),
+ {ok, Pid};
+
+ Else ->
+ Else
+ end.
+
+
+start_link(ConfigFile, Verbosity) ->
+ case get_addr_and_port(ConfigFile) of
+ {ok, ConfigList, Addr, Port} ->
+ Name = make_name(Addr, Port),
+ SupName = {local, Name},
+ supervisor:start_link(SupName, ?MODULE,
+ [ConfigFile, ConfigList,
+ Verbosity, Addr, Port]);
+
+ {error, Reason} ->
+ error_logger:error_report(Reason),
+ {stop, Reason};
+
+ Else ->
+ error_logger:error_report(Else),
+ {stop, Else}
+ end.
+
+
+start2(ConfigList, Verbosity) ->
+ case start_link2(ConfigList, Verbosity) of
+ {ok, Pid} ->
+ unlink(Pid),
+ {ok, Pid};
+
+ Else ->
+ Else
+ end.
+
+
+start_link2(ConfigList, Verbosity) ->
+ case get_addr_and_port2(ConfigList) of
+ {ok, Addr, Port} ->
+ Name = make_name(Addr, Port),
+ SupName = {local, Name},
+ supervisor:start_link(SupName, ?MODULE,
+ [undefined, ConfigList, Verbosity, Addr, Port]);
+
+ {error, Reason} ->
+ error_logger:error_report(Reason),
+ {stop, Reason};
+
+ Else ->
+ error_logger:error_report(Else),
+ {stop, Else}
+ end.
+
+
+
+stop(Pid) when pid(Pid) ->
+ do_stop(Pid);
+stop(ConfigFile) when list(ConfigFile) ->
+ case get_addr_and_port(ConfigFile) of
+ {ok, _, Addr, Port} ->
+ stop(Addr, Port);
+
+ Error ->
+ Error
+ end;
+stop(StartArgs) ->
+ ok.
+
+
+stop(Addr, Port) when integer(Port) ->
+ Name = make_name(Addr, Port),
+ case whereis(Name) of
+ Pid when pid(Pid) ->
+ do_stop(Pid),
+ ok;
+ _ ->
+ not_started
+ end.
+
+stop2(ConfigList) when list(ConfigList) ->
+ {ok, Addr, Port} = get_addr_and_port2(ConfigList),
+ stop(Addr, Port).
+
+
+do_stop(Pid) ->
+ exit(Pid, shutdown).
+
+
+init([ConfigFile, ConfigList, Verbosity, Addr, Port]) ->
+ init(ConfigFile, ConfigList, Verbosity, Addr, Port);
+init(BadArg) ->
+ {error, {badarg, BadArg}}.
+
+init(ConfigFile, ConfigList, Verbosity, Addr, Port) ->
+ Flags = {one_for_one, 0, 1},
+ AccSupVerbosity = get_acc_sup_verbosity(Verbosity),
+ MiscSupVerbosity = get_misc_sup_verbosity(Verbosity),
+ Sups = [sup_spec(httpd_acceptor_sup, Addr, Port, AccSupVerbosity),
+ sup_spec(httpd_misc_sup, Addr, Port, MiscSupVerbosity),
+ worker_spec(httpd_manager, Addr, Port, ConfigFile, ConfigList,
+ Verbosity, [gen_server])],
+ {ok, {Flags, Sups}}.
+
+
+sup_spec(Name, Addr, Port, Verbosity) ->
+ {{Name, Addr, Port},
+ {Name, start, [Addr, Port, Verbosity]},
+ permanent, 2000, supervisor, [Name, supervisor]}.
+
+worker_spec(Name, Addr, Port, ConfigFile, ConfigList, Verbosity, Modules) ->
+ {{Name, Addr, Port},
+ {Name, start_link, [ConfigFile, ConfigList, Verbosity]},
+ permanent, 2000, worker, [Name] ++ Modules}.
+
+
+make_name(Addr,Port) ->
+ httpd_util:make_name("httpd_sup",Addr,Port).
+
+
+%% get_addr_and_port
+
+get_addr_and_port(ConfigFile) ->
+ case httpd_conf:load(ConfigFile) of
+ {ok, ConfigList} ->
+ {ok, Addr, Port} = get_addr_and_port2(ConfigList),
+ {ok, ConfigList, Addr, Port};
+ Error ->
+ Error
+ end.
+
+
+get_addr_and_port2(ConfigList) ->
+ Port = httpd_util:key1search(ConfigList, port, 80),
+ Addr = httpd_util:key1search(ConfigList, bind_address),
+ {ok, Addr, Port}.
+
+get_acc_sup_verbosity(V) ->
+ case key1search(V, all) of
+ undefined ->
+ key1search(V, acceptor_sup_verbosity, ?default_verbosity);
+ Verbosity ->
+ Verbosity
+ end.
+
+
+get_misc_sup_verbosity(V) ->
+ case key1search(V, all) of
+ undefined ->
+ key1search(V, misc_sup_verbosity, ?default_verbosity);
+ Verbosity ->
+ Verbosity
+ end.
+
+
+key1search(L, K) ->
+ httpd_util:key1search(L, K).
+
+key1search(L, K, D) ->
+ httpd_util:key1search(L, K, D).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_util.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_util.erl
new file mode 100644
index 0000000000..045e6f6516
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_util.erl
@@ -0,0 +1,773 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_util.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+-module(httpd_util).
+-export([key1search/2, key1search/3, lookup/2, lookup/3, multi_lookup/2,
+ lookup_mime/2, lookup_mime/3, lookup_mime_default/2,
+ lookup_mime_default/3, reason_phrase/1, message/3, rfc1123_date/0,
+ rfc1123_date/1, day/1, month/1, decode_hex/1, decode_base64/1, encode_base64/1,
+ flatlength/1, split_path/1, split_script_path/1, suffix/1, to_upper/1,
+ to_lower/1, split/3, header/2, header/3, header/4, uniq/1,
+ make_name/2,make_name/3,make_name/4,strip/1,
+ hexlist_to_integer/1,integer_to_hexlist/1,
+ convert_request_date/1,create_etag/1,create_etag/2,getSize/1,
+ response_generated/1]).
+
+%% Since hexlist_to_integer/integer_to_hexlist are lousy names, also export
+%% the alias encode_hex/1
+-export([encode_hex/1]).
+-include("httpd.hrl").
+
+%% key1search
+
+key1search(TupleList,Key) ->
+ key1search(TupleList,Key,undefined).
+
+key1search(TupleList,Key,Undefined) ->
+ case lists:keysearch(Key,1,TupleList) of
+ {value,{Key,Value}} ->
+ Value;
+ false ->
+ Undefined
+ end.
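+
+%% Usage sketch (hypothetical values):
+%%   key1search([{port,8080},{bind_address,any}], port) =:= 8080
+%%   key1search([{port,8080}], bind_address) =:= undefined
+%%   key1search([{port,8080}], bind_address, any) =:= any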
+
+%% lookup
+
+lookup(Table,Key) ->
+ lookup(Table,Key,undefined).
+
+lookup(Table,Key,Undefined) ->
+ case catch ets:lookup(Table,Key) of
+ [{Key,Value}|_] ->
+ Value;
+ _->
+ Undefined
+ end.
+
+%% multi_lookup
+
+multi_lookup(Table,Key) ->
+ remove_key(ets:lookup(Table,Key)).
+
+remove_key([]) ->
+ [];
+remove_key([{_Key,Value}|Rest]) ->
+ [Value|remove_key(Rest)].
+
+%% lookup_mime
+
+lookup_mime(ConfigDB,Suffix) ->
+ lookup_mime(ConfigDB,Suffix,undefined).
+
+lookup_mime(ConfigDB,Suffix,Undefined) ->
+ [{mime_types,MimeTypesDB}|_]=ets:lookup(ConfigDB,mime_types),
+ case ets:lookup(MimeTypesDB,Suffix) of
+ [] ->
+ Undefined;
+ [{Suffix,MimeType}|_] ->
+ MimeType
+ end.
+
+%% lookup_mime_default
+
+lookup_mime_default(ConfigDB,Suffix) ->
+ lookup_mime_default(ConfigDB,Suffix,undefined).
+
+lookup_mime_default(ConfigDB,Suffix,Undefined) ->
+ [{mime_types,MimeTypesDB}|_]=ets:lookup(ConfigDB,mime_types),
+ case ets:lookup(MimeTypesDB,Suffix) of
+ [] ->
+ case ets:lookup(ConfigDB,default_type) of
+ [] ->
+ Undefined;
+ [{default_type,DefaultType}|_] ->
+ DefaultType
+ end;
+ [{Suffix,MimeType}|_] ->
+ MimeType
+ end.
+
+%% reason_phrase
+reason_phrase(100) -> "Continue";
+reason_phrase(101) -> "Swithing protocol";
+reason_phrase(200) -> "OK";
+reason_phrase(201) -> "Created";
+reason_phrase(202) -> "Accepted";
+reason_phrase(204) -> "No Content";
+reason_phrase(205) -> "Reset Content";
+reason_phrase(206) -> "Partial Content";
+reason_phrase(301) -> "Moved Permanently";
+reason_phrase(302) -> "Moved Temporarily";
+reason_phrase(304) -> "Not Modified";
+reason_phrase(400) -> "Bad Request";
+reason_phrase(401) -> "Unauthorized";
+reason_phrase(402) -> "Payment Required";
+reason_phrase(403) -> "Forbidden";
+reason_phrase(404) -> "Not Found";
+reason_phrase(405) -> "Method Not Allowed";
+reason_phrase(408) -> "Request Timeout";
+reason_phrase(411) -> "Length Required";
+reason_phrase(414) -> "Request-URI Too Long";
+reason_phrase(412) -> "Precondition Failed";
+reason_phrase(416) -> "request Range Not Satisfiable";
+reason_phrase(417) -> "Expectation failed";
+reason_phrase(500) -> "Internal Server Error";
+reason_phrase(501) -> "Not Implemented";
+reason_phrase(502) -> "Bad Gateway";
+reason_phrase(503) -> "Service Unavailable";
+reason_phrase(_) -> "Internal Server Error".
+
+%% message
+
+message(301,URL,_) ->
+ "The document has moved <A HREF=\""++URL++"\">here</A>.";
+message(304,_URL,_) ->
+ "The document has not been changed.";
+message(400,none,_) ->
+ "Your browser sent a query that this server could not understand.";
+message(401,none,_) ->
+ "This server could not verify that you
+are authorized to access the document you
+requested. Either you supplied the wrong
+credentials (e.g., bad password), or your
+browser does not understand how to supply
+the credentials required.";
+message(403,RequestURI,_) ->
+ "You do not have permission to access "++RequestURI++" on this server.";
+message(404,RequestURI,_) ->
+ "The requested URL "++RequestURI++" was not found on this server.";
+message(412,none,_) ->
+ "The requested preconditions where false";
+message(414,ReasonPhrase,_) ->
+ "Message "++ReasonPhrase++".";
+message(416,ReasonPhrase,_) ->
+ ReasonPhrase;
+
+message(500,none,ConfigDB) ->
+ ServerAdmin=lookup(ConfigDB,server_admin,"unknown@unknown"),
+ "The server encountered an internal error or
+misconfiguration and was unable to complete
+your request.
+<P>Please contact the server administrator "++ServerAdmin++",
+and inform them of the time the error occurred
+and anything you might have done that may have
+caused the error.";
+message(501,{Method,RequestURI,HTTPVersion},_ConfigDB) ->
+ Method++" to "++RequestURI++" ("++HTTPVersion++") not supported.";
+message(503,String,_ConfigDB) ->
+ "This service in unavailable due to: "++String.
+
+%% convert_request_date(Date) -> {{YYYY,MM,DD},{HH,MIN,SEC}}
+
+convert_request_date([D,A,Y,DateType|Rest]) ->
+ Func=case DateType of
+ $\, ->
+ fun convert_rfc1123_date/1;
+ $\ ->
+ fun convert_ascii_date/1;
+ _ ->
+ fun convert_rfc850_date/1
+ end,
+ case catch Func([D,A,Y,DateType|Rest])of
+ {ok,Date} ->
+ Date;
+ _Error ->
+ bad_date
+ end.
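+
+%% Usage sketch: an RFC 1123 request date such as
+%%   convert_request_date("Sun, 06 Nov 1994 08:49:37 GMT")
+%% is parsed to {{1994,11,6},{8,49,37}}; unparsable input yields bad_date.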
+
+convert_rfc850_date(DateStr) ->
+ case string:tokens(DateStr," ") of
+ [_WeekDay,Date,Time,_TimeZone|_Rest] ->
+ convert_rfc850_date(Date,Time);
+ _Error ->
+ bad_date
+ end.
+
+convert_rfc850_date([D1,D2,_,M,O,N,_,Y1,Y2|_Rest],[H1,H2,_Col,M1,M2,_Col,S1,S2|_Rest2])->
+ Year=list_to_integer([50,48,Y1,Y2]),
+ Day=list_to_integer([D1,D2]),
+ Month=convert_month([M,O,N]),
+ Hour=list_to_integer([H1,H2]),
+ Min=list_to_integer([M1,M2]),
+ Sec=list_to_integer([S1,S2]),
+ {ok,{{Year,Month,Day},{Hour,Min,Sec}}};
+convert_rfc850_date(_BadDate,_BadTime)->
+ bad_date.
+
+convert_ascii_date([_D,_A,_Y,_SP,M,O,N,_SP,D1,D2,_SP,H1,H2,_Col,M1,M2,_Col,S1,S2,_SP,Y1,Y2,Y3,Y4|_Rest])->
+ Year=list_to_integer([Y1,Y2,Y3,Y4]),
+ Day=case D1 of
+ $\ ->
+ list_to_integer([D2]);
+ _->
+ list_to_integer([D1,D2])
+ end,
+ Month=convert_month([M,O,N]),
+ Hour=list_to_integer([H1,H2]),
+ Min=list_to_integer([M1,M2]),
+ Sec=list_to_integer([S1,S2]),
+ {ok,{{Year,Month,Day},{Hour,Min,Sec}}};
+convert_ascii_date(BadDate)->
+ bad_date.
+convert_rfc1123_date([_D,_A,_Y,_C,_SP,D1,D2,_SP,M,O,N,_SP,Y1,Y2,Y3,Y4,_SP,H1,H2,_Col,M1,M2,_Col,S1,S2|Rest])->
+ Year=list_to_integer([Y1,Y2,Y3,Y4]),
+ Day=list_to_integer([D1,D2]),
+ Month=convert_month([M,O,N]),
+ Hour=list_to_integer([H1,H2]),
+ Min=list_to_integer([M1,M2]),
+ Sec=list_to_integer([S1,S2]),
+ {ok,{{Year,Month,Day},{Hour,Min,Sec}}};
+convert_rfc1123_date(BadDate)->
+ bad_date.
+
+convert_month("Jan")->1;
+convert_month("Feb") ->2;
+convert_month("Mar") ->3;
+convert_month("Apr") ->4;
+convert_month("May") ->5;
+convert_month("Jun") ->6;
+convert_month("Jul") ->7;
+convert_month("Aug") ->8;
+convert_month("Sep") ->9;
+convert_month("Oct") ->10;
+convert_month("Nov") ->11;
+convert_month("Dec") ->12.
+
+
+%% rfc1123_date
+
+rfc1123_date() ->
+ {{YYYY,MM,DD},{Hour,Min,Sec}}=calendar:universal_time(),
+ DayNumber=calendar:day_of_the_week({YYYY,MM,DD}),
+ lists:flatten(io_lib:format("~s, ~2.2.0w ~3.s ~4.4.0w ~2.2.0w:~2.2.0w:~2.2.0w GMT",
+ [day(DayNumber),DD,month(MM),YYYY,Hour,Min,Sec])).
+
+rfc1123_date({{YYYY,MM,DD},{Hour,Min,Sec}}) ->
+ DayNumber=calendar:day_of_the_week({YYYY,MM,DD}),
+ lists:flatten(io_lib:format("~s, ~2.2.0w ~3.s ~4.4.0w ~2.2.0w:~2.2.0w:~2.2.0w GMT",
+ [day(DayNumber),DD,month(MM),YYYY,Hour,Min,Sec])).
+
+%% uniq
+
+uniq([]) ->
+ [];
+uniq([First,First|Rest]) ->
+ uniq([First|Rest]);
+uniq([First|Rest]) ->
+ [First|uniq(Rest)].
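+
+%% Usage sketch: only consecutive duplicates are removed, e.g.
+%%   uniq([1,1,2,2,1]) =:= [1,2,1]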
+
+
+%% day
+
+day(1) -> "Mon";
+day(2) -> "Tue";
+day(3) -> "Wed";
+day(4) -> "Thu";
+day(5) -> "Fri";
+day(6) -> "Sat";
+day(7) -> "Sun".
+
+%% month
+
+month(1) -> "Jan";
+month(2) -> "Feb";
+month(3) -> "Mar";
+month(4) -> "Apr";
+month(5) -> "May";
+month(6) -> "Jun";
+month(7) -> "Jul";
+month(8) -> "Aug";
+month(9) -> "Sep";
+month(10) -> "Oct";
+month(11) -> "Nov";
+month(12) -> "Dec".
+
+%% decode_hex
+
+decode_hex([$%,Hex1,Hex2|Rest]) ->
+ [hex2dec(Hex1)*16+hex2dec(Hex2)|decode_hex(Rest)];
+decode_hex([First|Rest]) ->
+ [First|decode_hex(Rest)];
+decode_hex([]) ->
+ [].
+
+hex2dec(X) when X>=$0,X=<$9 -> X-$0;
+hex2dec(X) when X>=$A,X=<$F -> X-$A+10;
+hex2dec(X) when X>=$a,X=<$f -> X-$a+10.
+
+%% decode_base64 (DEBUG STRING: QWxhZGRpbjpvcGVuIHNlc2FtZQ==)
+
+decode_base64([]) ->
+ [];
+decode_base64([Sextet1,Sextet2,$=,$=|Rest]) ->
+ Bits2x6=
+ (d(Sextet1) bsl 18) bor
+ (d(Sextet2) bsl 12),
+ Octet1=Bits2x6 bsr 16,
+ [Octet1|decode_base64(Rest)];
+decode_base64([Sextet1,Sextet2,Sextet3,$=|Rest]) ->
+ Bits3x6=
+ (d(Sextet1) bsl 18) bor
+ (d(Sextet2) bsl 12) bor
+ (d(Sextet3) bsl 6),
+ Octet1=Bits3x6 bsr 16,
+ Octet2=(Bits3x6 bsr 8) band 16#ff,
+ [Octet1,Octet2|decode_base64(Rest)];
+decode_base64([Sextet1,Sextet2,Sextet3,Sextet4|Rest]) ->
+ Bits4x6=
+ (d(Sextet1) bsl 18) bor
+ (d(Sextet2) bsl 12) bor
+ (d(Sextet3) bsl 6) bor
+ d(Sextet4),
+ Octet1=Bits4x6 bsr 16,
+ Octet2=(Bits4x6 bsr 8) band 16#ff,
+ Octet3=Bits4x6 band 16#ff,
+ [Octet1,Octet2,Octet3|decode_base64(Rest)];
+decode_base64(CatchAll) ->
+ "BAD!".
+
+d(X) when X >= $A, X =<$Z ->
+ X-65;
+d(X) when X >= $a, X =<$z ->
+ X-71;
+d(X) when X >= $0, X =<$9 ->
+ X+4;
+d($+) -> 62;
+d($/) -> 63;
+d(_) -> 63.
+
+
+encode_base64([]) ->
+ [];
+encode_base64([A]) ->
+ [e(A bsr 2), e((A band 3) bsl 4), $=, $=];
+encode_base64([A,B]) ->
+ [e(A bsr 2), e(((A band 3) bsl 4) bor (B bsr 4)), e((B band 15) bsl 2), $=];
+encode_base64([A,B,C|Ls]) ->
+ encode_base64_do(A,B,C, Ls).
+encode_base64_do(A,B,C, Rest) ->
+ BB = (A bsl 16) bor (B bsl 8) bor C,
+ [e(BB bsr 18), e((BB bsr 12) band 63),
+ e((BB bsr 6) band 63), e(BB band 63)|encode_base64(Rest)].
+
+e(X) when X >= 0, X < 26 -> X+65;
+e(X) when X>25, X<52 -> X+71;
+e(X) when X>51, X<62 -> X-4;
+e(62) -> $+;
+e(63) -> $/;
+e(X) -> exit({bad_encode_base64_token, X}).
+
+
+%% flatlength
+
+flatlength(List) ->
+ flatlength(List, 0).
+
+flatlength([H|T],L) when list(H) ->
+ flatlength(H,flatlength(T,L));
+flatlength([H|T],L) when binary(H) ->
+ flatlength(T,L+size(H));
+flatlength([H|T],L) ->
+ flatlength(T,L+1);
+flatlength([],L) ->
+ L.
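+
+%% Usage sketch (hypothetical iolist): binaries count by byte size and nested
+%% lists are flattened, e.g.
+%%   flatlength(["ab", [<<"cd">>, $e]]) =:= 5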
+
+%% split_path
+
+split_path(Path) ->
+ case regexp:match(Path,"[\?].*\$") of
+ %% A QUERY_STRING exists!
+ {match,Start,Length} ->
+ {httpd_util:decode_hex(string:substr(Path,1,Start-1)),
+ string:substr(Path,Start,Length)};
+ %% A possible PATH_INFO exists!
+ nomatch ->
+ split_path(Path,[])
+ end.
+
+split_path([],SoFar) ->
+ {httpd_util:decode_hex(lists:reverse(SoFar)),[]};
+split_path([$/|Rest],SoFar) ->
+ Path=httpd_util:decode_hex(lists:reverse(SoFar)),
+ case file:read_file_info(Path) of
+ {ok,FileInfo} when FileInfo#file_info.type == regular ->
+ {Path,[$/|Rest]};
+ {ok,FileInfo} ->
+ split_path(Rest,[$/|SoFar]);
+ {error,Reason} ->
+ split_path(Rest,[$/|SoFar])
+ end;
+split_path([C|Rest],SoFar) ->
+ split_path(Rest,[C|SoFar]).
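+
+%% Usage sketch (hypothetical URI): a query string is split off directly,
+%% without consulting the file system, e.g.
+%%   split_path("/cgi-bin/test?x=1") =:= {"/cgi-bin/test","?x=1"}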
+
+%% split_script_path
+
+split_script_path(Path) ->
+ case split_script_path(Path, []) of
+ {Script, AfterPath} ->
+ {PathInfo, QueryString} = pathinfo_querystring(AfterPath),
+ {Script, {PathInfo, QueryString}};
+ not_a_script ->
+ not_a_script
+ end.
+
+pathinfo_querystring(Str) ->
+ pathinfo_querystring(Str, []).
+pathinfo_querystring([], SoFar) ->
+ {lists:reverse(SoFar), []};
+pathinfo_querystring([$?|Rest], SoFar) ->
+ {lists:reverse(SoFar), Rest};
+pathinfo_querystring([C|Rest], SoFar) ->
+ pathinfo_querystring(Rest, [C|SoFar]).
+
+split_script_path([$?|QueryString], SoFar) ->
+ Path = httpd_util:decode_hex(lists:reverse(SoFar)),
+ case file:read_file_info(Path) of
+ {ok,FileInfo} when FileInfo#file_info.type == regular ->
+ {Path, [$?|QueryString]};
+ {ok,FileInfo} ->
+ not_a_script;
+ {error,Reason} ->
+ not_a_script
+ end;
+split_script_path([], SoFar) ->
+ Path = httpd_util:decode_hex(lists:reverse(SoFar)),
+ case file:read_file_info(Path) of
+ {ok,FileInfo} when FileInfo#file_info.type == regular ->
+ {Path, []};
+ {ok,FileInfo} ->
+ not_a_script;
+ {error,Reason} ->
+ not_a_script
+ end;
+split_script_path([$/|Rest], SoFar) ->
+ Path = httpd_util:decode_hex(lists:reverse(SoFar)),
+ case file:read_file_info(Path) of
+ {ok, FileInfo} when FileInfo#file_info.type == regular ->
+ {Path, [$/|Rest]};
+ {ok, _FileInfo} ->
+ split_script_path(Rest, [$/|SoFar]);
+ {error, _Reason} ->
+ split_script_path(Rest, [$/|SoFar])
+ end;
+split_script_path([C|Rest], SoFar) ->
+ split_script_path(Rest,[C|SoFar]).
+
+%% suffix
+
+suffix(Path) ->
+ case filename:extension(Path) of
+ [] ->
+ [];
+ Extension ->
+ tl(Extension)
+ end.
+
+%% to_upper
+
+to_upper([C|Cs]) when C >= $a, C =< $z ->
+ [C-($a-$A)|to_upper(Cs)];
+to_upper([C|Cs]) ->
+ [C|to_upper(Cs)];
+to_upper([]) ->
+ [].
+
+%% to_lower
+
+to_lower([C|Cs]) when C >= $A, C =< $Z ->
+ [C+($a-$A)|to_lower(Cs)];
+to_lower([C|Cs]) ->
+ [C|to_lower(Cs)];
+to_lower([]) ->
+ [].
+
+
+%% strip
+strip(Value)->
+ lists:reverse(remove_ws(lists:reverse(remove_ws(Value)))).
+
+remove_ws([$\s|Rest])->
+ remove_ws(Rest);
+remove_ws([$\t|Rest]) ->
+ remove_ws(Rest);
+remove_ws(Rest) ->
+ Rest.
+
+%% split
+
+split(String,RegExp,Limit) ->
+ case regexp:parse(RegExp) of
+ {error,Reason} ->
+ {error,Reason};
+ {ok,_} ->
+ {ok,do_split(String,RegExp,Limit)}
+ end.
+
+do_split(String,RegExp,1) ->
+ [String];
+
+do_split(String,RegExp,Limit) ->
+ case regexp:first_match(String,RegExp) of
+ {match,Start,Length} ->
+ [string:substr(String,1,Start-1)|
+ do_split(lists:nthtail(Start+Length-1,String),RegExp,Limit-1)];
+ nomatch ->
+ [String]
+ end.
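+
+%% Usage sketch (hypothetical values): at most Limit parts are returned, e.g.
+%%   split("foo: bar: baz", ": ", 2) =:= {ok,["foo","bar: baz"]}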
+
+%% header
+header(StatusCode,Date)when list(Date)->
+ header(StatusCode,"text/plain",false);
+
+header(StatusCode, PersistentConnection) when integer(StatusCode)->
+ Date = rfc1123_date(),
+ Connection =
+ case PersistentConnection of
+ true ->
+ "";
+ _ ->
+ "Connection: close \r\n"
+ end,
+ io_lib:format("HTTP/1.1 ~w ~s \r\nDate: ~s\r\nServer: ~s\r\n~s",
+ [StatusCode, httpd_util:reason_phrase(StatusCode),
+ Date, ?SERVER_SOFTWARE, Connection]).
+
+%%----------------------------------------------------------------------
+
+header(StatusCode, MimeType, Date) when list(Date) ->
+ header(StatusCode, MimeType, false,rfc1123_date());
+
+
+header(StatusCode, MimeType, PersistentConnection) when integer(StatusCode) ->
+ header(StatusCode, MimeType, PersistentConnection,rfc1123_date()).
+
+
+%%----------------------------------------------------------------------
+
+header(416, MimeType,PersistentConnection,Date)->
+ Connection =
+ case PersistentConnection of
+ true ->
+ "";
+ _ ->
+ "Connection: close \r\n"
+ end,
+ io_lib:format("HTTP/1.1 ~w ~s \r\nDate: ~s\r\nServer: ~s\r\n"
+ "Content-Range:bytes *"
+ "Content-Type: ~s\r\n~s",
+ [416, httpd_util:reason_phrase(416),
+ Date, ?SERVER_SOFTWARE, MimeType, Connection]);
+
+
+header(StatusCode, MimeType,PersistentConnection,Date) when integer(StatusCode)->
+ Connection =
+ case PersistentConnection of
+ true ->
+ "";
+ _ ->
+ "Connection: close \r\n"
+ end,
+ io_lib:format("HTTP/1.1 ~w ~s \r\nDate: ~s\r\nServer: ~s\r\n"
+ "Content-Type: ~s\r\n~s",
+ [StatusCode, httpd_util:reason_phrase(StatusCode),
+ Date, ?SERVER_SOFTWARE, MimeType, Connection]).
+
+
+
+%% make_name/2, make_name/3
+%% Prefix -> string()
+%% First part of the name, e.g. "httpd"
+%% Addr -> {A,B,C,D} | string() | undefined
+%% The address part of the name.
+%% e.g. "123.234.55.66" or {123,234,55,66} or "otp.ericsson.se"
+%% for a host address or undefined if local host.
+%% Port -> integer()
+%% Last part of the name, such as the HTTPD server port
+%% number (80).
+%% Postfix -> Any string that will be added last to the name
+%%
+%% Example:
+%% make_name("httpd","otp.ericsson.se",80) => httpd__otp_ericsson_se__80
+%% make_name("httpd",undefined,8088) => httpd_8088
+
+make_name(Prefix,Port) ->
+ make_name(Prefix,undefined,Port,"").
+
+make_name(Prefix,Addr,Port) ->
+ make_name(Prefix,Addr,Port,"").
+
+make_name(Prefix,"*",Port,Postfix) ->
+ make_name(Prefix,undefined,Port,Postfix);
+
+make_name(Prefix,any,Port,Postfix) ->
+ make_name1(io_lib:format("~s_~w~s",[Prefix,Port,Postfix]));
+
+make_name(Prefix,undefined,Port,Postfix) ->
+ make_name1(io_lib:format("~s_~w~s",[Prefix,Port,Postfix]));
+
+make_name(Prefix,Addr,Port,Postfix) ->
+ NameString =
+ Prefix ++ "__" ++ make_name2(Addr) ++ "__" ++
+ integer_to_list(Port) ++ Postfix,
+ make_name1(NameString).
+
+make_name1(String) ->
+ list_to_atom(lists:flatten(String)).
+
+make_name2({A,B,C,D}) ->
+ io_lib:format("~w_~w_~w_~w",[A,B,C,D]);
+make_name2(Addr) ->
+ search_and_replace(Addr,$.,$_).
+
+search_and_replace(S,A,B) ->
+ Fun = fun(What) ->
+ case What of
+ A -> B;
+ O -> O
+ end
+ end,
+ lists:map(Fun,S).
+
+
+
+%%----------------------------------------------------------------------
+%% Converts a string that consists of 0-9, A-F, a-f to an
+%% integer
+%%----------------------------------------------------------------------
+
+hexlist_to_integer([])->
+ empty;
+
+
+%% When the string only contains one character it is easily done.
+%% 0-9
+hexlist_to_integer([Size]) when Size>=48 , Size=<57 ->
+ Size-48;
+%% A-F
+hexlist_to_integer([Size]) when Size>=65 , Size=<70 ->
+ Size-55;
+%% a-f
+hexlist_to_integer([Size]) when Size>=97 , Size=<102 ->
+ Size-87;
+hexlist_to_integer([Size]) ->
+ not_a_num;
+
+hexlist_to_integer(Size) ->
+ Len=string:span(Size,"1234567890abcdefABCDEF"),
+ hexlist_to_integer2(Size,16 bsl (4 *(Len-2)),0).
+
+hexlist_to_integer2([],_Pos,Sum)->
+ Sum;
+hexlist_to_integer2([HexVal|HexString],Pos,Sum)when HexVal>=48,HexVal=<57->
+ hexlist_to_integer2(HexString,Pos bsr 4,Sum+((HexVal-48)*Pos));
+
+hexlist_to_integer2([HexVal|HexString],Pos,Sum)when HexVal>=65,HexVal=<70->
+ hexlist_to_integer2(HexString,Pos bsr 4,Sum+((HexVal-55)*Pos));
+
+hexlist_to_integer2([HexVal|HexString],Pos,Sum)when HexVal>=97,HexVal=<102->
+ hexlist_to_integer2(HexString,Pos bsr 4,Sum+((HexVal-87)*Pos));
+
+hexlist_to_integer2(_AfterHexString,_Pos,Sum)->
+ Sum.
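+
+%% Usage sketch (hypothetical values):
+%%   hexlist_to_integer("1F") =:= 31
+%%   hexlist_to_integer("ff") =:= 255
+%%   hexlist_to_integer("")   =:= empty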
+
+%%----------------------------------------------------------------------
+%% Converts an integer to a hexlist
+%%----------------------------------------------------------------------
+encode_hex(Num)->
+ integer_to_hexlist(Num).
+
+
+integer_to_hexlist(Num)->
+ integer_to_hexlist(Num,getSize(Num),[]).
+
+integer_to_hexlist(Num,Pot,Res) when Pot<0 ->
+ convert_to_ascii([Num|Res]);
+
+integer_to_hexlist(Num,Pot,Res) ->
+ Position=(16 bsl (Pot*4)),
+ PosVal=Num div Position,
+ integer_to_hexlist(Num-(PosVal*Position),Pot-1,[PosVal|Res]).
+convert_to_ascii(ReversedNum)->
+ convert_to_ascii(ReversedNum,[]).
+
+convert_to_ascii([],Num)->
+ Num;
+convert_to_ascii([Num|Reversed],Number)when Num>-1, Num<10 ->
+ convert_to_ascii(Reversed,[Num+48|Number]);
+convert_to_ascii([Num|Reversed],Number)when Num>9, Num<16 ->
+ convert_to_ascii(Reversed,[Num+55|Number]);
+convert_to_ascii(NumReversed,Number) ->
+ error.
+
+
+
+getSize(Num)->
+ getSize(Num,0).
+
+getSize(Num,Pot)when Num<(16 bsl(Pot *4)) ->
+ Pot-1;
+
+getSize(Num,Pot) ->
+ getSize(Num,Pot+1).
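+
+%% Usage sketch (hypothetical values), the inverse direction:
+%%   encode_hex(255) =:= "FF"
+%%   encode_hex(31)  =:= "1F"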
+
+
+
+
+
+create_etag(FileInfo)->
+ create_etag(FileInfo#file_info.mtime,FileInfo#file_info.size).
+
+create_etag({{Year,Month,Day},{Hour,Min,Sec}},Size)->
+ create_part([Year,Month,Day,Hour,Min,Sec])++io_lib:write(Size);
+
+create_etag(FileInfo,Size)->
+ create_etag(FileInfo#file_info.mtime,Size).
+
+create_part(Values)->
+ lists:map(fun(Val0)->
+ Val=Val0 rem 60,
+ if
+ Val=<25 ->
+ 65+Val; % A-Z
+ Val=<50 ->
+ 72+Val; % a-z
+ %%Since no date s
+ true ->
+ Val-3
+ end
+ end,Values).
+
+
+
+%%----------------------------------------------------------------------
+%% Checks whether a response has already been generated
+%%----------------------------------------------------------------------
+response_generated(Info)->
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason}->
+ true;
+ %% No status code; check whether a response has been generated
+ undefined ->
+ case httpd_util:key1search(Info#mod.data, response) of
+ %% No response has been generated!
+ undefined ->
+ false;
+ %% A response has been generated or sent!
+ Response ->
+ true
+ end
+ end.
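+
+%% Usage sketch (hypothetical mod record): only the data field is inspected,
+%% e.g.
+%%   response_generated(#mod{data = []}) =:= false
+%%   response_generated(#mod{data = [{response,{200,"Body"}}]}) =:= true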
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_verbosity.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_verbosity.erl
new file mode 100644
index 0000000000..f676eb4c99
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_verbosity.erl
@@ -0,0 +1,93 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_verbosity.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+-module(httpd_verbosity).
+
+-include_lib("stdlib/include/erl_compile.hrl").
+
+-export([print/4,print/5,printc/4,validate/1]).
+
+print(silence,_Severity,_Format,_Arguments) ->
+ ok;
+print(Verbosity,Severity,Format,Arguments) ->
+ print1(printable(Verbosity,Severity),Format,Arguments).
+
+
+print(silence,_Severity,_Module,_Format,_Arguments) ->
+ ok;
+print(Verbosity,Severity,Module,Format,Arguments) ->
+ print1(printable(Verbosity,Severity),Module,Format,Arguments).
+
+
+printc(silence,Severity,Format,Arguments) ->
+ ok;
+printc(Verbosity,Severity,Format,Arguments) ->
+ print2(printable(Verbosity,Severity),Format,Arguments).
+
+
+print1(false,_Format,_Arguments) -> ok;
+print1(Verbosity,Format,Arguments) ->
+ V = image_of_verbosity(Verbosity),
+ S = image_of_sname(get(sname)),
+ io:format("** HTTPD ~s ~s: " ++ Format ++ "~n",[S,V]++Arguments).
+
+print1(false,_Module,_Format,_Arguments) -> ok;
+print1(Verbosity,Module,Format,Arguments) ->
+ V = image_of_verbosity(Verbosity),
+ S = image_of_sname(get(sname)),
+ io:format("** HTTPD ~s ~s ~s: " ++ Format ++ "~n",[S,Module,V]++Arguments).
+
+
+print2(false,_Format,_Arguments) -> ok;
+print2(_Verbosity,Format,Arguments) ->
+ io:format(Format ++ "~n",Arguments).
+
+
+%% printable(Verbosity,Severity)
+printable(info,info) -> info;
+printable(log,info) -> info;
+printable(log,log) -> log;
+printable(debug,info) -> info;
+printable(debug,log) -> log;
+printable(debug,debug) -> debug;
+printable(trace,V) -> V;
+printable(_Verb,_Sev) -> false.
+
+
+image_of_verbosity(info) -> "INFO";
+image_of_verbosity(log) -> "LOG";
+image_of_verbosity(debug) -> "DEBUG";
+image_of_verbosity(trace) -> "TRACE";
+image_of_verbosity(_) -> "".
+
+%% ShortName
+image_of_sname(acc) -> "ACCEPTOR";
+image_of_sname(acc_sup) -> "ACCEPTOR_SUP";
+image_of_sname(auth) -> "AUTH";
+image_of_sname(man) -> "MANAGER";
+image_of_sname(misc_sup) -> "MISC_SUP";
+image_of_sname(sec) -> "SECURITY";
+image_of_sname(P) when pid(P) -> io_lib:format("REQUEST_HANDLER(~p)",[P]);
+image_of_sname(undefined) -> "";
+image_of_sname(V) -> io_lib:format("~p",[V]).
+
+
+validate(info) -> info;
+validate(log) -> log;
+validate(debug) -> debug;
+validate(trace) -> trace;
+validate(_) -> silence.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_verbosity.hrl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_verbosity.hrl
new file mode 100644
index 0000000000..cecaf693d3
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_verbosity.hrl
@@ -0,0 +1,62 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: httpd_verbosity.hrl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+
+-ifndef(dont_use_verbosity).
+
+-ifndef(default_verbosity).
+-define(default_verbosity,silence).
+-endif.
+
+-define(vvalidate(V), httpd_verbosity:validate(V)).
+
+-ifdef(VMODULE).
+
+-define(vinfo(F,A), httpd_verbosity:print(get(verbosity),info, ?VMODULE,F,A)).
+-define(vlog(F,A), httpd_verbosity:print(get(verbosity),log, ?VMODULE,F,A)).
+-define(vdebug(F,A),httpd_verbosity:print(get(verbosity),debug,?VMODULE,F,A)).
+-define(vtrace(F,A),httpd_verbosity:print(get(verbosity),trace,?VMODULE,F,A)).
+
+-else.
+
+-define(vinfo(F,A), httpd_verbosity:print(get(verbosity),info, F,A)).
+-define(vlog(F,A), httpd_verbosity:print(get(verbosity),log, F,A)).
+-define(vdebug(F,A),httpd_verbosity:print(get(verbosity),debug,F,A)).
+-define(vtrace(F,A),httpd_verbosity:print(get(verbosity),trace,F,A)).
+
+-endif.
+
+-define(vinfoc(F,A), httpd_verbosity:printc(get(verbosity),info, F,A)).
+-define(vlogc(F,A), httpd_verbosity:printc(get(verbosity),log, F,A)).
+-define(vdebugc(F,A),httpd_verbosity:printc(get(verbosity),debug,F,A)).
+-define(vtracec(F,A),httpd_verbosity:printc(get(verbosity),trace,F,A)).
+
+-else.
+
+-define(vvalidate(V),ok).
+
+-define(vinfo(F,A),ok).
+-define(vlog(F,A),ok).
+-define(vdebug(F,A),ok).
+-define(vtrace(F,A),ok).
+
+-define(vinfoc(F,A),ok).
+-define(vlogc(F,A),ok).
+-define(vdebugc(F,A),ok).
+-define(vtracec(F,A),ok).
+
+-endif.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/inets.app.src b/lib/dialyzer/test/r9c_SUITE_data/src/inets/inets.app.src
new file mode 100644
index 0000000000..750dbc6dba
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/inets.app.src
@@ -0,0 +1,56 @@
+{application,inets,
+ [{description,"INETS CXC 138 49"},
+ {vsn,"%VSN%"},
+ {modules,[
+ %% FTP
+ ftp,
+
+ %% HTTP client:
+ http,
+ http_lib,
+ httpc_handler,
+ httpc_manager,
+ uri,
+
+ %% HTTP server:
+ httpd,
+ httpd_acceptor,
+ httpd_acceptor_sup,
+ httpd_conf,
+ httpd_example,
+ httpd_manager,
+ httpd_misc_sup,
+ httpd_parse,
+ httpd_request_handler,
+ httpd_response,
+ httpd_socket,
+ httpd_sup,
+ httpd_util,
+ httpd_verbosity,
+ inets_sup,
+ mod_actions,
+ mod_alias,
+ mod_auth,
+ mod_auth_dets,
+ mod_auth_mnesia,
+ mod_auth_plain,
+ mod_auth_server,
+ mod_browser,
+ mod_cgi,
+ mod_dir,
+ mod_disk_log,
+ mod_esi,
+ mod_get,
+ mod_head,
+ mod_htaccess,
+ mod_include,
+ mod_log,
+ mod_range,
+ mod_responsecontrol,
+ mod_security,
+ mod_security_server,
+ mod_trace
+ ]},
+ {registered,[inets_sup]},
+ {applications,[kernel,stdlib]},
+ {mod,{inets_sup,[]}}]}.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/inets.appup.src b/lib/dialyzer/test/r9c_SUITE_data/src/inets/inets.appup.src
new file mode 100644
index 0000000000..e9ad0d0fe2
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/inets.appup.src
@@ -0,0 +1,133 @@
+{"%VSN%",
+ [{"3.0.5",
+ [
+ {load_module, ftp, soft_purge, soft_purge, []}
+ ]
+ },
+ {"3.0.4",
+ [
+ {update, httpd_acceptor, soft, soft_purge, soft_purge, []}
+ ]
+ },
+ {"3.0.3",
+ [{load_module, httpd, soft_purge, soft_purge, [httpd_conf, httpd_sup]},
+ {load_module, httpd_conf, soft_purge, soft_purge, []},
+ {load_module, httpd_socket, soft_purge, soft_purge, []},
+ {load_module, httpd_response, soft_purge, soft_purge, [mod_disk_log]},
+ {load_module, mod_disk_log, soft_purge, soft_purge, []},
+ {update, httpd_acceptor, soft, soft_purge, soft_purge, []},
+ {update, httpd_sup, soft, soft_purge, soft_purge, [httpd_manager]},
+ {update, httpd_manager, soft, soft_purge, soft_purge,
+ [mod_disk_log, httpd_conf, httpd_socket]}]
+ },
+ {"3.0.2",
+ [{load_module, httpd, soft_purge, soft_purge, [httpd_conf, httpd_sup]},
+ {load_module, httpd_conf, soft_purge, soft_purge, []},
+ {load_module, httpd_socket, soft_purge, soft_purge, []},
+ {load_module, mod_disk_log, soft_purge, soft_purge, []},
+ {load_module, httpd_response, soft_purge, soft_purge, [mod_disk_log]},
+ {update, httpd_sup, soft, soft_purge, soft_purge, [httpd_manager]},
+ {update, httpd_acceptor, soft, soft_purge, soft_purge, []},
+ {update, httpd_manager, soft, soft_purge, soft_purge,
+ [httpd_request_handler, httpd_conf, httpd_socket]},
+ {update, httpd_request_handler, soft, soft_purge, soft_purge,
+ [httpd_response]}]
+ },
+ {"3.0.1",
+ [{load_module, httpd, soft_purge, soft_purge, [httpd_conf, httpd_sup]},
+ {load_module, httpd_conf, soft_purge, soft_purge, []},
+ {load_module, httpd_socket, soft_purge, soft_purge, []},
+ {load_module, httpd_response, soft_purge, soft_purge,
+ [mod_auth, mod_disk_log]},
+ {load_module, mod_disk_log, soft_purge, soft_purge, []},
+ {load_module, mod_auth, soft_purge, soft_purge, []},
+ {update, httpd_sup, soft, soft_purge, soft_purge, [httpd_manager]},
+ {update, httpd_acceptor, soft, soft_purge, soft_purge, []},
+ {update, httpd_manager, soft, soft_purge, soft_purge,
+ [httpd_request_handler, httpd_conf, httpd_socket]},
+ {update, httpd_request_handler, soft, soft_purge, soft_purge,
+ [httpd_response]}]
+ },
+ {"3.0",
+ [{load_module, httpd, soft_purge, soft_purge, [httpd_conf, httpd_sup]},
+ {load_module, httpd_conf, soft_purge, soft_purge, []},
+ {load_module, httpd_socket, soft_purge, soft_purge, []},
+ {load_module, httpd_response, soft_purge, soft_purge,
+ [mod_auth, mod_disk_log]},
+ {load_module, mod_disk_log, soft_purge, soft_purge, []},
+ {load_module, mod_auth, soft_purge, soft_purge, []},
+ {update, httpd_sup, soft, soft_purge, soft_purge,
+ [httpd_manager, httpd_misc_sup]},
+ {update, httpd_misc_sup, soft, soft_purge, soft_purge, []},
+ {update, httpd_acceptor, soft, soft_purge, soft_purge, []},
+ {update, httpd_manager, soft, soft_purge, soft_purge,
+ [httpd_request_handler, httpd_conf, httpd_socket]},
+ {update, httpd_request_handler, soft, soft_purge, soft_purge,
+ [httpd_response]}]
+ }
+ ],
+ [{"3.0.5",
+ [
+ {load_module, ftp, soft_purge, soft_purge, []}
+ ]
+ },
+ {"3.0.4",
+ [{update, httpd_acceptor, soft, soft_purge, soft_purge, []}]
+ },
+ {"3.0.3",
+ [{load_module, httpd, soft_purge, soft_purge, [httpd_conf, httpd_sup]},
+ {load_module, httpd_conf, soft_purge, soft_purge, []},
+ {load_module, httpd_socket, soft_purge, soft_purge, []},
+ {load_module, httpd_response, soft_purge, soft_purge, [mod_disk_log]},
+ {load_module, mod_disk_log, soft_purge, soft_purge, []},
+ {update, httpd_sup, soft, soft_purge, soft_purge, [httpd_manager]},
+ {update, httpd_manager, soft, soft_purge, soft_purge,
+ [mod_disk_log, httpd_conf, httpd_socket]}]
+ },
+ {"3.0.2",
+ [{load_module, httpd, soft_purge, soft_purge, [httpd_conf, httpd_sup]},
+ {load_module, httpd_conf, soft_purge, soft_purge, []},
+ {load_module, httpd_socket, soft_purge, soft_purge, []},
+ {load_module, httpd_response, soft_purge, soft_purge, [mod_disk_log]},
+ {load_module, mod_disk_log, soft_purge, soft_purge, []},
+ {update, httpd_sup, soft, soft_purge, soft_purge, [httpd_manager]},
+ {update, httpd_acceptor, soft, soft_purge, soft_purge, []},
+ {update, httpd_manager, soft, soft_purge, soft_purge,
+ [httpd_request_handler, httpd_conf, httpd_socket]},
+ {update, httpd_request_handler, soft, soft_purge, soft_purge,
+ [httpd_response]}]
+ },
+ {"3.0.1",
+ [{load_module, httpd, soft_purge, soft_purge, [httpd_conf, httpd_sup]},
+ {load_module, httpd_conf, soft_purge, soft_purge, []},
+ {load_module, httpd_socket, soft_purge, soft_purge, []},
+ {load_module, httpd_response, soft_purge, soft_purge,
+ [mod_auth, mod_disk_log]},
+ {load_module, mod_disk_log, soft_purge, soft_purge, []},
+ {load_module, mod_auth, soft_purge, soft_purge, []},
+ {update, httpd_sup, soft, soft_purge, soft_purge, [httpd_manager]},
+ {update, httpd_acceptor, soft, soft_purge, soft_purge, []},
+ {update, httpd_manager, soft, soft_purge, soft_purge,
+ [httpd_request_handler, httpd_conf, httpd_socket]},
+ {update, httpd_request_handler, soft, soft_purge, soft_purge,
+ [httpd_response]}]
+ },
+ {"3.0",
+ [{load_module, httpd, soft_purge, soft_purge, [httpd_conf, httpd_sup]},
+ {load_module, httpd_conf, soft_purge, soft_purge, []},
+ {load_module, httpd_socket, soft_purge, soft_purge, []},
+ {load_module, httpd_response, soft_purge, soft_purge,
+ [mod_auth, mod_disk_log]},
+ {load_module, mod_disk_log, soft_purge, soft_purge, []},
+ {load_module, mod_auth, soft_purge, soft_purge, []},
+ {update, httpd_sup, soft, soft_purge, soft_purge,
+ [httpd_manager, httpd_misc_sup]},
+ {update, httpd_misc_sup, soft, soft_purge, soft_purge, []},
+ {update, httpd_acceptor, soft, soft_purge, soft_purge, []},
+ {update, httpd_manager, soft, soft_purge, soft_purge,
+ [httpd_request_handler, httpd_conf, httpd_socket]},
+ {update, httpd_request_handler, soft, soft_purge, soft_purge,
+ [httpd_response]}]
+ }
+ ]
+}.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/inets.config b/lib/dialyzer/test/r9c_SUITE_data/src/inets/inets.config
new file mode 100644
index 0000000000..814ddd9fc0
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/inets.config
@@ -0,0 +1,2 @@
+[{inets,[{services,[{httpd,"/var/tmp/server_root/conf/8888.conf"},
+ {httpd,"/var/tmp/server_root/conf/8080.conf"}]}]}].
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/inets_sup.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/inets_sup.erl
new file mode 100644
index 0000000000..878fa2c54b
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/inets_sup.erl
@@ -0,0 +1,158 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: inets_sup.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+-module(inets_sup).
+
+-export([crock/0]).
+-export([start/2, stop/1, init/1]).
+-export([start_child/2, stop_child/2, which_children/0]).
+
+
+%% crock (Used for debugging!)
+
+crock() ->
+ application:start(sasl),
+ application:start(inets).
+
+
+%% start
+
+start(Type, State) ->
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+
+%% stop
+
+stop(State) ->
+ ok.
+
+
+%% start_child
+
+start_child(ConfigFile, Verbosity) ->
+ {ok, Spec} = httpd_child_spec(ConfigFile, Verbosity),
+ supervisor:start_child(?MODULE, Spec).
+
+
+%% stop_child
+
+stop_child(Addr, Port) ->
+ Name = {httpd_sup, Addr, Port},
+ case supervisor:terminate_child(?MODULE, Name) of
+ ok ->
+ supervisor:delete_child(?MODULE, Name);
+ Error ->
+ Error
+ end.
+
+
+%% which_children
+
+which_children() ->
+ supervisor:which_children(?MODULE).
+
+
+%% init
+
+init([]) ->
+ case get_services() of
+ {error, Reason} ->
+ {error,Reason};
+ Services ->
+ SupFlags = {one_for_one, 10, 3600},
+ {ok, {SupFlags, child_spec(Services, [])}}
+ end.
+
+get_services() ->
+ case (catch application:get_env(inets, services)) of
+ {ok, Services} ->
+ Services;
+ _ ->
+ []
+ end.
+
+
+child_spec([], Acc) ->
+ Acc;
+child_spec([{httpd, ConfigFile, Verbosity}|Rest], Acc) ->
+ case httpd_child_spec(ConfigFile, Verbosity) of
+ {ok, Spec} ->
+ child_spec(Rest, [Spec | Acc]);
+ {error, Reason} ->
+ error_msg("Failed creating child spec "
+ "using ~p for reason: ~p", [ConfigFile, Reason]),
+ child_spec(Rest, Acc)
+ end;
+child_spec([{httpd, ConfigFile}|Rest], Acc) ->
+ case httpd_child_spec(ConfigFile, []) of
+ {ok, Spec} ->
+ child_spec(Rest, [Spec | Acc]);
+ {error, Reason} ->
+ error_msg("Failed creating child spec "
+ "using ~p for reason: ~p", [ConfigFile, Reason]),
+ child_spec(Rest, Acc)
+ end.
+
+
+httpd_child_spec(ConfigFile, Verbosity) ->
+ case httpd_conf:load(ConfigFile) of
+ {ok, ConfigList} ->
+ Port = httpd_util:key1search(ConfigList, port, 80),
+ Addr = httpd_util:key1search(ConfigList, bind_address),
+ {ok, httpd_child_spec(ConfigFile, Addr, Port, Verbosity)};
+ Error ->
+ Error
+ end.
+
+
+httpd_child_spec(ConfigFile, Addr, Port, Verbosity) ->
+ {{httpd_sup, Addr, Port},{httpd_sup, start_link,[ConfigFile, Verbosity]},
+ permanent, 20000, supervisor,
+ [ftp,
+ httpd,
+ httpd_conf,
+ httpd_example,
+ httpd_manager,
+ httpd_misc_sup,
+ httpd_listener,
+ httpd_parse,
+ httpd_request,
+ httpd_response,
+ httpd_socket,
+ httpd_sup,
+ httpd_util,
+ httpd_verbosity,
+ inets_sup,
+ mod_actions,
+ mod_alias,
+ mod_auth,
+ mod_cgi,
+ mod_dir,
+ mod_disk_log,
+ mod_esi,
+ mod_get,
+ mod_head,
+ mod_include,
+ mod_log,
+ mod_auth_mnesia,
+ mod_auth_plain,
+ mod_auth_dets,
+ mod_security]}.
+
+
+error_msg(F, A) ->
+ error_logger:error_msg(F ++ "~n", A).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl
new file mode 100644
index 0000000000..0a96560c92
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl
@@ -0,0 +1,138 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Mobile Arts AB
+%% Portions created by Mobile Arts are Copyright 2002, Mobile Arts AB
+%% All Rights Reserved.''
+%%
+%%
+
+-include_lib("kernel/include/file.hrl").
+
+-define(SOCKET_CHUNK_SIZE,8192).
+-define(SOCKET_MAX_POLL,25).
+-define(FILE_CHUNK_SIZE,64*1024).
+-define(NICE(Reason),lists:flatten(atom_to_list(?MODULE)++": "++Reason)).
+-define(DEFAULT_CONTEXT,
+ [{errmsg,"[an error occurred while processing this directive]"},
+ {timefmt,"%A, %d-%b-%y %T %Z"},
+ {sizefmt,"abbrev"}]).
+
+
+-ifdef(inets_debug).
+-define(DEBUG(Format, Args), io:format("D(~p:~p:~p) : "++Format++"~n",
+ [self(),?MODULE,?LINE]++Args)).
+-else.
+-define(DEBUG(F,A),[]).
+-endif.
+
+-define(MAXBODYSIZE,16#ffffffff).
+
+-define(HTTP_VERSION_10,0).
+-define(HTTP_VERSION_11,1).
+
+-define(CR,13).
+-define(LF,10).
+
+
+-record(init_data,{peername,resolve}).
+
+
+-record(mod,{
+ init_data, %
+ data= [], % list() Used to propagate data between modules
+ socket_type=ip_comm, % socket_type() IP or SSL socket
+ socket, % socket() Actual socket
+ config_db, % ets() {key,val} db with config entries
+ method, % atom() HTTP method, e.g. 'GET'
+% request_uri, % string() Request URI
+ path, % string() Absolute path. May include query etc
+ http_version, % int() HTTP minor version number, e.g. 0 or 1
+% request_line, % string() Request Line
+ headers, % #req_headers{} Parsed request headers
+ entity_body= <<>>, % binary() Body of request
+	  connection,          % boolean()    true if persistent connection
+ status_code, % int() Status code
+ logging % int() 0=No logging
+ % 1=Only mod_log present
+ % 2=Only mod_disk_log present
+ % 3=Both mod_log and mod_disk_log present
+ }).
+
+% -record(ssl,{
+% certfile, %
+% keyfile, %
+% verify= 0, %
+% ciphers, %
+% password, %
+% depth = 1, %
+% cacertfile, %
+
+% cachetimeout % Found in yaws....
+% }).
+
+
+-record(http_request,{
+	  method,       % atom()  if known else string() HTTP method
+ path, % {abs_path,string()} URL path
+ version % {int(),int()} {Major,Minor} HTTP version
+ }).
+
+-record(http_response,{
+ version, % {int(),int()} {Major,Minor} HTTP version
+ status, % int() Status code
+ phrase % string() HTTP Reason phrase
+ }).
+
+
+%%% Request headers
+-record(req_headers,{
+%%% --- Standard "General" headers
+% cache_control,
+ connection="keep-alive",
+% date,
+% pragma,
+% trailer,
+ transfer_encoding,
+% upgrade,
+% via,
+% warning,
+%%% --- Standard "Request" headers
+% accept,
+% accept_charset,
+% accept_encoding,
+% accept_language,
+ authorization,
+ expect, %% FIXME! Update inet_drv.c!!
+% from,
+ host,
+ if_match,
+ if_modified_since,
+ if_none_match,
+ if_range,
+ if_unmodified_since,
+% max_forwards,
+% proxy_authorization,
+ range,
+% referer,
+% te, %% FIXME! Update inet_drv.c!!
+ user_agent,
+%%% --- Standard "Entity" headers
+% content_encoding,
+% content_language,
+ content_length="0",
+% content_location,
+% content_md5,
+% content_range,
+ content_type,
+% last_modified,
+ other=[] % (list) Key/Value list with other headers
+ }).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_actions.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_actions.erl
new file mode 100644
index 0000000000..47395d4c12
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_actions.erl
@@ -0,0 +1,92 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_actions.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+-module(mod_actions).
+-export([do/1,load/2]).
+
+-include("httpd.hrl").
+
+%% do
+
+do(Info) ->
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ {proceed,Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.data,response) of
+ %% No response has been generated!
+ undefined ->
+ Path=mod_alias:path(Info#mod.data,Info#mod.config_db,
+ Info#mod.request_uri),
+ Suffix=httpd_util:suffix(Path),
+ MimeType=httpd_util:lookup_mime(Info#mod.config_db,Suffix,
+ "text/plain"),
+ Actions=httpd_util:multi_lookup(Info#mod.config_db,action),
+ case action(Info#mod.request_uri,MimeType,Actions) of
+ {yes,RequestURI} ->
+ {proceed,[{new_request_uri,RequestURI}|Info#mod.data]};
+ no ->
+ Scripts=httpd_util:multi_lookup(Info#mod.config_db,script),
+ case script(Info#mod.request_uri,Info#mod.method,Scripts) of
+ {yes,RequestURI} ->
+ {proceed,[{new_request_uri,RequestURI}|Info#mod.data]};
+ no ->
+ {proceed,Info#mod.data}
+ end
+ end;
+ %% A response has been generated or sent!
+ Response ->
+ {proceed,Info#mod.data}
+ end
+ end.
+
+action(RequestURI,MimeType,[]) ->
+ no;
+action(RequestURI,MimeType,[{MimeType,CGIScript}|Rest]) ->
+ {yes,CGIScript++RequestURI};
+action(RequestURI,MimeType,[_|Rest]) ->
+ action(RequestURI,MimeType,Rest).
+
+script(RequestURI,Method,[]) ->
+ no;
+script(RequestURI,Method,[{Method,CGIScript}|Rest]) ->
+ {yes,CGIScript++RequestURI};
+script(RequestURI,Method,[_|Rest]) ->
+ script(RequestURI,Method,Rest).
+
+%%
+%% Configuration
+%%
+
+%% load
+
+load([$A,$c,$t,$i,$o,$n,$ |Action],[]) ->
+ case regexp:split(Action," ") of
+ {ok,[MimeType,CGIScript]} ->
+ {ok,[],{action,{MimeType,CGIScript}}};
+ {ok,_} ->
+ {error,?NICE(httpd_conf:clean(Action)++" is an invalid Action")}
+ end;
+load([$S,$c,$r,$i,$p,$t,$ |Script],[]) ->
+ case regexp:split(Script," ") of
+ {ok,[Method,CGIScript]} ->
+ {ok,[],{script,{Method,CGIScript}}};
+ {ok,_} ->
+ {error,?NICE(httpd_conf:clean(Script)++" is an invalid Script")}
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_alias.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_alias.erl
new file mode 100644
index 0000000000..6b8f7210c4
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_alias.erl
@@ -0,0 +1,175 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_alias.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+-module(mod_alias).
+-export([do/1,real_name/3,real_script_name/3,default_index/2,load/2,path/3]).
+
+-include("httpd.hrl").
+
+%% do
+
+do(Info) ->
+ ?DEBUG("do -> entry",[]),
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ {proceed,Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.data,response) of
+ %% No response has been generated!
+ undefined ->
+ do_alias(Info);
+ %% A response has been generated or sent!
+ Response ->
+ {proceed,Info#mod.data}
+ end
+ end.
+
+do_alias(Info) ->
+ ?DEBUG("do_alias -> Request URI: ~p",[Info#mod.request_uri]),
+ {ShortPath,Path,AfterPath} =
+ real_name(Info#mod.config_db,Info#mod.request_uri,
+ httpd_util:multi_lookup(Info#mod.config_db,alias)),
+ %% Relocate if a trailing slash is missing else proceed!
+ LastChar = lists:last(ShortPath),
+ case file:read_file_info(ShortPath) of
+ {ok,FileInfo} when FileInfo#file_info.type == directory,LastChar /= $/ ->
+ ?LOG("do_alias -> ~n"
+ " ShortPath: ~p~n"
+ " LastChar: ~p~n"
+ " FileInfo: ~p",
+ [ShortPath,LastChar,FileInfo]),
+ ServerName = httpd_util:lookup(Info#mod.config_db,server_name),
+ Port = port_string(httpd_util:lookup(Info#mod.config_db,port,80)),
+ URL = "http://"++ServerName++Port++Info#mod.request_uri++"/",
+ ReasonPhrase = httpd_util:reason_phrase(301),
+ Message = httpd_util:message(301,URL,Info#mod.config_db),
+ {proceed,
+ [{response,
+ {301, ["Location: ", URL, "\r\n"
+ "Content-Type: text/html\r\n",
+ "\r\n",
+ "<HTML>\n<HEAD>\n<TITLE>",ReasonPhrase,
+ "</TITLE>\n</HEAD>\n"
+ "<BODY>\n<H1>",ReasonPhrase,
+ "</H1>\n", Message,
+ "\n</BODY>\n</HTML>\n"]}}|
+ [{real_name,{Path,AfterPath}}|Info#mod.data]]};
+ NoFile ->
+ {proceed,[{real_name,{Path,AfterPath}}|Info#mod.data]}
+ end.
+
+port_string(80) ->
+ "";
+port_string(Port) ->
+ ":"++integer_to_list(Port).
+
+%% real_name
+
+real_name(ConfigDB, RequestURI,[]) ->
+ DocumentRoot = httpd_util:lookup(ConfigDB, document_root, ""),
+ RealName = DocumentRoot++RequestURI,
+ {ShortPath, _AfterPath} = httpd_util:split_path(RealName),
+ {Path, AfterPath}=httpd_util:split_path(default_index(ConfigDB,RealName)),
+ {ShortPath, Path, AfterPath};
+real_name(ConfigDB, RequestURI, [{FakeName,RealName}|Rest]) ->
+ case regexp:match(RequestURI, "^"++FakeName) of
+ {match, _, _} ->
+ {ok, ActualName, _} = regexp:sub(RequestURI,
+ "^"++FakeName, RealName),
+ {ShortPath, _AfterPath} = httpd_util:split_path(ActualName),
+ {Path, AfterPath} =
+ httpd_util:split_path(default_index(ConfigDB, ActualName)),
+ {ShortPath, Path, AfterPath};
+ nomatch ->
+ real_name(ConfigDB,RequestURI,Rest)
+ end.
+
+%% real_script_name
+
+real_script_name(ConfigDB,RequestURI,[]) ->
+ not_a_script;
+real_script_name(ConfigDB,RequestURI,[{FakeName,RealName}|Rest]) ->
+ case regexp:match(RequestURI,"^"++FakeName) of
+ {match,_,_} ->
+ {ok,ActualName,_}=regexp:sub(RequestURI,"^"++FakeName,RealName),
+ httpd_util:split_script_path(default_index(ConfigDB,ActualName));
+ nomatch ->
+ real_script_name(ConfigDB,RequestURI,Rest)
+ end.
+
+%% default_index
+
+default_index(ConfigDB, Path) ->
+ case file:read_file_info(Path) of
+ {ok, FileInfo} when FileInfo#file_info.type == directory ->
+ DirectoryIndex = httpd_util:lookup(ConfigDB, directory_index, []),
+ append_index(Path, DirectoryIndex);
+ _ ->
+ Path
+ end.
+
+append_index(RealName, []) ->
+ RealName;
+append_index(RealName, [Index|Rest]) ->
+ case file:read_file_info(filename:join(RealName, Index)) of
+ {error,Reason} ->
+ append_index(RealName, Rest);
+ _ ->
+ filename:join(RealName,Index)
+ end.
+
+%% path
+
+path(Data, ConfigDB, RequestURI) ->
+ case httpd_util:key1search(Data,real_name) of
+ undefined ->
+ DocumentRoot = httpd_util:lookup(ConfigDB, document_root, ""),
+ {Path,AfterPath} =
+ httpd_util:split_path(DocumentRoot++RequestURI),
+ Path;
+ {Path,AfterPath} ->
+ Path
+ end.
+
+%%
+%% Configuration
+%%
+
+%% load
+
+load([$D,$i,$r,$e,$c,$t,$o,$r,$y,$I,$n,$d,$e,$x,$ |DirectoryIndex],[]) ->
+ {ok, DirectoryIndexes} = regexp:split(DirectoryIndex," "),
+ {ok,[], {directory_index, DirectoryIndexes}};
+load([$A,$l,$i,$a,$s,$ |Alias],[]) ->
+ case regexp:split(Alias," ") of
+ {ok, [FakeName, RealName]} ->
+ {ok,[],{alias,{FakeName,RealName}}};
+ {ok, _} ->
+ {error,?NICE(httpd_conf:clean(Alias)++" is an invalid Alias")}
+ end;
+load([$S,$c,$r,$i,$p,$t,$A,$l,$i,$a,$s,$ |ScriptAlias],[]) ->
+ case regexp:split(ScriptAlias," ") of
+ {ok, [FakeName, RealName]} ->
+ %% Make sure the path always has a trailing slash..
+ RealName1 = filename:join(filename:split(RealName)),
+ {ok, [], {script_alias,{FakeName, RealName1++"/"}}};
+ {ok, _} ->
+ {error, ?NICE(httpd_conf:clean(ScriptAlias)++
+ " is an invalid ScriptAlias")}
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth.erl
new file mode 100644
index 0000000000..9f3289c826
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth.erl
@@ -0,0 +1,748 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_auth.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+-module(mod_auth).
+
+
+%% The functions that the web server calls on startup/stop
+%% and when the server traverses the modules.
+-export([do/1, load/2, store/2, remove/1]).
+
+%% User entries to the gen-server.
+-export([add_user/2, add_user/5, add_user/6,
+ add_group_member/3, add_group_member/4, add_group_member/5,
+ list_users/1, list_users/2, list_users/3,
+ delete_user/2, delete_user/3, delete_user/4,
+ delete_group_member/3, delete_group_member/4, delete_group_member/5,
+ list_groups/1, list_groups/2, list_groups/3,
+ delete_group/2, delete_group/3, delete_group/4,
+ get_user/2, get_user/3, get_user/4,
+ list_group_members/2, list_group_members/3, list_group_members/4,
+ update_password/6, update_password/5]).
+
+-include("httpd.hrl").
+-include("mod_auth.hrl").
+
+-define(VMODULE,"AUTH").
+-include("httpd_verbosity.hrl").
+
+-define(NOPASSWORD,"NoPassword").
+
+
+%% do
+do(Info) ->
+ ?vtrace("do", []),
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ {proceed, Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.data,response) of
+ %% No response has been generated!
+ undefined ->
+ Path = mod_alias:path(Info#mod.data,Info#mod.config_db,
+ Info#mod.request_uri),
+ %% Is it a secret area?
+ case secretp(Path,Info#mod.config_db) of
+ {yes, Directory, DirectoryData} ->
+ %% Authenticate (allow)
+ case allow((Info#mod.init_data)#init_data.peername,
+ Info#mod.socket_type,Info#mod.socket,
+ DirectoryData) of
+ allowed ->
+ case deny((Info#mod.init_data)#init_data.peername,
+ Info#mod.socket_type, Info#mod.socket,
+ DirectoryData) of
+ not_denied ->
+ case httpd_util:key1search(DirectoryData,
+ auth_type) of
+ undefined ->
+ {proceed, Info#mod.data};
+ none ->
+ {proceed, Info#mod.data};
+ AuthType ->
+ do_auth(Info,
+ Directory,
+ DirectoryData,
+ AuthType)
+ end;
+ {denied, Reason} ->
+ {proceed,
+ [{status,{403,Info#mod.request_uri,Reason}}|
+ Info#mod.data]}
+ end;
+ {not_allowed, Reason} ->
+ {proceed,[{status,{403,Info#mod.request_uri,Reason}}|
+ Info#mod.data]}
+ end;
+ no ->
+ {proceed, Info#mod.data}
+ end;
+ %% A response has been generated or sent!
+ Response ->
+ {proceed, Info#mod.data}
+ end
+ end.
+
+
+do_auth(Info, Directory, DirectoryData, AuthType) ->
+ %% Authenticate (require)
+ case require(Info, Directory, DirectoryData) of
+ authorized ->
+ {proceed,Info#mod.data};
+ {authorized, User} ->
+ {proceed, [{remote_user,User}|Info#mod.data]};
+ {authorization_failed, Reason} ->
+ ?vtrace("do_auth -> authorization_failed: ~p",[Reason]),
+ {proceed, [{status,{401,none,Reason}}|Info#mod.data]};
+ {authorization_required, Realm} ->
+ ?vtrace("do_auth -> authorization_required: ~p",[Realm]),
+ ReasonPhrase = httpd_util:reason_phrase(401),
+ Message = httpd_util:message(401,none,Info#mod.config_db),
+ {proceed,
+ [{response,
+ {401,
+ ["WWW-Authenticate: Basic realm=\"",Realm,
+ "\"\r\n\r\n","<HTML>\n<HEAD>\n<TITLE>",
+ ReasonPhrase,"</TITLE>\n",
+ "</HEAD>\n<BODY>\n<H1>",ReasonPhrase,
+ "</H1>\n",Message,"\n</BODY>\n</HTML>\n"]}}|
+ Info#mod.data]};
+ {status, {StatusCode,PhraseArgs,Reason}} ->
+ {proceed, [{status,{StatusCode,PhraseArgs,Reason}}|
+ Info#mod.data]}
+ end.
+
+
+%% require
+
+require(Info, Directory, DirectoryData) ->
+ ParsedHeader = Info#mod.parsed_header,
+ ValidUsers = httpd_util:key1search(DirectoryData, require_user),
+ ValidGroups = httpd_util:key1search(DirectoryData, require_group),
+
+ %% Any user or group restrictions?
+ case ValidGroups of
+ undefined when ValidUsers == undefined ->
+ authorized;
+ _ ->
+ case httpd_util:key1search(ParsedHeader, "authorization") of
+ %% Authorization required!
+ undefined ->
+ case httpd_util:key1search(DirectoryData, auth_name) of
+ undefined ->
+ {status,{500,none,?NICE("AuthName directive not specified")}};
+ Realm ->
+ {authorization_required, Realm}
+ end;
+ %% Check credentials!
+ [$B,$a,$s,$i,$c,$ | EncodedString] ->
+ DecodedString = httpd_util:decode_base64(EncodedString),
+ case a_valid_user(Info, DecodedString,
+ ValidUsers, ValidGroups,
+ Directory, DirectoryData) of
+ {yes, User} ->
+ {authorized, User};
+ {no, Reason} ->
+ {authorization_failed, Reason};
+ {status, {StatusCode,PhraseArgs,Reason}} ->
+ {status,{StatusCode,PhraseArgs,Reason}}
+ end;
+ %% Bad credentials!
+ BadCredentials ->
+ {status,{401,none,?NICE("Bad credentials "++BadCredentials)}}
+ end
+ end.
+
+a_valid_user(Info,DecodedString,ValidUsers,ValidGroups,Dir,DirData) ->
+ case httpd_util:split(DecodedString,":",2) of
+ {ok,[SupposedUser, Password]} ->
+ case user_accepted(SupposedUser, ValidUsers) of
+ true ->
+ check_password(SupposedUser, Password, Dir, DirData);
+ false ->
+ case group_accepted(Info,SupposedUser,ValidGroups,Dir,DirData) of
+ true ->
+ check_password(SupposedUser,Password,Dir,DirData);
+ false ->
+ {no,?NICE("No such user exists")}
+ end
+ end;
+ {ok,BadCredentials} ->
+ {status,{401,none,?NICE("Bad credentials "++BadCredentials)}}
+ end.
+
+user_accepted(SupposedUser, undefined) ->
+ false;
+user_accepted(SupposedUser, ValidUsers) ->
+ lists:member(SupposedUser, ValidUsers).
+
+
+group_accepted(Info, User, undefined, Dir, DirData) ->
+ false;
+group_accepted(Info, User, [], Dir, DirData) ->
+ false;
+group_accepted(Info, User, [Group|Rest], Dir, DirData) ->
+ Ret = int_list_group_members(Group, Dir, DirData),
+ case Ret of
+ {ok, UserList} ->
+ case lists:member(User, UserList) of
+ true ->
+ true;
+ false ->
+ group_accepted(Info, User, Rest, Dir, DirData)
+ end;
+ Other ->
+ false
+ end.
+
+check_password(User, Password, Dir, DirData) ->
+ case int_get_user(DirData, User) of
+ {ok, UStruct} ->
+ case UStruct#httpd_user.password of
+ Password ->
+ %% FIXME
+ {yes, UStruct#httpd_user.username};
+ Other ->
+ {no, "No such user"} % Don't say 'Bad Password' !!!
+ end;
+ _ ->
+ {no, "No such user"}
+ end.
+
+
+%% Middle API. These functions call the appropriate authentication module.
+int_get_user(DirData, User) ->
+ AuthMod = auth_mod_name(DirData),
+ apply(AuthMod, get_user, [DirData, User]).
+
+int_list_group_members(Group, Dir, DirData) ->
+ AuthMod = auth_mod_name(DirData),
+ apply(AuthMod, list_group_members, [DirData, Group]).
+
+auth_mod_name(DirData) ->
+ case httpd_util:key1search(DirData, auth_type, plain) of
+ plain -> mod_auth_plain;
+ mnesia -> mod_auth_mnesia;
+ dets -> mod_auth_dets
+ end.
+
+
+%%
+%% Is it a secret area?
+%%
+
+%% secretp
+
+secretp(Path,ConfigDB) ->
+ Directories = ets:match(ConfigDB,{directory,'$1','_'}),
+ case secret_path(Path, Directories) of
+ {yes,Directory} ->
+ {yes,Directory,
+ lists:flatten(ets:match(ConfigDB,{directory,Directory,'$1'}))};
+ no ->
+ no
+ end.
+
+secret_path(Path,Directories) ->
+ secret_path(Path, httpd_util:uniq(lists:sort(Directories)),to_be_found).
+
+secret_path(Path,[],to_be_found) ->
+ no;
+secret_path(Path,[],Directory) ->
+ {yes,Directory};
+secret_path(Path,[[NewDirectory]|Rest],Directory) ->
+ case regexp:match(Path,NewDirectory) of
+ {match,_,_} when Directory == to_be_found ->
+ secret_path(Path,Rest,NewDirectory);
+ {match,_,Length} when Length > length(Directory)->
+ secret_path(Path,Rest,NewDirectory);
+ {match,_,Length} ->
+ secret_path(Path,Rest,Directory);
+ nomatch ->
+ secret_path(Path,Rest,Directory)
+ end.
+
+%%
+%% Authenticate
+%%
+
+%% allow
+
+allow({_,RemoteAddr},SocketType,Socket,DirectoryData) ->
+ Hosts = httpd_util:key1search(DirectoryData, allow_from, all),
+ case validate_addr(RemoteAddr,Hosts) of
+ true ->
+ allowed;
+ false ->
+ {not_allowed, ?NICE("Connection from your host is not allowed")}
+ end.
+
+validate_addr(RemoteAddr,all) -> % When called from 'allow'
+ true;
+validate_addr(RemoteAddr,none) -> % When called from 'deny'
+ false;
+validate_addr(RemoteAddr,[]) ->
+ false;
+validate_addr(RemoteAddr,[HostRegExp|Rest]) ->
+ ?DEBUG("validate_addr -> RemoteAddr: ~p HostRegExp: ~p",
+ [RemoteAddr, HostRegExp]),
+ case regexp:match(RemoteAddr, HostRegExp) of
+ {match,_,_} ->
+ true;
+ nomatch ->
+ validate_addr(RemoteAddr,Rest)
+ end.
+
+%% deny
+
+deny({_,RemoteAddr},SocketType,Socket,DirectoryData) ->
+ ?DEBUG("deny -> RemoteAddr: ~p",[RemoteAddr]),
+ Hosts = httpd_util:key1search(DirectoryData, deny_from, none),
+ ?DEBUG("deny -> Hosts: ~p",[Hosts]),
+ case validate_addr(RemoteAddr,Hosts) of
+ true ->
+ {denied, ?NICE("Connection from your host is not allowed")};
+ false ->
+ not_denied
+ end.
+
+%%
+%% Configuration
+%%
+
+%% load/2
+%%
+
+%% mod_auth recognizes the following Configuration Directives:
+%% <Directory /path/to/directory>
+%% AuthDBType
+%% AuthName
+%% AuthUserFile
+%% AuthGroupFile
+%% AuthAccessPassword
+%% require
+%% allow
+%% </Directory>
+
+%% When a <Directory> directive is found, a new context is set to
+%% [{directory, Directory, DirData}|OtherContext]
+%% DirData in this case is a key-value list of data belonging to the
+%% directory in question.
+%%
+%% When the </Directory> statement is found, the Context created earlier
+%% will be returned as a ConfigList and the context will return to the
+%% state it was previously.
+
+load([$<,$D,$i,$r,$e,$c,$t,$o,$r,$y,$ |Directory],[]) ->
+ Dir = httpd_conf:custom_clean(Directory,"",">"),
+ {ok,[{directory, Dir, [{path, Dir}]}]};
+load(eof,[{directory,Directory, DirData}|_]) ->
+ {error, ?NICE("Premature end-of-file in "++Directory)};
+
+load([$A,$u,$t,$h,$N,$a,$m,$e,$ |AuthName], [{directory,Directory, DirData}|Rest]) ->
+ {ok, [{directory,Directory,
+ [ {auth_name, httpd_conf:clean(AuthName)}|DirData]} | Rest ]};
+
+load([$A,$u,$t,$h,$U,$s,$e,$r,$F,$i,$l,$e,$ |AuthUserFile0],
+ [{directory, Directory, DirData}|Rest]) ->
+ AuthUserFile = httpd_conf:clean(AuthUserFile0),
+ {ok,[{directory,Directory,
+ [ {auth_user_file, AuthUserFile}|DirData]} | Rest ]};
+
+load([$A,$u,$t,$h,$G,$r,$o,$u,$p,$F,$i,$l,$e,$ |AuthGroupFile0],
+ [{directory,Directory, DirData}|Rest]) ->
+ AuthGroupFile = httpd_conf:clean(AuthGroupFile0),
+ {ok,[{directory,Directory,
+ [ {auth_group_file, AuthGroupFile}|DirData]} | Rest]};
+
+%AuthAccessPassword
+load([$A,$u,$t,$h,$A,$c,$c,$e,$s,$s,$P,$a,$s,$s,$w,$o,$r,$d,$ |AuthAccessPassword0],
+ [{directory,Directory, DirData}|Rest]) ->
+ AuthAccessPassword = httpd_conf:clean(AuthAccessPassword0),
+ {ok,[{directory,Directory,
+ [{auth_access_password, AuthAccessPassword}|DirData]} | Rest]};
+
+
+
+
+load([$A,$u,$t,$h,$D,$B,$T,$y,$p,$e,$ |Type],
+ [{directory, Dir, DirData}|Rest]) ->
+ case httpd_conf:clean(Type) of
+ "plain" ->
+ {ok, [{directory, Dir, [{auth_type, plain}|DirData]} | Rest ]};
+ "mnesia" ->
+ {ok, [{directory, Dir, [{auth_type, mnesia}|DirData]} | Rest ]};
+ "dets" ->
+ {ok, [{directory, Dir, [{auth_type, dets}|DirData]} | Rest ]};
+ _ ->
+ {error, ?NICE(httpd_conf:clean(Type)++" is an invalid AuthDBType")}
+ end;
+
+load([$r,$e,$q,$u,$i,$r,$e,$ |Require],[{directory,Directory, DirData}|Rest]) ->
+ case regexp:split(Require," ") of
+ {ok,["user"|Users]} ->
+ {ok,[{directory,Directory,
+ [{require_user,Users}|DirData]} | Rest]};
+ {ok,["group"|Groups]} ->
+ {ok,[{directory,Directory,
+ [{require_group,Groups}|DirData]} | Rest]};
+ {ok,_} ->
+ {error,?NICE(httpd_conf:clean(Require)++" is an invalid require")}
+ end;
+
+load([$a,$l,$l,$o,$w,$ |Allow],[{directory,Directory, DirData}|Rest]) ->
+ case regexp:split(Allow," ") of
+ {ok,["from","all"]} ->
+ {ok,[{directory,Directory,
+ [{allow_from,all}|DirData]} | Rest]};
+ {ok,["from"|Hosts]} ->
+ {ok,[{directory,Directory,
+ [{allow_from,Hosts}|DirData]} | Rest]};
+ {ok,_} ->
+ {error,?NICE(httpd_conf:clean(Allow)++" is an invalid allow")}
+ end;
+
+load([$d,$e,$n,$y,$ |Deny],[{directory,Directory, DirData}|Rest]) ->
+ case regexp:split(Deny," ") of
+ {ok, ["from", "all"]} ->
+ {ok,[{directory, Directory,
+ [{deny_from, all}|DirData]} | Rest]};
+ {ok, ["from"|Hosts]} ->
+ {ok,[{directory, Directory,
+ [{deny_from, Hosts}|DirData]} | Rest]};
+ {ok, _} ->
+ {error,?NICE(httpd_conf:clean(Deny)++" is an invalid deny")}
+ end;
+
+load("</Directory>",[{directory,Directory, DirData}|Rest]) ->
+ {ok, Rest, {directory, Directory, DirData}};
+
+load([$A,$u,$t,$h,$M,$n,$e,$s,$i,$a,$D,$B,$ |AuthMnesiaDB],
+ [{directory, Dir, DirData}|Rest]) ->
+ case httpd_conf:clean(AuthMnesiaDB) of
+ "On" ->
+ {ok,[{directory,Dir,[{auth_type,mnesia}|DirData]}|Rest]};
+ "Off" ->
+ {ok,[{directory,Dir,[{auth_type,plain}|DirData]}|Rest]};
+ _ ->
+ {error, ?NICE(httpd_conf:clean(AuthMnesiaDB)++" is an invalid AuthMnesiaDB")}
+ end.
+
+%% store
+
+store({directory,Directory0, DirData0}, ConfigList) ->
+ Port = httpd_util:key1search(ConfigList, port),
+ DirData = case httpd_util:key1search(ConfigList, bind_address) of
+ undefined ->
+ [{port, Port}|DirData0];
+ Addr ->
+ [{port, Port},{bind_address,Addr}|DirData0]
+ end,
+ Directory =
+ case filename:pathtype(Directory0) of
+ relative ->
+ SR = httpd_util:key1search(ConfigList, server_root),
+ filename:join(SR, Directory0);
+ _ ->
+ Directory0
+ end,
+ AuthMod =
+ case httpd_util:key1search(DirData0, auth_type) of
+ mnesia -> mod_auth_mnesia;
+ dets -> mod_auth_dets;
+ plain -> mod_auth_plain;
+ _ -> no_module_at_all
+ end,
+ case AuthMod of
+ no_module_at_all ->
+ {ok, {directory, Directory, DirData}};
+ _ ->
+	    %% Check that a password is present, or add the standard password
+	    %% "NoPassword".
+	    %% In this way a user must explicitly choose to use no password.
+ Pwd = case httpd_util:key1search(DirData,auth_access_password)of
+ undefined->
+ ?NOPASSWORD;
+ PassW->
+ PassW
+ end,
+ DirDataLast = lists:keydelete(auth_access_password,1,DirData),
+ case catch AuthMod:store_directory_data(Directory, DirDataLast) of
+ ok ->
+ add_auth_password(Directory,Pwd,ConfigList),
+ {ok, {directory, Directory, DirDataLast}};
+ {ok, NewDirData} ->
+ add_auth_password(Directory,Pwd,ConfigList),
+ {ok, {directory, Directory, NewDirData}};
+ {error, Reason} ->
+ {error, Reason};
+ Other ->
+ ?ERROR("unexpected result: ~p",[Other]),
+ {error, Other}
+ end
+ end.
+
+
+add_auth_password(Dir, Pwd0, ConfigList) ->
+ Addr = httpd_util:key1search(ConfigList, bind_address),
+ Port = httpd_util:key1search(ConfigList, port),
+ mod_auth_server:start(Addr, Port),
+ mod_auth_server:add_password(Addr, Port, Dir, Pwd0).
+
+%% remove
+
+
+remove(ConfigDB) ->
+ lists:foreach(fun({directory, Dir, DirData}) ->
+ AuthMod = auth_mod_name(DirData),
+ (catch apply(AuthMod, remove, [DirData]))
+ end,
+ ets:match_object(ConfigDB,{directory,'_','_'})),
+ Addr = case lookup(ConfigDB, bind_address) of
+ [] ->
+ undefined;
+ [{bind_address, Address}] ->
+ Address
+ end,
+ [{port, Port}] = lookup(ConfigDB, port),
+ mod_auth_server:stop(Addr, Port),
+ ok.
+
+
+
+
+%% --------------------------------------------------------------------
+
+%% update_password
+
+update_password(Port, Dir, Old, New, New)->
+ update_password(undefined, Port, Dir, Old, New, New).
+
+update_password(Addr, Port, Dir, Old, New, New) when list(New) ->
+ mod_auth_server:update_password(Addr, Port, Dir, Old, New);
+
+update_password(_Addr, _Port, _Dir, _Old, New, New) ->
+ {error, badtype};
+update_password(_Addr, _Port, _Dir, _Old, New, New1) ->
+    {error, notequal}.
+
+
+%% add_user
+
+add_user(UserName, Opt) ->
+ case get_options(Opt, mandatory) of
+ {Addr, Port, Dir, AuthPwd}->
+ case get_options(Opt, userData) of
+ {error, Reason}->
+ {error, Reason};
+ {UserData, Password}->
+ User = [#httpd_user{username = UserName,
+ password = Password,
+ user_data = UserData}],
+ mod_auth_server:add_user(Addr, Port, Dir, User, AuthPwd);
+ {error, Reason} ->
+ {error, Reason}
+ end
+ end.
+
+
+add_user(UserName, Password, UserData, Port, Dir) ->
+ add_user(UserName, Password, UserData, undefined, Port, Dir).
+add_user(UserName, Password, UserData, Addr, Port, Dir) ->
+ User = [#httpd_user{username = UserName,
+ password = Password,
+ user_data = UserData}],
+ mod_auth_server:add_user(Addr, Port, Dir, User, ?NOPASSWORD).
+
+
+%% get_user
+
+get_user(UserName, Opt) ->
+ case get_options(Opt, mandatory) of
+ {Addr, Port, Dir, AuthPwd} ->
+ mod_auth_server:get_user(Addr, Port, Dir, UserName, AuthPwd);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+get_user(UserName, Port, Dir) ->
+ get_user(UserName, undefined, Port, Dir).
+get_user(UserName, Addr, Port, Dir) ->
+ mod_auth_server:get_user(Addr, Port, Dir, UserName, ?NOPASSWORD).
+
+
+%% add_group_member
+
+add_group_member(GroupName, UserName, Opt)->
+ case get_options(Opt, mandatory) of
+ {Addr, Port, Dir, AuthPwd}->
+ mod_auth_server:add_group_member(Addr, Port, Dir,
+ GroupName, UserName, AuthPwd);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+add_group_member(GroupName, UserName, Port, Dir) ->
+ add_group_member(GroupName, UserName, undefined, Port, Dir).
+
+add_group_member(GroupName, UserName, Addr, Port, Dir) ->
+ mod_auth_server:add_group_member(Addr, Port, Dir,
+ GroupName, UserName, ?NOPASSWORD).
+
+
+%% delete_group_member
+
+delete_group_member(GroupName, UserName, Opt) ->
+ case get_options(Opt, mandatory) of
+ {Addr, Port, Dir, AuthPwd} ->
+ mod_auth_server:delete_group_member(Addr, Port, Dir,
+ GroupName, UserName, AuthPwd);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+delete_group_member(GroupName, UserName, Port, Dir) ->
+ delete_group_member(GroupName, UserName, undefined, Port, Dir).
+delete_group_member(GroupName, UserName, Addr, Port, Dir) ->
+ mod_auth_server:delete_group_member(Addr, Port, Dir,
+ GroupName, UserName, ?NOPASSWORD).
+
+
+%% list_users
+
+list_users(Opt) ->
+ case get_options(Opt, mandatory) of
+ {Addr, Port, Dir, AuthPwd} ->
+ mod_auth_server:list_users(Addr, Port, Dir, AuthPwd);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+list_users(Port, Dir) ->
+ list_users(undefined, Port, Dir).
+list_users(Addr, Port, Dir) ->
+ mod_auth_server:list_users(Addr, Port, Dir, ?NOPASSWORD).
+
+
+%% delete_user
+
+delete_user(UserName, Opt) ->
+ case get_options(Opt, mandatory) of
+ {Addr, Port, Dir, AuthPwd} ->
+ mod_auth_server:delete_user(Addr, Port, Dir, UserName, AuthPwd);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+delete_user(UserName, Port, Dir) ->
+ delete_user(UserName, undefined, Port, Dir).
+delete_user(UserName, Addr, Port, Dir) ->
+ mod_auth_server:delete_user(Addr, Port, Dir, UserName, ?NOPASSWORD).
+
+
+%% delete_group
+
+delete_group(GroupName, Opt) ->
+ case get_options(Opt, mandatory) of
+ {Addr, Port, Dir, AuthPwd}->
+ mod_auth_server:delete_group(Addr, Port, Dir, GroupName, AuthPwd);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+delete_group(GroupName, Port, Dir) ->
+ delete_group(GroupName, undefined, Port, Dir).
+delete_group(GroupName, Addr, Port, Dir) ->
+ mod_auth_server:delete_group(Addr, Port, Dir, GroupName, ?NOPASSWORD).
+
+
+%% list_groups
+
+list_groups(Opt) ->
+ case get_options(Opt, mandatory) of
+ {Addr, Port, Dir, AuthPwd}->
+ mod_auth_server:list_groups(Addr, Port, Dir, AuthPwd);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+list_groups(Port, Dir) ->
+ list_groups(undefined, Port, Dir).
+list_groups(Addr, Port, Dir) ->
+ mod_auth_server:list_groups(Addr, Port, Dir, ?NOPASSWORD).
+
+
+%% list_group_members
+
+list_group_members(GroupName,Opt) ->
+ case get_options(Opt, mandatory) of
+ {Addr, Port, Dir, AuthPwd} ->
+ mod_auth_server:list_group_members(Addr, Port, Dir, GroupName,
+ AuthPwd);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+list_group_members(GroupName, Port, Dir) ->
+ list_group_members(GroupName, undefined, Port, Dir).
+list_group_members(GroupName, Addr, Port, Dir) ->
+ mod_auth_server:list_group_members(Addr, Port, Dir, GroupName, ?NOPASSWORD).
+
+
+
+%% Opt = [{port, Port},
+%% {addr, Addr},
+%% {dir, Dir},
+%% {authPassword, AuthPassword} | FunctionSpecificData]
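+%%
+%% For illustration only (all values below are hypothetical), a call such as
+%%
+%%   mod_auth:add_user("alice",
+%%                     [{port, 8888},
+%%                      {dir, "/var/tmp/server_root/secret"},
+%%                      {authPassword, "secret"},
+%%                      {userData, []},
+%%                      {password, "pwd"}])
+%%
+%% would make get_options(Opt, mandatory) return {Addr, Port, Dir, AuthPwd}
+%% (Addr defaulting to undefined since no addr option is given) and
+%% get_options(Opt, userData) return {UserData, Password}.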
+get_options(Opt, mandatory)->
+ case httpd_util:key1search(Opt, port, undefined) of
+ Port when integer(Port) ->
+ case httpd_util:key1search(Opt, dir, undefined) of
+ Dir when list(Dir) ->
+ Addr = httpd_util:key1search(Opt,
+ addr,
+ undefined),
+ AuthPwd = httpd_util:key1search(Opt,
+ authPassword,
+ ?NOPASSWORD),
+ {Addr, Port, Dir, AuthPwd};
+ _->
+ {error, bad_dir}
+ end;
+ _ ->
+ {error, bad_dir}
+ end;
+
+%% FunctionSpecificData = {userData, UserData} | {password, Password}
+get_options(Opt, userData)->
+ case httpd_util:key1search(Opt, userData, undefined) of
+ undefined ->
+ {error, no_userdata};
+ UserData ->
+ case httpd_util:key1search(Opt, password, undefined) of
+ undefined->
+ {error, no_password};
+ Pwd ->
+ {UserData, Pwd}
+ end
+ end.
+
+
+lookup(Db, Key) ->
+ ets:lookup(Db, Key).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth.hrl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth.hrl
new file mode 100644
index 0000000000..2b8ea6657f
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth.hrl
@@ -0,0 +1,26 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_auth.hrl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
+%%
+
+-record(httpd_user,
+ {username,
+ password,
+ user_data}).
+
+-record(httpd_group,
+ {name,
+ userlist}).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_dets.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_dets.erl
new file mode 100644
index 0000000000..d947d6cf49
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_dets.erl
@@ -0,0 +1,222 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_auth_dets.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_auth_dets).
+
+%% dets authentication storage
+
+-export([get_user/2,
+ list_group_members/2,
+ add_user/2,
+ add_group_member/3,
+ list_users/1,
+ delete_user/2,
+ list_groups/1,
+ delete_group_member/3,
+ delete_group/2,
+ remove/1]).
+
+-export([store_directory_data/2]).
+
+-include("httpd.hrl").
+-include("mod_auth.hrl").
+
+store_directory_data(Directory, DirData) ->
+ ?CDEBUG("store_directory_data -> ~n"
+ " Directory: ~p~n"
+ " DirData: ~p",
+ [Directory, DirData]),
+
+ PWFile = httpd_util:key1search(DirData, auth_user_file),
+ GroupFile = httpd_util:key1search(DirData, auth_group_file),
+ Addr = httpd_util:key1search(DirData, bind_address),
+ Port = httpd_util:key1search(DirData, port),
+
+ PWName = httpd_util:make_name("httpd_dets_pwdb",Addr,Port),
+ case dets:open_file(PWName,[{type,set},{file,PWFile},{repair,true}]) of
+ {ok, PWDB} ->
+ GDBName = httpd_util:make_name("httpd_dets_groupdb",Addr,Port),
+ case dets:open_file(GDBName,[{type,set},{file,GroupFile},{repair,true}]) of
+ {ok, GDB} ->
+ NDD1 = lists:keyreplace(auth_user_file, 1, DirData,
+ {auth_user_file, PWDB}),
+ NDD2 = lists:keyreplace(auth_group_file, 1, NDD1,
+ {auth_group_file, GDB}),
+ {ok, NDD2};
+ {error, Err}->
+ {error, {{file, GroupFile},Err}}
+ end;
+ {error, Err2} ->
+ {error, {{file, PWFile},Err2}}
+ end.
+
+%%
+%% Storage format of users in the dets table:
+%% {{UserName, Addr, Port, Dir}, Password, UserData}
+%%
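+%% For illustration only (hypothetical values), a stored entry could look like
+%%
+%%   {{"alice", undefined, 8888, "/var/tmp/server_root/secret"}, "s3cret", []}
+%%
+%% i.e. the key combines user name, bind address, port and directory, so the
+%% same user name can be stored independently per directory and per server.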
+
+add_user(DirData, UStruct) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ PWDB = httpd_util:key1search(DirData, auth_user_file),
+ Record = {{UStruct#httpd_user.username, Addr, Port, Dir},
+ UStruct#httpd_user.password, UStruct#httpd_user.user_data},
+ case dets:lookup(PWDB, UStruct#httpd_user.username) of
+ [Record] ->
+ {error, user_already_in_db};
+ _ ->
+ dets:insert(PWDB, Record),
+ true
+ end.
+
+get_user(DirData, UserName) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ PWDB = httpd_util:key1search(DirData, auth_user_file),
+ User = {UserName, Addr, Port, Dir},
+ case dets:lookup(PWDB, User) of
+ [{User, Password, UserData}] ->
+ {ok, #httpd_user{username=UserName, password=Password, user_data=UserData}};
+ Other ->
+ {error, no_such_user}
+ end.
+
+list_users(DirData) ->
+ ?DEBUG("list_users -> ~n"
+ " DirData: ~p", [DirData]),
+ {Addr, Port, Dir} = lookup_common(DirData),
+ PWDB = httpd_util:key1search(DirData, auth_user_file),
+ case dets:traverse(PWDB, fun(X) -> {continue, X} end) of %% SOOOO Ugly !
+ Records when list(Records) ->
+ ?DEBUG("list_users -> ~n"
+ " Records: ~p", [Records]),
+ {ok, [UserName || {{UserName, AnyAddr, AnyPort, AnyDir}, Password, _Data} <- Records,
+ AnyAddr == Addr, AnyPort == Port, AnyDir == Dir]};
+ O ->
+ ?DEBUG("list_users -> ~n"
+ " O: ~p", [O]),
+ {ok, []}
+ end.
+
+delete_user(DirData, UserName) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ PWDB = httpd_util:key1search(DirData, auth_user_file),
+ User = {UserName, Addr, Port, Dir},
+ case dets:lookup(PWDB, User) of
+ [{User, SomePassword, UserData}] ->
+ dets:delete(PWDB, User),
+ lists:foreach(fun(Group) -> delete_group_member(DirData, Group, UserName) end,
+ list_groups(DirData)),
+ true;
+ _ ->
+ {error, no_such_user}
+ end.
+
+%%
+%% Storage of groups in the dets table:
+%% {Group, UserList} where UserList is a list of strings.
+%%
+add_group_member(DirData, GroupName, UserName) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ Group = {GroupName, Addr, Port, Dir},
+ case dets:lookup(GDB, Group) of
+ [{Group, Users}] ->
+ case lists:member(UserName, Users) of
+ true ->
+ true;
+ false ->
+ dets:insert(GDB, {Group, [UserName|Users]}),
+ true
+ end;
+ [] ->
+ dets:insert(GDB, {Group, [UserName]}),
+ true;
+ Other ->
+ {error, Other}
+ end.
+
+list_group_members(DirData, GroupName) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ Group = {GroupName, Addr, Port, Dir},
+ case dets:lookup(GDB, Group) of
+ [{Group, Users}] ->
+ {ok, Users};
+ Other ->
+ {error, no_such_group}
+ end.
+
+list_groups(DirData) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ case dets:match(GDB, {'$1', '_'}) of
+ [] ->
+ {ok, []};
+ List when list(List) ->
+ Groups = lists:flatten(List),
+ {ok, [GroupName || {GroupName, AnyAddr, AnyPort, AnyDir} <- Groups,
+ AnyAddr == Addr, AnyPort == Port, AnyDir == Dir]};
+ _ ->
+ {ok, []}
+ end.
+
+delete_group_member(DirData, GroupName, UserName) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ Group = {GroupName, Addr, Port, Dir},
+ case dets:lookup(GDB, GroupName) of
+ [{Group, Users}] ->
+ case lists:member(UserName, Users) of
+ true ->
+ dets:delete(GDB, Group),
+ dets:insert(GDB, {Group,
+ lists:delete(UserName, Users)}),
+ true;
+ false ->
+ {error, no_such_group_member}
+ end;
+ _ ->
+ {error, no_such_group}
+ end.
+
+delete_group(DirData, GroupName) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ Group = {GroupName, Addr, Port, Dir},
+ case dets:lookup(GDB, Group) of
+ [{Group, Users}] ->
+ dets:delete(GDB, Group),
+ true;
+ _ ->
+ {error, no_such_group}
+ end.
+
+lookup_common(DirData) ->
+ Dir = httpd_util:key1search(DirData, path),
+ Port = httpd_util:key1search(DirData, port),
+ Addr = httpd_util:key1search(DirData, bind_address),
+ {Addr, Port, Dir}.
+
+%% remove/1
+%%
+%% Closes dets tables used by this auth mod.
+%%
+remove(DirData) ->
+ PWDB = httpd_util:key1search(DirData, auth_user_file),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ dets:close(GDB),
+ dets:close(PWDB),
+ ok.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl
new file mode 100644
index 0000000000..ea2f0cb905
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl
@@ -0,0 +1,269 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_auth_mnesia.erl,v 1.2 2010/03/04 13:54:19 maria Exp $
+%%
+-module(mod_auth_mnesia).
+-export([get_user/2,
+ list_group_members/2,
+ add_user/2,
+ add_group_member/3,
+ list_users/1,
+ delete_user/2,
+ list_groups/1,
+ delete_group_member/3,
+ delete_group/2]).
+
+-export([store_user/5, store_user/6,
+ store_group_member/5, store_group_member/6,
+ list_group_members/3, list_group_members/4,
+ list_groups/2, list_groups/3,
+ list_users/2, list_users/3,
+ remove_user/4, remove_user/5,
+ remove_group_member/5, remove_group_member/6,
+ remove_group/4, remove_group/5]).
+
+-export([store_directory_data/2]).
+
+-include("httpd.hrl").
+-include("mod_auth.hrl").
+
+
+
+store_directory_data(Directory, DirData) ->
+    %% We don't need to do anything here; we could of course check that the
+    %% appropriate mnesia tables have been created prior to starting the http server.
+ ok.
+
+
+%%
+%% API
+%%
+
+%% Compatibility API
+
+
+store_user(UserName, Password, Port, Dir, AccessPassword) ->
+ %% AccessPassword is ignored - was not used in previous version
+ DirData = [{path,Dir},{port,Port}],
+ UStruct = #httpd_user{username = UserName,
+ password = Password},
+ add_user(DirData, UStruct).
+
+store_user(UserName, Password, Addr, Port, Dir, AccessPassword) ->
+ %% AccessPassword is ignored - was not used in previous version
+ DirData = [{path,Dir},{bind_address,Addr},{port,Port}],
+ UStruct = #httpd_user{username = UserName,
+ password = Password},
+ add_user(DirData, UStruct).
+
+store_group_member(GroupName, UserName, Port, Dir, AccessPassword) ->
+ DirData = [{path,Dir},{port,Port}],
+ add_group_member(DirData, GroupName, UserName).
+
+store_group_member(GroupName, UserName, Addr, Port, Dir, AccessPassword) ->
+ DirData = [{path,Dir},{bind_address,Addr},{port,Port}],
+ add_group_member(DirData, GroupName, UserName).
+
+list_group_members(GroupName, Port, Dir) ->
+ DirData = [{path,Dir},{port,Port}],
+ list_group_members(DirData, GroupName).
+
+list_group_members(GroupName, Addr, Port, Dir) ->
+ DirData = [{path,Dir},{bind_address,Addr},{port,Port}],
+ list_group_members(DirData, GroupName).
+
+list_groups(Port, Dir) ->
+ DirData = [{path,Dir},{port,Port}],
+ list_groups(DirData).
+
+list_groups(Addr, Port, Dir) ->
+ DirData = [{path,Dir},{bind_address,Addr},{port,Port}],
+ list_groups(DirData).
+
+list_users(Port, Dir) ->
+ DirData = [{path,Dir},{port,Port}],
+ list_users(DirData).
+
+list_users(Addr, Port, Dir) ->
+ DirData = [{path,Dir},{bind_address,Addr},{port,Port}],
+ list_users(DirData).
+
+remove_user(UserName, Port, Dir, _AccessPassword) ->
+ DirData = [{path,Dir},{port,Port}],
+ delete_user(DirData, UserName).
+
+remove_user(UserName, Addr, Port, Dir, _AccessPassword) ->
+ DirData = [{path,Dir},{bind_address,Addr},{port,Port}],
+ delete_user(DirData, UserName).
+
+remove_group_member(GroupName,UserName,Port,Dir,_AccessPassword) ->
+ DirData = [{path,Dir},{port,Port}],
+ delete_group_member(DirData, GroupName, UserName).
+
+remove_group_member(GroupName,UserName,Addr,Port,Dir,_AccessPassword) ->
+ DirData = [{path,Dir},{bind_address,Addr},{port,Port}],
+ delete_group_member(DirData, GroupName, UserName).
+
+remove_group(GroupName,Port,Dir,_AccessPassword) ->
+ DirData = [{path,Dir},{port,Port}],
+ delete_group(DirData, GroupName).
+
+remove_group(GroupName,Addr,Port,Dir,_AccessPassword) ->
+ DirData = [{path,Dir},{bind_address,Addr},{port,Port}],
+ delete_group(DirData, GroupName).
+
+%%
+%% Storage format of users in the mnesia table:
+%% httpd_user records
+%%
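+%% For illustration only (hypothetical values), such a record could look like
+%%
+%%   #httpd_user{username  = {"alice", undefined, 8888, "/var/tmp/server_root/secret"},
+%%               password  = "s3cret",
+%%               user_data = []}
+%%
+%% as built by add_user/2 below, where the username field holds the
+%% {UserName, Addr, Port, Dir} key.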
+
+add_user(DirData, UStruct) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ UserName = UStruct#httpd_user.username,
+ Password = UStruct#httpd_user.password,
+ Data = UStruct#httpd_user.user_data,
+ User=#httpd_user{username={UserName,Addr,Port,Dir},
+ password=Password,
+ user_data=Data},
+ case mnesia:transaction(fun() -> mnesia:write(User) end) of
+ {aborted,Reason} ->
+ {error,Reason};
+ _ ->
+ true
+ end.
+
+get_user(DirData, UserName) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ case mnesia:transaction(fun() ->
+ mnesia:read({httpd_user,
+ {UserName,Addr,Port,Dir}})
+ end) of
+ {aborted,Reason} ->
+ {error, Reason};
+ {'atomic',[]} ->
+ {error, no_such_user};
+ {'atomic', [Record]} when record(Record, httpd_user) ->
+ {ok, Record#httpd_user{username=UserName}};
+ Other ->
+ {error, no_such_user}
+ end.
+
+list_users(DirData) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ case mnesia:transaction(fun() ->
+ mnesia:match_object({httpd_user,
+ {'_',Addr,Port,Dir},'_','_'})
+ end) of
+ {aborted,Reason} ->
+ {error,Reason};
+ {'atomic',Users} ->
+ {ok,
+ lists:foldr(fun({httpd_user, {UserName, AnyAddr, AnyPort, AnyDir},
+ Password, Data}, Acc) ->
+ [UserName|Acc]
+ end,
+ [], Users)}
+ end.
+
+delete_user(DirData, UserName) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ case mnesia:transaction(fun() ->
+ mnesia:delete({httpd_user,
+ {UserName,Addr,Port,Dir}})
+ end) of
+ {aborted,Reason} ->
+ {error,Reason};
+ _ ->
+ true
+ end.
+
+%%
+%% Storage of groups in the mnesia table:
+%% Multiple instances of {#httpd_group, User}
+%%
+
+add_group_member(DirData, GroupName, User) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ Group=#httpd_group{name={GroupName, Addr, Port, Dir}, userlist=User},
+ case mnesia:transaction(fun() -> mnesia:write(Group) end) of
+ {aborted,Reason} ->
+ {error,Reason};
+ _ ->
+ true
+ end.
+
+list_group_members(DirData, GroupName) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ case mnesia:transaction(fun() ->
+ mnesia:read({httpd_group,
+ {GroupName,Addr,Port,Dir}})
+ end) of
+ {aborted, Reason} ->
+ {error,Reason};
+ {'atomic', Members} ->
+ {ok,[UserName || {httpd_group,{AnyGroupName,AnyAddr,AnyPort,AnyDir},UserName} <- Members,
+ AnyGroupName == GroupName, AnyAddr == Addr,
+ AnyPort == Port, AnyDir == Dir]}
+ end.
+
+list_groups(DirData) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ case mnesia:transaction(fun() ->
+ mnesia:match_object({httpd_group,
+ {'_',Addr,Port,Dir},'_'})
+ end) of
+ {aborted, Reason} ->
+ {error, Reason};
+ {'atomic', Groups} ->
+ GroupNames=
+ [GroupName || {httpd_group,{GroupName,AnyAddr,AnyPort,AnyDir}, UserName} <- Groups,
+			      AnyAddr == Addr, AnyPort == Port, AnyDir == Dir],
+ {ok, httpd_util:uniq(lists:sort(GroupNames))}
+ end.
+
+delete_group_member(DirData, GroupName, UserName) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ Group = #httpd_group{name={GroupName, Addr, Port, Dir}, userlist=UserName},
+ case mnesia:transaction(fun() -> mnesia:delete_object(Group) end) of
+ {aborted,Reason} ->
+ {error,Reason};
+ _ ->
+ true
+ end.
+
+%% THIS IS WRONG (?) !
+%% Should first match out all httpd_group records for this group and then
+%% do mnesia:delete on those. Or ?
+
+delete_group(DirData, GroupName) ->
+ {Addr, Port, Dir} = lookup_common(DirData),
+ case mnesia:transaction(fun() ->
+ mnesia:delete({httpd_group,
+ {GroupName,Addr,Port,Dir}})
+ end) of
+ {aborted,Reason} ->
+ {error,Reason};
+ _ ->
+ true
+ end.
+
+%% Utility functions.
+
+lookup_common(DirData) ->
+ Dir = httpd_util:key1search(DirData, path),
+ Port = httpd_util:key1search(DirData, port),
+ Addr = httpd_util:key1search(DirData, bind_address),
+ {Addr, Port, Dir}.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_plain.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_plain.erl
new file mode 100644
index 0000000000..75cc60f288
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_plain.erl
@@ -0,0 +1,338 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_auth_plain.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_auth_plain).
+
+-include("httpd.hrl").
+-include("mod_auth.hrl").
+
+-define(VMODULE,"AUTH_PLAIN").
+-include("httpd_verbosity.hrl").
+
+
+%% Internal API
+-export([store_directory_data/2]).
+
+
+-export([get_user/2,
+ list_group_members/2,
+ add_user/2,
+ add_group_member/3,
+ list_users/1,
+ delete_user/2,
+ list_groups/1,
+ delete_group_member/3,
+ delete_group/2,
+ remove/1]).
+
+%%
+%% API
+%%
+
+%%
+%% Storage format of users in the ets table:
+%% {UserName, Password, UserData}
+%%
+
+add_user(DirData, #httpd_user{username = User} = UStruct) ->
+ ?vtrace("add_user -> entry with:"
+ "~n User: ~p",[User]),
+ PWDB = httpd_util:key1search(DirData, auth_user_file),
+ Record = {User,
+ UStruct#httpd_user.password,
+ UStruct#httpd_user.user_data},
+ case ets:lookup(PWDB, User) of
+ [{User, _SomePassword, _SomeData}] ->
+ {error, user_already_in_db};
+ _ ->
+ ets:insert(PWDB, Record),
+ true
+ end.
+
+get_user(DirData, User) ->
+ ?vtrace("get_user -> entry with:"
+ "~n User: ~p",[User]),
+ PWDB = httpd_util:key1search(DirData, auth_user_file),
+ case ets:lookup(PWDB, User) of
+ [{User, PassWd, Data}] ->
+ {ok, #httpd_user{username=User, password=PassWd, user_data=Data}};
+ _ ->
+ {error, no_such_user}
+ end.
+
+list_users(DirData) ->
+ PWDB = httpd_util:key1search(DirData, auth_user_file),
+ case ets:match(PWDB, '$1') of
+ Records when list(Records) ->
+ {ok, lists:foldr(fun({User,PassWd,Data}, A) -> [User|A] end,
+ [], lists:flatten(Records))};
+ O ->
+ {ok, []}
+ end.
+
+delete_user(DirData, UserName) ->
+ ?vtrace("delete_user -> entry with:"
+ "~n UserName: ~p",[UserName]),
+ PWDB = httpd_util:key1search(DirData, auth_user_file),
+ case ets:lookup(PWDB, UserName) of
+ [{UserName, SomePassword, SomeData}] ->
+ ets:delete(PWDB, UserName),
+ case list_groups(DirData) of
+ {ok,Groups}->
+ lists:foreach(fun(Group) ->
+ delete_group_member(DirData, Group, UserName)
+ end,Groups),
+ true;
+ _->
+ true
+ end;
+ _ ->
+ {error, no_such_user}
+ end.
+
+%%
+%% Storage of groups in the ets table:
+%% {Group, UserList} where UserList is a list of strings.
+%%
+
+add_group_member(DirData, Group, UserName) ->
+ ?DEBUG("add_group_members -> ~n"
+ " Group: ~p~n"
+ " UserName: ~p",[Group,UserName]),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ case ets:lookup(GDB, Group) of
+ [{Group, Users}] ->
+ case lists:member(UserName, Users) of
+ true ->
+ ?DEBUG("add_group_members -> already member in group",[]),
+ true;
+ false ->
+ ?DEBUG("add_group_members -> add",[]),
+ ets:insert(GDB, {Group, [UserName|Users]}),
+ true
+ end;
+ [] ->
+	    ?DEBUG("add_group_members -> create group",[]),
+ ets:insert(GDB, {Group, [UserName]}),
+ true;
+ Other ->
+ ?ERROR("add_group_members -> Other: ~p",[Other]),
+ {error, Other}
+ end.
+
+list_group_members(DirData, Group) ->
+ ?DEBUG("list_group_members -> Group: ~p",[Group]),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ case ets:lookup(GDB, Group) of
+ [{Group, Users}] ->
+ ?DEBUG("list_group_members -> Users: ~p",[Users]),
+ {ok, Users};
+ _ ->
+ {error, no_such_group}
+ end.
+
+list_groups(DirData) ->
+ ?DEBUG("list_groups -> entry",[]),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ case ets:match(GDB, '$1') of
+ [] ->
+ ?DEBUG("list_groups -> []",[]),
+ {ok, []};
+ Groups0 when list(Groups0) ->
+ ?DEBUG("list_groups -> Groups0: ~p",[Groups0]),
+ {ok, httpd_util:uniq(lists:foldr(fun({G, U}, A) -> [G|A] end,
+ [], lists:flatten(Groups0)))};
+ _ ->
+ {ok, []}
+ end.
+
+delete_group_member(DirData, Group, User) ->
+    ?DEBUG("delete_group_member -> ~n"
+ " Group: ~p~n"
+ " User: ~p",[Group,User]),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ UDB = httpd_util:key1search(DirData, auth_user_file),
+ case ets:lookup(GDB, Group) of
+ [{Group, Users}] when list(Users) ->
+ case lists:member(User, Users) of
+ true ->
+ ?DEBUG("list_group_members -> deleted from group",[]),
+ ets:delete(GDB, Group),
+ ets:insert(GDB, {Group, lists:delete(User, Users)}),
+ true;
+ false ->
+ ?DEBUG("list_group_members -> not member",[]),
+ {error, no_such_group_member}
+ end;
+ _ ->
+ ?ERROR("list_group_members -> no such group",[]),
+ {error, no_such_group}
+ end.
+
+delete_group(DirData, Group) ->
+ ?DEBUG("list_group_members -> Group: ~p",[Group]),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ case ets:lookup(GDB, Group) of
+ [{Group, Users}] ->
+ ?DEBUG("list_group_members -> delete",[]),
+ ets:delete(GDB, Group),
+ true;
+ _ ->
+ ?ERROR("delete_group -> no such group",[]),
+ {error, no_such_group}
+ end.
+
+
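+%% store_directory_data/2
+%%
+%% Load the configured user and group files into ets tables and replace
+%% the file names in DirData with the resulting table identifiers, so
+%% that the access functions above operate on the tables.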
+store_directory_data(Directory, DirData) ->
+ PWFile = httpd_util:key1search(DirData, auth_user_file),
+ GroupFile = httpd_util:key1search(DirData, auth_group_file),
+ case load_passwd(PWFile) of
+ {ok, PWDB} ->
+ case load_group(GroupFile) of
+ {ok, GRDB} ->
+ %% Address and port are included in the file names...
+ Addr = httpd_util:key1search(DirData, bind_address),
+ Port = httpd_util:key1search(DirData, port),
+ {ok, PasswdDB} = store_passwd(Addr,Port,PWDB),
+ {ok, GroupDB} = store_group(Addr,Port,GRDB),
+ NDD1 = lists:keyreplace(auth_user_file, 1, DirData,
+ {auth_user_file, PasswdDB}),
+ NDD2 = lists:keyreplace(auth_group_file, 1, NDD1,
+ {auth_group_file, GroupDB}),
+ {ok, NDD2};
+ Err ->
+ ?ERROR("failed storing directory data: "
+ "load group error: ~p",[Err]),
+ {error, Err}
+ end;
+ Err2 ->
+ ?ERROR("failed storing directory data: "
+ "load passwd error: ~p",[Err2]),
+ {error, Err2}
+ end.
+
+
+
+%% load_passwd
+
+load_passwd(AuthUserFile) ->
+ case file:open(AuthUserFile, [read]) of
+ {ok,Stream} ->
+ parse_passwd(Stream, []);
+ {error, _} ->
+ {error, ?NICE("Can't open "++AuthUserFile)}
+ end.
+
+parse_passwd(Stream,PasswdList) ->
+ Line =
+ case io:get_line(Stream, '') of
+ eof ->
+ eof;
+ String ->
+ httpd_conf:clean(String)
+ end,
+ parse_passwd(Stream, PasswdList, Line).
+
+parse_passwd(Stream, PasswdList, eof) ->
+ file:close(Stream),
+ {ok, PasswdList};
+parse_passwd(Stream, PasswdList, "") ->
+ parse_passwd(Stream, PasswdList);
+parse_passwd(Stream, PasswdList, [$#|_]) ->
+ parse_passwd(Stream, PasswdList);
+parse_passwd(Stream, PasswdList, Line) ->
+ case regexp:split(Line,":") of
+ {ok, [User,Password]} ->
+ parse_passwd(Stream, [{User,Password, []}|PasswdList]);
+ {ok,_} ->
+ {error, ?NICE(Line)}
+ end.
+
+%% load_group
+
+load_group(AuthGroupFile) ->
+ case file:open(AuthGroupFile, [read]) of
+ {ok, Stream} ->
+ parse_group(Stream,[]);
+ {error, _} ->
+ {error, ?NICE("Can't open "++AuthGroupFile)}
+ end.
+
+parse_group(Stream, GroupList) ->
+ Line=
+ case io:get_line(Stream,'') of
+ eof ->
+ eof;
+ String ->
+ httpd_conf:clean(String)
+ end,
+ parse_group(Stream, GroupList, Line).
+
+parse_group(Stream, GroupList, eof) ->
+ file:close(Stream),
+ {ok, GroupList};
+parse_group(Stream, GroupList, "") ->
+ parse_group(Stream, GroupList);
+parse_group(Stream, GroupList, [$#|_]) ->
+ parse_group(Stream, GroupList);
+parse_group(Stream, GroupList, Line) ->
+ case regexp:split(Line, ":") of
+ {ok, [Group,Users]} ->
+ {ok, UserList} = regexp:split(Users," "),
+ parse_group(Stream, [{Group,UserList}|GroupList]);
+ {ok, _} ->
+ {error, ?NICE(Line)}
+ end.
+
+
+%% store_passwd
+
+store_passwd(Addr,Port,PasswdList) ->
+ Name = httpd_util:make_name("httpd_passwd",Addr,Port),
+ PasswdDB = ets:new(Name, [set, public]),
+ store_passwd(PasswdDB, PasswdList).
+
+store_passwd(PasswdDB, []) ->
+ {ok, PasswdDB};
+store_passwd(PasswdDB, [User|Rest]) ->
+ ets:insert(PasswdDB, User),
+ store_passwd(PasswdDB, Rest).
+
+%% store_group
+
+store_group(Addr,Port,GroupList) ->
+ Name = httpd_util:make_name("httpd_group",Addr,Port),
+ GroupDB = ets:new(Name, [set, public]),
+ store_group(GroupDB, GroupList).
+
+
+store_group(GroupDB,[]) ->
+ {ok, GroupDB};
+store_group(GroupDB,[User|Rest]) ->
+ ets:insert(GroupDB, User),
+ store_group(GroupDB, Rest).
+
+
+%% remove/1
+%%
+%% Deletes ets tables used by this auth mod.
+%%
+remove(DirData) ->
+ PWDB = httpd_util:key1search(DirData, auth_user_file),
+ GDB = httpd_util:key1search(DirData, auth_group_file),
+ ets:delete(PWDB),
+ ets:delete(GDB).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_server.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_server.erl
new file mode 100644
index 0000000000..59402ac169
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_server.erl
@@ -0,0 +1,422 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_auth_server.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+
+-module(mod_auth_server).
+
+-include("httpd.hrl").
+%% -include("mod_auth.hrl").
+-include("httpd_verbosity.hrl").
+
+-behaviour(gen_server).
+
+
+%% mod_auth exports
+-export([start/2, stop/2,
+ add_password/4, update_password/5,
+ add_user/5, delete_user/5, get_user/5, list_users/4,
+ add_group_member/6, delete_group_member/6, list_group_members/5,
+ delete_group/5, list_groups/4]).
+
+%% Management exports
+-export([verbosity/3]).
+
+%% gen_server exports
+-export([start_link/3,
+ init/1,
+ handle_call/3, handle_cast/2, handle_info/2,
+ terminate/2, code_change/3]).
+
+
+-record(state,{tab}).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% External API %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% start_link/3
+%%
+%% NOTE: This is called by httpd_misc_sup when the process is started
+%%
+start_link(Addr, Port, Verbosity)->
+ ?vlog("start_link -> entry with"
+ "~n Addr: ~p"
+ "~n Port: ~p", [Addr, Port]),
+ Name = make_name(Addr, Port),
+ gen_server:start_link({local, Name}, ?MODULE, [Verbosity],
+ [{timeout, infinity}]).
+
+
+%% start/2
+
+start(Addr, Port)->
+ ?vtrace("start -> entry with"
+ "~n Addr: ~p"
+ "~n Port: ~p", [Addr, Port]),
+ Name = make_name(Addr, Port),
+ case whereis(Name) of
+ undefined ->
+ Verbosity = get(auth_verbosity),
+ case (catch httpd_misc_sup:start_auth_server(Addr, Port,
+ Verbosity)) of
+ {ok, Pid} ->
+ put(auth_server, Pid),
+ ok;
+ {error, Reason} ->
+ exit({failed_start_auth_server, Reason});
+ Error ->
+ exit({failed_start_auth_server, Error})
+ end;
+ _ -> %% Already started...
+ ok
+ end.
+
+
+%% stop/2
+
+stop(Addr, Port)->
+ ?vtrace("stop -> entry with"
+ "~n Addr: ~p"
+ "~n Port: ~p", [Addr, Port]),
+ Name = make_name(Addr, Port),
+ case whereis(Name) of
+ undefined -> %% Already stopped
+ ok;
+ _ ->
+ (catch httpd_misc_sup:stop_auth_server(Addr, Port))
+ end.
+
+
+%% verbosity/3
+
+verbosity(Addr, Port, Verbosity) ->
+ Name = make_name(Addr, Port),
+ Req = {verbosity, Verbosity},
+ call(Name, Req).
+
+
+%% add_password/4
+
+add_password(Addr, Port, Dir, Password)->
+ Name = make_name(Addr, Port),
+ Req = {add_password, Dir, Password},
+ call(Name, Req).
+
+
+%% update_password/5
+
+update_password(Addr, Port, Dir, Old, New) when list(New) ->
+ Name = make_name(Addr, Port),
+ Req = {update_password, Dir, Old, New},
+ call(Name, Req).
+
+
+%% add_user/5
+
+add_user(Addr, Port, Dir, User, Password) ->
+ Name = make_name(Addr, Port),
+ Req = {add_user, Addr, Port, Dir, User, Password},
+ call(Name, Req).
+
+
+%% delete_user/5
+
+delete_user(Addr, Port, Dir, UserName, Password) ->
+ Name = make_name(Addr, Port),
+ Req = {delete_user, Addr, Port, Dir, UserName, Password},
+ call(Name, Req).
+
+
+%% get_user/5
+
+get_user(Addr, Port, Dir, UserName, Password) ->
+ Name = make_name(Addr, Port),
+ Req = {get_user, Addr, Port, Dir, UserName, Password},
+ call(Name, Req).
+
+
+%% list_users/4
+
+list_users(Addr, Port, Dir, Password) ->
+ Name = make_name(Addr,Port),
+ Req = {list_users, Addr, Port, Dir, Password},
+ call(Name, Req).
+
+
+%% add_group_member/6
+
+add_group_member(Addr, Port, Dir, GroupName, UserName, Password) ->
+ Name = make_name(Addr,Port),
+ Req = {add_group_member, Addr, Port, Dir, GroupName, UserName, Password},
+ call(Name, Req).
+
+
+%% delete_group_member/6
+
+delete_group_member(Addr, Port, Dir, GroupName, UserName, Password) ->
+ Name = make_name(Addr,Port),
+ Req = {delete_group_member, Addr, Port, Dir, GroupName, UserName, Password},
+ call(Name, Req).
+
+
+%% list_group_members/5
+
+list_group_members(Addr, Port, Dir, Group, Password) ->
+ Name = make_name(Addr, Port),
+ Req = {list_group_members, Addr, Port, Dir, Group, Password},
+ call(Name, Req).
+
+
+%% delete_group/5
+
+delete_group(Addr, Port, Dir, GroupName, Password) ->
+ Name = make_name(Addr, Port),
+ Req = {delete_group, Addr, Port, Dir, GroupName, Password},
+ call(Name, Req).
+
+
+%% list_groups/4
+
+list_groups(Addr, Port, Dir, Password) ->
+ Name = make_name(Addr, Port),
+ Req = {list_groups, Addr, Port, Dir, Password},
+ call(Name, Req).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% Server call-back functions %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% init
+
+init([undefined]) ->
+ init([?default_verbosity]);
+
+init([Verbosity]) ->
+ put(sname,auth),
+ put(verbosity,Verbosity),
+ ?vlog("starting",[]),
+ {ok,#state{tab = ets:new(auth_pwd,[set,protected])}}.
+
+
+%% handle_call
+
+%% Add a user
+handle_call({add_user, Addr, Port, Dir, User, AuthPwd}, _From, State) ->
+ Reply = api_call(Addr, Port, Dir, add_user, User, AuthPwd, State),
+ {reply, Reply, State};
+
+%% Get data about a user
+handle_call({get_user, Addr, Port, Dir, User, AuthPwd}, _From, State) ->
+ Reply = api_call(Addr, Port, Dir, get_user, [User], AuthPwd, State),
+ {reply, Reply, State};
+
+%% Add a group member
+handle_call({add_group_member, Addr, Port, Dir, Group, User, AuthPwd},
+ _From, State) ->
+ Reply = api_call(Addr, Port, Dir, add_group_member, [Group, User],
+ AuthPwd, State),
+ {reply, Reply, State};
+
+%% Delete a group member
+handle_call({delete_group_member, Addr, Port, Dir, Group, User, AuthPwd},
+ _From, State)->
+ Reply = api_call(Addr, Port, Dir, delete_group_member, [Group, User],
+ AuthPwd, State),
+ {reply, Reply, State};
+
+%% List all users
+handle_call({list_users, Addr, Port, Dir, AuthPwd}, _From, State)->
+ Reply = api_call(Addr, Port, Dir, list_users, [], AuthPwd, State),
+ {reply, Reply, State};
+
+%% Delete a user
+handle_call({delete_user, Addr, Port, Dir, User, AuthPwd}, _From, State)->
+ Reply = api_call(Addr, Port, Dir, delete_user, [User], AuthPwd, State),
+ {reply, Reply, State};
+
+%% Delete a group
+handle_call({delete_group, Addr, Port, Dir, Group, AuthPwd}, _From, State)->
+ Reply = api_call(Addr, Port, Dir, delete_group, [Group], AuthPwd, State),
+ {reply, Reply, State};
+
+%% List the current groups
+handle_call({list_groups, Addr, Port, Dir, AuthPwd}, _From, State)->
+ Reply = api_call(Addr, Port, Dir, list_groups, [], AuthPwd, State),
+ {reply, Reply, State};
+
+%% List the members of the given group
+handle_call({list_group_members, Addr, Port, Dir, Group, AuthPwd},
+ _From, State)->
+ Reply = api_call(Addr, Port, Dir, list_group_members, [Group],
+ AuthPwd, State),
+ {reply, Reply, State};
+
+
+%% Add password for a directory
+handle_call({add_password, Dir, Password}, _From, State)->
+ Reply = do_add_password(Dir, Password, State),
+ {reply, Reply, State};
+
+
+%% Update the password for a directory
+
+handle_call({update_password, Dir, Old, New},_From,State)->
+ Reply =
+ case getPassword(State, Dir) of
+ OldPwd when binary(OldPwd)->
+ case erlang:md5(Old) of
+ OldPwd ->
+ %% The old password is right =>
+ %% update the password to the new
+ do_update_password(Dir,New,State),
+ ok;
+ _->
+ {error, error_new}
+ end;
+ _->
+ {error, error_old}
+ end,
+ {reply, Reply, State};
+
+handle_call(stop, _From, State)->
+ {stop, normal, State};
+
+handle_call({verbosity,Verbosity},_From,State)->
+ OldVerbosity = put(verbosity,Verbosity),
+ ?vlog("set verbosity: ~p -> ~p",[Verbosity,OldVerbosity]),
+ {reply,OldVerbosity,State}.
+
+handle_info(Info,State)->
+ {noreply,State}.
+
+handle_cast(Request,State)->
+ {noreply,State}.
+
+
+terminate(Reason,State) ->
+ ets:delete(State#state.tab),
+ ok.
+
+
+%% code_change({down, ToVsn}, State, Extra)
+%%
+code_change({down, _}, #state{tab = Tab}, downgrade_to_2_6_0) ->
+ ?vlog("downgrade to 2.6.0", []),
+ {ok, {state, Tab, undefined}};
+
+
+%% code_change(FromVsn, State, Extra)
+%%
+code_change(_, {state, Tab, _}, upgrade_from_2_6_0) ->
+ ?vlog("upgrade from 2.6.0", []),
+ {ok, #state{tab = Tab}}.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% The functions that actually change the data in the database %%
+%% of users for the different directories %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% API gateway
+
+api_call(Addr, Port, Dir, Func, Args,Password,State) ->
+ case controlPassword(Password,State,Dir) of
+ ok->
+ ConfigName = httpd_util:make_name("httpd_conf",Addr,Port),
+ case ets:match_object(ConfigName, {directory, Dir, '$1'}) of
+ [{directory, Dir, DirData}] ->
+ AuthMod = auth_mod_name(DirData),
+ ?DEBUG("api_call -> call ~p:~p",[AuthMod,Func]),
+ Ret = (catch apply(AuthMod, Func, [DirData|Args])),
+ ?DEBUG("api_call -> Ret: ~p",[ret]),
+ Ret;
+ O ->
+ ?DEBUG("api_call -> O: ~p",[O]),
+ {error, no_such_directory}
+ end;
+ bad_password ->
+ {error,bad_password}
+ end.
+
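+%% controlPassword/3
+%%
+%% Check the given password against the stored md5 digest for the
+%% directory; the literal "DummyPassword" is always rejected.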
+controlPassword(Password,State,Dir)when Password=:="DummyPassword"->
+ bad_password;
+
+controlPassword(Password,State,Dir)->
+ case getPassword(State,Dir) of
+ Pwd when binary(Pwd)->
+ case erlang:md5(Password) of
+ Pwd ->
+ ok;
+ _->
+ bad_password
+ end;
+ _ ->
+ bad_password
+ end.
+
+
+getPassword(State,Dir)->
+ case lookup(State#state.tab, Dir) of
+ [{_,Pwd}]->
+ Pwd;
+ _ ->
+ {error,bad_password}
+ end.
+
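+%% Directory passwords are stored in the table as {Dir, erlang:md5(Password)}.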
+do_update_password(Dir, New, State) ->
+ ets:insert(State#state.tab, {Dir, erlang:md5(New)}).
+
+do_add_password(Dir, Password, State) ->
+ case getPassword(State,Dir) of
+ PwdExists when binary(PwdExists) ->
+ {error, dir_protected};
+ {error, _} ->
+ do_update_password(Dir, Password, State)
+ end.
+
+
+auth_mod_name(DirData) ->
+ case httpd_util:key1search(DirData, auth_type, plain) of
+ plain -> mod_auth_plain;
+ mnesia -> mod_auth_mnesia;
+ dets -> mod_auth_dets
+ end.
+
+
+lookup(Db, Key) ->
+ ets:lookup(Db, Key).
+
+
+make_name(Addr,Port) ->
+ httpd_util:make_name("httpd_auth",Addr,Port).
+
+
+call(Name, Req) ->
+ case (catch gen_server:call(Name, Req)) of
+ {'EXIT', Reason} ->
+ {error, Reason};
+ Reply ->
+ Reply
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_browser.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_browser.erl
new file mode 100644
index 0000000000..1153a5fc47
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_browser.erl
@@ -0,0 +1,213 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_browser.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+%% ----------------------------------------------------------------------
+%%
+%% Browsers send a string to the web server
+%% to identify themselves. They are a bit nasty
+%% since the only thing that the specification is really
+%% strict about is that the string shall be short.
+%% Three examples:
+%%
+%% Netscape Mozilla/4.75 [en] (X11; U; SunOS 5.8 sun4u)
+%% IE5 Mozilla/4.0 (compatible; MSIE 5.0; SP1B; SunOS 5.8 sun4u; X11)
+%% Lynx Lynx/2.8.3rel.1 libwww-FM/2.142
+%%
+%% ----------------------------------------------------------------------
+
+-module(mod_browser).
+
+%% Remember that the order of the mozilla browsers is
+%% important, since some browsers include the names of others
+%% to behave as if they were something else.
+-define(MOZILLA_BROWSERS,[{opera,"opera"},{msie,"msie"}]).
+
+
+%% If your operating system is not recognized, add it to this list.
+-define(OPERATIVE_SYSTEMS,[{win3x,["win16","windows 3","windows 16-bit"]},
+ {win95,["win95","windows 95"]},
+ {win98,["win98", "windows 98"]},
+ {winnt,["winnt", "windows nt"]},
+ {win2k,["nt 5"]},
+ {sunos4,["sunos 4"]},
+ {sunos5,["sunos 5"]},
+ {sun,["sunos"]},
+ {aix,["aix"]},
+ {linux,["linux"]},
+ {sco,["sco","unix_sv"]},
+ {freebsd,["freebsd"]},
+ {bsd,["bsd"]}]).
+
+-define(LYNX,lynx).
+-define(MOZILLA,mozilla).
+-define(EMACS,emacs).
+-define(STAROFFICE,soffice).
+-define(MOSAIC,mosaic).
+-define(NETSCAPE,netscape).
+-define(UNKNOWN,unknown).
+
+-include("httpd.hrl").
+
+-export([do/1, test/0, getBrowser/1]).
+
+
+do(Info) ->
+ case httpd_util:key1search(Info#mod.data,status) of
+ {Status_code,PhraseArgs,Reason} ->
+ {proceed,Info#mod.data};
+ undefined ->
+ {proceed,[{'user-agent',getBrowser1(Info)}|Info#mod.data]}
+ end.
+
+getBrowser1(Info) ->
+ PHead=Info#mod.parsed_header,
+ case httpd_util:key1search(PHead,"User-Agent") of
+ undefined->
+ undefined;
+ AgentString ->
+ getBrowser(AgentString)
+ end.
+
+getBrowser(AgentString) ->
+ LAgentString = httpd_util:to_lower(AgentString),
+ case regexp:first_match(LAgentString,"^[^ ]*") of
+ {match,Start,Length} ->
+ Browser=lists:sublist(LAgentString,Start,Length),
+ case browserType(Browser) of
+ {mozilla,Vsn} ->
+ {getMozilla(LAgentString,
+ ?MOZILLA_BROWSERS,{?NETSCAPE,Vsn}),
+ operativeSystem(LAgentString)};
+ AnyBrowser ->
+ {AnyBrowser,operativeSystem(LAgentString)}
+ end;
+ nomatch ->
+ browserType(LAgentString)
+ end.
+
+browserType([$l,$y,$n,$x|Version]) ->
+ {?LYNX,browserVersion(Version)};
+browserType([$m,$o,$z,$i,$l,$l,$a|Version]) ->
+ {?MOZILLA,browserVersion(Version)};
+browserType([$e,$m,$a,$c,$s|Version]) ->
+ {?EMACS,browserVersion(Version)};
+browserType([$s,$t,$a,$r,$o,$f,$f,$i,$c,$e|Version]) ->
+ {?STAROFFICE,browserVersion(Version)};
+browserType([$m,$o,$s,$a,$i,$c|Version]) ->
+ {?MOSAIC,browserVersion(Version)};
+browserType(Unknown)->
+ unknown.
+
+
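+%% Extract a numeric Major.Minor version (as a float) from the string
+%% following the browser name; returns 'unknown' if no digits are found.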
+browserVersion([$/|VsnString]) ->
+ case catch list_to_float(VsnString) of
+ Number when float(Number) ->
+ Number;
+ Whatever ->
+ case string:span(VsnString,"1234567890.") of
+ 0 ->
+ unknown;
+ VLength ->
+ Vsn = string:substr(VsnString,1,VLength),
+ case string:tokens(Vsn,".") of
+ [Number] ->
+ list_to_float(Number++".0");
+ [Major,Minor|_MinorMinor] ->
+ list_to_float(Major++"."++Minor)
+ end
+ end
+ end;
+browserVersion(VsnString) ->
+ browserVersion([$/|VsnString]).
+
+operativeSystem(OpString) ->
+ operativeSystem(OpString, ?OPERATIVE_SYSTEMS).
+
+operativeSystem(OpString,[]) ->
+ unknown;
+operativeSystem(OpString,[{RetVal,RegExps}|Rest]) ->
+ case controlOperativeSystem(OpString,RegExps) of
+ true->
+ RetVal;
+ _ ->
+ operativeSystem(OpString,Rest)
+ end.
+
+controlOperativeSystem(OpString,[]) ->
+ false;
+controlOperativeSystem(OpString,[Regexp|Regexps]) ->
+ case regexp:match(OpString,Regexp) of
+ {match,_,_}->
+ true;
+ nomatch->
+ controlOperativeSystem(OpString,Regexps)
+ end.
+
+
+%% OK, this is ugly, but that's the only way since
+%% not all browsers conform to the name/vsn standard.
+%% First we check the agent string against the regexps for
+%% the browsers that are not the default mozilla browser
+%% (opera, msie); if none of them matches, it is assumed to
+%% be the default, i.e. netscape.
+
+getMozilla(AgentString,[],Default) ->
+ Default;
+getMozilla(AgentString,[{Agent,AgentRegExp}|Rest],Default) ->
+ case regexp:match(AgentString,AgentRegExp) of
+ {match,_,_} ->
+ {Agent,getVersion(AgentString,AgentRegExp)};
+ nomatch ->
+ getMozilla(AgentString,Rest,Default)
+ end.
+
+getVersion(AgentString,AgentRegExp) ->
+ case regexp:match(AgentString,AgentRegExp++"[0-9\.\ ]*") of
+ {match,Start,Length} when length(AgentRegExp) < Length ->
+ %% Ok we got the number split it out
+ RealStart=Start+length(AgentRegExp),
+ RealLength=Length-length(AgentRegExp),
+ VsnString=string:substr(AgentString,RealStart,RealLength),
+ case string:strip(VsnString,both,$\ ) of
+ [] ->
+ unknown;
+ Vsn ->
+ case string:tokens(Vsn,".") of
+ [Number]->
+ list_to_float(Number++".0");
+ [Major,Minor|_MinorMinor]->
+ list_to_float(Major++"."++Minor)
+ end
+ end;
+ nomatch ->
+ unknown
+ end.
+
+
+test()->
+ io:format("~n--------------------------------------------------------~n"),
+ Res1=getBrowser("Mozilla/4.75 [en] (X11; U; SunOS 5.8 sun4u)"),
+ io:format("~p",[Res1]),
+ io:format("~n--------------------------------------------------------~n"),
+ io:format("~n--------------------------------------------------------~n"),
+ Res2=getBrowser("Mozilla/4.0 (compatible; MSIE 5.0; SP1B; SunOS 5.8 sun4u; X11)"),
+ io:format("~p",[Res2]),
+ io:format("~n--------------------------------------------------------~n"),
+ io:format("~n--------------------------------------------------------~n"),
+ Res3=getBrowser("Lynx/2.8.3rel.1 libwww-FM/2.142"),
+ io:format("~p",[Res3]),
+ io:format("~n--------------------------------------------------------~n").
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_cgi.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_cgi.erl
new file mode 100644
index 0000000000..d3f67eb77a
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_cgi.erl
@@ -0,0 +1,692 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_cgi.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_cgi).
+-export([do/1,env/3,status_code/1,load/2]).
+
+%% Exports for the interface that sends chunked data
+%% to http/1.1 clients and full responses to http/1.0 clients
+-export([send/5,final_send/4, update_status_code/2,get_new_size/2]).
+-include("httpd.hrl").
+
+-define(VMODULE,"CGI").
+-include("httpd_verbosity.hrl").
+
+-define(GATEWAY_INTERFACE,"CGI/1.1").
+-define(DEFAULT_CGI_TIMEOUT,15000).
+
+%% do
+
+do(Info) ->
+ ?vtrace("do",[]),
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode, PhraseArgs, Reason} ->
+ {proceed, Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ ?vtrace("do -> no status code has been generated", []),
+ case httpd_util:key1search(Info#mod.data,response) of
+ %% No response has been generated!
+ undefined ->
+ ?vtrace("do -> no response has been generated", []),
+ RequestURI =
+ case httpd_util:key1search(Info#mod.data,
+ new_request_uri) of
+ undefined ->
+ Info#mod.request_uri;
+ Value ->
+ Value
+ end,
+ ?vtrace("do -> RequestURI: ~p", [RequestURI]),
+ ScriptAliases =
+ httpd_util:multi_lookup(Info#mod.config_db,
+ script_alias),
+ ?vtrace("do -> ScriptAliases: ~p", [ScriptAliases]),
+ case mod_alias:real_script_name(Info#mod.config_db,
+ RequestURI,
+ ScriptAliases) of
+ {Script, AfterScript} ->
+ exec_script(Info, Script, AfterScript, RequestURI);
+ not_a_script ->
+ {proceed,Info#mod.data}
+ end;
+ %% A response has been generated or sent!
+ Response ->
+ {proceed,Info#mod.data}
+ end
+ end.
+
+
+%% is_executable(File) ->
+%% ?DEBUG("is_executable -> entry with~n"
+%% " File: ~s",[File]),
+%% Dir = filename:dirname(File),
+%% FileName = filename:basename(File),
+%% is_executable(FileName,Dir).
+%%
+%% is_executable(FileName,Dir) ->
+%% ?DEBUG("is_executable -> entry with~n"
+%% " Dir: ~s~n"
+%% " FileName: ~s",[Dir,FileName]),
+%% case os:find_executable(FileName, Dir) of
+%% false ->
+%% false;
+%% _ ->
+%% true
+%% end.
+
+
+%% -------------------------
+%% Start temporary (hopefully) fix for win32
+%% OTP-3627
+%%
+
+is_executable(File) ->
+ Dir = filename:dirname(File),
+ FileName = filename:basename(File),
+ case os:type() of
+ {win32,_} ->
+ is_win32_executable(Dir,FileName);
+ _ ->
+ is_other_executable(Dir,FileName)
+ end.
+
+
+is_win32_executable(D,F) ->
+ case ends_with(F,[".bat",".exe",".com"]) of
+ false ->
+ %% This is why we can't use 'os:find_executable' directly.
+ %% It assumes that executable files are given without extension.
+ case os:find_executable(F,D) of
+ false ->
+ false;
+ _ ->
+ true
+ end;
+ true ->
+ case file:read_file_info(D ++ "/" ++ F) of
+ {ok,_} ->
+ true;
+ _ ->
+ false
+ end
+ end.
+
+
+is_other_executable(D,F) ->
+ case os:find_executable(F,D) of
+ false ->
+ false;
+ _ ->
+ true
+ end.
+
+
+ends_with(File,[]) ->
+ false;
+ends_with(File,[Ext|Rest]) ->
+ case ends_with1(File,Ext) of
+ true ->
+ true;
+ false ->
+ ends_with(File,Rest)
+ end.
+
+ends_with1(S,E) when length(S) >= length(E) ->
+ case to_lower(string:right(S,length(E))) of
+ E ->
+ true;
+ _ ->
+ false
+ end;
+ends_with1(_S,_E) ->
+ false.
+
+
+to_lower(S) -> to_lower(S,[]).
+
+to_lower([],L) -> lists:reverse(L);
+to_lower([H|T],L) -> to_lower(T,[to_lower1(H)|L]).
+
+to_lower1(C) when C >= $A, C =< $Z ->
+ C + ($a - $A);
+to_lower1(C) ->
+ C.
+
+%%
+%% End fix
+%% ---------------------------------
+
+
+env(VarName, Value) ->
+ {VarName, Value}.
+
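+%% Build the list of CGI environment variables passed to the script.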
+env(Info, Script, AfterScript) ->
+ ?vtrace("env -> entry with"
+ "~n Script: ~p"
+ "~n AfterScript: ~p",
+ [Script, AfterScript]),
+ {_, RemoteAddr} = (Info#mod.init_data)#init_data.peername,
+ ServerName = (Info#mod.init_data)#init_data.resolve,
+ PH = parsed_header(Info#mod.parsed_header),
+ Env =
+ [env("SERVER_SOFTWARE",?SERVER_SOFTWARE),
+ env("SERVER_NAME",ServerName),
+ env("GATEWAY_INTERFACE",?GATEWAY_INTERFACE),
+ env("SERVER_PROTOCOL",?SERVER_PROTOCOL),
+ env("SERVER_PORT",
+ integer_to_list(httpd_util:lookup(Info#mod.config_db,port,80))),
+ env("REQUEST_METHOD",Info#mod.method),
+ env("REMOTE_ADDR",RemoteAddr),
+ env("SCRIPT_NAME",Script)],
+ Env1 =
+ case Info#mod.method of
+ "GET" ->
+ case AfterScript of
+ {[], QueryString} ->
+ [env("QUERY_STRING", QueryString)|Env];
+ {PathInfo, []} ->
+ Aliases = httpd_util:multi_lookup(
+ Info#mod.config_db,alias),
+ {_, PathTranslated, _} =
+ mod_alias:real_name(
+ Info#mod.config_db, PathInfo, Aliases),
+ [Env|
+ [env("PATH_INFO","/"++httpd_util:decode_hex(PathInfo)),
+ env("PATH_TRANSLATED",PathTranslated)]];
+ {PathInfo, QueryString} ->
+ Aliases = httpd_util:multi_lookup(
+ Info#mod.config_db,alias),
+ {_, PathTranslated, _} =
+ mod_alias:real_name(
+ Info#mod.config_db, PathInfo, Aliases),
+ [Env|
+ [env("PATH_INFO",
+ httpd_util:decode_hex(PathInfo)),
+ env("PATH_TRANSLATED",PathTranslated),
+ env("QUERY_STRING", QueryString)]];
+ [] ->
+ Env
+ end;
+ "POST" ->
+ [env("CONTENT_LENGTH",
+ integer_to_list(httpd_util:flatlength(
+ Info#mod.entity_body)))|Env];
+ _ ->
+ Env
+ end,
+ Env2 =
+ case httpd_util:key1search(Info#mod.data,remote_user) of
+ undefined ->
+ Env1;
+ RemoteUser ->
+ [env("REMOTE_USER",RemoteUser)|Env1] %% OTP-4416
+ end,
+ lists:flatten([Env2|PH]).
+
+
+parsed_header(List) ->
+ parsed_header(List, []).
+
+parsed_header([], SoFar) ->
+ SoFar;
+parsed_header([{Name,[Value|R1]}|R2], SoFar) when list(Value)->
+ NewName=lists:map(fun(X) -> if X == $- -> $_; true -> X end end,Name),
+ Env = env("HTTP_"++httpd_util:to_upper(NewName),
+ multi_value([Value|R1])),
+ parsed_header(R2, [Env|SoFar]);
+
+parsed_header([{Name,Value}|Rest], SoFar) ->
+ {ok,NewName,_} = regexp:gsub(Name, "-", "_"),
+ Env=env("HTTP_"++httpd_util:to_upper(NewName),Value),
+ parsed_header(Rest, [Env|SoFar]).
+
+
+multi_value([]) ->
+ [];
+multi_value([Value]) ->
+ Value;
+multi_value([Value|Rest]) ->
+ Value++", "++multi_value(Rest).
+
+
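+%% Run the script as an external port program (in the script's own
+%% directory, with the CGI environment) and relay its output to the client.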
+exec_script(Info, Script, AfterScript, RequestURI) ->
+ ?vdebug("exec_script -> entry with"
+ "~n Script: ~p"
+ "~n AfterScript: ~p",
+ [Script,AfterScript]),
+ exec_script(is_executable(Script),Info,Script,AfterScript,RequestURI).
+
+exec_script(true, Info, Script, AfterScript, RequestURI) ->
+ ?vtrace("exec_script -> entry when script is executable",[]),
+ process_flag(trap_exit,true),
+ Dir = filename:dirname(Script),
+ [Script_Name|_] = string:tokens(RequestURI, "?"),
+ Env = env(Info, Script_Name, AfterScript),
+ Port = (catch open_port({spawn,Script},[stream,{cd, Dir},{env, Env}])),
+ ?vtrace("exec_script -> Port: ~w",[Port]),
+ case Port of
+ P when port(P) ->
+ %% Send entity_body to port.
+ Res = case Info#mod.entity_body of
+ [] ->
+ true;
+ EntityBody ->
+ (catch port_command(Port, EntityBody))
+ end,
+ case Res of
+ {'EXIT',Reason} ->
+ ?vlog("port send failed:"
+ "~n Port: ~p"
+ "~n URI: ~p"
+ "~n Reason: ~p",
+ [Port,Info#mod.request_uri,Reason]),
+ exit({open_cmd_failed,Reason,
+ [{mod,?MODULE},{port,Port},
+ {uri,Info#mod.request_uri},
+ {script,Script},{env,Env},{dir,Dir},
+ {ebody_size,sz(Info#mod.entity_body)}]});
+ true ->
+ proxy(Info, Port)
+ end;
+ {'EXIT',Reason} ->
+ ?vlog("open port failed: exit"
+ "~n URI: ~p"
+ "~n Reason: ~p",
+ [Info#mod.request_uri,Reason]),
+ exit({open_port_failed,Reason,
+ [{mod,?MODULE},{uri,Info#mod.request_uri},{script,Script},
+ {env,Env},{dir,Dir}]});
+ O ->
+ ?vlog("open port failed: unknown result"
+ "~n URI: ~p"
+ "~n O: ~p",
+ [Info#mod.request_uri,O]),
+ exit({open_port_failed,O,
+ [{mod,?MODULE},{uri,Info#mod.request_uri},{script,Script},
+ {env,Env},{dir,Dir}]})
+ end;
+
+exec_script(false,Info,Script,_AfterScript,_RequestURI) ->
+ ?vlog("script ~s not executable",[Script]),
+ {proceed,
+ [{status,
+ {404,Info#mod.request_uri,
+ ?NICE("You don't have permission to execute " ++
+ Info#mod.request_uri ++ " on this server")}}|
+ Info#mod.data]}.
+
+
+
+%%
+%% Socket <-> Port communication
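+%%
+%% The proxy loop relays data from the script port to the client socket
+%% until the port closes or the cgi timeout expires.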
+%%
+
+proxy(#mod{config_db = ConfigDb} = Info, Port) ->
+ Timeout = httpd_util:lookup(ConfigDb, cgi_timeout, ?DEFAULT_CGI_TIMEOUT),
+ proxy(Info, Port, 0, undefined,[], Timeout).
+
+proxy(Info, Port, Size, StatusCode, AccResponse, Timeout) ->
+ ?vdebug("proxy -> entry with"
+ "~n Size: ~p"
+ "~n StatusCode ~p"
+ "~n Timeout: ~p",
+ [Size, StatusCode, Timeout]),
+ receive
+ {Port, {data, Response}} when port(Port) ->
+ ?vtrace("proxy -> got some data from the port",[]),
+
+ NewStatusCode = update_status_code(StatusCode, Response),
+
+ ?vtrace("proxy -> NewStatusCode: ~p",[NewStatusCode]),
+ case send(Info, NewStatusCode, Response, Size, AccResponse) of
+ socket_closed ->
+ ?vtrace("proxy -> socket closed: kill port",[]),
+ (catch port_close(Port)), % KILL the port !!!!
+ process_flag(trap_exit,false),
+ {proceed,
+ [{response,{already_sent,200,Size}}|Info#mod.data]};
+
+ head_sent ->
+ ?vtrace("proxy -> head sent: kill port",[]),
+ (catch port_close(Port)), % KILL the port !!!!
+ process_flag(trap_exit,false),
+ {proceed,
+ [{response,{already_sent,200,Size}}|Info#mod.data]};
+
+ {http_response, NewAccResponse} ->
+ ?vtrace("proxy -> head response: continue",[]),
+ NewSize = get_new_size(Size, Response),
+ proxy(Info, Port, NewSize, NewStatusCode,
+ NewAccResponse, Timeout);
+
+ _ ->
+ ?vtrace("proxy -> continue",[]),
+ %% The data is sent and the socket is not closed, continue
+ NewSize = get_new_size(Size, Response),
+ proxy(Info, Port, NewSize, NewStatusCode,
+ "nonempty", Timeout)
+ end;
+
+ {'EXIT', Port, normal} when port(Port) ->
+ ?vtrace("proxy -> exit signal from port: normal",[]),
+ NewStatusCode = update_status_code(StatusCode,AccResponse),
+ final_send(Info,NewStatusCode,Size,AccResponse),
+ process_flag(trap_exit,false),
+ {proceed, [{response,{already_sent,200,Size}}|Info#mod.data]};
+
+ {'EXIT', Port, Reason} when port(Port) ->
+ ?vtrace("proxy -> exit signal from port: ~p",[Reason]),
+ process_flag(trap_exit, false),
+ {proceed, [{status,{400,none,reason(Reason)}}|Info#mod.data]};
+
+ {'EXIT', Pid, Reason} when pid(Pid) ->
+ %% This is the case where a linked process has died.
+ %% It would be nice to respond with a server error,
+ %% but the header has already been sent.
+ ?vtrace("proxy -> exit signal from ~p: ~p",[Pid, Reason]),
+ proxy(Info, Port, Size, StatusCode, AccResponse, Timeout);
+
+ %% This should not happen
+ WhatEver ->
+ ?vinfo("proxy -> received garbage: ~n~p", [WhatEver]),
+ NewStatusCode = update_status_code(StatusCode, AccResponse),
+ final_send(Info, NewStatusCode, Size, AccResponse),
+ process_flag(trap_exit, false),
+ {proceed, [{response,{already_sent,200,Size}}|Info#mod.data]}
+
+ after Timeout ->
+ ?vlog("proxy -> timeout",[]),
+ (catch port_close(Port)), % KILL the port !!!!
+ httpd_socket:close(Info#mod.socket_type, Info#mod.socket),
+ process_flag(trap_exit,false),
+ {proceed,[{response,{already_sent,200,Size}}|Info#mod.data]}
+ end.
+
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% The functions that handles the sending of the data to the client %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%%----------------------------------------------------------------------
+%% Send the header the first time, when the size of the body is zero
+%%----------------------------------------------------------------------
+
+send(#mod{method = "HEAD"} = Info, StatusCode, Response, 0, []) ->
+ first_handle_head_request(Info, StatusCode, Response);
+send(Info, StatusCode, Response, 0, []) ->
+ first_handle_other_request(Info, StatusCode, Response);
+
+%%----------------------------------------------------------------------
+%% The size of the body is bigger than zero =>
+%% we have a part of the body to send
+%%----------------------------------------------------------------------
+send(Info, StatusCode, Response, Size, AccResponse) ->
+ handle_other_request(Info, StatusCode, Response).
+
+
+%%----------------------------------------------------------------------
+%% The function is called the last time when the port has closed
+%%----------------------------------------------------------------------
+
+final_send(Info, StatusCode, Size, AccResponse)->
+ final_handle_other_request(Info, StatusCode).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% The code that handles the head requests %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%----------------------------------------------------------------------
+%% The request is a HEAD request; if it is an HTTP/1.1 request we can answer
+%% it directly, otherwise we must collect the size of the body before we can answer.
+%% Return Values:
+%% head_sent
+%%----------------------------------------------------------------------
+first_handle_head_request(Info, StatusCode, Response)->
+ case Info#mod.http_version of
+ "HTTP/1.1" ->
+ %% Since we have all we need to create the header create it
+ %% send it and return head_sent.
+ case httpd_util:split(Response,"\r\n\r\n|\n\n",2) of
+ {ok, [HeadEnd, Rest]} ->
+ HeadEnd1 = removeStatus(HeadEnd),
+ httpd_socket:deliver(Info#mod.socket_type,Info#mod.socket,
+ [create_header(Info,StatusCode),
+ HeadEnd1,"\r\n\r\n"]);
+ _ ->
+ httpd_socket:deliver(Info#mod.socket_type,Info#mod.socket,
+ [create_header(Info, StatusCode),
+ "Content-Type:text/html\r\n\r\n"])
+ end;
+ _ ->
+ Response1= case regexp:split(Response,"\r\n\r\n|\n\n") of
+ {ok,[HeadEnd|Rest]} ->
+ removeStatus(HeadEnd);
+ _ ->
+ ["Content-Type:text/html"]
+ end,
+ H1 = httpd_util:header(StatusCode,Info#mod.connection),
+ httpd_socket:deliver(Info#mod.socket_type,Info#mod.socket,
+ [H1,Response1,"\r\n\r\n"])
+ end,
+ head_sent.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% Handle the requests that is to the other methods %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%----------------------------------------------------------------------
+%% Create the http response header and send it to the user if it is
+%% an http/1.1 request, otherwise we must accumulate it
+%%----------------------------------------------------------------------
+first_handle_other_request(Info,StatusCode,Response)->
+ Header = create_header(Info,StatusCode),
+ Response1 =
+ case httpd_util:split(Response,"\r\n\r\n|\n\n",2) of
+ {ok,[HeadPart,[]]} ->
+ [Header, removeStatus(HeadPart),"\r\n\r\n"];
+
+ {ok,[HeadPart,BodyPart]} ->
+ [Header, removeStatus(HeadPart), "\r\n\r\n",
+ httpd_util:integer_to_hexlist(length(BodyPart)),
+ "\r\n", BodyPart];
+ _WhatEver ->
+ %% No response header field from the cgi-script,
+ %% Just a body
+ [Header, "Content-Type:text/html","\r\n\r\n",
+ httpd_util:integer_to_hexlist(length(Response)),
+ "\r\n", Response]
+ end,
+ httpd_socket:deliver(Info#mod.socket_type,Info#mod.socket, Response1).
+
+
+handle_other_request(#mod{http_version = "HTTP/1.1",
+ socket_type = Type, socket = Sock} = Info,
+ StatusCode, Response0) ->
+ Response = create_chunk(Info, Response0),
+ httpd_socket:deliver(Type, Sock, Response);
+handle_other_request(#mod{socket_type = Type, socket = Sock} = Info,
+ StatusCode, Response) ->
+ httpd_socket:deliver(Type, Sock, Response).
+
+
+final_handle_other_request(#mod{http_version = "HTTP/1.1",
+ socket_type = Type, socket = Sock},
+ StatusCode) ->
+ httpd_socket:deliver(Type, Sock, "0\r\n");
+final_handle_other_request(#mod{socket_type = Type, socket = Sock},
+ StatusCode) ->
+ httpd_socket:close(Type, Sock),
+ socket_closed.
+
+
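+%% Wrap a body fragment as an HTTP/1.1 chunk: hex size, CRLF, data, CRLF.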
+create_chunk(_Info, Response) ->
+ HEXSize = httpd_util:integer_to_hexlist(length(lists:flatten(Response))),
+ HEXSize++"\r\n"++Response++"\r\n".
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% The various helper functions %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+update_status_code(undefined, Response) ->
+ case status_code(Response) of
+ {ok, StatusCode1} ->
+ StatusCode1;
+ _ ->
+ ?vlog("invalid response from script:~n~p", [Response]),
+ 500
+ end;
+update_status_code(StatusCode,_Response)->
+ StatusCode.
+
+
+get_new_size(0,Response)->
+ case httpd_util:split(Response,"\r\n\r\n|\n\n",2) of
+ {ok,[Head,Body]}->
+ length(lists:flatten(Body));
+ _ ->
+ %% No header in the response
+ length(lists:flatten(Response))
+ end;
+
+get_new_size(Size,Response)->
+ Size+length(lists:flatten(Response)).
+
+%%----------------------------------------------------------------------
+%% Creates the http-header for a response
+%%----------------------------------------------------------------------
+create_header(Info,StatusCode)->
+ Cache=case httpd_util:lookup(Info#mod.config_db,script_nocache,false) of
+ true->
+ Date=httpd_util:rfc1123_date(),
+ "Cache-Control:no-cache\r\nPragma:no-cache\r\nExpires:"++ Date ++ "\r\n";
+ false ->
+ []
+ end,
+ case Info#mod.http_version of
+ "HTTP/1.1" ->
+ Header=httpd_util:header(StatusCode, Info#mod.connection),
+ Header++"Transfer-encoding:chunked\r\n"++Cache;
+ _ ->
+ httpd_util:header(StatusCode,Info#mod.connection)++Cache
+ end.
+
+
+
+%% status_code
+
+status_code(Response) ->
+ case httpd_util:split(Response,"\n\n|\r\n\r\n",2) of
+ {ok,[Header,Body]} ->
+ case regexp:split(Header,"\n|\r\n") of
+ {ok,HeaderFields} ->
+ {ok,extract_status_code(HeaderFields)};
+ {error,_} ->
+ {error, bad_script_output(Response)}
+ end;
+ _ ->
+ %% No header field in the returned data; return 200, the standard code
+ {ok, 200}
+ end.
+
+bad_script_output(Bad) ->
+ lists:flatten(io_lib:format("Bad script output ~s",[Bad])).
+
+
+extract_status_code([]) ->
+ 200;
+extract_status_code([[$L,$o,$c,$a,$t,$i,$o,$n,$:,$ |_]|_]) ->
+ 302;
+extract_status_code([[$S,$t,$a,$t,$u,$s,$:,$ |CodeAndReason]|_]) ->
+ case httpd_util:split(CodeAndReason," ",2) of
+ {ok,[Code,_]} ->
+ list_to_integer(Code);
+ {ok,_} ->
+ 200
+ end;
+extract_status_code([_|Rest]) ->
+ extract_status_code(Rest).
+
+
+sz(B) when binary(B) -> {binary,size(B)};
+sz(L) when list(L) -> {list,length(L)};
+sz(_) -> undefined.
+
+
+%% Convert error to printable string
+%%
+reason({error,emfile}) -> ": Too many open files";
+reason({error,{enfile,_}}) -> ": File/port table overflow";
+reason({error,enomem}) -> ": Not enough memory";
+reason({error,eagain}) -> ": No more available OS processes";
+reason(_) -> "".
+
+removeStatus(Head)->
+ case httpd_util:split(Head,"Status:.\r\n",2) of
+ {ok,[HeadPart,HeadEnd]}->
+ HeadPart++HeadEnd;
+ _ ->
+ Head
+ end.
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% There are 2 config directives for mod_cgi: %%
+%% ScriptNoCache true|false, defines whether the server shall add %%
+%% header fields to stop proxies and %%
+%% clients from saving the page in history %%
+%% or cache %%
+%% %%
+%% ScriptTimeout Seconds, The maximum number of seconds that %%
+%% the server will wait for the script to %%
+%% generate a part of the document %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
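+%% Example directives (in the server configuration file):
+%%
+%%   ScriptNoCache false
+%%   ScriptTimeout 15
+%%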
+load([$S,$c,$r,$i,$p,$t,$N,$o,$C,$a,$c,$h,$e |CacheArg],[])->
+ case catch list_to_atom(httpd_conf:clean(CacheArg)) of
+ true ->
+ {ok, [], {script_nocache,true}};
+ false ->
+ {ok, [], {script_nocache,false}};
+ _ ->
+ {error, ?NICE(httpd_conf:clean(CacheArg)++
+ " is an invalid ScriptNoCache directive")}
+ end;
+
+load([$S,$c,$r,$i,$p,$t,$T,$i,$m,$e,$o,$u,$t,$ |Timeout],[])->
+ case catch list_to_integer(httpd_conf:clean(Timeout)) of
+ TimeoutSec when integer(TimeoutSec) ->
+ {ok, [], {script_timeout,TimeoutSec*1000}};
+ _ ->
+ {error, ?NICE(httpd_conf:clean(Timeout)++
+ " is an invalid ScriptTimeout")}
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_dir.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_dir.erl
new file mode 100644
index 0000000000..9dda6d9119
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_dir.erl
@@ -0,0 +1,266 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_dir.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_dir).
+-export([do/1]).
+
+-include("httpd.hrl").
+
+%% do
+
+do(Info) ->
+ ?DEBUG("do -> entry",[]),
+ case Info#mod.method of
+ "GET" ->
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ {proceed,Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.data,response) of
+ %% No response has been generated!
+ undefined ->
+ do_dir(Info);
+ %% A response has been generated or sent!
+ Response ->
+ {proceed,Info#mod.data}
+ end
+ end;
+ %% Not a GET method!
+ _ ->
+ {proceed,Info#mod.data}
+ end.
+
+do_dir(Info) ->
+ ?DEBUG("do_dir -> Request URI: ~p",[Info#mod.request_uri]),
+ Path = mod_alias:path(Info#mod.data,Info#mod.config_db,
+ Info#mod.request_uri),
+ DefaultPath = mod_alias:default_index(Info#mod.config_db,Path),
+ %% Is it a directory?
+ case file:read_file_info(DefaultPath) of
+ {ok,FileInfo} when FileInfo#file_info.type == directory ->
+ DecodedRequestURI =
+ httpd_util:decode_hex(Info#mod.request_uri),
+ ?DEBUG("do_dir -> ~n"
+ " Path: ~p~n"
+ " DefaultPath: ~p~n"
+ " DecodedRequestURI: ~p",
+ [Path,DefaultPath,DecodedRequestURI]),
+ case dir(DefaultPath,string:strip(DecodedRequestURI,right,$/),Info#mod.config_db) of
+ {ok, Dir} ->
+ Head=[{content_type,"text/html"},
+ {content_length,integer_to_list(httpd_util:flatlength(Dir))},
+ {date,httpd_util:rfc1123_date(FileInfo#file_info.mtime)},
+ {code,200}],
+ {proceed,[{response,{response,Head,Dir}},
+ {mime_type,"text/html"}|Info#mod.data]};
+ {error, Reason} ->
+ ?ERROR("do_dir -> dir operation failed: ~p",[Reason]),
+ {proceed,
+ [{status,{404,Info#mod.request_uri,Reason}}|
+ Info#mod.data]}
+ end;
+ {ok,FileInfo} ->
+ ?DEBUG("do_dir -> ~n"
+ " Path: ~p~n"
+ " DefaultPath: ~p~n"
+ " FileInfo: ~p",
+ [Path,DefaultPath,FileInfo]),
+ {proceed,Info#mod.data};
+ {error,Reason} ->
+ ?LOG("do_dir -> failed reading file info (~p) for: ~p",
+ [Reason,DefaultPath]),
+ {proceed,
+ [{status,read_file_info_error(Reason,Info,DefaultPath)}|
+ Info#mod.data]}
+ end.
+
+dir(Path,RequestURI,ConfigDB) ->
+ case file:list_dir(Path) of
+ {ok,FileList} ->
+ SortedFileList=lists:sort(FileList),
+ {ok,[header(Path,RequestURI),
+ body(Path,RequestURI,ConfigDB,SortedFileList),
+ footer(Path,SortedFileList)]};
+ {error,Reason} ->
+ {error,?NICE("Can't open directory "++Path++": "++Reason)}
+ end.
+
+%% header
+
+header(Path,RequestURI) ->
+ Header=
+ "<HTML>\n<HEAD>\n<TITLE>Index of "++RequestURI++"</TITLE>\n</HEAD>\n<BODY>\n<H1>Index of "++
+ RequestURI++"</H1>\n<PRE><IMG SRC=\""++icon(blank)++
+ "\" ALT=" "> Name Last modified Size Description
+<HR>\n",
+ case regexp:sub(RequestURI,"[^/]*\$","") of
+ {ok,"/",_} ->
+ Header;
+ {ok,ParentRequestURI,_} ->
+ {ok,ParentPath,_}=regexp:sub(string:strip(Path,right,$/),"[^/]*\$",""),
+ Header++format(ParentPath,ParentRequestURI)
+ end.
+
+format(Path,RequestURI) ->
+ {ok,FileInfo}=file:read_file_info(Path),
+ {{Year,Month,Day},{Hour,Minute,Second}}=FileInfo#file_info.mtime,
+ io_lib:format("<IMG SRC=\"~s\" ALT=\"[~s]\"> <A HREF=\"~s\">Parent directory</A> ~2.2.0w-~s-~w ~2.2.0w:~2.2.0w -\n",
+ [icon(back),"DIR",RequestURI,Day,
+ httpd_util:month(Month),Year,Hour,Minute]).
+
+%% body
+
+body(Path,RequestURI,ConfigDB,[]) ->
+ [];
+body(Path,RequestURI,ConfigDB,[Entry|Rest]) ->
+ [format(Path,RequestURI,ConfigDB,Entry)|body(Path,RequestURI,ConfigDB,Rest)].
+
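+%% Produce one listing line (icon, link, modification time and, for
+%% regular files, size and mime type) for a single directory entry.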
+format(Path,RequestURI,ConfigDB,Entry) ->
+ case file:read_file_info(Path++"/"++Entry) of
+ {ok,FileInfo} when FileInfo#file_info.type == directory ->
+ {{Year,Month,Day},{Hour,Minute,Second}}=FileInfo#file_info.mtime,
+ EntryLength=length(Entry),
+ if
+ EntryLength > 21 ->
+ io_lib:format("<IMG SRC=\"~s\" ALT=\"[~s]\"> <A HREF=\"~s\">~-21.s..</A>~2.2.0w-~s-~w ~2.2.0w:~2.2.0w -\n",
+ [icon(folder),"DIR",RequestURI++"/"++Entry++"/",Entry,
+ Day,httpd_util:month(Month),Year,Hour,Minute]);
+ true ->
+ io_lib:format("<IMG SRC=\"~s\" ALT=\"[~s]\"> <A HREF=\"~s\">~s</A>~*.*c~2.2.0w-~s-~w ~2.2.0w:~2.2.0w -\n",
+ [icon(folder),"DIR",RequestURI++"/"++Entry++"/",Entry,
+ 23-EntryLength,23-EntryLength,$ ,Day,
+ httpd_util:month(Month),Year,Hour,Minute])
+ end;
+ {ok,FileInfo} ->
+ {{Year,Month,Day},{Hour,Minute,Second}}=FileInfo#file_info.mtime,
+ Suffix=httpd_util:suffix(Entry),
+ MimeType=httpd_util:lookup_mime(ConfigDB,Suffix,""),
+ EntryLength=length(Entry),
+ if
+ EntryLength > 21 ->
+ io_lib:format("<IMG SRC=\"~s\" ALT=\"[~s]\"> <A HREF=\"~s\">~-21.s..</A>~2.2.0w-~s-~w ~2.2.0w:~2.2.0w~8wk ~s\n",
+ [icon(Suffix,MimeType),Suffix,RequestURI++"/"++Entry,
+ Entry,Day,httpd_util:month(Month),Year,Hour,Minute,
+ trunc(FileInfo#file_info.size/1024+1),MimeType]);
+ true ->
+ io_lib:format("<IMG SRC=\"~s\" ALT=\"[~s]\"> <A HREF=\"~s\">~s</A>~*.*c~2.2.0w-~s-~w ~2.2.0w:~2.2.0w~8wk ~s\n",
+ [icon(Suffix,MimeType),Suffix,RequestURI++"/"++Entry,
+ Entry,23-EntryLength,23-EntryLength,$ ,Day,
+ httpd_util:month(Month),Year,Hour,Minute,
+ trunc(FileInfo#file_info.size/1024+1),MimeType])
+ end;
+ {error,Reason} ->
+ ""
+ end.
+
+%% footer
+
+footer(Path,FileList) ->
+ case lists:member("README",FileList) of
+ true ->
+ {ok,Body}=file:read_file(Path++"/README"),
+ "</PRE>\n<HR>\n<PRE>\n"++binary_to_list(Body)++
+ "\n</PRE>\n</BODY>\n</HTML>\n";
+ false ->
+ "</PRE>\n</BODY>\n</HTML>\n"
+ end.
+
+%%
+%% Icon mappings are hard-wired ala default Apache (Ugly!)
+%%
+
+icon(Suffix,MimeType) ->
+ case icon(Suffix) of
+ undefined ->
+ case MimeType of
+ [$t,$e,$x,$t,$/|_] ->
+ "/icons/text.gif";
+ [$i,$m,$a,$g,$e,$/|_] ->
+ "/icons/image2.gif";
+ [$a,$u,$d,$i,$o,$/|_] ->
+ "/icons/sound2.gif";
+ [$v,$i,$d,$e,$o,$/|_] ->
+ "/icons/movie.gif";
+ _ ->
+ "/icons/unknown.gif"
+ end;
+ Icon ->
+ Icon
+ end.
+
+icon(blank) -> "/icons/blank.gif";
+icon(back) -> "/icons/back.gif";
+icon(folder) -> "/icons/folder.gif";
+icon("bin") -> "/icons/binary.gif";
+icon("exe") -> "/icons/binary.gif";
+icon("hqx") -> "/icons/binhex.gif";
+icon("tar") -> "/icons/tar.gif";
+icon("wrl") -> "/icons/world2.gif";
+icon("wrl.gz") -> "/icons/world2.gif";
+icon("vrml") -> "/icons/world2.gif";
+icon("vrm") -> "/icons/world2.gif";
+icon("iv") -> "/icons/world2.gif";
+icon("Z") -> "/icons/compressed.gif";
+icon("z") -> "/icons/compressed.gif";
+icon("tgz") -> "/icons/compressed.gif";
+icon("gz") -> "/icons/compressed.gif";
+icon("zip") -> "/icons/compressed.gif";
+icon("ps") -> "/icons/a.gif";
+icon("ai") -> "/icons/a.gif";
+icon("eps") -> "/icons/a.gif";
+icon("html") -> "/icons/layout.gif";
+icon("shtml") -> "/icons/layout.gif";
+icon("htm") -> "/icons/layout.gif";
+icon("pdf") -> "/icons/layout.gif";
+icon("txt") -> "/icons/text.gif";
+icon("erl") -> "/icons/burst.gif";
+icon("c") -> "/icons/c.gif";
+icon("pl") -> "/icons/p.gif";
+icon("py") -> "/icons/p.gif";
+icon("for") -> "/icons/f.gif";
+icon("dvi") -> "/icons/dvi.gif";
+icon("uu") -> "/icons/uuencoded.gif";
+icon("conf") -> "/icons/script.gif";
+icon("sh") -> "/icons/script.gif";
+icon("shar") -> "/icons/script.gif";
+icon("csh") -> "/icons/script.gif";
+icon("ksh") -> "/icons/script.gif";
+icon("tcl") -> "/icons/script.gif";
+icon("tex") -> "/icons/tex.gif";
+icon("core") -> "/icons/tex.gif";
+icon(_) -> undefined.
+
+
+read_file_info_error(eacces,Info,Path) ->
+ read_file_info_error(403,Info,Path,
+ ": Missing search permissions for one "
+ "of the parent directories");
+read_file_info_error(enoent,Info,Path) ->
+ read_file_info_error(404,Info,Path,"");
+read_file_info_error(enotdir,Info,Path) ->
+ read_file_info_error(404,Info,Path,
+ ": A component of the file name is not a directory");
+read_file_info_error(_,Info,Path) ->
+ read_file_info_error(500,none,Path,"").
+
+read_file_info_error(StatusCode,none,Path,Reason) ->
+ {StatusCode,none,?NICE("Can't access "++Path++Reason)};
+read_file_info_error(StatusCode,Info,Path,Reason) ->
+ {StatusCode,Info#mod.request_uri,
+ ?NICE("Can't access "++Path++Reason)}.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_disk_log.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_disk_log.erl
new file mode 100644
index 0000000000..bb175f24b0
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_disk_log.erl
@@ -0,0 +1,404 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_disk_log.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_disk_log).
+-export([do/1,error_log/5,security_log/2,load/2,store/2,remove/1]).
+
+-export([report_error/2]).
+
+-define(VMODULE,"DISK_LOG").
+-include("httpd_verbosity.hrl").
+
+-include("httpd.hrl").
+
+%% do
+
+do(Info) ->
+ AuthUser = auth_user(Info#mod.data),
+ Date = custom_date(),
+ log_internal_info(Info,Date,Info#mod.data),
+ LogFormat = get_log_format(Info#mod.config_db),
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ transfer_log(Info, "-", AuthUser, Date, StatusCode, 0, LogFormat),
+ if
+ StatusCode >= 400 ->
+ error_log(Info, Date, Reason, LogFormat);
+ true ->
+ not_an_error
+ end,
+ {proceed,Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.data,response) of
+ {already_sent,StatusCode,Size} ->
+ transfer_log(Info, "-", AuthUser, Date, StatusCode,
+ Size, LogFormat),
+ {proceed,Info#mod.data};
+
+ {response, Head, Body} ->
+ Size = httpd_util:key1search(Head, content_length, 0),
+ Code = httpd_util:key1search(Head, code, 200),
+ transfer_log(Info, "-", AuthUser, Date, Code,
+ Size, LogFormat),
+ {proceed,Info#mod.data};
+
+ {StatusCode,Response} ->
+ transfer_log(Info, "-", AuthUser, Date, 200,
+ httpd_util:flatlength(Response), LogFormat),
+ {proceed,Info#mod.data};
+ undefined ->
+ transfer_log(Info, "-", AuthUser, Date, 200,
+ 0, LogFormat),
+ {proceed,Info#mod.data}
+ end
+ end.
+
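+%% Create a timestamp in common log format including the local utc
+%% offset, e.g. "01/Jan/2004:12:00:00 +0100" (illustrative).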
+custom_date() ->
+ LocalTime = calendar:local_time(),
+ UniversalTime = calendar:universal_time(),
+ Minutes = round(diff_in_minutes(LocalTime,UniversalTime)),
+ {{YYYY,MM,DD},{Hour,Min,Sec}} = LocalTime,
+ Date =
+ io_lib:format("~.2.0w/~.3s/~.4w:~.2.0w:~.2.0w:~.2.0w ~c~.2.0w~.2.0w",
+ [DD,httpd_util:month(MM),YYYY,Hour,Min,Sec,sign(Minutes),
+ abs(Minutes) div 60,abs(Minutes) rem 60]),
+ lists:flatten(Date).
+
+diff_in_minutes(L,U) ->
+ (calendar:datetime_to_gregorian_seconds(L) -
+ calendar:datetime_to_gregorian_seconds(U))/60.
+
+sign(Minutes) when Minutes > 0 ->
+ $+;
+sign(Minutes) ->
+ $-.
+
+auth_user(Data) ->
+ case httpd_util:key1search(Data,remote_user) of
+ undefined ->
+ "-";
+ RemoteUser ->
+ RemoteUser
+ end.
+
+%% log_internal_info
+
+log_internal_info(Info,Date,[]) ->
+ ok;
+log_internal_info(Info,Date,[{internal_info,Reason}|Rest]) ->
+ Format = get_log_format(Info#mod.config_db),
+ error_log(Info,Date,Reason,Format),
+ log_internal_info(Info,Date,Rest);
+log_internal_info(Info,Date,[_|Rest]) ->
+ log_internal_info(Info,Date,Rest).
+
+
+%% transfer_log
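+%%
+%% Write one common log format (CLF) entry to the transfer log, if one
+%% is configured.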
+
+transfer_log(Info,RFC931,AuthUser,Date,StatusCode,Bytes,Format) ->
+ case httpd_util:lookup(Info#mod.config_db,transfer_disk_log) of
+ undefined ->
+ no_transfer_log;
+ TransferDiskLog ->
+ {PortNumber,RemoteHost}=(Info#mod.init_data)#init_data.peername,
+ Entry = io_lib:format("~s ~s ~s [~s] \"~s\" ~w ~w~n",
+ [RemoteHost,RFC931,AuthUser,Date,
+ Info#mod.request_line,StatusCode,Bytes]),
+ write(TransferDiskLog, Entry, Format)
+ end.
+
+
+%% error_log
+
+error_log(Info, Date, Reason, Format) ->
+ Format=get_log_format(Info#mod.config_db),
+ case httpd_util:lookup(Info#mod.config_db,error_disk_log) of
+ undefined ->
+ no_error_log;
+ ErrorDiskLog ->
+ {PortNumber,RemoteHost}=(Info#mod.init_data)#init_data.peername,
+ Entry =
+ io_lib:format("[~s] access to ~s failed for ~s, reason: ~p~n",
+ [Date, Info#mod.request_uri,
+ RemoteHost, Reason]),
+ write(ErrorDiskLog, Entry, Format)
+ end.
+
+error_log(SocketType, Socket, ConfigDB, {PortNumber, RemoteHost}, Reason) ->
+ Format = get_log_format(ConfigDB),
+ case httpd_util:lookup(ConfigDB,error_disk_log) of
+ undefined ->
+ no_error_log;
+ ErrorDiskLog ->
+ Date = custom_date(),
+ Entry =
+ io_lib:format("[~s] server crash for ~s, reason: ~p~n",
+ [Date,RemoteHost,Reason]),
+ write(ErrorDiskLog, Entry, Format),
+ ok
+ end.
+
+
+%% security_log
+
+security_log(ConfigDB, Event) ->
+ Format = get_log_format(ConfigDB),
+ case httpd_util:lookup(ConfigDB,security_disk_log) of
+ undefined ->
+ no_error_log;
+ DiskLog ->
+ Date = custom_date(),
+ Entry = io_lib:format("[~s] ~s ~n", [Date, Event]),
+ write(DiskLog, Entry, Format),
+ ok
+ end.
+
+report_error(ConfigDB, Error) ->
+ Format = get_log_format(ConfigDB),
+ case httpd_util:lookup(ConfigDB, error_disk_log) of
+ undefined ->
+ no_error_log;
+ ErrorDiskLog ->
+ Date = custom_date(),
+ Entry = io_lib:format("[~s] reporting error: ~s",[Date,Error]),
+ write(ErrorDiskLog, Entry, Format),
+ ok
+ end.
+
+%%----------------------------------------------------------------------
+%% Get the current format of the disklog
+%%----------------------------------------------------------------------
+get_log_format(ConfigDB)->
+ httpd_util:lookup(ConfigDB,disk_log_format,external).
+
+
+%%
+%% Configuration
+%%
+
+%% load
+
+load([$T,$r,$a,$n,$s,$f,$e,$r,$D,$i,$s,$k,$L,$o,$g,$S,$i,$z,$e,$ |
+ TransferDiskLogSize],[]) ->
+ case regexp:split(TransferDiskLogSize," ") of
+ {ok,[MaxBytes,MaxFiles]} ->
+ case httpd_conf:make_integer(MaxBytes) of
+ {ok,MaxBytesInteger} ->
+ case httpd_conf:make_integer(MaxFiles) of
+ {ok,MaxFilesInteger} ->
+ {ok,[],{transfer_disk_log_size,
+ {MaxBytesInteger,MaxFilesInteger}}};
+ {error,_} ->
+ {error,
+ ?NICE(httpd_conf:clean(TransferDiskLogSize)++
+ " is an invalid TransferDiskLogSize")}
+ end;
+ {error,_} ->
+ {error,?NICE(httpd_conf:clean(TransferDiskLogSize)++
+ " is an invalid TransferDiskLogSize")}
+ end
+ end;
+load([$T,$r,$a,$n,$s,$f,$e,$r,$D,$i,$s,$k,$L,$o,$g,$ |TransferDiskLog],[]) ->
+ {ok,[],{transfer_disk_log,httpd_conf:clean(TransferDiskLog)}};
+
+load([$E,$r,$r,$o,$r,$D,$i,$s,$k,$L,$o,$g,$S,$i,$z,$e,$ | ErrorDiskLogSize],[]) ->
+ case regexp:split(ErrorDiskLogSize," ") of
+ {ok,[MaxBytes,MaxFiles]} ->
+ case httpd_conf:make_integer(MaxBytes) of
+ {ok,MaxBytesInteger} ->
+ case httpd_conf:make_integer(MaxFiles) of
+ {ok,MaxFilesInteger} ->
+ {ok,[],{error_disk_log_size,
+ {MaxBytesInteger,MaxFilesInteger}}};
+ {error,_} ->
+ {error,?NICE(httpd_conf:clean(ErrorDiskLogSize)++
+ " is an invalid ErrorDiskLogSize")}
+ end;
+ {error,_} ->
+ {error,?NICE(httpd_conf:clean(ErrorDiskLogSize)++
+ " is an invalid ErrorDiskLogSize")}
+ end
+ end;
+load([$E,$r,$r,$o,$r,$D,$i,$s,$k,$L,$o,$g,$ |ErrorDiskLog],[]) ->
+ {ok, [], {error_disk_log, httpd_conf:clean(ErrorDiskLog)}};
+
+load([$S,$e,$c,$u,$r,$i,$t,$y,$D,$i,$s,$k,$L,$o,$g,$S,$i,$z,$e,$ |SecurityDiskLogSize],[]) ->
+ case regexp:split(SecurityDiskLogSize, " ") of
+ {ok, [MaxBytes, MaxFiles]} ->
+ case httpd_conf:make_integer(MaxBytes) of
+ {ok, MaxBytesInteger} ->
+ case httpd_conf:make_integer(MaxFiles) of
+ {ok, MaxFilesInteger} ->
+ {ok, [], {security_disk_log_size,
+ {MaxBytesInteger, MaxFilesInteger}}};
+ {error,_} ->
+ {error, ?NICE(httpd_conf:clean(SecurityDiskLogSize)++
+ " is an invalid SecurityDiskLogSize")}
+ end;
+ {error, _} ->
+ {error, ?NICE(httpd_conf:clean(SecurityDiskLogSize)++
+ " is an invalid SecurityDiskLogSize")}
+ end
+ end;
+load([$S,$e,$c,$u,$r,$i,$t,$y,$D,$i,$s,$k,$L,$o,$g,$ |SecurityDiskLog],[]) ->
+ {ok, [], {security_disk_log, httpd_conf:clean(SecurityDiskLog)}};
+
+load([$D,$i,$s,$k,$L,$o,$g,$F,$o,$r,$m,$a,$t,$ |Format],[]) ->
+ case httpd_conf:clean(Format) of
+ "internal" ->
+ {ok, [], {disk_log_format,internal}};
+ "external" ->
+ {ok, [], {disk_log_format,external}};
+ _Default ->
+ {ok, [], {disk_log_format,external}}
+ end.
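+
+%% Illustration only (not part of the original module): a sketch of how the
+%% load/2 clauses above map configuration directives (spelled out as
+%% character lists $T,$r,... in the patterns) to property tuples. The
+%% directive values are made up, and httpd_conf:clean/1 is assumed to only
+%% strip surrounding whitespace here. Not exported, not used by the module.
+example_load() ->
+    {ok,[],{transfer_disk_log_size,{200000,4}}} =
+        load("TransferDiskLogSize 200000 4", []),
+    {ok,[],{error_disk_log,"logs/error_disk_log"}} =
+        load("ErrorDiskLog logs/error_disk_log", []),
+    {ok,[],{disk_log_format,internal}} =
+        load("DiskLogFormat internal", []),
+    ok.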
+
+%% store
+
+store({transfer_disk_log,TransferDiskLog},ConfigList) ->
+ case create_disk_log(TransferDiskLog, transfer_disk_log_size, ConfigList) of
+ {ok,TransferDB} ->
+ {ok,{transfer_disk_log,TransferDB}};
+ {error,Reason} ->
+ {error,Reason}
+ end;
+store({security_disk_log,SecurityDiskLog},ConfigList) ->
+ case create_disk_log(SecurityDiskLog, security_disk_log_size, ConfigList) of
+ {ok,SecurityDB} ->
+ {ok,{security_disk_log,SecurityDB}};
+ {error,Reason} ->
+ {error,Reason}
+ end;
+store({error_disk_log,ErrorDiskLog},ConfigList) ->
+ case create_disk_log(ErrorDiskLog, error_disk_log_size, ConfigList) of
+ {ok,ErrorDB} ->
+ {ok,{error_disk_log,ErrorDB}};
+ {error,Reason} ->
+ {error,Reason}
+ end.
+
+
+%%----------------------------------------------------------------------
+%% Open or creates the disklogs
+%%----------------------------------------------------------------------
+log_size(ConfigList, Tag) ->
+ httpd_util:key1search(ConfigList, Tag, {500*1024,8}).
+
+create_disk_log(LogFile, SizeTag, ConfigList) ->
+ Filename = httpd_conf:clean(LogFile),
+ {MaxBytes, MaxFiles} = log_size(ConfigList, SizeTag),
+ case filename:pathtype(Filename) of
+ absolute ->
+ create_disk_log(Filename, MaxBytes, MaxFiles, ConfigList);
+ volumerelative ->
+ create_disk_log(Filename, MaxBytes, MaxFiles, ConfigList);
+ relative ->
+ case httpd_util:key1search(ConfigList,server_root) of
+ undefined ->
+ {error,
+ ?NICE(Filename++
+		       " is an invalid ErrorLog because ServerRoot is not defined")};
+ ServerRoot ->
+ AbsoluteFilename = filename:join(ServerRoot,Filename),
+ create_disk_log(AbsoluteFilename, MaxBytes, MaxFiles,
+ ConfigList)
+ end
+ end.
+
+create_disk_log(Filename, MaxBytes, MaxFiles, ConfigList) ->
+ Format = httpd_util:key1search(ConfigList, disk_log_format, external),
+ open(Filename, MaxBytes, MaxFiles, Format).
+
+
+
+%% remove
+remove(ConfigDB) ->
+ lists:foreach(fun([DiskLog]) -> close(DiskLog) end,
+ ets:match(ConfigDB,{transfer_disk_log,'$1'})),
+ lists:foreach(fun([DiskLog]) -> close(DiskLog) end,
+ ets:match(ConfigDB,{error_disk_log,'$1'})),
+ ok.
+
+
+%%
+%% Some disk_log wrapper functions:
+%%
+
+%%----------------------------------------------------------------------
+%% Function: open/4
+%% Description: Open a disk log file.
+%% Controls which format the disk log will be in. The external file
+%% format is used as default since that format was used by older
+%% implementations of inets.
+%%
+%% When the internal disk log format is used, some extra checks are
+%% done. If the files are not valid, try to repair them, and if
+%% that's not possible, truncate.
+%%----------------------------------------------------------------------
+
+open(Filename, MaxBytes, MaxFiles, internal) ->
+ Opts = [{format, internal}, {repair, truncate}],
+ open1(Filename, MaxBytes, MaxFiles, Opts);
+open(Filename, MaxBytes, MaxFiles, _) ->
+ Opts = [{format, external}],
+ open1(Filename, MaxBytes, MaxFiles, Opts).
+
+open1(Filename, MaxBytes, MaxFiles, Opts0) ->
+ Opts1 = [{name, Filename}, {file, Filename}, {type, wrap}] ++ Opts0,
+ case open2(Opts1, {MaxBytes, MaxFiles}) of
+ {ok, LogDB} ->
+ {ok, LogDB};
+ {error, Reason} ->
+ ?vlog("failed opening disk log with args:"
+ "~n Filename: ~p"
+ "~n MaxBytes: ~p"
+ "~n MaxFiles: ~p"
+ "~n Opts0: ~p"
+ "~nfor reason:"
+ "~n ~p", [Filename, MaxBytes, MaxFiles, Opts0, Reason]),
+ {error,
+ ?NICE("Can't create " ++ Filename ++
+ lists:flatten(io_lib:format(", ~p",[Reason])))};
+ _ ->
+ {error, ?NICE("Can't create "++Filename)}
+ end.
+
+open2(Opts, Size) ->
+ case disk_log:open(Opts) of
+ {error, {badarg, size}} ->
+ %% File did not exist, add the size option and try again
+ disk_log:open([{size, Size} | Opts]);
+ Else ->
+ Else
+ end.
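+
+%% Illustration only (not part of the original module): for an internal log,
+%% the calls above end up handing disk_log:open/1 an argument list roughly
+%% like the one below (the size option is added on the retry in open2/2).
+%% The file name and limits are made-up values.
+%%
+%%   [{size, {200000, 4}},
+%%    {name, "logs/error_disk_log"}, {file, "logs/error_disk_log"},
+%%    {type, wrap}, {format, internal}, {repair, truncate}]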
+
+
+%%----------------------------------------------------------------------
+%% Actually writes the entry to the disk_log. If the log is an
+%% internal disk_log, write it with disk_log:log/2, otherwise with disk_log:blog/2.
+%%----------------------------------------------------------------------
+write(Log, Entry, internal) ->
+ disk_log:log(Log, Entry);
+
+write(Log, Entry, _) ->
+ disk_log:blog(Log, Entry).
+
+%% Close the log file
+close(Log) ->
+ disk_log:close(Log).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl
new file mode 100644
index 0000000000..cb211749da
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl
@@ -0,0 +1,481 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_esi.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_esi).
+-export([do/1,load/2]).
+
+%% Functions provided to help the erl scheme alias programmer
+%% create dynamic web pages that are sent back to the user during
+%% generation
+-export([deliver/2]).
+
+
+-include("httpd.hrl").
+
+-define(VMODULE,"ESI").
+-include("httpd_verbosity.hrl").
+
+-define(GATEWAY_INTERFACE,"CGI/1.1").
+-define(DEFAULT_ERL_TIMEOUT,15000).
+%% do
+
+do(Info) ->
+ ?vtrace("do",[]),
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ {proceed,Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.data,response) of
+ %% No response has been generated!
+ undefined ->
+ case erl_or_eval(Info#mod.request_uri,
+ Info#mod.config_db) of
+ {eval,CGIBody,Modules} ->
+ eval(Info,Info#mod.method,CGIBody,Modules);
+ {erl,CGIBody,Modules} ->
+ erl(Info,Info#mod.method,CGIBody,Modules);
+ proceed ->
+ {proceed,Info#mod.data}
+ end;
+ %% A response has been generated or sent!
+ Response ->
+ {proceed,Info#mod.data}
+ end
+ end.
+
+
+
+%% erl_or_eval
+
+erl_or_eval(RequestURI, ConfigDB) ->
+ case erlp(RequestURI, ConfigDB) of
+ false ->
+ case evalp(RequestURI, ConfigDB) of
+ false ->
+ ?vtrace("neither erl nor eval",[]),
+ proceed;
+ Other ->
+ Other
+ end;
+ Other ->
+ Other
+ end.
+
+erlp(RequestURI, ConfigDB) ->
+ case httpd_util:multi_lookup(ConfigDB, erl_script_alias) of
+ [] ->
+ false;
+ AliasMods ->
+ erlp_find_alias(RequestURI,AliasMods)
+ end.
+
+erlp_find_alias(_RequestURI,[]) ->
+ ?vtrace("erlp_find_alias -> no match",[]),
+ false;
+erlp_find_alias(RequestURI,[{Alias,Modules}|Rest]) ->
+ case regexp:first_match(RequestURI,"^"++Alias++"/") of
+ {match,1,Length} ->
+ ?vtrace("erlp -> match with Length: ~p",[Length]),
+ {erl,string:substr(RequestURI,Length+1),Modules};
+ nomatch ->
+ erlp_find_alias(RequestURI,Rest)
+ end.
+
+evalp(RequestURI, ConfigDB) ->
+ case httpd_util:multi_lookup(ConfigDB, eval_script_alias) of
+ [] ->
+ false;
+ AliasMods ->
+ evalp_find_alias(RequestURI,AliasMods)
+ end.
+
+evalp_find_alias(_RequestURI,[]) ->
+ ?vtrace("evalp_find_alias -> no match",[]),
+ false;
+evalp_find_alias(RequestURI,[{Alias,Modules}|Rest]) ->
+ case regexp:first_match(RequestURI,"^"++Alias++"\\?") of
+ {match, 1, Length} ->
+ ?vtrace("evalp_find_alias -> match with Length: ~p",[Length]),
+ {eval, string:substr(RequestURI,Length+1),Modules};
+ nomatch ->
+ evalp_find_alias(RequestURI,Rest)
+ end.
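+
+%% Illustration only (not part of the original module): a self-contained
+%% sketch of the alias matching above, using a made-up ErlScriptAlias
+%% configuration ("/cgi-bin/example" -> module httpd_example). Not exported,
+%% not used by the module.
+example_erlp_match() ->
+    AliasMods = [{"/cgi-bin/example", ["httpd_example"]}],
+    %% "^/cgi-bin/example/" matches the first 17 characters, so the rest of
+    %% the URI is taken as Module:Function plus optional input.
+    {erl, "httpd_example:get", ["httpd_example"]} =
+        erlp_find_alias("/cgi-bin/example/httpd_example:get", AliasMods),
+    ok.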
+
+
+%%
+%% Erl mechanism
+%%
+
+%% This is exactly the same as the GET method; the difference is that
+%% the response must not contain any data except the response header
+
+
+erl(Info,"HEAD",CGIBody,Modules) ->
+ erl(Info,"GET",CGIBody,Modules);
+
+erl(Info,"GET",CGIBody,Modules) ->
+ ?vtrace("erl GET request",[]),
+ case httpd_util:split(CGIBody,":|%3A|/",2) of
+ {ok, [Mod,FuncAndInput]} ->
+ ?vtrace("~n Mod: ~p"
+ "~n FuncAndInput: ~p",[Mod,FuncAndInput]),
+ case httpd_util:split(FuncAndInput,"[\?/]",2) of
+ {ok, [Func,Input]} ->
+ ?vtrace("~n Func: ~p"
+ "~n Input: ~p",[Func,Input]),
+ exec(Info,"GET",CGIBody,Modules,Mod,Func,
+ {input_type(FuncAndInput),Input});
+ {ok, [Func]} ->
+ exec(Info,"GET",CGIBody,Modules,Mod,Func,{no_input,""});
+ {ok, BadRequest} ->
+ {proceed,[{status,{400,none,BadRequest}}|Info#mod.data]}
+ end;
+ {ok, BadRequest} ->
+ ?vlog("erl BAD (GET-) request",[]),
+ {proceed, [{status,{400,none,BadRequest}}|Info#mod.data]}
+ end;
+
+erl(Info, "POST", CGIBody, Modules) ->
+ ?vtrace("erl POST request",[]),
+ case httpd_util:split(CGIBody,":|%3A|/",2) of
+ {ok,[Mod,Func]} ->
+ ?vtrace("~n Mod: ~p"
+ "~n Func: ~p",[Mod,Func]),
+ exec(Info,"POST",CGIBody,Modules,Mod,Func,
+ {entity_body,Info#mod.entity_body});
+ {ok,BadRequest} ->
+ ?vlog("erl BAD (POST-) request",[]),
+ {proceed,[{status,{400,none,BadRequest}}|Info#mod.data]}
+ end.
+
+input_type([]) ->
+ no_input;
+input_type([$/|Rest]) ->
+ path_info;
+input_type([$?|Rest]) ->
+ query_string;
+input_type([First|Rest]) ->
+ input_type(Rest).
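+
+%% Illustration only (not part of the original module): input_type/1 scans
+%% what follows the function name in the URI and classifies the input. A
+%% small self-contained sketch with made-up values; not exported, not used
+%% by the module.
+example_input_type() ->
+    query_string = input_type("get?foo=bar"),
+    path_info    = input_type("get/some/extra/path"),
+    no_input     = input_type("get"),
+    ok.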
+
+
+%% exec
+
+exec(Info,Method,CGIBody,["all"],Mod,Func,{Type,Input}) ->
+ ?vtrace("exec ~s 'all'",[Method]),
+ exec(Info,Method,CGIBody,[Mod],Mod,Func,{Type,Input});
+exec(Info,Method,CGIBody,Modules,Mod,Func,{Type,Input}) ->
+ ?vtrace("exec ~s request with:"
+ "~n Modules: ~p"
+ "~n Mod: ~p"
+ "~n Func: ~p"
+ "~n Type: ~p"
+ "~n Input: ~p",
+ [Method,Modules,Mod,Func,Type,Input]),
+ case lists:member(Mod,Modules) of
+ true ->
+ {_,RemoteAddr}=(Info#mod.init_data)#init_data.peername,
+ ServerName=(Info#mod.init_data)#init_data.resolve,
+ Env=get_environment(Info,ServerName,Method,RemoteAddr,Type,Input),
+ ?vtrace("and now call the module",[]),
+ case try_new_erl_scheme_method(Info,Env,Input,list_to_atom(Mod),list_to_atom(Func)) of
+ {error,not_new_method}->
+ case catch apply(list_to_atom(Mod),list_to_atom(Func),[Env,Input]) of
+ {'EXIT',Reason} ->
+ ?vlog("exit with Reason: ~p",[Reason]),
+ {proceed,[{status,{500,none,Reason}}|Info#mod.data]};
+ Response ->
+ control_response_header(Info,Mod,Func,Response)
+ end;
+ ResponseResult->
+ ResponseResult
+ end;
+ false ->
+ ?vlog("unknown module",[]),
+ {proceed,[{status,{403,Info#mod.request_uri,
+ ?NICE("Client not authorized to evaluate: "++CGIBody)}}|Info#mod.data]}
+ end.
+
+control_response_header(Info,Mod,Func,Response)->
+ case control_response(Response,Info,Mod,Func) of
+ {proceed,[{response,{StatusCode,Response}}|Rest]} ->
+ case httpd_util:lookup(Info#mod.config_db,erl_script_nocache,false) of
+ true ->
+ case httpd_util:split(Response,"\r\n\r\n|\n\n",2) of
+ {ok,[Head,Body]}->
+ Date=httpd_util:rfc1123_date(),
+ Cache="Cache-Control:no-cache\r\nPragma:no-cache\r\nExpires:"++ Date ++ "\r\n",
+ {proceed,[{response,{StatusCode,[Head,"\r\n",Cache,"\r\n",Body]}}|Rest]};
+ _->
+ {proceed,[{response,{StatusCode,Response}}|Rest]}
+ end;
+ WhatEver->
+ {proceed,[{response,{StatusCode,Response}}|Rest]}
+ end;
+ WhatEver->
+ WhatEver
+ end.
+
+control_response(Response,Info,Mod,Func)->
+ ?vdebug("Response: ~n~p",[Response]),
+ case mod_cgi:status_code(lists:flatten(Response)) of
+ {ok,StatusCode} ->
+ {proceed,[{response,{StatusCode,Response}}|Info#mod.data]};
+ {error,Reason} ->
+ {proceed,
+ [{status,{400,none,
+ ?NICE("Error in "++Mod++":"++Func++"/2: "++
+ lists:flatten(io_lib:format("~p",[Reason])))}}|
+ Info#mod.data]}
+ end.
+
+parsed_header([]) ->
+ [];
+parsed_header([{Name,[Value|R1]}|R2]) when list(Value) ->
+ NewName=lists:map(fun(X) -> if X == $- -> $_; true -> X end end,Name),
+ [{list_to_atom("http_"++httpd_util:to_lower(NewName)),
+ multi_value([Value|R1])}|parsed_header(R2)];
+parsed_header([{Name,Value}|Rest]) when list(Value)->
+ {ok,NewName,_}=regexp:gsub(Name,"-","_"),
+ [{list_to_atom("http_"++httpd_util:to_lower(NewName)),Value}|
+ parsed_header(Rest)].
+
+multi_value([]) ->
+ [];
+multi_value([Value]) ->
+ Value;
+multi_value([Value|Rest]) ->
+ Value++", "++multi_value(Rest).
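+
+%% Illustration only (not part of the original module): a self-contained
+%% sketch of how parsed_header/1 turns parsed HTTP headers into CGI style
+%% environment entries; the header values are made up. Not exported, not
+%% used by the module.
+example_parsed_header() ->
+    [{http_accept_language, "en"}] =
+        parsed_header([{"Accept-Language", "en"}]),
+    [{http_accept, "text/html, text/plain"}] =
+        parsed_header([{"Accept", ["text/html", "text/plain"]}]),
+    ok.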
+
+%%
+%% Eval mechanism
+%%
+
+
+eval(Info,"POST",CGIBody,Modules) ->
+ ?vtrace("eval(POST) -> method not supported",[]),
+ {proceed,[{status,{501,{"POST",Info#mod.request_uri,Info#mod.http_version},
+ ?NICE("Eval mechanism doesn't support method POST")}}|
+ Info#mod.data]};
+
+eval(Info,"HEAD",CGIBody,Modules) ->
+    %% The function that sends the data in httpd_response handles a HEAD request by not
+    %% sending the body
+ eval(Info,"GET",CGIBody,Modules);
+
+
+eval(Info,"GET",CGIBody,Modules) ->
+ ?vtrace("eval(GET) -> entry when"
+ "~n Modules: ~p",[Modules]),
+ case auth(CGIBody,Modules) of
+ true ->
+ case lib:eval_str(string:concat(CGIBody,". ")) of
+ {error,Reason} ->
+ ?vlog("eval -> error:"
+ "~n Reason: ~p",[Reason]),
+ {proceed,[{status,{500,none,Reason}}|Info#mod.data]};
+ {ok,Response} ->
+ ?vtrace("eval -> ok:"
+ "~n Response: ~p",[Response]),
+ case mod_cgi:status_code(lists:flatten(Response)) of
+ {ok,StatusCode} ->
+ {proceed,[{response,{StatusCode,Response}}|Info#mod.data]};
+ {error,Reason} ->
+ {proceed,[{status,{400,none,Reason}}|Info#mod.data]}
+ end
+ end;
+ false ->
+ ?vlog("eval -> auth failed",[]),
+ {proceed,[{status,
+ {403,Info#mod.request_uri,
+ ?NICE("Client not authorized to evaluate: "++CGIBody)}}|
+ Info#mod.data]}
+ end.
+
+auth(CGIBody,["all"]) ->
+ true;
+auth(CGIBody,Modules) ->
+ case regexp:match(CGIBody,"^[^\:(%3A)]*") of
+ {match,Start,Length} ->
+ lists:member(string:substr(CGIBody,Start,Length),Modules);
+ nomatch ->
+ false
+ end.
+
+%%----------------------------------------------------------------------
+%% Creates the environment list that will be the first argument to the
+%% functions that are called through the ErlScript scheme
+%%----------------------------------------------------------------------
+
+get_environment(Info,ServerName,Method,RemoteAddr,Type,Input)->
+ Env=[{server_software,?SERVER_SOFTWARE},
+ {server_name,ServerName},
+ {gateway_interface,?GATEWAY_INTERFACE},
+ {server_protocol,?SERVER_PROTOCOL},
+ {server_port,httpd_util:lookup(Info#mod.config_db,port,80)},
+ {request_method,Method},
+ {remote_addr,RemoteAddr},
+ {script_name,Info#mod.request_uri}|
+ parsed_header(Info#mod.parsed_header)],
+ get_environment(Type,Input,Env,Info).
+
+
+get_environment(Type,Input,Env,Info)->
+ Env1=case Type of
+ query_string ->
+ [{query_string,Input}|Env];
+ path_info ->
+ Aliases=httpd_util:multi_lookup(Info#mod.config_db,alias),
+ {_,PathTranslated,_}=mod_alias:real_name(Info#mod.config_db,[$/|Input],Aliases),
+ [{path_info,"/"++httpd_util:decode_hex(Input)},
+ {path_translated,PathTranslated}|Env];
+ entity_body ->
+ [{content_length,httpd_util:flatlength(Input)}|Env];
+ no_input ->
+ Env
+ end,
+ get_environment(Info,Env1).
+
+get_environment(Info,Env)->
+ case httpd_util:key1search(Info#mod.data,remote_user) of
+ undefined ->
+ Env;
+ RemoteUser ->
+ [{remote_user,RemoteUser}|Env]
+ end.
+%%
+%% Configuration
+%%
+
+%% load
+
+load([$E,$r,$l,$S,$c,$r,$i,$p,$t,$A,$l,$i,$a,$s,$ |ErlScriptAlias],[]) ->
+ case regexp:split(ErlScriptAlias," ") of
+ {ok, [ErlName|Modules]} ->
+ {ok, [], {erl_script_alias, {ErlName,Modules}}};
+ {ok, _} ->
+ {error,?NICE(httpd_conf:clean(ErlScriptAlias)++
+ " is an invalid ErlScriptAlias")}
+ end;
+load([$E,$v,$a,$l,$S,$c,$r,$i,$p,$t,$A,$l,$i,$a,$s,$ |EvalScriptAlias],[]) ->
+ case regexp:split(EvalScriptAlias, " ") of
+ {ok, [EvalName|Modules]} ->
+ {ok, [], {eval_script_alias, {EvalName,Modules}}};
+ {ok, _} ->
+ {error, ?NICE(httpd_conf:clean(EvalScriptAlias)++
+ " is an invalid EvalScriptAlias")}
+ end;
+load([$E,$r,$l,$S,$c,$r,$i,$p,$t,$T,$i,$m,$e,$o,$u,$t,$ |Timeout],[])->
+ case catch list_to_integer(httpd_conf:clean(Timeout)) of
+ TimeoutSec when integer(TimeoutSec) ->
+ {ok, [], {erl_script_timeout,TimeoutSec*1000}};
+ _ ->
+ {error, ?NICE(httpd_conf:clean(Timeout)++
+ " is an invalid ErlScriptTimeout")}
+ end;
+load([$E,$r,$l,$S,$c,$r,$i,$p,$t,$N,$o,$C,$a,$c,$h,$e |CacheArg],[])->
+ case catch list_to_atom(httpd_conf:clean(CacheArg)) of
+ true ->
+ {ok, [], {erl_script_nocache,true}};
+ false ->
+ {ok, [], {erl_script_nocache,false}};
+ _ ->
+ {error, ?NICE(httpd_conf:clean(CacheArg)++
+ " is an invalid ErlScriptNoCache directive")}
+ end.
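+
+%% Illustration only (not part of the original module): a self-contained
+%% sketch of the load/2 clauses above, with made-up directive values. Not
+%% exported, not used by the module.
+example_load() ->
+    {ok, [], {erl_script_alias, {"/cgi-bin/example", ["httpd_example"]}}} =
+        load("ErlScriptAlias /cgi-bin/example httpd_example", []),
+    {ok, [], {erl_script_timeout, 5000}} =
+        load("ErlScriptTimeout 5", []),
+    {ok, [], {erl_script_nocache, true}} =
+        load("ErlScriptNoCache true", []),
+    ok.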
+
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% The functions below handle the data from the dynamic web pages     %%
+%% that send data back to the user part by part                       %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%%----------------------------------------------------------------------
+%%Deliver is the callback function users can call to deliver back data to the
+%%client
+%%----------------------------------------------------------------------
+
+deliver(SessionID,Data)when pid(SessionID) ->
+ SessionID ! {ok,Data},
+ ok;
+deliver(SessionID,Data) ->
+ {error,bad_sessionID}.
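+
+%% Illustration only (not part of the original module): a minimal sketch of a
+%% user callback written for the "new" erl scheme, assuming it is spawned as
+%% Mod:Func(SessionID, Env, Input) (see try_new_erl_scheme_method/5 below).
+%% The module and function names are made up.
+%%
+%%   -module(my_esi).
+%%   -export([hello/3]).
+%%
+%%   hello(SessionID, _Env, _Input) ->
+%%       mod_esi:deliver(SessionID, "Content-Type: text/html\r\n\r\n"),
+%%       mod_esi:deliver(SessionID, "<HTML><BODY>Hello</BODY></HTML>").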
+
+
+%%----------------------------------------------------------------------
+%% The method that tries to execute the new format
+%%----------------------------------------------------------------------
+
+%% It would be nicer to use erlang:function_exported/3, but if the
+%% module isn't loaded that function reports the function as not exported
+
+
+try_new_erl_scheme_method(Info,Env,Input,Mod,Func)->
+ process_flag(trap_exit,true),
+ Pid=spawn_link(Mod,Func,[self(),Env,Input]),
+ Timeout=httpd_util:lookup(Info#mod.config_db,erl_script_timeout,?DEFAULT_ERL_TIMEOUT),
+ RetVal=receive_response_data(Info,Pid,0,undefined,[],Timeout),
+ process_flag(trap_exit,false),
+ RetVal.
+
+
+%%----------------------------------------------------------------------
+%% The function receives the data from the process that generates the page
+%% and sends the data to the client through the mod_cgi:send function
+%%----------------------------------------------------------------------
+
+receive_response_data(Info,Pid,Size,StatusCode,AccResponse,Timeout) ->
+ ?DEBUG("receive_response_data()-> Script Size: ~p,StatusCode ~p ,Timeout: ~p ~n",[Size,StatusCode,Timeout]),
+ receive
+ {ok, Response} ->
+ NewStatusCode=mod_cgi:update_status_code(StatusCode,Response),
+
+ ?DEBUG("receive_response_data/2 NewStatusCode: ~p~n",[NewStatusCode]),
+ case mod_cgi:send(Info, NewStatusCode,Response, Size,AccResponse) of
+ socket_closed ->
+ (catch exit(Pid,final)),
+ {proceed,[{response,{already_sent,200,Size}}|Info#mod.data]};
+ head_sent->
+ (catch exit(Pid,final)),
+ {proceed,[{response,{already_sent,200,Size}}|Info#mod.data]};
+ _ ->
+		    %% The data is sent and the socket is not closed; continue
+ NewSize = mod_cgi:get_new_size(Size,Response),
+ receive_response_data(Info,Pid,NewSize,NewStatusCode,"notempty",Timeout)
+ end;
+ {'EXIT', Pid, Reason} when AccResponse==[] ->
+ {error,not_new_method};
+ {'EXIT', Pid, Reason} when pid(Pid) ->
+ NewStatusCode=mod_cgi:update_status_code(StatusCode,AccResponse),
+ mod_cgi:final_send(Info,NewStatusCode,Size,AccResponse),
+ {proceed, [{response,{already_sent,200,Size}}|Info#mod.data]};
+ %% This should not happen!
+ WhatEver ->
+ NewStatusCode=mod_cgi:update_status_code(StatusCode,AccResponse),
+ mod_cgi:final_send(Info,StatusCode,Size,AccResponse),
+ {proceed, [{response,{already_sent,200,Size}}|Info#mod.data]}
+ after
+ Timeout ->
+ (catch exit(Pid,timeout)), % KILL the port !!!!
+ httpd_socket:close(Info#mod.socket_type,Info#mod.socket),
+ {proceed,[{response,{already_sent,200,Size}}|Info#mod.data]}
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_get.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_get.erl
new file mode 100644
index 0000000000..4136d31669
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_get.erl
@@ -0,0 +1,151 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_get.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_get).
+-export([do/1]).
+-include("httpd.hrl").
+
+%% do
+
+do(Info) ->
+ ?DEBUG("do -> entry",[]),
+ case Info#mod.method of
+ "GET" ->
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ {proceed,Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.data,response) of
+ %% No response has been generated!
+ undefined ->
+ do_get(Info);
+ %% A response has been generated or sent!
+ Response ->
+ {proceed,Info#mod.data}
+ end
+ end;
+ %% Not a GET method!
+ _ ->
+ {proceed,Info#mod.data}
+ end.
+
+
+do_get(Info) ->
+ ?DEBUG("do_get -> Request URI: ~p",[Info#mod.request_uri]),
+ Path = mod_alias:path(Info#mod.data, Info#mod.config_db,
+ Info#mod.request_uri),
+ {FileInfo, LastModified} =get_modification_date(Path),
+
+ send_response(Info#mod.socket,Info#mod.socket_type,Path,Info,FileInfo,LastModified).
+
+
+%%The common case when no range is specified
+send_response(Socket,SocketType,Path,Info,FileInfo,LastModified)->
+ %% Send the file!
+ %% Find the modification date of the file
+ case file:open(Path,[raw,binary]) of
+ {ok, FileDescriptor} ->
+ ?DEBUG("do_get -> FileDescriptor: ~p",[FileDescriptor]),
+ Suffix = httpd_util:suffix(Path),
+ MimeType = httpd_util:lookup_mime_default(Info#mod.config_db,
+ Suffix,"text/plain"),
+ %FileInfo=file:read_file_info(Path),
+ Date = httpd_util:rfc1123_date(),
+ Size = integer_to_list(FileInfo#file_info.size),
+ Header=case Info#mod.http_version of
+ "HTTP/1.1" ->
+ [httpd_util:header(200, MimeType, Info#mod.connection),
+ "Last-Modified: ", LastModified, "\r\n",
+ "Etag: ",httpd_util:create_etag(FileInfo),"\r\n",
+ "Content-Length: ",Size,"\r\n\r\n"];
+ "HTTP/1.0" ->
+ [httpd_util:header(200, MimeType, Info#mod.connection),
+ "Last-Modified: ", LastModified, "\r\n",
+ "Content-Length: ",Size,"\r\n\r\n"]
+ end,
+
+ send(Info#mod.socket_type, Info#mod.socket,
+ Header, FileDescriptor),
+ file:close(FileDescriptor),
+ {proceed,[{response,{already_sent,200,
+ FileInfo#file_info.size}},
+ {mime_type,MimeType}|Info#mod.data]};
+ {error, Reason} ->
+
+ {proceed,
+ [{status,open_error(Reason,Info,Path)}|Info#mod.data]}
+ end.
+
+%% send
+
+send(SocketType,Socket,Header,FileDescriptor) ->
+ ?DEBUG("send -> send header",[]),
+ case httpd_socket:deliver(SocketType,Socket,Header) of
+ socket_closed ->
+ ?LOG("send -> socket closed while sending header",[]),
+ socket_close;
+ _ ->
+ send_body(SocketType,Socket,FileDescriptor)
+ end.
+
+send_body(SocketType,Socket,FileDescriptor) ->
+ case file:read(FileDescriptor,?FILE_CHUNK_SIZE) of
+ {ok,Binary} ->
+ ?DEBUG("send_body -> send another chunk: ~p",[size(Binary)]),
+ case httpd_socket:deliver(SocketType,Socket,Binary) of
+ socket_closed ->
+ ?LOG("send_body -> socket closed while sending",[]),
+ socket_close;
+ _ ->
+ send_body(SocketType,Socket,FileDescriptor)
+ end;
+ eof ->
+ ?DEBUG("send_body -> done with this file",[]),
+ eof
+ end.
+
+
+%% open_error - Handle file open failure
+%%
+open_error(eacces,Info,Path) ->
+ open_error(403,Info,Path,"");
+open_error(enoent,Info,Path) ->
+ open_error(404,Info,Path,"");
+open_error(enotdir,Info,Path) ->
+ open_error(404,Info,Path,
+ ": A component of the file name is not a directory");
+open_error(emfile,_Info,Path) ->
+    open_error(500,none,Path,": Too many open files");
+open_error({enfile,_},_Info,Path) ->
+ open_error(500,none,Path,": File table overflow");
+open_error(_Reason,_Info,Path) ->
+ open_error(500,none,Path,"").
+
+open_error(StatusCode,none,Path,Reason) ->
+ {StatusCode,none,?NICE("Can't open "++Path++Reason)};
+open_error(StatusCode,Info,Path,Reason) ->
+ {StatusCode,Info#mod.request_uri,?NICE("Can't open "++Path++Reason)}.
+
+get_modification_date(Path)->
+ case file:read_file_info(Path) of
+ {ok, FileInfo0} ->
+ {FileInfo0, httpd_util:rfc1123_date(FileInfo0#file_info.mtime)};
+ _ ->
+ {#file_info{},""}
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_head.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_head.erl
new file mode 100644
index 0000000000..ce71e6532e
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_head.erl
@@ -0,0 +1,89 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_head.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_head).
+-export([do/1]).
+
+-include("httpd.hrl").
+
+%% do
+
+do(Info) ->
+ ?DEBUG("do -> entry",[]),
+ case Info#mod.method of
+ "HEAD" ->
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ {proceed,Info#mod.data};
+ %% No status code has been generated!
+ _undefined ->
+ case httpd_util:key1search(Info#mod.data,response) of
+ %% No response has been generated!
+ undefined ->
+ do_head(Info);
+ %% A response has been sent! Nothing to do about it!
+ {already_sent,StatusCode,Size} ->
+ {proceed,Info#mod.data};
+ %% A response has been generated!
+ {StatusCode,Response} ->
+ {proceed,Info#mod.data}
+ end
+ end;
+ %% Not a HEAD method!
+ _ ->
+ {proceed,Info#mod.data}
+ end.
+
+do_head(Info) ->
+ ?DEBUG("do_head -> Request URI: ~p",[Info#mod.request_uri]),
+ Path = mod_alias:path(Info#mod.data,Info#mod.config_db,
+ Info#mod.request_uri),
+ Suffix = httpd_util:suffix(Path),
+    %% Does the file exist?
+ case file:read_file_info(Path) of
+ {ok,FileInfo} ->
+ MimeType=httpd_util:lookup_mime_default(Info#mod.config_db,Suffix,"text/plain"),
+ Length=io_lib:write(FileInfo#file_info.size),
+ Head=[{content_type,MimeType},{content_length,Length},{code,200}],
+ {proceed,[{response,{response,Head,nobody}}|Info#mod.data]};
+ {error,Reason} ->
+ {proceed,
+ [{status,read_file_info_error(Reason,Info,Path)}|Info#mod.data]}
+ end.
+
+%% read_file_info_error - Handle file info read failure
+%%
+read_file_info_error(eacces,Info,Path) ->
+ read_file_info_error(403,Info,Path,"");
+read_file_info_error(enoent,Info,Path) ->
+ read_file_info_error(404,Info,Path,"");
+read_file_info_error(enotdir,Info,Path) ->
+ read_file_info_error(404,Info,Path,
+ ": A component of the file name is not a directory");
+read_file_info_error(emfile,_Info,Path) ->
+    read_file_info_error(500,none,Path,": Too many open files");
+read_file_info_error({enfile,_},_Info,Path) ->
+ read_file_info_error(500,none,Path,": File table overflow");
+read_file_info_error(_Reason,_Info,Path) ->
+ read_file_info_error(500,none,Path,"").
+
+read_file_info_error(StatusCode,none,Path,Reason) ->
+ {StatusCode,none,?NICE("Can't access "++Path++Reason)};
+read_file_info_error(StatusCode,Info,Path,Reason) ->
+ {StatusCode,Info#mod.request_uri,
+ ?NICE("Can't access "++Path++Reason)}.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl
new file mode 100644
index 0000000000..3806ce2e06
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl
@@ -0,0 +1,1136 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_htaccess.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+
+-module(mod_htaccess).
+
+-export([do/1, load/2]).
+-export([debug/0]).
+
+-include("httpd.hrl").
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Public methods that interface the eswapi %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%----------------------------------------------------------------------
+% Public function called by the web server to insert the data about
+% the names of the access files
+%----------------------------------------------------------------------
+load([$A,$c,$c,$e,$s,$s,$F,$i,$l,$e,$N,$a,$m,$e|FileNames],Context)->
+ CleanFileNames=httpd_conf:clean(FileNames),
+ %%io:format("\n The filenames is:" ++ FileNames ++ "\n"),
+ {ok,[],{access_files,string:tokens(CleanFileNames," ")}}.
+
+
+%----------------------------------------------------------------------
+% Public function that the web server calls to check the page
+%----------------------------------------------------------------------
+do(Info)->
+ case httpd_util:key1search(Info#mod.data,status) of
+ {Status_code,PhraseArgs,Reason}->
+ {proceed,Info#mod.data};
+ undefined ->
+ control_path(Info)
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%%  The functions that check whether there is an access file and,    %%
+%%  if so, whether the directory is allowed or not                   %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%----------------------------------------------------------------------
+%Info = record mod as specified in httpd.hrl
+%returns either {proceed,Info#mod.data}
+%{proceed,[{status,403....}|Info#mod.data]}
+%{proceed,[{status,401....}|Info#mod.data]}
+%{proceed,[{status,500....}|Info#mod.data]}
+%----------------------------------------------------------------------
+control_path(Info) ->
+ Path = mod_alias:path(Info#mod.data,
+ Info#mod.config_db,
+ Info#mod.request_uri),
+ case isErlScriptOrNotAccessibleFile(Path,Info) of
+ true->
+ {proceed,Info#mod.data};
+ false->
+ case getHtAccessData(Path,Info)of
+ {ok,public}->
+		%% There were no restrictions on the page; continue
+ {proceed,Info#mod.data};
+ {error,Reason} ->
+		    % Something went wrong; continue or quit?
+ {proceed,Info#mod.data};
+ {accessData,AccessData}->
+ controlAllowedMethod(Info,AccessData)
+ end
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%%  These functions check that the method the client used in the     %%
+%%  request is one of the limited methods                            %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%----------------------------------------------------------------------
+% Check whether the access method used is in the list of methods to challenge
+%
+% Info is the mod record as specified in httpd.hrl
+% AccessData is an ets table with the data in the .htaccess files
+%----------------------------------------------------------------------
+controlAllowedMethod(Info,AccessData)->
+ case allowedRequestMethod(Info,AccessData) of
+ allow->
+	    %% The request didn't use one of the limited methods
+ ets:delete(AccessData),
+ {proceed,Info#mod.data};
+ challenge->
+ authenticateUser(Info,AccessData)
+ end.
+
+%----------------------------------------------------------------------
+% Check the specified access method in the .htaccess file
+%----------------------------------------------------------------------
+allowedRequestMethod(Info,AccessData)->
+ case ets:lookup(AccessData,limit) of
+ [{limit,all}]->
+ challenge;
+ [{limit,Methods}]->
+ isLimitedRequestMethod(Info,Methods)
+ end.
+
+
+%----------------------------------------------------------------------
+% Check the specified access methods in the .htaccess file against the user's
+% access method
+%
+%Info is the record from the do call
+%Methods is a list of the methods specified in the .htaccessfile
+%----------------------------------------------------------------------
+isLimitedRequestMethod(Info,Methods)->
+ case lists:member(Info#mod.method,Methods) of
+ true->
+ challenge;
+ false ->
+ allow
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%%  These functions check that the user comes from an allowed network %%
+%%  and if so whether it is a valid user or a challenge shall be      %%
+%%  generated                                                         %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%----------------------------------------------------------------------
+% The first thing to check is that the user is from a network
+% that has access to the page
+%----------------------------------------------------------------------
+authenticateUser(Info,AccessData)->
+ case controlNet(Info,AccessData) of
+ allow->
+	    % the network is OK; check that it is an allowed user
+ authenticateUser2(Info,AccessData);
+ deny->
+	    % The user isn't allowed to access the pages from that network
+ ets:delete(AccessData),
+ {proceed,[{status,{403,Info#mod.request_uri,
+ "Restricted area not allowed from your network"}}|Info#mod.data]}
+ end.
+
+
+%----------------------------------------------------------------------
+% The network the user comes from is allowed to view the resources;
+% check whether the user needs to supply a password or not
+%----------------------------------------------------------------------
+authenticateUser2(Info,AccessData)->
+ case ets:lookup(AccessData,require) of
+ [{require,AllowedUsers}]->
+ case ets:lookup(AccessData,auth_name) of
+ [{auth_name,Realm}]->
+ authenticateUser2(Info,AccessData,Realm,AllowedUsers);
+ _NoAuthName->
+ ets:delete(AccessData),
+ {break,[{status,{500,none,
+ ?NICE("mod_htaccess:AuthName directive not specified")}}]}
+ end;
+ [] ->
+	    %% No special user is required; the network is OK, so let
+	    %% the user in
+ ets:delete(AccessData),
+ {proceed,Info#mod.data}
+ end.
+
+
+%----------------------------------------------------------------------
+% The user must send a user id and a password to get the resource.
+% Check if it's already in the HTTP request;
+% if the file with users is bad, send a 500 response
+%----------------------------------------------------------------------
+authenticateUser2(Info,AccessData,Realm,AllowedUsers)->
+ case authenticateUser(Info,AccessData,AllowedUsers) of
+ allow ->
+ ets:delete(AccessData),
+ {user,Name,Pwd}=getAuthenticatingDataFromHeader(Info),
+ {proceed, [{remote_user_name,Name}|Info#mod.data]};
+ challenge->
+ ets:delete(AccessData),
+ ReasonPhrase = httpd_util:reason_phrase(401),
+ Message = httpd_util:message(401,none,Info#mod.config_db),
+ {proceed,
+ [{response,
+ {401,
+ ["WWW-Authenticate: Basic realm=\"",Realm,
+ "\"\r\n\r\n","<HTML>\n<HEAD>\n<TITLE>",
+ ReasonPhrase,"</TITLE>\n",
+ "</HEAD>\n<BODY>\n<H1>",ReasonPhrase,
+ "</H1>\n",Message,"\n</BODY>\n</HTML>\n"]}}|
+ Info#mod.data]};
+ deny->
+ ets:delete(AccessData),
+ {break,[{status,{500,none,
+ ?NICE("mod_htaccess:Bad path to user or group file")}}]}
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%%  Functions that validate the network the user comes from          %%
+%% according to the allowed networks %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%---------------------------------------------------------------------
+% Checks the user's network address against the specified networks to
+% allow or deny
+%
+%returns either allow or deny
+%----------------------------------------------------------------------
+controlNet(Info,AccessData)->
+ UserNetwork=getUserNetworkAddress(Info),
+ case getAllowDenyOrder(AccessData) of
+ {_deny,[],_allow,[]}->
+ allow;
+ {deny,[],allow,AllowedNetworks}->
+ controlIfAllowed(AllowedNetworks,UserNetwork,allow,deny);
+ {allow,AllowedNetworks,deny,[]}->
+ controlIfAllowed(AllowedNetworks,UserNetwork,allow,deny);
+
+ {deny,DeniedNetworks,allow,[]}->
+ controlIfAllowed(DeniedNetworks,UserNetwork,allow,deny);
+ {allow,[],deny,DeniedNetworks}->
+ controlIfAllowed(DeniedNetworks,UserNetwork,allow,deny);
+
+ {deny,DeniedNetworks,allow,AllowedNetworks}->
+ controlDenyAllow(DeniedNetworks,AllowedNetworks,UserNetwork);
+ {allow,AllowedNetworks,deny,DeniedNetworks}->
+ controlAllowDeny(AllowedNetworks,DeniedNetworks,UserNetwork)
+ end.
+
+
+%----------------------------------------------------------------------
+% Returns the user's IP number
+%----------------------------------------------------------------------
+getUserNetworkAddress(Info)->
+ {_Socket,Address}=(Info#mod.init_data)#init_data.peername,
+ Address.
+
+
+%----------------------------------------------------------------------
+% Check the user's IP number against the IP numbers in the .htaccess file
+%----------------------------------------------------------------------
+controlIfAllowed(AllowedNetworks,UserNetwork,IfAllowed,IfDenied)->
+ case AllowedNetworks of
+ [{allow,all}]->
+ IfAllowed;
+ [{deny,all}]->
+ IfDenied;
+ [{deny,Networks}]->
+ memberNetwork(Networks,UserNetwork,IfDenied,IfAllowed);
+ [{allow,Networks}]->
+ memberNetwork(Networks,UserNetwork,IfAllowed,IfDenied);
+ _Error->
+ IfDenied
+ end.
+
+
+%---------------------------------------------------------------------%
+% The deny check isn't necessary to perform since the allow check     %
+% overrides the deny check                                            %
+%---------------------------------------------------------------------%
+controlDenyAllow(DeniedNetworks,AllowedNetworks,UserNetwork)->
+ case AllowedNetworks of
+ [{allow,all}]->
+ allow;
+ [{allow,Networks}]->
+ case memberNetwork(Networks,UserNetwork) of
+ true->
+ allow;
+ false->
+ deny
+ end
+ end.
+
+
+%----------------------------------------------------------------------%
+% Check that the user's network is in the allowed list and, if so,     %
+% that it is not in the denied list
+%----------------------------------------------------------------------%
+controlAllowDeny(AllowedNetworks,DeniedNetworks,UserNetwork)->
+ case controlIfAllowed(AllowedNetworks,UserNetwork,allow,deny) of
+ allow->
+ controlIfAllowed(DeniedNetworks,UserNetwork,deny,allow);
+ deny ->
+ deny
+ end.
+
+%----------------------------------------------------------------------
+% Checks whether the user's IP number is in the list of either denied or
+% allowed networks
+%----------------------------------------------------------------------
+memberNetwork(Networks,UserNetwork,IfTrue,IfFalse)->
+ case memberNetwork(Networks,UserNetwork) of
+ true->
+ IfTrue;
+ false->
+ IfFalse
+ end.
+
+
+%----------------------------------------------------------------------
+% Regexp-match the user's IP address against the networks in the list of
+% IP addresses or subnet addresses.
+memberNetwork(Networks,UserNetwork)->
+ case lists:filter(fun(Net)->
+ case regexp:match(UserNetwork,
+ formatRegexp(Net)) of
+ {match,1,_}->
+ true;
+ _NotSubNet ->
+ false
+ end
+ end,Networks) of
+ []->
+ false;
+ MemberNetWork ->
+ true
+ end.
+
+
+%----------------------------------------------------------------------
+% Creates a regexp from an IP number, e.g. "127.0.0" -> "^127[.]0[.]0.*"
+% and "127.0.0." -> "^127[.]0[.]0[.].*"
+%----------------------------------------------------------------------
+formatRegexp(Net)->
+ [SubNet1|SubNets]=string:tokens(Net,"."),
+ NetRegexp=lists:foldl(fun(SubNet,Newnet)->
+ Newnet ++ "[.]" ++SubNet
+ end,"^"++SubNet1,SubNets),
+ case string:len(Net)-string:rchr(Net,$.) of
+ 0->
+ NetRegexp++"[.].*";
+ _->
+ NetRegexp++".*"
+ end.
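+
+%% Illustration only (not part of the original module): a self-contained
+%% sketch of the translation performed by formatRegexp/1, using the same
+%% example networks as the comment above. Not exported, not used by the
+%% module.
+example_formatRegexp() ->
+    "^127[.]0[.]0.*"    = formatRegexp("127.0.0"),
+    "^127[.]0[.]0[.].*" = formatRegexp("127.0.0."),
+    ok.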
+
+
+%----------------------------------------------------------------------
+% If the user has specified whether the allow or the deny check shall be
+% performed first, use that order; if no order is specified, take
+% allow - deny since it is harder than deny - allow
+%----------------------------------------------------------------------
+getAllowDenyOrder(AccessData)->
+ case ets:lookup(AccessData,order) of
+ [{order,{deny,allow}}]->
+ {deny,ets:lookup(AccessData,deny),
+ allow,ets:lookup(AccessData,allow)};
+ _DefaultOrder->
+ {allow,ets:lookup(AccessData,allow),
+ deny,ets:lookup(AccessData,deny)}
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%%  The functions that validate the user                              %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%----------------------------------------------------------------------
+% Check if there is any authentication data in the request header;
+% if so, check it against the users in the list AllowedUsers
+%----------------------------------------------------------------------
+authenticateUser(Info,AccessData,AllowedUsers)->
+ case getAuthenticatingDataFromHeader(Info) of
+ {user,User,PassWord}->
+ authenticateUser(Info,AccessData,AllowedUsers,
+ {user,User,PassWord});
+ {error,nouser}->
+ challenge;
+ {error,BadData}->
+ challenge
+ end.
+
+
+%----------------------------------------------------------------------
+% Returns the authentication data in the HTTP request
+%----------------------------------------------------------------------
+getAuthenticatingDataFromHeader(Info)->
+ PrsedHeader=Info#mod.parsed_header,
+ case httpd_util:key1search(PrsedHeader,"authorization" ) of
+ undefined->
+ {error,nouser};
+ [$B,$a,$s,$i,$c,$\ |EncodedString]->
+ UnCodedString=httpd_util:decode_base64(EncodedString),
+ case httpd_util:split(UnCodedString,":",2) of
+ {ok,[User,PassWord]}->
+ {user,User,PassWord};
+ {error,Error}->
+ {error,Error}
+ end;
+ BadCredentials ->
+ {error,BadCredentials}
+ end.
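+
+%% Illustration only (not part of the original module): with a made-up
+%% request header
+%%
+%%   Authorization: Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==
+%%
+%% the value matches the [$B,$a,$s,$i,$c,$\ |EncodedString] clause above,
+%% the base64 string decodes to "Aladdin:open sesame", and the function
+%% returns {user,"Aladdin","open sesame"}.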
+
+
+%----------------------------------------------------------------------
+%Returns a list of all members of the allowed groups
+%----------------------------------------------------------------------
+getGroupMembers(Groups,AllowedGroups)->
+ Allowed=lists:foldl(fun({group,Name,Members},AllowedMembers)->
+ case lists:member(Name,AllowedGroups) of
+ true->
+ AllowedMembers++Members;
+ false ->
+ AllowedMembers
+ end
+ end,[],Groups),
+ {ok,Allowed}.
+
+authenticateUser(Info,AccessData,{{users,[]},{groups,Groups}},User)->
+ authenticateUser(Info,AccessData,{groups,Groups},User);
+authenticateUser(Info,AccessData,{{users,Users},{groups,[]}},User)->
+ authenticateUser(Info,AccessData,{users,Users},User);
+
+authenticateUser(Info,AccessData,{{users,Users},{groups,Groups}},User)->
+ AllowUser=authenticateUser(Info,AccessData,{users,Users},User),
+ AllowGroup=authenticateUser(Info,AccessData,{groups,Groups},User),
+ case {AllowGroup,AllowUser} of
+ {_,allow}->
+ allow;
+ {allow,_}->
+ allow;
+ {challenge,_}->
+ challenge;
+ {_,challenge}->
+ challenge;
+ {_deny,_deny}->
+ deny
+ end;
+
+
+%----------------------------------------------------------------------
+% Checks that the user is a member of one of the allowed groups
+%----------------------------------------------------------------------
+authenticateUser(Info,AccessData,{groups,AllowedGroups},{user,User,PassWord})->
+ case getUsers(AccessData,group_file) of
+ {group_data,Groups}->
+ case getGroupMembers(Groups,AllowedGroups) of
+ {ok,Members}->
+ authenticateUser(Info,AccessData,{users,Members},
+ {user,User,PassWord});
+ {error,BadData}->
+ deny
+ end;
+ {error,BadData}->
+ deny
+ end;
+
+
+%----------------------------------------------------------------------
+% Check that the user is one of the allowed users and that the password is OK
+%----------------------------------------------------------------------
+authenticateUser(Info,AccessData,{users,AllowedUsers},{user,User,PassWord})->
+ case lists:member(User,AllowedUsers) of
+ true->
+ %Get the usernames and passwords from the file
+ case getUsers(AccessData,user_file) of
+ {error,BadData}->
+ deny;
+ {user_data,Users}->
+ %Users is a list of the users in
+ %the userfile [{user,User,Passwd}]
+ checkPassWord(Users,{user,User,PassWord})
+ end;
+ false ->
+ challenge
+ end.
+
+
+%----------------------------------------------------------------------
+% Check that the user User={user,"UserName","PassWd"} is a
+% member of the list Users
+%----------------------------------------------------------------------
+checkPassWord(Users,User)->
+ case lists:member(User,Users) of
+ true->
+ allow;
+ false->
+ challenge
+ end.
+
+
+%----------------------------------------------------------------------
+%Get the users in the specified file
+% UserOrGroup is an atom that specifies whether it's a group file or a user file
+%i.e. group_file or user_file
+%----------------------------------------------------------------------
+getUsers({file,FileName},UserOrGroup)->
+ case file:open(FileName,[read]) of
+ {ok,AccessFileHandle} ->
+ getUsers({stream,AccessFileHandle},[],UserOrGroup);
+ {error,Reason} ->
+ {error,{Reason,FileName}}
+ end;
+
+
+%----------------------------------------------------------------------
+% The function that starts the lookup of the user files
+%----------------------------------------------------------------------
+
+getUsers(AccessData,UserOrGroup)->
+ case ets:lookup(AccessData,UserOrGroup) of
+ [{UserOrGroup,File}]->
+ getUsers({file,File},UserOrGroup);
+ _ ->
+ {error,noUsers}
+ end.
+
+
+%----------------------------------------------------------------------
+% Reads data from the file handle File into the list FileData and when it
+% reaches the end it returns the list in a tuple {user_data|group_data,FileData}
+%----------------------------------------------------------------------
+getUsers({stream,File},FileData,UserOrGroup)->
+ case io:get_line(File,[]) of
+ eof when UserOrGroup==user_file->
+ {user_data,FileData};
+ eof when UserOrGroup ==group_file->
+ {group_data,FileData};
+ Line ->
+ getUsers({stream,File},
+ formatUser(Line,FileData,UserOrGroup),UserOrGroup)
+ end.
+
+
+%----------------------------------------------------------------------
+%If the line is a comment remove it
+%----------------------------------------------------------------------
+formatUser([$#|UserDataComment],FileData,_UserOrgroup)->
+ FileData;
+
+
+%----------------------------------------------------------------------
+% The user name in the file is Username:Passwd\n.
+% Remove the newline and split the line into
+% UserName and Password
+%----------------------------------------------------------------------
+formatUser(UserData,FileData,UserOrGroup)->
+ case string:tokens(UserData," \r\n")of
+ [User|Whitespace] when UserOrGroup==user_file->
+ case string:tokens(User,":") of
+ [Name,PassWord]->
+ [{user,Name,PassWord}|FileData];
+ _Error->
+ FileData
+ end;
+ GroupData when UserOrGroup==group_file ->
+ parseGroupData(GroupData,FileData);
+ _Error ->
+ FileData
+ end.
+
+
+%----------------------------------------------------------------------
+% If everything is right, GroupData is of the form
+% ["GroupName:", "Member1", "Member2", ..., "MemberN"]
+%----------------------------------------------------------------------
+parseGroupData([GroupName|GroupData],FileData)->
+ [{group,formatGroupName(GroupName),GroupData}|FileData].
+
+
+%----------------------------------------------------------------------
+%the line in the file is GroupName: Member1 Member2 .....MemberN
+%Remove the : from the group name
+%----------------------------------------------------------------------
+formatGroupName(GroupName)->
+ string:strip(GroupName,right,$:).
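+
+%% Illustration only (not part of the original module): a self-contained
+%% sketch of how formatUser/3 parses one line of a made-up user file and of
+%% a made-up group file. Not exported, not used by the module.
+example_formatUser() ->
+    [{user, "alice", "secret"}] =
+        formatUser("alice:secret\n", [], user_file),
+    [{group, "web", ["alice", "bob"]}] =
+        formatUser("web: alice bob\n", [], group_file),
+    ok.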
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% Functions that parses the accessfiles %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%----------------------------------------------------------------------
+% Check that the asset is a real file and not a request for a virtual
+% asset
+%----------------------------------------------------------------------
+isErlScriptOrNotAccessibleFile(Path,Info)->
+ case file:read_file_info(Path) of
+ {ok,_fileInfo}->
+ false;
+ {error,_Reason} ->
+ true
+ end.
+
+
+%----------------------------------------------------------------------
+%Path=PathToTheRequestedFile=String
+% Info=record#mod
+%----------------------------------------------------------------------
+getHtAccessData(Path,Info)->
+ HtAccessFileNames=getHtAccessFileNames(Info),
+ case getData(Path,Info,HtAccessFileNames) of
+ {ok,public}->
+ {ok,public};
+ {accessData,AccessData}->
+ {accessData,AccessData};
+ {error,Reason} ->
+ {error,Reason}
+ end.
+
+
+%----------------------------------------------------------------------
+% Returns the names of the access files
+%----------------------------------------------------------------------
+getHtAccessFileNames(Info)->
+ case httpd_util:lookup(Info#mod.config_db,access_files) of
+ undefined->
+ [".htaccess"];
+ Files->
+ Files
+ end.
+%----------------------------------------------------------------------
+%HtAccessFileNames=["accessfileName1",..."AccessFileName2"]
+%----------------------------------------------------------------------
+getData(Path,Info,HtAccessFileNames)->
+ case regexp:split(Path,"/") of
+ {error,Error}->
+ {error,Error};
+ {ok,SplittedPath}->
+ getData2(HtAccessFileNames,SplittedPath,Info)
+ end.
+
+
+%----------------------------------------------------------------------
+% Add together the data in the SplittedPath up to the path
+% that is the alias or the document root,
+% since we do not need to check for any access files before that
+%----------------------------------------------------------------------
+getData2(HtAccessFileNames,SplittedPath,Info)->
+ case getRootPath(SplittedPath,Info) of
+ {error,Path}->
+ {error,Path};
+ {ok,StartPath,RestOfSplittedPath} ->
+ getData2(HtAccessFileNames,StartPath,RestOfSplittedPath,Info)
+ end.
+
+
+%----------------------------------------------------------------------
+% HtAccessFileNames is a list of the names the access files can have
+% StartPath is the shortest match against all aliases and the document root
+% RestOfSplittedPath is a list of the parts of the path
+% Info is the mod record from the server
+%----------------------------------------------------------------------
+getData2(HtAccessFileNames,StartPath,RestOfSplittedPath,Info)->
+ case getHtAccessFiles(HtAccessFileNames,StartPath,RestOfSplittedPath) of
+ []->
+	    % No access file; quit, it's a public directory
+ {ok,public};
+ Files ->
+ loadAccessFilesData(Files)
+ end.
+
+
+%----------------------------------------------------------------------
+% Loads the data in the access files specified by
+% AccessFiles=["/home/public/html/accessfile",
+%              "/home/public/html/priv/accessfile"]
+%----------------------------------------------------------------------
+loadAccessFilesData(AccessFiles)->
+ loadAccessFilesData(AccessFiles,ets:new(accessData,[])).
+
+
+%----------------------------------------------------------------------
+%Returns the found data
+%----------------------------------------------------------------------
+contextToValues(AccessData)->
+ case ets:lookup(AccessData,context) of
+ [{context,Values}]->
+ ets:delete(AccessData,context),
+ insertContext(AccessData,Values),
+ {accessData,AccessData};
+ _Error->
+ {error,errorInAccessFile}
+ end.
+
+
+insertContext(AccessData,[])->
+ ok;
+
+insertContext(AccessData,[{allow,From}|Values])->
+ insertDenyAllowContext(AccessData,{allow,From}),
+ insertContext(AccessData,Values);
+
+insertContext(AccessData,[{deny,From}|Values])->
+ insertDenyAllowContext(AccessData,{deny,From}),
+ insertContext(AccessData,Values);
+
+insertContext(AccessData,[{require,{GrpOrUsr,Members}}|Values])->
+ case ets:lookup(AccessData,require) of
+ []when GrpOrUsr==users->
+ ets:insert(AccessData,{require,{{users,Members},{groups,[]}}});
+
+ [{require,{{users,Users},{groups,Groups}}}]when GrpOrUsr==users ->
+ ets:insert(AccessData,{require,{{users,Users++Members},
+ {groups,Groups}}});
+ []when GrpOrUsr==groups->
+ ets:insert(AccessData,{require,{{users,[]},{groups,Members}}});
+
+ [{require,{{users,Users},{groups,Groups}}}]when GrpOrUsr==groups ->
+ ets:insert(AccessData,{require,{{users,Users},
+ {groups,Groups++Members}}})
+ end,
+ insertContext(AccessData,Values);
+
+
+
+%% The limit and order directives need no transforming; they are just inserted
+insertContext(AccessData,[Elem|Values])->
+ ets:insert(AccessData,Elem),
+ insertContext(AccessData,Values).
+
+
+insertDenyAllowContext(AccessData,{AllowDeny,From})->
+ case From of
+ all->
+ ets:insert(AccessData,{AllowDeny,all});
+ AllowedSubnets->
+ case ets:lookup(AccessData,AllowDeny) of
+ []->
+ ets:insert(AccessData,{AllowDeny,From});
+ [{AllowDeny,all}]->
+ ok;
+ [{AllowDeny,Networks}]->
+ ets:insert(AccessData,{allow,Networks++From})
+ end
+ end.
+
+loadAccessFilesData([],AccessData)->
+    %transform the collected context into limits
+ contextToValues(AccessData),
+ {accessData,AccessData};
+
+%----------------------------------------------------------------------
+%Takes each file in the list and loads the data into the ets table
+%AccessData
+%----------------------------------------------------------------------
+loadAccessFilesData([FileName|FileNames],AccessData)->
+ case loadAccessFileData({file,FileName},AccessData) of
+ overRide->
+ loadAccessFilesData(FileNames,AccessData);
+ noOverRide ->
+ {accessData,AccessData};
+ error->
+ ets:delete(AccessData),
+ {error,errorInAccessFile}
+ end.
+
+%----------------------------------------------------------------------
+%opens the filehandle to the specified file
+%----------------------------------------------------------------------
+loadAccessFileData({file,FileName},AccessData)->
+ case file:open(FileName,[read]) of
+ {ok,AccessFileHandle}->
+ loadAccessFileData({stream,AccessFileHandle},AccessData,[]);
+ {error,Reason} ->
+ overRide
+ end.
+
+%----------------------------------------------------------------------
+%%Look at each line in the file and add it to the database.
+%%When end of file is reached, check whether override is allowed;
+%%if so, return.
+%----------------------------------------------------------------------
+loadAccessFileData({stream,File},AccessData,FileData)->
+ case io:get_line(File,[]) of
+ eof->
+ insertData(AccessData,FileData),
+ case ets:match_object(AccessData,{'_',error}) of
+ []->
+		    %No error occurred; check that we are allowed to
+		    %override at least some of the values
+ case ets:match_object(AccessData,
+ {allow_over_ride,none}) of
+ []->
+ overRide;
+ _NoOverride->
+ noOverRide
+ end;
+ Errors->
+ error
+ end;
+ Line ->
+ loadAccessFileData({stream,File},AccessData,
+ insertLine(string:strip(Line,left),FileData))
+ end.
+
+%----------------------------------------------------------------------
+%AccessData is an ets table where the previously found data is inserted
+%FileData is a list of the directives in the last parsed file
+%Before inserting a directive, a check is made that it is allowed to
+%override
+%----------------------------------------------------------------------
+insertData(AccessData,{{context,Values},FileData})->
+ insertData(AccessData,[{context,Values}|FileData]);
+
+insertData(AccessData,FileData)->
+ case ets:lookup(AccessData,allow_over_ride) of
+ [{allow_over_ride,all}]->
+ lists:foreach(fun(Elem)->
+ ets:insert(AccessData,Elem)
+ end,FileData);
+ []->
+ lists:foreach(fun(Elem)->
+ ets:insert(AccessData,Elem)
+ end,FileData);
+ [{allow_over_ride,Directives}]when list(Directives)->
+ lists:foreach(fun({Key,Value})->
+ case lists:member(Key,Directives) of
+ true->
+ ok;
+ false ->
+ ets:insert(AccessData,{Key,Value})
+ end
+ end,FileData);
+ [{allow_over_ride,_}]->
+	    %Will never occur unless the user
+	    %has written a very strange config file
+ ok
+ end.
+%----------------------------------------------------------------------
+%Take a line in the access file and transform it into a tuple that
+%can later be inserted into the ets table
+%----------------------------------------------------------------------
+%%%These are the alternatives that reside inside the limit context
+
+insertLine([$o,$r,$d,$e,$r|Order],{{context,Values},FileData})->
+ {{context,[{order,getOrder(Order)}|Values]},FileData};
+%%Let the user place a tab in the beginning
+insertLine([$\t,$o,$r,$d,$e,$r|Order],{{context,Values},FileData})->
+ {{context,[{order,getOrder(Order)}|Values]},FileData};
+
+insertLine([$a,$l,$l,$o,$w|Allow],{{context,Values},FileData})->
+ {{context,[{allow,getAllowDenyData(Allow)}|Values]},FileData};
+insertLine([$\t,$a,$l,$l,$o,$w|Allow],{{context,Values},FileData})->
+ {{context,[{allow,getAllowDenyData(Allow)}|Values]},FileData};
+
+insertLine([$d,$e,$n,$y|Deny],{{context,Values},FileData})->
+ {{context,[{deny,getAllowDenyData(Deny)}|Values]},FileData};
+insertLine([$\t,$d,$e,$n,$y|Deny],{{context,Values},FileData})->
+ {{context,[{deny,getAllowDenyData(Deny)}|Values]},FileData};
+
+
+insertLine([$r,$e,$q,$u,$i,$r,$e|Require],{{context,Values},FileData})->
+ {{context,[{require,getRequireData(Require)}|Values]},FileData};
+insertLine([$\t,$r,$e,$q,$u,$i,$r,$e|Require],{{context,Values},FileData})->
+ {{context,[{require,getRequireData(Require)}|Values]},FileData};
+
+
+insertLine([$<,$/,$L,$i,$m,$i,$t|EndLimit],{Context,FileData})->
+ [Context|FileData];
+
+insertLine([$<,$L,$i,$m,$i,$t|Limit],FileData)->
+ {{context,[{limit,getLimits(Limit)}]}, FileData};
+
+
+
+insertLine([$A,$u,$t,$h,$U,$s,$e,$r,$F,$i,$l,$e,$\ |AuthUserFile],FileData)->
+ [{user_file,string:strip(AuthUserFile,right,$\n)}|FileData];
+
+insertLine([$A,$u,$t,$h,$G,$r,$o,$u,$p,$F,$i,$l,$e,$\ |AuthGroupFile],
+ FileData)->
+ [{group_file,string:strip(AuthGroupFile,right,$\n)}|FileData];
+
+insertLine([$A,$l,$l,$o,$w,$O,$v,$e,$r,$R,$i,$d,$e|AllowOverRide],FileData)->
+ [{allow_over_ride,getAllowOverRideData(AllowOverRide)}
+ |FileData];
+
+insertLine([$A,$u,$t,$h,$N,$a,$m,$e,$\ |AuthName],FileData)->
+ [{auth_name,string:strip(AuthName,right,$\n)}|FileData];
+
+insertLine([$A,$u,$t,$h,$T,$y,$p,$e|AuthType],FileData)->
+ [{auth_type,getAuthorizationType(AuthType)}|FileData];
+
+insertLine(_BadDirectiveOrComment,FileData)->
+ FileData.
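+
+%Illustrative examples of the mapping (the path below is hypothetical):
+% insertLine("AuthName Secret Area\n",[]) -> [{auth_name,"Secret Area"}]
+% insertLine("AuthUserFile /var/www/.users\n",[]) ->
+%                                        [{user_file,"/var/www/.users"}]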
+
+%----------------------------------------------------------------------
+%Transform the override data to a form that is easier
+%to handle later
+%Override data = "all" | "none" | "Directive1 ... DirectiveN"
+%----------------------------------------------------------------------
+
+getAllowOverRideData(OverRideData)->
+ case string:tokens(OverRideData," \r\n") of
+ [[$a,$l,$l]|_]->
+ all;
+ [[$n,$o,$n,$e]|_]->
+ none;
+ Directives ->
+ getOverRideDirectives(Directives)
+ end.
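+
+%Illustrative examples of the expected result:
+% getAllowOverRideData(" all\n") -> all
+% getAllowOverRideData(" AuthUserFile AuthName\n") -> [user_file,auth_name]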
+
+getOverRideDirectives(Directives)->
+ lists:map(fun(Directive)->
+ transformDirective(Directive)
+ end,Directives).
+transformDirective([$A,$u,$t,$h,$U,$s,$e,$r,$F,$i,$l,$e|_])->
+ user_file;
+transformDirective([$A,$u,$t,$h,$G,$r,$o,$u,$p,$F,$i,$l,$e|_]) ->
+ group_file;
+transformDirective([$A,$u,$t,$h,$N,$a,$m,$e|_])->
+ auth_name;
+transformDirective([$A,$u,$t,$h,$T,$y,$p,$e|_])->
+ auth_type;
+transformDirective(_UnAllowedOverRideDirective) ->
+ unallowed.
+%----------------------------------------------------------------------
+%Replace the string that specifies which authentication method to use
+%with the corresponding atom, for easier matching
+%----------------------------------------------------------------------
+getAuthorizationType(AuthType)->
+ [Arg|Crap]=string:tokens(AuthType,"\n\r\ "),
+ case Arg of
+ [$B,$a,$s,$i,$c]->
+ basic;
+ [$M,$D,$5] ->
+ md5;
+ _What ->
+ error
+ end.
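+
+%For example (illustrative): getAuthorizationType(" Basic\n") should
+%yield basic, while an unsupported type such as " Digest\n" yields error.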
+%----------------------------------------------------------------------
+%Returns a list of the specified methods to limit or the atom all
+%----------------------------------------------------------------------
+getLimits(Limits)->
+ case regexp:split(Limits,">")of
+ {ok,[_NoEndOnLimit]}->
+ error;
+ {ok,[Methods|Crap]}->
+ case regexp:split(Methods," ")of
+ {ok,[]}->
+ all;
+ {ok,SplittedMethods}->
+ SplittedMethods;
+ {error,Error}->
+ error
+ end;
+ {error,_Error}->
+ error
+ end.
+
+
+%----------------------------------------------------------------------
+% Transform the order in which to perform the deny/allow control into a tuple of atoms
+%----------------------------------------------------------------------
+getOrder(Order)->
+ [First|Rest]=lists:map(fun(Part)->
+ list_to_atom(Part)
+ end,string:tokens(Order," \n\r")),
+ case First of
+ deny->
+ {deny,allow};
+ allow->
+ {allow,deny};
+ _Error->
+ error
+ end.
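+
+%For example (illustrative): getOrder(" allow deny\n") should yield
+%{allow,deny} and getOrder(" deny allow\n") should yield {deny,allow}.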
+
+%----------------------------------------------------------------------
+% The string AllowDeny is "from all" or "from Subnet1 Subnet2...SubnetN"
+%----------------------------------------------------------------------
+getAllowDenyData(AllowDeny)->
+ case string:tokens(AllowDeny," \n\r") of
+ [_From|AllowDenyData] when length(AllowDenyData)>=1->
+ case lists:nth(1,AllowDenyData) of
+ [$a,$l,$l]->
+ all;
+ Hosts->
+ AllowDenyData
+ end;
+ Error->
+	    error
+ end.
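+
+%Illustrative examples (the subnets below are hypothetical):
+% getAllowDenyData(" from all\n") -> all
+% getAllowDenyData(" from 123.145 213.12\n") -> ["123.145","213.12"]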
+%----------------------------------------------------------------------
+% Parse the string that describes who is allowed to see the page
+%----------------------------------------------------------------------
+getRequireData(Require)->
+ [UserOrGroup|UserData]=string:tokens(Require," \n\r"),
+ case UserOrGroup of
+ [$u,$s,$e,$r]->
+ {users,UserData};
+ [$g,$r,$o,$u,$p] ->
+ {groups,UserData};
+ _Whatever ->
+ error
+ end.
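+
+%Illustrative examples (the names below are hypothetical):
+% getRequireData(" user lotta potta\n") -> {users,["lotta","potta"]}
+% getRequireData(" group admins\n") -> {groups,["admins"]}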
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% Functions that collect the search paths to the access files      %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%----------------------------------------------------------------------
+% Get the whole path to the different accessfiles
+%----------------------------------------------------------------------
+getHtAccessFiles(HtAccessFileNames,Path,RestOfSplittedPath)->
+ getHtAccessFiles(HtAccessFileNames,Path,RestOfSplittedPath,[]).
+
+getHtAccessFiles(HtAccessFileNames,Path,[[]],HtAccessFiles)->
+ HtAccessFiles ++ accessFilesOfPath(HtAccessFileNames,Path++"/");
+
+getHtAccessFiles(HtAccessFileNames,Path,[],HtAccessFiles)->
+ HtAccessFiles;
+getHtAccessFiles(HtAccessFileNames,Path,[NextDir|RestOfSplittedPath],
+ AccessFiles)->
+ getHtAccessFiles(HtAccessFileNames,Path++"/"++NextDir,RestOfSplittedPath,
+ AccessFiles ++
+ accessFilesOfPath(HtAccessFileNames,Path++"/")).
+
+
+%----------------------------------------------------------------------
+%Check whether there are any access files in the path
+%----------------------------------------------------------------------
+accessFilesOfPath(HtAccessFileNames,Path)->
+ lists:foldl(fun(HtAccessFileName,Files)->
+ case file:read_file_info(Path++HtAccessFileName) of
+ {ok,FileInfo}->
+ [Path++HtAccessFileName|Files];
+ {error,_Error} ->
+ Files
+ end
+ end,[],HtAccessFileNames).
+
+
+%----------------------------------------------------------------------
+%Take the split path and join it up to the document root or the alias
+%that matches first
+%----------------------------------------------------------------------
+
+getRootPath(SplittedPath,Info)->
+ DocRoot=httpd_util:lookup(Info#mod.config_db,document_root,"/"),
+ PresumtiveRootPath=
+ [DocRoot|lists:map(fun({Alias,RealPath})->
+ RealPath
+ end,
+ httpd_util:multi_lookup(Info#mod.config_db,alias))],
+ getRootPath(PresumtiveRootPath,SplittedPath,Info).
+
+
+getRootPath(PresumtiveRootPath,[[],Splittedpath],Info)->
+ getRootPath(PresumtiveRootPath,["/",Splittedpath],Info);
+
+
+getRootPath(PresumtiveRootPath,[Part,NextPart|SplittedPath],Info)->
+ case lists:member(Part,PresumtiveRootPath)of
+ true->
+ {ok,Part,[NextPart|SplittedPath]};
+ false ->
+ getRootPath(PresumtiveRootPath,
+ [Part++"/"++NextPart|SplittedPath],Info)
+ end;
+
+getRootPath(PresumtiveRootPath,[Part],Info)->
+ case lists:member(Part,PresumtiveRootPath)of
+ true->
+ {ok,Part,[]};
+ false ->
+ {error,Part}
+ end.
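+
+%For example (illustrative; paths hypothetical): with a document root of
+%"/var/www", a split path ["","var","www","priv","index.html"] should
+%resolve to {ok,"/var/www",["priv","index.html"]}.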
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%Debug methods %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%----------------------------------------------------------------------
+% Simulate the web server by calling do/1 with appropriate parameters
+%----------------------------------------------------------------------
+debug()->
+ Conf=getConfigData(),
+ Uri=getUri(),
+ {_Proceed,Data}=getDataFromAlias(Conf,Uri),
+ Init_data=#init_data{peername={socket,"127.0.0.1"}},
+ ParsedHeader=headerparts(),
+ do(#mod{init_data=Init_data,
+ data=Data,
+ config_db=Conf,
+ request_uri=Uri,
+ parsed_header=ParsedHeader,
+ method="GET"}).
+
+%----------------------------------------------------------------------
+%Add authentication data to the fake HTTP request header
+%----------------------------------------------------------------------
+headerparts()->
+ [{"authorization","Basic " ++ httpd_util:encode_base64("lotta:potta")}].
+
+getDataFromAlias(Conf,Uri)->
+ mod_alias:do(#mod{config_db=Conf,request_uri=Uri}).
+
+getUri()->
+ "/appmon/test/test.html".
+
+getConfigData()->
+ Tab=ets:new(test_inets,[bag,public]),
+ ets:insert(Tab,{server_name,"localhost"}),
+ ets:insert(Tab,{bind_addresss,{127,0,0,1}}),
+ ets:insert(Tab,{erl_script_alias,{"/webcover/erl",["webcover"]}}),
+ ets:insert(Tab,{erl_script_alias,{"/erl",["webappmon"]}}),
+ ets:insert(Tab,{com_type,ip_comm}),
+ ets:insert(Tab,{modules,[mod_alias,mod_auth,mod_header]}),
+ ets:insert(Tab,{default_type,"text/plain"}),
+ ets:insert(Tab,{server_root,
+ "/home/gandalf/marting/exjobb/webtool-1.0/priv/root"}),
+ ets:insert(Tab,{port,8888}),
+ ets:insert(Tab,{document_root,
+ "/home/gandalf/marting/exjobb/webtool-1.0/priv/root"}),
+ ets:insert(Tab,
+ {alias,
+ {"/appmon"
+ ,"/home/gandalf/marting/exjobb/webappmon-1.0/priv"}}),
+ ets:insert(Tab,{alias,
+ {"/webcover"
+ ,"/home/gandalf/marting/exjobb/webcover-1.0/priv"}}),
+    ets:insert(Tab,{access_files,[".htaccess","kalle","pelle"]}),
+ Tab.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_include.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_include.erl
new file mode 100644
index 0000000000..eedbf4a669
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_include.erl
@@ -0,0 +1,722 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_include.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_include).
+-export([do/1,parse/2,config/6,include/6,echo/6,fsize/6,flastmod/6,exec/6]).
+
+-include("httpd.hrl").
+
+-define(VMODULE,"INCLUDE").
+-include("httpd_verbosity.hrl").
+
+%% do
+
+do(Info) ->
+ ?vtrace("do",[]),
+ case Info#mod.method of
+ "GET" ->
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ {proceed,Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.data, response) of
+ %% No response has been generated!
+ undefined ->
+ do_include(Info);
+ %% A response has been generated or sent!
+ Response ->
+ {proceed,Info#mod.data}
+ end
+ end;
+ %% Not a GET method!
+ _ ->
+ {proceed,Info#mod.data}
+ end.
+
+do_include(Info) ->
+ ?vtrace("do_include -> entry with"
+ "~n URI: ~p",[Info#mod.request_uri]),
+ Path = mod_alias:path(Info#mod.data,Info#mod.config_db,
+ Info#mod.request_uri),
+ Suffix = httpd_util:suffix(Path),
+ case httpd_util:lookup_mime_default(Info#mod.config_db,Suffix) of
+ "text/x-server-parsed-html" ->
+ HeaderStart =
+ httpd_util:header(200, "text/html", Info#mod.connection),
+ ?vtrace("do_include -> send ~p", [Path]),
+ case send_in(Info,Path,HeaderStart,file:read_file_info(Path)) of
+ {ok, ErrorLog, Size} ->
+ ?vtrace("do_include -> sent ~w bytes", [Size]),
+ {proceed,[{response,{already_sent,200,Size}},
+ {mime_type,"text/html"}|
+ lists:append(ErrorLog,Info#mod.data)]};
+ {error, Reason} ->
+ ?vlog("send in failed:"
+ "~n Reason: ~p"
+ "~n Path: ~p"
+ "~n Info: ~p",
+ [Reason,Info,Path]),
+ {proceed,
+ [{status,send_error(Reason,Info,Path)}|Info#mod.data]}
+ end;
+ _ -> %% Unknown mime type, ignore
+ {proceed,Info#mod.data}
+ end.
+
+
+%%
+%% config directive
+%%
+
+config(Info, Context, ErrorLog, TagList, ValueList, R) ->
+ case verify_tags("config",[errmsg,timefmt,sizefmt],
+ TagList,ValueList) of
+ ok ->
+ {ok,update_context(TagList,ValueList,Context),ErrorLog,"",R};
+ {error,Reason} ->
+ {ok,Context,[{internal_info,Reason}|ErrorLog],
+ httpd_util:key1search(Context,errmsg,""),R}
+ end.
+
+update_context([],[],Context) ->
+ Context;
+update_context([Tag|R1],[Value|R2],Context) ->
+ update_context(R1,R2,[{Tag,Value}|Context]).
+
+verify_tags(Command,ValidTags,TagList,ValueList) when length(TagList)==length(ValueList) ->
+ verify_tags(Command,ValidTags,TagList);
+verify_tags(Command,ValidTags,TagList,ValueList) ->
+ {error,?NICE(Command++" directive has spurious tags")}.
+
+verify_tags(Command, ValidTags, []) ->
+ ok;
+verify_tags(Command, ValidTags, [Tag|Rest]) ->
+ case lists:member(Tag, ValidTags) of
+ true ->
+ verify_tags(Command, ValidTags, Rest);
+ false ->
+ {error,?NICE(Command++" directive has a spurious tag ("++
+ atom_to_list(Tag)++")")}
+ end.
+
+%%
+%% include directive
+%%
+
+include(Info,Context,ErrorLog,[virtual],[VirtualPath],R) ->
+ Aliases = httpd_util:multi_lookup(Info#mod.config_db,alias),
+ {_, Path, _AfterPath} =
+ mod_alias:real_name(Info#mod.config_db, VirtualPath, Aliases),
+ include(Info,Context,ErrorLog,R,Path);
+include(Info, Context, ErrorLog, [file], [FileName], R) ->
+ Path = file(Info#mod.config_db, Info#mod.request_uri, FileName),
+ include(Info, Context, ErrorLog, R, Path);
+include(Info, Context, ErrorLog, TagList, ValueList, R) ->
+ {ok, Context,
+ [{internal_info,?NICE("include directive has a spurious tag")}|
+ ErrorLog], httpd_util:key1search(Context, errmsg, ""), R}.
+
+include(Info, Context, ErrorLog, R, Path) ->
+ ?DEBUG("include -> read file: ~p",[Path]),
+ case file:read_file(Path) of
+ {ok, Body} ->
+ ?DEBUG("include -> size(Body): ~p",[size(Body)]),
+ {ok, NewContext, NewErrorLog, Result} =
+ parse(Info, binary_to_list(Body), Context, ErrorLog, []),
+ {ok, Context, NewErrorLog, Result, R};
+ {error, Reason} ->
+ {ok, Context,
+ [{internal_info, ?NICE("Can't open "++Path)}|ErrorLog],
+ httpd_util:key1search(Context, errmsg, ""), R}
+ end.
+
+file(ConfigDB, RequestURI, FileName) ->
+ Aliases = httpd_util:multi_lookup(ConfigDB, alias),
+ {_, Path, _AfterPath}
+ = mod_alias:real_name(ConfigDB, RequestURI, Aliases),
+ Pwd = filename:dirname(Path),
+ filename:join(Pwd, FileName).
+
+%%
+%% echo directive
+%%
+
+echo(Info,Context,ErrorLog,[var],["DOCUMENT_NAME"],R) ->
+ {ok,Context,ErrorLog,document_name(Info#mod.data,Info#mod.config_db,
+ Info#mod.request_uri),R};
+echo(Info,Context,ErrorLog,[var],["DOCUMENT_URI"],R) ->
+ {ok,Context,ErrorLog,document_uri(Info#mod.config_db,
+ Info#mod.request_uri),R};
+echo(Info,Context,ErrorLog,[var],["QUERY_STRING_UNESCAPED"],R) ->
+ {ok,Context,ErrorLog,query_string_unescaped(Info#mod.request_uri),R};
+echo(Info,Context,ErrorLog,[var],["DATE_LOCAL"],R) ->
+ {ok,Context,ErrorLog,date_local(),R};
+echo(Info,Context,ErrorLog,[var],["DATE_GMT"],R) ->
+ {ok,Context,ErrorLog,date_gmt(),R};
+echo(Info,Context,ErrorLog,[var],["LAST_MODIFIED"],R) ->
+ {ok,Context,ErrorLog,last_modified(Info#mod.data,Info#mod.config_db,
+ Info#mod.request_uri),R};
+echo(Info,Context,ErrorLog,TagList,ValueList,R) ->
+ {ok,Context,
+ [{internal_info,?NICE("echo directive has a spurious tag")}|
+ ErrorLog],"(none)",R}.
+
+document_name(Data,ConfigDB,RequestURI) ->
+ Path = mod_alias:path(Data,ConfigDB,RequestURI),
+ case regexp:match(Path,"[^/]*\$") of
+ {match,Start,Length} ->
+ string:substr(Path,Start,Length);
+ nomatch ->
+ "(none)"
+ end.
+
+document_uri(ConfigDB, RequestURI) ->
+ Aliases = httpd_util:multi_lookup(ConfigDB, alias),
+ {Path, AfterPath} =
+ case mod_alias:real_name(ConfigDB, RequestURI, Aliases) of
+ {_, Name, {[], []}} ->
+ {Name, ""};
+ {_, Name, {PathInfo, []}} ->
+ {Name, "/"++PathInfo};
+ {_, Name, {PathInfo, QueryString}} ->
+ {Name, "/"++PathInfo++"?"++QueryString};
+ {_, Name, _} ->
+ {Name, ""};
+ Gurka ->
+ io:format("Gurka: ~p~n", [Gurka])
+ end,
+ VirtualPath = string:substr(RequestURI, 1,
+ length(RequestURI)-length(AfterPath)),
+ {match, Start, Length} = regexp:match(Path,"[^/]*\$"),
+ FileName = string:substr(Path,Start,Length),
+ case regexp:match(VirtualPath, FileName++"\$") of
+ {match, _, _} ->
+ httpd_util:decode_hex(VirtualPath)++AfterPath;
+ nomatch ->
+ string:strip(httpd_util:decode_hex(VirtualPath),right,$/)++
+ "/"++FileName++AfterPath
+ end.
+
+query_string_unescaped(RequestURI) ->
+ case regexp:match(RequestURI,"[\?].*\$") of
+ {match,Start,Length} ->
+ %% Escape all shell-special variables with \
+ escape(string:substr(RequestURI,Start+1,Length-1));
+ nomatch ->
+ "(none)"
+ end.
+
+escape([]) -> [];
+escape([$;|R]) -> [$\\,$;|escape(R)];
+escape([$&|R]) -> [$\\,$&|escape(R)];
+escape([$(|R]) -> [$\\,$(|escape(R)];
+escape([$)|R]) -> [$\\,$)|escape(R)];
+escape([$||R]) -> [$\\,$||escape(R)];
+escape([$^|R]) -> [$\\,$^|escape(R)];
+escape([$<|R]) -> [$\\,$<|escape(R)];
+escape([$>|R]) -> [$\\,$>|escape(R)];
+escape([$\n|R]) -> [$\\,$\n|escape(R)];
+escape([$ |R]) -> [$\\,$ |escape(R)];
+escape([$\t|R]) -> [$\\,$\t|escape(R)];
+escape([C|R]) -> [C|escape(R)].
+
+date_local() ->
+ {{Year,Month,Day},{Hour,Minute,Second}}=calendar:local_time(),
+ %% Time format hard-wired to: "%a %b %e %T %Y" according to strftime(3)
+ io_lib:format("~s ~s ~2w ~2.2.0w:~2.2.0w:~2.2.0w ~w",
+ [httpd_util:day(calendar:day_of_the_week(Year,Month,Day)),
+ httpd_util:month(Month),Day,Hour,Minute,Second,Year]).
+
+date_gmt() ->
+ {{Year,Month,Day},{Hour,Minute,Second}}=calendar:universal_time(),
+ %% Time format hard-wired to: "%a %b %e %T %Z %Y" according to strftime(3)
+ io_lib:format("~s ~s ~2w ~2.2.0w:~2.2.0w:~2.2.0w GMT ~w",
+ [httpd_util:day(calendar:day_of_the_week(Year,Month,Day)),
+ httpd_util:month(Month),Day,Hour,Minute,Second,Year]).
+
+last_modified(Data,ConfigDB,RequestURI) ->
+ {ok,FileInfo}=file:read_file_info(mod_alias:path(Data,ConfigDB,RequestURI)),
+ {{Year,Month,Day},{Hour,Minute,Second}}=FileInfo#file_info.mtime,
+ io_lib:format("~s ~s ~2w ~2.2.0w:~2.2.0w:~2.2.0w ~w",
+ [httpd_util:day(calendar:day_of_the_week(Year,Month,Day)),
+ httpd_util:month(Month),Day,Hour,Minute,Second,Year]).
+
+%%
+%% fsize directive
+%%
+
+fsize(Info,Context,ErrorLog,[virtual],[VirtualPath],R) ->
+ Aliases=httpd_util:multi_lookup(Info#mod.config_db,alias),
+ {_,Path,AfterPath}=
+ mod_alias:real_name(Info#mod.config_db,VirtualPath,Aliases),
+ fsize(Info, Context, ErrorLog, R, Path);
+fsize(Info,Context,ErrorLog,[file],[FileName],R) ->
+ Path=file(Info#mod.config_db,Info#mod.request_uri,FileName),
+ fsize(Info,Context,ErrorLog,R,Path);
+fsize(Info,Context,ErrorLog,TagList,ValueList,R) ->
+ {ok,Context,[{internal_info,?NICE("fsize directive has a spurious tag")}|
+ ErrorLog],httpd_util:key1search(Context,errmsg,""),R}.
+
+fsize(Info,Context,ErrorLog,R,Path) ->
+ case file:read_file_info(Path) of
+ {ok,FileInfo} ->
+ case httpd_util:key1search(Context,sizefmt) of
+ "bytes" ->
+ {ok,Context,ErrorLog,
+ integer_to_list(FileInfo#file_info.size),R};
+ "abbrev" ->
+ Size = integer_to_list(trunc(FileInfo#file_info.size/1024+1))++"k",
+ {ok,Context,ErrorLog,Size,R};
+ Value->
+ {ok,Context,
+ [{internal_info,
+ ?NICE("fsize directive has a spurious tag value ("++
+ Value++")")}|
+ ErrorLog],
+ httpd_util:key1search(Context, errmsg, ""), R}
+ end;
+ {error,Reason} ->
+ {ok,Context,[{internal_info,?NICE("Can't open "++Path)}|ErrorLog],
+ httpd_util:key1search(Context,errmsg,""),R}
+ end.
+
+%%
+%% flastmod directive
+%%
+
+flastmod(Info, Context, ErrorLog, [virtual], [VirtualPath],R) ->
+ Aliases=httpd_util:multi_lookup(Info#mod.config_db,alias),
+ {_,Path,AfterPath}=
+ mod_alias:real_name(Info#mod.config_db,VirtualPath,Aliases),
+ flastmod(Info,Context,ErrorLog,R,Path);
+flastmod(Info, Context, ErrorLog, [file], [FileName], R) ->
+ Path = file(Info#mod.config_db, Info#mod.request_uri, FileName),
+ flastmod(Info, Context, ErrorLog, R, Path);
+flastmod(Info,Context,ErrorLog,TagList,ValueList,R) ->
+ {ok,Context,[{internal_info,?NICE("flastmod directive has a spurious tag")}|
+ ErrorLog],httpd_util:key1search(Context,errmsg,""),R}.
+
+flastmod(Info,Context,ErrorLog,R,File) ->
+ case file:read_file_info(File) of
+ {ok,FileInfo} ->
+ {{Yr,Mon,Day},{Hour,Minute,Second}}=FileInfo#file_info.mtime,
+ Result=
+ io_lib:format("~s ~s ~2w ~w:~w:~w ~w",
+ [httpd_util:day(
+ calendar:day_of_the_week(Yr,Mon, Day)),
+ httpd_util:month(Mon),Day,Hour,Minute,Second, Yr]),
+ {ok,Context,ErrorLog,Result,R};
+ {error,Reason} ->
+ {ok,Context,[{internal_info,?NICE("Can't open "++File)}|ErrorLog],
+ httpd_util:key1search(Context,errmsg,""),R}
+ end.
+
+%%
+%% exec directive
+%%
+
+exec(Info,Context,ErrorLog,[cmd],[Command],R) ->
+ ?vtrace("exec cmd:~n Command: ~p",[Command]),
+ cmd(Info,Context,ErrorLog,R,Command);
+exec(Info,Context,ErrorLog,[cgi],[RequestURI],R) ->
+ ?vtrace("exec cgi:~n RequestURI: ~p",[RequestURI]),
+ cgi(Info,Context,ErrorLog,R,RequestURI);
+exec(Info,Context,ErrorLog,TagList,ValueList,R) ->
+ ?vtrace("exec with spurious tag:"
+ "~n TagList: ~p"
+ "~n ValueList: ~p",
+ [TagList,ValueList]),
+ {ok, Context,
+ [{internal_info,?NICE("exec directive has a spurious tag")}|
+ ErrorLog], httpd_util:key1search(Context,errmsg,""),R}.
+
+%% cmd
+
+cmd(Info, Context, ErrorLog, R, Command) ->
+ process_flag(trap_exit,true),
+ Env = env(Info),
+ Dir = filename:dirname(Command),
+ Port = (catch open_port({spawn,Command},[stream,{cd,Dir},{env,Env}])),
+ case Port of
+ P when port(P) ->
+ {NewErrorLog, Result} = proxy(Port, ErrorLog),
+ {ok, Context, NewErrorLog, Result, R};
+ {'EXIT', Reason} ->
+ ?vlog("open port failed: exit"
+ "~n URI: ~p"
+ "~n Reason: ~p",
+ [Info#mod.request_uri,Reason]),
+ exit({open_port_failed,Reason,
+ [{uri,Info#mod.request_uri},{script,Command},
+ {env,Env},{dir,Dir}]});
+ O ->
+ ?vlog("open port failed: unknown result"
+ "~n URI: ~p"
+ "~n O: ~p",
+ [Info#mod.request_uri,O]),
+ exit({open_port_failed,O,
+ [{uri,Info#mod.request_uri},{script,Command},
+ {env,Env},{dir,Dir}]})
+ end.
+
+env(Info) ->
+ [{"DOCUMENT_NAME",document_name(Info#mod.data,Info#mod.config_db,
+ Info#mod.request_uri)},
+ {"DOCUMENT_URI", document_uri(Info#mod.config_db, Info#mod.request_uri)},
+ {"QUERY_STRING_UNESCAPED", query_string_unescaped(Info#mod.request_uri)},
+ {"DATE_LOCAL", date_local()},
+ {"DATE_GMT", date_gmt()},
+ {"LAST_MODIFIED", last_modified(Info#mod.data, Info#mod.config_db,
+ Info#mod.request_uri)}
+ ].
+
+%% cgi
+
+cgi(Info, Context, ErrorLog, R, RequestURI) ->
+ ScriptAliases = httpd_util:multi_lookup(Info#mod.config_db, script_alias),
+ case mod_alias:real_script_name(Info#mod.config_db, RequestURI,
+ ScriptAliases) of
+ {Script, AfterScript} ->
+ exec_script(Info,Script,AfterScript,ErrorLog,Context,R);
+ not_a_script ->
+ {ok, Context,
+ [{internal_info, ?NICE(RequestURI++" is not a script")}|
+ ErrorLog], httpd_util:key1search(Context, errmsg, ""),R}
+ end.
+
+remove_header([]) ->
+ [];
+remove_header([$\n,$\n|Rest]) ->
+ Rest;
+remove_header([C|Rest]) ->
+ remove_header(Rest).
+
+
+exec_script(Info,Script,AfterScript,ErrorLog,Context,R) ->
+ process_flag(trap_exit,true),
+ Aliases = httpd_util:multi_lookup(Info#mod.config_db, alias),
+ {_, Path, AfterPath} = mod_alias:real_name(Info#mod.config_db,
+ Info#mod.request_uri,
+ Aliases),
+ Env = env(Info)++mod_cgi:env(Info, Path, AfterPath),
+ Dir = filename:dirname(Path),
+ Port = (catch open_port({spawn,Script},[stream,{env, Env},{cd, Dir}])),
+ case Port of
+ P when port(P) ->
+ %% Send entity body to port.
+ Res = case Info#mod.entity_body of
+ [] ->
+ true;
+ EntityBody ->
+ (catch port_command(Port,EntityBody))
+ end,
+ case Res of
+ {'EXIT', Reason} ->
+ ?vlog("port send failed:"
+ "~n Port: ~p"
+ "~n URI: ~p"
+ "~n Reason: ~p",
+ [Port,Info#mod.request_uri,Reason]),
+ exit({open_cmd_failed,Reason,
+ [{mod,?MODULE},{port,Port},
+ {uri,Info#mod.request_uri},
+ {script,Script},{env,Env},{dir,Dir},
+ {ebody_size,sz(Info#mod.entity_body)}]});
+ true ->
+ {NewErrorLog, Result} = proxy(Port, ErrorLog),
+ {ok, Context, NewErrorLog, remove_header(Result), R}
+ end;
+ {'EXIT', Reason} ->
+ ?vlog("open port failed: exit"
+ "~n URI: ~p"
+ "~n Reason: ~p",
+ [Info#mod.request_uri,Reason]),
+ exit({open_port_failed,Reason,
+ [{mod,?MODULE},{uri,Info#mod.request_uri},{script,Script},
+ {env,Env},{dir,Dir}]});
+ O ->
+ ?vlog("open port failed: unknown result"
+ "~n URI: ~p"
+ "~n O: ~p",
+ [Info#mod.request_uri,O]),
+ exit({open_port_failed,O,
+ [{mod,?MODULE},{uri,Info#mod.request_uri},{script,Script},
+ {env,Env},{dir,Dir}]})
+ end.
+
+
+%%
+%% Port communication
+%%
+
+proxy(Port,ErrorLog) ->
+ process_flag(trap_exit, true),
+ proxy(Port, ErrorLog, []).
+
+proxy(Port, ErrorLog, Result) ->
+ receive
+ {Port, {data, Response}} ->
+ proxy(Port, ErrorLog, lists:append(Result,Response));
+ {'EXIT', Port, normal} when port(Port) ->
+ process_flag(trap_exit, false),
+ {ErrorLog, Result};
+ {'EXIT', Port, Reason} when port(Port) ->
+ process_flag(trap_exit, false),
+ {[{internal_info,
+ ?NICE("Scrambled output from CGI-script")}|ErrorLog],
+ Result};
+ {'EXIT', Pid, Reason} when pid(Pid) ->
+ process_flag(trap_exit, false),
+ {'EXIT', Pid, Reason};
+ %% This should not happen!
+ WhatEver ->
+ process_flag(trap_exit, false),
+ {ErrorLog, Result}
+ end.
+
+
+%% ------
+%% Temporary until I figure out a way to fix send_in_chunks
+%% (comments and directives that start in one chunk but end
+%% in another are not handled).
+%%
+
+send_in(Info, Path,Head, {ok,FileInfo}) ->
+ case file:read_file(Path) of
+ {ok, Bin} ->
+ send_in1(Info, binary_to_list(Bin), Head, FileInfo);
+ {error, Reason} ->
+ ?vlog("failed reading file: ~p",[Reason]),
+ {error, {open,Reason}}
+ end;
+send_in(Info,Path,Head,{error,Reason}) ->
+ ?vlog("failed open file: ~p",[Reason]),
+ {error, {open,Reason}}.
+
+send_in1(Info, Data,Head,FileInfo) ->
+ {ok, _Context, Err, ParsedBody} = parse(Info,Data,?DEFAULT_CONTEXT,[],[]),
+ Size = length(ParsedBody),
+ ?vdebug("send_in1 -> Size: ~p",[Size]),
+ Head1 = case Info#mod.http_version of
+ "HTTP/1.1"->
+ Head ++
+ "Content-Length: " ++
+ integer_to_list(Size) ++
+ "\r\nEtag:" ++
+ httpd_util:create_etag(FileInfo,Size) ++"\r\n" ++
+ "Last-Modified: " ++
+ httpd_util:rfc1123_date(FileInfo#file_info.mtime) ++
+ "\r\n\r\n";
+ _->
+ %% i.e http/1.0 and http/0.9
+ Head ++
+ "Content-Length: " ++
+ integer_to_list(Size) ++
+ "\r\nLast-Modified: " ++
+ httpd_util:rfc1123_date(FileInfo#file_info.mtime) ++
+ "\r\n\r\n"
+ end,
+ httpd_socket:deliver(Info#mod.socket_type,Info#mod.socket,
+ [Head1,ParsedBody]),
+ {ok, Err, Size}.
+
+
+
+%%
+%% Addition to the "Fuzzy" HTML parser. This is actually an ugly hack to
+%% avoid putting too much data on the heap. To be rewritten...
+%%
+
+% -define(CHUNK_SIZE, 4096).
+
+% send_in_chunks(Info, Path) ->
+% ?DEBUG("send_in_chunks -> Path: ~p",[Path]),
+% case file:open(Path, [read, raw]) of
+% {ok, Stream} ->
+% send_in_chunks(Info, Stream, ?DEFAULT_CONTEXT,[]);
+% {error, Reason} ->
+% ?ERROR("Failed open file: ~p",[Reason]),
+% {error, {open,Reason}}
+% end.
+
+% send_in_chunks(Info, Stream, Context, ErrorLog) ->
+% case file:read(Stream, ?CHUNK_SIZE) of
+% {ok, Data} ->
+% ?DEBUG("send_in_chunks -> read ~p bytes",[length(Data)]),
+% {ok, NewContext, NewErrorLog, ParsedBody}=
+% parse(Info, Data, Context, ErrorLog, []),
+% httpd_socket:deliver(Info#mod.socket_type,
+% Info#mod.socket, ParsedBody),
+% send_in_chunks(Info,Stream,NewContext,NewErrorLog);
+% eof ->
+% {ok, ErrorLog};
+% {error, Reason} ->
+% ?ERROR("Failed read from file: ~p",[Reason]),
+% {error, {read,Reason}}
+% end.
+
+
+%%
+%% "Fuzzy" HTML parser
+%%
+
+parse(Info,Body) ->
+ parse(Info, Body, ?DEFAULT_CONTEXT, [], []).
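+
+%% As an illustrative sketch (hypothetical document), a body containing
+%% <!--#echo var="DATE_LOCAL" --> is recognised by the parse0..parse4
+%% clauses below as the command echo with TagList [var] and ValueList
+%% ["DATE_LOCAL"], which handle/7 then dispatches to echo/6.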
+
+parse(Info, [], Context, ErrorLog, Result) ->
+ {ok, Context, lists:reverse(ErrorLog), lists:reverse(Result)};
+parse(Info,[$<,$!,$-,$-,$#|R1],Context,ErrorLog,Result) ->
+ ?DEBUG("parse -> start command directive when length(R1): ~p",[length(R1)]),
+ case catch parse0(R1,Context) of
+ {parse_error,Reason} ->
+ parse(Info,R1,Context,[{internal_info,?NICE(Reason)}|ErrorLog],
+ [$#,$-,$-,$!,$<|Result]);
+ {ok,Context,Command,TagList,ValueList,R2} ->
+ ?DEBUG("parse -> Command: ~p",[Command]),
+ {ok,NewContext,NewErrorLog,MoreResult,R3}=
+ handle(Info,Context,ErrorLog,Command,TagList,ValueList,R2),
+ parse(Info,R3,NewContext,NewErrorLog,lists:reverse(MoreResult)++Result)
+ end;
+parse(Info,[$<,$!,$-,$-|R1],Context,ErrorLog,Result) ->
+ ?DEBUG("parse -> start comment when length(R1) = ~p",[length(R1)]),
+ case catch parse5(R1,[],0) of
+ {parse_error,Reason} ->
+ ?ERROR("parse -> parse error: ~p",[Reason]),
+ parse(Info,R1,Context,[{internal_info,?NICE(Reason)}|ErrorLog],Result);
+ {Comment,R2} ->
+ ?DEBUG("parse -> length(Comment) = ~p, length(R2) = ~p",
+ [length(Comment),length(R2)]),
+ parse(Info,R2,Context,ErrorLog,Comment++Result)
+ end;
+parse(Info,[C|R],Context,ErrorLog,Result) ->
+ parse(Info,R,Context,ErrorLog,[C|Result]).
+
+handle(Info,Context,ErrorLog,Command,TagList,ValueList,R) ->
+ case catch apply(?MODULE,Command,[Info,Context,ErrorLog,TagList,ValueList,
+ R]) of
+ {'EXIT',{undef,_}} ->
+ throw({parse_error,"Unknown command "++atom_to_list(Command)++
+ " in parsed doc"});
+ Result ->
+ Result
+ end.
+
+parse0([],Context) ->
+ throw({parse_error,"Premature EOF in parsed file"});
+parse0([$-,$-,$>|R],Context) ->
+ throw({parse_error,"Premature EOF in parsed file"});
+parse0([$ |R],Context) ->
+ parse0(R,Context);
+parse0(String,Context) ->
+ parse1(String,Context,"").
+
+parse1([],Context,Command) ->
+ throw({parse_error,"Premature EOF in parsed file"});
+parse1([$-,$-,$>|R],Context,Command) ->
+ throw({parse_error,"Premature EOF in parsed file"});
+parse1([$ |R],Context,Command) ->
+ parse2(R,Context,list_to_atom(lists:reverse(Command)),[],[],"");
+parse1([C|R],Context,Command) ->
+ parse1(R,Context,[C|Command]).
+
+parse2([],Context,Command,TagList,ValueList,Tag) ->
+ throw({parse_error,"Premature EOF in parsed file"});
+parse2([$-,$-,$>|R],Context,Command,TagList,ValueList,Tag) ->
+ {ok,Context,Command,TagList,ValueList,R};
+parse2([$ |R],Context,Command,TagList,ValueList,Tag) ->
+ parse2(R,Context,Command,TagList,ValueList,Tag);
+parse2([$=|R],Context,Command,TagList,ValueList,Tag) ->
+ parse3(R,Context,Command,[list_to_atom(lists:reverse(Tag))|TagList],
+ ValueList);
+parse2([C|R],Context,Command,TagList,ValueList,Tag) ->
+ parse2(R,Context,Command,TagList,ValueList,[C|Tag]).
+
+parse3([],Context,Command,TagList,ValueList) ->
+ throw({parse_error,"Premature EOF in parsed file"});
+parse3([$-,$-,$>|R],Context,Command,TagList,ValueList) ->
+ throw({parse_error,"Premature EOF in parsed file"});
+parse3([$ |R],Context,Command,TagList,ValueList) ->
+ parse3(R,Context,Command,TagList,ValueList);
+parse3([$"|R],Context,Command,TagList,ValueList) ->
+ parse4(R,Context,Command,TagList,ValueList,"");
+parse3(String,Context,Command,TagList,ValueList) ->
+ throw({parse_error,"Premature EOF in parsed file"}).
+
+parse4([],Context,Command,TagList,ValueList,Value) ->
+ throw({parse_error,"Premature EOF in parsed file"});
+parse4([$-,$-,$>|R],Context,Command,TagList,ValueList,Value) ->
+ throw({parse_error,"Premature EOF in parsed file"});
+parse4([$"|R],Context,Command,TagList,ValueList,Value) ->
+ parse2(R,Context,Command,TagList,[lists:reverse(Value)|ValueList],"");
+parse4([C|R],Context,Command,TagList,ValueList,Value) ->
+ parse4(R,Context,Command,TagList,ValueList,[C|Value]).
+
+parse5([],Comment,Depth) ->
+ ?ERROR("parse5 -> unterminated comment of ~p bytes when Depth = ~p",
+ [length(Comment),Depth]),
+ throw({parse_error,"Premature EOF in parsed file"});
+parse5([$<,$!,$-,$-|R],Comment,Depth) ->
+ parse5(R,[$-,$-,$!,$<|Comment],Depth+1);
+parse5([$-,$-,$>|R],Comment,0) ->
+ {">--"++Comment++"--!<",R};
+parse5([$-,$-,$>|R],Comment,Depth) ->
+ parse5(R,[$>,$-,$-|Comment],Depth-1);
+parse5([C|R],Comment,Depth) ->
+ parse5(R,[C|Comment],Depth).
+
+
+sz(B) when binary(B) -> {binary,size(B)};
+sz(L) when list(L) -> {list,length(L)};
+sz(_) -> undefined.
+
+
+%% send_error - Handle failure to send the file
+%%
+send_error({open,Reason},Info,Path) -> open_error(Reason,Info,Path);
+send_error({read,Reason},Info,Path) -> read_error(Reason,Info,Path).
+
+
+%% open_error - Handle file open failure
+%%
+open_error(eacces,Info,Path) ->
+ open_error(403,Info,Path,"");
+open_error(enoent,Info,Path) ->
+ open_error(404,Info,Path,"");
+open_error(enotdir,Info,Path) ->
+ open_error(404,Info,Path,
+ ": A component of the file name is not a directory");
+open_error(emfile,_Info,Path) ->
+    open_error(500,none,Path,": Too many open files");
+open_error({enfile,_},_Info,Path) ->
+ open_error(500,none,Path,": File table overflow");
+open_error(_Reason,_Info,Path) ->
+ open_error(500,none,Path,"").
+
+open_error(StatusCode,none,Path,Reason) ->
+ {StatusCode,none,?NICE("Can't open "++Path++Reason)};
+open_error(StatusCode,Info,Path,Reason) ->
+ {StatusCode,Info#mod.request_uri,?NICE("Can't open "++Path++Reason)}.
+
+read_error(_Reason,_Info,Path) ->
+ read_error(500,none,Path,"").
+
+read_error(StatusCode,none,Path,Reason) ->
+ {StatusCode,none,?NICE("Can't read "++Path++Reason)};
+read_error(StatusCode,Info,Path,Reason) ->
+ {StatusCode,Info#mod.request_uri,?NICE("Can't read "++Path++Reason)}.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_log.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_log.erl
new file mode 100644
index 0000000000..a24ac425e6
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_log.erl
@@ -0,0 +1,250 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_log.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_log).
+-export([do/1,error_log/5,security_log/2,load/2,store/2,remove/1]).
+
+-export([report_error/2]).
+
+-include("httpd.hrl").
+
+-define(VMODULE,"LOG").
+-include("httpd_verbosity.hrl").
+
+%% do
+
+do(Info) ->
+ AuthUser = auth_user(Info#mod.data),
+ Date = custom_date(),
+ log_internal_info(Info,Date,Info#mod.data),
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ transfer_log(Info,"-",AuthUser,Date,StatusCode,0),
+ if
+ StatusCode >= 400 ->
+ error_log(Info,Date,Reason);
+ true ->
+ not_an_error
+ end,
+ {proceed,Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.data,response) of
+ {already_sent,StatusCode,Size} ->
+ transfer_log(Info,"-",AuthUser,Date,StatusCode,Size),
+ {proceed,Info#mod.data};
+ {response,Head,Body} ->
+ Size=httpd_util:key1search(Head,content_length,unknown),
+ Code=httpd_util:key1search(Head,code,unknown),
+ transfer_log(Info,"-",AuthUser,Date,Code,Size),
+ {proceed,Info#mod.data};
+ {StatusCode,Response} ->
+ transfer_log(Info,"-",AuthUser,Date,200,
+ httpd_util:flatlength(Response)),
+ {proceed,Info#mod.data};
+ undefined ->
+ transfer_log(Info,"-",AuthUser,Date,200,0),
+ {proceed,Info#mod.data}
+ end
+ end.
+
+custom_date() ->
+ LocalTime=calendar:local_time(),
+ UniversalTime=calendar:universal_time(),
+ Minutes=round(diff_in_minutes(LocalTime,UniversalTime)),
+ {{YYYY,MM,DD},{Hour,Min,Sec}}=LocalTime,
+ Date =
+ io_lib:format("~.2.0w/~.3s/~.4w:~.2.0w:~.2.0w:~.2.0w ~c~.2.0w~.2.0w",
+ [DD, httpd_util:month(MM), YYYY, Hour, Min, Sec,
+ sign(Minutes),
+ abs(Minutes) div 60, abs(Minutes) rem 60]),
+ lists:flatten(Date).
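+
+%% Illustrative sketch of the resulting format (actual values vary):
+%% custom_date() could return e.g. "27/May/2011:14:38:08 +0200".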
+
+diff_in_minutes(L,U) ->
+ (calendar:datetime_to_gregorian_seconds(L) -
+ calendar:datetime_to_gregorian_seconds(U))/60.
+
+sign(Minutes) when Minutes > 0 ->
+ $+;
+sign(Minutes) ->
+ $-.
+
+auth_user(Data) ->
+ case httpd_util:key1search(Data,remote_user) of
+ undefined ->
+ "-";
+ RemoteUser ->
+ RemoteUser
+ end.
+
+%% log_internal_info
+
+log_internal_info(Info,Date,[]) ->
+ ok;
+log_internal_info(Info,Date,[{internal_info,Reason}|Rest]) ->
+ error_log(Info,Date,Reason),
+ log_internal_info(Info,Date,Rest);
+log_internal_info(Info,Date,[_|Rest]) ->
+ log_internal_info(Info,Date,Rest).
+
+%% transfer_log
+
+transfer_log(Info,RFC931,AuthUser,Date,StatusCode,Bytes) ->
+ case httpd_util:lookup(Info#mod.config_db,transfer_log) of
+ undefined ->
+ no_transfer_log;
+ TransferLog ->
+ {PortNumber,RemoteHost}=(Info#mod.init_data)#init_data.peername,
+ case (catch io:format(TransferLog, "~s ~s ~s [~s] \"~s\" ~w ~w~n",
+ [RemoteHost, RFC931, AuthUser,
+ Date, Info#mod.request_line,
+ StatusCode, Bytes])) of
+ ok ->
+ ok;
+ Error ->
+ error_logger:error_report(Error)
+ end
+ end.
+
+%% security log
+
+security_log(Info, Reason) ->
+ case httpd_util:lookup(Info#mod.config_db, security_log) of
+ undefined ->
+ no_security_log;
+ SecurityLog ->
+ io:format(SecurityLog,"[~s] ~s~n", [custom_date(), Reason])
+ end.
+
+%% error_log
+
+error_log(Info,Date,Reason) ->
+ case httpd_util:lookup(Info#mod.config_db, error_log) of
+ undefined ->
+ no_error_log;
+ ErrorLog ->
+ {PortNumber,RemoteHost}=(Info#mod.init_data)#init_data.peername,
+ io:format(ErrorLog,"[~s] access to ~s failed for ~s, reason: ~p~n",
+ [Date,Info#mod.request_uri,RemoteHost,Reason])
+ end.
+
+error_log(SocketType,Socket,ConfigDB,{PortNumber,RemoteHost},Reason) ->
+ case httpd_util:lookup(ConfigDB,error_log) of
+ undefined ->
+ no_error_log;
+ ErrorLog ->
+ Date=custom_date(),
+ io:format(ErrorLog,"[~s] server crash for ~s, reason: ~p~n",
+ [Date,RemoteHost,Reason]),
+ ok
+ end.
+
+report_error(ConfigDB,Error) ->
+ case httpd_util:lookup(ConfigDB,error_log) of
+ undefined ->
+ no_error_log;
+ ErrorLog ->
+ Date=custom_date(),
+ io:format(ErrorLog,"[~s] reporting error: ~s~n",[Date,Error]),
+ ok
+ end.
+
+%%
+%% Configuration
+%%
+
+%% load
+
+load([$T,$r,$a,$n,$s,$f,$e,$r,$L,$o,$g,$ |TransferLog],[]) ->
+ {ok,[],{transfer_log,httpd_conf:clean(TransferLog)}};
+load([$E,$r,$r,$o,$r,$L,$o,$g,$ |ErrorLog],[]) ->
+ {ok,[],{error_log,httpd_conf:clean(ErrorLog)}};
+load([$S,$e,$c,$u,$r,$i,$t,$y,$L,$o,$g,$ |SecurityLog], []) ->
+ {ok, [], {security_log, httpd_conf:clean(SecurityLog)}}.
+
+%% store
+
+store({transfer_log,TransferLog},ConfigList) ->
+ case create_log(TransferLog,ConfigList) of
+ {ok,TransferLogStream} ->
+ {ok,{transfer_log,TransferLogStream}};
+ {error,Reason} ->
+ {error,Reason}
+ end;
+store({error_log,ErrorLog},ConfigList) ->
+ case create_log(ErrorLog,ConfigList) of
+ {ok,ErrorLogStream} ->
+ {ok,{error_log,ErrorLogStream}};
+ {error,Reason} ->
+ {error,Reason}
+ end;
+store({security_log, SecurityLog},ConfigList) ->
+ case create_log(SecurityLog, ConfigList) of
+ {ok, SecurityLogStream} ->
+ {ok, {security_log, SecurityLogStream}};
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+create_log(LogFile,ConfigList) ->
+ Filename = httpd_conf:clean(LogFile),
+ case filename:pathtype(Filename) of
+ absolute ->
+ case file:open(Filename, [read,write]) of
+ {ok,LogStream} ->
+ file:position(LogStream,{eof,0}),
+ {ok,LogStream};
+ {error,_} ->
+ {error,?NICE("Can't create "++Filename)}
+ end;
+ volumerelative ->
+ case file:open(Filename, [read,write]) of
+ {ok,LogStream} ->
+ file:position(LogStream,{eof,0}),
+ {ok,LogStream};
+ {error,_} ->
+ {error,?NICE("Can't create "++Filename)}
+ end;
+ relative ->
+ case httpd_util:key1search(ConfigList,server_root) of
+ undefined ->
+ {error,
+ ?NICE(Filename++
+			  " is an invalid logfile name because ServerRoot is not defined")};
+ ServerRoot ->
+ AbsoluteFilename=filename:join(ServerRoot,Filename),
+ case file:open(AbsoluteFilename, [read,write]) of
+ {ok,LogStream} ->
+ file:position(LogStream,{eof,0}),
+ {ok,LogStream};
+ {error,Reason} ->
+ {error,?NICE("Can't create "++AbsoluteFilename)}
+ end
+ end
+ end.
+
+%% remove
+
+remove(ConfigDB) ->
+ lists:foreach(fun([Stream]) -> file:close(Stream) end,
+ ets:match(ConfigDB,{transfer_log,'$1'})),
+ lists:foreach(fun([Stream]) -> file:close(Stream) end,
+ ets:match(ConfigDB,{error_log,'$1'})),
+ lists:foreach(fun([Stream]) -> file:close(Stream) end,
+ ets:match(ConfigDB,{security_log,'$1'})),
+ ok.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl
new file mode 100644
index 0000000000..f623dc3ec8
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl
@@ -0,0 +1,380 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_range.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_range).
+-export([do/1]).
+-include("httpd.hrl").
+
+%% do
+
+
+
+do(Info) ->
+ ?DEBUG("do -> entry",[]),
+ case Info#mod.method of
+ "GET" ->
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ {proceed,Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.data,response) of
+ %% No response has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.parsed_header,"range") of
+ undefined ->
+ %Not a range response
+ {proceed,Info#mod.data};
+ Range ->
+				%%Check that there was no If-Range field that cancelled
+				%%the range request in favour of the whole file
+ case httpd_util:key1search(Info#mod.data,if_range) of
+ send_file ->
+ {proceed,Info#mod.data};
+ _undefined ->
+ do_get_range(Info,Range)
+ end
+ end;
+ %% A response has been generated or sent!
+ Response ->
+ {proceed,Info#mod.data}
+ end
+ end;
+ %% Not a GET method!
+ _ ->
+ {proceed,Info#mod.data}
+ end.
+
+do_get_range(Info,Ranges) ->
+ ?DEBUG("do_get_range -> Request URI: ~p",[Info#mod.request_uri]),
+ Path = mod_alias:path(Info#mod.data, Info#mod.config_db,
+ Info#mod.request_uri),
+ {FileInfo, LastModified} =get_modification_date(Path),
+ send_range_response(Path,Info,Ranges,FileInfo,LastModified).
+
+
+send_range_response(Path,Info,Ranges,FileInfo,LastModified)->
+ case parse_ranges(Ranges) of
+ error->
+ ?ERROR("send_range_response-> Unparsable range request",[]),
+ {proceed,Info#mod.data};
+ {multipart,RangeList}->
+ send_multi_range_response(Path,Info,RangeList);
+ {Start,Stop}->
+ send_range_response(Path,Info,Start,Stop,FileInfo,LastModified)
+ end.
+%%More than one range was specified.
+%%Send a multipart response to the user.
+%
+%%An example of a multipart range response:
+
+% HTTP/1.1 206 Partial Content
+% Date:Wed 15 Nov 1995 04:08:23 GMT
+% Last-modified:Wed 14 Nov 1995 04:08:23 GMT
+% Content-type: multipart/byteranges; boundary="SeparatorString"
+%
+% --"SeparatorString"
+% Content-Type: application/pdf
+% Content-Range: bytes 500-600/1010
+% .... The data..... 101 bytes
+%
+% --"SeparatorString"
+% Content-Type: application/pdf
+% Content-Range: bytes 700-1009/1010
+% .... The data.....
+
+
+
+send_multi_range_response(Path,Info,RangeList)->
+ case file:open(Path, [raw,binary]) of
+ {ok, FileDescriptor} ->
+ file:close(FileDescriptor),
+ ?DEBUG("send_multi_range_response -> FileDescriptor: ~p",[FileDescriptor]),
+ Suffix = httpd_util:suffix(Path),
+ PartMimeType = httpd_util:lookup_mime_default(Info#mod.config_db,Suffix,"text/plain"),
+ Date = httpd_util:rfc1123_date(),
+ {FileInfo,LastModified}=get_modification_date(Path),
+ case valid_ranges(RangeList,Path,FileInfo) of
+ {ValidRanges,true}->
+ ?DEBUG("send_multi_range_response -> Ranges are valid:",[]),
+ %Apache breaks the standard by sending the size field in the Header.
+ Header = [{code,206},
+ {content_type,"multipart/byteranges;boundary=RangeBoundarySeparator"},
+ {etag,httpd_util:create_etag(FileInfo)},
+ {last_modified,LastModified}
+ ],
+		    ?DEBUG("send_multi_range_response -> Valid Ranges: ~p",[RangeList]),
+ Body={fun send_multiranges/4,[ValidRanges,Info,PartMimeType,Path]},
+ {proceed,[{response,{response,Header,Body}}|Info#mod.data]};
+ _ ->
+ {proceed, [{status, {416,"Range not valid",bad_range_boundaries }}]}
+ end;
+ {error, Reason} ->
+ ?ERROR("do_get -> failed open file: ~p",[Reason]),
+ {proceed,Info#mod.data}
+ end.
+
+send_multiranges(ValidRanges,Info,PartMimeType,Path)->
+ ?DEBUG("send_multiranges -> Start sending the ranges",[]),
+ case file:open(Path, [raw,binary]) of
+ {ok,FileDescriptor} ->
+ lists:foreach(fun(Range)->
+ send_multipart_start(Range,Info,PartMimeType,FileDescriptor)
+ end,ValidRanges),
+ file:close(FileDescriptor),
+ %%Sends an end of the multipart
+ httpd_socket:deliver(Info#mod.socket_type,Info#mod.socket,"\r\n--RangeBoundarySeparator--"),
+ sent;
+ _ ->
+ close
+ end.
+
+send_multipart_start({{Start,End},{StartByte,EndByte,Size}},Info,PartMimeType,FileDescriptor)when StartByte<Size->
+ PartHeader=["\r\n--RangeBoundarySeparator\r\n","Content-type: ",PartMimeType,"\r\n",
+ "Content-Range:bytes=",integer_to_list(StartByte),"-",integer_to_list(EndByte),"/",
+ integer_to_list(Size),"\r\n\r\n"],
+ send_part_start(Info#mod.socket_type,Info#mod.socket,PartHeader,FileDescriptor,Start,End);
+
+
+send_multipart_start({{Start,End},{StartByte,EndByte,Size}},Info,PartMimeType,FileDescriptor)->
+ PartHeader=["\r\n--RangeBoundarySeparator\r\n","Content-type: ",PartMimeType,"\r\n",
+ "Content-Range:bytes=",integer_to_list(Size-(StartByte-Size)),"-",integer_to_list(EndByte),"/",
+ integer_to_list(Size),"\r\n\r\n"],
+ send_part_start(Info#mod.socket_type,Info#mod.socket,PartHeader,FileDescriptor,Start,End).
+
+send_part_start(SocketType,Socket,PartHeader,FileDescriptor,Start,End)->
+ case httpd_socket:deliver(SocketType,Socket,PartHeader) of
+ ok ->
+ send_part_start(SocketType,Socket,FileDescriptor,Start,End);
+ _ ->
+ close
+ end.
+
+send_range_response(Path,Info,Start,Stop,FileInfo,LastModified)->
+ case file:open(Path, [raw,binary]) of
+ {ok, FileDescriptor} ->
+ file:close(FileDescriptor),
+ ?DEBUG("send_range_response -> FileDescriptor: ~p",[FileDescriptor]),
+ Suffix = httpd_util:suffix(Path),
+ MimeType = httpd_util:lookup_mime_default(Info#mod.config_db,Suffix,"text/plain"),
+ Date = httpd_util:rfc1123_date(),
+ Size = get_range_size(Start,Stop,FileInfo),
+ case valid_range(Start,Stop,FileInfo) of
+ {true,StartByte,EndByte,TotByte}->
+ Head=[{code,206},{content_type, MimeType},
+ {last_modified, LastModified},
+ {etag,httpd_util:create_etag(FileInfo)},
+ {content_range,["bytes=",integer_to_list(StartByte),"-",
+ integer_to_list(EndByte),"/",integer_to_list(TotByte)]},
+ {content_length,Size}],
+ BodyFunc=fun send_range_body/5,
+ Arg=[Info#mod.socket_type, Info#mod.socket,Path,Start,Stop],
+ {proceed,[{response,{response,Head,{BodyFunc,Arg}}}|Info#mod.data]};
+ {false,Reason} ->
+ {proceed, [{status, {416,Reason,bad_range_boundaries }}]}
+ end;
+ {error, Reason} ->
+ ?ERROR("send_range_response -> failed open file: ~p",[Reason]),
+ {proceed,Info#mod.data}
+ end.
+
+
+send_range_body(SocketType,Socket,Path,Start,End) ->
+ ?DEBUG("mod_range -> send_range_body",[]),
+ case file:open(Path, [raw,binary]) of
+ {ok,FileDescriptor} ->
+ send_part_start(SocketType,Socket,FileDescriptor,Start,End),
+ file:close(FileDescriptor);
+ _ ->
+ close
+ end.
+
+send_part_start(SocketType,Socket,FileDescriptor,Start,End) ->
+ case Start of
+ from_end ->
+ file:position(FileDescriptor,{eof,End}),
+ send_body(SocketType,Socket,FileDescriptor);
+ from_start ->
+ file:position(FileDescriptor,{bof,End}),
+ send_body(SocketType,Socket,FileDescriptor);
+ Byte when integer(Byte) ->
+ file:position(FileDescriptor,{bof,Start}),
+ send_part(SocketType,Socket,FileDescriptor,End)
+ end,
+ sent.
+
+
+%%This function could replace send_body by calling it with Start=0 and End=FileSize,
+%%but that would probably be a bad idea from a performance point of view.
+send_part(SocketType,Socket,FileDescriptor,End)->
+ case file:position(FileDescriptor,{cur,0}) of
+ {ok,NewPos} ->
+ if
+ NewPos > End ->
+ ok;
+ true ->
+ Size=get_file_chunk_size(NewPos,End,?FILE_CHUNK_SIZE),
+ case file:read(FileDescriptor,Size) of
+ eof ->
+ ok;
+ {error,Reason} ->
+ ok;
+ {ok,Binary} ->
+ case httpd_socket:deliver(SocketType,Socket,Binary) of
+ socket_closed ->
+ ?LOG("send_range of body -> socket closed while sending",[]),
+ socket_close;
+ _ ->
+ send_part(SocketType,Socket,FileDescriptor,End)
+ end
+ end
+ end;
+ _->
+ ok
+ end.
+
+%% Validate that the range is within the limits of the file
+valid_ranges(RangeList,Path,FileInfo)->
+ lists:mapfoldl(fun({Start,End},Acc)->
+ case Acc of
+ true ->
+ case valid_range(Start,End,FileInfo) of
+ {true,StartB,EndB,Size}->
+ {{{Start,End},{StartB,EndB,Size}},true};
+ _ ->
+ false
+ end;
+ _ ->
+ {false,false}
+ end
+ end,true,RangeList).
+
+
+
+valid_range(from_end,End,FileInfo)->
+ Size=FileInfo#file_info.size,
+ if
+ End < Size ->
+ {true,(Size+End),Size-1,Size};
+ true ->
+ false
+ end;
+valid_range(from_start,End,FileInfo)->
+ Size=FileInfo#file_info.size,
+ if
+ End < Size ->
+ {true,End,Size-1,Size};
+ true ->
+ false
+ end;
+
+valid_range(Start,End,FileInfo)when Start=<End->
+ case FileInfo#file_info.size of
+ FileSize when Start< FileSize ->
+ case FileInfo#file_info.size of
+ Size when End<Size ->
+ {true,Start,End,FileInfo#file_info.size};
+ Size ->
+ {true,Start,Size-1,Size}
+ end;
+ _->
+ {false,"The size of the range is negative"}
+ end;
+
+valid_range(Start,End,FileInfo)->
+ {false,"Range starts out of file boundaries"}.
+%% Find the modification date of the file
+get_modification_date(Path)->
+ case file:read_file_info(Path) of
+ {ok, FileInfo0} ->
+ {FileInfo0, httpd_util:rfc1123_date(FileInfo0#file_info.mtime)};
+ _ ->
+ {#file_info{},""}
+ end.
+
+%Calculate the size of the chunk to read
+
+get_file_chunk_size(Position,End,DefaultChunkSize)when (Position+DefaultChunkSize) =< End->
+ DefaultChunkSize;
+get_file_chunk_size(Position,End,DefaultChunkSize)->
+ (End-Position) +1.
+
+
+
+%Get the size of the range to send. Remember that
+%A range goes from StartByte up to EndByte, which means that
+%the number of bytes in a range is (EndByte-StartByte)+1
+
+get_range_size(from_end,Stop,FileInfo)->
+ integer_to_list(-1*Stop);
+
+get_range_size(from_start,StartByte,FileInfo) ->
+ integer_to_list((((FileInfo#file_info.size)-StartByte)));
+
+get_range_size(StartByte,EndByte,FileInfo) ->
+ integer_to_list((EndByte-StartByte)+1).
+
+parse_ranges([$\ ,$b,$y,$t,$e,$s,$\=|Ranges])->
+ parse_ranges([$b,$y,$t,$e,$s,$\=|Ranges]);
+parse_ranges([$b,$y,$t,$e,$s,$\=|Ranges])->
+ case string:tokens(Ranges,", ") of
+ [Range] ->
+ parse_range(Range);
+ [Range1|SplittedRanges]->
+ {multipart,lists:map(fun parse_range/1,[Range1|SplittedRanges])}
+ end;
+%Bad unit
+parse_ranges(Ranges)->
+ io:format("Bad Ranges : ~p",[Ranges]),
+ error.
+%Parse the range specification from the request into {Start,End}
+%Start, End : numeric string | []
+
+parse_range(Range)->
+ format_range(split_range(Range,[],[])).
+format_range({[],BytesFromEnd})->
+ {from_end,-1*(list_to_integer(BytesFromEnd))};
+format_range({StartByte,[]})->
+ {from_start,list_to_integer(StartByte)};
+format_range({StartByte,EndByte})->
+ {list_to_integer(StartByte),list_to_integer(EndByte)}.
+%The last clause returns the split range
+split_range([],Current,Other)->
+ {lists:reverse(Other),lists:reverse(Current)};
+
+split_range([$-|Rest],Current,Other)->
+ split_range(Rest,Other,Current);
+
+split_range([N|Rest],Current,End) ->
+ split_range(Rest,[N|Current],End).
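+
+%For illustration (header values assumed), parse_ranges/1 maps a Range header
+%value to the internal representation used above:
+% parse_ranges("bytes=500-999") -> {500,999}
+% parse_ranges("bytes=9500-") -> {from_start,9500}
+% parse_ranges("bytes=-500") -> {from_end,-500}
+% parse_ranges("bytes=0-499,500-999") -> {multipart,[{0,499},{500,999}]}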
+
+send_body(SocketType,Socket,FileDescriptor) ->
+ case file:read(FileDescriptor,?FILE_CHUNK_SIZE) of
+ {ok,Binary} ->
+ ?DEBUG("send_body -> send another chunk: ~p",[size(Binary)]),
+ case httpd_socket:deliver(SocketType,Socket,Binary) of
+ socket_closed ->
+ ?LOG("send_body -> socket closed while sending",[]),
+ socket_close;
+ _ ->
+ send_body(SocketType,Socket,FileDescriptor)
+ end;
+ eof ->
+ ?DEBUG("send_body -> done with this file",[]),
+ eof
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl
new file mode 100644
index 0000000000..b818a15f32
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl
@@ -0,0 +1,320 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_responsecontrol.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+
+-module(mod_responsecontrol).
+-export([do/1]).
+
+-include("httpd.hrl").
+
+
+do(Info) ->
+ ?DEBUG("do -> response_control",[]),
+ case httpd_util:key1search(Info#mod.data,status) of
+ %% A status code has been generated!
+ {StatusCode,PhraseArgs,Reason} ->
+ {proceed,Info#mod.data};
+ %% No status code has been generated!
+ undefined ->
+ case httpd_util:key1search(Info#mod.data,response) of
+ %% No response has been generated!
+ undefined ->
+ case do_responsecontrol(Info) of
+ continue ->
+ {proceed,Info#mod.data};
+ Response ->
+ {proceed,[Response|Info#mod.data]}
+ end;
+ %% A response has been generated or sent!
+ Response ->
+ {proceed,Info#mod.data}
+ end
+ end.
+
+
+%%----------------------------------------------------------------------
+%%Check that the request headers do not contain any restrictions on
+%%whether a response shall be created or not
+%%----------------------------------------------------------------------
+
+do_responsecontrol(Info) ->
+ ?DEBUG("do_response_control -> Request URI: ~p",[Info#mod.request_uri]),
+ Path = mod_alias:path(Info#mod.data, Info#mod.config_db,
+ Info#mod.request_uri),
+ case file:read_file_info(Path) of
+ {ok, FileInfo} ->
+ control(Path,Info,FileInfo);
+ _ ->
+ %% The requested asset is not a plain file, so it must
+ %% be generated every time it is requested
+ continue
+ end.
+
+%%----------------------------------------------------------------------
+%%Control the If-Match, If-None-Match, and If-Modified-Since
+%%----------------------------------------------------------------------
+
+
+%% If a client sends more than one of the If-XXXX fields in a request,
+%% the standard does not specify the behaviour, so I specified it :-)
+%% The priority between the fields is
+%% 1.If-modified
+%% 2.If-Unmodified
+%% 3.If-Match
+%% 4.If-Nomatch
+
+%% This means that if more than one of the fields is present in the request,
+%% the field with the highest priority will be used.
+
+%%If the request is a range request, the If-Range field takes precedence.
+
+control(Path,Info,FileInfo)->
+ case control_range(Path,Info,FileInfo) of
+ undefined ->
+ case control_Etag(Path,Info,FileInfo) of
+ undefined ->
+ case control_modification(Path,Info,FileInfo) of
+ continue ->
+ continue;
+ ReturnValue ->
+ send_return_value(ReturnValue,FileInfo)
+ end;
+ continue ->
+ continue;
+ ReturnValue ->
+ send_return_value(ReturnValue,FileInfo)
+ end;
+ Response->
+ Response
+ end.
+
+%%----------------------------------------------------------------------
+%%If there are both a Range and an If-Range header field, check whether
+%%the If-Range condition is fulfilled
+%%----------------------------------------------------------------------
+control_range(Path,Info,FileInfo) ->
+ case httpd_util:key1search(Info#mod.parsed_header,"range") of
+ undefined->
+ undefined;
+ _Range ->
+ case httpd_util:key1search(Info#mod.parsed_header,"if-range") of
+ undefined ->
+ undefined;
+ EtagOrDate ->
+ control_if_range(Path,Info,FileInfo,EtagOrDate)
+ end
+ end.
+
+control_if_range(Path,Info,FileInfo,EtagOrDate) ->
+ case httpd_util:convert_request_date(strip_date(EtagOrDate)) of
+ bad_date ->
+ FileEtag=httpd_util:create_etag(FileInfo),
+ case FileEtag of
+ EtagOrDate ->
+ continue;
+ _ ->
+ {if_range,send_file}
+ end;
+ ErlDate ->
+ %%The If-Range value parsed as a date; compare it with the file's
+ %%modification time
+ case control_modification_data(Info,FileInfo#file_info.mtime,"if-range") of
+ modified ->
+ {if_range,send_file};
+ _UnmodifiedOrUndefined->
+ continue
+ end
+ end.
+
+%%----------------------------------------------------------------------
+%%Check the values of the If-Match and If-None-Match header fields
+%%----------------------------------------------------------------------
+control_Etag(Path,Info,FileInfo)->
+ FileEtag=httpd_util:create_etag(FileInfo),
+ %%Check whether the ETag for the resource matches one of the ETags in
+ %%the If-Match header field
+ case control_match(Info,FileInfo,"if-match",FileEtag) of
+ nomatch ->
+ %%None of the ETags in the If-Match field matched the current
+ %%ETag for the resource; return a 412
+ {412,Info,Path};
+ match ->
+ continue;
+ undefined ->
+ case control_match(Info,FileInfo,"if-none-match",FileEtag) of
+ nomatch ->
+ continue;
+ match ->
+ case Info#mod.method of
+ "GET" ->
+ {304,Info,Path};
+ "HEAD" ->
+ {304,Info,Path};
+ _OtherrequestMethod ->
+ {412,Info,Path}
+ end;
+ undefined ->
+ undefined
+ end
+ end.
+
+%%----------------------------------------------------------------------
+%%Check whether there are any ETags for HeaderField in the request; if so,
+%%check whether any of them matches the ETag of the requested file
+%%----------------------------------------------------------------------
+control_match(Info,FileInfo,HeaderField,FileEtag)->
+ case split_etags(httpd_util:key1search(Info#mod.parsed_header,HeaderField)) of
+ undefined->
+ undefined;
+ Etags->
+ %%Check whether the match-any wildcard "*" is present
+ case lists:member("*",Etags) of
+ true->
+ match;
+ false->
+ compare_etags(FileEtag,Etags)
+ end
+ end.
+
+%%----------------------------------------------------------------------
+%%Split the etags from the request
+%%----------------------------------------------------------------------
+split_etags(undefined)->
+ undefined;
+split_etags(Tags) ->
+ string:tokens(Tags,", ").
+
+%%----------------------------------------------------------------------
+%%Check whether the ETag for the file is in the list
+%%----------------------------------------------------------------------
+compare_etags(Tag,Etags) ->
+ case lists:member(Tag,Etags) of
+ true ->
+ match;
+ _ ->
+ nomatch
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%%Check whether the file has been modified %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%%----------------------------------------------------------------------
+%%Check the If-Modified-Since and If-Unmodified-Since header fields
+%%----------------------------------------------------------------------
+control_modification(Path,Info,FileInfo)->
+ ?DEBUG("control_modification() -> entry",[]),
+ case control_modification_data(Info,FileInfo#file_info.mtime,"if-modified-since") of
+ modified->
+ continue;
+ unmodified->
+ {304,Info,Path};
+ undefined ->
+ case control_modification_data(Info,FileInfo#file_info.mtime,"if-unmodified-since") of
+ modified ->
+ {412,Info,Path};
+ _ContinueUndefined ->
+ continue
+ end
+ end.
+
+%%----------------------------------------------------------------------
+%%Check the date from the request's If-Modified-Since or
+%%If-Unmodified-Since field against the modification time of the
+%%file
+%%----------------------------------------------------------------------
+%%Info is the record describing the request
+%%ModificationTime is the time the file was last modified
+%%HeaderField is the name of the header field to check
+
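+%%For illustration (dates assumed): if the file was last modified at
+%%{{2003,1,1},{0,0,0}} and the request carries an If-Modified-Since date that
+%%is later than that, FileTime =< FieldTime holds and unmodified is returned,
+%%which in turn yields a 304 response in control_modification/3.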
+control_modification_data(Info,ModificationTime,HeaderField)->
+ case strip_date(httpd_util:key1search(Info#mod.parsed_header,HeaderField)) of
+ undefined->
+ undefined;
+ LastModified0 ->
+ LastModified=httpd_util:convert_request_date(LastModified0),
+ ?DEBUG("control_modification_data() -> "
+ "~n Request-Field: ~s"
+ "~n FileLastModified: ~p"
+ "~n FieldValue: ~p",
+ [HeaderField,ModificationTime,LastModified]),
+ case LastModified of
+ bad_date ->
+ undefined;
+ _ ->
+ FileTime=calendar:datetime_to_gregorian_seconds(ModificationTime),
+ FieldTime=calendar:datetime_to_gregorian_seconds(LastModified),
+ if
+ FileTime=<FieldTime ->
+ ?DEBUG("File unmodified~n", []),
+ unmodified;
+ FileTime>=FieldTime ->
+ ?DEBUG("File modified~n", []),
+ modified
+ end
+ end
+ end.
+
+%%----------------------------------------------------------------------
+%%Compare two dates of the form {{YYYY,MM,DD},{HH,MIN,SS}}.
+%%If the first date is the bigger one, bigger1 is returned (read biggerFirst);
+%%if the first date is smaller, smaller1 is returned.
+% compare_date(Date,bad_date)->
+% bad_date;
+
+% compare_date({D1,T1},{D2,T2})->
+% case compare_date1(D1,D2) of
+% equal ->
+% compare_date1(T1,T2);
+% GTorLT->
+% GTorLT
+% end.
+
+% compare_date1({T1,T2,T3},{T12,T22,T32}) when T1>T12 ->
+% bigger1;
+% compare_date1({T1,T2,T3},{T1,T22,T32}) when T2>T22 ->
+% bigger1;
+% compare_date1({T1,T2,T3},{T1,T2,T32}) when T3>T32 ->
+% bigger1;
+% compare_date1({T1,T2,T3},{T1,T2,T3})->
+% equal;
+% compare_date1(_D1,_D2)->
+% smaller1.
+
+
+%% IE4 & NS4 send an extra '; length=xxxx' string at the end of the If-Modified-Since
+%% header; we detect this and ignore it (the RFCs do not mention it).
+strip_date(undefined) ->
+ undefined;
+strip_date([]) ->
+ [];
+strip_date([$;,$ |Rest]) ->
+ [];
+strip_date([C|Rest]) ->
+ [C|strip_date(Rest)].
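+
+%% For illustration (header value assumed):
+%%   strip_date("Sat, 29 Oct 1994 19:43:31 GMT; length=3419")
+%%   returns "Sat, 29 Oct 1994 19:43:31 GMT"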
+
+send_return_value({412,_,_},FileInfo)->
+ {status,{412,none,"Precondition Failed"}};
+
+send_return_value({304,Info,Path},FileInfo)->
+ Suffix=httpd_util:suffix(Path),
+ MimeType = httpd_util:lookup_mime_default(Info#mod.config_db,Suffix,"text/plain"),
+ Header = [{code,304},
+ {etag,httpd_util:create_etag(FileInfo)},
+ {content_length,0},
+ {last_modified,httpd_util:rfc1123_date(FileInfo#file_info.mtime)}],
+ {response,{response,Header,nobody}}.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_security.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_security.erl
new file mode 100644
index 0000000000..b4d52d1599
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_security.erl
@@ -0,0 +1,307 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_security.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
+%%
+-module(mod_security).
+
+%% Security Audit Functionality
+
+%% User API exports
+-export([list_blocked_users/1, list_blocked_users/2, list_blocked_users/3,
+ block_user/4, block_user/5,
+ unblock_user/2, unblock_user/3, unblock_user/4,
+ list_auth_users/1, list_auth_users/2, list_auth_users/3]).
+
+%% module API exports
+-export([do/1, load/2, store/2, remove/1]).
+
+-include("httpd.hrl").
+
+-define(VMODULE,"SEC").
+-include("httpd_verbosity.hrl").
+
+
+%% do/1
+do(Info) ->
+ ?vdebug("~n do with ~n Info: ~p",[Info]),
+ %% Check and see if any user has been authorized.
+ case httpd_util:key1search(Info#mod.data,remote_user,not_defined_user) of
+ not_defined_user ->
+ %% No user has been authorized.
+ case httpd_util:key1search(Info#mod.data, status) of
+ %% A status code has been generated!
+ {401, PhraseArgs, Reason} ->
+ case httpd_util:key1search(Info#mod.parsed_header,
+ "authorization") of
+ undefined ->
+ %% Not an authorization attempt (server just replied to
+ %% challenge for authentication)
+ {proceed, Info#mod.data};
+ [$B,$a,$s,$i,$c,$ |EncodedString] ->
+ %% Someone tried to authenticate, and obviously failed!
+ ?vlog("~n Authentication failed: ~s",
+ [EncodedString]),
+ report_failed(Info, EncodedString,"Failed authentication"),
+ take_failed_action(Info, EncodedString),
+ {proceed, Info#mod.data}
+ end;
+ _ ->
+ {proceed, Info#mod.data}
+ end;
+ User ->
+ %% A user has been authenticated, now is he blocked ?
+ ?vtrace("user '~p' authentication",[User]),
+ Path = mod_alias:path(Info#mod.data,
+ Info#mod.config_db,
+ Info#mod.request_uri),
+ {Dir, SDirData} = secretp(Path, Info#mod.config_db),
+ Addr = httpd_util:lookup(Info#mod.config_db, bind_address),
+ Port = httpd_util:lookup(Info#mod.config_db, port),
+ DF = httpd_util:key1search(SDirData, data_file),
+ case mod_security_server:check_blocked_user(Info, User,
+ SDirData,
+ Addr, Port) of
+ true ->
+ ?vtrace("user blocked",[]),
+ report_failed(Info,httpd_util:decode_base64(User) ,"User Blocked"),
+ {proceed, [{status, {403, Info#mod.request_uri, ""}}|Info#mod.data]};
+ false ->
+ ?vtrace("user not blocked",[]),
+ EncodedUser=httpd_util:decode_base64(User),
+ report_failed(Info, EncodedUser,"Authentication Succeeded"),
+ mod_security_server:store_successful_auth(Addr, Port,
+ User, SDirData),
+ {proceed, Info#mod.data}
+ end
+ end.
+
+
+
+report_failed(Info, EncodedString,Event) ->
+ Request = Info#mod.request_line,
+ Decoded = httpd_util:decode_base64(EncodedString),
+ {PortNumber,RemoteHost}=(Info#mod.init_data)#init_data.peername,
+ String = RemoteHost++" : " ++ Event ++ " : "++Request++" : "++Decoded,
+ mod_disk_log:security_log(Info,String),
+ mod_log:security_log(Info, String).
+
+take_failed_action(Info, EncodedString) ->
+ Path = mod_alias:path(Info#mod.data,Info#mod.config_db, Info#mod.request_uri),
+ {Dir, SDirData} = secretp(Path, Info#mod.config_db),
+ Addr = httpd_util:lookup(Info#mod.config_db, bind_address),
+ Port = httpd_util:lookup(Info#mod.config_db, port),
+ DecodedString = httpd_util:decode_base64(EncodedString),
+ mod_security_server:store_failed_auth(Info, Addr, Port,
+ DecodedString, SDirData).
+
+secretp(Path, ConfigDB) ->
+ Directories = ets:match(ConfigDB,{directory,'$1','_'}),
+ case secret_path(Path, Directories) of
+ {yes, Directory} ->
+ SDirs0 = httpd_util:multi_lookup(ConfigDB, security_directory),
+ SDir = lists:filter(fun(X) ->
+ lists:member({path, Directory}, X)
+ end, SDirs0),
+ {Directory, lists:flatten(SDir)};
+ no ->
+ error_report({internal_error_secretp, ?MODULE}),
+ {[], []}
+ end.
+
+secret_path(Path,Directories) ->
+ secret_path(Path, httpd_util:uniq(lists:sort(Directories)), to_be_found).
+
+secret_path(Path, [], to_be_found) ->
+ no;
+secret_path(Path, [], Directory) ->
+ {yes, Directory};
+secret_path(Path, [[NewDirectory]|Rest], Directory) ->
+ case regexp:match(Path, NewDirectory) of
+ {match, _, _} when Directory == to_be_found ->
+ secret_path(Path, Rest, NewDirectory);
+ {match, _, Length} when Length > length(Directory)->
+ secret_path(Path, Rest, NewDirectory);
+ {match, _, Length} ->
+ secret_path(Path, Rest, Directory);
+ nomatch ->
+ secret_path(Path, Rest, Directory)
+ end.
+
+
+load([$<,$D,$i,$r,$e,$c,$t,$o,$r,$y,$ |Directory],[]) ->
+ Dir = httpd_conf:custom_clean(Directory,"",">"),
+ {ok, [{security_directory, Dir, [{path, Dir}]}]};
+load(eof,[{security_directory,Directory, DirData}|_]) ->
+ {error, ?NICE("Premature end-of-file in "++Directory)};
+load([$S,$e,$c,$u,$r,$i,$t,$y,$D,$a,$t,$a,$F,$i,$l,$e,$ |FileName],
+ [{security_directory, Dir, DirData}]) ->
+ File = httpd_conf:clean(FileName),
+ {ok, [{security_directory, Dir, [{data_file, File}|DirData]}]};
+load([$S,$e,$c,$u,$r,$i,$t,$y,$C,$a,$l,$l,$b,$a,$c,$k,$M,$o,$d,$u,$l,$e,$ |ModuleName],
+ [{security_directory, Dir, DirData}]) ->
+ Mod = list_to_atom(httpd_conf:clean(ModuleName)),
+ {ok, [{security_directory, Dir, [{callback_module, Mod}|DirData]}]};
+load([$S,$e,$c,$u,$r,$i,$t,$y,$M,$a,$x,$R,$e,$t,$r,$i,$e,$s,$ |Retries],
+ [{security_directory, Dir, DirData}]) ->
+ MaxRetries = httpd_conf:clean(Retries),
+ load_return_int_tag("SecurityMaxRetries", max_retries,
+ httpd_conf:clean(Retries), Dir, DirData);
+load([$S,$e,$c,$u,$r,$i,$t,$y,$B,$l,$o,$c,$k,$T,$i,$m,$e,$ |Time],
+ [{security_directory, Dir, DirData}]) ->
+ load_return_int_tag("SecurityBlockTime", block_time,
+ httpd_conf:clean(Time), Dir, DirData);
+load([$S,$e,$c,$u,$r,$i,$t,$y,$F,$a,$i,$l,$E,$x,$p,$i,$r,$e,$T,$i,$m,$e,$ |Time],
+ [{security_directory, Dir, DirData}]) ->
+ load_return_int_tag("SecurityFailExpireTime", fail_expire_time,
+ httpd_conf:clean(Time), Dir, DirData);
+load([$S,$e,$c,$u,$r,$i,$t,$y,$A,$u,$t,$h,$T,$i,$m,$e,$o,$u,$t,$ |Time0],
+ [{security_directory, Dir, DirData}]) ->
+ Time = httpd_conf:clean(Time0),
+ load_return_int_tag("SecurityAuthTimeout", auth_timeout,
+ httpd_conf:clean(Time), Dir, DirData);
+load([$A,$u,$t,$h,$N,$a,$m,$e,$ |Name0],
+ [{security_directory, Dir, DirData}]) ->
+ Name = httpd_conf:clean(Name0),
+ {ok, [{security_directory, Dir, [{auth_name, Name}|DirData]}]};
+load("</Directory>",[{security_directory,Directory, DirData}]) ->
+ {ok, [], {security_directory, Directory, DirData}}.
+
+load_return_int_tag(Name, Atom, Time, Dir, DirData) ->
+ case Time of
+ "infinity" ->
+ {ok, [{security_directory, Dir, [{Atom, 99999999999999999999999999999}|DirData]}]};
+ Int ->
+ case catch list_to_integer(Time) of
+ {'EXIT', _} ->
+ {error, Time++" is an invalid "++Name};
+ Val ->
+ {ok, [{security_directory, Dir, [{Atom, Val}|DirData]}]}
+ end
+ end.
+
+store({security_directory, Dir0, DirData}, ConfigList) ->
+ ?CDEBUG("store(security_directory) -> ~n"
+ " Dir0: ~p~n"
+ " DirData: ~p",
+ [Dir0, DirData]),
+ Addr = httpd_util:key1search(ConfigList, bind_address),
+ Port = httpd_util:key1search(ConfigList, port),
+ mod_security_server:start(Addr, Port),
+ SR = httpd_util:key1search(ConfigList, server_root),
+ Dir =
+ case filename:pathtype(Dir0) of
+ relative ->
+ filename:join(SR, Dir0);
+ _ ->
+ Dir0
+ end,
+ case httpd_util:key1search(DirData, data_file, no_data_file) of
+ no_data_file ->
+ {error, no_security_data_file};
+ DataFile0 ->
+ DataFile =
+ case filename:pathtype(DataFile0) of
+ relative ->
+ filename:join(SR, DataFile0);
+ _ ->
+ DataFile0
+ end,
+ case mod_security_server:new_table(Addr, Port, DataFile) of
+ {ok, TwoTables} ->
+ NewDirData0 = lists:keyreplace(data_file, 1, DirData,
+ {data_file, TwoTables}),
+ NewDirData1 = case Addr of
+ undefined ->
+ [{port,Port}|NewDirData0];
+ _ ->
+ [{port,Port},{bind_address,Addr}|
+ NewDirData0]
+ end,
+ {ok, {security_directory,NewDirData1}};
+ {error, Err} ->
+ {error, {{open_data_file, DataFile}, Err}}
+ end
+ end.
+
+
+remove(ConfigDB) ->
+ Addr = case ets:lookup(ConfigDB, bind_address) of
+ [] ->
+ undefined;
+ [{bind_address, Address}] ->
+ Address
+ end,
+ [{port, Port}] = ets:lookup(ConfigDB, port),
+ mod_security_server:delete_tables(Addr, Port),
+ mod_security_server:stop(Addr, Port).
+
+
+%%
+%% User API
+%%
+
+%% list_blocked_users
+
+list_blocked_users(Port) ->
+ list_blocked_users(undefined, Port).
+
+list_blocked_users(Port, Dir) when integer(Port) ->
+ list_blocked_users(undefined,Port,Dir);
+list_blocked_users(Addr, Port) when integer(Port) ->
+ mod_security_server:list_blocked_users(Addr, Port).
+
+list_blocked_users(Addr, Port, Dir) ->
+ mod_security_server:list_blocked_users(Addr, Port, Dir).
+
+
+%% block_user
+
+block_user(User, Port, Dir, Time) ->
+ block_user(User, undefined, Port, Dir, Time).
+block_user(User, Addr, Port, Dir, Time) ->
+ mod_security_server:block_user(User, Addr, Port, Dir, Time).
+
+
+%% unblock_user
+
+unblock_user(User, Port) ->
+ unblock_user(User, undefined, Port).
+
+unblock_user(User, Port, Dir) when integer(Port) ->
+ unblock_user(User, undefined, Port, Dir);
+unblock_user(User, Addr, Port) when integer(Port) ->
+ mod_security_server:unblock_user(User, Addr, Port).
+
+unblock_user(User, Addr, Port, Dir) ->
+ mod_security_server:unblock_user(User, Addr, Port, Dir).
+
+
+%% list_auth_users
+
+list_auth_users(Port) ->
+ list_auth_users(undefined,Port).
+
+list_auth_users(Port, Dir) when integer(Port) ->
+ list_auth_users(undefined, Port, Dir);
+list_auth_users(Addr, Port) when integer(Port) ->
+ mod_security_server:list_auth_users(Addr, Port).
+
+list_auth_users(Addr, Port, Dir) ->
+ mod_security_server:list_auth_users(Addr, Port, Dir).
+
+
+error_report(M) ->
+ error_logger:error_report(M).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_security_server.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_security_server.erl
new file mode 100644
index 0000000000..81156c24e8
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_security_server.erl
@@ -0,0 +1,727 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_security_server.erl,v 1.1 2008/12/17 09:53:36 mikpe Exp $
+%%
+%% Security Audit Functionality
+
+%%
+%% The gen_server code.
+%%
+%% A gen_server is needed in this module to take care of shared access to the
+%% data file used to store failed and successful authentications as well as
+%% user blocks.
+%%
+%% The storage model is a write-through model with both an ets and a dets
+%% table. Writes are done to both the ets and then the dets table, but reads
+%% are only done from the ets table.
+%%
+%% This approach also enables parallelism when using dets by returning the
+%% same dets table identifier when opening several files with the same
+%% physical location.
+%%
+%% NOTE: This could be implemented using a single dets table, as it is
+%% possible to open a dets file with the ram_file flag, but this
+%% would require periodical sync's to disk, and it would be hard
+%% to decide when such an operation should occur.
+%%
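+%% A minimal sketch of the write-through idea used throughout this module
+%% (hypothetical helper names, shown only for illustration):
+%%
+%%   write_through(ETS, DETS, Object) ->
+%%       ets:insert(ETS, Object),
+%%       dets:insert(DETS, Object).
+%%
+%%   read(ETS, Key) ->
+%%       ets:lookup(ETS, Key).    % reads never touch the dets table
+%%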
+
+
+-module(mod_security_server).
+
+-include("httpd.hrl").
+-include("httpd_verbosity.hrl").
+
+
+-behaviour(gen_server).
+
+
+%% User API exports (called via mod_security)
+-export([list_blocked_users/2, list_blocked_users/3,
+ block_user/5,
+ unblock_user/3, unblock_user/4,
+ list_auth_users/2, list_auth_users/3]).
+
+%% Internal exports (for mod_security only)
+-export([start/2, stop/1, stop/2,
+ new_table/3, delete_tables/2,
+ store_failed_auth/5, store_successful_auth/4,
+ check_blocked_user/5]).
+
+%% gen_server exports
+-export([start_link/3,
+ init/1,
+ handle_info/2, handle_call/3, handle_cast/2,
+ terminate/2,
+ code_change/3]).
+
+-export([verbosity/3]).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% External API %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% start_link/3
+%%
+%% NOTE: This is called by httpd_misc_sup when the process is started
+%%
+
+start_link(Addr, Port, Verbosity) ->
+ ?vtrace("start_link -> entry with"
+ "~n Addr: ~p"
+ "~n Port: ~p", [Addr, Port]),
+ Name = make_name(Addr, Port),
+ gen_server:start_link({local, Name}, ?MODULE, [Verbosity],
+ [{timeout, infinity}]).
+
+
+%% start/2
+%% Called by the mod_security module.
+
+start(Addr, Port) ->
+ Name = make_name(Addr, Port),
+ case whereis(Name) of
+ undefined ->
+ Verbosity = get(security_verbosity),
+ case httpd_misc_sup:start_sec_server(Addr, Port, Verbosity) of
+ {ok, Pid} ->
+ put(security_server, Pid),
+ ok;
+ Error ->
+ exit({failed_start_security_server, Error})
+ end;
+ _ -> %% Already started...
+ ok
+ end.
+
+
+%% stop
+
+stop(Port) ->
+ stop(undefined, Port).
+stop(Addr, Port) ->
+ Name = make_name(Addr, Port),
+ case whereis(Name) of
+ undefined ->
+ ok;
+ _ ->
+ httpd_misc_sup:stop_sec_server(Addr, Port)
+ end.
+
+
+%% verbosity
+
+verbosity(Addr, Port, Verbosity) ->
+ Name = make_name(Addr, Port),
+ Req = {verbosity, Verbosity},
+ call(Name, Req).
+
+
+%% list_blocked_users
+
+list_blocked_users(Addr, Port) ->
+ Name = make_name(Addr,Port),
+ Req = {list_blocked_users, Addr, Port, '_'},
+ call(Name, Req).
+
+list_blocked_users(Addr, Port, Dir) ->
+ Name = make_name(Addr, Port),
+ Req = {list_blocked_users, Addr, Port, Dir},
+ call(Name, Req).
+
+
+%% block_user
+
+block_user(User, Addr, Port, Dir, Time) ->
+ Name = make_name(Addr, Port),
+ Req = {block_user, User, Addr, Port, Dir, Time},
+ call(Name, Req).
+
+
+%% unblock_user
+
+unblock_user(User, Addr, Port) ->
+ Name = make_name(Addr, Port),
+ Req = {unblock_user, User, Addr, Port, '_'},
+ call(Name, Req).
+
+unblock_user(User, Addr, Port, Dir) ->
+ Name = make_name(Addr, Port),
+ Req = {unblock_user, User, Addr, Port, Dir},
+ call(Name, Req).
+
+
+%% list_auth_users
+
+list_auth_users(Addr, Port) ->
+ Name = make_name(Addr, Port),
+ Req = {list_auth_users, Addr, Port, '_'},
+ call(Name, Req).
+
+list_auth_users(Addr, Port, Dir) ->
+ Name = make_name(Addr,Port),
+ Req = {list_auth_users, Addr, Port, Dir},
+ call(Name, Req).
+
+
+%% new_table
+
+new_table(Addr, Port, TabName) ->
+ Name = make_name(Addr,Port),
+ Req = {new_table, Addr, Port, TabName},
+ call(Name, Req).
+
+
+%% delete_tables
+
+delete_tables(Addr, Port) ->
+ Name = make_name(Addr, Port),
+ case whereis(Name) of
+ undefined ->
+ ok;
+ _ ->
+ call(Name, delete_tables)
+ end.
+
+
+%% store_failed_auth
+
+store_failed_auth(Info, Addr, Port, DecodedString, SDirData) ->
+ Name = make_name(Addr,Port),
+ Msg = {store_failed_auth,[Info,DecodedString,SDirData]},
+ cast(Name, Msg).
+
+
+%% store_successful_auth
+
+store_successful_auth(Addr, Port, User, SDirData) ->
+ Name = make_name(Addr,Port),
+ Msg = {store_successful_auth, [User,Addr,Port,SDirData]},
+ cast(Name, Msg).
+
+
+%% check_blocked_user
+
+check_blocked_user(Info, User, SDirData, Addr, Port) ->
+ Name = make_name(Addr, Port),
+ Req = {check_blocked_user, [Info, User, SDirData]},
+ call(Name, Req).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% Server call-back functions %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% init
+
+init([undefined]) ->
+ init([?default_verbosity]);
+init([Verbosity]) ->
+ ?DEBUG("init -> entry with Verbosity: ~p",[Verbosity]),
+ process_flag(trap_exit, true),
+ put(sname, sec),
+ put(verbosity, Verbosity),
+ ?vlog("starting",[]),
+ {ok, []}.
+
+
+%% handle_call
+
+handle_call(stop, _From, Tables) ->
+ ?vlog("stop",[]),
+ {stop, normal, ok, []};
+
+
+handle_call({verbosity,Verbosity}, _From, Tables) ->
+ ?vlog("set verbosity to ~p",[Verbosity]),
+ OldVerbosity = get(verbosity),
+ put(verbosity,Verbosity),
+ ?vdebug("old verbosity: ~p",[OldVerbosity]),
+ {reply,OldVerbosity,Tables};
+
+
+handle_call({block_user, User, Addr, Port, Dir, Time}, _From, Tables) ->
+ ?vlog("block user '~p' for ~p",[User,Dir]),
+ Ret = block_user_int({User, Addr, Port, Dir, Time}),
+ ?vdebug("block user result: ~p",[Ret]),
+ {reply, Ret, Tables};
+
+
+handle_call({list_blocked_users, Addr, Port, Dir}, _From, Tables) ->
+ ?vlog("list blocked users for ~p",[Dir]),
+ Blocked = list_blocked(Tables, Addr, Port, Dir, []),
+ ?vdebug("list blocked users: ~p",[Blocked]),
+ {reply, Blocked, Tables};
+
+
+handle_call({unblock_user, User, Addr, Port, Dir}, _From, Tables) ->
+ ?vlog("unblock user '~p' for ~p",[User,Dir]),
+ Ret = unblock_user_int({User, Addr, Port, Dir}),
+ ?vdebug("unblock user result: ~p",[Ret]),
+ {reply, Ret, Tables};
+
+
+handle_call({list_auth_users, Addr, Port, Dir}, _From, Tables) ->
+ ?vlog("list auth users for ~p",[Dir]),
+ Auth = list_auth(Tables, Addr, Port, Dir, []),
+ ?vdebug("list auth users result: ~p",[Auth]),
+ {reply, Auth, Tables};
+
+
+handle_call({new_table, Addr, Port, Name}, _From, Tables) ->
+ case lists:keysearch(Name, 1, Tables) of
+ {value, {Name, {Ets, Dets}}} ->
+ ?DEBUG("handle_call(new_table) -> we already have this table: ~p",
+ [Name]),
+ ?vdebug("new table; we already have this one: ~p",[Name]),
+ {reply, {ok, {Ets, Dets}}, Tables};
+ false ->
+ ?LOG("handle_call(new_table) -> new_table: Name = ~p",[Name]),
+ ?vlog("new table: ~p",[Name]),
+ TName = make_name(Addr,Port,length(Tables)),
+ ?DEBUG("handle_call(new_table) -> TName: ~p",[TName]),
+ ?vdebug("new table: ~p",[TName]),
+ case dets:open_file(TName, [{type, bag}, {file, Name},
+ {repair, true},
+ {access, read_write}]) of
+ {ok, DFile} ->
+ ETS = ets:new(TName, [bag, private]),
+ sync_dets_to_ets(DFile, ETS),
+ NewTables = [{Name, {ETS, DFile}}|Tables],
+ ?DEBUG("handle_call(new_table) -> ~n"
+ " NewTables: ~p",[NewTables]),
+ ?vtrace("new tables: ~p",[NewTables]),
+ {reply, {ok, {ETS, DFile}}, NewTables};
+ {error, Err} ->
+ ?LOG("handle_call -> Err: ~p",[Err]),
+ ?vinfo("failed open dets file: ~p",[Err]),
+ {reply, {error, {create_dets, Err}}, Tables}
+ end
+ end;
+
+handle_call(delete_tables, _From, Tables) ->
+ ?vlog("delete tables",[]),
+ lists:foreach(fun({Name, {ETS, DETS}}) ->
+ dets:close(DETS),
+ ets:delete(ETS)
+ end, Tables),
+ {reply, ok, []};
+
+handle_call({check_blocked_user, [Info, User, SDirData]}, _From, Tables) ->
+ ?vlog("check blocked user '~p'",[User]),
+ {ETS, DETS} = httpd_util:key1search(SDirData, data_file),
+ Dir = httpd_util:key1search(SDirData, path),
+ Addr = httpd_util:key1search(SDirData, bind_address),
+ Port = httpd_util:key1search(SDirData, port),
+ CBModule = httpd_util:key1search(SDirData, callback_module, no_module_at_all),
+ ?vdebug("call back module: ~p",[CBModule]),
+ Ret = check_blocked_user(Info, User, Dir, Addr, Port, ETS, DETS, CBModule),
+ ?vdebug("check result: ~p",[Ret]),
+ {reply, Ret, Tables};
+handle_call(Request,From,Tables) ->
+ ?vinfo("~n unknown call '~p' from ~p",[Request,From]),
+ {reply,ok,Tables}.
+
+
+%% handle_cast
+
+handle_cast({store_failed_auth, [Info, DecodedString, SDirData]}, Tables) ->
+ ?vlog("store failed auth",[]),
+ {ETS, DETS} = httpd_util:key1search(SDirData, data_file),
+ Dir = httpd_util:key1search(SDirData, path),
+ Addr = httpd_util:key1search(SDirData, bind_address),
+ Port = httpd_util:key1search(SDirData, port),
+ {ok, [User,Password]} = httpd_util:split(DecodedString,":",2),
+ ?vdebug("user '~p' and password '~p'",[User,Password]),
+ Seconds = universal_time(),
+ Key = {User, Dir, Addr, Port},
+
+ %% Event
+ CBModule = httpd_util:key1search(SDirData, callback_module, no_module_at_all),
+ ?vtrace("call back module: ~p",[CBModule]),
+ auth_fail_event(CBModule,Addr,Port,Dir,User,Password),
+
+ %% Find out if any of this user's other failed logins are too old to keep..
+ ?vtrace("remove old login failures",[]),
+ case ets:match_object(ETS, {failed, {Key, '_', '_'}}) of
+ [] ->
+ ?vtrace("no old login failures",[]),
+ no;
+ List when list(List) ->
+ ?vtrace("~p old login failures",[length(List)]),
+ ExpireTime = httpd_util:key1search(SDirData, fail_expire_time, 30)*60,
+ ?vtrace("expire time ~p",[ExpireTime]),
+ lists:map(fun({failed, {TheKey, LS, Gen}}) ->
+ Diff = Seconds-LS,
+ if
+ Diff > ExpireTime ->
+ ?vtrace("~n '~p' is to old to keep: ~p",
+ [TheKey,Gen]),
+ ets:match_delete(ETS, {failed, {TheKey, LS, Gen}}),
+ dets:match_delete(DETS, {failed, {TheKey, LS, Gen}});
+ true ->
+ ?vtrace("~n '~p' is not old enough: ~p",
+ [TheKey,Gen]),
+ ok
+ end
+ end,
+ List);
+ O ->
+ ?vlog("~n unknown login failure search result: ~p",[O]),
+ no
+ end,
+
+ %% Insert the new failure..
+ Generation = length(ets:match_object(ETS, {failed, {Key, '_', '_'}})),
+ ?vtrace("insert ('~p') new login failure: ~p",[Key,Generation]),
+ ets:insert(ETS, {failed, {Key, Seconds, Generation}}),
+ dets:insert(DETS, {failed, {Key, Seconds, Generation}}),
+
+ %% See if we should block this user..
+ MaxRetries = httpd_util:key1search(SDirData, max_retries, 3),
+ BlockTime = httpd_util:key1search(SDirData, block_time, 60),
+ ?vtrace("~n Max retries ~p, block time ~p",[MaxRetries,BlockTime]),
+ case ets:match_object(ETS, {failed, {Key, '_', '_'}}) of
+ List1 ->
+ ?vtrace("~n ~p tries so far",[length(List1)]),
+ if
+ length(List1) >= MaxRetries ->
+ %% Block this user until Future
+ ?vtrace("block user '~p'",[User]),
+ Future = Seconds+BlockTime*60,
+ ?vtrace("future: ~p",[Future]),
+ Reason = io_lib:format("Blocking user ~s from dir ~s "
+ "for ~p minutes",
+ [User, Dir, BlockTime]),
+ mod_log:security_log(Info, lists:flatten(Reason)),
+
+ %% Event
+ user_block_event(CBModule,Addr,Port,Dir,User),
+
+ ets:match_delete(ETS,{blocked_user,
+ {User, Addr, Port, Dir, '$1'}}),
+ dets:match_delete(DETS, {blocked_user,
+ {User, Addr, Port, Dir, '$1'}}),
+ BlockRecord = {blocked_user,
+ {User, Addr, Port, Dir, Future}},
+ ets:insert(ETS, BlockRecord),
+ dets:insert(DETS, BlockRecord),
+ %% Remove previous failed requests.
+ ets:match_delete(ETS, {failed, {Key, '_', '_'}}),
+ dets:match_delete(DETS, {failed, {Key, '_', '_'}});
+ true ->
+ ?vtrace("still some tries to go",[]),
+ no
+ end;
+ Other ->
+ no
+ end,
+ {noreply, Tables};
+
+handle_cast({store_successful_auth, [User, Addr, Port, SDirData]}, Tables) ->
+ ?vlog("store successful auth",[]),
+ {ETS, DETS} = httpd_util:key1search(SDirData, data_file),
+ AuthTimeOut = httpd_util:key1search(SDirData, auth_timeout, 30),
+ Dir = httpd_util:key1search(SDirData, path),
+ Key = {User, Dir, Addr, Port},
+
+ %% Remove failed entries for this Key
+ dets:match_delete(DETS, {failed, {Key, '_', '_'}}),
+ ets:match_delete(ETS, {failed, {Key, '_', '_'}}),
+
+ %% Keep track of when the last successful login took place.
+ Seconds = universal_time()+AuthTimeOut,
+ ets:match_delete(ETS, {success, {Key, '_'}}),
+ dets:match_delete(DETS, {success, {Key, '_'}}),
+ ets:insert(ETS, {success, {Key, Seconds}}),
+ dets:insert(DETS, {success, {Key, Seconds}}),
+ {noreply, Tables};
+
+handle_cast(Req, Tables) ->
+ ?vinfo("~n unknown cast '~p'",[Req]),
+ error_msg("security server got unknown cast: ~p",[Req]),
+ {noreply, Tables}.
+
+
+%% handle_info
+
+handle_info(Info, State) ->
+ ?vinfo("~n unknown info '~p'",[Info]),
+ {noreply, State}.
+
+
+%% terminate
+
+terminate(Reason, _Tables) ->
+ ?vlog("~n Terminating for reason: ~p",[Reason]),
+ ok.
+
+
+%% code_change({down, ToVsn}, State, Extra)
+%%
+code_change({down, _}, State, _Extra) ->
+ ?vlog("downgrade", []),
+ {ok, State};
+
+
+%% code_change(FromVsn, State, Extra)
+%%
+code_change(_, State, Extra) ->
+ ?vlog("upgrade", []),
+ {ok, State}.
+
+
+
+
+%% block_user_int/2
+block_user_int({User, Addr, Port, Dir, Time}) ->
+ Dirs = httpd_manager:config_match(Addr, Port, {security_directory, '_'}),
+ ?vtrace("block '~p' for ~p during ~p",[User,Dir,Time]),
+ case find_dirdata(Dirs, Dir) of
+ {ok, DirData, {ETS, DETS}} ->
+ Time1 =
+ case Time of
+ infinity ->
+ 99999999999999999999999999999;
+ _ ->
+ Time
+ end,
+ Future = universal_time()+Time1,
+ ets:match_delete(ETS, {blocked_user, {User,Addr,Port,Dir,'_'}}),
+ dets:match_delete(DETS, {blocked_user, {User,Addr,Port,Dir,'_'}}),
+ ets:insert(ETS, {blocked_user, {User,Addr,Port,Dir,Future}}),
+ dets:insert(DETS, {blocked_user, {User,Addr,Port,Dir,Future}}),
+ CBModule = httpd_util:key1search(DirData, callback_module,
+ no_module_at_all),
+ ?vtrace("call back module ~p",[CBModule]),
+ user_block_event(CBModule,Addr,Port,Dir,User),
+ true;
+ _ ->
+ {error, no_such_directory}
+ end.
+
+
+find_dirdata([], _Dir) ->
+ false;
+find_dirdata([{security_directory, DirData}|SDirs], Dir) ->
+ case lists:keysearch(path, 1, DirData) of
+ {value, {path, Dir}} ->
+ {value, {data_file, {ETS, DETS}}} =
+ lists:keysearch(data_file, 1, DirData),
+ {ok, DirData, {ETS, DETS}};
+ _ ->
+ find_dirdata(SDirs, Dir)
+ end.
+
+%% unblock_user_int/2
+
+unblock_user_int({User, Addr, Port, Dir}) ->
+ ?vtrace("unblock user '~p' for ~p",[User,Dir]),
+ Dirs = httpd_manager:config_match(Addr, Port, {security_directory, '_'}),
+ ?vtrace("~n dirs: ~p",[Dirs]),
+ case find_dirdata(Dirs, Dir) of
+ {ok, DirData, {ETS, DETS}} ->
+ case ets:match_object(ETS,{blocked_user,{User,Addr,Port,Dir,'_'}}) of
+ [] ->
+ ?vtrace("not blocked",[]),
+ {error, not_blocked};
+ Objects ->
+ ets:match_delete(ETS, {blocked_user,
+ {User, Addr, Port, Dir, '_'}}),
+ dets:match_delete(DETS, {blocked_user,
+ {User, Addr, Port, Dir, '_'}}),
+ CBModule = httpd_util:key1search(DirData, callback_module,
+ no_module_at_all),
+ user_unblock_event(CBModule,Addr,Port,Dir,User),
+ true
+ end;
+ _ ->
+ ?vlog("~n cannot unblock: no such directory '~p'",[Dir]),
+ {error, no_such_directory}
+ end.
+
+
+
+%% list_auth/2
+
+list_auth([], _Addr, _Port, Dir, Acc) ->
+ Acc;
+list_auth([{Name, {ETS, DETS}}|Tables], Addr, Port, Dir, Acc) ->
+ case ets:match_object(ETS, {success, {{'_', Dir, Addr, Port}, '_'}}) of
+ [] ->
+ list_auth(Tables, Addr, Port, Dir, Acc);
+ List when list(List) ->
+ TN = universal_time(),
+ NewAcc = lists:foldr(fun({success,{{U,Ad,P,D},T}},Ac) ->
+ if
+ T-TN > 0 ->
+ [U|Ac];
+ true ->
+ Rec = {success,{{U,Ad,P,D},T}},
+ ets:match_delete(ETS,Rec),
+ dets:match_delete(DETS,Rec),
+ Ac
+ end
+ end,
+ Acc, List),
+ list_auth(Tables, Addr, Port, Dir, NewAcc);
+ _ ->
+ list_auth(Tables, Addr, Port, Dir, Acc)
+ end.
+
+
+%% list_blocked/2
+
+list_blocked([], Addr, Port, Dir, Acc) ->
+ TN = universal_time(),
+ lists:foldl(fun({U,Ad,P,D,T}, Ac) ->
+ if
+ T-TN > 0 ->
+ [{U,Ad,P,D,local_time(T)}|Ac];
+ true ->
+ Ac
+ end
+ end,
+ [], Acc);
+list_blocked([{Name, {ETS, DETS}}|Tables], Addr, Port, Dir, Acc) ->
+ NewBlocked =
+ case ets:match_object(ETS, {blocked_user, {'_',Addr,Port,Dir,'_'}}) of
+ List when list(List) ->
+ lists:foldl(fun({blocked_user, X}, A) -> [X|A] end, Acc, List);
+ _ ->
+ Acc
+ end,
+ list_blocked(Tables, Addr, Port, Dir, NewBlocked).
+
+
+%%
+%% sync_dets_to_ets/2
+%%
+%% Reads the dets table DETS and synchronizes it with the ets table ETS.
+%%
+sync_dets_to_ets(DETS, ETS) ->
+ dets:traverse(DETS, fun(X) ->
+ ets:insert(ETS, X),
+ continue
+ end).
+
+%%
+%% check_blocked_user/7 -> true | false
+%%
+%% Check if a specific user is blocked from access.
+%%
+%% A side effect of this routine is that it also unblocks other users
+%% whose blocking time has expired. This keeps the tables as small
+%% as possible.
+%%
+check_blocked_user(Info, User, Dir, Addr, Port, ETS, DETS, CBModule) ->
+ TN = universal_time(),
+ case ets:match_object(ETS, {blocked_user, {User, '_', '_', '_', '_'}}) of
+ List when list(List) ->
+ Blocked = lists:foldl(fun({blocked_user, X}, A) ->
+ [X|A] end, [], List),
+ check_blocked_user(Info,User,Dir,Addr,Port,ETS,DETS,TN,Blocked,CBModule);
+ _ ->
+ false
+ end.
+check_blocked_user(Info, User, Dir, Addr, Port, ETS, DETS, TN, [], CBModule) ->
+ false;
+check_blocked_user(Info, User, Dir, Addr, Port, ETS, DETS, TN,
+ [{User,Addr,Port,Dir,T}|Ls], CBModule) ->
+ TD = T-TN,
+ if
+ TD =< 0 ->
+ %% Blocking has expired, remove and grant access.
+ unblock_user(Info, User, Dir, Addr, Port, ETS, DETS, CBModule),
+ false;
+ true ->
+ true
+ end;
+check_blocked_user(Info, User, Dir, Addr, Port, ETS, DETS, TN,
+ [{OUser,ODir,OAddr,OPort,T}|Ls], CBModule) ->
+ TD = T-TN,
+ if
+ TD =< 0 ->
+ %% Blocking has expired, remove.
+ unblock_user(Info, OUser, ODir, OAddr, OPort, ETS, DETS, CBModule);
+ true ->
+ true
+ end,
+ check_blocked_user(Info, User, Dir, Addr, Port, ETS, DETS, TN, Ls, CBModule).
+
+unblock_user(Info, User, Dir, Addr, Port, ETS, DETS, CBModule) ->
+ Reason=io_lib:format("User ~s was removed from the block list for dir ~s",
+ [User, Dir]),
+ mod_log:security_log(Info, lists:flatten(Reason)),
+ user_unblock_event(CBModule,Addr,Port,Dir,User),
+ dets:match_delete(DETS, {blocked_user, {User, Addr, Port, Dir, '_'}}),
+ ets:match_delete(ETS, {blocked_user, {User, Addr, Port, Dir, '_'}}).
+
+
+make_name(Addr,Port) ->
+ httpd_util:make_name("httpd_security",Addr,Port).
+
+make_name(Addr,Port,Num) ->
+ httpd_util:make_name("httpd_security",Addr,Port,
+ "__" ++ integer_to_list(Num)).
+
+
+auth_fail_event(Mod,Addr,Port,Dir,User,Passwd) ->
+ event(auth_fail,Mod,Addr,Port,Dir,[{user,User},{password,Passwd}]).
+
+user_block_event(Mod,Addr,Port,Dir,User) ->
+ event(user_block,Mod,Addr,Port,Dir,[{user,User}]).
+
+user_unblock_event(Mod,Addr,Port,Dir,User) ->
+ event(user_unblock,Mod,Addr,Port,Dir,[{user,User}]).
+
+event(Event,Mod,undefined,Port,Dir,Info) ->
+ (catch Mod:event(Event,Port,Dir,Info));
+event(Event,Mod,Addr,Port,Dir,Info) ->
+ (catch Mod:event(Event,Addr,Port,Dir,Info)).
+
+universal_time() ->
+ calendar:datetime_to_gregorian_seconds(calendar:universal_time()).
+
+local_time(T) ->
+ calendar:universal_time_to_local_time(
+ calendar:gregorian_seconds_to_datetime(T)).
+
+
+error_msg(F, A) ->
+ error_logger:error_msg(F, A).
+
+
+call(Name, Req) ->
+ case (catch gen_server:call(Name, Req)) of
+ {'EXIT', Reason} ->
+ {error, Reason};
+ Reply ->
+ Reply
+ end.
+
+
+cast(Name, Msg) ->
+ case (catch gen_server:cast(Name, Msg)) of
+ {'EXIT', Reason} ->
+ {error, Reason};
+ Result ->
+ Result
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_trace.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_trace.erl
new file mode 100644
index 0000000000..9f4d331d82
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_trace.erl
@@ -0,0 +1,64 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mod_trace.erl,v 1.1 2008/12/17 09:53:36 mikpe Exp $
+%%
+-module(mod_trace).
+
+-export([do/1]).
+
+-include("httpd.hrl").
+
+
+do(Info) ->
+ %%?vtrace("do",[]),
+ case Info#mod.method of
+ "TRACE" ->
+ case httpd_util:response_generated(Info) of
+ false->
+ generate_trace_response(Info);
+ true->
+ {proceed,Info#mod.data}
+ end;
+ _ ->
+ {proceed,Info#mod.data}
+ end.
+
+
+%%---------------------------------------------------------------------
+%%Generate the TRACE response. The response consists of an HTTP header,
+%%and the body is the request itself.
+%%----------------------------------------------------------------------
+
+generate_trace_response(Info)->
+ RequestHead=Info#mod.parsed_header,
+ Body=generate_trace_response_body(RequestHead),
+ Len=length(Body),
+ Response=["HTTP/1.1 200 OK\r\n",
+ "Content-Type:message/http\r\n",
+ "Content-Length:",integer_to_list(Len),"\r\n\r\n",
+ Info#mod.request_line,Body],
+ httpd_socket:deliver(Info#mod.socket_type,Info#mod.socket,Response),
+ {proceed,[{response,{already_sent,200,Len}}|Info#mod.data]}.
+
+generate_trace_response_body(Parsed_header)->
+ generate_trace_response_body(Parsed_header,[]).
+
+generate_trace_response_body([],Head)->
+ lists:flatten(Head);
+generate_trace_response_body([{[],[]}|Rest],Head) ->
+ generate_trace_response_body(Rest,Head);
+generate_trace_response_body([{Field,Value}|Rest],Head) ->
+ generate_trace_response_body(Rest,[Field ++ ":" ++ Value ++ "\r\n"|Head]).
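+
+%%For illustration (header values assumed): a single parsed header entry
+%%[{"host","www.example.org"}] yields the body "host:www.example.org\r\n",
+%%which is echoed back after the request line in the TRACE response.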
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/uri.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/uri.erl
new file mode 100644
index 0000000000..9a4f77f87b
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/uri.erl
@@ -0,0 +1,349 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Mobile Arts AB
+%% Portions created by Mobile Arts are Copyright 2002, Mobile Arts AB
+%% All Rights Reserved.''
+%%
+%%
+%% Author : Johan Blom <[email protected]>
+%% Description :
+%% Implements various scheme dependent subsets (e.g. HTTP, FTP etc) based on
+%% RFC 2396, Uniform Resource Identifiers (URI): Generic Syntax
+%% Created : 27 Jul 2001 by Johan Blom <[email protected]>
+%%
+
+-module(uri).
+
+-author('[email protected]').
+
+-export([parse/1,resolve/2]).
+
+
+%%% Parse URI and return {Scheme,Path}
+%%% Note that scheme-specific parsing/validation is not handled here!
+resolve(Root,Rel) ->
+ ok.
+
+%%% See "http://www.isi.edu/in-notes/iana/assignments/url-schemes" for a list of
+%%% defined URL schemes and references to its sources.
+
+parse(URI) ->
+ case parse_scheme(URI) of
+ {http,Cont} -> parse_http(Cont,http);
+ {https,Cont} -> parse_http(Cont,https);
+ {ftp,Cont} -> parse_ftp(Cont,ftp);
+ {sip,Cont} -> parse_sip(Cont,sip);
+ {sms,Cont} -> parse_sms(Cont,sms);
+ {error,Error} -> {error,Error};
+ {Scheme,Cont} -> {Scheme,Cont}
+ end.
+
+
+%%% Parse the scheme.
+parse_scheme(URI) ->
+ parse_scheme(URI,[]).
+
+parse_scheme([H|URI],Acc) when $a=<H,H=<$z; $A=<H,H=<$Z ->
+ parse_scheme2(URI,[H|Acc]);
+parse_scheme(_,_) ->
+ {error,no_scheme}.
+
+parse_scheme2([H|URI],Acc)
+ when $a=<H,H=<$z; $A=<H,H=<$Z; $0=<H,H=<$9; H==$-;H==$+;H==$. ->
+ parse_scheme2(URI,[H|Acc]);
+parse_scheme2([$:|URI],Acc) ->
+ {list_to_atom(lists:reverse(Acc)),URI};
+parse_scheme2(_,_) ->
+ {error,no_scheme}.
+
+
+%%% ............................................................................
+-define(HTTP_DEFAULT_PORT, 80).
+-define(HTTPS_DEFAULT_PORT, 443).
+
+%%% HTTP (Source RFC 2396, RFC 2616)
+%%% http_URL = "*" | absoluteURI | abs_path [ "?" query ] | authority
+
+%%% http_URL = "http:" "//" host [ ":" port ] [ abs_path [ "?" query ]]
+%%% Returns a tuple {http,Host,Port,PathQuery} where
+%%% Host = string() Host value
+%%% Port = string() Port value
+%%% PathQuery= string() Combined absolute path and query value
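+%%% For illustration (example URLs assumed):
+%%%   parse("http://www.erlang.org") -> {http,"www.erlang.org",80,"/"}
+%%%   parse("http://www.erlang.org:8080/docs?q=1") ->
+%%%       {http,"www.erlang.org",8080,"/docs?q=1"}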
+parse_http("//"++C0,Scheme) ->
+ case scan_hostport(C0,Scheme) of
+ {C1,Host,Port} ->
+ case scan_pathquery(C1) of
+ {error,Error} ->
+ {error,Error};
+ PathQuery ->
+ {Scheme,Host,Port,PathQuery}
+ end;
+ {error,Error} ->
+ {error,Error}
+ end;
+parse_http(_,_) ->
+ {error,invalid_url}.
+
+scan_pathquery(C0) ->
+ case scan_abspath(C0) of
+ {error,Error} ->
+ {error,Error};
+ {[],[]} -> % Add implicit path
+ "/";
+ {"?"++C1,Path} ->
+ case scan_query(C1,[]) of
+ {error,Error} ->
+ {error,Error};
+ Query ->
+ Path++"?"++Query
+ end;
+ {[],Path} ->
+ Path
+ end.
+
+
+%%% ............................................................................
+%%% FIXME!!! This is just a quick hack that doesn't work!
+-define(FTP_DEFAULT_PORT, 80).
+
+%%% FTP (Source RFC 2396, RFC 1738, RFC 959)
+%%% Note: This BNF has been modified to better fit with RFC 2396
+%%% ftp_URL = "ftp:" "//" [ ftp_userinfo ] host [ ":" port ] ftp_abs_path
+%%% ftp_userinfo = ftp_user [ ":" ftp_password ]
+%%% ftp_abs_path = "/" ftp_path_segments [ ";type=" ftp_type ]
+%%% ftp_path_segments = ftp_segment *( "/" ftp_segment)
+%%% ftp_segment = *[ ftp_uchar | "?" | ":" | "@" | "&" | "=" ]
+%%% ftp_type = "A" | "I" | "D" | "a" | "i" | "d"
+%%% ftp_user = *[ ftp_uchar | ";" | "?" | "&" | "=" ]
+%%% ftp_password = *[ ftp_uchar | ";" | "?" | "&" | "=" ]
+%%% ftp_uchar = ftp_unreserved | escaped
+%%% ftp_unreserved = alphanum | mark | "$" | "+" | ","
+parse_ftp("//"++C0,Scheme) ->
+ case ftp_userinfo(C0) of
+ {C1,Creds} ->
+ case scan_hostport(C1,Scheme) of
+ {C2,Host,Port} ->
+ case scan_abspath(C2) of
+ {error,Error} ->
+ {error,Error};
+ {[],[]} -> % Add implicit path
+ {Scheme,Creds,Host,Port,"/"};
+ {[],Path} ->
+ {Scheme,Creds,Host,Port,Path}
+ end;
+ {error,Error} ->
+ {error,Error}
+ end;
+ {error,Error} ->
+ {error,Error}
+ end.
+
+ftp_userinfo(C0) ->
+ User="",
+ Password="",
+ {C0,{User,Password}}.
+
+
+%%% ............................................................................
+%%% SIP (Source RFC 2396, RFC 2543)
+%%% sip_URL = "sip:" [ sip_userinfo "@" ] host [ ":" port ]
+%%% sip_url-parameters [ sip_headers ]
+%%% sip_userinfo = sip_user [ ":" sip_password ]
+%%% sip_user = *( unreserved | escaped | "&" | "=" | "+" | "$" | "," )
+%%% sip_password = *( unreserved | escaped | "&" | "=" | "+" | "$" | "," )
+%%% sip_url-parameters = *( ";" sip_url-parameter )
+%%% sip_url-parameter = sip_transport-param | sip_user-param |
+%%% sip_method-param | sip_ttl-param |
+%%% sip_maddr-param | sip_other-param
+%%% sip_transport-param = "transport=" ( "udp" | "tcp" )
+%%% sip_ttl-param = "ttl=" sip_ttl
+%%% sip_ttl = 1*3DIGIT ; 0 to 255
+%%% sip_maddr-param = "maddr=" host
+%%% sip_user-param = "user=" ( "phone" | "ip" )
+%%% sip_method-param = "method=" sip_Method
+%%% sip_tag-param = "tag=" sip_UUID
+%%% sip_UUID = 1*( hex | "-" )
+%%% sip_other-param = ( token | ( token "=" ( token | quoted-string )))
+%%% sip_Method = "INVITE" | "ACK" | "OPTIONS" | "BYE" |
+%%% "CANCEL" | "REGISTER"
+%%% sip_token = 1*< any CHAR except CTL's or separators>
+%%% sip_quoted-string = ( <"> *(qdtext | quoted-pair ) <"> )
+%%% sip_qdtext = <any TEXT-UTF8 except <">>
+%%% sip_quoted-pair = " \ " CHAR
+parse_sip(Cont,Scheme) ->
+ {Scheme,Cont}.
+
+
+
+
+%%% ............................................................................
+%%% SMS (Source draft-wilde-sms-uri-01, January 24 2002 and
+%%% draft-allocchio-gstn-01, November 2001)
+%%% The syntax definition for "gstn-phone" is taken from
+%%% [draft-allocchio-gstn-01], allowing global as well as local telephone
+%%% numbers.
+%%% Note: This BNF has been modified to better fit with RFC 2396
+%%% sms_URI = sms ":" 1*( sms-recipient ) [ sms-body ]
+%%% sms-recipient = gstn-phone sms-qualifier
+%%% [ "," sms-recipient ]
+%%% sms-qualifier = *( smsc-qualifier / pid-qualifier )
+%%% smsc-qualifier = ";smsc=" SMSC-sub-addr
+%%% pid-qualifier = ";pid=" PID-sub-addr
+%%% sms-body = ";body=" *urlc
+%%% gstn-phone = ( global-phone / local-phone )
+%%% global-phone = "+" 1*( DIGIT / written-sep )
+%%% local-phone = [ exit-code ] dial-number / exit-code [ dial-number ]
+%%% exit-code = phone-string
+%%% dial-number = phone-string
+%%% subaddr-string = phone-string
+%%% post-dial = phone-string
+%%% phone-string = 1*( DTMF / pause / tonewait / written-sep )
+%%% DTMF = ( DIGIT / "#" / "*" / "A" / "B" / "C" / "D" )
+%%% written-sep = ( "-" / "." )
+%%% pause = "p"
+%%% tonewait = "w"
+parse_sms(Cont,Scheme) ->
+ {Scheme,Cont}.
+
+
+%%% ============================================================================
+%%% Generic URI parsing. BNF rules from RFC 2396
+
+%%% hostport = host [ ":" port ]
+scan_hostport(C0,Scheme) ->
+ case scan_host(C0) of
+ {error,Error} ->
+ {error,Error};
+ {":"++C1,Host} ->
+ {C2,Port}=scan_port(C1,[]),
+ {C2,Host,list_to_integer(Port)};
+ {C1,Host} when Scheme==http ->
+ {C1,Host,?HTTP_DEFAULT_PORT};
+ {C1,Host} when Scheme==https ->
+ {C1,Host,?HTTPS_DEFAULT_PORT};
+ {C1,Host} when Scheme==ftp ->
+ {C1,Host,?FTP_DEFAULT_PORT}
+ end.
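+
+%%% For illustration (inputs assumed):
+%%%   scan_hostport("www.erlang.org:8080/doc",http) -> {"/doc","www.erlang.org",8080}
+%%%   scan_hostport("www.erlang.org/doc",http) -> {"/doc","www.erlang.org",80}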
+
+
+%%% host = hostname | IPv4address
+%%% hostname = *( domainlabel "." ) toplabel [ "." ]
+%%% domainlabel = alphanum | alphanum *( alphanum | "-" ) alphanum
+%%% toplabel = alpha | alpha *( alphanum | "-" ) alphanum
+%%% IPv4address = 1*digit "." 1*digit "." 1*digit "." 1*digit
+
+-define(ALPHA, 1).
+-define(DIGIT, 2).
+
+scan_host(C0) ->
+ case scan_host2(C0,[],0,[],[]) of
+ {C1,IPv4address,[?DIGIT,?DIGIT,?DIGIT,?DIGIT]} ->
+ {C1,lists:reverse(lists:append(IPv4address))};
+ {C1,Hostname,[?ALPHA|HostF]} ->
+ {C1,lists:reverse(lists:append(Hostname))};
+ _ ->
+ {error,no_host}
+ end.
+
+scan_host2([H|C0],Acc,CurF,Host,HostF) when $0=<H,H=<$9 ->
+ scan_host2(C0,[H|Acc],CurF bor ?DIGIT,Host,HostF);
+scan_host2([H|C0],Acc,CurF,Host,HostF) when $a=<H,H=<$z; $A=<H,H=<$Z ->
+ scan_host2(C0,[H|Acc],CurF bor ?ALPHA,Host,HostF);
+scan_host2([$-|C0],Acc,CurF,Host,HostF) when CurF=/=0 ->
+ scan_host2(C0,[$-|Acc],CurF,Host,HostF);
+scan_host2([$.|C0],Acc,CurF,Host,HostF) when CurF=/=0 ->
+ scan_host2(C0,[],0,[".",Acc|Host],[CurF|HostF]);
+scan_host2(C0,Acc,CurF,Host,HostF) ->
+ {C0,[Acc|Host],[CurF|HostF]}.
+
+
+%%% port = *digit
+scan_port([H|C0],Acc) when $0=<H,H=<$9 ->
+ scan_port(C0,[H|Acc]);
+scan_port(C0,Acc) ->
+ {C0,lists:reverse(Acc)}.
+
+%%% abs_path = "/" path_segments
+scan_abspath([]) ->
+ {[],[]};
+scan_abspath("/"++C0) ->
+ scan_pathsegments(C0,["/"]);
+scan_abspath(_) ->
+ {error,no_abspath}.
+
+%%% path_segments = segment *( "/" segment )
+scan_pathsegments(C0,Acc) ->
+ case scan_segment(C0,[]) of
+ {"/"++C1,Segment} ->
+ scan_pathsegments(C1,["/",Segment|Acc]);
+ {C1,Segment} ->
+ {C1,lists:reverse(lists:append([Segment|Acc]))}
+ end.
+
+
+%%% segment = *pchar *( ";" param )
+%%% param = *pchar
+scan_segment(";"++C0,Acc) ->
+ {C1,ParamAcc}=scan_pchars(C0,";"++Acc),
+ scan_segment(C1,ParamAcc);
+scan_segment(C0,Acc) ->
+ case scan_pchars(C0,Acc) of
+ {";"++C1,Segment} ->
+ {C2,ParamAcc}=scan_pchars(C1,";"++Segment),
+ scan_segment(C2,ParamAcc);
+ {C1,Segment} ->
+ {C1,Segment}
+ end.
+
+%%% query = *uric
+%%% uric = reserved | unreserved | escaped
+%%% reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" |
+%%% "$" | ","
+%%% unreserved = alphanum | mark
+%%% mark = "-" | "_" | "." | "!" | "~" | "*" | "'" |
+%%% "(" | ")"
+%%% escaped = "%" hex hex
+scan_query([],Acc) ->
+ lists:reverse(Acc);
+scan_query([$%,H1,H2|C0],Acc) -> % escaped
+ scan_query(C0,[hex2dec(H1)*16+hex2dec(H2)|Acc]);
+scan_query([H|C0],Acc) when $a=<H,H=<$z;$A=<H,H=<$Z;$0=<H,H=<$9 -> % alphanum
+ scan_query(C0,[H|Acc]);
+scan_query([H|C0],Acc) when H==$;; H==$/; H==$?; H==$:; H==$@;
+ H==$&; H==$=; H==$+; H==$$; H==$, -> % reserved
+ scan_query(C0,[H|Acc]);
+scan_query([H|C0],Acc) when H==$-; H==$_; H==$.; H==$!; H==$~;
+ H==$*; H==$'; H==$(; H==$) -> % mark
+ scan_query(C0,[H|Acc]);
+scan_query([H|C0],Acc) ->
+ {error,no_query}.
+
+
+%%% pchar = unreserved | escaped |
+%%% ":" | "@" | "&" | "=" | "+" | "$" | ","
+scan_pchars([],Acc) ->
+ {[],Acc};
+scan_pchars([$%,H1,H2|C0],Acc) -> % escaped
+ scan_pchars(C0,[hex2dec(H1)*16+hex2dec(H2)|Acc]);
+scan_pchars([H|C0],Acc) when $a=<H,H=<$z;$A=<H,H=<$Z;$0=<H,H=<$9 -> % alphanum
+ scan_pchars(C0,[H|Acc]);
+scan_pchars([H|C0],Acc) when H==$-; H==$_; H==$.; H==$!; H==$~;
+ H==$*; H==$'; H==$(; H==$) -> % mark
+ scan_pchars(C0,[H|Acc]);
+scan_pchars([H|C0],Acc) when H==$:; H==$@; H==$&; H==$=; H==$+; H==$$; H==$, ->
+ scan_pchars(C0,[H|Acc]);
+scan_pchars(C0,Acc) ->
+ {C0,Acc}.
+
+hex2dec(X) when X>=$0,X=<$9 -> X-$0;
+hex2dec(X) when X>=$A,X=<$F -> X-$A+10;
+hex2dec(X) when X>=$a,X=<$f -> X-$a+10.
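+
+%%% For illustration: hex2dec($2)*16 + hex2dec($F) = 47 = $/, so an escaped
+%%% "%2F" in a segment or query decodes to the character '/'.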
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/Makefile b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/Makefile
new file mode 100644
index 0000000000..8b57868117
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/Makefile
@@ -0,0 +1,136 @@
+# ``The contents of this file are subject to the Erlang Public License,
+# Version 1.1, (the "License"); you may not use this file except in
+# compliance with the License. You should have received a copy of the
+# Erlang Public License along with this software. If not, it can be
+# retrieved via the world wide web at http://www.erlang.org/.
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and limitations
+# under the License.
+#
+# The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+# Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+# AB. All Rights Reserved.''
+#
+# $Id: Makefile,v 1.1 2008/12/17 09:53:37 mikpe Exp $
+#
+include $(ERL_TOP)/make/target.mk
+
+ifeq ($(TYPE),debug)
+ERL_COMPILE_FLAGS += -Ddebug -W
+endif
+
+include $(ERL_TOP)/make/$(TARGET)/otp.mk
+
+# ----------------------------------------------------
+# Application version
+# ----------------------------------------------------
+include ../vsn.mk
+VSN=$(MNESIA_VSN)
+
+# ----------------------------------------------------
+# Release directory specification
+# ----------------------------------------------------
+RELSYSDIR = $(RELEASE_PATH)/lib/mnesia-$(VSN)
+
+# ----------------------------------------------------
+# Target Specs
+# ----------------------------------------------------
+MODULES= \
+ mnesia \
+ mnesia_backup \
+ mnesia_bup \
+ mnesia_checkpoint \
+ mnesia_checkpoint_sup \
+ mnesia_controller \
+ mnesia_dumper\
+ mnesia_event \
+ mnesia_frag \
+ mnesia_frag_hash \
+ mnesia_frag_old_hash \
+ mnesia_index \
+ mnesia_kernel_sup \
+ mnesia_late_loader \
+ mnesia_lib\
+ mnesia_loader \
+ mnesia_locker \
+ mnesia_log \
+ mnesia_monitor \
+ mnesia_recover \
+ mnesia_registry \
+ mnesia_schema\
+ mnesia_snmp_hook \
+ mnesia_snmp_sup \
+ mnesia_subscr \
+ mnesia_sup \
+ mnesia_sp \
+ mnesia_text \
+ mnesia_tm
+
+HRL_FILES= mnesia.hrl
+
+ERL_FILES= $(MODULES:%=%.erl)
+
+TARGET_FILES= $(MODULES:%=$(EBIN)/%.$(EMULATOR)) $(APP_TARGET) $(APPUP_TARGET)
+
+APP_FILE= mnesia.app
+
+APP_SRC= $(APP_FILE).src
+APP_TARGET= $(EBIN)/$(APP_FILE)
+
+APPUP_FILE= mnesia.appup
+
+APPUP_SRC= $(APPUP_FILE).src
+APPUP_TARGET= $(EBIN)/$(APPUP_FILE)
+
+
+
+# ----------------------------------------------------
+# FLAGS
+# ----------------------------------------------------
+ERL_FLAGS +=
+ERL_COMPILE_FLAGS += \
+ +warn_unused_vars \
+ +'{parse_transform,sys_pre_attributes}' \
+ +'{attribute,insert,vsn,"mnesia_$(MNESIA_VSN)"}' \
+ -W
+
+# ----------------------------------------------------
+# Targets
+# ----------------------------------------------------
+
+opt: $(TARGET_FILES)
+
+debug:
+ @${MAKE} TYPE=debug
+
+clean:
+ rm -f $(TARGET_FILES)
+ rm -f core
+
+docs:
+
+# ----------------------------------------------------
+# Special Build Targets
+# ----------------------------------------------------
+
+$(APP_TARGET): $(APP_SRC) ../vsn.mk
+ sed -e 's;%VSN%;$(VSN);' $< > $@
+
+$(APPUP_TARGET): $(APPUP_SRC) ../vsn.mk
+ sed -e 's;%VSN%;$(VSN);' $< > $@
+
+
+# ----------------------------------------------------
+# Release Target
+# ----------------------------------------------------
+include $(ERL_TOP)/make/otp_release_targets.mk
+
+release_spec: opt
+ $(INSTALL_DIR) $(RELSYSDIR)/src
+ $(INSTALL_DATA) $(HRL_FILES) $(ERL_FILES) $(RELSYSDIR)/src
+ $(INSTALL_DIR) $(RELSYSDIR)/ebin
+ $(INSTALL_DATA) $(TARGET_FILES) $(RELSYSDIR)/ebin
+
+release_docs_spec:
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.app.src b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.app.src
new file mode 100644
index 0000000000..fb9d7aa0ca
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.app.src
@@ -0,0 +1,50 @@
+{application, mnesia,
+ [{description, "MNESIA CXC 138 12"},
+ {vsn, "%VSN%"},
+ {modules, [
+ mnesia,
+ mnesia_backup,
+ mnesia_bup,
+ mnesia_checkpoint,
+ mnesia_checkpoint_sup,
+ mnesia_controller,
+ mnesia_dumper,
+ mnesia_event,
+ mnesia_frag,
+ mnesia_frag_hash,
+ mnesia_frag_old_hash,
+ mnesia_index,
+ mnesia_kernel_sup,
+ mnesia_late_loader,
+ mnesia_lib,
+ mnesia_loader,
+ mnesia_locker,
+ mnesia_log,
+ mnesia_monitor,
+ mnesia_recover,
+ mnesia_registry,
+ mnesia_schema,
+ mnesia_snmp_hook,
+ mnesia_snmp_sup,
+ mnesia_subscr,
+ mnesia_sup,
+ mnesia_sp,
+ mnesia_text,
+ mnesia_tm
+ ]},
+ {registered, [
+ mnesia_dumper_load_regulator,
+ mnesia_event,
+ mnesia_fallback,
+ mnesia_controller,
+ mnesia_kernel_sup,
+ mnesia_late_loader,
+ mnesia_locker,
+ mnesia_monitor,
+ mnesia_recover,
+ mnesia_subscr,
+ mnesia_sup,
+ mnesia_tm
+ ]},
+ {applications, [kernel, stdlib]},
+ {mod, {mnesia_sup, []}}]}.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.appup.src b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.appup.src
new file mode 100644
index 0000000000..64f50dd5c6
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.appup.src
@@ -0,0 +1,6 @@
+{"%VSN%",
+ [
+ ],
+ [
+ ]
+}.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl
new file mode 100644
index 0000000000..b4f03fab03
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl
@@ -0,0 +1,2191 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia.erl,v 1.2 2010/03/04 13:54:19 maria Exp $
+%%
+%% This module exports the public interface of the Mnesia DBMS engine
+
+-module(mnesia).
+%-behaviour(mnesia_access).
+
+-export([
+ %% Start, stop and debugging
+ start/0, start/1, stop/0, % Not for public use
+ set_debug_level/1, lkill/0, kill/0, % Not for public use
+ ms/0, nc/0, nc/1, ni/0, ni/1, % Not for public use
+ change_config/2,
+
+ %% Activity mgt
+ abort/1, transaction/1, transaction/2, transaction/3,
+ sync_transaction/1, sync_transaction/2, sync_transaction/3,
+ async_dirty/1, async_dirty/2, sync_dirty/1, sync_dirty/2, ets/1, ets/2,
+ activity/2, activity/3, activity/4, % Not for public use
+
+ %% Access within an activity - Lock acquisition
+ lock/2, lock/4,
+ read_lock_table/1,
+ write_lock_table/1,
+
+ %% Access within an activity - Updates
+ write/1, s_write/1, write/3, write/5,
+ delete/1, s_delete/1, delete/3, delete/5,
+ delete_object/1, s_delete_object/1, delete_object/3, delete_object/5,
+
+ %% Access within an activity - Reads
+ read/1, wread/1, read/3, read/5,
+ match_object/1, match_object/3, match_object/5,
+ select/2, select/3, select/5,
+ all_keys/1, all_keys/4,
+ index_match_object/2, index_match_object/4, index_match_object/6,
+ index_read/3, index_read/6,
+
+ %% Iterators within an activity
+ foldl/3, foldl/4, foldr/3, foldr/4,
+
+ %% Dirty access regardless of activities - Updates
+ dirty_write/1, dirty_write/2,
+ dirty_delete/1, dirty_delete/2,
+ dirty_delete_object/1, dirty_delete_object/2,
+ dirty_update_counter/2, dirty_update_counter/3,
+
+ %% Dirty access regardless of activities - Read
+ dirty_read/1, dirty_read/2,
+ dirty_select/2,
+ dirty_match_object/1, dirty_match_object/2, dirty_all_keys/1,
+ dirty_index_match_object/2, dirty_index_match_object/3,
+ dirty_index_read/3, dirty_slot/2,
+ dirty_first/1, dirty_next/2, dirty_last/1, dirty_prev/2,
+
+ %% Info
+ table_info/2, table_info/4, schema/0, schema/1,
+ error_description/1, info/0, system_info/1,
+ system_info/0, % Not for public use
+
+ %% Database mgt
+ create_schema/1, delete_schema/1,
+ backup/1, backup/2, traverse_backup/4, traverse_backup/6,
+ install_fallback/1, install_fallback/2,
+ uninstall_fallback/0, uninstall_fallback/1,
+ activate_checkpoint/1, deactivate_checkpoint/1,
+ backup_checkpoint/2, backup_checkpoint/3, restore/2,
+
+ %% Table mgt
+ create_table/1, create_table/2, delete_table/1,
+ add_table_copy/3, del_table_copy/2, move_table_copy/3,
+ add_table_index/2, del_table_index/2,
+ transform_table/3, transform_table/4,
+ change_table_copy_type/3,
+ read_table_property/2, write_table_property/2, delete_table_property/2,
+ change_table_frag/2,
+ clear_table/1,
+
+ %% Table load
+ dump_tables/1, wait_for_tables/2, force_load_table/1,
+ change_table_access_mode/2, change_table_load_order/2,
+ set_master_nodes/1, set_master_nodes/2,
+
+ %% Misc admin
+ dump_log/0, subscribe/1, unsubscribe/1, report_event/1,
+
+ %% Snmp
+ snmp_open_table/2, snmp_close_table/1,
+ snmp_get_row/2, snmp_get_next_index/2, snmp_get_mnesia_key/2,
+
+ %% Textfile access
+ load_textfile/1, dump_to_textfile/1,
+
+ %% Mnemosyne exclusive
+ get_activity_id/0, put_activity_id/1, % Not for public use
+
+ %% Mnesia internal functions
+ dirty_rpc/4, % Not for public use
+ has_var/1, fun_select/7,
+ foldl/6, foldr/6,
+
+ %% Module internal callback functions
+ remote_dirty_match_object/2, % Not for public use
+ remote_dirty_select/2 % Not for public use
+ ]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+-include("mnesia.hrl").
+-import(mnesia_lib, [verbose/2]).
+
+-define(DEFAULT_ACCESS, ?MODULE).
+
+%% Select
+-define(PATTERN_TO_OBJECT_MATCH_SPEC(Pat), [{Pat,[],['$_']}]).
+-define(PATTERN_TO_BINDINGS_MATCH_SPEC(Pat), [{Pat,[],['$$']}]).
+
+%% Local function in order to avoid external function call
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason);
+ Value -> Value
+ end.
+
+is_dollar_digits(Var) ->
+ case atom_to_list(Var) of
+ [$$ | Digs] ->
+ is_digits(Digs);
+ _ ->
+ false
+ end.
+
+is_digits([Dig | Tail]) ->
+ if
+ $0 =< Dig, Dig =< $9 ->
+ is_digits(Tail);
+ true ->
+ false
+ end;
+is_digits([]) ->
+ true.
+
+has_var(X) when atom(X) ->
+ if
+ X == '_' ->
+ true;
+ atom(X) ->
+ is_dollar_digits(X);
+ true ->
+ false
+ end;
+has_var(X) when tuple(X) ->
+ e_has_var(X, size(X));
+has_var([H|T]) ->
+ case has_var(H) of
+ false -> has_var(T);
+ Other -> Other
+ end;
+has_var(_) -> false.
+
+e_has_var(_, 0) -> false;
+e_has_var(X, Pos) ->
+ case has_var(element(Pos, X))of
+ false -> e_has_var(X, Pos-1);
+ Other -> Other
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Start and stop
+
+start() ->
+ {Time , Res} = timer:tc(application, start, [?APPLICATION, temporary]),
+
+ Secs = Time div 1000000,
+ case Res of
+ ok ->
+ verbose("Mnesia started, ~p seconds~n",[ Secs]),
+ ok;
+ {error, {already_started, mnesia}} ->
+ verbose("Mnesia already started, ~p seconds~n",[ Secs]),
+ ok;
+ {error, R} ->
+ verbose("Mnesia failed to start, ~p seconds: ~p~n",[ Secs, R]),
+ {error, R}
+ end.
+
+start(ExtraEnv) when list(ExtraEnv) ->
+ case mnesia_lib:ensure_loaded(?APPLICATION) of
+ ok ->
+ patched_start(ExtraEnv);
+ Error ->
+ Error
+ end;
+start(ExtraEnv) ->
+ {error, {badarg, ExtraEnv}}.
+
+patched_start([{Env, Val} | Tail]) when atom(Env) ->
+ case mnesia_monitor:patch_env(Env, Val) of
+ {error, Reason} ->
+ {error, Reason};
+ _NewVal ->
+ patched_start(Tail)
+ end;
+patched_start([Head | _]) ->
+ {error, {bad_type, Head}};
+patched_start([]) ->
+ start().
+
+stop() ->
+ case application:stop(?APPLICATION) of
+ ok -> stopped;
+ {error, {not_started, ?APPLICATION}} -> stopped;
+ Other -> Other
+ end.
+
+change_config(extra_db_nodes, Ns) when list(Ns) ->
+ mnesia_controller:connect_nodes(Ns);
+change_config(BadKey, _BadVal) ->
+ {error, {badarg, BadKey}}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Debugging
+
+set_debug_level(Level) ->
+ mnesia_subscr:set_debug_level(Level).
+
+lkill() ->
+ mnesia_sup:kill().
+
+kill() ->
+ rpc:multicall(mnesia_sup, kill, []).
+
+ms() ->
+ [
+ mnesia,
+ mnesia_backup,
+ mnesia_bup,
+ mnesia_checkpoint,
+ mnesia_checkpoint_sup,
+ mnesia_controller,
+ mnesia_dumper,
+ mnesia_loader,
+ mnesia_frag,
+ mnesia_frag_hash,
+ mnesia_frag_old_hash,
+ mnesia_index,
+ mnesia_kernel_sup,
+ mnesia_late_loader,
+ mnesia_lib,
+ mnesia_log,
+ mnesia_registry,
+ mnesia_schema,
+ mnesia_snmp_hook,
+ mnesia_snmp_sup,
+ mnesia_subscr,
+ mnesia_sup,
+ mnesia_text,
+ mnesia_tm,
+ mnesia_recover,
+ mnesia_locker,
+
+ %% Keep these last in the list, so
+ %% mnesia_sup kills these last
+ mnesia_monitor,
+ mnesia_event
+ ].
+
+nc() ->
+ Mods = ms(),
+ nc(Mods).
+
+nc(Mods) when list(Mods)->
+ [Mod || Mod <- Mods, ok /= load(Mod, compile)].
+
+ni() ->
+ Mods = ms(),
+ ni(Mods).
+
+ni(Mods) when list(Mods) ->
+ [Mod || Mod <- Mods, ok /= load(Mod, interpret)].
+
+load(Mod, How) when atom(Mod) ->
+ case try_load(Mod, How) of
+ ok ->
+ ok;
+ _ ->
+ mnesia_lib:show( "~n RETRY ~p FROM: ", [Mod]),
+ Abs = mod2abs(Mod),
+ load(Abs, How)
+ end;
+load(Abs, How) ->
+ case try_load(Abs, How) of
+ ok ->
+ ok;
+ {error, Reason} ->
+ mnesia_lib:show( " *** ERROR *** ~p~n", [Reason]),
+ {error, Reason}
+ end.
+
+try_load(Mod, How) ->
+ mnesia_lib:show( " ~p ", [Mod]),
+ Flags = [{d, debug}],
+ case How of
+ compile ->
+ case catch c:nc(Mod, Flags) of
+ {ok, _} -> ok;
+ Other -> {error, Other}
+ end;
+ interpret ->
+ case catch int:ni(Mod, Flags) of
+ {module, _} -> ok;
+ Other -> {error, Other}
+ end
+ end.
+
+mod2abs(Mod) ->
+ ModString = atom_to_list(Mod),
+ SubDir =
+ case lists:suffix("test", ModString) of
+ true -> test;
+ false -> src
+ end,
+ filename:join([code:lib_dir(?APPLICATION), SubDir, ModString]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Activity mgt
+
+abort(Reason) ->
+ exit({aborted, Reason}).
+
+transaction(Fun) ->
+ transaction(get(mnesia_activity_state), Fun, [], infinity, ?DEFAULT_ACCESS, async).
+transaction(Fun, Retries) when integer(Retries), Retries >= 0 ->
+ transaction(get(mnesia_activity_state), Fun, [], Retries, ?DEFAULT_ACCESS, async);
+transaction(Fun, Retries) when Retries == infinity ->
+ transaction(get(mnesia_activity_state), Fun, [], Retries, ?DEFAULT_ACCESS, async);
+transaction(Fun, Args) ->
+ transaction(get(mnesia_activity_state), Fun, Args, infinity, ?DEFAULT_ACCESS, async).
+transaction(Fun, Args, Retries) ->
+ transaction(get(mnesia_activity_state), Fun, Args, Retries, ?DEFAULT_ACCESS, async).
+
+sync_transaction(Fun) ->
+ transaction(get(mnesia_activity_state), Fun, [], infinity, ?DEFAULT_ACCESS, sync).
+sync_transaction(Fun, Retries) when integer(Retries), Retries >= 0 ->
+ transaction(get(mnesia_activity_state), Fun, [], Retries, ?DEFAULT_ACCESS, sync);
+sync_transaction(Fun, Retries) when Retries == infinity ->
+ transaction(get(mnesia_activity_state), Fun, [], Retries, ?DEFAULT_ACCESS, sync);
+sync_transaction(Fun, Args) ->
+ transaction(get(mnesia_activity_state), Fun, Args, infinity, ?DEFAULT_ACCESS, sync).
+sync_transaction(Fun, Args, Retries) ->
+ transaction(get(mnesia_activity_state), Fun, Args, Retries, ?DEFAULT_ACCESS, sync).
+
+
+transaction(State, Fun, Args, Retries, Mod, Kind)
+ when function(Fun), list(Args), Retries == infinity, atom(Mod) ->
+ mnesia_tm:transaction(State, Fun, Args, Retries, Mod, Kind);
+transaction(State, Fun, Args, Retries, Mod, Kind)
+ when function(Fun), list(Args), integer(Retries), Retries >= 0, atom(Mod) ->
+ mnesia_tm:transaction(State, Fun, Args, Retries, Mod, Kind);
+transaction(_State, Fun, Args, Retries, Mod, _Kind) ->
+ {aborted, {badarg, Fun, Args, Retries, Mod}}.
+
+non_transaction(State, Fun, Args, ActivityKind, Mod)
+ when function(Fun), list(Args), atom(Mod) ->
+ mnesia_tm:non_transaction(State, Fun, Args, ActivityKind, Mod);
+non_transaction(_State, Fun, Args, _ActivityKind, _Mod) ->
+ {aborted, {badarg, Fun, Args}}.
+
+async_dirty(Fun) ->
+ async_dirty(Fun, []).
+async_dirty(Fun, Args) ->
+ non_transaction(get(mnesia_activity_state), Fun, Args, async_dirty, ?DEFAULT_ACCESS).
+
+sync_dirty(Fun) ->
+ sync_dirty(Fun, []).
+sync_dirty(Fun, Args) ->
+ non_transaction(get(mnesia_activity_state), Fun, Args, sync_dirty, ?DEFAULT_ACCESS).
+
+ets(Fun) ->
+ ets(Fun, []).
+ets(Fun, Args) ->
+ non_transaction(get(mnesia_activity_state), Fun, Args, ets, ?DEFAULT_ACCESS).
+
+activity(Kind, Fun) ->
+ activity(Kind, Fun, []).
+activity(Kind, Fun, Args) when list(Args) ->
+ activity(Kind, Fun, Args, mnesia_monitor:get_env(access_module));
+activity(Kind, Fun, Mod) ->
+ activity(Kind, Fun, [], Mod).
+
+activity(Kind, Fun, Args, Mod) ->
+ State = get(mnesia_activity_state),
+ case Kind of
+ ets -> non_transaction(State, Fun, Args, Kind, Mod);
+ async_dirty -> non_transaction(State, Fun, Args, Kind, Mod);
+ sync_dirty -> non_transaction(State, Fun, Args, Kind, Mod);
+ transaction -> wrap_trans(State, Fun, Args, infinity, Mod, async);
+ {transaction, Retries} -> wrap_trans(State, Fun, Args, Retries, Mod, async);
+ sync_transaction -> wrap_trans(State, Fun, Args, infinity, Mod, sync);
+ {sync_transaction, Retries} -> wrap_trans(State, Fun, Args, Retries, Mod, sync);
+ _ -> {aborted, {bad_type, Kind}}
+ end.
+
+wrap_trans(State, Fun, Args, Retries, Mod, Kind) ->
+ case transaction(State, Fun, Args, Retries, Mod, Kind) of
+ {'atomic', GoodRes} -> GoodRes;
+ BadRes -> exit(BadRes)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Access within an activity - lock acquisition
+
+%% Grab a lock on an item in the global lock table
+%% Item may be any term. Lock may be write or read.
+%% write lock is set on all the given nodes
+%% read lock is only set on the first node
+%% Nodes may either be a list of nodes or one node as an atom
+%% Mnesia on all Nodes must be connected to each other, but
+%% it is not necessary that they are up and running.
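+%%
+%% A minimal usage sketch (illustrative only; the table name 'person'
+%% and the record {person, kalle, 25} are hypothetical):
+%%
+%%   mnesia:transaction(fun() ->
+%%           mnesia:lock({table, person}, write),
+%%           mnesia:write({person, kalle, 25})
+%%       end)
+%%
+%% read_lock_table/1 and write_lock_table/1 below are conveniences
+%% for the {table, Tab} form of lock/2.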
+
+lock(LockItem, LockKind) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ lock(Tid, Ts, LockItem, LockKind);
+ {Mod, Tid, Ts} ->
+ Mod:lock(Tid, Ts, LockItem, LockKind);
+ _ ->
+ abort(no_transaction)
+ end.
+
+lock(Tid, Ts, LockItem, LockKind) ->
+ case element(1, Tid) of
+ tid ->
+ case LockItem of
+ {record, Tab, Key} ->
+ lock_record(Tid, Ts, Tab, Key, LockKind);
+ {table, Tab} ->
+ lock_table(Tid, Ts, Tab, LockKind);
+ {global, GlobalKey, Nodes} ->
+ global_lock(Tid, Ts, GlobalKey, LockKind, Nodes);
+ _ ->
+ abort({bad_type, LockItem})
+ end;
+ _Protocol ->
+ []
+ end.
+
+%% Grab a read lock on a whole table
+read_lock_table(Tab) ->
+ lock({table, Tab}, read),
+ ok.
+
+%% Grab a write lock on a whole table
+write_lock_table(Tab) ->
+ lock({table, Tab}, write),
+ ok.
+
+lock_record(Tid, Ts, Tab, Key, LockKind) when atom(Tab) ->
+ Store = Ts#tidstore.store,
+ Oid = {Tab, Key},
+ case LockKind of
+ read ->
+ mnesia_locker:rlock(Tid, Store, Oid);
+ write ->
+ mnesia_locker:wlock(Tid, Store, Oid);
+ sticky_write ->
+ mnesia_locker:sticky_wlock(Tid, Store, Oid);
+ none ->
+ [];
+ _ ->
+ abort({bad_type, Tab, LockKind})
+ end;
+lock_record(_Tid, _Ts, Tab, _Key, _LockKind) ->
+ abort({bad_type, Tab}).
+
+lock_table(Tid, Ts, Tab, LockKind) when atom(Tab) ->
+ Store = Ts#tidstore.store,
+ case LockKind of
+ read ->
+ mnesia_locker:rlock_table(Tid, Store, Tab);
+ write ->
+ mnesia_locker:wlock_table(Tid, Store, Tab);
+ sticky_write ->
+ mnesia_locker:sticky_wlock_table(Tid, Store, Tab);
+ none ->
+ [];
+ _ ->
+ abort({bad_type, Tab, LockKind})
+ end;
+lock_table(_Tid, _Ts, Tab, _LockKind) ->
+ abort({bad_type, Tab}).
+
+global_lock(Tid, Ts, Item, Kind, Nodes) when list(Nodes) ->
+ case element(1, Tid) of
+ tid ->
+ Store = Ts#tidstore.store,
+ GoodNs = good_global_nodes(Nodes),
+ if
+ Kind /= read, Kind /= write ->
+ abort({bad_type, Kind});
+ true ->
+ mnesia_locker:global_lock(Tid, Store, Item, Kind, GoodNs)
+ end;
+ _Protocol ->
+ []
+ end;
+global_lock(_Tid, _Ts, _Item, _Kind, Nodes) ->
+ abort({bad_type, Nodes}).
+
+good_global_nodes(Nodes) ->
+ Recover = [node() | val(recover_nodes)],
+ mnesia_lib:intersect(Nodes, Recover).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Access within an activity - updates
+
+write(Val) when tuple(Val), size(Val) > 2 ->
+ Tab = element(1, Val),
+ write(Tab, Val, write);
+write(Val) ->
+ abort({bad_type, Val}).
+
+s_write(Val) when tuple(Val), size(Val) > 2 ->
+ Tab = element(1, Val),
+ write(Tab, Val, sticky_write).
+
+write(Tab, Val, LockKind) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ write(Tid, Ts, Tab, Val, LockKind);
+ {Mod, Tid, Ts} ->
+ Mod:write(Tid, Ts, Tab, Val, LockKind);
+ _ ->
+ abort(no_transaction)
+ end.
+
+write(Tid, Ts, Tab, Val, LockKind)
+ when atom(Tab), Tab /= schema, tuple(Val), size(Val) > 2 ->
+ case element(1, Tid) of
+ ets ->
+ ?ets_insert(Tab, Val),
+ ok;
+ tid ->
+ Store = Ts#tidstore.store,
+ Oid = {Tab, element(2, Val)},
+ case LockKind of
+ write ->
+ mnesia_locker:wlock(Tid, Store, Oid);
+ sticky_write ->
+ mnesia_locker:sticky_wlock(Tid, Store, Oid);
+ _ ->
+ abort({bad_type, Tab, LockKind})
+ end,
+ write_to_store(Tab, Store, Oid, Val);
+ Protocol ->
+ do_dirty_write(Protocol, Tab, Val)
+ end;
+write(_Tid, _Ts, Tab, Val, LockKind) ->
+ abort({bad_type, Tab, Val, LockKind}).
+
+write_to_store(Tab, Store, Oid, Val) ->
+ case ?catch_val({Tab, record_validation}) of
+ {RecName, Arity, Type}
+ when size(Val) == Arity, RecName == element(1, Val) ->
+ case Type of
+ bag ->
+ ?ets_insert(Store, {Oid, Val, write});
+ _ ->
+ ?ets_delete(Store, Oid),
+ ?ets_insert(Store, {Oid, Val, write})
+ end,
+ ok;
+ {'EXIT', _} ->
+ abort({no_exists, Tab});
+ _ ->
+ abort({bad_type, Val})
+ end.
+
+delete({Tab, Key}) ->
+ delete(Tab, Key, write);
+delete(Oid) ->
+ abort({bad_type, Oid}).
+
+s_delete({Tab, Key}) ->
+ delete(Tab, Key, sticky_write);
+s_delete(Oid) ->
+ abort({bad_type, Oid}).
+
+delete(Tab, Key, LockKind) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ delete(Tid, Ts, Tab, Key, LockKind);
+ {Mod, Tid, Ts} ->
+ Mod:delete(Tid, Ts, Tab, Key, LockKind);
+ _ ->
+ abort(no_transaction)
+ end.
+
+delete(Tid, Ts, Tab, Key, LockKind)
+ when atom(Tab), Tab /= schema ->
+ case element(1, Tid) of
+ ets ->
+ ?ets_delete(Tab, Key),
+ ok;
+ tid ->
+ Store = Ts#tidstore.store,
+ Oid = {Tab, Key},
+ case LockKind of
+ write ->
+ mnesia_locker:wlock(Tid, Store, Oid);
+ sticky_write ->
+ mnesia_locker:sticky_wlock(Tid, Store, Oid);
+ _ ->
+ abort({bad_type, Tab, LockKind})
+ end,
+ ?ets_delete(Store, Oid),
+ ?ets_insert(Store, {Oid, Oid, delete}),
+ ok;
+ Protocol ->
+ do_dirty_delete(Protocol, Tab, Key)
+ end;
+delete(_Tid, _Ts, Tab, _Key, _LockKind) ->
+ abort({bad_type, Tab}).
+
+delete_object(Val) when tuple(Val), size(Val) > 2 ->
+ Tab = element(1, Val),
+ delete_object(Tab, Val, write);
+delete_object(Val) ->
+ abort({bad_type, Val}).
+
+s_delete_object(Val) when tuple(Val), size(Val) > 2 ->
+ Tab = element(1, Val),
+ delete_object(Tab, Val, sticky_write);
+s_delete_object(Val) ->
+ abort({bad_type, Val}).
+
+delete_object(Tab, Val, LockKind) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ delete_object(Tid, Ts, Tab, Val, LockKind);
+ {Mod, Tid, Ts} ->
+ Mod:delete_object(Tid, Ts, Tab, Val, LockKind);
+ _ ->
+ abort(no_transaction)
+ end.
+
+delete_object(Tid, Ts, Tab, Val, LockKind)
+ when atom(Tab), Tab /= schema, tuple(Val), size(Val) > 2 ->
+ case element(1, Tid) of
+ ets ->
+ ?ets_match_delete(Tab, Val),
+ ok;
+ tid ->
+ Store = Ts#tidstore.store,
+ Oid = {Tab, element(2, Val)},
+ case LockKind of
+ write ->
+ mnesia_locker:wlock(Tid, Store, Oid);
+ sticky_write ->
+ mnesia_locker:sticky_wlock(Tid, Store, Oid);
+ _ ->
+ abort({bad_type, Tab, LockKind})
+ end,
+ case val({Tab, setorbag}) of
+ bag ->
+ ?ets_match_delete(Store, {Oid, Val, '_'}),
+ ?ets_insert(Store, {Oid, Val, delete_object});
+ _ ->
+ case ?ets_match_object(Store, {Oid, '_', write}) of
+ [] ->
+ ?ets_match_delete(Store, {Oid, Val, '_'}),
+ ?ets_insert(Store, {Oid, Val, delete_object});
+ _ ->
+ ?ets_delete(Store, Oid),
+ ?ets_insert(Store, {Oid, Oid, delete})
+ end
+ end,
+ ok;
+ Protocol ->
+ do_dirty_delete_object(Protocol, Tab, Val)
+ end;
+delete_object(_Tid, _Ts, Tab, _Key, _LockKind) ->
+ abort({bad_type, Tab}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Access within an activity - read
+
+read({Tab, Key}) ->
+ read(Tab, Key, read);
+read(Oid) ->
+ abort({bad_type, Oid}).
+
+wread({Tab, Key}) ->
+ read(Tab, Key, write);
+wread(Oid) ->
+ abort({bad_type, Oid}).
+
+read(Tab, Key, LockKind) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ read(Tid, Ts, Tab, Key, LockKind);
+ {Mod, Tid, Ts} ->
+ Mod:read(Tid, Ts, Tab, Key, LockKind);
+ _ ->
+ abort(no_transaction)
+ end.
+
+read(Tid, Ts, Tab, Key, LockKind)
+ when atom(Tab), Tab /= schema ->
+ case element(1, Tid) of
+ ets ->
+ ?ets_lookup(Tab, Key);
+ tid ->
+ Store = Ts#tidstore.store,
+ Oid = {Tab, Key},
+ Objs =
+ case LockKind of
+ read ->
+ mnesia_locker:rlock(Tid, Store, Oid);
+ write ->
+ mnesia_locker:rwlock(Tid, Store, Oid);
+ sticky_write ->
+ mnesia_locker:sticky_rwlock(Tid, Store, Oid);
+ _ ->
+ abort({bad_type, Tab, LockKind})
+ end,
+ add_written(?ets_lookup(Store, Oid), Tab, Objs);
+ _Protocol ->
+ dirty_read(Tab, Key)
+ end;
+read(_Tid, _Ts, Tab, _Key, _LockKind) ->
+ abort({bad_type, Tab}).
+
+%%%%%%%%%%%%%%%%%%%%%
+%% Iterators
+
+foldl(Fun, Acc, Tab) ->
+ foldl(Fun, Acc, Tab, read).
+
+foldl(Fun, Acc, Tab, LockKind) when function(Fun) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ foldl(Tid, Ts, Fun, Acc, Tab, LockKind);
+ {Mod, Tid, Ts} ->
+ Mod:foldl(Tid, Ts, Fun, Acc, Tab, LockKind);
+ _ ->
+ abort(no_transaction)
+ end.
+
+foldl(ActivityId, Opaque, Fun, Acc, Tab, LockKind) ->
+ {Type, Prev} = init_iteration(ActivityId, Opaque, Tab, LockKind),
+ Res = (catch do_foldl(ActivityId, Opaque, Tab, dirty_first(Tab), Fun, Acc, Type, Prev)),
+ close_iteration(Res, Tab).
+
+do_foldl(A, O, Tab, '$end_of_table', Fun, RAcc, _Type, Stored) ->
+ lists:foldl(fun(Key, Acc) ->
+ lists:foldl(Fun, Acc, read(A, O, Tab, Key, read))
+ end, RAcc, Stored);
+do_foldl(A, O, Tab, Key, Fun, Acc, ordered_set, [H | Stored]) when H == Key ->
+ NewAcc = lists:foldl(Fun, Acc, read(A, O, Tab, Key, read)),
+ do_foldl(A, O, Tab, dirty_next(Tab, Key), Fun, NewAcc, ordered_set, Stored);
+do_foldl(A, O, Tab, Key, Fun, Acc, ordered_set, [H | Stored]) when H < Key ->
+ NewAcc = lists:foldl(Fun, Acc, read(A, O, Tab, H, read)),
+ do_foldl(A, O, Tab, Key, Fun, NewAcc, ordered_set, Stored);
+do_foldl(A, O, Tab, Key, Fun, Acc, ordered_set, [H | Stored]) when H > Key ->
+ NewAcc = lists:foldl(Fun, Acc, read(A, O, Tab, Key, read)),
+ do_foldl(A, O, Tab, dirty_next(Tab, Key), Fun, NewAcc, ordered_set, [H |Stored]);
+do_foldl(A, O, Tab, Key, Fun, Acc, Type, Stored) -> %% Type is set or bag
+ NewAcc = lists:foldl(Fun, Acc, read(A, O, Tab, Key, read)),
+ NewStored = ordsets:del_element(Key, Stored),
+ do_foldl(A, O, Tab, dirty_next(Tab, Key), Fun, NewAcc, Type, NewStored).
+
+foldr(Fun, Acc, Tab) ->
+ foldr(Fun, Acc, Tab, read).
+foldr(Fun, Acc, Tab, LockKind) when function(Fun) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ foldr(Tid, Ts, Fun, Acc, Tab, LockKind);
+ {Mod, Tid, Ts} ->
+ Mod:foldr(Tid, Ts, Fun, Acc, Tab, LockKind);
+ _ ->
+ abort(no_transaction)
+ end.
+
+foldr(ActivityId, Opaque, Fun, Acc, Tab, LockKind) ->
+ {Type, TempPrev} = init_iteration(ActivityId, Opaque, Tab, LockKind),
+ Prev =
+ if
+ Type == ordered_set ->
+ lists:reverse(TempPrev);
+ true -> %% Order doesn't matter for set and bag
+ TempPrev %% Keep the order so we can use ordsets:del_element
+ end,
+ Res = (catch do_foldr(ActivityId, Opaque, Tab, dirty_last(Tab), Fun, Acc, Type, Prev)),
+ close_iteration(Res, Tab).
+
+do_foldr(A, O, Tab, '$end_of_table', Fun, RAcc, _Type, Stored) ->
+ lists:foldl(fun(Key, Acc) ->
+ lists:foldl(Fun, Acc, read(A, O, Tab, Key, read))
+ end, RAcc, Stored);
+do_foldr(A, O, Tab, Key, Fun, Acc, ordered_set, [H | Stored]) when H == Key ->
+ NewAcc = lists:foldl(Fun, Acc, read(A, O, Tab, Key, read)),
+ do_foldr(A, O, Tab, dirty_prev(Tab, Key), Fun, NewAcc, ordered_set, Stored);
+do_foldr(A, O, Tab, Key, Fun, Acc, ordered_set, [H | Stored]) when H > Key ->
+ NewAcc = lists:foldl(Fun, Acc, read(A, O, Tab, H, read)),
+ do_foldr(A, O, Tab, Key, Fun, NewAcc, ordered_set, Stored);
+do_foldr(A, O, Tab, Key, Fun, Acc, ordered_set, [H | Stored]) when H < Key ->
+ NewAcc = lists:foldl(Fun, Acc, read(A, O, Tab, Key, read)),
+ do_foldr(A, O, Tab, dirty_prev(Tab, Key), Fun, NewAcc, ordered_set, [H |Stored]);
+do_foldr(A, O, Tab, Key, Fun, Acc, Type, Stored) -> %% Type is set or bag
+ NewAcc = lists:foldl(Fun, Acc, read(A, O, Tab, Key, read)),
+ NewStored = ordsets:del_element(Key, Stored),
+ do_foldr(A, O, Tab, dirty_prev(Tab, Key), Fun, NewAcc, Type, NewStored).
+
+init_iteration(ActivityId, Opaque, Tab, LockKind) ->
+ lock(ActivityId, Opaque, {table, Tab}, LockKind),
+ Type = val({Tab, setorbag}),
+ Previous = add_previous(ActivityId, Opaque, Type, Tab),
+ St = val({Tab, storage_type}),
+ if
+ St == unknown ->
+ ignore;
+ true ->
+ mnesia_lib:db_fixtable(St, Tab, true)
+ end,
+ {Type, Previous}.
+
+close_iteration(Res, Tab) ->
+ case val({Tab, storage_type}) of
+ unknown ->
+ ignore;
+ St ->
+ mnesia_lib:db_fixtable(St, Tab, false)
+ end,
+ case Res of
+ {'EXIT', {aborted, What}} ->
+ abort(What);
+ {'EXIT', What} ->
+ abort(What);
+ _ ->
+ Res
+ end.
+
+add_previous(_ActivityId, non_transaction, _Type, _Tab) ->
+ [];
+add_previous(_Tid, Ts, _Type, Tab) ->
+ Previous = ?ets_match(Ts#tidstore.store, {{Tab, '$1'}, '_', write}),
+ lists:sort(lists:concat(Previous)).
+
+%% This routine fixes up the return value from read/1 so that
+%% it is correct with respect to what this particular transaction
+%% has already written, deleted, etc.
+
+add_written([], _Tab, Objs) ->
+ Objs; % standard normal fast case
+add_written(Written, Tab, Objs) ->
+ case val({Tab, setorbag}) of
+ bag ->
+ add_written_to_bag(Written, Objs, []);
+ _ ->
+ add_written_to_set(Written)
+ end.
+
+add_written_to_set(Ws) ->
+ case lists:last(Ws) of
+ {_, _, delete} -> [];
+ {_, Val, write} -> [Val];
+ {_, _, delete_object} -> []
+ end.
+
+add_written_to_bag([{_, Val, write} | Tail], Objs, Ack) ->
+ add_written_to_bag(Tail, lists:delete(Val, Objs), [Val | Ack]);
+add_written_to_bag([], Objs, Ack) ->
+ Objs ++ lists:reverse(Ack); %% Oldest write first as in ets
+add_written_to_bag([{_, _ , delete} | Tail], _Objs, _Ack) ->
+ %% This transaction just deleted all objects
+ %% with this key
+ add_written_to_bag(Tail, [], []);
+add_written_to_bag([{_, Val, delete_object} | Tail], Objs, Ack) ->
+ add_written_to_bag(Tail, lists:delete(Val, Objs), lists:delete(Val, Ack)).
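+
+%% Illustrative sketch (hypothetical table 'person'): within a single
+%% transaction, a read observes the transaction's own, not yet
+%% committed, writes thanks to the fix-up above:
+%%
+%%   mnesia:transaction(fun() ->
+%%           mnesia:write({person, kalle, 25}),
+%%           [{person, kalle, 25}] = mnesia:read({person, kalle})
+%%       end)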
+
+match_object(Pat) when tuple(Pat), size(Pat) > 2 ->
+ Tab = element(1, Pat),
+ match_object(Tab, Pat, read);
+match_object(Pat) ->
+ abort({bad_type, Pat}).
+
+match_object(Tab, Pat, LockKind) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ match_object(Tid, Ts, Tab, Pat, LockKind);
+ {Mod, Tid, Ts} ->
+ Mod:match_object(Tid, Ts, Tab, Pat, LockKind);
+ _ ->
+ abort(no_transaction)
+ end.
+
+match_object(Tid, Ts, Tab, Pat, LockKind)
+ when atom(Tab), Tab /= schema, tuple(Pat), size(Pat) > 2 ->
+ case element(1, Tid) of
+ ets ->
+ mnesia_lib:db_match_object(ram_copies, Tab, Pat);
+ tid ->
+ Key = element(2, Pat),
+ case has_var(Key) of
+ false -> lock_record(Tid, Ts, Tab, Key, LockKind);
+ true -> lock_table(Tid, Ts, Tab, LockKind)
+ end,
+ Objs = dirty_match_object(Tab, Pat),
+ add_written_match(Ts#tidstore.store, Pat, Tab, Objs);
+ _Protocol ->
+ dirty_match_object(Tab, Pat)
+ end;
+match_object(_Tid, _Ts, Tab, Pat, _LockKind) ->
+ abort({bad_type, Tab, Pat}).
+
+add_written_match(S, Pat, Tab, Objs) ->
+ Ops = find_ops(S, Tab, Pat),
+ add_match(Ops, Objs, val({Tab, setorbag})).
+
+find_ops(S, Tab, Pat) ->
+ GetWritten = [{{{Tab, '_'}, Pat, write}, [], ['$_']},
+ {{{Tab, '_'}, '_', delete}, [], ['$_']},
+ {{{Tab, '_'}, Pat, delete_object}, [], ['$_']}],
+ ets:select(S, GetWritten).
+
+add_match([], Objs, _Type) ->
+ Objs;
+add_match(Written, Objs, ordered_set) ->
+ %% Must use keysort which is stable
+ add_ordered_match(lists:keysort(1,Written), Objs, []);
+add_match([{Oid, _, delete}|R], Objs, Type) ->
+ add_match(R, deloid(Oid, Objs), Type);
+add_match([{_Oid, Val, delete_object}|R], Objs, Type) ->
+ add_match(R, lists:delete(Val, Objs), Type);
+add_match([{_Oid, Val, write}|R], Objs, bag) ->
+ add_match(R, [Val | lists:delete(Val, Objs)], bag);
+add_match([{Oid, Val, write}|R], Objs, set) ->
+ add_match(R, [Val | deloid(Oid,Objs)],set).
+
+%% For ordered_set only !!
+add_ordered_match(Written = [{{_, Key}, _, _}|_], [Obj|Objs], Acc)
+ when Key > element(2, Obj) ->
+ add_ordered_match(Written, Objs, [Obj|Acc]);
+add_ordered_match([{{_, Key}, Val, write}|Rest], Objs =[Obj|_], Acc)
+ when Key < element(2, Obj) ->
+ add_ordered_match(Rest, [Val|Objs],Acc);
+add_ordered_match([{{_, Key}, _, _DelOP}|Rest], Objs =[Obj|_], Acc)
+ when Key < element(2, Obj) ->
+ add_ordered_match(Rest,Objs,Acc);
+%% Greater than last object
+add_ordered_match([{_, Val, write}|Rest], [], Acc) ->
+ add_ordered_match(Rest, [Val], Acc);
+add_ordered_match([_|Rest], [], Acc) ->
+ add_ordered_match(Rest, [], Acc);
+%% Keys are equal from here
+add_ordered_match([{_, Val, write}|Rest], [_Obj|Objs], Acc) ->
+ add_ordered_match(Rest, [Val|Objs], Acc);
+add_ordered_match([{_, _Val, delete}|Rest], [_Obj|Objs], Acc) ->
+ add_ordered_match(Rest, Objs, Acc);
+add_ordered_match([{_, Val, delete_object}|Rest], [Val|Objs], Acc) ->
+ add_ordered_match(Rest, Objs, Acc);
+add_ordered_match([{_, _, delete_object}|Rest], Objs, Acc) ->
+ add_ordered_match(Rest, Objs, Acc);
+add_ordered_match([], Objs, Acc) ->
+ lists:reverse(Acc, Objs).
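+
+%% Worked example (hypothetical ordered_set table 't'): merging a write
+%% of {t,2,b} into the objects [{t,1,a}, {t,3,c}] read from the table:
+%%
+%%   add_ordered_match([{{t,2}, {t,2,b}, write}], [{t,1,a}, {t,3,c}], [])
+%%   returns [{t,1,a}, {t,2,b}, {t,3,c}]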
+
+
+%%%%%%%%%%%%%%%%%%
+% select
+
+select(Tab, Pat) ->
+ select(Tab, Pat, read).
+select(Tab, Pat, LockKind)
+ when atom(Tab), Tab /= schema, list(Pat) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ select(Tid, Ts, Tab, Pat, LockKind);
+ {Mod, Tid, Ts} ->
+ Mod:select(Tid, Ts, Tab, Pat, LockKind);
+ _ ->
+ abort(no_transaction)
+ end;
+select(Tab, Pat, _Lock) ->
+ abort({badarg, Tab, Pat}).
+
+select(Tid, Ts, Tab, Spec, LockKind) ->
+ SelectFun = fun(FixedSpec) -> dirty_select(Tab, FixedSpec) end,
+ fun_select(Tid, Ts, Tab, Spec, LockKind, Tab, SelectFun).
+
+fun_select(Tid, Ts, Tab, Spec, LockKind, TabPat, SelectFun) ->
+ case element(1, Tid) of
+ ets ->
+ mnesia_lib:db_select(ram_copies, Tab, Spec);
+ tid ->
+ Store = Ts#tidstore.store,
+ Written = ?ets_match_object(Store, {{TabPat, '_'}, '_', '_'}),
+ %% Avoid table lock if possible
+ case Spec of
+ [{HeadPat,_, _}] when tuple(HeadPat), size(HeadPat) > 2 ->
+ Key = element(2, HeadPat),
+ case has_var(Key) of
+ false -> lock_record(Tid, Ts, Tab, Key, LockKind);
+ true -> lock_table(Tid, Ts, Tab, LockKind)
+ end;
+ _ ->
+ lock_table(Tid, Ts, Tab, LockKind)
+ end,
+ case Written of
+ [] ->
+ %% Nothing changed in the table during this transaction;
+ %% simple case: get the results directly from [d]ets
+ SelectFun(Spec);
+ _ ->
+ %% Hard (slow) case: records were added or deleted earlier
+ %% in the transaction, and we have to cope with that.
+ Type = val({Tab, setorbag}),
+ FixedSpec = get_record_pattern(Spec),
+ TabRecs = SelectFun(FixedSpec),
+ FixedRes = add_match(Written, TabRecs, Type),
+ CMS = ets:match_spec_compile(Spec),
+% case Type of
+% ordered_set ->
+% ets:match_spec_run(lists:sort(FixedRes), CMS);
+% _ ->
+% ets:match_spec_run(FixedRes, CMS)
+% end
+ ets:match_spec_run(FixedRes, CMS)
+ end;
+ _Protocol ->
+ SelectFun(Spec)
+ end.
+
+get_record_pattern([]) ->
+ [];
+get_record_pattern([{M,C,_B}|R]) ->
+ [{M,C,['$_']} | get_record_pattern(R)].
+
+deloid(_Oid, []) ->
+ [];
+deloid({Tab, Key}, [H | T]) when element(2, H) == Key ->
+ deloid({Tab, Key}, T);
+deloid(Oid, [H | T]) ->
+ [H | deloid(Oid, T)].
+
+all_keys(Tab) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ all_keys(Tid, Ts, Tab, read);
+ {Mod, Tid, Ts} ->
+ Mod:all_keys(Tid, Ts, Tab, read);
+ _ ->
+ abort(no_transaction)
+ end.
+
+all_keys(Tid, Ts, Tab, LockKind)
+ when atom(Tab), Tab /= schema ->
+ Pat0 = val({Tab, wild_pattern}),
+ Pat = setelement(2, Pat0, '$1'),
+ Keys = select(Tid, Ts, Tab, [{Pat, [], ['$1']}], LockKind),
+ case val({Tab, setorbag}) of
+ bag ->
+ mnesia_lib:uniq(Keys);
+ _ ->
+ Keys
+ end;
+all_keys(_Tid, _Ts, Tab, _LockKind) ->
+ abort({bad_type, Tab}).
+
+index_match_object(Pat, Attr) when tuple(Pat), size(Pat) > 2 ->
+ Tab = element(1, Pat),
+ index_match_object(Tab, Pat, Attr, read);
+index_match_object(Pat, _Attr) ->
+ abort({bad_type, Pat}).
+
+index_match_object(Tab, Pat, Attr, LockKind) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ index_match_object(Tid, Ts, Tab, Pat, Attr, LockKind);
+ {Mod, Tid, Ts} ->
+ Mod:index_match_object(Tid, Ts, Tab, Pat, Attr, LockKind);
+ _ ->
+ abort(no_transaction)
+ end.
+
+index_match_object(Tid, Ts, Tab, Pat, Attr, LockKind)
+ when atom(Tab), Tab /= schema, tuple(Pat), size(Pat) > 2 ->
+ case element(1, Tid) of
+ ets ->
+ dirty_index_match_object(Tab, Pat, Attr); % Should be optimized?
+ tid ->
+ case mnesia_schema:attr_tab_to_pos(Tab, Attr) of
+ Pos when Pos =< size(Pat) ->
+ case LockKind of
+ read ->
+ Store = Ts#tidstore.store,
+ mnesia_locker:rlock_table(Tid, Store, Tab),
+ Objs = dirty_index_match_object(Tab, Pat, Attr),
+ add_written_match(Store, Pat, Tab, Objs);
+ _ ->
+ abort({bad_type, Tab, LockKind})
+ end;
+ BadPos ->
+ abort({bad_type, Tab, BadPos})
+ end;
+ _Protocol ->
+ dirty_index_match_object(Tab, Pat, Attr)
+ end;
+index_match_object(_Tid, _Ts, Tab, Pat, _Attr, _LockKind) ->
+ abort({bad_type, Tab, Pat}).
+
+index_read(Tab, Key, Attr) ->
+ case get(mnesia_activity_state) of
+ {?DEFAULT_ACCESS, Tid, Ts} ->
+ index_read(Tid, Ts, Tab, Key, Attr, read);
+ {Mod, Tid, Ts} ->
+ Mod:index_read(Tid, Ts, Tab, Key, Attr, read);
+ _ ->
+ abort(no_transaction)
+ end.
+
+index_read(Tid, Ts, Tab, Key, Attr, LockKind)
+ when atom(Tab), Tab /= schema ->
+ case element(1, Tid) of
+ ets ->
+ dirty_index_read(Tab, Key, Attr); % Should be optimized?
+ tid ->
+ Pos = mnesia_schema:attr_tab_to_pos(Tab, Attr),
+ case LockKind of
+ read ->
+ case has_var(Key) of
+ false ->
+ Store = Ts#tidstore.store,
+ Objs = mnesia_index:read(Tid, Store, Tab, Key, Pos),
+ Pat = setelement(Pos, val({Tab, wild_pattern}), Key),
+ add_written_match(Store, Pat, Tab, Objs);
+ true ->
+ abort({bad_type, Tab, Attr, Key})
+ end;
+ _ ->
+ abort({bad_type, Tab, LockKind})
+ end;
+ _Protocol ->
+ dirty_index_read(Tab, Key, Attr)
+ end;
+index_read(_Tid, _Ts, Tab, _Key, _Attr, _LockKind) ->
+ abort({bad_type, Tab}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Dirty access regardless of activities - updates
+
+dirty_write(Val) when tuple(Val), size(Val) > 2 ->
+ Tab = element(1, Val),
+ dirty_write(Tab, Val);
+dirty_write(Val) ->
+ abort({bad_type, Val}).
+
+dirty_write(Tab, Val) ->
+ do_dirty_write(async_dirty, Tab, Val).
+
+do_dirty_write(SyncMode, Tab, Val)
+ when atom(Tab), Tab /= schema, tuple(Val), size(Val) > 2 ->
+ case ?catch_val({Tab, record_validation}) of
+ {RecName, Arity, _Type}
+ when size(Val) == Arity, RecName == element(1, Val) ->
+ Oid = {Tab, element(2, Val)},
+ mnesia_tm:dirty(SyncMode, {Oid, Val, write});
+ {'EXIT', _} ->
+ abort({no_exists, Tab});
+ _ ->
+ abort({bad_type, Val})
+ end;
+do_dirty_write(_SyncMode, Tab, Val) ->
+ abort({bad_type, Tab, Val}).
+
+dirty_delete({Tab, Key}) ->
+ dirty_delete(Tab, Key);
+dirty_delete(Oid) ->
+ abort({bad_type, Oid}).
+
+dirty_delete(Tab, Key) ->
+ do_dirty_delete(async_dirty, Tab, Key).
+
+do_dirty_delete(SyncMode, Tab, Key) when atom(Tab), Tab /= schema ->
+ Oid = {Tab, Key},
+ mnesia_tm:dirty(SyncMode, {Oid, Oid, delete});
+do_dirty_delete(_SyncMode, Tab, _Key) ->
+ abort({bad_type, Tab}).
+
+dirty_delete_object(Val) when tuple(Val), size(Val) > 2 ->
+ Tab = element(1, Val),
+ dirty_delete_object(Tab, Val);
+dirty_delete_object(Val) ->
+ abort({bad_type, Val}).
+
+dirty_delete_object(Tab, Val) ->
+ do_dirty_delete_object(async_dirty, Tab, Val).
+
+do_dirty_delete_object(SyncMode, Tab, Val)
+ when atom(Tab), Tab /= schema, tuple(Val), size(Val) > 2 ->
+ Oid = {Tab, element(2, Val)},
+ mnesia_tm:dirty(SyncMode, {Oid, Val, delete_object});
+do_dirty_delete_object(_SyncMode, Tab, Val) ->
+ abort({bad_type, Tab, Val}).
+
+%% A Counter is an Oid being {CounterTab, CounterName}
+
+dirty_update_counter({Tab, Key}, Incr) ->
+ dirty_update_counter(Tab, Key, Incr);
+dirty_update_counter(Counter, _Incr) ->
+ abort({bad_type, Counter}).
+
+dirty_update_counter(Tab, Key, Incr) ->
+ do_dirty_update_counter(async_dirty, Tab, Key, Incr).
+
+do_dirty_update_counter(SyncMode, Tab, Key, Incr)
+ when atom(Tab), Tab /= schema, integer(Incr) ->
+ case ?catch_val({Tab, record_validation}) of
+ {RecName, 3, set} ->
+ Oid = {Tab, Key},
+ mnesia_tm:dirty(SyncMode, {Oid, {RecName, Incr}, update_counter});
+ _ ->
+ abort({combine_error, Tab, update_counter})
+ end;
+do_dirty_update_counter(_SyncMode, Tab, _Key, Incr) ->
+ abort({bad_type, Tab, Incr}).
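+
+%% Usage sketch (the table name 'counters' is hypothetical; as checked
+%% in do_dirty_update_counter/4 above, a counter table must be a set
+%% with records of arity 3, e.g. {counters, Name, Value}):
+%%
+%%   mnesia:dirty_update_counter({counters, hits}, 1)
+%%
+%% bumps the value field of the {counters, hits, Value} record by 1.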
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Dirty access regardless of activities - read
+
+dirty_read({Tab, Key}) ->
+ dirty_read(Tab, Key);
+dirty_read(Oid) ->
+ abort({bad_type, Oid}).
+
+dirty_read(Tab, Key)
+ when atom(Tab), Tab /= schema ->
+%% case catch ?ets_lookup(Tab, Key) of
+%% {'EXIT', _} ->
+ %% Bad luck, we have to perform a real lookup
+ dirty_rpc(Tab, mnesia_lib, db_get, [Tab, Key]);
+%% Val ->
+%% Val
+%% end;
+dirty_read(Tab, _Key) ->
+ abort({bad_type, Tab}).
+
+dirty_match_object(Pat) when tuple(Pat), size(Pat) > 2 ->
+ Tab = element(1, Pat),
+ dirty_match_object(Tab, Pat);
+dirty_match_object(Pat) ->
+ abort({bad_type, Pat}).
+
+dirty_match_object(Tab, Pat)
+ when atom(Tab), Tab /= schema, tuple(Pat), size(Pat) > 2 ->
+ dirty_rpc(Tab, ?MODULE, remote_dirty_match_object, [Tab, Pat]);
+dirty_match_object(Tab, Pat) ->
+ abort({bad_type, Tab, Pat}).
+
+remote_dirty_match_object(Tab, Pat) ->
+ Key = element(2, Pat),
+ case has_var(Key) of
+ false ->
+ mnesia_lib:db_match_object(Tab, Pat);
+ true ->
+ PosList = val({Tab, index}),
+ remote_dirty_match_object(Tab, Pat, PosList)
+ end.
+
+remote_dirty_match_object(Tab, Pat, [Pos | Tail]) when Pos =< size(Pat) ->
+ IxKey = element(Pos, Pat),
+ case has_var(IxKey) of
+ false ->
+ mnesia_index:dirty_match_object(Tab, Pat, Pos);
+ true ->
+ remote_dirty_match_object(Tab, Pat, Tail)
+ end;
+remote_dirty_match_object(Tab, Pat, []) ->
+ mnesia_lib:db_match_object(Tab, Pat);
+remote_dirty_match_object(Tab, Pat, _PosList) ->
+ abort({bad_type, Tab, Pat}).
+
+dirty_select(Tab, Spec) when atom(Tab), Tab /= schema, list(Spec) ->
+ dirty_rpc(Tab, ?MODULE, remote_dirty_select, [Tab, Spec]);
+dirty_select(Tab, Spec) ->
+ abort({bad_type, Tab, Spec}).
+
+remote_dirty_select(Tab, Spec) ->
+ case Spec of
+ [{HeadPat, _, _}] when tuple(HeadPat), size(HeadPat) > 2 ->
+ Key = element(2, HeadPat),
+ case has_var(Key) of
+ false ->
+ mnesia_lib:db_select(Tab, Spec);
+ true ->
+ PosList = val({Tab, index}),
+ remote_dirty_select(Tab, Spec, PosList)
+ end;
+ _ ->
+ mnesia_lib:db_select(Tab, Spec)
+ end.
+
+remote_dirty_select(Tab, [{HeadPat,_, _}] = Spec, [Pos | Tail])
+ when tuple(HeadPat), size(HeadPat) > 2, Pos =< size(HeadPat) ->
+ Key = element(Pos, HeadPat),
+ case has_var(Key) of
+ false ->
+ %% mnesia_index:dirty_select/3 returns the matching records without
+ %% applying the match spec; the filtering is done here by the caller
+ Recs = mnesia_index:dirty_select(Tab, Spec, Pos),
+ CMS = ets:match_spec_compile(Spec),
+ case val({Tab, setorbag}) of
+ ordered_set ->
+ ets:match_spec_run(lists:sort(Recs), CMS);
+ _ ->
+ ets:match_spec_run(Recs, CMS)
+ end;
+ true ->
+ remote_dirty_select(Tab, Spec, Tail)
+ end;
+remote_dirty_select(Tab, Spec, _) ->
+ mnesia_lib:db_select(Tab, Spec).
+
+dirty_all_keys(Tab) when atom(Tab), Tab /= schema ->
+ case ?catch_val({Tab, wild_pattern}) of
+ {'EXIT', _} ->
+ abort({no_exists, Tab});
+ Pat0 ->
+ Pat = setelement(2, Pat0, '$1'),
+ Keys = dirty_select(Tab, [{Pat, [], ['$1']}]),
+ case val({Tab, setorbag}) of
+ bag -> mnesia_lib:uniq(Keys);
+ _ -> Keys
+ end
+ end;
+dirty_all_keys(Tab) ->
+ abort({bad_type, Tab}).
+
+dirty_index_match_object(Pat, Attr) when tuple(Pat), size(Pat) > 2 ->
+ Tab = element(1, Pat),
+ dirty_index_match_object(Tab, Pat, Attr);
+dirty_index_match_object(Pat, _Attr) ->
+ abort({bad_type, Pat}).
+
+dirty_index_match_object(Tab, Pat, Attr)
+ when atom(Tab), Tab /= schema, tuple(Pat), size(Pat) > 2 ->
+ case mnesia_schema:attr_tab_to_pos(Tab, Attr) of
+ Pos when Pos =< size(Pat) ->
+ case has_var(element(2, Pat)) of
+ false ->
+ dirty_match_object(Tab, Pat);
+ true ->
+ Elem = element(Pos, Pat),
+ case has_var(Elem) of
+ false ->
+ dirty_rpc(Tab, mnesia_index, dirty_match_object,
+ [Tab, Pat, Pos]);
+ true ->
+ abort({bad_type, Tab, Attr, Elem})
+ end
+ end;
+ BadPos ->
+ abort({bad_type, Tab, BadPos})
+ end;
+dirty_index_match_object(Tab, Pat, _Attr) ->
+ abort({bad_type, Tab, Pat}).
+
+dirty_index_read(Tab, Key, Attr) when atom(Tab), Tab /= schema ->
+ Pos = mnesia_schema:attr_tab_to_pos(Tab, Attr),
+ case has_var(Key) of
+ false ->
+ mnesia_index:dirty_read(Tab, Key, Pos);
+ true ->
+ abort({bad_type, Tab, Attr, Key})
+ end;
+dirty_index_read(Tab, _Key, _Attr) ->
+ abort({bad_type, Tab}).
+
+dirty_slot(Tab, Slot) when atom(Tab), Tab /= schema, integer(Slot) ->
+ dirty_rpc(Tab, mnesia_lib, db_slot, [Tab, Slot]);
+dirty_slot(Tab, Slot) ->
+ abort({bad_type, Tab, Slot}).
+
+dirty_first(Tab) when atom(Tab), Tab /= schema ->
+ dirty_rpc(Tab, mnesia_lib, db_first, [Tab]);
+dirty_first(Tab) ->
+ abort({bad_type, Tab}).
+
+dirty_last(Tab) when atom(Tab), Tab /= schema ->
+ dirty_rpc(Tab, mnesia_lib, db_last, [Tab]);
+dirty_last(Tab) ->
+ abort({bad_type, Tab}).
+
+dirty_next(Tab, Key) when atom(Tab), Tab /= schema ->
+ dirty_rpc(Tab, mnesia_lib, db_next_key, [Tab, Key]);
+dirty_next(Tab, _Key) ->
+ abort({bad_type, Tab}).
+
+dirty_prev(Tab, Key) when atom(Tab), Tab /= schema ->
+ dirty_rpc(Tab, mnesia_lib, db_prev_key, [Tab, Key]);
+dirty_prev(Tab, _Key) ->
+ abort({bad_type, Tab}).
+
+
+dirty_rpc(Tab, M, F, Args) ->
+ Node = val({Tab, where_to_read}),
+ do_dirty_rpc(Tab, Node, M, F, Args).
+
+do_dirty_rpc(_Tab, nowhere, _, _, Args) ->
+ mnesia:abort({no_exists, Args});
+do_dirty_rpc(Tab, Node, M, F, Args) ->
+ case rpc:call(Node, M, F, Args) of
+ {badrpc,{'EXIT', {undef, [{ M, F, _} | _]}}}
+ when M == ?MODULE, F == remote_dirty_select ->
+ %% Oops, the other node has not been upgraded
+ %% to 4.0.3 yet. Lets do it the old way.
+ %% Remove this in next release.
+ do_dirty_rpc(Tab, Node, mnesia_lib, db_select, Args);
+ {badrpc, Reason} ->
+ erlang:yield(), %% Do not be too eager
+ case mnesia_controller:call({check_w2r, Node, Tab}) of % Sync
+ NewNode when NewNode == Node ->
+ ErrorTag = mnesia_lib:dirty_rpc_error_tag(Reason),
+ mnesia:abort({ErrorTag, Args});
+ NewNode ->
+ case get(mnesia_activity_state) of
+ {_Mod, Tid, _Ts} when record(Tid, tid) ->
+ %% In order to perform a consistent
+ %% retry of a transaction we need
+ %% to acquire the lock on the NewNode.
+ %% In this context we do neither know
+ %% the kind or granularity of the lock.
+ %% --> Abort the transaction
+ mnesia:abort({node_not_running, Node});
+ _ ->
+ %% Splendid! A dirty retry is safe
+ %% 'Node' probably went down now
+ %% Let mnesia_controller get broken link message first
+ do_dirty_rpc(Tab, NewNode, M, F, Args)
+ end
+ end;
+ Other ->
+ Other
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Info
+
+%% Info about one table
+table_info(Tab, Item) ->
+ case get(mnesia_activity_state) of
+ undefined ->
+ any_table_info(Tab, Item);
+ {?DEFAULT_ACCESS, _Tid, _Ts} ->
+ any_table_info(Tab, Item);
+ {Mod, Tid, Ts} ->
+ Mod:table_info(Tid, Ts, Tab, Item);
+ _ ->
+ abort(no_transaction)
+ end.
+
+table_info(_Tid, _Ts, Tab, Item) ->
+ any_table_info(Tab, Item).
+
+
+any_table_info(Tab, Item) when atom(Tab) ->
+ case Item of
+ master_nodes ->
+ mnesia_recover:get_master_nodes(Tab);
+% checkpoints ->
+% case ?catch_val({Tab, commit_work}) of
+% [{checkpoints, List} | _] -> List;
+% No_chk when list(No_chk) -> [];
+% Else -> info_reply(Else, Tab, Item)
+% end;
+ size ->
+ raw_table_info(Tab, Item);
+ memory ->
+ raw_table_info(Tab, Item);
+ type ->
+ case ?catch_val({Tab, setorbag}) of
+ {'EXIT', _} ->
+ bad_info_reply(Tab, Item);
+ Val ->
+ Val
+ end;
+ all ->
+ case mnesia_schema:get_table_properties(Tab) of
+ [] ->
+ abort({no_exists, Tab, Item});
+ Props ->
+ lists:map(fun({setorbag, Type}) -> {type, Type};
+ (Prop) -> Prop end,
+ Props)
+ end;
+ _ ->
+ case ?catch_val({Tab, Item}) of
+ {'EXIT', _} ->
+ bad_info_reply(Tab, Item);
+ Val ->
+ Val
+ end
+ end;
+any_table_info(Tab, _Item) ->
+ abort({bad_type, Tab}).
+
+raw_table_info(Tab, Item) ->
+ case ?catch_val({Tab, storage_type}) of
+ ram_copies ->
+ info_reply(catch ?ets_info(Tab, Item), Tab, Item);
+ disc_copies ->
+ info_reply(catch ?ets_info(Tab, Item), Tab, Item);
+ disc_only_copies ->
+ info_reply(catch dets:info(Tab, Item), Tab, Item);
+ unknown ->
+ bad_info_reply(Tab, Item);
+ {'EXIT', _} ->
+ bad_info_reply(Tab, Item)
+ end.
+
+info_reply({'EXIT', _Reason}, Tab, Item) ->
+ bad_info_reply(Tab, Item);
+info_reply({error, _Reason}, Tab, Item) ->
+ bad_info_reply(Tab, Item);
+info_reply(Val, _Tab, _Item) ->
+ Val.
+
+bad_info_reply(_Tab, size) -> 0;
+bad_info_reply(_Tab, memory) -> 0;
+bad_info_reply(Tab, Item) -> abort({no_exists, Tab, Item}).
+
+%% Raw info about all tables
+schema() ->
+ mnesia_schema:info().
+
+%% Raw info about one table
+schema(Tab) ->
+ mnesia_schema:info(Tab).
+
+error_description(Err) ->
+ mnesia_lib:error_desc(Err).
+
+info() ->
+ case mnesia_lib:is_running() of
+ yes ->
+ TmInfo = mnesia_tm:get_info(10000),
+ Held = system_info(held_locks),
+ Queued = system_info(lock_queue),
+
+ io:format("---> Processes holding locks <--- ~n", []),
+ lists:foreach(fun(L) -> io:format("Lock: ~p~n", [L]) end,
+ Held),
+
+ io:format( "---> Processes waiting for locks <--- ~n", []),
+ lists:foreach(fun({Oid, Op, _Pid, Tid, OwnerTid}) ->
+ io:format("Tid ~p waits for ~p lock "
+ "on oid ~p owned by ~p ~n",
+ [Tid, Op, Oid, OwnerTid])
+ end, Queued),
+ mnesia_tm:display_info(group_leader(), TmInfo),
+
+ Pat = {'_', unclear, '_'},
+ Uncertain = ets:match_object(mnesia_decision, Pat),
+
+ io:format( "---> Uncertain transactions <--- ~n", []),
+ lists:foreach(fun({Tid, _, Nodes}) ->
+ io:format("Tid ~w waits for decision "
+ "from ~w~n",
+ [Tid, Nodes])
+ end, Uncertain),
+
+ mnesia_controller:info(),
+ display_system_info(Held, Queued, TmInfo, Uncertain);
+ _ ->
+ mini_info()
+ end,
+ ok.
+
+mini_info() ->
+ io:format("===> System info in version ~p, debug level = ~p <===~n",
+ [system_info(version), system_info(debug)]),
+ Not =
+ case system_info(use_dir) of
+ true -> "";
+ false -> "NOT "
+ end,
+
+ io:format("~w. Directory ~p is ~sused.~n",
+ [system_info(schema_location), system_info(directory), Not]),
+ io:format("use fallback at restart = ~w~n",
+ [system_info(fallback_activated)]),
+ Running = system_info(running_db_nodes),
+ io:format("running db nodes = ~w~n", [Running]),
+ All = mnesia_lib:all_nodes(),
+ io:format("stopped db nodes = ~w ~n", [All -- Running]).
+
+display_system_info(Held, Queued, TmInfo, Uncertain) ->
+ mini_info(),
+ display_tab_info(),
+ S = fun(Items) -> [system_info(I) || I <- Items] end,
+
+ io:format("~w transactions committed, ~w aborted, "
+ "~w restarted, ~w logged to disc~n",
+ S([transaction_commits, transaction_failures,
+ transaction_restarts, transaction_log_writes])),
+
+ {Active, Pending} =
+ case TmInfo of
+ {timeout, _} -> {infinity, infinity};
+ {info, P, A} -> {length(A), length(P)}
+ end,
+ io:format("~w held locks, ~w in queue; "
+ "~w local transactions, ~w remote~n",
+ [length(Held), length(Queued), Active, Pending]),
+
+ Ufold = fun({_, _, Ns}, {C, Old}) ->
+ New = [N || N <- Ns, not lists:member(N, Old)],
+ {C + 1, New ++ Old}
+ end,
+ {Ucount, Unodes} = lists:foldl(Ufold, {0, []}, Uncertain),
+ io:format("~w transactions waits for other nodes: ~p~n",
+ [Ucount, Unodes]).
+
+display_tab_info() ->
+ MasterTabs = mnesia_recover:get_master_node_tables(),
+ io:format("master node tables = ~p~n", [lists:sort(MasterTabs)]),
+
+ Tabs = system_info(tables),
+
+ {Unknown, Ram, Disc, DiscOnly} =
+ lists:foldl(fun storage_count/2, {[], [], [], []}, Tabs),
+
+ io:format("remote = ~p~n", [lists:sort(Unknown)]),
+ io:format("ram_copies = ~p~n", [lists:sort(Ram)]),
+ io:format("disc_copies = ~p~n", [lists:sort(Disc)]),
+ io:format("disc_only_copies = ~p~n", [lists:sort(DiscOnly)]),
+
+ Rfoldl = fun(T, Acc) ->
+ Rpat =
+ case val({T, access_mode}) of
+ read_only ->
+ lists:sort([{A, read_only} || A <- val({T, active_replicas})]);
+ read_write ->
+ table_info(T, where_to_commit)
+ end,
+ case lists:keysearch(Rpat, 1, Acc) of
+ {value, {_Rpat, Rtabs}} ->
+ lists:keyreplace(Rpat, 1, Acc, {Rpat, [T | Rtabs]});
+ false ->
+ [{Rpat, [T]} | Acc]
+ end
+ end,
+ Repl = lists:foldl(Rfoldl, [], Tabs),
+ Rdisp = fun({Rpat, Rtabs}) -> io:format("~p = ~p~n", [Rpat, Rtabs]) end,
+ lists:foreach(Rdisp, lists:sort(Repl)).
+
+storage_count(T, {U, R, D, DO}) ->
+ case table_info(T, storage_type) of
+ unknown -> {[T | U], R, D, DO};
+ ram_copies -> {U, [T | R], D, DO};
+ disc_copies -> {U, R, [T | D], DO};
+ disc_only_copies -> {U, R, D, [T | DO]}
+ end.
+
+system_info(Item) ->
+ case catch system_info2(Item) of
+ {'EXIT',Error} -> abort(Error);
+ Other -> Other
+ end.
+
+system_info2(all) ->
+ Items = system_info_items(mnesia_lib:is_running()),
+ [{I, system_info(I)} || I <- Items];
+
+system_info2(db_nodes) ->
+ DiscNs = ?catch_val({schema, disc_copies}),
+ RamNs = ?catch_val({schema, ram_copies}),
+ if
+ list(DiscNs), list(RamNs) ->
+ DiscNs ++ RamNs;
+ true ->
+ case mnesia_schema:read_nodes() of
+ {ok, Nodes} -> Nodes;
+ {error,Reason} -> exit(Reason)
+ end
+ end;
+system_info2(running_db_nodes) ->
+ case ?catch_val({current, db_nodes}) of
+ {'EXIT',_} ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ load_mnesia_or_abort(),
+ mnesia_lib:running_nodes();
+ Other ->
+ Other
+ end;
+
+system_info2(extra_db_nodes) ->
+ case ?catch_val(extra_db_nodes) of
+ {'EXIT',_} ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ load_mnesia_or_abort(),
+ mnesia_monitor:get_env(extra_db_nodes);
+ Other ->
+ Other
+ end;
+
+system_info2(directory) ->
+ case ?catch_val(directory) of
+ {'EXIT',_} ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ load_mnesia_or_abort(),
+ mnesia_monitor:get_env(dir);
+ Other ->
+ Other
+ end;
+
+system_info2(use_dir) ->
+ case ?catch_val(use_dir) of
+ {'EXIT',_} ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ load_mnesia_or_abort(),
+ mnesia_monitor:use_dir();
+ Other ->
+ Other
+ end;
+
+system_info2(schema_location) ->
+ case ?catch_val(schema_location) of
+ {'EXIT',_} ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ load_mnesia_or_abort(),
+ mnesia_monitor:get_env(schema_location);
+ Other ->
+ Other
+ end;
+
+system_info2(fallback_activated) ->
+ case ?catch_val(fallback_activated) of
+ {'EXIT',_} ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ load_mnesia_or_abort(),
+ mnesia_bup:fallback_exists();
+ Other ->
+ Other
+ end;
+
+system_info2(version) ->
+ case ?catch_val(version) of
+ {'EXIT', _} ->
+ Apps = application:loaded_applications(),
+ case lists:keysearch(?APPLICATION, 1, Apps) of
+ {value, {_Name, _Desc, Version}} ->
+ Version;
+ false ->
+ %% Ensure that it does not match
+ {mnesia_not_loaded, node(), now()}
+ end;
+ Version ->
+ Version
+ end;
+
+system_info2(access_module) -> mnesia_monitor:get_env(access_module);
+system_info2(auto_repair) -> mnesia_monitor:get_env(auto_repair);
+system_info2(is_running) -> mnesia_lib:is_running();
+system_info2(backup_module) -> mnesia_monitor:get_env(backup_module);
+system_info2(event_module) -> mnesia_monitor:get_env(event_module);
+system_info2(debug) -> mnesia_monitor:get_env(debug);
+system_info2(dump_log_load_regulation) -> mnesia_monitor:get_env(dump_log_load_regulation);
+system_info2(dump_log_write_threshold) -> mnesia_monitor:get_env(dump_log_write_threshold);
+system_info2(dump_log_time_threshold) -> mnesia_monitor:get_env(dump_log_time_threshold);
+system_info2(dump_log_update_in_place) ->
+ mnesia_monitor:get_env(dump_log_update_in_place);
+system_info2(max_wait_for_decision) -> mnesia_monitor:get_env(max_wait_for_decision);
+system_info2(embedded_mnemosyne) -> mnesia_monitor:get_env(embedded_mnemosyne);
+system_info2(ignore_fallback_at_startup) -> mnesia_monitor:get_env(ignore_fallback_at_startup);
+system_info2(fallback_error_function) -> mnesia_monitor:get_env(fallback_error_function);
+system_info2(log_version) -> mnesia_log:version();
+system_info2(protocol_version) -> mnesia_monitor:protocol_version();
+system_info2(schema_version) -> mnesia_schema:version(); %backward compatibility
+system_info2(tables) -> val({schema, tables});
+system_info2(local_tables) -> val({schema, local_tables});
+system_info2(master_node_tables) -> mnesia_recover:get_master_node_tables();
+system_info2(subscribers) -> mnesia_subscr:subscribers();
+system_info2(checkpoints) -> mnesia_checkpoint:checkpoints();
+system_info2(held_locks) -> mnesia_locker:get_held_locks();
+system_info2(lock_queue) -> mnesia_locker:get_lock_queue();
+system_info2(transactions) -> mnesia_tm:get_transactions();
+system_info2(transaction_failures) -> mnesia_lib:read_counter(trans_failures);
+system_info2(transaction_commits) -> mnesia_lib:read_counter(trans_commits);
+system_info2(transaction_restarts) -> mnesia_lib:read_counter(trans_restarts);
+system_info2(transaction_log_writes) -> mnesia_dumper:get_log_writes();
+
+system_info2(Item) -> exit({badarg, Item}).
+
+system_info_items(yes) ->
+ [
+ access_module,
+ auto_repair,
+ backup_module,
+ checkpoints,
+ db_nodes,
+ debug,
+ directory,
+ dump_log_load_regulation,
+ dump_log_time_threshold,
+ dump_log_update_in_place,
+ dump_log_write_threshold,
+ embedded_mnemosyne,
+ event_module,
+ extra_db_nodes,
+ fallback_activated,
+ held_locks,
+ ignore_fallback_at_startup,
+ fallback_error_function,
+ is_running,
+ local_tables,
+ lock_queue,
+ log_version,
+ master_node_tables,
+ max_wait_for_decision,
+ protocol_version,
+ running_db_nodes,
+ schema_location,
+ schema_version,
+ subscribers,
+ tables,
+ transaction_commits,
+ transaction_failures,
+ transaction_log_writes,
+ transaction_restarts,
+ transactions,
+ use_dir,
+ version
+ ];
+system_info_items(no) ->
+ [
+ auto_repair,
+ backup_module,
+ db_nodes,
+ debug,
+ directory,
+ dump_log_load_regulation,
+ dump_log_time_threshold,
+ dump_log_update_in_place,
+ dump_log_write_threshold,
+ event_module,
+ extra_db_nodes,
+ ignore_fallback_at_startup,
+ fallback_error_function,
+ is_running,
+ log_version,
+ max_wait_for_decision,
+ protocol_version,
+ running_db_nodes,
+ schema_location,
+ schema_version,
+ use_dir,
+ version
+ ].
+
+system_info() ->
+ IsRunning = mnesia_lib:is_running(),
+ case IsRunning of
+ yes ->
+ TmInfo = mnesia_tm:get_info(10000),
+ Held = system_info(held_locks),
+ Queued = system_info(lock_queue),
+ Pat = {'_', unclear, '_'},
+ Uncertain = ets:match_object(mnesia_decision, Pat),
+ display_system_info(Held, Queued, TmInfo, Uncertain);
+ _ ->
+ mini_info()
+ end,
+ IsRunning.
+
+load_mnesia_or_abort() ->
+ case mnesia_lib:ensure_loaded(?APPLICATION) of
+ ok ->
+ ok;
+ {error, Reason} ->
+ abort(Reason)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Database mgt
+
+create_schema(Ns) ->
+ mnesia_bup:create_schema(Ns).
+
+delete_schema(Ns) ->
+ mnesia_schema:delete_schema(Ns).
+
+backup(Opaque) ->
+ mnesia_log:backup(Opaque).
+
+backup(Opaque, Mod) ->
+ mnesia_log:backup(Opaque, Mod).
+
+traverse_backup(S, T, Fun, Acc) ->
+ mnesia_bup:traverse_backup(S, T, Fun, Acc).
+
+traverse_backup(S, SM, T, TM, F, A) ->
+ mnesia_bup:traverse_backup(S, SM, T, TM, F, A).
+
+install_fallback(Opaque) ->
+ mnesia_bup:install_fallback(Opaque).
+
+install_fallback(Opaque, Mod) ->
+ mnesia_bup:install_fallback(Opaque, Mod).
+
+uninstall_fallback() ->
+ mnesia_bup:uninstall_fallback().
+
+uninstall_fallback(Args) ->
+ mnesia_bup:uninstall_fallback(Args).
+
+activate_checkpoint(Args) ->
+ mnesia_checkpoint:activate(Args).
+
+deactivate_checkpoint(Name) ->
+ mnesia_checkpoint:deactivate(Name).
+
+backup_checkpoint(Name, Opaque) ->
+ mnesia_log:backup_checkpoint(Name, Opaque).
+
+backup_checkpoint(Name, Opaque, Mod) ->
+ mnesia_log:backup_checkpoint(Name, Opaque, Mod).
+
+restore(Opaque, Args) ->
+ mnesia_schema:restore(Opaque, Args).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Table mgt
+
+create_table(Arg) ->
+ mnesia_schema:create_table(Arg).
+create_table(Name, Arg) when list(Arg) ->
+ mnesia_schema:create_table([{name, Name}| Arg]);
+create_table(Name, Arg) ->
+ {aborted, badarg, Name, Arg}.
+
+delete_table(Tab) ->
+ mnesia_schema:delete_table(Tab).
+
+add_table_copy(Tab, N, S) ->
+ mnesia_schema:add_table_copy(Tab, N, S).
+del_table_copy(Tab, N) ->
+ mnesia_schema:del_table_copy(Tab, N).
+
+move_table_copy(Tab, From, To) ->
+ mnesia_schema:move_table(Tab, From, To).
+
+add_table_index(Tab, Ix) ->
+ mnesia_schema:add_table_index(Tab, Ix).
+del_table_index(Tab, Ix) ->
+ mnesia_schema:del_table_index(Tab, Ix).
+
+transform_table(Tab, Fun, NewA) ->
+ case catch val({Tab, record_name}) of
+ {'EXIT', Reason} ->
+ mnesia:abort(Reason);
+ OldRN ->
+ mnesia_schema:transform_table(Tab, Fun, NewA, OldRN)
+ end.
+
+transform_table(Tab, Fun, NewA, NewRN) ->
+ mnesia_schema:transform_table(Tab, Fun, NewA, NewRN).
+
+change_table_copy_type(T, N, S) ->
+ mnesia_schema:change_table_copy_type(T, N, S).
+
+clear_table(Tab) ->
+ mnesia_schema:clear_table(Tab).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Table mgt - user properties
+
+read_table_property(Tab, PropKey) ->
+ val({Tab, user_property, PropKey}).
+
+write_table_property(Tab, Prop) ->
+ mnesia_schema:write_table_property(Tab, Prop).
+
+delete_table_property(Tab, PropKey) ->
+ mnesia_schema:delete_table_property(Tab, PropKey).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Table mgt - table fragmentation
+
+change_table_frag(Tab, FragProp) ->
+ mnesia_schema:change_table_frag(Tab, FragProp).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Table mgt - table load
+
+%% Dump a ram table to disc
+dump_tables(Tabs) ->
+ mnesia_schema:dump_tables(Tabs).
+
+%% allow the user to wait for some tables to be loaded
+wait_for_tables(Tabs, Timeout) ->
+ mnesia_controller:wait_for_tables(Tabs, Timeout).
+
+force_load_table(Tab) ->
+ case mnesia_controller:force_load_table(Tab) of
+ ok -> yes; % Backwards compatibility
+ Other -> Other
+ end.
+
+change_table_access_mode(T, Access) ->
+ mnesia_schema:change_table_access_mode(T, Access).
+
+change_table_load_order(T, O) ->
+ mnesia_schema:change_table_load_order(T, O).
+
+set_master_nodes(Nodes) when list(Nodes) ->
+ UseDir = system_info(use_dir),
+ IsRunning = system_info(is_running),
+ case IsRunning of
+ yes ->
+ CsPat = {{'_', cstruct}, '_'},
+ Cstructs0 = ?ets_match_object(mnesia_gvar, CsPat),
+ Cstructs = [Cs || {_, Cs} <- Cstructs0],
+ log_valid_master_nodes(Cstructs, Nodes, UseDir, IsRunning);
+ _NotRunning ->
+ case UseDir of
+ true ->
+ mnesia_lib:lock_table(schema),
+ Res =
+ case mnesia_schema:read_cstructs_from_disc() of
+ {ok, Cstructs} ->
+ log_valid_master_nodes(Cstructs, Nodes, UseDir, IsRunning);
+ {error, Reason} ->
+ {error, Reason}
+ end,
+ mnesia_lib:unlock_table(schema),
+ Res;
+ false ->
+ ok
+ end
+ end;
+set_master_nodes(Nodes) ->
+ {error, {bad_type, Nodes}}.
+
+log_valid_master_nodes(Cstructs, Nodes, UseDir, IsRunning) ->
+ Fun = fun(Cs) ->
+ Copies = mnesia_lib:copy_holders(Cs),
+ Valid = mnesia_lib:intersect(Nodes, Copies),
+ {Cs#cstruct.name, Valid}
+ end,
+ Args = lists:map(Fun, Cstructs),
+ mnesia_recover:log_master_nodes(Args, UseDir, IsRunning).
+
+set_master_nodes(Tab, Nodes) when list(Nodes) ->
+ UseDir = system_info(use_dir),
+ IsRunning = system_info(is_running),
+ case IsRunning of
+ yes ->
+ case ?catch_val({Tab, cstruct}) of
+ {'EXIT', _} ->
+ {error, {no_exists, Tab}};
+ Cs ->
+ case Nodes -- mnesia_lib:copy_holders(Cs) of
+ [] ->
+ Args = [{Tab , Nodes}],
+ mnesia_recover:log_master_nodes(Args, UseDir, IsRunning);
+ BadNodes ->
+ {error, {no_exists, Tab, BadNodes}}
+ end
+ end;
+ _NotRunning ->
+ case UseDir of
+ true ->
+ mnesia_lib:lock_table(schema),
+ Res =
+ case mnesia_schema:read_cstructs_from_disc() of
+ {ok, Cstructs} ->
+ case lists:keysearch(Tab, 2, Cstructs) of
+ {value, Cs} ->
+ case Nodes -- mnesia_lib:copy_holders(Cs) of
+ [] ->
+ Args = [{Tab , Nodes}],
+ mnesia_recover:log_master_nodes(Args, UseDir, IsRunning);
+ BadNodes ->
+ {error, {no_exists, Tab, BadNodes}}
+ end;
+ false ->
+ {error, {no_exists, Tab}}
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end,
+ mnesia_lib:unlock_table(schema),
+ Res;
+ false ->
+ ok
+ end
+ end;
+set_master_nodes(Tab, Nodes) ->
+ {error, {bad_type, Tab, Nodes}}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Misc admin
+
+dump_log() ->
+ mnesia_controller:sync_dump_log(user).
+
+subscribe(What) ->
+ mnesia_subscr:subscribe(self(), What).
+
+unsubscribe(What) ->
+ mnesia_subscr:unsubscribe(self(), What).
+
+report_event(Event) ->
+ mnesia_lib:report_system_event({mnesia_user, Event}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Snmp
+
+snmp_open_table(Tab, Us) ->
+ mnesia_schema:add_snmp(Tab, Us).
+
+snmp_close_table(Tab) ->
+ mnesia_schema:del_snmp(Tab).
+
+snmp_get_row(Tab, RowIndex) when atom(Tab), Tab /= schema ->
+ dirty_rpc(Tab, mnesia_snmp_hook, get_row, [Tab, RowIndex]);
+snmp_get_row(Tab, _RowIndex) ->
+ abort({bad_type, Tab}).
+
+snmp_get_next_index(Tab, RowIndex) when atom(Tab), Tab /= schema ->
+ dirty_rpc(Tab, mnesia_snmp_hook, get_next_index, [Tab, RowIndex]);
+snmp_get_next_index(Tab, _RowIndex) ->
+ abort({bad_type, Tab}).
+
+snmp_get_mnesia_key(Tab, RowIndex) when atom(Tab), Tab /= schema ->
+ dirty_rpc(Tab, mnesia_snmp_hook, get_mnesia_key, [Tab, RowIndex]);
+snmp_get_mnesia_key(Tab, _RowIndex) ->
+ abort({bad_type, Tab}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Textfile access
+
+load_textfile(F) ->
+ mnesia_text:load_textfile(F).
+dump_to_textfile(F) ->
+ mnesia_text:dump_to_textfile(F).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Mnemosyne exclusive
+
+get_activity_id() ->
+ get(mnesia_activity_state).
+
+put_activity_id(Activity) ->
+ mnesia_tm:put_activity_id(Activity).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.hrl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.hrl
new file mode 100644
index 0000000000..cd3cee974b
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.hrl
@@ -0,0 +1,117 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia.hrl,v 1.1 2008/12/17 09:53:37 mikpe Exp $
+%%
+
+-define(APPLICATION, mnesia).
+
+-define(ets_lookup(Tab, Key), ets:lookup(Tab, Key)).
+-define(ets_lookup_element(Tab, Key, Pos), ets:lookup_element(Tab, Key, Pos)).
+-define(ets_insert(Tab, Rec), ets:insert(Tab, Rec)).
+-define(ets_delete(Tab, Key), ets:delete(Tab, Key)).
+-define(ets_match_delete(Tab, Pat), ets:match_delete(Tab, Pat)).
+-define(ets_match_object(Tab, Pat), ets:match_object(Tab, Pat)).
+-define(ets_match(Tab, Pat), ets:match(Tab, Pat)).
+-define(ets_info(Tab, Item), ets:info(Tab, Item)).
+-define(ets_update_counter(Tab, Key, Incr), ets:update_counter(Tab, Key, Incr)).
+-define(ets_first(Tab), ets:first(Tab)).
+-define(ets_next(Tab, Key), ets:next(Tab, Key)).
+-define(ets_last(Tab), ets:last(Tab)).
+-define(ets_prev(Tab, Key), ets:prev(Tab, Key)).
+-define(ets_slot(Tab, Pos), ets:slot(Tab, Pos)).
+-define(ets_new_table(Tab, Props), ets:new(Tab, Props)).
+-define(ets_delete_table(Tab), ets:delete(Tab)).
+-define(ets_fixtable(Tab, Bool), ets:fixtable(Tab, Bool)).
+
+-define(catch_val(Var), (catch ?ets_lookup_element(mnesia_gvar, Var, 2))).
+
+%% It's important that counter is first, since we compare tids
+
+-record(tid,
+ {counter, %% serial no for tid
+ pid}). %% owner of tid
+
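+%% Since a record is a tagged tuple ({tid, Counter, Pid}) and tuples
+%% of equal size compare element by element, keeping counter first
+%% means that plain term comparison orders tids by serial number.
+%% Illustrative sketch (PidA and PidB are arbitrary pids):
+%%
+%%   #tid{counter = 1, pid = PidA} < #tid{counter = 2, pid = PidB}
+%%
+%% holds regardless of how PidA and PidB compare.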
+
+-record(tidstore,
+ {store, %% current ets table for tid
+ up_stores = [], %% list of upper layer stores for nested trans
+ level = 1}). %% transaction level
+
+-define(unique_cookie, {erlang:now(), node()}).
+
+-record(cstruct, {name, % Atom
+ type = set, % set | bag
+ ram_copies = [], % [Node]
+ disc_copies = [], % [Node]
+ disc_only_copies = [], % [Node]
+ load_order = 0, % Integer
+ access_mode = read_write, % read_write | read_only
+ index = [], % [Integer]
+ snmp = [], % Snmp Ustruct
+ local_content = false, % true | false
+ record_name = {bad_record_name}, % Atom (Default = Name)
+ attributes = [key, val], % [Atom]
+ user_properties = [], % [Record]
+ frag_properties = [], % [{Key, Val}]
+ cookie = ?unique_cookie, % Term
+ version = {{2, 0}, []}}). % {{Integer, Integer}, [Node]}
+
+%% Record for the head structure in Mnesia's log files
+%%
+%% The definition of this record may *NEVER* be changed
+%% since it may be written to very old backup files.
+%% By holding this record definition stable we are able to
+%% comprehend backups from timepoint 0. It also
+%% allows us to use the backup format as an interchange
+%% format between Mnesia releases.
+
+-record(log_header,{log_kind,
+ log_version,
+ mnesia_version,
+ node,
+ now}).
+
+%% Commit records stored in the transaction log
+-record(commit, {node,
+ decision, % presume_commit | Decision
+ ram_copies = [],
+ disc_copies = [],
+ disc_only_copies = [],
+ snmp = [],
+ schema_ops = []
+ }).
+
+-record(decision, {tid,
+ outcome, % presume_abort | committed
+ disc_nodes,
+ ram_nodes}).
+
+%% Maybe cyclic wait
+-record(cyclic, {node = node(),
+ oid, % {Tab, Key}
+ op, % read | write
+ lock, % read | write
+ lucky
+ }).
+
+%% Managing conditional debug functions
+
+-ifdef(debug).
+ -define(eval_debug_fun(I, C),
+ mnesia_lib:eval_debug_fun(I, C, ?FILE, ?LINE)).
+-else.
+ -define(eval_debug_fun(I, C), ok).
+-endif.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_backup.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_backup.erl
new file mode 100644
index 0000000000..f01310530e
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_backup.erl
@@ -0,0 +1,194 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_backup.erl,v 1.1 2008/12/17 09:53:37 mikpe Exp $
+%%
+%0
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% This module contains one implementation of callback functions
+%% used by Mnesia during backup and restore. The user may however
+%% write their own module with the same interface as mnesia_backup and
+%% configure Mnesia so that the alternate module performs the actual
+%% accesses to the backup media. This means that the user may put
+%% the backup on media that Mnesia does not know about, possibly
+%% on hosts where Erlang is not running.
+%%
+%% The OpaqueData argument is never interpreted by other parts of
+%% Mnesia. It is the property of this module. Alternate implementations
+%% of this module may have different interpretations of OpaqueData.
+%% The OpaqueData argument given to open_write/1 and open_read/1
+%% are forwarded directly from the user.
+%%
+%% All functions must return {ok, NewOpaqueData} or {error, Reason}.
+%%
+%% The NewOpaqueData arguments returned by backup callback functions will
+%% be given as input when the next backup callback function is invoked.
+%% If any return value does not match {ok, _} the backup will be aborted.
+%%
+%% The NewOpaqueData arguments returned by restore callback functions will
+%% be given as input when the next restore callback function is invoked.
+%% If any return value does not match {ok, _} the restore will be aborted.
+%%
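+%% A usage sketch (illustrative only): assuming an alternate module,
+%% here called my_tape_backup, that exports the same callbacks as
+%% this module, it can be selected explicitly per call:
+%%
+%%   mnesia:backup("BACKUP.BUP", my_tape_backup),
+%%   mnesia:install_fallback("BACKUP.BUP", [{module, my_tape_backup}]).
+%%
+%% my_tape_backup and "BACKUP.BUP" are hypothetical names.
+%%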
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+-module(mnesia_backup).
+-behaviour(mnesia_backup).
+
+-include_lib("kernel/include/file.hrl").
+
+-export([
+ %% Write access
+ open_write/1,
+ write/2,
+ commit_write/1,
+ abort_write/1,
+
+ %% Read access
+ open_read/1,
+ read/1,
+ close_read/1
+ ]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Backup callback interface
+-record(backup, {tmp_file, file, file_desc}).
+
+%% Opens backup media for write
+%%
+%% Returns {ok, OpaqueData} or {error, Reason}
+open_write(OpaqueData) ->
+ File = OpaqueData,
+ Tmp = lists:concat([File,".BUPTMP"]),
+ file:delete(Tmp),
+ file:delete(File),
+ case disk_log:open([{name, make_ref()},
+ {file, Tmp},
+ {repair, false},
+ {linkto, self()}]) of
+ {ok, Fd} ->
+ {ok, #backup{tmp_file = Tmp, file = File, file_desc = Fd}};
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+%% Writes BackupItems to the backup media
+%%
+%% Returns {ok, OpaqueData} or {error, Reason}
+write(OpaqueData, BackupItems) ->
+ B = OpaqueData,
+ case disk_log:log_terms(B#backup.file_desc, BackupItems) of
+ ok ->
+ {ok, B};
+ {error, Reason} ->
+ abort_write(B),
+ {error, Reason}
+ end.
+
+%% Closes the backup media after a successful backup
+%%
+%% Returns {ok, ReturnValueToUser} or {error, Reason}
+commit_write(OpaqueData) ->
+ B = OpaqueData,
+ case disk_log:sync(B#backup.file_desc) of
+ ok ->
+ case disk_log:close(B#backup.file_desc) of
+ ok ->
+ case file:rename(B#backup.tmp_file, B#backup.file) of
+ ok ->
+ {ok, B#backup.file};
+ {error, Reason} ->
+ {error, Reason}
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+%% Closes the backup media after an interrupted backup
+%%
+%% Returns {ok, ReturnValueToUser} or {error, Reason}
+abort_write(BackupRef) ->
+ Res = disk_log:close(BackupRef#backup.file_desc),
+ file:delete(BackupRef#backup.tmp_file),
+ case Res of
+ ok ->
+ {ok, BackupRef#backup.file};
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Restore callback interface
+
+-record(restore, {file, file_desc, cont}).
+
+%% Opens backup media for read
+%%
+%% Returns {ok, OpaqueData} or {error, Reason}
+open_read(OpaqueData) ->
+ File = OpaqueData,
+ case file:read_file_info(File) of
+ {error, Reason} ->
+ {error, Reason};
+ _FileInfo -> %% file exists
+ case disk_log:open([{file, File},
+ {name, make_ref()},
+ {repair, false},
+ {mode, read_only},
+ {linkto, self()}]) of
+ {ok, Fd} ->
+ {ok, #restore{file = File, file_desc = Fd, cont = start}};
+ {repaired, Fd, _, {badbytes, 0}} ->
+ {ok, #restore{file = File, file_desc = Fd, cont = start}};
+ {repaired, Fd, _, _} ->
+ {ok, #restore{file = File, file_desc = Fd, cont = start}};
+ {error, Reason} ->
+ {error, Reason}
+ end
+ end.
+
+%% Reads BackupItems from the backup media
+%%
+%% Returns {ok, OpaqueData, BackupItems} or {error, Reason}
+%%
+%% BackupItems == [] is interpreted as eof
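+%%
+%% A read-to-end sketch (illustrative only; read_all/2 is a
+%% hypothetical helper, not part of this module):
+%%
+%%   read_all(Opaque, Acc) ->
+%%       case mnesia_backup:read(Opaque) of
+%%           {ok, Opaque2, []} -> {ok, Opaque2, lists:append(lists:reverse(Acc))};
+%%           {ok, Opaque2, Items} -> read_all(Opaque2, [Items | Acc]);
+%%           {error, Reason} -> {error, Reason}
+%%       end.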
+read(OpaqueData) ->
+ R = OpaqueData,
+ Fd = R#restore.file_desc,
+ case disk_log:chunk(Fd, R#restore.cont) of
+ {error, Reason} ->
+ {error, {"Possibly truncated", Reason}};
+ eof ->
+ {ok, R, []};
+ {Cont, []} ->
+ read(R#restore{cont = Cont});
+ {Cont, BackupItems} ->
+ {ok, R#restore{cont = Cont}, BackupItems}
+ end.
+
+%% Closes the backup media after restore
+%%
+%% Returns {ok, ReturnValueToUser} or {error, Reason}
+close_read(OpaqueData) ->
+ R = OpaqueData,
+ case disk_log:close(R#restore.file_desc) of
+ ok -> {ok, R#restore.file};
+ {error, Reason} -> {error, Reason}
+ end.
+%0
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl
new file mode 100644
index 0000000000..eb636a8447
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl
@@ -0,0 +1,1168 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_bup.erl,v 1.1 2008/12/17 09:53:37 mikpe Exp $
+%%
+-module(mnesia_bup).
+-export([
+ %% Public interface
+ iterate/4,
+ read_schema/2,
+ fallback_bup/0,
+ fallback_exists/0,
+ tm_fallback_start/1,
+ create_schema/1,
+ install_fallback/1,
+ install_fallback/2,
+ uninstall_fallback/0,
+ uninstall_fallback/1,
+ traverse_backup/4,
+ traverse_backup/6,
+ make_initial_backup/3,
+ fallback_to_schema/0,
+ lookup_schema/2,
+ schema2bup/1,
+ refresh_cookie/2,
+
+ %% Internal
+ fallback_receiver/2,
+ install_fallback_master/2,
+ uninstall_fallback_master/2,
+ local_uninstall_fallback/2,
+ do_traverse_backup/7,
+ trav_apply/4
+ ]).
+
+-include("mnesia.hrl").
+-import(mnesia_lib, [verbose/2, dbg_out/2]).
+
+-record(restore, {mode, bup_module, bup_data}).
+
+-record(fallback_args, {opaque,
+ scope = global,
+ module = mnesia_monitor:get_env(backup_module),
+ use_default_dir = true,
+ mnesia_dir,
+ fallback_bup,
+ fallback_tmp,
+ skip_tables = [],
+ keep_tables = [],
+ default_op = keep_tables
+ }).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Backup iterator
+
+%% Reads schema section and iterates over all records in a backup.
+%%
+%% Fun(BunchOfRecords, Header, Schema, Acc) is applied when a suitable number
+%% of records has been collected.
+%%
+%% BunchOfRecords will be [] when the iteration is done.
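+%%
+%% A sketch of such a Fun (illustrative only), counting the backup
+%% records that follow the schema section, read via the default
+%% mnesia_backup module:
+%%
+%%   Count = fun(Recs, _Header, _Schema, Acc) -> Acc + length(Recs) end,
+%%   mnesia_bup:iterate(mnesia_backup, Count, "BACKUP.BUP", 0).
+%%
+%% "BACKUP.BUP" is a hypothetical backup file name.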
+iterate(Mod, Fun, Opaque, Acc) ->
+ R = #restore{bup_module = Mod, bup_data = Opaque},
+ case catch read_schema_section(R) of
+ {error, Reason} ->
+ {error, Reason};
+ {R2, {Header, Schema, Rest}} ->
+ case catch iter(R2, Header, Schema, Fun, Acc, Rest) of
+ {ok, R3, Res} ->
+ catch safe_apply(R3, close_read, [R3#restore.bup_data]),
+ {ok, Res};
+ {error, Reason} ->
+ catch safe_apply(R2, close_read, [R2#restore.bup_data]),
+ {error, Reason};
+ {'EXIT', Pid, Reason} ->
+ catch safe_apply(R2, close_read, [R2#restore.bup_data]),
+ {error, {'EXIT', Pid, Reason}};
+ {'EXIT', Reason} ->
+ catch safe_apply(R2, close_read, [R2#restore.bup_data]),
+ {error, {'EXIT', Reason}}
+ end
+ end.
+
+iter(R, Header, Schema, Fun, Acc, []) ->
+ case safe_apply(R, read, [R#restore.bup_data]) of
+ {R2, []} ->
+ Res = Fun([], Header, Schema, Acc),
+ {ok, R2, Res};
+ {R2, BupItems} ->
+ iter(R2, Header, Schema, Fun, Acc, BupItems)
+ end;
+iter(R, Header, Schema, Fun, Acc, BupItems) ->
+ Acc2 = Fun(BupItems, Header, Schema, Acc),
+ iter(R, Header, Schema, Fun, Acc2, []).
+
+safe_apply(R, write, [_, Items]) when Items == [] ->
+ R;
+safe_apply(R, What, Args) ->
+ Abort = fun(Re) -> abort_restore(R, What, Args, Re) end,
+ receive
+ {'EXIT', Pid, Re} -> Abort({'EXIT', Pid, Re})
+ after 0 ->
+ Mod = R#restore.bup_module,
+ case catch apply(Mod, What, Args) of
+ {ok, Opaque, Items} when What == read ->
+ {R#restore{bup_data = Opaque}, Items};
+ {ok, Opaque} when What /= read ->
+ R#restore{bup_data = Opaque};
+ {error, Re} ->
+ Abort(Re);
+ Re ->
+ Abort(Re)
+ end
+ end.
+
+abort_restore(R, What, Args, Reason) ->
+ Mod = R#restore.bup_module,
+ Opaque = R#restore.bup_data,
+ dbg_out("Restore aborted. ~p:~p~p -> ~p~n",
+ [Mod, What, Args, Reason]),
+ catch apply(Mod, close_read, [Opaque]),
+ throw({error, Reason}).
+
+fallback_to_schema() ->
+ Fname = fallback_bup(),
+ fallback_to_schema(Fname).
+
+fallback_to_schema(Fname) ->
+ Mod = mnesia_backup,
+ case read_schema(Mod, Fname) of
+ {error, Reason} ->
+ {error, Reason};
+ Schema ->
+ case catch lookup_schema(schema, Schema) of
+ {error, _} ->
+ {error, "No schema in fallback"};
+ List ->
+ {ok, fallback, List}
+ end
+ end.
+
+%% Opens Opaque, reads the schema and then closes it
+read_schema(Mod, Opaque) ->
+ R = #restore{bup_module = Mod, bup_data = Opaque},
+ case catch read_schema_section(R) of
+ {error, Reason} ->
+ {error, Reason};
+ {R2, {_Header, Schema, _}} ->
+ catch safe_apply(R2, close_read, [R2#restore.bup_data]),
+ Schema
+ end.
+
+%% Open backup media and extract schema
+%% rewind backup media and leave it open
+%% Returns {R, {Header, Schema, Rest}}
+read_schema_section(R) ->
+ case catch do_read_schema_section(R) of
+ {'EXIT', Reason} ->
+ catch safe_apply(R, close_read, [R#restore.bup_data]),
+ {error, {'EXIT', Reason}};
+ {error, Reason} ->
+ catch safe_apply(R, close_read, [R#restore.bup_data]),
+ {error, Reason};
+ {R2, {H, Schema, Rest}} ->
+ Schema2 = convert_schema(H#log_header.log_version, Schema),
+ {R2, {H, Schema2, Rest}}
+ end.
+
+do_read_schema_section(R) ->
+ R2 = safe_apply(R, open_read, [R#restore.bup_data]),
+ {R3, RawSchema} = safe_apply(R2, read, [R2#restore.bup_data]),
+ do_read_schema_section(R3, verify_header(RawSchema), []).
+
+do_read_schema_section(R, {ok, B, C, []}, Acc) ->
+ case safe_apply(R, read, [R#restore.bup_data]) of
+ {R2, []} ->
+ {R2, {B, Acc, []}};
+ {R2, RawSchema} ->
+ do_read_schema_section(R2, {ok, B, C, RawSchema}, Acc)
+ end;
+
+do_read_schema_section(R, {ok, B, C, [Head | Tail]}, Acc)
+ when element(1, Head) == schema ->
+ do_read_schema_section(R, {ok, B, C, Tail}, Acc ++ [Head]);
+
+do_read_schema_section(R, {ok, B, _C, Rest}, Acc) ->
+ {R, {B, Acc, Rest}};
+
+do_read_schema_section(_R, {error, Reason}, _Acc) ->
+ {error, Reason}.
+
+verify_header([H | RawSchema]) when record(H, log_header) ->
+ Current = mnesia_log:backup_log_header(),
+ if
+ H#log_header.log_kind == Current#log_header.log_kind ->
+ Versions = ["0.1", "1.1", Current#log_header.log_version],
+ case lists:member(H#log_header.log_version, Versions) of
+ true ->
+ {ok, H, Current, RawSchema};
+ false ->
+ {error, {"Bad header version. Cannot be used as backup.", H}}
+ end;
+ true ->
+ {error, {"Bad kind of header. Cannot be used as backup.", H}}
+ end;
+verify_header(RawSchema) ->
+ {error, {"Missing header. Cannot be used as backup.", catch hd(RawSchema)}}.
+
+refresh_cookie(Schema, NewCookie) ->
+ case lists:keysearch(schema, 2, Schema) of
+ {value, {schema, schema, List}} ->
+ Cs = mnesia_schema:list2cs(List),
+ Cs2 = Cs#cstruct{cookie = NewCookie},
+ Item = {schema, schema, mnesia_schema:cs2list(Cs2)},
+ lists:keyreplace(schema, 2, Schema, Item);
+
+ false ->
+ Reason = "No schema found. Cannot be used as backup.",
+ throw({error, {Reason, Schema}})
+ end.
+
+%% Convert schema items from an external backup
+%% If backup format is the latest, no conversion is needed
+%% All supported backup formats should have their converters
+%% here as separate function clauses.
+convert_schema("0.1", Schema) ->
+ convert_0_1(Schema);
+convert_schema("1.1", Schema) ->
+ %% The new backup format is a pure extension of the old one
+ Current = mnesia_log:backup_log_header(),
+ convert_schema(Current#log_header.log_version, Schema);
+convert_schema(Latest, Schema) ->
+ H = mnesia_log:backup_log_header(),
+ if
+ H#log_header.log_version == Latest ->
+ Schema;
+ true ->
+ Reason = "Bad backup header version. Cannot convert schema.",
+ throw({error, {Reason, H}})
+ end.
+
+%% Backward compatibility for 0.1
+convert_0_1(Schema) ->
+ case lists:keysearch(schema, 2, Schema) of
+ {value, {schema, schema, List}} ->
+ Schema2 = lists:keydelete(schema, 2, Schema),
+ Cs = mnesia_schema:list2cs(List),
+ convert_0_1(Schema2, [], Cs);
+ false ->
+ List = mnesia_schema:get_initial_schema(disc_copies, [node()]),
+ Cs = mnesia_schema:list2cs(List),
+ convert_0_1(Schema, [], Cs)
+ end.
+
+convert_0_1([{schema, cookie, Cookie} | Schema], Acc, Cs) ->
+ convert_0_1(Schema, Acc, Cs#cstruct{cookie = Cookie});
+convert_0_1([{schema, db_nodes, DbNodes} | Schema], Acc, Cs) ->
+ convert_0_1(Schema, Acc, Cs#cstruct{disc_copies = DbNodes});
+convert_0_1([{schema, version, Version} | Schema], Acc, Cs) ->
+ convert_0_1(Schema, Acc, Cs#cstruct{version = Version});
+convert_0_1([{schema, Tab, Def} | Schema], Acc, Cs) ->
+ Head =
+ case lists:keysearch(index, 1, Def) of
+ {value, {index, PosList}} ->
+ %% Remove the snmp "index"
+ P = PosList -- [snmp],
+ Def2 = lists:keyreplace(index, 1, Def, {index, P}),
+ {schema, Tab, Def2};
+ false ->
+ {schema, Tab, Def}
+ end,
+ convert_0_1(Schema, [Head | Acc], Cs);
+convert_0_1([Head | Schema], Acc, Cs) ->
+ convert_0_1(Schema, [Head | Acc], Cs);
+convert_0_1([], Acc, Cs) ->
+ [schema2bup({schema, schema, Cs}) | Acc].
+
+%% Returns Val or throw error
+lookup_schema(Key, Schema) ->
+ case lists:keysearch(Key, 2, Schema) of
+ {value, {schema, Key, Val}} -> Val;
+ false -> throw({error, {"Cannot lookup", Key}})
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Backup compatibility
+
+%% Convert internal schema items to backup ditto
+schema2bup({schema, Tab}) ->
+ {schema, Tab};
+schema2bup({schema, Tab, TableDef}) ->
+ {schema, Tab, mnesia_schema:cs2list(TableDef)}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Create schema on the given nodes
+%% Requires that old schemas have been deleted
+%% Returns ok | {error, Reason}
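+%%
+%% Usage sketch (illustrative only):
+%%
+%%   ok = mnesia:create_schema([node()]).
+%%
+%% The node list must be a proper set; a list with duplicates yields
+%% {error, {combine_error, Ns}} (see is_set/1 below).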
+create_schema([]) ->
+ create_schema([node()]);
+create_schema(Ns) when list(Ns) ->
+ case is_set(Ns) of
+ true ->
+ create_schema(Ns, mnesia_schema:ensure_no_schema(Ns));
+ false ->
+ {error, {combine_error, Ns}}
+ end;
+create_schema(Ns) ->
+ {error, {badarg, Ns}}.
+
+is_set(List) when list(List) ->
+ ordsets:is_set(lists:sort(List));
+is_set(_) ->
+ false.
+
+create_schema(Ns, ok) ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ case mnesia_lib:ensure_loaded(?APPLICATION) of
+ ok ->
+ case mnesia_monitor:get_env(schema_location) of
+ ram ->
+ {error, {has_no_disc, node()}};
+ _ ->
+ case mnesia_schema:opt_create_dir(true, mnesia_lib:dir()) of
+ {error, What} ->
+ {error, What};
+ ok ->
+ Mod = mnesia_backup,
+ Str = mk_str(),
+ File = mnesia_lib:dir(Str),
+ file:delete(File),
+ case catch make_initial_backup(Ns, File, Mod) of
+ {ok, _Res} ->
+ case do_install_fallback(File, Mod) of
+ ok ->
+ file:delete(File),
+ ok;
+ {error, Reason} ->
+ {error, Reason}
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end
+ end
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end;
+create_schema(_Ns, {error, Reason}) ->
+ {error, Reason};
+create_schema(_Ns, Reason) ->
+ {error, Reason}.
+
+mk_str() ->
+ Now = [integer_to_list(I) || I <- tuple_to_list(now())],
+ lists:concat([node()] ++ Now ++ ".TMP").
+
+make_initial_backup(Ns, Opaque, Mod) ->
+ Schema = [{schema, schema, mnesia_schema:get_initial_schema(disc_copies, Ns)}],
+ O2 = do_apply(Mod, open_write, [Opaque], Opaque),
+ O3 = do_apply(Mod, write, [O2, [mnesia_log:backup_log_header()]], O2),
+ O4 = do_apply(Mod, write, [O3, Schema], O3),
+ O5 = do_apply(Mod, commit_write, [O4], O4),
+ {ok, O5}.
+
+do_apply(_, write, [_, Items], Opaque) when Items == [] ->
+ Opaque;
+do_apply(Mod, What, Args, _Opaque) ->
+ case catch apply(Mod, What, Args) of
+ {ok, Opaque2} -> Opaque2;
+ {error, Reason} -> throw({error, Reason});
+ {'EXIT', Reason} -> throw({error, {'EXIT', Reason}})
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Restore
+
+%% Restore schema and possibly other tables from a backup
+%% and replicate them to the necessary nodes
+%% Requires that old schemas have been deleted
+%% Returns ok | {error, Reason}
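+%%
+%% Usage sketch (illustrative only), using option names accepted by
+%% check_fallback_arg_type/2 below:
+%%
+%%   mnesia:install_fallback("BACKUP.BUP",
+%%                           [{scope, local}, {module, mnesia_backup}]).
+%%
+%% "BACKUP.BUP" is a hypothetical backup file name.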
+install_fallback(Opaque) ->
+ install_fallback(Opaque, []).
+
+install_fallback(Opaque, Args) ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ case mnesia_lib:ensure_loaded(?APPLICATION) of
+ ok ->
+ do_install_fallback(Opaque, Args);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+do_install_fallback(Opaque, Mod) when atom(Mod) ->
+ do_install_fallback(Opaque, [{module, Mod}]);
+do_install_fallback(Opaque, Args) when list(Args) ->
+ case check_fallback_args(Args, #fallback_args{opaque = Opaque}) of
+ {ok, FA} ->
+ do_install_fallback(FA);
+ {error, Reason} ->
+ {error, Reason}
+ end;
+do_install_fallback(_Opaque, Args) ->
+ {error, {badarg, Args}}.
+
+check_fallback_args([Arg | Tail], FA) ->
+ case catch check_fallback_arg_type(Arg, FA) of
+ {'EXIT', _Reason} ->
+ {error, {badarg, Arg}};
+ FA2 ->
+ check_fallback_args(Tail, FA2)
+ end;
+check_fallback_args([], FA) ->
+ {ok, FA}.
+
+check_fallback_arg_type(Arg, FA) ->
+ case Arg of
+ {scope, global} ->
+ FA#fallback_args{scope = global};
+ {scope, local} ->
+ FA#fallback_args{scope = local};
+ {module, Mod} ->
+ Mod2 = mnesia_monitor:do_check_type(backup_module, Mod),
+ FA#fallback_args{module = Mod2};
+ {mnesia_dir, Dir} ->
+ FA#fallback_args{mnesia_dir = Dir,
+ use_default_dir = false};
+ {keep_tables, Tabs} ->
+ atom_list(Tabs),
+ FA#fallback_args{keep_tables = Tabs};
+ {skip_tables, Tabs} ->
+ atom_list(Tabs),
+ FA#fallback_args{skip_tables = Tabs};
+ {default_op, keep_tables} ->
+ FA#fallback_args{default_op = keep_tables};
+ {default_op, skip_tables} ->
+ FA#fallback_args{default_op = skip_tables}
+ end.
+
+atom_list([H | T]) when atom(H) ->
+ atom_list(T);
+atom_list([]) ->
+ ok.
+
+do_install_fallback(FA) ->
+ Pid = spawn_link(?MODULE, install_fallback_master, [self(), FA]),
+ Res =
+ receive
+ {'EXIT', Pid, Reason} -> % if appl has trapped exit
+ {error, {'EXIT', Reason}};
+ {Pid, Res2} ->
+ case Res2 of
+ {ok, _} ->
+ ok;
+ {error, Reason} ->
+ {error, {"Cannot install fallback", Reason}}
+ end
+ end,
+ Res.
+
+install_fallback_master(ClientPid, FA) ->
+ process_flag(trap_exit, true),
+ State = {start, FA},
+ Opaque = FA#fallback_args.opaque,
+ Mod = FA#fallback_args.module,
+ Res = (catch iterate(Mod, fun restore_recs/4, Opaque, State)),
+ unlink(ClientPid),
+ ClientPid ! {self(), Res},
+ exit(shutdown).
+
+restore_recs(_, _, _, stop) ->
+ throw({error, "restore_recs already stopped"});
+
+restore_recs(Recs, Header, Schema, {start, FA}) ->
+ %% No records in backup
+ Schema2 = convert_schema(Header#log_header.log_version, Schema),
+ CreateList = lookup_schema(schema, Schema2),
+ case catch mnesia_schema:list2cs(CreateList) of
+ {'EXIT', Reason} ->
+ throw({error, {"Bad schema in restore_recs", Reason}});
+ Cs ->
+ Ns = get_fallback_nodes(FA, Cs#cstruct.disc_copies),
+ global:set_lock({{mnesia_table_lock, schema}, self()}, Ns, infinity),
+ Args = [self(), FA],
+ Pids = [spawn_link(N, ?MODULE, fallback_receiver, Args) || N <- Ns],
+ send_fallback(Pids, {start, Header, Schema2}),
+ Res = restore_recs(Recs, Header, Schema2, Pids),
+ global:del_lock({{mnesia_table_lock, schema}, self()}, Ns),
+ Res
+ end;
+
+restore_recs([], _Header, _Schema, Pids) ->
+ send_fallback(Pids, swap),
+ send_fallback(Pids, stop),
+ stop;
+
+restore_recs(Recs, _, _, Pids) ->
+ send_fallback(Pids, {records, Recs}),
+ Pids.
+
+get_fallback_nodes(FA, Ns) ->
+ This = node(),
+ case lists:member(This, Ns) of
+ true ->
+ case FA#fallback_args.scope of
+ global -> Ns;
+ local -> [This]
+ end;
+ false ->
+ throw({error, {"No disc resident schema on local node", Ns}})
+ end.
+
+send_fallback(Pids, Msg) when list(Pids), Pids /= [] ->
+ lists:foreach(fun(Pid) -> Pid ! {self(), Msg} end, Pids),
+ rec_answers(Pids, []).
+
+rec_answers([], Acc) ->
+ case {lists:keysearch(error, 1, Acc), mnesia_lib:uniq(Acc)} of
+ {{value, {error, Val}}, _} -> throw({error, Val});
+ {_, [SameAnswer]} -> SameAnswer;
+ {_, Other} -> throw({error, {"Different answers", Other}})
+ end;
+rec_answers(Pids, Acc) ->
+ receive
+ {'EXIT', Pid, stopped} ->
+ Pids2 = lists:delete(Pid, Pids),
+ rec_answers(Pids2, [stopped|Acc]);
+ {'EXIT', Pid, Reason} ->
+ Pids2 = lists:delete(Pid, Pids),
+ rec_answers(Pids2, [{error, {'EXIT', Pid, Reason}}|Acc]);
+ {Pid, Reply} ->
+ Pids2 = lists:delete(Pid, Pids),
+ rec_answers(Pids2, [Reply|Acc])
+ end.
+
+fallback_exists() ->
+ Fname = fallback_bup(),
+ fallback_exists(Fname).
+
+fallback_exists(Fname) ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ mnesia_lib:exists(Fname);
+ false ->
+ case ?catch_val(active_fallback) of
+ {'EXIT', _} -> false;
+ Bool -> Bool
+ end
+ end.
+
+fallback_name() -> "FALLBACK.BUP".
+fallback_bup() -> mnesia_lib:dir(fallback_name()).
+
+fallback_tmp_name() -> "FALLBACK.TMP".
+%% fallback_full_tmp_name() -> mnesia_lib:dir(fallback_tmp_name()).
+
+fallback_receiver(Master, FA) ->
+ process_flag(trap_exit, true),
+
+ case catch register(mnesia_fallback, self()) of
+ {'EXIT', _} ->
+ Reason = {already_exists, node()},
+ local_fallback_error(Master, Reason);
+ true ->
+ FA2 = check_fallback_dir(Master, FA),
+ Bup = FA2#fallback_args.fallback_bup,
+ case mnesia_lib:exists(Bup) of
+ true ->
+ Reason2 = {already_exists, node()},
+ local_fallback_error(Master, Reason2);
+ false ->
+ Mod = mnesia_backup,
+ Tmp = FA2#fallback_args.fallback_tmp,
+ R = #restore{mode = replace,
+ bup_module = Mod,
+ bup_data = Tmp},
+ file:delete(Tmp),
+ case catch fallback_receiver_loop(Master, R, FA2, schema) of
+ {error, Reason} ->
+ local_fallback_error(Master, Reason);
+ Other ->
+ exit(Other)
+ end
+ end
+ end.
+
+local_fallback_error(Master, Reason) ->
+ Master ! {self(), {error, Reason}},
+ unlink(Master),
+ exit(Reason).
+
+check_fallback_dir(Master, FA) ->
+ case mnesia:system_info(schema_location) of
+ ram ->
+ Reason = {has_no_disc, node()},
+ local_fallback_error(Master, Reason);
+ _ ->
+ Dir = check_fallback_dir_arg(Master, FA),
+ Bup = filename:join([Dir, fallback_name()]),
+ Tmp = filename:join([Dir, fallback_tmp_name()]),
+ FA#fallback_args{fallback_bup = Bup,
+ fallback_tmp = Tmp,
+ mnesia_dir = Dir}
+ end.
+
+check_fallback_dir_arg(Master, FA) ->
+ case FA#fallback_args.use_default_dir of
+ true ->
+ mnesia_lib:dir();
+ false when FA#fallback_args.scope == local ->
+ Dir = FA#fallback_args.mnesia_dir,
+ case catch mnesia_monitor:do_check_type(dir, Dir) of
+ {'EXIT', _R} ->
+ Reason = {badarg, {dir, Dir}, node()},
+ local_fallback_error(Master, Reason);
+ AbsDir ->
+ AbsDir
+ end;
+ false when FA#fallback_args.scope == global ->
+ Reason = {combine_error, global, dir, node()},
+ local_fallback_error(Master, Reason)
+ end.
+
+fallback_receiver_loop(Master, R, FA, State) ->
+ receive
+ {Master, {start, Header, Schema}} when State == schema ->
+ Dir = FA#fallback_args.mnesia_dir,
+ throw_bad_res(ok, mnesia_schema:opt_create_dir(true, Dir)),
+ R2 = safe_apply(R, open_write, [R#restore.bup_data]),
+ R3 = safe_apply(R2, write, [R2#restore.bup_data, [Header]]),
+ BupSchema = [schema2bup(S) || S <- Schema],
+ R4 = safe_apply(R3, write, [R3#restore.bup_data, BupSchema]),
+ Master ! {self(), ok},
+ fallback_receiver_loop(Master, R4, FA, records);
+
+ {Master, {records, Recs}} when State == records ->
+ R2 = safe_apply(R, write, [R#restore.bup_data, Recs]),
+ Master ! {self(), ok},
+ fallback_receiver_loop(Master, R2, FA, records);
+
+ {Master, swap} when State /= schema ->
+ ?eval_debug_fun({?MODULE, fallback_receiver_loop, pre_swap}, []),
+ safe_apply(R, commit_write, [R#restore.bup_data]),
+ Bup = FA#fallback_args.fallback_bup,
+ Tmp = FA#fallback_args.fallback_tmp,
+ throw_bad_res(ok, file:rename(Tmp, Bup)),
+ catch mnesia_lib:set(active_fallback, true),
+ ?eval_debug_fun({?MODULE, fallback_receiver_loop, post_swap}, []),
+ Master ! {self(), ok},
+ fallback_receiver_loop(Master, R, FA, stop);
+
+ {Master, stop} when State == stop ->
+ stopped;
+
+ Msg ->
+ safe_apply(R, abort_write, [R#restore.bup_data]),
+ Tmp = FA#fallback_args.fallback_tmp,
+ file:delete(Tmp),
+ throw({error, "Unexpected msg fallback_receiver_loop", Msg})
+ end.
+
+throw_bad_res(Expected, Expected) -> Expected;
+throw_bad_res(_Expected, {error, Actual}) -> throw({error, Actual});
+throw_bad_res(_Expected, Actual) -> throw({error, Actual}).
+
+-record(local_tab, {name, storage_type, dets_args, open, close, add, record_name}).
+
+tm_fallback_start(IgnoreFallback) ->
+ mnesia_schema:lock_schema(),
+ Res = do_fallback_start(fallback_exists(), IgnoreFallback),
+ mnesia_schema:unlock_schema(),
+ case Res of
+ ok -> ok;
+ {error, Reason} -> exit(Reason)
+ end.
+
+do_fallback_start(false, _IgnoreFallback) ->
+ ok;
+do_fallback_start(true, true) ->
+ verbose("Ignoring fallback at startup, but leaving it active...~n", []),
+ mnesia_lib:set(active_fallback, true),
+ ok;
+do_fallback_start(true, false) ->
+ verbose("Starting from fallback...~n", []),
+
+ Fname = fallback_bup(),
+ Mod = mnesia_backup,
+ Ets = ?ets_new_table(mnesia_local_tables, [set, public, {keypos, 2}]),
+ case catch iterate(Mod, fun restore_tables/4, Fname, {start, Ets}) of
+ {ok, Res} ->
+ case Res of
+ {local, _, LT} -> %% Close the last file
+ (LT#local_tab.close)(LT);
+ _ ->
+ ignore
+ end,
+ List = ?ets_match_object(Ets, '_'),
+ Tabs = [L#local_tab.name || L <- List, L#local_tab.name /= schema],
+ ?ets_delete_table(Ets),
+ mnesia_lib:swap_tmp_files(Tabs),
+ catch dets:close(schema),
+ Tmp = mnesia_lib:tab2tmp(schema),
+ Dat = mnesia_lib:tab2dat(schema),
+ case file:rename(Tmp, Dat) of
+ ok ->
+ file:delete(Fname),
+ ok;
+ {error, Reason} ->
+ file:delete(Tmp),
+ {error, {"Cannot start from fallback. Rename error.", Reason}}
+ end;
+ {error, Reason} ->
+ {error, {"Cannot start from fallback", Reason}};
+ {'EXIT', Reason} ->
+ {error, {"Cannot start from fallback", Reason}}
+ end.
+
+restore_tables(Recs, Header, Schema, {start, LocalTabs}) ->
+ Dir = mnesia_lib:dir(),
+ OldDir = filename:join([Dir, "OLD_DIR"]),
+ mnesia_schema:purge_dir(OldDir, []),
+ mnesia_schema:purge_dir(Dir, [fallback_name()]),
+ init_dat_files(Schema, LocalTabs),
+ State = {new, LocalTabs},
+ restore_tables(Recs, Header, Schema, State);
+restore_tables([Rec | Recs], Header, Schema, {new, LocalTabs}) ->
+ Tab = element(1, Rec),
+ case ?ets_lookup(LocalTabs, Tab) of
+ [] ->
+ State = {not_local, LocalTabs, Tab},
+ restore_tables(Recs, Header, Schema, State);
+ [L] when record(L, local_tab) ->
+ (L#local_tab.open)(Tab, L),
+ State = {local, LocalTabs, L},
+ restore_tables([Rec | Recs], Header, Schema, State)
+ end;
+restore_tables([Rec | Recs], Header, Schema, S = {not_local, LocalTabs, PrevTab}) ->
+ Tab = element(1, Rec),
+ if
+ Tab == PrevTab ->
+ restore_tables(Recs, Header, Schema, S);
+ true ->
+ State = {new, LocalTabs},
+ restore_tables([Rec | Recs], Header, Schema, State)
+ end;
+restore_tables([Rec | Recs], Header, Schema, State = {local, LocalTabs, L}) ->
+ Tab = element(1, Rec),
+ if
+ Tab == L#local_tab.name ->
+ Key = element(2, Rec),
+ (L#local_tab.add)(Tab, Key, Rec, L),
+ restore_tables(Recs, Header, Schema, State);
+ true ->
+ (L#local_tab.close)(L),
+ NState = {new, LocalTabs},
+ restore_tables([Rec | Recs], Header, Schema, NState)
+ end;
+restore_tables([], _Header, _Schema, State) ->
+ State.
+
+%% Creates all necessary dat files and inserts
+%% the table definitions in the schema table
+%%
+%% Populates LocalTabs with a local_tab record for each local table
+init_dat_files(Schema, LocalTabs) ->
+ Fname = mnesia_lib:tab2tmp(schema),
+ Args = [{file, Fname}, {keypos, 2}, {type, set}],
+ case dets:open_file(schema, Args) of % Assume schema lock
+ {ok, _} ->
+ create_dat_files(Schema, LocalTabs),
+ dets:close(schema),
+ LocalTab = #local_tab{name = schema,
+ storage_type = disc_copies,
+ dets_args = Args,
+ open = fun open_media/2,
+ close = fun close_media/1,
+ add = fun add_to_media/4,
+ record_name = schema},
+ ?ets_insert(LocalTabs, LocalTab);
+ {error, Reason} ->
+ throw({error, {"Cannot open file", schema, Args, Reason}})
+ end.
+
+create_dat_files([{schema, schema, TabDef} | Tail], LocalTabs) ->
+ ok = dets:insert(schema, {schema, schema, TabDef}),
+ create_dat_files(Tail, LocalTabs);
+create_dat_files([{schema, Tab, TabDef} | Tail], LocalTabs) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ ok = dets:insert(schema, {schema, Tab, TabDef}),
+ RecName = Cs#cstruct.record_name,
+ case mnesia_lib:cs_to_storage_type(node(), Cs) of
+ unknown ->
+ cleanup_dat_file(Tab),
+ create_dat_files(Tail, LocalTabs);
+ disc_only_copies ->
+ Fname = mnesia_lib:tab2tmp(Tab),
+ Args = [{file, Fname}, {keypos, 2},
+ {type, mnesia_lib:disk_type(Tab, Cs#cstruct.type)}],
+ case mnesia_lib:dets_sync_open(Tab, Args) of
+ {ok, _} ->
+ mnesia_lib:dets_sync_close(Tab),
+ LocalTab = #local_tab{name = Tab,
+ storage_type = disc_only_copies,
+ dets_args = Args,
+ open = fun open_media/2,
+ close = fun close_media/1,
+ add = fun add_to_media/4,
+ record_name = RecName},
+ ?ets_insert(LocalTabs, LocalTab),
+ create_dat_files(Tail, LocalTabs);
+ {error, Reason} ->
+ throw({error, {"Cannot open file", Tab, Args, Reason}})
+ end;
+ ram_copies ->
+ %% Create .DCD if needed in open_media in case any ram_copies
+ %% are backed up.
+ LocalTab = #local_tab{name = Tab,
+ storage_type = ram_copies,
+ dets_args = ignore,
+ open = fun open_media/2,
+ close = fun close_media/1,
+ add = fun add_to_media/4,
+ record_name = RecName},
+ ?ets_insert(LocalTabs, LocalTab),
+ create_dat_files(Tail, LocalTabs);
+ Storage ->
+ %% Create DCD
+ Fname = mnesia_lib:tab2dcd(Tab),
+ file:delete(Fname),
+ Log = mnesia_log:open_log(fallback_tab, mnesia_log:dcd_log_header(),
+ Fname, false),
+ LocalTab = #local_tab{name = Tab,
+ storage_type = Storage,
+ dets_args = ignore,
+ open = fun open_media/2,
+ close = fun close_media/1,
+ add = fun add_to_media/4,
+ record_name = RecName},
+ mnesia_log:close_log(Log),
+ ?ets_insert(LocalTabs, LocalTab),
+ create_dat_files(Tail, LocalTabs)
+ end;
+create_dat_files([{schema, Tab} | Tail], LocalTabs) ->
+ cleanup_dat_file(Tab),
+ create_dat_files(Tail, LocalTabs);
+create_dat_files([], _LocalTabs) ->
+ ok.
+
+cleanup_dat_file(Tab) ->
+ ok = dets:delete(schema, {schema, Tab}),
+ mnesia_lib:cleanup_tmp_files([Tab]).
+
+open_media(Tab, LT) ->
+ case LT#local_tab.storage_type of
+ disc_only_copies ->
+ Args = LT#local_tab.dets_args,
+ case mnesia_lib:dets_sync_open(Tab, Args) of
+ {ok, _} -> ok;
+ {error, Reason} ->
+ throw({error, {"Cannot open file", Tab, Args, Reason}})
+ end;
+ ram_copies ->
+ %% Create .DCD since ram_copies are backed up.
+ FnameDCD = mnesia_lib:tab2dcd(Tab),
+ file:delete(FnameDCD),
+ Log = mnesia_log:open_log(fallback_tab,
+ mnesia_log:dcd_log_header(),
+ FnameDCD, false),
+ mnesia_log:close_log(Log),
+
+ %% Create .DCL
+ Fname = mnesia_lib:tab2dcl(Tab),
+ file:delete(Fname),
+ mnesia_log:open_log({?MODULE,Tab},
+ mnesia_log:dcl_log_header(),
+ Fname, false, false,
+ read_write);
+ _ ->
+ Fname = mnesia_lib:tab2dcl(Tab),
+ file:delete(Fname),
+ mnesia_log:open_log({?MODULE,Tab},
+ mnesia_log:dcl_log_header(),
+ Fname, false, false,
+ read_write)
+ end.
+close_media(L) ->
+ Tab = L#local_tab.name,
+ case L#local_tab.storage_type of
+ disc_only_copies ->
+ mnesia_lib:dets_sync_close(Tab);
+ _ ->
+ mnesia_log:close_log({?MODULE,Tab})
+ end.
+
+add_to_media(Tab, Key, Rec, L) ->
+ RecName = L#local_tab.record_name,
+ case L#local_tab.storage_type of
+ disc_only_copies ->
+ case Rec of
+ {Tab, Key} ->
+ ok = dets:delete(Tab, Key);
+ (Rec) when Tab == RecName ->
+ ok = dets:insert(Tab, Rec);
+ (Rec) ->
+ Rec2 = setelement(1, Rec, RecName),
+ ok = dets:insert(Tab, Rec2)
+ end;
+ _ ->
+ Log = {?MODULE, Tab},
+ case Rec of
+ {Tab, Key} ->
+ mnesia_log:append(Log, {{Tab, Key}, {Tab, Key}, delete});
+ (Rec) when Tab == RecName ->
+ mnesia_log:append(Log, {{Tab, Key}, Rec, write});
+ (Rec) ->
+ Rec2 = setelement(1, Rec, RecName),
+ mnesia_log:append(Log, {{Tab, Key}, Rec2, write})
+ end
+ end.
+
+uninstall_fallback() ->
+ uninstall_fallback([{scope, global}]).
+
+uninstall_fallback(Args) ->
+ case check_fallback_args(Args, #fallback_args{}) of
+ {ok, FA} ->
+ do_uninstall_fallback(FA);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+do_uninstall_fallback(FA) ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ case mnesia_lib:ensure_loaded(?APPLICATION) of
+ ok ->
+ Pid = spawn_link(?MODULE, uninstall_fallback_master, [self(), FA]),
+ receive
+ {'EXIT', Pid, Reason} -> % if appl has trapped exit
+ {error, {'EXIT', Reason}};
+ {Pid, Res} ->
+ Res
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+uninstall_fallback_master(ClientPid, FA) ->
+ process_flag(trap_exit, true),
+
+ FA2 = check_fallback_dir(ClientPid, FA), % May exit
+ Bup = FA2#fallback_args.fallback_bup,
+ case fallback_to_schema(Bup) of
+ {ok, fallback, List} ->
+ Cs = mnesia_schema:list2cs(List),
+ case catch get_fallback_nodes(FA, Cs#cstruct.disc_copies) of
+ Ns when list(Ns) ->
+ do_uninstall(ClientPid, Ns, FA);
+ {error, Reason} ->
+ local_fallback_error(ClientPid, Reason)
+ end;
+ {error, Reason} ->
+ local_fallback_error(ClientPid, Reason)
+ end.
+
+do_uninstall(ClientPid, Ns, FA) ->
+ Args = [self(), FA],
+ global:set_lock({{mnesia_table_lock, schema}, self()}, Ns, infinity),
+ Pids = [spawn_link(N, ?MODULE, local_uninstall_fallback, Args) || N <- Ns],
+ Res = do_uninstall(ClientPid, Pids, [], [], ok),
+ global:del_lock({{mnesia_table_lock, schema}, self()}, Ns),
+ ClientPid ! {self(), Res},
+ unlink(ClientPid),
+ exit(shutdown).
+
+do_uninstall(ClientPid, [Pid | Pids], GoodPids, BadNodes, Res) ->
+ receive
+ %% {'EXIT', ClientPid, _} ->
+ %% client_exit;
+ {'EXIT', Pid, Reason} ->
+ BadNode = node(Pid),
+ BadRes = {error, {"Uninstall fallback", BadNode, Reason}},
+ do_uninstall(ClientPid, Pids, GoodPids, [BadNode | BadNodes], BadRes);
+ {Pid, {error, Reason}} ->
+ BadNode = node(Pid),
+ BadRes = {error, {"Uninstall fallback", BadNode, Reason}},
+ do_uninstall(ClientPid, Pids, GoodPids, [BadNode | BadNodes], BadRes);
+ {Pid, started} ->
+ do_uninstall(ClientPid, Pids, [Pid | GoodPids], BadNodes, Res)
+ end;
+do_uninstall(ClientPid, [], GoodPids, [], ok) ->
+ lists:foreach(fun(Pid) -> Pid ! {self(), do_uninstall} end, GoodPids),
+ rec_uninstall(ClientPid, GoodPids, ok);
+do_uninstall(_ClientPid, [], GoodPids, BadNodes, BadRes) ->
+ lists:foreach(fun(Pid) -> exit(Pid, shutdown) end, GoodPids),
+ {error, {node_not_running, BadNodes, BadRes}}.
+
+local_uninstall_fallback(Master, FA) ->
+ %% Don't trap exit
+
+ register(mnesia_fallback, self()), % May exit
+ FA2 = check_fallback_dir(Master, FA), % May exit
+ Master ! {self(), started},
+
+ receive
+ {Master, do_uninstall} ->
+ ?eval_debug_fun({?MODULE, uninstall_fallback2, pre_delete}, []),
+ catch mnesia_lib:set(active_fallback, false),
+ Tmp = FA2#fallback_args.fallback_tmp,
+ Bup = FA2#fallback_args.fallback_bup,
+ file:delete(Tmp),
+ Res =
+ case fallback_exists(Bup) of
+ true -> file:delete(Bup);
+ false -> ok
+ end,
+ ?eval_debug_fun({?MODULE, uninstall_fallback2, post_delete}, []),
+ Master ! {self(), Res},
+ unlink(Master),
+ exit(normal)
+ end.
+
+rec_uninstall(ClientPid, [Pid | Pids], AccRes) ->
+ receive
+ %% {'EXIT', ClientPid, _} ->
+ %% exit(shutdown);
+ {'EXIT', Pid, R} ->
+ Reason = {node_not_running, {node(Pid), R}},
+ rec_uninstall(ClientPid, Pids, {error, Reason});
+ {Pid, ok} ->
+ rec_uninstall(ClientPid, Pids, AccRes);
+ {Pid, BadRes} ->
+ rec_uninstall(ClientPid, Pids, BadRes)
+ end;
+rec_uninstall(ClientPid, [], Res) ->
+ ClientPid ! {self(), Res},
+ unlink(ClientPid),
+ exit(normal).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Backup traversal
+
+%% Iterate over a backup and produce a new backup.
+%% Fun(BackupItem, Acc) is applied for each BackupItem.
+%%
+%% Valid BackupItems are:
+%%
+%% {schema, Tab} Table to be deleted
+%% {schema, Tab, CreateList} Table to be created, CreateList may be empty
+%% {schema, db_nodes, DbNodes} List of nodes, defaults to [node()] OLD
+%% {schema, version, Version} Schema version OLD
+%% {schema, cookie, Cookie} Unique schema cookie OLD
+%% {Tab, Key} Oid for record to be deleted
+%% Record Record to be inserted.
+%%
+%% The Fun must return a tuple {BackupItems, NewAcc}
+%% where BackupItems is a list of valid BackupItems and
+%% NewAcc is a new accumulator value. Once BackupItems
+%% that are not schema related have been returned, no more schema
+%% items may be returned. The schema related items must always be
+%% first in the backup.
+%%
+%% If TargetMod == read_only, no new backup will be created.
+%%
+%% Opening of the source media will be performed by
+%% SourceMod:open_read(Source).
+%%
+%% Opening of the target media will be performed by
+%% TargetMod:open_write(Target).
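+%%
+%% A sketch of such a Fun (illustrative only): it keeps every
+%% BackupItem unchanged while counting them. With TargetMod set to
+%% read_only no new backup is produced and the target argument is
+%% ignored:
+%%
+%%   Count = fun(Item, N) -> {[Item], N + 1} end,
+%%   mnesia:traverse_backup("BACKUP.BUP", mnesia_backup,
+%%                          ignore, read_only, Count, 0).
+%%
+%% "BACKUP.BUP" is a hypothetical backup file name.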
+traverse_backup(Source, Target, Fun, Acc) ->
+ Mod = mnesia_monitor:get_env(backup_module),
+ traverse_backup(Source, Mod, Target, Mod, Fun, Acc).
+
+traverse_backup(Source, SourceMod, Target, TargetMod, Fun, Acc) ->
+ Args = [self(), Source, SourceMod, Target, TargetMod, Fun, Acc],
+ Pid = spawn_link(?MODULE, do_traverse_backup, Args),
+ receive
+ {'EXIT', Pid, Reason} ->
+ {error, {"Backup traversal crashed", Reason}};
+ {iter_done, Pid, Res} ->
+ Res
+ end.
+
+do_traverse_backup(ClientPid, Source, SourceMod, Target, TargetMod, Fun, Acc) ->
+ process_flag(trap_exit, true),
+ Iter =
+ if
+ TargetMod /= read_only ->
+ case catch do_apply(TargetMod, open_write, [Target], Target) of
+ {error, Error} ->
+ unlink(ClientPid),
+ ClientPid ! {iter_done, self(), {error, Error}},
+ exit(Error);
+ Else -> Else
+ end;
+ true ->
+ ignore
+ end,
+ A = {start, Fun, Acc, TargetMod, Iter},
+ Res =
+ case iterate(SourceMod, fun trav_apply/4, Source, A) of
+ {ok, {iter, _, Acc2, _, Iter2}} when TargetMod /= read_only ->
+ case catch do_apply(TargetMod, commit_write, [Iter2], Iter2) of
+ {error, Reason} ->
+ {error, Reason};
+ _ ->
+ {ok, Acc2}
+ end;
+ {ok, {iter, _, Acc2, _, _}} ->
+ {ok, Acc2};
+ {error, Reason} when TargetMod /= read_only ->
+ catch do_apply(TargetMod, abort_write, [Iter], Iter),
+ {error, {"Backup traversal failed", Reason}};
+ {error, Reason} ->
+ {error, {"Backup traversal failed", Reason}}
+ end,
+ unlink(ClientPid),
+ ClientPid ! {iter_done, self(), Res}.
+
+trav_apply(Recs, _Header, _Schema, {iter, Fun, Acc, Mod, Iter}) ->
+ {NewRecs, Acc2} = filter_foldl(Fun, Acc, Recs),
+ if
+ Mod /= read_only, NewRecs /= [] ->
+ Iter2 = do_apply(Mod, write, [Iter, NewRecs], Iter),
+ {iter, Fun, Acc2, Mod, Iter2};
+ true ->
+ {iter, Fun, Acc2, Mod, Iter}
+ end;
+trav_apply(Recs, Header, Schema, {start, Fun, Acc, Mod, Iter}) ->
+ Iter2 =
+ if
+ Mod /= read_only ->
+ do_apply(Mod, write, [Iter, [Header]], Iter);
+ true ->
+ Iter
+ end,
+ TravAcc = trav_apply(Schema, Header, Schema, {iter, Fun, Acc, Mod, Iter2}),
+ trav_apply(Recs, Header, Schema, TravAcc).
+
+filter_foldl(Fun, Acc, [Head|Tail]) ->
+ case Fun(Head, Acc) of
+ {HeadItems, HeadAcc} when list(HeadItems) ->
+ {TailItems, TailAcc} = filter_foldl(Fun, HeadAcc, Tail),
+ {HeadItems ++ TailItems, TailAcc};
+ Other ->
+ throw({error, {"Fun must return a list", Other}})
+ end;
+filter_foldl(_Fun, Acc, []) ->
+ {[], Acc}.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl
new file mode 100644
index 0000000000..60a7a29861
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl
@@ -0,0 +1,1283 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_checkpoint.erl,v 1.1 2008/12/17 09:53:38 mikpe Exp $
+%%
+-module(mnesia_checkpoint).
+
+%% TM callback interface
+-export([
+ tm_add_copy/2,
+ tm_change_table_copy_type/3,
+ tm_del_copy/2,
+ tm_mnesia_down/1,
+ tm_prepare/1,
+ tm_retain/4,
+ tm_retain/5,
+ tm_enter_pending/1,
+ tm_enter_pending/3,
+ tm_exit_pending/1,
+ convert_cp_record/1
+ ]).
+
+%% Public interface
+-export([
+ activate/1,
+ checkpoints/0,
+ deactivate/1,
+ deactivate/2,
+ iterate/6,
+ most_local_node/2,
+ really_retain/2,
+ stop/0,
+ stop_iteration/1,
+ tables_and_cookie/1
+ ]).
+
+%% Internal
+-export([
+ call/2,
+ cast/2,
+ init/1,
+ remote_deactivate/1,
+ start/1
+ ]).
+
+%% sys callback interface
+-export([
+ system_code_change/4,
+ system_continue/3,
+ system_terminate/4
+ ]).
+
+-include("mnesia.hrl").
+-import(mnesia_lib, [add/2, del/2, set/2, unset/1]).
+-import(mnesia_lib, [dbg_out/2]).
+
+-record(tm, {log, pending, transactions, checkpoints}).
+
+-record(checkpoint_args, {name = {now(), node()},
+ allow_remote = true,
+ ram_overrides_dump = false,
+ nodes = [],
+ node = node(),
+ now = now(),
+ cookie = ?unique_cookie,
+ min = [],
+ max = [],
+ pending_tab,
+ wait_for_old, % Initially undefined then List
+ is_activated = false,
+ ignore_new = [],
+ retainers = [],
+ iterators = [],
+ supervisor,
+ pid
+ }).
+
+%% Old record definition
+-record(checkpoint, {name,
+ allow_remote,
+ ram_overrides_dump,
+ nodes,
+ node,
+ now,
+ min,
+ max,
+ pending_tab,
+ wait_for_old,
+ is_activated,
+ ignore_new,
+ retainers,
+ iterators,
+ supervisor,
+ pid
+ }).
+
+-record(retainer, {cp_name, tab_name, store, writers = [], really_retain = true}).
+
+-record(iter, {tab_name, oid_tab, main_tab, retainer_tab, source, val, pid}).
+
+-record(pending, {tid, disc_nodes = [], ram_nodes = []}).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% TM callback functions
+
+stop() ->
+ lists:foreach(fun(Name) -> call(Name, stop) end,
+ checkpoints()),
+ ok.
+
+tm_prepare(Cp) when record(Cp, checkpoint_args) ->
+ Name = Cp#checkpoint_args.name,
+ case lists:member(Name, checkpoints()) of
+ false ->
+ start_retainer(Cp);
+ true ->
+ {error, {already_exists, Name, node()}}
+ end;
+tm_prepare(Cp) when record(Cp, checkpoint) ->
+ %% Node with old protocol sent an old checkpoint record
+ %% and we have to convert it
+ case convert_cp_record(Cp) of
+ {ok, NewCp} ->
+ tm_prepare(NewCp);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+tm_mnesia_down(Node) ->
+ lists:foreach(fun(Name) -> cast(Name, {mnesia_down, Node}) end,
+ checkpoints()).
+
+%% Returns pending
+tm_enter_pending(Tid, DiscNs, RamNs) ->
+ Pending = #pending{tid = Tid, disc_nodes = DiscNs, ram_nodes = RamNs},
+ tm_enter_pending(Pending).
+
+tm_enter_pending(Pending) ->
+ PendingTabs = val(pending_checkpoints),
+ tm_enter_pending(PendingTabs, Pending).
+
+tm_enter_pending([], Pending) ->
+ Pending;
+tm_enter_pending([Tab | Tabs], Pending) ->
+ catch ?ets_insert(Tab, Pending),
+ tm_enter_pending(Tabs, Pending).
+
+tm_exit_pending(Tid) ->
+ Pids = val(pending_checkpoint_pids),
+ tm_exit_pending(Pids, Tid).
+
+tm_exit_pending([], Tid) ->
+ Tid;
+tm_exit_pending([Pid | Pids], Tid) ->
+ Pid ! {self(), {exit_pending, Tid}},
+ tm_exit_pending(Pids, Tid).
+
+enter_still_pending([Tid | Tids], Tab) ->
+ ?ets_insert(Tab, #pending{tid = Tid}),
+ enter_still_pending(Tids, Tab);
+enter_still_pending([], _Tab) ->
+ ok.
+
+
+%% Looks up checkpoints for functions in mnesia_tm.
+tm_retain(Tid, Tab, Key, Op) ->
+ case val({Tab, commit_work}) of
+ [{checkpoints, Checkpoints} | _ ] ->
+ tm_retain(Tid, Tab, Key, Op, Checkpoints);
+ _ ->
+ undefined
+ end.
+
+tm_retain(Tid, Tab, Key, Op, Checkpoints) ->
+ case Op of
+ clear_table ->
+ OldRecs = mnesia_lib:db_match_object(Tab, '_'),
+ send_group_retain(OldRecs, Checkpoints, Tid, Tab, []),
+ OldRecs;
+ _ ->
+ OldRecs = mnesia_lib:db_get(Tab, Key),
+ send_retain(Checkpoints, {retain, Tid, Tab, Key, OldRecs}),
+ OldRecs
+ end.
+
+send_group_retain([Rec | Recs], Checkpoints, Tid, Tab, [PrevRec | PrevRecs])
+ when element(2, Rec) /= element(2, PrevRec) ->
+ Key = element(2, PrevRec),
+ OldRecs = lists:reverse([PrevRec | PrevRecs]),
+ send_retain(Checkpoints, {retain, Tid, Tab, Key, OldRecs}),
+ send_group_retain(Recs, Checkpoints, Tid, Tab, [Rec]);
+send_group_retain([Rec | Recs], Checkpoints, Tid, Tab, Acc) ->
+ send_group_retain(Recs, Checkpoints, Tid, Tab, [Rec | Acc]);
+send_group_retain([], Checkpoints, Tid, Tab, [PrevRec | PrevRecs]) ->
+ Key = element(2, PrevRec),
+ OldRecs = lists:reverse([PrevRec | PrevRecs]),
+ send_retain(Checkpoints, {retain, Tid, Tab, Key, OldRecs}),
+ ok;
+send_group_retain([], _Checkpoints, _Tid, _Tab, []) ->
+ ok.
+
+send_retain([Name | Names], Msg) ->
+ cast(Name, Msg),
+ send_retain(Names, Msg);
+send_retain([], _Msg) ->
+ ok.
+
+tm_add_copy(Tab, Node) when Node /= node() ->
+ case val({Tab, commit_work}) of
+ [{checkpoints, Checkpoints} | _ ] ->
+ Fun = fun(Name) -> call(Name, {add_copy, Tab, Node}) end,
+ map_call(Fun, Checkpoints, ok);
+ _ ->
+ ok
+ end.
+
+tm_del_copy(Tab, Node) when Node == node() ->
+ mnesia_subscr:unsubscribe_table(Tab),
+ case val({Tab, commit_work}) of
+ [{checkpoints, Checkpoints} | _ ] ->
+ Fun = fun(Name) -> call(Name, {del_copy, Tab, Node}) end,
+ map_call(Fun, Checkpoints, ok);
+ _ ->
+ ok
+ end.
+
+tm_change_table_copy_type(Tab, From, To) ->
+ case val({Tab, commit_work}) of
+ [{checkpoints, Checkpoints} | _ ] ->
+ Fun = fun(Name) -> call(Name, {change_copy, Tab, From, To}) end,
+ map_call(Fun, Checkpoints, ok);
+ _ ->
+ ok
+ end.
+
+map_call(Fun, [Name | Names], Res) ->
+ case Fun(Name) of
+ ok ->
+ map_call(Fun, Names, Res);
+ {error, {no_exists, Name}} ->
+ map_call(Fun, Names, Res);
+ {error, Reason} ->
+ %% BUGBUG: We may end up with too many checkpoint retainers
+ %% in the add_copy case. How do we remove them?
+ map_call(Fun, Names, {error, Reason})
+ end;
+map_call(_Fun, [], Res) ->
+ Res.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Public functions
+
+deactivate(Name) ->
+ case call(Name, get_checkpoint) of
+ {error, Reason} ->
+ {error, Reason};
+ Cp ->
+ deactivate(Cp#checkpoint_args.nodes, Name)
+ end.
+
+deactivate(Nodes, Name) ->
+ rpc:multicall(Nodes, ?MODULE, remote_deactivate, [Name]),
+ ok.
+
+remote_deactivate(Name) ->
+ call(Name, deactivate).
+
+checkpoints() -> val(checkpoints).
+
+tables_and_cookie(Name) ->
+ case call(Name, get_checkpoint) of
+ {error, Reason} ->
+ {error, Reason};
+ Cp ->
+ Tabs = Cp#checkpoint_args.min ++ Cp#checkpoint_args.max,
+ Cookie = Cp#checkpoint_args.cookie,
+ {ok, Tabs, Cookie}
+ end.
+
+most_local_node(Name, Tab) ->
+ case ?catch_val({Tab, {retainer, Name}}) of
+ {'EXIT', _} ->
+ {error, {"No retainer attached to table", [Tab, Name]}};
+ R ->
+ Writers = R#retainer.writers,
+ LocalWriter = lists:member(node(), Writers),
+ if
+ LocalWriter == true ->
+ {ok, node()};
+ Writers /= [] ->
+ {ok, hd(Writers)};
+ true ->
+ {error, {"No retainer attached to table", [Tab, Name]}}
+ end
+ end.
+
+really_retain(Name, Tab) ->
+ R = val({Tab, {retainer, Name}}),
+ R#retainer.really_retain.
+
+%% Activate a checkpoint.
+%%
+%% A checkpoint is a transaction-consistent state that may be used to
+%% perform a distributed backup or to roll back the involved tables to
+%% their old state. Backups may also be used to restore tables to
+%% their old state. Args is a list of the following tuples:
+%%
+%% {name, Name}
+%% Name of checkpoint. Each checkpoint must have a name which
+%% is unique on the reachable nodes. The name may be reused when
+%% the checkpoint has been deactivated.
+%% By default a probably unique name is generated.
+%% Multiple checkpoints may be set on the same table.
+%%
+%% {allow_remote, Bool}
+%% false means that all retainers must be local. If the
+%% table does not reside locally, the checkpoint fails.
+%% true allows retainers on other nodes.
+%%
+%% {min, MinTabs}
+%% Minimize redundancy and only keep checkpoint info together with
+%% one replica, preferably at the local node. If any node involved in
+%% the checkpoint goes down, the checkpoint is deactivated.
+%%
+%% {max, MaxTabs}
+%% Maximize redundancy and keep checkpoint info together with all
+%% replicas. The checkpoint becomes more fault tolerant if the
+%% tables have several replicas. When new replicas are added, they
+%% will also get a retainer attached to them.
+%%
+%% {ram_overrides_dump, Bool}
+%% {ram_overrides_dump, Tabs}
+%% Only applicable for ram_copies. Bool controls which versions of
+%% the records that should be included in the checkpoint state.
+%% true means that the latest committed records in ram (i.e. the
+%% records that the application accesses) should be included
+%% in the checkpoint. false means that the records dumped to
+%% dat-files (the records that will be loaded at startup) should
+%% be included in the checkpoint. Tabs is a list of tables.
+%% Default is false.
+%%
+%% {ignore_new, TidList}
+%% Normally we wait for all pending transactions to complete
+%% before we allow iteration over the checkpoint. But in order
+%% to cope with checkpoint activation inside a transaction that
+%% currently prepares commit (mnesia_init:get_net_work_copy) we
+%% need to have the ability to ignore the enclosing transaction.
+%% We do not wait for the transactions in TidList to end. The
+%% transactions in TidList are regarded as newer than the checkpoint.
+
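+%% Illustrative usage sketch (not from the original source; the
+%% checkpoint name my_cp and the table foo are assumptions): activate
+%% a checkpoint that keeps retainers on all replicas of foo, using
+%% the latest committed ram values, and deactivate it afterwards.
+%% In a running system this is normally reached via
+%% mnesia:activate_checkpoint/1 and mnesia:deactivate_checkpoint/1.
+%%
+%% {ok, Name, _Nodes} =
+%%     mnesia_checkpoint:activate([{name, my_cp},
+%%                                 {max, [foo]},
+%%                                 {ram_overrides_dump, true}]),
+%% %% ... perform a backup or other work against the checkpoint ...
+%% ok = mnesia_checkpoint:deactivate(Name).
+%%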
+activate(Args) ->
+ case args2cp(Args) of
+ {ok, Cp} ->
+ do_activate(Cp);
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+args2cp(Args) when list(Args)->
+ case catch lists:foldl(fun check_arg/2, #checkpoint_args{}, Args) of
+ {'EXIT', Reason} ->
+ {error, Reason};
+ Cp ->
+ case check_tables(Cp) of
+ {error, Reason} ->
+ {error, Reason};
+ {ok, Overriders, AllTabs} ->
+ arrange_retainers(Cp, Overriders, AllTabs)
+ end
+ end;
+args2cp(Args) ->
+ {error, {badarg, Args}}.
+
+check_arg({name, Name}, Cp) ->
+ case lists:member(Name, checkpoints()) of
+ true ->
+ exit({already_exists, Name});
+ false ->
+ case catch tab2retainer({foo, Name}) of
+ List when list(List) ->
+ Cp#checkpoint_args{name = Name};
+ _ ->
+ exit({badarg, Name})
+ end
+ end;
+check_arg({allow_remote, true}, Cp) ->
+ Cp#checkpoint_args{allow_remote = true};
+check_arg({allow_remote, false}, Cp) ->
+ Cp#checkpoint_args{allow_remote = false};
+check_arg({ram_overrides_dump, true}, Cp) ->
+ Cp#checkpoint_args{ram_overrides_dump = true};
+check_arg({ram_overrides_dump, false}, Cp) ->
+ Cp#checkpoint_args{ram_overrides_dump = false};
+check_arg({ram_overrides_dump, Tabs}, Cp) when list(Tabs) ->
+ Cp#checkpoint_args{ram_overrides_dump = Tabs};
+check_arg({min, Tabs}, Cp) when list(Tabs) ->
+ Cp#checkpoint_args{min = Tabs};
+check_arg({max, Tabs}, Cp) when list(Tabs) ->
+ Cp#checkpoint_args{max = Tabs};
+check_arg({ignore_new, Tids}, Cp) when list(Tids) ->
+ Cp#checkpoint_args{ignore_new = Tids};
+check_arg(Arg, _) ->
+ exit({badarg, Arg}).
+
+check_tables(Cp) ->
+ Min = Cp#checkpoint_args.min,
+ Max = Cp#checkpoint_args.max,
+ AllTabs = Min ++ Max,
+ DoubleTabs = [T || T <- Min, lists:member(T, Max)],
+ Overriders = Cp#checkpoint_args.ram_overrides_dump,
+ if
+ DoubleTabs /= [] ->
+ {error, {combine_error, Cp#checkpoint_args.name,
+ [{min, DoubleTabs}, {max, DoubleTabs}]}};
+ Min == [], Max == [] ->
+ {error, {combine_error, Cp#checkpoint_args.name,
+ [{min, Min}, {max, Max}]}};
+ Overriders == false ->
+ {ok, [], AllTabs};
+ Overriders == true ->
+ {ok, AllTabs, AllTabs};
+ list(Overriders) ->
+ case [T || T <- Overriders, not lists:member(T, Min)] of
+ [] ->
+ case [T || T <- Overriders, not lists:member(T, Max)] of
+ [] ->
+ {ok, Overriders, AllTabs};
+ Outsiders ->
+ {error, {combine_error, Cp#checkpoint_args.name,
+ [{ram_overrides_dump, Outsiders},
+ {max, Outsiders}]}}
+ end;
+ Outsiders ->
+ {error, {combine_error, Cp#checkpoint_args.name,
+ [{ram_overrides_dump, Outsiders},
+ {min, Outsiders}]}}
+ end
+ end.
+
+arrange_retainers(Cp, Overriders, AllTabs) ->
+ R = #retainer{cp_name = Cp#checkpoint_args.name},
+ case catch [R#retainer{tab_name = Tab,
+ writers = select_writers(Cp, Tab)}
+ || Tab <- AllTabs] of
+ {'EXIT', Reason} ->
+ {error, Reason};
+ Retainers ->
+ {ok, Cp#checkpoint_args{ram_overrides_dump = Overriders,
+ retainers = Retainers,
+ nodes = writers(Retainers)}}
+ end.
+
+select_writers(Cp, Tab) ->
+ case filter_remote(Cp, val({Tab, active_replicas})) of
+ [] ->
+ exit({"Cannot prepare checkpoint (replica not available)",
+ [Tab, Cp#checkpoint_args.name]});
+ Writers ->
+ This = node(),
+ case {lists:member(Tab, Cp#checkpoint_args.max),
+ lists:member(This, Writers)} of
+ {true, _} -> Writers; % Max
+ {false, true} -> [This];
+ {false, false} -> [hd(Writers)]
+ end
+ end.
+
+filter_remote(Cp, Writers) when Cp#checkpoint_args.allow_remote == true ->
+ Writers;
+filter_remote(_Cp, Writers) ->
+ This = node(),
+ case lists:member(This, Writers) of
+ true -> [This];
+ false -> []
+ end.
+
+writers(Retainers) ->
+ Fun = fun(R, Acc) -> R#retainer.writers ++ Acc end,
+ Writers = lists:foldl(Fun, [], Retainers),
+ mnesia_lib:uniq(Writers).
+
+do_activate(Cp) ->
+ Name = Cp#checkpoint_args.name,
+ Nodes = Cp#checkpoint_args.nodes,
+ case mnesia_tm:prepare_checkpoint(Nodes, Cp) of
+ {Replies, []} ->
+ check_prep(Replies, Name, Nodes, Cp#checkpoint_args.ignore_new);
+ {_, BadNodes} ->
+ {error, {"Cannot prepare checkpoint (bad nodes)",
+ [Name, BadNodes]}}
+ end.
+
+check_prep([{ok, Name, IgnoreNew, _Node} | Replies], Name, Nodes, IgnoreNew) ->
+ check_prep(Replies, Name, Nodes, IgnoreNew);
+check_prep([{error, Reason} | _Replies], Name, _Nodes, _IgnoreNew) ->
+ {error, {"Cannot prepare checkpoint (bad reply)",
+ [Name, Reason]}};
+check_prep([{badrpc, Reason} | _Replies], Name, _Nodes, _IgnoreNew) ->
+ {error, {"Cannot prepare checkpoint (badrpc)",
+ [Name, Reason]}};
+check_prep([], Name, Nodes, IgnoreNew) ->
+ collect_pending(Name, Nodes, IgnoreNew).
+
+collect_pending(Name, Nodes, IgnoreNew) ->
+ case rpc:multicall(Nodes, ?MODULE, call, [Name, collect_pending]) of
+ {Replies, []} ->
+ case catch ?ets_new_table(mnesia_union, [bag]) of
+ {'EXIT', Reason} -> %% system limit
+ Msg = "Cannot create an ets table pending union",
+ {error, {system_limit, Msg, Reason}};
+ UnionTab ->
+ compute_union(Replies, Nodes, Name, UnionTab, IgnoreNew)
+ end;
+ {_, BadNodes} ->
+ deactivate(Nodes, Name),
+ {error, {"Cannot collect from pending checkpoint", Name, BadNodes}}
+ end.
+
+compute_union([{ok, Pending} | Replies], Nodes, Name, UnionTab, IgnoreNew) ->
+ add_pending(Pending, UnionTab),
+ compute_union(Replies, Nodes, Name, UnionTab, IgnoreNew);
+compute_union([{error, Reason} | _Replies], Nodes, Name, UnionTab, _IgnoreNew) ->
+ deactivate(Nodes, Name),
+ ?ets_delete_table(UnionTab),
+ {error, Reason};
+compute_union([{badrpc, Reason} | _Replies], Nodes, Name, UnionTab, _IgnoreNew) ->
+ deactivate(Nodes, Name),
+ ?ets_delete_table(UnionTab),
+ {error, {badrpc, Reason}};
+compute_union([], Nodes, Name, UnionTab, IgnoreNew) ->
+ send_activate(Nodes, Nodes, Name, UnionTab, IgnoreNew).
+
+add_pending([P | Pending], UnionTab) ->
+ add_pending_node(P#pending.disc_nodes, P#pending.tid, UnionTab),
+ add_pending_node(P#pending.ram_nodes, P#pending.tid, UnionTab),
+ add_pending(Pending, UnionTab);
+add_pending([], _UnionTab) ->
+ ok.
+
+add_pending_node([Node | Nodes], Tid, UnionTab) ->
+ ?ets_insert(UnionTab, {Node, Tid}),
+ add_pending_node(Nodes, Tid, UnionTab);
+add_pending_node([], _Tid, _UnionTab) ->
+ ok.
+
+send_activate([Node | Nodes], AllNodes, Name, UnionTab, IgnoreNew) ->
+ Pending = [Tid || {_, Tid} <- ?ets_lookup(UnionTab, Node),
+ not lists:member(Tid, IgnoreNew)],
+ case rpc:call(Node, ?MODULE, call, [Name, {activate, Pending}]) of
+ activated ->
+ send_activate(Nodes, AllNodes, Name, UnionTab, IgnoreNew);
+ {badrpc, Reason} ->
+ deactivate(Nodes, Name),
+ ?ets_delete_table(UnionTab),
+ {error, {"Activation failed (bad node)", Name, Node, Reason}};
+ {error, Reason} ->
+ deactivate(Nodes, Name),
+ ?ets_delete_table(UnionTab),
+ {error, {"Activation failed", Name, Node, Reason}}
+ end;
+send_activate([], AllNodes, Name, UnionTab, _IgnoreNew) ->
+ ?ets_delete_table(UnionTab),
+ {ok, Name, AllNodes}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Checkpoint server
+
+cast(Name, Msg) ->
+ case ?catch_val({checkpoint, Name}) of
+ {'EXIT', _} ->
+ {error, {no_exists, Name}};
+
+ Pid when pid(Pid) ->
+ Pid ! {self(), Msg},
+ {ok, Pid}
+ end.
+
+call(Name, Msg) ->
+ case cast(Name, Msg) of
+ {ok, Pid} ->
+ catch link(Pid), % Always local
+ Self = self(),
+ receive
+ {'EXIT', Pid, Reason} ->
+ {error, {"Got exit", [Name, Reason]}};
+ {Name, Self, Reply} ->
+ unlink(Pid),
+ Reply
+ end;
+ Error ->
+ Error
+ end.
+
+abcast(Nodes, Name, Msg) ->
+ rpc:eval_everywhere(Nodes, ?MODULE, cast, [Name, Msg]).
+
+reply(nopid, _Name, _Reply) ->
+ ignore;
+reply(ReplyTo, Name, Reply) ->
+ ReplyTo ! {Name, ReplyTo, Reply}.
+
+%% Returns {ok, NewCp} or {error, Reason}
+start_retainer(Cp) ->
+ % Will never be restarted
+ Name = Cp#checkpoint_args.name,
+ case supervisor:start_child(mnesia_checkpoint_sup, [Cp]) of
+ {ok, _Pid} ->
+ {ok, Name, Cp#checkpoint_args.ignore_new, node()};
+ {error, Reason} ->
+ {error, {"Cannot create checkpoint retainer",
+ Name, node(), Reason}}
+ end.
+
+start(Cp) ->
+ Name = Cp#checkpoint_args.name,
+ Args = [Cp#checkpoint_args{supervisor = self()}],
+ mnesia_monitor:start_proc({?MODULE, Name}, ?MODULE, init, Args).
+
+init(Cp) ->
+ process_flag(trap_exit, true),
+ Name = Cp#checkpoint_args.name,
+ Props = [set, public, {keypos, 2}],
+ case catch ?ets_new_table(mnesia_pending_checkpoint, Props) of
+ {'EXIT', Reason} -> %% system limit
+ Msg = "Cannot create an ets table for pending transactions",
+ Error = {error, {system_limit, Name, Msg, Reason}},
+ proc_lib:init_ack(Cp#checkpoint_args.supervisor, Error);
+ PendingTab ->
+ Rs = [prepare_tab(Cp, R) || R <- Cp#checkpoint_args.retainers],
+ Cp2 = Cp#checkpoint_args{retainers = Rs,
+ pid = self(),
+ pending_tab = PendingTab},
+ add(pending_checkpoint_pids, self()),
+ add(pending_checkpoints, PendingTab),
+ set({checkpoint, Name}, self()),
+ add(checkpoints, Name),
+ dbg_out("Checkpoint ~p (~p) started~n", [Name, self()]),
+ proc_lib:init_ack(Cp2#checkpoint_args.supervisor, {ok, self()}),
+ retainer_loop(Cp2)
+ end.
+
+prepare_tab(Cp, R) ->
+ Tab = R#retainer.tab_name,
+ prepare_tab(Cp, R, val({Tab, storage_type})).
+
+prepare_tab(Cp, R, Storage) ->
+ Tab = R#retainer.tab_name,
+ Name = R#retainer.cp_name,
+ case lists:member(node(), R#retainer.writers) of
+ true ->
+ R2 = retainer_create(Cp, R, Tab, Name, Storage),
+ set({Tab, {retainer, Name}}, R2),
+ add({Tab, checkpoints}, Name), %% Keep checkpoint info for table_info & mnesia_session
+ add_chkp_info(Tab, Name),
+ R2;
+ false ->
+ set({Tab, {retainer, Name}}, R#retainer{store = undefined}),
+ R
+ end.
+
+add_chkp_info(Tab, Name) ->
+ case val({Tab, commit_work}) of
+ [{checkpoints, OldList} | CommitList] ->
+ case lists:member(Name, OldList) of
+ true ->
+ ok;
+ false ->
+ NewC = [{checkpoints, [Name | OldList]} | CommitList],
+ mnesia_lib:set({Tab, commit_work}, NewC)
+ end;
+ CommitList ->
+ Chkp = {checkpoints, [Name]},
+ %% OBS checkpoints need to be first in the list!
+ mnesia_lib:set({Tab, commit_work}, [Chkp | CommitList])
+ end.
+
+tab2retainer({Tab, Name}) ->
+ FlatName = lists:flatten(io_lib:write(Name)),
+ mnesia_lib:dir(lists:concat([?MODULE, "_", Tab, "_", FlatName, ".RET"])).
+
+retainer_create(_Cp, R, Tab, Name, disc_only_copies) ->
+ Fname = tab2retainer({Tab, Name}),
+ file:delete(Fname),
+ Args = [{file, Fname}, {type, set}, {keypos, 2}, {repair, false}],
+ {ok, _} = mnesia_lib:dets_sync_open({Tab, Name}, Args),
+ dbg_out("Checkpoint retainer created ~p ~p~n", [Name, Tab]),
+ R#retainer{store = {dets, {Tab, Name}}, really_retain = true};
+retainer_create(Cp, R, Tab, Name, Storage) ->
+ T = ?ets_new_table(mnesia_retainer, [set, public, {keypos, 2}]),
+ Overriders = Cp#checkpoint_args.ram_overrides_dump,
+ ReallyR = R#retainer.really_retain,
+ ReallyCp = lists:member(Tab, Overriders),
+ ReallyR2 = prepare_ram_tab(Tab, T, Storage, ReallyR, ReallyCp),
+ dbg_out("Checkpoint retainer created ~p ~p~n", [Name, Tab]),
+ R#retainer{store = {ets, T}, really_retain = ReallyR2}.
+
+%% Copy the dumped table into retainer if needed
+%% If the really_retain flag already has been set to false,
+%% it should remain false even if we change storage type
+%% while the checkpoint is activated.
+prepare_ram_tab(Tab, T, ram_copies, true, false) ->
+ Fname = mnesia_lib:tab2dcd(Tab),
+ case mnesia_lib:exists(Fname) of
+ true ->
+ Log = mnesia_log:open_log(prepare_ram_tab,
+ mnesia_log:dcd_log_header(),
+ Fname, true,
+ mnesia_monitor:get_env(auto_repair),
+ read_only),
+ Add = fun(Rec) ->
+ Key = element(2, Rec),
+ Recs =
+ case ?ets_lookup(T, Key) of
+ [] -> [];
+ [{_, _, Old}] -> Old
+ end,
+ ?ets_insert(T, {Tab, Key, [Rec | Recs]}),
+ continue
+ end,
+ traverse_dcd(mnesia_log:chunk_log(Log, start), Log, Add),
+ mnesia_log:close_log(Log);
+ false ->
+ ok
+ end,
+ false;
+prepare_ram_tab(_, _, _, ReallyRetain, _) ->
+ ReallyRetain.
+
+traverse_dcd({Cont, [LogH | Rest]}, Log, Fun)
+ when record(LogH, log_header),
+ LogH#log_header.log_kind == dcd_log,
+ LogH#log_header.log_version >= "1.0" ->
+ traverse_dcd({Cont, Rest}, Log, Fun); %% BUGBUG Error handling repaired files
+traverse_dcd({Cont, Recs}, Log, Fun) -> %% trashed data??
+ lists:foreach(Fun, Recs),
+ traverse_dcd(mnesia_log:chunk_log(Log, Cont), Log, Fun);
+traverse_dcd(eof, _Log, _Fun) ->
+ ok.
+
+retainer_get({ets, Store}, Key) -> ?ets_lookup(Store, Key);
+retainer_get({dets, Store}, Key) -> dets:lookup(Store, Key).
+
+retainer_put({ets, Store}, Val) -> ?ets_insert(Store, Val);
+retainer_put({dets, Store}, Val) -> dets:insert(Store, Val).
+
+retainer_first({ets, Store}) -> ?ets_first(Store);
+retainer_first({dets, Store}) -> dets:first(Store).
+
+retainer_next({ets, Store}, Key) -> ?ets_next(Store, Key);
+retainer_next({dets, Store}, Key) -> dets:next(Store, Key).
+
+%% retainer_next_slot(Tab, Pos) ->
+%% case retainer_slot(Tab, Pos) of
+%% '$end_of_table' ->
+%% '$end_of_table';
+%% [] ->
+%% retainer_next_slot(Tab, Pos + 1);
+%% Recs when list(Recs) ->
+%% {Pos, Recs}
+%% end.
+%%
+%% retainer_slot({ets, Store}, Pos) -> ?ets_next(Store, Pos);
+%% retainer_slot({dets, Store}, Pos) -> dets:slot(Store, Pos).
+
+retainer_fixtable(Tab, Bool) when atom(Tab) ->
+ mnesia_lib:db_fixtable(val({Tab, storage_type}), Tab, Bool);
+retainer_fixtable({ets, Tab}, Bool) ->
+ mnesia_lib:db_fixtable(ram_copies, Tab, Bool);
+retainer_fixtable({dets, Tab}, Bool) ->
+ mnesia_lib:db_fixtable(disc_only_copies, Tab, Bool).
+
+retainer_delete({ets, Store}) ->
+ ?ets_delete_table(Store);
+retainer_delete({dets, Store}) ->
+ mnesia_lib:dets_sync_close(Store),
+ Fname = tab2retainer(Store),
+ file:delete(Fname).
+
+retainer_loop(Cp) ->
+ Name = Cp#checkpoint_args.name,
+ receive
+ {_From, {retain, Tid, Tab, Key, OldRecs}}
+ when Cp#checkpoint_args.wait_for_old == [] ->
+ R = val({Tab, {retainer, Name}}),
+ case R#retainer.really_retain of
+ true ->
+ PendingTab = Cp#checkpoint_args.pending_tab,
+ case catch ?ets_lookup_element(PendingTab, Tid, 1) of
+ {'EXIT', _} ->
+ Store = R#retainer.store,
+ case retainer_get(Store, Key) of
+ [] ->
+ retainer_put(Store, {Tab, Key, OldRecs});
+ _ ->
+ already_retained
+ end;
+ pending ->
+ ignore
+ end;
+ false ->
+ ignore
+ end,
+ retainer_loop(Cp);
+
+ %% Adm
+ {From, deactivate} ->
+ do_stop(Cp),
+ reply(From, Name, deactivated),
+ unlink(From),
+ exit(shutdown);
+
+ {'EXIT', Parent, _} when Parent == Cp#checkpoint_args.supervisor ->
+ %% do_stop(Cp),
+ %% assume that entire Mnesia is terminating
+ exit(shutdown);
+
+ {_From, {mnesia_down, Node}} ->
+ Cp2 = do_del_retainers(Cp, Node),
+ retainer_loop(Cp2);
+ {From, get_checkpoint} ->
+ reply(From, Name, Cp),
+ retainer_loop(Cp);
+ {From, {add_copy, Tab, Node}} when Cp#checkpoint_args.wait_for_old == [] ->
+ {Res, Cp2} = do_add_copy(Cp, Tab, Node),
+ reply(From, Name, Res),
+ retainer_loop(Cp2);
+ {From, {del_copy, Tab, Node}} when Cp#checkpoint_args.wait_for_old == [] ->
+ Cp2 = do_del_copy(Cp, Tab, Node),
+ reply(From, Name, ok),
+ retainer_loop(Cp2);
+ {From, {change_copy, Tab, FromType, ToType}} when Cp#checkpoint_args.wait_for_old == [] ->
+ Cp2 = do_change_copy(Cp, Tab, FromType, ToType),
+ reply(From, Name, ok),
+ retainer_loop(Cp2);
+ {_From, {add_retainer, R, Node}} ->
+ Cp2 = do_add_retainer(Cp, R, Node),
+ retainer_loop(Cp2);
+ {_From, {del_retainer, R, Node}} when Cp#checkpoint_args.wait_for_old == [] ->
+ Cp2 = do_del_retainer(Cp, R, Node),
+ retainer_loop(Cp2);
+
+ %% Iteration
+ {From, {iter_begin, Iter}} when Cp#checkpoint_args.wait_for_old == [] ->
+ Cp2 = iter_begin(Cp, From, Iter),
+ retainer_loop(Cp2);
+
+ {From, {iter_end, Iter}} when Cp#checkpoint_args.wait_for_old == [] ->
+ retainer_fixtable(Iter#iter.oid_tab, false),
+ Iters = Cp#checkpoint_args.iterators -- [Iter],
+ reply(From, Name, ok),
+ retainer_loop(Cp#checkpoint_args{iterators = Iters});
+
+ {_From, {exit_pending, Tid}}
+ when list(Cp#checkpoint_args.wait_for_old) ->
+ StillPending = lists:delete(Tid, Cp#checkpoint_args.wait_for_old),
+ Cp2 = Cp#checkpoint_args{wait_for_old = StillPending},
+ Cp3 = maybe_activate(Cp2),
+ retainer_loop(Cp3);
+
+ {From, collect_pending} ->
+ PendingTab = Cp#checkpoint_args.pending_tab,
+ del(pending_checkpoints, PendingTab),
+ Pending = ?ets_match_object(PendingTab, '_'),
+ reply(From, Name, {ok, Pending}),
+ retainer_loop(Cp);
+
+ {From, {activate, Pending}} ->
+ StillPending = mnesia_recover:still_pending(Pending),
+ enter_still_pending(StillPending, Cp#checkpoint_args.pending_tab),
+ Cp2 = maybe_activate(Cp#checkpoint_args{wait_for_old = StillPending}),
+ reply(From, Name, activated),
+ retainer_loop(Cp2);
+
+ {'EXIT', From, _Reason} ->
+ Iters = [Iter || Iter <- Cp#checkpoint_args.iterators,
+ check_iter(From, Iter)],
+ retainer_loop(Cp#checkpoint_args{iterators = Iters});
+
+ {system, From, Msg} ->
+ dbg_out("~p got {system, ~p, ~p}~n", [?MODULE, From, Msg]),
+ sys:handle_system_msg(Msg, From, no_parent, ?MODULE, [], Cp)
+ end.
+
+maybe_activate(Cp)
+ when Cp#checkpoint_args.wait_for_old == [],
+ Cp#checkpoint_args.is_activated == false ->
+ Cp#checkpoint_args{pending_tab = undefined, is_activated = true};
+maybe_activate(Cp) ->
+ Cp.
+
+iter_begin(Cp, From, Iter) ->
+ Name = Cp#checkpoint_args.name,
+ R = val({Iter#iter.tab_name, {retainer, Name}}),
+ Iter2 = init_tabs(R, Iter),
+ Iter3 = Iter2#iter{pid = From},
+ retainer_fixtable(Iter3#iter.oid_tab, true),
+ Iters = [Iter3 | Cp#checkpoint_args.iterators],
+ reply(From, Name, {ok, Iter3, self()}),
+ Cp#checkpoint_args{iterators = Iters}.
+
+do_stop(Cp) ->
+ Name = Cp#checkpoint_args.name,
+ del(pending_checkpoints, Cp#checkpoint_args.pending_tab),
+ del(pending_checkpoint_pids, self()),
+ del(checkpoints, Name),
+ unset({checkpoint, Name}),
+ lists:foreach(fun deactivate_tab/1, Cp#checkpoint_args.retainers),
+ Iters = Cp#checkpoint_args.iterators,
+ lists:foreach(fun(I) -> retainer_fixtable(I#iter.oid_tab, false) end, Iters).
+
+deactivate_tab(R) ->
+ Name = R#retainer.cp_name,
+ Tab = R#retainer.tab_name,
+ del({Tab, checkpoints}, Name), %% Keep checkpoint info for table_info & mnesia_session
+ del_chkp_info(Tab, Name),
+ unset({Tab, {retainer, Name}}),
+ Active = lists:member(node(), R#retainer.writers),
+ case R#retainer.store of
+ undefined ->
+ ignore;
+ Store when Active == true ->
+ retainer_delete(Store);
+ _ ->
+ ignore
+ end.
+
+del_chkp_info(Tab, Name) ->
+ case val({Tab, commit_work}) of
+ [{checkpoints, ChkList} | Rest] ->
+ case lists:delete(Name, ChkList) of
+ [] ->
+ %% The only checkpoint was deleted
+ mnesia_lib:set({Tab, commit_work}, Rest);
+ NewList ->
+ mnesia_lib:set({Tab, commit_work},
+ [{checkpoints, NewList} | Rest])
+ end;
+ _ -> ignore
+ end.
+
+do_del_retainers(Cp, Node) ->
+ Rs = [do_del_retainer2(Cp, R, Node) || R <- Cp#checkpoint_args.retainers],
+ Cp#checkpoint_args{retainers = Rs, nodes = writers(Rs)}.
+
+do_del_retainer2(Cp, R, Node) ->
+ Writers = R#retainer.writers -- [Node],
+ R2 = R#retainer{writers = Writers},
+ set({R2#retainer.tab_name, {retainer, R2#retainer.cp_name}}, R2),
+ if
+ Writers == [] ->
+ Event = {mnesia_checkpoint_deactivated, Cp#checkpoint_args.name},
+ mnesia_lib:report_system_event(Event),
+ do_stop(Cp),
+ exit(shutdown);
+ Node == node() ->
+ deactivate_tab(R), % Avoids unnecessary tm_retain accesses
+ set({R2#retainer.tab_name, {retainer, R2#retainer.cp_name}}, R2),
+ R2;
+ true ->
+ R2
+ end.
+
+do_del_retainer(Cp, R0, Node) ->
+ {R, Rest} = find_retainer(R0, Cp#checkpoint_args.retainers, []),
+ R2 = do_del_retainer2(Cp, R, Node),
+ Rs = [R2|Rest],
+ Cp#checkpoint_args{retainers = Rs, nodes = writers(Rs)}.
+
+do_del_copy(Cp, Tab, ThisNode) when ThisNode == node() ->
+ Name = Cp#checkpoint_args.name,
+ Others = Cp#checkpoint_args.nodes -- [ThisNode],
+ R = val({Tab, {retainer, Name}}),
+ abcast(Others, Name, {del_retainer, R, ThisNode}),
+ do_del_retainer(Cp, R, ThisNode).
+
+do_add_copy(Cp, Tab, Node) when Node /= node()->
+ case lists:member(Tab, Cp#checkpoint_args.max) of
+ false ->
+ {ok, Cp};
+ true ->
+ Name = Cp#checkpoint_args.name,
+ R0 = val({Tab, {retainer, Name}}),
+ W = R0#retainer.writers,
+ R = R0#retainer{writers = W ++ [Node]},
+
+ case lists:member(Node, Cp#checkpoint_args.nodes) of
+ true ->
+ send_retainer(Cp, R, Node);
+ false ->
+ case tm_remote_prepare(Node, Cp) of
+ {ok, Name, _IgnoreNew, Node} ->
+ case lists:member(schema, Cp#checkpoint_args.max) of
+ true ->
+ %% We need to send schema retainer somewhere
+ RS0 = val({schema, {retainer, Name}}),
+ W = RS0#retainer.writers,
+ RS1 = RS0#retainer{writers = W ++ [Node]},
+ case send_retainer(Cp, RS1, Node) of
+ {ok, Cp1} ->
+ send_retainer(Cp1, R, Node);
+ Error ->
+ Error
+ end;
+ false ->
+ send_retainer(Cp, R, Node)
+ end;
+ {badrpc, Reason} ->
+ {{error, {badrpc, Reason}}, Cp};
+ {error, Reason} ->
+ {{error, Reason}, Cp}
+ end
+ end
+ end.
+
+tm_remote_prepare(Node, Cp) ->
+ rpc:call(Node, ?MODULE, tm_prepare, [Cp]).
+
+do_add_retainer(Cp, R0, Node) ->
+ Writers = R0#retainer.writers,
+ {R, Rest} = find_retainer(R0, Cp#checkpoint_args.retainers, []),
+ NewRet =
+ if
+ Node == node() ->
+ prepare_tab(Cp, R#retainer{writers = Writers});
+ true ->
+ R#retainer{writers = Writers}
+ end,
+ Rs = [NewRet | Rest],
+ set({NewRet#retainer.tab_name, {retainer, NewRet#retainer.cp_name}}, NewRet),
+ Cp#checkpoint_args{retainers = Rs, nodes = writers(Rs)}.
+
+find_retainer(#retainer{cp_name = CP, tab_name = Tab},
+ [Ret = #retainer{cp_name = CP, tab_name = Tab} | R], Acc) ->
+ {Ret, R ++ Acc};
+find_retainer(Ret, [H|R], Acc) ->
+ find_retainer(Ret, R, [H|Acc]).
+
+send_retainer(Cp, R, Node) ->
+ Name = Cp#checkpoint_args.name,
+ Nodes0 = Cp#checkpoint_args.nodes -- [Node],
+ Nodes1 = Nodes0 ++ [Node],
+ Nodes = Nodes1 -- [node()],
+ abcast(Nodes, Name, {add_retainer, R, Node}),
+ Store = R#retainer.store,
+%% send_retainer2(Node, Name, Store, retainer_next_slot(Store, 0)),
+ send_retainer2(Node, Name, Store, retainer_first(Store)),
+ Cp2 = do_add_retainer(Cp, R, Node),
+ {ok, Cp2}.
+
+send_retainer2(_, _, _, '$end_of_table') ->
+ ok;
+%%send_retainer2(Node, Name, Store, {Slot, Records}) ->
+send_retainer2(Node, Name, Store, Key) ->
+ [{Tab, _, Records}] = retainer_get(Store, Key),
+ abcast([Node], Name, {retain, {dirty, send_retainer}, Tab, Key, Records}),
+ send_retainer2(Node, Name, Store, retainer_next(Store, Key)).
+
+do_change_copy(Cp, Tab, FromType, ToType) ->
+ Name = Cp#checkpoint_args.name,
+ R = val({Tab, {retainer, Name}}),
+ R2 = prepare_tab(Cp, R, ToType),
+ {_, Old} = R#retainer.store,
+ {_, New} = R2#retainer.store,
+
+ Fname = tab2retainer({Tab, Name}),
+ if
+ FromType == disc_only_copies ->
+ mnesia_lib:dets_sync_close(Old),
+ loaded = mnesia_lib:dets_to_ets(Old, New, Fname, set, no, yes),
+ ok = file:delete(Fname);
+ ToType == disc_only_copies ->
+ TabSize = ?ets_info(Old, size),
+ Props = [{file, Fname},
+ {type, set},
+ {keypos, 2},
+%% {ram_file, true},
+ {estimated_no_objects, TabSize + 256},
+ {repair, false}],
+ {ok, _} = mnesia_lib:dets_sync_open(New, Props),
+ ok = mnesia_dumper:raw_dump_table(New, Old),
+ ?ets_delete_table(Old);
+ true ->
+ ignore
+ end,
+ Pos = #retainer.tab_name,
+ Rs = lists:keyreplace(Tab, Pos, Cp#checkpoint_args.retainers, R2),
+ Cp#checkpoint_args{retainers = Rs, nodes = writers(Rs)}.
+
+check_iter(From, Iter) when Iter#iter.pid == From ->
+ retainer_fixtable(Iter#iter.oid_tab, false),
+ false;
+check_iter(_From, _Iter) ->
+ true.
+
+init_tabs(R, Iter) ->
+ {Kind, _} = Store = R#retainer.store,
+ Main = {Kind, Iter#iter.tab_name},
+ Ret = Store,
+ Iter2 = Iter#iter{main_tab = Main, retainer_tab = Ret},
+ case Iter#iter.source of
+ table -> Iter2#iter{oid_tab = Main};
+ retainer -> Iter2#iter{oid_tab = Ret}
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Iteration
+%%
+%% Iterates over a table and applies Fun(ListOfRecords)
+%% with a suitable amount of records, e.g. 1000 or so.
+%% ListOfRecords is [] when the iteration is over.
+%%
+%% OidKind (the Source argument) affects which internal table is
+%% iterated over and ValKind (the Val argument) affects which table
+%% the actual records are picked from. Legal values for OidKind are
+%% the atoms table and retainer; legal values for ValKind are the
+%% atoms latest and checkpoint.
+%%
+%% The iteration may either be performed over the main table (which
+%% contains the latest values of the records, i.e. the values that
+%% are visible to the applications) or over the checkpoint retainer
+%% (which contains the values as they looked at the point in time when
+%% the checkpoint was activated).
+%%
+%% It is possible to iterate over the main table and pick values
+%% from the retainer and vice versa.
+
+iterate(Name, Tab, Fun, Acc, Source, Val) ->
+ Iter0 = #iter{tab_name = Tab, source = Source, val = Val},
+ case call(Name, {iter_begin, Iter0}) of
+ {error, Reason} ->
+ {error, Reason};
+ {ok, Iter, Pid} ->
+ link(Pid), % We don't want any pending fixtable's
+ Res = (catch iter(Fun, Acc, Iter)),
+ unlink(Pid),
+ call(Name, {iter_end, Iter}),
+ case Res of
+ {'EXIT', Reason} -> {error, Reason};
+ {error, Reason} -> {error, Reason};
+ Acc2 -> {ok, Acc2}
+ end
+ end.
+
+iter(Fun, Acc, Iter)->
+ iter(Fun, Acc, Iter, retainer_first(Iter#iter.oid_tab)).
+
+iter(Fun, Acc, Iter, Key) ->
+ case get_records(Iter, Key) of
+ {'$end_of_table', []} ->
+ Fun([], Acc);
+ {'$end_of_table', Records} ->
+ Acc2 = Fun(Records, Acc),
+ Fun([], Acc2);
+ {Next, Records} ->
+ Acc2 = Fun(Records, Acc),
+ iter(Fun, Acc2, Iter, Next)
+ end.
+
+stop_iteration(Reason) ->
+ throw({error, {stopped, Reason}}).
+
+get_records(Iter, Key) ->
+ get_records(Iter, Key, 500, []). % 500 keys
+
+get_records(_Iter, Key, 0, Acc) ->
+ {Key, lists:append(lists:reverse(Acc))};
+get_records(_Iter, '$end_of_table', _I, Acc) ->
+ {'$end_of_table', lists:append(lists:reverse(Acc))};
+get_records(Iter, Key, I, Acc) ->
+ Recs = get_val(Iter, Key),
+ Next = retainer_next(Iter#iter.oid_tab, Key),
+ get_records(Iter, Next, I-1, [Recs | Acc]).
+
+get_val(Iter, Key) when Iter#iter.val == latest ->
+ get_latest_val(Iter, Key);
+get_val(Iter, Key) when Iter#iter.val == checkpoint ->
+ get_checkpoint_val(Iter, Key).
+
+get_latest_val(Iter, Key) when Iter#iter.source == table ->
+ retainer_get(Iter#iter.main_tab, Key);
+get_latest_val(Iter, Key) when Iter#iter.source == retainer ->
+ DeleteOid = {Iter#iter.tab_name, Key},
+ [DeleteOid | retainer_get(Iter#iter.main_tab, Key)].
+
+get_checkpoint_val(Iter, Key) when Iter#iter.source == table ->
+ retainer_get(Iter#iter.main_tab, Key);
+get_checkpoint_val(Iter, Key) when Iter#iter.source == retainer ->
+ DeleteOid = {Iter#iter.tab_name, Key},
+ case retainer_get(Iter#iter.retainer_tab, Key) of
+ [{_, _, []}] -> [DeleteOid];
+ [{_, _, Records}] -> [DeleteOid | Records]
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% System upgrade
+
+system_continue(_Parent, _Debug, Cp) ->
+ retainer_loop(Cp).
+
+system_terminate(_Reason, _Parent,_Debug, Cp) ->
+ do_stop(Cp).
+
+system_code_change(Cp, _Module, _OldVsn, _Extra) ->
+ {ok, Cp}.
+
+convert_cp_record(Cp) when record(Cp, checkpoint) ->
+ ROD =
+ case Cp#checkpoint.ram_overrides_dump of
+ true -> Cp#checkpoint.min ++ Cp#checkpoint.max;
+ false -> []
+ end,
+
+ {ok, #checkpoint_args{name = Cp#checkpoint.name,
+ allow_remote = Cp#checkpoint.allow_remote,
+ ram_overrides_dump = ROD,
+ nodes = Cp#checkpoint.nodes,
+ node = Cp#checkpoint.node,
+ now = Cp#checkpoint.now,
+ cookie = ?unique_cookie,
+ min = Cp#checkpoint.min,
+ max = Cp#checkpoint.max,
+ pending_tab = Cp#checkpoint.pending_tab,
+ wait_for_old = Cp#checkpoint.wait_for_old,
+ is_activated = Cp#checkpoint.is_activated,
+ ignore_new = Cp#checkpoint.ignore_new,
+ retainers = Cp#checkpoint.retainers,
+ iterators = Cp#checkpoint.iterators,
+ supervisor = Cp#checkpoint.supervisor,
+ pid = Cp#checkpoint.pid
+ }};
+convert_cp_record(Cp) when record(Cp, checkpoint_args) ->
+ AllTabs = Cp#checkpoint_args.min ++ Cp#checkpoint_args.max,
+ ROD = case Cp#checkpoint_args.ram_overrides_dump of
+ [] ->
+ false;
+ AllTabs ->
+ true;
+ _ ->
+ error
+ end,
+ if
+ ROD == error ->
+ {error, {"Old node cannot handle new checkpoint protocol",
+ ram_overrides_dump}};
+ true ->
+ {ok, #checkpoint{name = Cp#checkpoint_args.name,
+ allow_remote = Cp#checkpoint_args.allow_remote,
+ ram_overrides_dump = ROD,
+ nodes = Cp#checkpoint_args.nodes,
+ node = Cp#checkpoint_args.node,
+ now = Cp#checkpoint_args.now,
+ min = Cp#checkpoint_args.min,
+ max = Cp#checkpoint_args.max,
+ pending_tab = Cp#checkpoint_args.pending_tab,
+ wait_for_old = Cp#checkpoint_args.wait_for_old,
+ is_activated = Cp#checkpoint_args.is_activated,
+ ignore_new = Cp#checkpoint_args.ignore_new,
+ retainers = Cp#checkpoint_args.retainers,
+ iterators = Cp#checkpoint_args.iterators,
+ supervisor = Cp#checkpoint_args.supervisor,
+ pid = Cp#checkpoint_args.pid
+ }}
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', _ReASoN_} -> mnesia_lib:other_val(Var, _ReASoN_);
+ _VaLuE_ -> _VaLuE_
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint_sup.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint_sup.erl
new file mode 100644
index 0000000000..36425537eb
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint_sup.erl
@@ -0,0 +1,39 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_checkpoint_sup.erl,v 1.1 2008/12/17 09:53:38 mikpe Exp $
+%%
+-module(mnesia_checkpoint_sup).
+
+-behaviour(supervisor).
+
+-export([start/0, init/1]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% top supervisor callback functions
+
+start() ->
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% sub supervisor callback functions
+
+init([]) ->
+ Flags = {simple_one_for_one, 0, timer:hours(24)}, % Trust the top supervisor
+ MFA = {mnesia_checkpoint, start, []},
+ Modules = [?MODULE, mnesia_checkpoint, supervisor],
+ KillAfter = mnesia_kernel_sup:supervisor_timeout(timer:seconds(3)),
+ Workers = [{?MODULE, MFA, transient, KillAfter, worker, Modules}],
+ {ok, {Flags, Workers}}.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_controller.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_controller.erl
new file mode 100644
index 0000000000..1d7f29bfbd
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_controller.erl
@@ -0,0 +1,2010 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_controller.erl,v 1.3 2010/03/04 13:54:19 maria Exp $
+%%
+%% The mnesia_init process loads tables from local disc or from
+%% other nodes. It also coordinates updates of the info about
+%% where we can read and write tables.
+%%
+%% Tables may need to be loaded initially at startup of the local
+%% node or when other nodes announce that they already have loaded
+%% tables that we also want.
+%%
+%% Initially we set the load request queue to those tables that we
+%% safely can load locally, i.e. tables where we have the last
+%% consistent replica and we have received mnesia_down from all
+%% other nodes holding the table. Then we let the mnesia_init
+%% process enter its normal working state.
+%%
+%% When we need to load a table we append a request to the load
+%% request queue. All other requests are regarded as high priority
+%% and are processed immediately (e.g. update table whereabouts).
+%% We process the load request queue as a "background" job.
+
+-module(mnesia_controller).
+
+-behaviour(gen_server).
+
+%% Mnesia internal stuff
+-export([
+ start/0,
+ i_have_tab/1,
+ info/0,
+ get_info/1,
+ get_workers/1,
+ force_load_table/1,
+ async_dump_log/1,
+ sync_dump_log/1,
+ connect_nodes/1,
+ wait_for_schema_commit_lock/0,
+ release_schema_commit_lock/0,
+ create_table/1,
+ get_disc_copy/1,
+ get_cstructs/0,
+ sync_and_block_table_whereabouts/4,
+ sync_del_table_copy_whereabouts/2,
+ block_table/1,
+ unblock_table/1,
+ block_controller/0,
+ unblock_controller/0,
+ unannounce_add_table_copy/2,
+ master_nodes_updated/2,
+ mnesia_down/1,
+ add_active_replica/2,
+ add_active_replica/3,
+ add_active_replica/4,
+ change_table_access_mode/1,
+ del_active_replica/2,
+ wait_for_tables/2,
+ get_network_copy/2,
+ merge_schema/0,
+ start_remote_sender/4,
+ schedule_late_disc_load/2
+ ]).
+
+%% gen_server callbacks
+-export([init/1,
+ handle_call/3,
+ handle_cast/2,
+ handle_info/2,
+ terminate/2,
+ code_change/3]).
+
+%% Module internal stuff
+-export([call/1,
+ cast/1,
+ dump_and_reply/2,
+ load_and_reply/2,
+ send_and_reply/2,
+ wait_for_tables_init/2
+ ]).
+
+-import(mnesia_lib, [set/2, add/2]).
+-import(mnesia_lib, [fatal/2, error/2, verbose/2, dbg_out/2]).
+
+-include("mnesia.hrl").
+
+-define(SERVER_NAME, ?MODULE).
+
+-record(state, {supervisor,
+ schema_is_merged = false,
+ early_msgs = [],
+ loader_pid,
+ loader_queue = [],
+ sender_pid,
+ sender_queue = [],
+ late_loader_queue = [],
+ dumper_pid, % Dumper or schema commit pid
+ dumper_queue = [], % Dumper or schema commit queue
+ dump_log_timer_ref,
+ is_stopping = false
+ }).
+
+-record(worker_reply, {what,
+ pid,
+ result
+ }).
+
+-record(schema_commit_lock, {owner}).
+-record(block_controller, {owner}).
+
+-record(dump_log, {initiated_by,
+ opt_reply_to
+ }).
+
+-record(net_load, {table,
+ reason,
+ opt_reply_to,
+ cstruct = unknown
+ }).
+
+-record(send_table, {table,
+ receiver_pid,
+ remote_storage
+ }).
+
+-record(disc_load, {table,
+ reason,
+ opt_reply_to
+ }).
+
+-record(late_load, {table,
+ reason,
+ opt_reply_to,
+ loaders
+ }).
+
+-record(loader_done, {worker_pid,
+ is_loaded,
+ table_name,
+ needs_announce,
+ needs_sync,
+ needs_reply,
+ reply_to,
+ reply}).
+
+-record(sender_done, {worker_pid,
+ worker_res,
+ table_name
+ }).
+
+-record(dumper_done, {worker_pid,
+ worker_res
+ }).
+
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason);
+ Value -> Value
+ end.
+
+start() ->
+ gen_server:start_link({local, ?SERVER_NAME}, ?MODULE, [self()],
+ [{timeout, infinity}
+ %% ,{debug, [trace]}
+ ]).
+
+sync_dump_log(InitBy) ->
+ call({sync_dump_log, InitBy}).
+
+async_dump_log(InitBy) ->
+ ?SERVER_NAME ! {async_dump_log, InitBy}.
+
+%% Wait for tables to be active
+%% If needed, we will wait for Mnesia to start
+%% If Mnesia stops, we will wait for Mnesia to restart
+%% We will wait even if the list of tables is empty
+%%
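+%% Illustrative sketch (the table names foo and bar are assumptions):
+%% block until the tables are loaded, giving up after 30 seconds.
+%% The public wrapper for this is mnesia:wait_for_tables/2.
+%%
+%% case mnesia_controller:wait_for_tables([foo, bar], 30000) of
+%%     ok                 -> ready;
+%%     {timeout, BadTabs} -> {not_loaded, BadTabs};
+%%     {error, Reason}    -> {error, Reason}
+%% end.
+%%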
+wait_for_tables(Tabs, Timeout) when list(Tabs), Timeout == infinity ->
+ do_wait_for_tables(Tabs, Timeout);
+wait_for_tables(Tabs, Timeout) when list(Tabs),
+ integer(Timeout), Timeout >= 0 ->
+ do_wait_for_tables(Tabs, Timeout);
+wait_for_tables(Tabs, Timeout) ->
+ {error, {badarg, Tabs, Timeout}}.
+
+do_wait_for_tables(Tabs, 0) ->
+ reply_wait(Tabs);
+do_wait_for_tables(Tabs, Timeout) ->
+ Pid = spawn_link(?MODULE, wait_for_tables_init, [self(), Tabs]),
+ receive
+ {?SERVER_NAME, Pid, Res} ->
+ Res;
+
+ {'EXIT', Pid, _} ->
+ reply_wait(Tabs)
+
+ after Timeout ->
+ unlink(Pid),
+ exit(Pid, timeout),
+ reply_wait(Tabs)
+ end.
+
+reply_wait(Tabs) ->
+ case catch mnesia_lib:active_tables() of
+ {'EXIT', _} ->
+ {error, {node_not_running, node()}};
+ Active when list(Active) ->
+ case Tabs -- Active of
+ [] ->
+ ok;
+ BadTabs ->
+ {timeout, BadTabs}
+ end
+ end.
+
+wait_for_tables_init(From, Tabs) ->
+ process_flag(trap_exit, true),
+ Res = wait_for_init(From, Tabs, whereis(?SERVER_NAME)),
+ From ! {?SERVER_NAME, self(), Res},
+ unlink(From),
+ exit(normal).
+
+wait_for_init(From, Tabs, Init) ->
+ case catch link(Init) of
+ {'EXIT', _} ->
+ %% Mnesia is not started
+ {error, {node_not_running, node()}};
+ true when pid(Init) ->
+ cast({sync_tabs, Tabs, self()}),
+ rec_tabs(Tabs, Tabs, From, Init)
+ end.
+
+sync_reply(Waiter, Tab) ->
+ Waiter ! {?SERVER_NAME, {tab_synced, Tab}}.
+
+rec_tabs([Tab | Tabs], AllTabs, From, Init) ->
+ receive
+ {?SERVER_NAME, {tab_synced, Tab}} ->
+ rec_tabs(Tabs, AllTabs, From, Init);
+
+ {'EXIT', From, _} ->
+ %% This will trigger an exit signal
+ %% to mnesia_init
+ exit(wait_for_tables_timeout);
+
+ {'EXIT', Init, _} ->
+ %% Oops, mnesia_init stopped,
+ exit(mnesia_stopped)
+ end;
+rec_tabs([], _, _, Init) ->
+ unlink(Init),
+ ok.
+
+get_cstructs() ->
+ call(get_cstructs).
+
+mnesia_down(Node) ->
+ case cast({mnesia_down, Node}) of
+ {error, _} -> mnesia_monitor:mnesia_down(?SERVER_NAME, Node);
+ _Pid -> ok
+ end.
+wait_for_schema_commit_lock() ->
+ link(whereis(?SERVER_NAME)),
+ unsafe_call(wait_for_schema_commit_lock).
+
+block_controller() ->
+ call(block_controller).
+
+unblock_controller() ->
+ cast(unblock_controller).
+
+release_schema_commit_lock() ->
+ cast({release_schema_commit_lock, self()}),
+ unlink(whereis(?SERVER_NAME)).
+
+%% Special for preparation of add table copy
+get_network_copy(Tab, Cs) ->
+ Work = #net_load{table = Tab,
+ reason = {dumper, add_table_copy},
+ cstruct = Cs
+ },
+ Res = (catch load_table(Work)),
+ if Res#loader_done.is_loaded == true ->
+ Tab = Res#loader_done.table_name,
+ case Res#loader_done.needs_announce of
+ true ->
+ i_have_tab(Tab);
+ false ->
+ ignore
+ end;
+ true -> ignore
+ end,
+
+ receive %% Flush copier done message
+ {copier_done, _Node} ->
+ ok
+ after 500 -> %% avoid hanging if something is wrong and we shall fail.
+ ignore
+ end,
+ Res#loader_done.reply.
+
+%% This function is invoked from the dumper
+%%
+%% There are two cases here:
+%% startup ->
+%% no need for sync, since mnesia_controller not started yet
+%% schema_trans ->
+%% already synced with mnesia_controller since the dumper
+%% is synchronously started from mnesia_controller
+
+create_table(Tab) ->
+ {loaded, ok} = mnesia_loader:disc_load_table(Tab, {dumper,create_table}).
+
+get_disc_copy(Tab) ->
+ disc_load_table(Tab, {dumper,change_table_copy_type}, undefined).
+
+%% Returns ok instead of yes
+force_load_table(Tab) when atom(Tab), Tab /= schema ->
+ case ?catch_val({Tab, storage_type}) of
+ ram_copies ->
+ do_force_load_table(Tab);
+ disc_copies ->
+ do_force_load_table(Tab);
+ disc_only_copies ->
+ do_force_load_table(Tab);
+ unknown ->
+ set({Tab, load_by_force}, true),
+ cast({force_load_updated, Tab}),
+ wait_for_tables([Tab], infinity);
+ {'EXIT', _} ->
+ {error, {no_exists, Tab}}
+ end;
+force_load_table(Tab) ->
+ {error, {bad_type, Tab}}.
+
+do_force_load_table(Tab) ->
+ Loaded = ?catch_val({Tab, load_reason}),
+ case Loaded of
+ unknown ->
+ set({Tab, load_by_force}, true),
+ mnesia_late_loader:async_late_disc_load(node(), [Tab], forced_by_user),
+ wait_for_tables([Tab], infinity);
+ {'EXIT', _} ->
+ set({Tab, load_by_force}, true),
+ mnesia_late_loader:async_late_disc_load(node(), [Tab], forced_by_user),
+ wait_for_tables([Tab], infinity);
+ _ ->
+ ok
+ end.
+master_nodes_updated(schema, _Masters) ->
+ ignore;
+master_nodes_updated(Tab, Masters) ->
+ cast({master_nodes_updated, Tab, Masters}).
+
+schedule_late_disc_load(Tabs, Reason) ->
+ MsgTag = late_disc_load,
+ try_schedule_late_disc_load(Tabs, Reason, MsgTag).
+
+try_schedule_late_disc_load(Tabs, _Reason, MsgTag)
+ when Tabs == [], MsgTag /= schema_is_merged ->
+ ignore;
+try_schedule_late_disc_load(Tabs, Reason, MsgTag) ->
+ GetIntents =
+ fun() ->
+ Item = mnesia_late_disc_load,
+ Nodes = val({current, db_nodes}),
+ mnesia:lock({global, Item, Nodes}, write),
+ case multicall(Nodes -- [node()], disc_load_intents) of
+ {Replies, []} ->
+ call({MsgTag, Tabs, Reason, Replies}),
+ done;
+ {_, BadNodes} ->
+ %% Some nodes did not respond, let's try again
+ {retry, BadNodes}
+ end
+ end,
+ case mnesia:transaction(GetIntents) of
+ {'atomic', done} ->
+ done;
+ {'atomic', {retry, BadNodes}} ->
+ verbose("Retry late_load_tables because bad nodes: ~p~n",
+ [BadNodes]),
+ try_schedule_late_disc_load(Tabs, Reason, MsgTag);
+ {aborted, AbortReason} ->
+ fatal("Cannot late_load_tables~p: ~p~n",
+ [[Tabs, Reason, MsgTag], AbortReason])
+ end.
+
+connect_nodes(Ns) ->
+ case mnesia:system_info(is_running) of
+ no ->
+ {error, {node_not_running, node()}};
+ yes ->
+ {NewC, OldC} = mnesia_recover:connect_nodes(Ns),
+ Connected = NewC ++OldC,
+ New1 = mnesia_lib:intersect(Ns, Connected),
+ New = New1 -- val({current, db_nodes}),
+
+ case try_merge_schema(New) of
+ ok ->
+ mnesia_lib:add_list(extra_db_nodes, New),
+ {ok, New};
+ {aborted, {throw, Str}} when list(Str) ->
+ %%mnesia_recover:disconnect_nodes(New),
+ {error, {merge_schema_failed, lists:flatten(Str)}};
+ Else ->
+ %% Disconnect nodes where merge failed!!
+ %% mnesia_recover:disconnect_nodes(New),
+ {error, Else}
+ end
+ end.
+
+%% Merge the local schema with the schema on other nodes.
+%% But first we must let all processes that want to force
+%% load tables wait until the schema merge is done.
+
+merge_schema() ->
+ AllNodes = mnesia_lib:all_nodes(),
+ case try_merge_schema(AllNodes) of
+ ok ->
+ schema_is_merged();
+ {aborted, {throw, Str}} when list(Str) ->
+ fatal("Failed to merge schema: ~s~n", [Str]);
+ Else ->
+ fatal("Failed to merge schema: ~p~n", [Else])
+ end.
+
+try_merge_schema(Nodes) ->
+ case mnesia_schema:merge_schema() of
+ {'atomic', not_merged} ->
+ %% No more nodes that we need to merge the schema with
+ ok;
+ {'atomic', {merged, OldFriends, NewFriends}} ->
+ %% Check if new nodes have been added to the schema
+ Diff = mnesia_lib:all_nodes() -- [node() | Nodes],
+ mnesia_recover:connect_nodes(Diff),
+
+ %% Tell everybody to adopt orphan tables
+ im_running(OldFriends, NewFriends),
+ im_running(NewFriends, OldFriends),
+
+ try_merge_schema(Nodes);
+ {'atomic', {"Cannot get cstructs", Node, Reason}} ->
+ dbg_out("Cannot get cstructs, Node ~p ~p~n", [Node, Reason]),
+ timer:sleep(1000), % Avoid an endless-loop look-alike
+ try_merge_schema(Nodes);
+ Other ->
+ Other
+ end.
+
+im_running(OldFriends, NewFriends) ->
+ abcast(OldFriends, {im_running, node(), NewFriends}).
+
+schema_is_merged() ->
+ MsgTag = schema_is_merged,
+ SafeLoads = initial_safe_loads(),
+
+ %% At this point we do not know anything about
+%% which tables the other nodes have already
+%% loaded, and therefore we let the normal
+%% processing of the loader_queue take care
+%% of it, since at that point in time we will
+%% know the whereabouts. We rely on the fact
+%% that all nodes tell each other directly
+ %% when they have loaded a table and are
+ %% willing to share it.
+
+ try_schedule_late_disc_load(SafeLoads, initial, MsgTag).
+
+
+cast(Msg) ->
+ case whereis(?SERVER_NAME) of
+ undefined ->{error, {node_not_running, node()}};
+ Pid -> gen_server:cast(Pid, Msg)
+ end.
+
+abcast(Nodes, Msg) ->
+ gen_server:abcast(Nodes, ?SERVER_NAME, Msg).
+
+unsafe_call(Msg) ->
+ case whereis(?SERVER_NAME) of
+ undefined -> {error, {node_not_running, node()}};
+ Pid -> gen_server:call(Pid, Msg, infinity)
+ end.
+
+call(Msg) ->
+ case whereis(?SERVER_NAME) of
+ undefined ->
+ {error, {node_not_running, node()}};
+ Pid ->
+ link(Pid),
+ Res = gen_server:call(Pid, Msg, infinity),
+ unlink(Pid),
+
+ %% We get an exit signal if server dies
+ receive
+ {'EXIT', Pid, _Reason} ->
+ {error, {node_not_running, node()}}
+ after 0 ->
+ ignore
+ end,
+ Res
+ end.
+
+remote_call(Node, Func, Args) ->
+ case catch gen_server:call({?MODULE, Node}, {Func, Args, self()}, infinity) of
+ {'EXIT', Error} ->
+ {error, Error};
+ Else ->
+ Else
+ end.
+
+multicall(Nodes, Msg) ->
+ {Good, Bad} = gen_server:multi_call(Nodes, ?MODULE, Msg, infinity),
+ PatchedGood = [Reply || {_Node, Reply} <- Good],
+ {PatchedGood, Bad}. %% Make the replies look like rpc:multicalls..
+%% rpc:multicall(Nodes, ?MODULE, call, [Msg]).
+
+%%%----------------------------------------------------------------------
+%%% Callback functions from gen_server
+%%%----------------------------------------------------------------------
+
+%%----------------------------------------------------------------------
+%% Func: init/1
+%% Returns: {ok, State} |
+%% {ok, State, Timeout} |
+%% {stop, Reason}
+%%----------------------------------------------------------------------
+init([Parent]) ->
+ process_flag(trap_exit, true),
+ mnesia_lib:verbose("~p starting: ~p~n", [?SERVER_NAME, self()]),
+
+ %% Handshake and initialize transaction recovery
+ %% for new nodes detected in the schema
+ All = mnesia_lib:all_nodes(),
+ Diff = All -- [node() | val(original_nodes)],
+ mnesia_lib:unset(original_nodes),
+ mnesia_recover:connect_nodes(Diff),
+
+ Interval = mnesia_monitor:get_env(dump_log_time_threshold),
+ Msg = {async_dump_log, time_threshold},
+ {ok, Ref} = timer:send_interval(Interval, Msg),
+ mnesia_dumper:start_regulator(),
+
+ {ok, #state{supervisor = Parent, dump_log_timer_ref = Ref}}.
+
+%%----------------------------------------------------------------------
+%% Func: handle_call/3
+%% Returns: {reply, Reply, State} |
+%% {reply, Reply, State, Timeout} |
+%% {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, Reply, State} | (terminate/2 is called)
+%% {stop, Reason, Reply, State} (terminate/2 is called)
+%%----------------------------------------------------------------------
+
+handle_call({sync_dump_log, InitBy}, From, State) ->
+ Worker = #dump_log{initiated_by = InitBy,
+ opt_reply_to = From
+ },
+ State2 = add_worker(Worker, State),
+ noreply(State2);
+
+handle_call(wait_for_schema_commit_lock, From, State) ->
+ Worker = #schema_commit_lock{owner = From},
+ State2 = add_worker(Worker, State),
+ noreply(State2);
+
+handle_call(block_controller, From, State) ->
+ Worker = #block_controller{owner = From},
+ State2 = add_worker(Worker, State),
+ noreply(State2);
+
+
+handle_call(get_cstructs, From, State) ->
+ Tabs = val({schema, tables}),
+ Cstructs = [val({T, cstruct}) || T <- Tabs],
+ Running = val({current, db_nodes}),
+ reply(From, {cstructs, Cstructs, Running}),
+ noreply(State);
+
+handle_call({schema_is_merged, TabsR, Reason, RemoteLoaders}, From, State) ->
+ State2 = late_disc_load(TabsR, Reason, RemoteLoaders, From, State),
+
+ %% Handle early messages
+ Msgs = State2#state.early_msgs,
+ State3 = State2#state{early_msgs = [], schema_is_merged = true},
+ Ns = val({current, db_nodes}),
+ dbg_out("Schema is merged ~w, State ~w~n", [Ns, State3]),
+%% dbg_out("handle_early_msgs ~p ~n", [Msgs]), % qqqq
+ handle_early_msgs(lists:reverse(Msgs), State3);
+
+handle_call(disc_load_intents, From, State) ->
+ Tabs = disc_load_intents(State#state.loader_queue) ++
+ disc_load_intents(State#state.late_loader_queue),
+ ActiveTabs = mnesia_lib:local_active_tables(),
+ reply(From, {ok, node(), mnesia_lib:union(Tabs, ActiveTabs)}),
+ noreply(State);
+
+handle_call({update_where_to_write, [add, Tab, AddNode], _From}, _Dummy, State) ->
+%%% dbg_out("update_w2w ~p", [[add, Tab, AddNode]]), %%% qqqq
+ Current = val({current, db_nodes}),
+ Res =
+ case lists:member(AddNode, Current) and
+ State#state.schema_is_merged == true of
+ true ->
+ mnesia_lib:add({Tab, where_to_write}, AddNode);
+ false ->
+ ignore
+ end,
+ {reply, Res, State};
+
+handle_call({add_active_replica, [Tab, ToNode, RemoteS, AccessMode], From},
+ ReplyTo, State) ->
+ KnownNode = lists:member(ToNode, val({current, db_nodes})),
+ Merged = State#state.schema_is_merged,
+ if
+ KnownNode == false ->
+ reply(ReplyTo, ignore),
+ noreply(State);
+ Merged == true ->
+ Res = add_active_replica(Tab, ToNode, RemoteS, AccessMode),
+ reply(ReplyTo, Res),
+ noreply(State);
+ true -> %% Schema is not merged
+ Msg = {add_active_replica, [Tab, ToNode, RemoteS, AccessMode], From},
+ Msgs = State#state.early_msgs,
+ reply(ReplyTo, ignore), %% Reply ignore and add data after schema merge
+ noreply(State#state{early_msgs = [{call, Msg, undefined} | Msgs]})
+ end;
+
+handle_call({unannounce_add_table_copy, [Tab, Node], From}, ReplyTo, State) ->
+ KnownNode = lists:member(node(From), val({current, db_nodes})),
+ Merged = State#state.schema_is_merged,
+ if
+ KnownNode == false ->
+ reply(ReplyTo, ignore),
+ noreply(State);
+ Merged == true ->
+ Res = unannounce_add_table_copy(Tab, Node),
+ reply(ReplyTo, Res),
+ noreply(State);
+ true -> %% Schema is not merged
+ Msg = {unannounce_add_table_copy, [Tab, Node], From},
+ Msgs = State#state.early_msgs,
+ reply(ReplyTo, ignore), %% Reply ignore and add data after schema merge
+ %% Set ReplyTo to undefined so we don't reply twice
+ noreply(State#state{early_msgs = [{call, Msg, undefined} | Msgs]})
+ end;
+
+handle_call(Msg, From, State) when State#state.schema_is_merged == false ->
+ %% Buffer early messages
+%% dbg_out("Buffered early msg ~p ~n", [Msg]), %% qqqq
+ Msgs = State#state.early_msgs,
+ noreply(State#state{early_msgs = [{call, Msg, From} | Msgs]});
+
+handle_call({net_load, Tab, Cs}, From, State) ->
+ Worker = #net_load{table = Tab,
+ opt_reply_to = From,
+ reason = add_table_copy,
+ cstruct = Cs
+ },
+ State2 = add_worker(Worker, State),
+ noreply(State2);
+
+handle_call({late_disc_load, Tabs, Reason, RemoteLoaders}, From, State) ->
+ State2 = late_disc_load(Tabs, Reason, RemoteLoaders, From, State),
+ noreply(State2);
+
+handle_call({block_table, [Tab], From}, _Dummy, State) ->
+ case lists:member(node(From), val({current, db_nodes})) of
+ true ->
+ block_table(Tab);
+ false ->
+ ignore
+ end,
+ {reply, ok, State};
+
+handle_call({check_w2r, _Node, Tab}, _From, State) ->
+ {reply, val({Tab, where_to_read}), State};
+
+handle_call(Msg, _From, State) ->
+ error("~p got unexpected call: ~p~n", [?SERVER_NAME, Msg]),
+ noreply(State).
+
+disc_load_intents([H | T]) when record(H, disc_load) ->
+ [H#disc_load.table | disc_load_intents(T)];
+disc_load_intents([H | T]) when record(H, late_load) ->
+ [H#late_load.table | disc_load_intents(T)];
+disc_load_intents([H | T]) when record(H, net_load) ->
+ disc_load_intents(T);
+disc_load_intents([]) ->
+ [].
+
+late_disc_load(TabsR, Reason, RemoteLoaders, From, State) ->
+ verbose("Intend to load tables: ~p~n", [TabsR]),
+ ?eval_debug_fun({?MODULE, late_disc_load},
+ [{tabs, TabsR},
+ {reason, Reason},
+ {loaders, RemoteLoaders}]),
+
+ reply(From, queued),
+ %% RemoteLoaders is a list of {ok, Node, Tabs} tuples
+
+ %% Remove deleted tabs
+ LocalTabs = mnesia_lib:val({schema, local_tables}),
+ Filter = fun({Tab, Reas}, Acc) ->
+ case lists:member(Tab, LocalTabs) of
+ true -> [{Tab, Reas} | Acc];
+ false -> Acc
+ end;
+ (Tab, Acc) ->
+ case lists:member(Tab, LocalTabs) of
+ true -> [Tab | Acc];
+ false -> Acc
+ end
+ end,
+
+ Tabs = lists:foldl(Filter, [], TabsR),
+
+ Nodes = val({current, db_nodes}),
+ LateLoaders = late_loaders(Tabs, Reason, RemoteLoaders, Nodes),
+ LateQueue = State#state.late_loader_queue ++ LateLoaders,
+ State#state{late_loader_queue = LateQueue}.
+
+late_loaders([{Tab, Reason} | Tabs], DefaultReason, RemoteLoaders, Nodes) ->
+ LoadNodes = late_load_filter(RemoteLoaders, Tab, Nodes, []),
+ case LoadNodes of
+ [] ->
+ cast({disc_load, Tab, Reason}); % Ugly cast
+ _ ->
+ ignore
+ end,
+ LateLoad = #late_load{table = Tab, loaders = LoadNodes, reason = Reason},
+ [LateLoad | late_loaders(Tabs, DefaultReason, RemoteLoaders, Nodes)];
+
+late_loaders([Tab | Tabs], Reason, RemoteLoaders, Nodes) ->
+ Loaders = late_load_filter(RemoteLoaders, Tab, Nodes, []),
+ case Loaders of
+ [] ->
+ cast({disc_load, Tab, Reason}); % Ugly cast
+ _ ->
+ ignore
+ end,
+ LateLoad = #late_load{table = Tab, loaders = Loaders, reason = Reason},
+ [LateLoad | late_loaders(Tabs, Reason, RemoteLoaders, Nodes)];
+late_loaders([], _Reason, _RemoteLoaders, _Nodes) ->
+ [].
+
+late_load_filter([{error, _} | RemoteLoaders], Tab, Nodes, Acc) ->
+ late_load_filter(RemoteLoaders, Tab, Nodes, Acc);
+late_load_filter([{badrpc, _} | RemoteLoaders], Tab, Nodes, Acc) ->
+ late_load_filter(RemoteLoaders, Tab, Nodes, Acc);
+late_load_filter([RL | RemoteLoaders], Tab, Nodes, Acc) ->
+ {ok, Node, Intents} = RL,
+ Access = val({Tab, access_mode}),
+ LocalC = val({Tab, local_content}),
+ StillActive = lists:member(Node, Nodes),
+ RemoteIntent = lists:member(Tab, Intents),
+ if
+ Access == read_write,
+ LocalC == false,
+ StillActive == true,
+ RemoteIntent == true ->
+ Masters = mnesia_recover:get_master_nodes(Tab),
+ case lists:member(Node, Masters) of
+ true ->
+ %% The other node is a master node for
+ %% the table, accept its load intent
+ late_load_filter(RemoteLoaders, Tab, Nodes, [Node | Acc]);
+ false when Masters == [] ->
+ %% The table has no master nodes,
+ %% accept its load intent
+ late_load_filter(RemoteLoaders, Tab, Nodes, [Node | Acc]);
+ false ->
+ %% Someone else is a master node for
+ %% the table, ignore its load intent
+ late_load_filter(RemoteLoaders, Tab, Nodes, Acc)
+ end;
+ true ->
+ late_load_filter(RemoteLoaders, Tab, Nodes, Acc)
+ end;
+late_load_filter([], _Tab, _Nodes, Acc) ->
+ Acc.
+
+%%----------------------------------------------------------------------
+%% Func: handle_cast/2
+%% Returns: {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State} (terminate/2 is called)
+%%----------------------------------------------------------------------
+
+handle_cast({release_schema_commit_lock, _Owner}, State) ->
+ if
+ State#state.is_stopping == true ->
+ {stop, shutdown, State};
+ true ->
+ case State#state.dumper_queue of
+ [#schema_commit_lock{}|Rest] ->
+ [_Worker | Rest] = State#state.dumper_queue,
+ State2 = State#state{dumper_pid = undefined,
+ dumper_queue = Rest},
+ State3 = opt_start_worker(State2),
+ noreply(State3);
+ _ ->
+ noreply(State)
+ end
+ end;
+
+handle_cast(unblock_controller, State) ->
+ if
+ State#state.is_stopping == true ->
+ {stop, shutdown, State};
+ record(hd(State#state.dumper_queue), block_controller) ->
+ [_Worker | Rest] = State#state.dumper_queue,
+ State2 = State#state{dumper_pid = undefined,
+ dumper_queue = Rest},
+ State3 = opt_start_worker(State2),
+ noreply(State3)
+ end;
+
+handle_cast({mnesia_down, Node}, State) ->
+ maybe_log_mnesia_down(Node),
+ mnesia_lib:del({current, db_nodes}, Node),
+ mnesia_checkpoint:tm_mnesia_down(Node),
+ Alltabs = val({schema, tables}),
+ State2 = reconfigure_tables(Node, State, Alltabs),
+ case State#state.sender_pid of
+ undefined -> ignore;
+ Pid when pid(Pid) -> Pid ! {copier_done, Node}
+ end,
+ case State#state.loader_pid of
+ undefined -> ignore;
+ Pid2 when pid(Pid2) -> Pid2 ! {copier_done, Node}
+ end,
+ NewSenders =
+ case State#state.sender_queue of
+ [OldSender | RestSenders] ->
+ Remove = fun(ST) ->
+ node(ST#send_table.receiver_pid) /= Node
+ end,
+ NewS = lists:filter(Remove, RestSenders),
+ %% Keep the old sender; it will be removed by sender_done
+ [OldSender | NewS];
+ [] ->
+ []
+ end,
+ Early = remove_early_messages(State2#state.early_msgs, Node),
+ mnesia_monitor:mnesia_down(?SERVER_NAME, Node),
+ noreply(State2#state{sender_queue = NewSenders, early_msgs = Early});
+
+handle_cast({im_running, _Node, NewFriends}, State) ->
+ Tabs = mnesia_lib:local_active_tables() -- [schema],
+ Ns = mnesia_lib:intersect(NewFriends, val({current, db_nodes})),
+ abcast(Ns, {adopt_orphans, node(), Tabs}),
+ noreply(State);
+
+handle_cast(Msg, State) when State#state.schema_is_merged == false ->
+ %% Buffer early messages
+ Msgs = State#state.early_msgs,
+ noreply(State#state{early_msgs = [{cast, Msg} | Msgs]});
+
+handle_cast({disc_load, Tab, Reason}, State) ->
+ Worker = #disc_load{table = Tab, reason = Reason},
+ State2 = add_worker(Worker, State),
+ noreply(State2);
+
+handle_cast(Worker, State) when record(Worker, send_table) ->
+ State2 = add_worker(Worker, State),
+ noreply(State2);
+
+handle_cast({sync_tabs, Tabs, From}, State) ->
+ %% user initiated wait_for_tables
+ handle_sync_tabs(Tabs, From),
+ noreply(State);
+
+handle_cast({i_have_tab, Tab, Node}, State) ->
+ case lists:member(Node, val({current, db_nodes})) of
+ true ->
+ State2 = node_has_tabs([Tab], Node, State),
+ noreply(State2);
+ false ->
+ noreply(State)
+ end;
+
+handle_cast({force_load_updated, Tab}, State) ->
+ case val({Tab, active_replicas}) of
+ [] ->
+ %% No valid replicas
+ noreply(State);
+ [SomeNode | _] ->
+ State2 = node_has_tabs([Tab], SomeNode, State),
+ noreply(State2)
+ end;
+
+handle_cast({master_nodes_updated, Tab, Masters}, State) ->
+ Active = val({Tab, active_replicas}),
+ Valid =
+ case val({Tab, load_by_force}) of
+ true ->
+ Active;
+ false ->
+ if
+ Masters == [] ->
+ Active;
+ true ->
+ mnesia_lib:intersect(Masters, Active)
+ end
+ end,
+ case Valid of
+ [] ->
+ %% No valid replicas
+ noreply(State);
+ [SomeNode | _] ->
+ State2 = node_has_tabs([Tab], SomeNode, State),
+ noreply(State2)
+ end;
+
+handle_cast({adopt_orphans, Node, Tabs}, State) ->
+
+ State2 = node_has_tabs(Tabs, Node, State),
+
+ %% Register the other node as up and running
+ mnesia_recover:log_mnesia_up(Node),
+ verbose("Logging mnesia_up ~w~n", [Node]),
+ mnesia_lib:report_system_event({mnesia_up, Node}),
+
+ %% Load orphan tables
+ LocalTabs = val({schema, local_tables}) -- [schema],
+ Nodes = val({current, db_nodes}),
+ {LocalOrphans, RemoteMasters} =
+ orphan_tables(LocalTabs, Node, Nodes, [], []),
+ Reason = {adopt_orphan, node()},
+ mnesia_late_loader:async_late_disc_load(node(), LocalOrphans, Reason),
+
+ Fun =
+ fun(N) ->
+ RemoteOrphans =
+ [Tab || {Tab, Ns} <- RemoteMasters,
+ lists:member(N, Ns)],
+ mnesia_late_loader:maybe_async_late_disc_load(N, RemoteOrphans, Reason)
+ end,
+ lists:foreach(Fun, Nodes),
+
+ Queue = State2#state.loader_queue,
+ State3 = State2#state{loader_queue = Queue},
+ noreply(State3);
+
+handle_cast(Msg, State) ->
+ error("~p got unexpected cast: ~p~n", [?SERVER_NAME, Msg]),
+ noreply(State).
+
+handle_sync_tabs([Tab | Tabs], From) ->
+ case val({Tab, where_to_read}) of
+ nowhere ->
+ case get({sync_tab, Tab}) of
+ undefined ->
+ put({sync_tab, Tab}, [From]);
+ Pids ->
+ put({sync_tab, Tab}, [From | Pids])
+ end;
+ _ ->
+ sync_reply(From, Tab)
+ end,
+ handle_sync_tabs(Tabs, From);
+handle_sync_tabs([], _From) ->
+ ok.
+
+%%----------------------------------------------------------------------
+%% Func: handle_info/2
+%% Returns: {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State} (terminate/2 is called)
+%%----------------------------------------------------------------------
+
+handle_info({async_dump_log, InitBy}, State) ->
+ Worker = #dump_log{initiated_by = InitBy},
+ State2 = add_worker(Worker, State),
+ noreply(State2);
+
+handle_info(Done, State) when record(Done, dumper_done) ->
+ Pid = Done#dumper_done.worker_pid,
+ Res = Done#dumper_done.worker_res,
+ if
+ State#state.is_stopping == true ->
+ {stop, shutdown, State};
+ Res == dumped, Pid == State#state.dumper_pid ->
+ [Worker | Rest] = State#state.dumper_queue,
+ reply(Worker#dump_log.opt_reply_to, Res),
+ State2 = State#state{dumper_pid = undefined,
+ dumper_queue = Rest},
+ State3 = opt_start_worker(State2),
+ noreply(State3);
+ true ->
+ fatal("Dumper failed: ~p~n state: ~p~n", [Res, State]),
+ {stop, fatal, State}
+ end;
+
+handle_info(Done, State) when record(Done, loader_done) ->
+ if
+ %% Assertion
+ Done#loader_done.worker_pid == State#state.loader_pid -> ok
+ end,
+
+ [_Worker | Rest] = LoadQ0 = State#state.loader_queue,
+ LateQueue0 = State#state.late_loader_queue,
+ {LoadQ, LateQueue} =
+ case Done#loader_done.is_loaded of
+ true ->
+ Tab = Done#loader_done.table_name,
+
+ %% Optional user sync
+ case Done#loader_done.needs_sync of
+ true -> user_sync_tab(Tab);
+ false -> ignore
+ end,
+
+ %% Optional table announcement
+ case Done#loader_done.needs_announce of
+ true ->
+ i_have_tab(Tab),
+ case Tab of
+ schema ->
+ ignore;
+ _ ->
+ %% Local node needs to perform user_sync_tab/1
+ Ns = val({current, db_nodes}),
+ abcast(Ns, {i_have_tab, Tab, node()})
+ end;
+ false ->
+ case Tab of
+ schema ->
+ ignore;
+ _ ->
+ %% Local node needs to perform user_sync_tab/1
+ Ns = val({current, db_nodes}),
+ AlreadyKnows = val({Tab, active_replicas}),
+ abcast(Ns -- AlreadyKnows, {i_have_tab, Tab, node()})
+ end
+ end,
+
+ %% Optional client reply
+ case Done#loader_done.needs_reply of
+ true ->
+ reply(Done#loader_done.reply_to,
+ Done#loader_done.reply);
+ false ->
+ ignore
+ end,
+ {Rest, reply_late_load(Tab, LateQueue0)};
+ false ->
+ case Done#loader_done.reply of
+ restart ->
+ {LoadQ0, LateQueue0};
+ _ ->
+ {Rest, LateQueue0}
+ end
+ end,
+
+ State2 = State#state{loader_pid = undefined,
+ loader_queue = LoadQ,
+ late_loader_queue = LateQueue},
+
+ State3 = opt_start_worker(State2),
+ noreply(State3);
+
+handle_info(Done, State) when record(Done, sender_done) ->
+ Pid = Done#sender_done.worker_pid,
+ Res = Done#sender_done.worker_res,
+ if
+ Res == ok, Pid == State#state.sender_pid ->
+ [Worker | Rest] = State#state.sender_queue,
+ Worker#send_table.receiver_pid ! {copier_done, node()},
+ State2 = State#state{sender_pid = undefined,
+ sender_queue = Rest},
+ State3 = opt_start_worker(State2),
+ noreply(State3);
+ true ->
+ %% No need to send any message to the table receiver
+ %% since it will soon get a mnesia_down anyway
+ fatal("Sender failed: ~p~n state: ~p~n", [Res, State]),
+ {stop, fatal, State}
+ end;
+
+handle_info({'EXIT', Pid, R}, State) when Pid == State#state.supervisor ->
+ catch set(mnesia_status, stopping),
+ case State#state.dumper_pid of
+ undefined ->
+ dbg_out("~p was ~p~n", [?SERVER_NAME, R]),
+ {stop, shutdown, State};
+ _ ->
+ noreply(State#state{is_stopping = true})
+ end;
+
+handle_info({'EXIT', Pid, R}, State) when Pid == State#state.dumper_pid ->
+ case State#state.dumper_queue of
+ [#schema_commit_lock{}|Workers] -> %% Schema trans crashed or was killed
+ State2 = State#state{dumper_queue = Workers, dumper_pid = undefined},
+ State3 = opt_start_worker(State2),
+ noreply(State3);
+ _Other ->
+ fatal("Dumper or schema commit crashed: ~p~n state: ~p~n", [R, State]),
+ {stop, fatal, State}
+ end;
+
+handle_info({'EXIT', Pid, R}, State) when Pid == State#state.loader_pid ->
+ fatal("Loader crashed: ~p~n state: ~p~n", [R, State]),
+ {stop, fatal, State};
+
+handle_info({'EXIT', Pid, R}, State) when Pid == State#state.sender_pid ->
+ %% No need to send any message to the table receiver
+ %% since it will soon get a mnesia_down anyway
+ fatal("Sender crashed: ~p~n state: ~p~n", [R, State]),
+ {stop, fatal, State};
+
+handle_info({From, get_state}, State) ->
+ From ! {?SERVER_NAME, State},
+ noreply(State);
+
+%% No real need for buffering
+handle_info(Msg, State) when State#state.schema_is_merged == false ->
+ %% Buffer early messages
+ Msgs = State#state.early_msgs,
+ noreply(State#state{early_msgs = [{info, Msg} | Msgs]});
+
+handle_info({'EXIT', Pid, wait_for_tables_timeout}, State) ->
+ sync_tab_timeout(Pid, get()),
+ noreply(State);
+
+handle_info(Msg, State) ->
+ error("~p got unexpected info: ~p~n", [?SERVER_NAME, Msg]),
+ noreply(State).
+
+reply_late_load(Tab, [H | T]) when H#late_load.table == Tab ->
+ reply(H#late_load.opt_reply_to, ok),
+ reply_late_load(Tab, T);
+reply_late_load(Tab, [H | T]) ->
+ [H | reply_late_load(Tab, T)];
+reply_late_load(_Tab, []) ->
+ [].
+
+sync_tab_timeout(Pid, [{{sync_tab, Tab}, Pids} | Tail]) ->
+ case lists:delete(Pid, Pids) of
+ [] ->
+ erase({sync_tab, Tab});
+ Pids2 ->
+ put({sync_tab, Tab}, Pids2)
+ end,
+ sync_tab_timeout(Pid, Tail);
+sync_tab_timeout(Pid, [_ | Tail]) ->
+ sync_tab_timeout(Pid, Tail);
+sync_tab_timeout(_Pid, []) ->
+ ok.
+
+%% Pick the load record that has the highest load order
+%% Returns {BestLoad, RemainingQueue} or {none, []} if queue is empty
+pick_next(Queue) ->
+ pick_next(Queue, none, none, []).
+
+pick_next([Head | Tail], Load, Order, Rest) when record(Head, net_load) ->
+ Tab = Head#net_load.table,
+ select_best(Head, Tail, val({Tab, load_order}), Load, Order, Rest);
+pick_next([Head | Tail], Load, Order, Rest) when record(Head, disc_load) ->
+ Tab = Head#disc_load.table,
+ select_best(Head, Tail, val({Tab, load_order}), Load, Order, Rest);
+pick_next([], Load, _Order, Rest) ->
+ {Load, Rest}.
+
+select_best(Load, Tail, Order, none, none, Rest) ->
+ pick_next(Tail, Load, Order, Rest);
+select_best(Load, Tail, Order, OldLoad, OldOrder, Rest) when Order > OldOrder ->
+ pick_next(Tail, Load, Order, [OldLoad | Rest]);
+select_best(Load, Tail, _Order, OldLoad, OldOrder, Rest) ->
+ pick_next(Tail, OldLoad, OldOrder, [Load | Rest]).
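+%% A small sketch of pick_next/1 (illustration only, with hypothetical tables
+%% a and b where val({a, load_order}) == 0 and val({b, load_order}) == 5):
+%%
+%%   pick_next([#disc_load{table = a}, #net_load{table = b}])
+%%   %% -> {#net_load{table = b}, [#disc_load{table = a}]}
+%%
+%% i.e. the record with the highest load order is picked and the rest of the
+%% queue is returned for later scheduling.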
+
+%%----------------------------------------------------------------------
+%% Func: terminate/2
+%% Purpose: Shutdown the server
+%% Returns: any (ignored by gen_server)
+%%----------------------------------------------------------------------
+terminate(Reason, State) ->
+ mnesia_monitor:terminate_proc(?SERVER_NAME, Reason, State).
+
+%%----------------------------------------------------------------------
+%% Func: code_change/3
+%% Purpose: Upgrade process when its code is to be changed
+%% Returns: {ok, NewState}
+%%----------------------------------------------------------------------
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+%%%----------------------------------------------------------------------
+%%% Internal functions
+%%%----------------------------------------------------------------------
+
+maybe_log_mnesia_down(N) ->
+ %% We use mnesia_down when deciding which tables to load locally,
+ %% so if we are not running (i.e. haven't decided which tables
+ %% to load locally), don't log mnesia_down yet.
+ case mnesia_lib:is_running() of
+ yes ->
+ verbose("Logging mnesia_down ~w~n", [N]),
+ mnesia_recover:log_mnesia_down(N),
+ ok;
+ _ ->
+ Filter = fun(Tab) ->
+ inactive_copy_holders(Tab, N)
+ end,
+ HalfLoadedTabs = lists:any(Filter, val({schema, local_tables}) -- [schema]),
+ if
+ HalfLoadedTabs == true ->
+ verbose("Logging mnesia_down ~w~n", [N]),
+ mnesia_recover:log_mnesia_down(N),
+ ok;
+ true ->
+ %% Unfortunately we have not loaded some common
+ %% tables yet, so we cannot rely on the nodedown
+ log_later %% BUGBUG handle this case!!!
+ end
+ end.
+
+inactive_copy_holders(Tab, Node) ->
+ Cs = val({Tab, cstruct}),
+ case mnesia_lib:cs_to_storage_type(Node, Cs) of
+ unknown ->
+ false;
+ _Storage ->
+ mnesia_lib:not_active_here(Tab)
+ end.
+
+orphan_tables([Tab | Tabs], Node, Ns, Local, Remote) ->
+ Cs = val({Tab, cstruct}),
+ CopyHolders = mnesia_lib:copy_holders(Cs),
+ RamCopyHolders = Cs#cstruct.ram_copies,
+ DiscCopyHolders = CopyHolders -- RamCopyHolders,
+ DiscNodes = val({schema, disc_copies}),
+ LocalContent = Cs#cstruct.local_content,
+ RamCopyHoldersOnDiscNodes = mnesia_lib:intersect(RamCopyHolders, DiscNodes),
+ Active = val({Tab, active_replicas}),
+ case lists:member(Node, DiscCopyHolders) of
+ true when Active == [] ->
+ case DiscCopyHolders -- Ns of
+ [] ->
+ %% We're last up and the other nodes have not
+ %% loaded the table. Let's load it if we are
+ %% the smallest node.
+ case lists:min(DiscCopyHolders) of
+ Min when Min == node() ->
+ case mnesia_recover:get_master_nodes(Tab) of
+ [] ->
+ L = [Tab | Local],
+ orphan_tables(Tabs, Node, Ns, L, Remote);
+ Masters ->
+ R = [{Tab, Masters} | Remote],
+ orphan_tables(Tabs, Node, Ns, Local, R)
+ end;
+ _ ->
+ orphan_tables(Tabs, Node, Ns, Local, Remote)
+ end;
+ _ ->
+ orphan_tables(Tabs, Node, Ns, Local, Remote)
+ end;
+ false when Active == [], DiscCopyHolders == [], RamCopyHoldersOnDiscNodes == [] ->
+ %% Special case when all replicas reside on disc-less nodes
+ orphan_tables(Tabs, Node, Ns, [Tab | Local], Remote);
+ _ when LocalContent == true ->
+ orphan_tables(Tabs, Node, Ns, [Tab | Local], Remote);
+ _ ->
+ orphan_tables(Tabs, Node, Ns, Local, Remote)
+ end;
+orphan_tables([], _, _, LocalOrphans, RemoteMasters) ->
+ {LocalOrphans, RemoteMasters}.
+
+node_has_tabs([Tab | Tabs], Node, State) when Node /= node() ->
+ State2 = update_whereabouts(Tab, Node, State),
+ node_has_tabs(Tabs, Node, State2);
+node_has_tabs([Tab | Tabs], Node, State) ->
+ user_sync_tab(Tab),
+ node_has_tabs(Tabs, Node, State);
+node_has_tabs([], _Node, State) ->
+ State.
+
+update_whereabouts(Tab, Node, State) ->
+ Storage = val({Tab, storage_type}),
+ Read = val({Tab, where_to_read}),
+ LocalC = val({Tab, local_content}),
+ BeingCreated = (?catch_val({Tab, create_table}) == true),
+ Masters = mnesia_recover:get_master_nodes(Tab),
+ ByForce = val({Tab, load_by_force}),
+ GoGetIt =
+ if
+ ByForce == true ->
+ true;
+ Masters == [] ->
+ true;
+ true ->
+ lists:member(Node, Masters)
+ end,
+
+ dbg_out("Table ~w is loaded on ~w. s=~w, r=~w, lc=~w, f=~w, m=~w~n",
+ [Tab, Node, Storage, Read, LocalC, ByForce, GoGetIt]),
+ if
+ LocalC == true ->
+ %% Local contents, don't care about other node
+ State;
+ Storage == unknown, Read == nowhere ->
+ %% No own copy, time to read remotely
+ %% if the other node is a good node
+ add_active_replica(Tab, Node),
+ case GoGetIt of
+ true ->
+ set({Tab, where_to_read}, Node),
+ user_sync_tab(Tab),
+ State;
+ false ->
+ State
+ end;
+ Storage == unknown ->
+ %% No own copy, continue to read remotely
+ add_active_replica(Tab, Node),
+ NodeST = mnesia_lib:storage_type_at_node(Node, Tab),
+ ReadST = mnesia_lib:storage_type_at_node(Read, Tab),
+ if %% Avoid reading from disc_only_copies
+ NodeST == disc_only_copies ->
+ ignore;
+ ReadST == disc_only_copies ->
+ mnesia_lib:set_remote_where_to_read(Tab);
+ true ->
+ ignore
+ end,
+ user_sync_tab(Tab),
+ State;
+ BeingCreated == true ->
+ %% The table is currently being created
+ %% and we shall have our own copy of it.
+ %% We will load the (empty) table locally.
+ add_active_replica(Tab, Node),
+ State;
+ Read == nowhere ->
+ %% Own copy, go and get a copy of the table
+ %% if the other node is a master or if there
+ %% is no master at all
+ add_active_replica(Tab, Node),
+ case GoGetIt of
+ true ->
+ Worker = #net_load{table = Tab,
+ reason = {active_remote, Node}},
+ add_worker(Worker, State);
+ false ->
+ State
+ end;
+ true ->
+ %% We already have an own copy
+ add_active_replica(Tab, Node),
+ user_sync_tab(Tab),
+ State
+ end.
+
+initial_safe_loads() ->
+ case val({schema, storage_type}) of
+ ram_copies ->
+ Downs = [],
+ Tabs = val({schema, local_tables}) -- [schema],
+ LastC = fun(T) -> last_consistent_replica(T, Downs) end,
+ lists:zf(LastC, Tabs);
+
+ disc_copies ->
+ Downs = mnesia_recover:get_mnesia_downs(),
+ dbg_out("mnesia_downs = ~p~n", [Downs]),
+
+ Tabs = val({schema, local_tables}) -- [schema],
+ LastC = fun(T) -> last_consistent_replica(T, Downs) end,
+ lists:zf(LastC, Tabs)
+ end.
+
+last_consistent_replica(Tab, Downs) ->
+ Cs = val({Tab, cstruct}),
+ Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
+ Ram = Cs#cstruct.ram_copies,
+ Disc = Cs#cstruct.disc_copies,
+ DiscOnly = Cs#cstruct.disc_only_copies,
+ BetterCopies0 = mnesia_lib:remote_copy_holders(Cs) -- Downs,
+ BetterCopies = BetterCopies0 -- Ram,
+ AccessMode = Cs#cstruct.access_mode,
+ Copies = mnesia_lib:copy_holders(Cs),
+ Masters = mnesia_recover:get_master_nodes(Tab),
+ LocalMaster0 = lists:member(node(), Masters),
+ LocalContent = Cs#cstruct.local_content,
+ RemoteMaster =
+ if
+ Masters == [] -> false;
+ true -> not LocalMaster0
+ end,
+ LocalMaster =
+ if
+ Masters == [] -> false;
+ true -> LocalMaster0
+ end,
+ if
+ Copies == [node()] ->
+ %% Only one copy holder and it is local.
+ %% It may also be a local contents table
+ {true, {Tab, local_only}};
+ LocalContent == true ->
+ {true, {Tab, local_content}};
+ LocalMaster == true ->
+ %% We have a local master
+ {true, {Tab, local_master}};
+ RemoteMaster == true ->
+ %% Wait for remote master copy
+ false;
+ Storage == ram_copies ->
+ if
+ Disc == [], DiscOnly == [] ->
+ %% Nobody has copy on disc
+ {true, {Tab, ram_only}};
+ true ->
+ %% Some other node has copy on disc
+ false
+ end;
+ AccessMode == read_only ->
+ %% No one has been able to update the table,
+ %% i.e. all disc resident copies are equal
+ {true, {Tab, read_only}};
+ BetterCopies /= [], Masters /= [node()] ->
+ %% There are better copies on other nodes
+ %% and we do not have the only master copy
+ false;
+ true ->
+ {true, {Tab, initial}}
+ end.
+
+reconfigure_tables(N, State, [Tab |Tail]) ->
+ del_active_replica(Tab, N),
+ case val({Tab, where_to_read}) of
+ N -> mnesia_lib:set_remote_where_to_read(Tab);
+ _ -> ignore
+ end,
+ LateQ = drop_loaders(Tab, N, State#state.late_loader_queue),
+ reconfigure_tables(N, State#state{late_loader_queue = LateQ}, Tail);
+
+reconfigure_tables(_, State, []) ->
+ State.
+
+remove_early_messages([], _Node) ->
+ [];
+remove_early_messages([{call, {add_active_replica, [_, Node, _, _], _}, _}|R], Node) ->
+ remove_early_messages(R, Node); %% Does a reply before queuing
+remove_early_messages([{call, {block_table, _, From}, ReplyTo}|R], Node)
+ when node(From) == Node ->
+ reply(ReplyTo, ok), %% Remove gen_server waits
+ remove_early_messages(R, Node);
+remove_early_messages([{cast, {i_have_tab, _Tab, Node}}|R], Node) ->
+ remove_early_messages(R, Node);
+remove_early_messages([{cast, {adopt_orphans, Node, _Tabs}}|R], Node) ->
+ remove_early_messages(R, Node);
+remove_early_messages([M|R],Node) ->
+ [M|remove_early_messages(R,Node)].
+
+%% Drop loader from late load queue and possibly trigger a disc_load
+drop_loaders(Tab, Node, [H | T]) when H#late_load.table == Tab ->
+ %% Check if it is time to issue a disc_load request
+ case H#late_load.loaders of
+ [Node] ->
+ Reason = {H#late_load.reason, last_loader_down, Node},
+ cast({disc_load, Tab, Reason}); % Ugly cast
+ _ ->
+ ignore
+ end,
+ %% Drop the node from the list of loaders
+ H2 = H#late_load{loaders = H#late_load.loaders -- [Node]},
+ [H2 | drop_loaders(Tab, Node, T)];
+drop_loaders(Tab, Node, [H | T]) ->
+ [H | drop_loaders(Tab, Node, T)];
+drop_loaders(_, _, []) ->
+ [].
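+%% A small sketch of drop_loaders/3 (illustration only, with a hypothetical
+%% table t and node n1): when the last remote loader of a late_load entry
+%% goes down, a local disc_load is requested and the node is dropped:
+%%
+%%   drop_loaders(t, n1, [#late_load{table = t, loaders = [n1], reason = R}])
+%%   %% casts {disc_load, t, {R, last_loader_down, n1}} and
+%%   %% -> [#late_load{table = t, loaders = [], reason = R}]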
+
+add_active_replica(Tab, Node) ->
+ add_active_replica(Tab, Node, val({Tab, cstruct})).
+
+add_active_replica(Tab, Node, Cs) when record(Cs, cstruct) ->
+ Storage = mnesia_lib:schema_cs_to_storage_type(Node, Cs),
+ AccessMode = Cs#cstruct.access_mode,
+ add_active_replica(Tab, Node, Storage, AccessMode).
+
+%% Block table primitives
+
+block_table(Tab) ->
+ Var = {Tab, where_to_commit},
+ Old = val(Var),
+ New = {blocked, Old},
+ set(Var, New). % where_to_commit
+
+unblock_table(Tab) ->
+ Var = {Tab, where_to_commit},
+ New =
+ case val(Var) of
+ {blocked, List} ->
+ List;
+ List ->
+ List
+ end,
+ set(Var, New). % where_to_commit
+
+is_tab_blocked(W2C) when list(W2C) ->
+ {false, W2C};
+is_tab_blocked({blocked, W2C}) when list(W2C) ->
+ {true, W2C}.
+
+mark_blocked_tab(true, Value) ->
+ {blocked, Value};
+mark_blocked_tab(false, Value) ->
+ Value.
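+%% A small sketch of the block table primitives (illustration only, with a
+%% hypothetical where_to_commit list [{n1, disc_copies}]): block_table/1 wraps
+%% the list in a blocked-tuple and is_tab_blocked/1 recovers flag and list:
+%%
+%%   is_tab_blocked([{n1, disc_copies}])            %% -> {false, [{n1, disc_copies}]}
+%%   is_tab_blocked({blocked, [{n1, disc_copies}]}) %% -> {true, [{n1, disc_copies}]}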
+
+%%
+
+add_active_replica(Tab, Node, Storage, AccessMode) ->
+ Var = {Tab, where_to_commit},
+ {Blocked, Old} = is_tab_blocked(val(Var)),
+ Del = lists:keydelete(Node, 1, Old),
+ case AccessMode of
+ read_write ->
+ New = lists:sort([{Node, Storage} | Del]),
+ set(Var, mark_blocked_tab(Blocked, New)), % where_to_commit
+ add({Tab, where_to_write}, Node);
+ read_only ->
+ set(Var, mark_blocked_tab(Blocked, Del)),
+ mnesia_lib:del({Tab, where_to_write}, Node)
+ end,
+ add({Tab, active_replicas}, Node).
+
+del_active_replica(Tab, Node) ->
+ Var = {Tab, where_to_commit},
+ {Blocked, Old} = is_tab_blocked(val(Var)),
+ Del = lists:keydelete(Node, 1, Old),
+ New = lists:sort(Del),
+ set(Var, mark_blocked_tab(Blocked, New)), % where_to_commit
+ mnesia_lib:del({Tab, active_replicas}, Node),
+ mnesia_lib:del({Tab, where_to_write}, Node).
+
+change_table_access_mode(Cs) ->
+ Tab = Cs#cstruct.name,
+ lists:foreach(fun(N) -> add_active_replica(Tab, N, Cs) end,
+ val({Tab, active_replicas})).
+
+%% Node To now has Tab loaded, but this must be undone.
+%% This code is rpc:call'ed from the tab_copier process
+%% when it has *not* released its table lock
+unannounce_add_table_copy(Tab, To) ->
+ del_active_replica(Tab, To),
+ case val({Tab , where_to_read}) of
+ To ->
+ mnesia_lib:set_remote_where_to_read(Tab);
+ _ ->
+ ignore
+ end.
+
+user_sync_tab(Tab) ->
+ case val(debug) of
+ trace ->
+ mnesia_subscr:subscribe(whereis(mnesia_event), {table, Tab});
+ _ ->
+ ignore
+ end,
+
+ case erase({sync_tab, Tab}) of
+ undefined ->
+ ok;
+ Pids ->
+ lists:foreach(fun(Pid) -> sync_reply(Pid, Tab) end, Pids)
+ end.
+
+i_have_tab(Tab) ->
+ case val({Tab, local_content}) of
+ true ->
+ mnesia_lib:set_local_content_whereabouts(Tab);
+ false ->
+ set({Tab, where_to_read}, node())
+ end,
+ add_active_replica(Tab, node()).
+
+sync_and_block_table_whereabouts(Tab, ToNode, RemoteS, AccessMode) when Tab /= schema ->
+ Current = val({current, db_nodes}),
+ Ns =
+ case lists:member(ToNode, Current) of
+ true -> Current -- [ToNode];
+ false -> Current
+ end,
+ remote_call(ToNode, block_table, [Tab]),
+ [remote_call(Node, add_active_replica, [Tab, ToNode, RemoteS, AccessMode]) ||
+ Node <- [ToNode | Ns]],
+ ok.
+
+sync_del_table_copy_whereabouts(Tab, ToNode) when Tab /= schema ->
+ Current = val({current, db_nodes}),
+ Ns =
+ case lists:member(ToNode, Current) of
+ true -> Current;
+ false -> [ToNode | Current]
+ end,
+ Args = [Tab, ToNode],
+ [remote_call(Node, unannounce_add_table_copy, Args) || Node <- Ns],
+ ok.
+
+get_info(Timeout) ->
+ case whereis(?SERVER_NAME) of
+ undefined ->
+ {timeout, Timeout};
+ Pid ->
+ Pid ! {self(), get_state},
+ receive
+ {?SERVER_NAME, State} when record(State, state) ->
+ {info, State}
+ after Timeout ->
+ {timeout, Timeout}
+ end
+ end.
+
+get_workers(Timeout) ->
+ case whereis(?SERVER_NAME) of
+ undefined ->
+ {timeout, Timeout};
+ Pid ->
+ Pid ! {self(), get_state},
+ receive
+ {?SERVER_NAME, State} when record(State, state) ->
+ {workers, State#state.loader_pid, State#state.sender_pid, State#state.dumper_pid}
+ after Timeout ->
+ {timeout, Timeout}
+ end
+ end.
+
+info() ->
+ Tabs = mnesia_lib:local_active_tables(),
+ io:format("---> Active tables <--- ~n", []),
+ info(Tabs).
+
+info([Tab | Tail]) ->
+ case val({Tab, storage_type}) of
+ disc_only_copies ->
+ info_format(Tab,
+ dets:info(Tab, size),
+ dets:info(Tab, file_size),
+ "bytes on disc");
+ _ ->
+ info_format(Tab,
+ ?ets_info(Tab, size),
+ ?ets_info(Tab, memory),
+ "words of mem")
+ end,
+ info(Tail);
+info([]) -> ok;
+info(Tab) -> info([Tab]).
+
+info_format(Tab, Size, Mem, Media) ->
+ StrT = mnesia_lib:pad_name(atom_to_list(Tab), 15, []),
+ StrS = mnesia_lib:pad_name(integer_to_list(Size), 8, []),
+ StrM = mnesia_lib:pad_name(integer_to_list(Mem), 8, []),
+ io:format("~s: with ~s records occupying ~s ~s~n",
+ [StrT, StrS, StrM, Media]).
+
+%% Handle early arrived messages
+handle_early_msgs([Msg | Msgs], State) ->
+ %% The messages are in reverse order
+ case handle_early_msg(Msg, State) of
+ {stop, Reason, Reply, State2} ->
+ {stop, Reason, Reply, State2};
+ {stop, Reason, State2} ->
+ {stop, Reason, State2};
+ {noreply, State2} ->
+ handle_early_msgs(Msgs, State2);
+ {noreply, State2, _Timeout} ->
+ handle_early_msgs(Msgs, State2);
+ Else ->
+ dbg_out("handle_early_msgs case clause ~p ~n", [Else]),
+ erlang:error(Else, [[Msg | Msgs], State])
+ end;
+handle_early_msgs([], State) ->
+ noreply(State).
+
+handle_early_msg({call, Msg, From}, State) ->
+ handle_call(Msg, From, State);
+handle_early_msg({cast, Msg}, State) ->
+ handle_cast(Msg, State);
+handle_early_msg({info, Msg}, State) ->
+ handle_info(Msg, State).
+
+noreply(State) ->
+ {noreply, State}.
+
+reply(undefined, Reply) ->
+ Reply;
+reply(ReplyTo, Reply) ->
+ gen_server:reply(ReplyTo, Reply),
+ Reply.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Worker management
+
+%% Returns new State
+add_worker(Worker, State) when record(Worker, dump_log) ->
+ InitBy = Worker#dump_log.initiated_by,
+ Queue = State#state.dumper_queue,
+ case lists:keymember(InitBy, #dump_log.initiated_by, Queue) of
+ false ->
+ ignore;
+ true when Worker#dump_log.opt_reply_to == undefined ->
+ %% The same threshold has been exceeded again,
+ %% before we have had the possibility to
+ %% process the older one.
+ DetectedBy = {dump_log, InitBy},
+ Event = {mnesia_overload, DetectedBy},
+ mnesia_lib:report_system_event(Event)
+ end,
+ Queue2 = Queue ++ [Worker],
+ State2 = State#state{dumper_queue = Queue2},
+ opt_start_worker(State2);
+add_worker(Worker, State) when record(Worker, schema_commit_lock) ->
+ Queue = State#state.dumper_queue,
+ Queue2 = Queue ++ [Worker],
+ State2 = State#state{dumper_queue = Queue2},
+ opt_start_worker(State2);
+add_worker(Worker, State) when record(Worker, net_load) ->
+ Queue = State#state.loader_queue,
+ State2 = State#state{loader_queue = Queue ++ [Worker]},
+ opt_start_worker(State2);
+add_worker(Worker, State) when record(Worker, send_table) ->
+ Queue = State#state.sender_queue,
+ State2 = State#state{sender_queue = Queue ++ [Worker]},
+ opt_start_worker(State2);
+add_worker(Worker, State) when record(Worker, disc_load) ->
+ Queue = State#state.loader_queue,
+ State2 = State#state{loader_queue = Queue ++ [Worker]},
+ opt_start_worker(State2);
+% Block controller should be used for upgrading mnesia.
+add_worker(Worker, State) when record(Worker, block_controller) ->
+ Queue = State#state.dumper_queue,
+ Queue2 = [Worker | Queue],
+ State2 = State#state{dumper_queue = Queue2},
+ opt_start_worker(State2).
+
+%% Optionally start a worker
+%%
+%% Dumpers and loaders may run simultaneously
+%% but neither of them may run during schema commit.
+%% Loaders may not start if a schema commit is enqueued.
+opt_start_worker(State) when State#state.is_stopping == true ->
+ State;
+opt_start_worker(State) ->
+ %% Prioritize dumper and schema commit
+ %% by checking them first
+ case State#state.dumper_queue of
+ [Worker | _Rest] when State#state.dumper_pid == undefined ->
+ %% Great, a worker is in the queue and neither
+ %% a schema transaction is being committed
+ %% nor a dumper is running
+
+ %% Start worker but keep him in the queue
+ if
+ record(Worker, schema_commit_lock) ->
+ ReplyTo = Worker#schema_commit_lock.owner,
+ reply(ReplyTo, granted),
+ {Owner, _Tag} = ReplyTo,
+ State#state{dumper_pid = Owner};
+
+ record(Worker, dump_log) ->
+ Pid = spawn_link(?MODULE, dump_and_reply, [self(), Worker]),
+ State2 = State#state{dumper_pid = Pid},
+
+ %% If the worker was a dumper we may
+ %% be able to start a loader
+ %% or sender
+ State3 = opt_start_sender(State2),
+ opt_start_loader(State3);
+
+ record(Worker, block_controller) ->
+ case {State#state.sender_pid, State#state.loader_pid} of
+ {undefined, undefined} ->
+ ReplyTo = Worker#block_controller.owner,
+ reply(ReplyTo, granted),
+ {Owner, _Tag} = ReplyTo,
+ State#state{dumper_pid = Owner};
+ _ ->
+ State
+ end
+ end;
+ _ ->
+ %% Bad luck, try with a loader or sender instead
+ State2 = opt_start_sender(State),
+ opt_start_loader(State2)
+ end.
+
+opt_start_sender(State) ->
+ case State#state.sender_queue of
+ []->
+ %% No need
+ State;
+
+ _ when State#state.sender_pid /= undefined ->
+ %% Bad luck, a sender is already running
+ State;
+
+ [Sender | _SenderRest] ->
+ case State#state.loader_queue of
+ [Loader | _LoaderRest]
+ when State#state.loader_pid /= undefined,
+ Loader#net_load.table == Sender#send_table.table ->
+ %% A conflicting loader is running
+ State;
+ _ ->
+ SchemaQueue = State#state.dumper_queue,
+ case lists:keymember(schema_commit, 1, SchemaQueue) of
+ false ->
+
+ %% Start worker but keep him in the queue
+ Pid = spawn_link(?MODULE, send_and_reply,
+ [self(), Sender]),
+ State#state{sender_pid = Pid};
+ true ->
+ %% Bad luck, we must wait for the schema commit
+ State
+ end
+ end
+ end.
+
+opt_start_loader(State) ->
+ LoaderQueue = State#state.loader_queue,
+ if
+ LoaderQueue == [] ->
+ %% No need
+ State;
+
+ State#state.loader_pid /= undefined ->
+ %% Bad luck, a loader is already running
+ State;
+
+ true ->
+ SchemaQueue = State#state.dumper_queue,
+ case lists:keymember(schema_commit, 1, SchemaQueue) of
+ false ->
+ {Worker, Rest} = pick_next(LoaderQueue),
+
+ %% Start worker but keep him in the queue
+ Pid = spawn_link(?MODULE, load_and_reply, [self(), Worker]),
+ State#state{loader_pid = Pid,
+ loader_queue = [Worker | Rest]};
+ true ->
+ %% Bad luck, we must wait for the schema commit
+ State
+ end
+ end.
+
+start_remote_sender(Node, Tab, Receiver, Storage) ->
+ Msg = #send_table{table = Tab,
+ receiver_pid = Receiver,
+ remote_storage = Storage},
+ gen_server:cast({?SERVER_NAME, Node}, Msg).
+
+dump_and_reply(ReplyTo, Worker) ->
+ %% No trap_exit, die intentionally instead
+ Res = mnesia_dumper:opt_dump_log(Worker#dump_log.initiated_by),
+ ReplyTo ! #dumper_done{worker_pid = self(),
+ worker_res = Res},
+ unlink(ReplyTo),
+ exit(normal).
+
+send_and_reply(ReplyTo, Worker) ->
+ %% No trap_exit, die intentionally instead
+ Res = mnesia_loader:send_table(Worker#send_table.receiver_pid,
+ Worker#send_table.table,
+ Worker#send_table.remote_storage),
+ ReplyTo ! #sender_done{worker_pid = self(),
+ worker_res = Res},
+ unlink(ReplyTo),
+ exit(normal).
+
+
+load_and_reply(ReplyTo, Worker) ->
+ process_flag(trap_exit, true),
+ Done = load_table(Worker),
+ ReplyTo ! Done#loader_done{worker_pid = self()},
+ unlink(ReplyTo),
+ exit(normal).
+
+%% Now it is time to load the table
+%% but first we must check whether it is still necessary
+load_table(Load) when record(Load, net_load) ->
+ Tab = Load#net_load.table,
+ ReplyTo = Load#net_load.opt_reply_to,
+ Reason = Load#net_load.reason,
+ LocalC = val({Tab, local_content}),
+ AccessMode = val({Tab, access_mode}),
+ ReadNode = val({Tab, where_to_read}),
+ Active = filter_active(Tab),
+ Done = #loader_done{is_loaded = true,
+ table_name = Tab,
+ needs_announce = false,
+ needs_sync = false,
+ needs_reply = true,
+ reply_to = ReplyTo,
+ reply = {loaded, ok}
+ },
+ if
+ ReadNode == node() ->
+ %% Already loaded locally
+ Done;
+ LocalC == true ->
+ Res = mnesia_loader:disc_load_table(Tab, load_local_content),
+ Done#loader_done{reply = Res, needs_announce = true, needs_sync = true};
+ AccessMode == read_only ->
+ disc_load_table(Tab, Reason, ReplyTo);
+ true ->
+ %% Either we cannot read the table yet
+ %% or someone is moving a replica between
+ %% two nodes
+ Cs = Load#net_load.cstruct,
+ Res = mnesia_loader:net_load_table(Tab, Reason, Active, Cs),
+ case Res of
+ {loaded, ok} ->
+ Done#loader_done{needs_sync = true,
+ reply = Res};
+ {not_loaded, storage_unknown} ->
+ Done#loader_done{reply = Res};
+ {not_loaded, _} ->
+ Done#loader_done{is_loaded = false,
+ needs_reply = false,
+ reply = Res}
+ end
+ end;
+
+load_table(Load) when record(Load, disc_load) ->
+ Tab = Load#disc_load.table,
+ Reason = Load#disc_load.reason,
+ ReplyTo = Load#disc_load.opt_reply_to,
+ ReadNode = val({Tab, where_to_read}),
+ Active = filter_active(Tab),
+ Done = #loader_done{is_loaded = true,
+ table_name = Tab,
+ needs_announce = false,
+ needs_sync = false,
+ needs_reply = false
+ },
+ if
+ Active == [], ReadNode == nowhere ->
+ %% Not loaded anywhere, let's load it from disc
+ disc_load_table(Tab, Reason, ReplyTo);
+ ReadNode == nowhere ->
+ %% Already loaded on another node, let's get it
+ Cs = val({Tab, cstruct}),
+ case mnesia_loader:net_load_table(Tab, Reason, Active, Cs) of
+ {loaded, ok} ->
+ Done#loader_done{needs_sync = true};
+ {not_loaded, storage_unknown} ->
+ Done#loader_done{is_loaded = false};
+ {not_loaded, ErrReason} ->
+ Done#loader_done{is_loaded = false,
+ reply = {not_loaded,ErrReason}}
+ end;
+ true ->
+ %% Already readable, do not worry be happy
+ Done
+ end.
+
+disc_load_table(Tab, Reason, ReplyTo) ->
+ Done = #loader_done{is_loaded = true,
+ table_name = Tab,
+ needs_announce = false,
+ needs_sync = false,
+ needs_reply = true,
+ reply_to = ReplyTo,
+ reply = {loaded, ok}
+ },
+ Res = mnesia_loader:disc_load_table(Tab, Reason),
+ if
+ Res == {loaded, ok} ->
+ Done#loader_done{needs_announce = true,
+ needs_sync = true,
+ reply = Res};
+ ReplyTo /= undefined ->
+ Done#loader_done{is_loaded = false,
+ reply = Res};
+ true ->
+ fatal("Cannot load table ~p from disc: ~p~n", [Tab, Res])
+ end.
+
+filter_active(Tab) ->
+ ByForce = val({Tab, load_by_force}),
+ Active = val({Tab, active_replicas}),
+ Masters = mnesia_recover:get_master_nodes(Tab),
+ do_filter_active(ByForce, Active, Masters).
+
+do_filter_active(true, Active, _Masters) ->
+ Active;
+do_filter_active(false, Active, []) ->
+ Active;
+do_filter_active(false, Active, Masters) ->
+ mnesia_lib:intersect(Active, Masters).
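+%% A small sketch of do_filter_active/3 (illustration only, with hypothetical
+%% nodes n1..n3): force-loaded tables and tables without master nodes keep all
+%% active replicas, otherwise only the active replicas that are masters remain:
+%%
+%%   do_filter_active(true,  [n1, n2, n3], [n2]) %% -> [n1, n2, n3]
+%%   do_filter_active(false, [n1, n2, n3], [n2]) %% -> [n2]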
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_dumper.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_dumper.erl
new file mode 100644
index 0000000000..116823a779
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_dumper.erl
@@ -0,0 +1,1092 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_dumper.erl,v 1.1 2008/12/17 09:53:38 mikpe Exp $
+%%
+-module(mnesia_dumper).
+
+%% The InitBy arg may be one of the following:
+%% scan_decisions Initial scan for decisions
+%% startup Initial dump during startup
+%% schema_prepare Dump initiated during schema transaction preparation
+%% schema_update Dump initiated during schema transaction commit
+%% fast_schema_update A schema_update, but ignores the log file
+%% user Dump initiated by user
+%% write_threshold Automatic dump caused by too many log writes
+%% time_threshold Automatic dump caused by timeout
+
+%% Public interface
+-export([
+ get_log_writes/0,
+ incr_log_writes/0,
+ raw_dump_table/2,
+ raw_named_dump_table/2,
+ start_regulator/0,
+ opt_dump_log/1,
+ update/3
+ ]).
+
+ %% Internal stuff
+-export([regulator_init/1]).
+
+-include("mnesia.hrl").
+-include_lib("kernel/include/file.hrl").
+
+-import(mnesia_lib, [fatal/2, dbg_out/2]).
+
+-define(REGULATOR_NAME, mnesia_dumper_load_regulator).
+-define(DumpToEtsMultiplier, 4).
+
+-record(state, {initiated_by = nobody,
+ dumper = nopid,
+ regulator_pid,
+ supervisor_pid,
+ queue = [],
+ timeout}).
+
+get_log_writes() ->
+ Max = mnesia_monitor:get_env(dump_log_write_threshold),
+ Prev = mnesia_lib:read_counter(trans_log_writes),
+ Left = mnesia_lib:read_counter(trans_log_writes_left),
+ Diff = Max - Left,
+ Prev + Diff.
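+%% Worked example (illustration only): with a dump_log_write_threshold of 100,
+%% 60 writes left in the current countdown and 1000 writes accounted for
+%% earlier, get_log_writes/0 reports 1000 + (100 - 60) = 1040.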
+
+incr_log_writes() ->
+ Left = mnesia_lib:incr_counter(trans_log_writes_left, -1),
+ if
+ Left > 0 ->
+ ignore;
+ true ->
+ adjust_log_writes(true)
+ end.
+
+adjust_log_writes(DoCast) ->
+ Token = {mnesia_adjust_log_writes, self()},
+ case global:set_lock(Token, [node()], 1) of
+ false ->
+ ignore; %% Somebody else is sending a dump request
+ true ->
+ case DoCast of
+ false ->
+ ignore;
+ true ->
+ mnesia_controller:async_dump_log(write_threshold)
+ end,
+ Max = mnesia_monitor:get_env(dump_log_write_threshold),
+ Left = mnesia_lib:read_counter(trans_log_writes_left),
+ %% Don't care if we lost a few writes
+ mnesia_lib:set_counter(trans_log_writes_left, Max),
+ Diff = Max - Left,
+ mnesia_lib:incr_counter(trans_log_writes, Diff),
+ global:del_lock(Token, [node()])
+ end.
+
+%% Returns 'ok' or exits
+opt_dump_log(InitBy) ->
+ Reg = case whereis(?REGULATOR_NAME) of
+ undefined ->
+ nopid;
+ Pid when pid(Pid) ->
+ Pid
+ end,
+ perform_dump(InitBy, Reg).
+
+%% Scan for decisions
+perform_dump(InitBy, Regulator) when InitBy == scan_decisions ->
+ ?eval_debug_fun({?MODULE, perform_dump}, [InitBy]),
+
+ dbg_out("Transaction log dump initiated by ~w~n", [InitBy]),
+ scan_decisions(mnesia_log:previous_log_file(), InitBy, Regulator),
+ scan_decisions(mnesia_log:latest_log_file(), InitBy, Regulator);
+
+%% Propagate the log into the DAT-files
+perform_dump(InitBy, Regulator) ->
+ ?eval_debug_fun({?MODULE, perform_dump}, [InitBy]),
+ LogState = mnesia_log:prepare_log_dump(InitBy),
+ dbg_out("Transaction log dump initiated by ~w: ~w~n",
+ [InitBy, LogState]),
+ adjust_log_writes(false),
+ mnesia_recover:allow_garb(),
+ case LogState of
+ already_dumped ->
+ dumped;
+ {needs_dump, Diff} ->
+ U = mnesia_monitor:get_env(dump_log_update_in_place),
+ Cont = mnesia_log:init_log_dump(),
+ case catch do_perform_dump(Cont, U, InitBy, Regulator, undefined) of
+ ok ->
+ ?eval_debug_fun({?MODULE, post_dump}, [InitBy]),
+ case mnesia_monitor:use_dir() of
+ true ->
+ mnesia_recover:dump_decision_tab();
+ false ->
+ mnesia_log:purge_some_logs()
+ end,
+ %% And now to the crucial point...
+ mnesia_log:confirm_log_dump(Diff);
+ {error, Reason} ->
+ {error, Reason};
+ {'EXIT', {Desc, Reason}} ->
+ case mnesia_monitor:get_env(auto_repair) of
+ true ->
+ mnesia_lib:important(Desc, Reason),
+ %% Ignore rest of the log
+ mnesia_log:confirm_log_dump(Diff);
+ false ->
+ fatal(Desc, Reason)
+ end
+ end;
+ {error, Reason} ->
+ {error, {"Cannot prepare log dump", Reason}}
+ end.
+
+scan_decisions(Fname, InitBy, Regulator) ->
+ Exists = mnesia_lib:exists(Fname),
+ case Exists of
+ false ->
+ ok;
+ true ->
+ Header = mnesia_log:trans_log_header(),
+ Name = previous_log,
+ mnesia_log:open_log(Name, Header, Fname, Exists,
+ mnesia_monitor:get_env(auto_repair), read_only),
+ Cont = start,
+ Res = (catch do_perform_dump(Cont, false, InitBy, Regulator, undefined)),
+ mnesia_log:close_log(Name),
+ case Res of
+ ok -> ok;
+ {'EXIT', Reason} -> {error, Reason}
+ end
+ end.
+
+do_perform_dump(Cont, InPlace, InitBy, Regulator, OldVersion) ->
+ case mnesia_log:chunk_log(Cont) of
+ {C2, Recs} ->
+ case catch insert_recs(Recs, InPlace, InitBy, Regulator, OldVersion) of
+ {'EXIT', R} ->
+ Reason = {"Transaction log dump error: ~p~n", [R]},
+ close_files(InPlace, {error, Reason}, InitBy),
+ exit(Reason);
+ Version ->
+ do_perform_dump(C2, InPlace, InitBy, Regulator, Version)
+ end;
+ eof ->
+ close_files(InPlace, ok, InitBy),
+ ok
+ end.
+
+insert_recs([Rec | Recs], InPlace, InitBy, Regulator, LogV) ->
+ regulate(Regulator),
+ case insert_rec(Rec, InPlace, InitBy, LogV) of
+ LogH when record(LogH, log_header) ->
+ insert_recs(Recs, InPlace, InitBy, Regulator, LogH#log_header.log_version);
+ _ ->
+ insert_recs(Recs, InPlace, InitBy, Regulator, LogV)
+ end;
+
+insert_recs([], _InPlace, _InitBy, _Regulator, Version) ->
+ Version.
+
+insert_rec(Rec, _InPlace, scan_decisions, _LogV) ->
+ if
+ record(Rec, commit) ->
+ ignore;
+ record(Rec, log_header) ->
+ ignore;
+ true ->
+ mnesia_recover:note_log_decision(Rec, scan_decisions)
+ end;
+insert_rec(Rec, InPlace, InitBy, LogV) when record(Rec, commit) ->
+ %% Determine the Outcome of the transaction and recover it
+ D = Rec#commit.decision,
+ case mnesia_recover:wait_for_decision(D, InitBy) of
+ {Tid, committed} ->
+ do_insert_rec(Tid, Rec, InPlace, InitBy, LogV);
+ {Tid, aborted} ->
+ mnesia_schema:undo_prepare_commit(Tid, Rec)
+ end;
+insert_rec(H, _InPlace, _InitBy, _LogV) when record(H, log_header) ->
+ CurrentVersion = mnesia_log:version(),
+ if
+ H#log_header.log_kind /= trans_log ->
+ exit({"Bad kind of transaction log", H});
+ H#log_header.log_version == CurrentVersion ->
+ ok;
+ H#log_header.log_version == "4.2" ->
+ ok;
+ H#log_header.log_version == "4.1" ->
+ ok;
+ H#log_header.log_version == "4.0" ->
+ ok;
+ true ->
+ fatal("Bad version of transaction log: ~p~n", [H])
+ end,
+ H;
+
+insert_rec(_Rec, _InPlace, _InitBy, _LogV) ->
+ ok.
+
+do_insert_rec(Tid, Rec, InPlace, InitBy, LogV) ->
+ case Rec#commit.schema_ops of
+ [] ->
+ ignore;
+ SchemaOps ->
+ case val({schema, storage_type}) of
+ ram_copies ->
+ insert_ops(Tid, schema_ops, SchemaOps, InPlace, InitBy, LogV);
+ Storage ->
+ true = open_files(schema, Storage, InPlace, InitBy),
+ insert_ops(Tid, schema_ops, SchemaOps, InPlace, InitBy, LogV)
+ end
+ end,
+ D = Rec#commit.disc_copies,
+ insert_ops(Tid, disc_copies, D, InPlace, InitBy, LogV),
+ case InitBy of
+ startup ->
+ DO = Rec#commit.disc_only_copies,
+ insert_ops(Tid, disc_only_copies, DO, InPlace, InitBy, LogV);
+ _ ->
+ ignore
+ end.
+
+
+update(_Tid, [], _DumperMode) ->
+ dumped;
+update(Tid, SchemaOps, DumperMode) ->
+ UseDir = mnesia_monitor:use_dir(),
+ Res = perform_update(Tid, SchemaOps, DumperMode, UseDir),
+ mnesia_controller:release_schema_commit_lock(),
+ Res.
+
+perform_update(_Tid, _SchemaOps, mandatory, true) ->
+ %% Force a dump of the transaction log in order to let the
+ %% dumper perform needed updates
+
+ InitBy = schema_update,
+ ?eval_debug_fun({?MODULE, dump_schema_op}, [InitBy]),
+ opt_dump_log(InitBy);
+perform_update(Tid, SchemaOps, _DumperMode, _UseDir) ->
+ %% No need for a full transaction log dump.
+ %% Ignore the log file and only perform
+ %% the corresponding updates.
+
+ InitBy = fast_schema_update,
+ InPlace = mnesia_monitor:get_env(dump_log_update_in_place),
+ ?eval_debug_fun({?MODULE, dump_schema_op}, [InitBy]),
+ case catch insert_ops(Tid, schema_ops, SchemaOps, InPlace, InitBy,
+ mnesia_log:version()) of
+ {'EXIT', Reason} ->
+ Error = {error, {"Schema update error", Reason}},
+ close_files(InPlace, Error, InitBy),
+ fatal("Schema update error ~p ~p", [Reason, SchemaOps]);
+ _ ->
+ ?eval_debug_fun({?MODULE, post_dump}, [InitBy]),
+ close_files(InPlace, ok, InitBy),
+ ok
+ end.
+
+insert_ops(_Tid, _Storage, [], _InPlace, _InitBy, _) -> ok;
+insert_ops(Tid, Storage, [Op], InPlace, InitBy, Ver) when Ver >= "4.3"->
+ insert_op(Tid, Storage, Op, InPlace, InitBy),
+ ok;
+insert_ops(Tid, Storage, [Op | Ops], InPlace, InitBy, Ver) when Ver >= "4.3"->
+ insert_op(Tid, Storage, Op, InPlace, InitBy),
+ insert_ops(Tid, Storage, Ops, InPlace, InitBy, Ver);
+insert_ops(Tid, Storage, [Op | Ops], InPlace, InitBy, Ver) when Ver < "4.3" ->
+ insert_ops(Tid, Storage, Ops, InPlace, InitBy, Ver),
+ insert_op(Tid, Storage, Op, InPlace, InitBy).
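+%% Note on the version guards above (illustration only): for a log of version
+%% 4.3 or later the ops are applied in the order they appear, while for an
+%% older log [Op1, Op2, Op3] the recursion applies them as Op3, Op2, Op1.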
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Normal ops
+
+disc_insert(_Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy) ->
+ case open_files(Tab, Storage, InPlace, InitBy) of
+ true ->
+ case Storage of
+ disc_copies when Tab /= schema ->
+ mnesia_log:append({?MODULE,Tab}, {{Tab, Key}, Val, Op}),
+ ok;
+ _ ->
+ case Op of
+ write ->
+ ok = dets:insert(Tab, Val);
+ delete ->
+ ok = dets:delete(Tab, Key);
+ update_counter ->
+ {RecName, Incr} = Val,
+ case catch dets:update_counter(Tab, Key, Incr) of
+ CounterVal when integer(CounterVal) ->
+ ok;
+ _ ->
+ Zero = {RecName, Key, 0},
+ ok = dets:insert(Tab, Zero)
+ end;
+ delete_object ->
+ ok = dets:delete_object(Tab, Val);
+ clear_table ->
+ ok = dets:match_delete(Tab, '_')
+ end
+ end;
+ false ->
+ ignore
+ end.
+
+insert(Tid, Storage, Tab, Key, [Val | Tail], Op, InPlace, InitBy) ->
+ insert(Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy),
+ insert(Tid, Storage, Tab, Key, Tail, Op, InPlace, InitBy);
+
+insert(_Tid, _Storage, _Tab, _Key, [], _Op, _InPlace, _InitBy) ->
+ ok;
+
+insert(Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy) ->
+ Item = {{Tab, Key}, Val, Op},
+ case InitBy of
+ startup ->
+ disc_insert(Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy);
+
+ _ when Storage == ram_copies ->
+ mnesia_tm:do_update_op(Tid, Storage, Item),
+ Snmp = mnesia_tm:prepare_snmp(Tab, Key, [Item]),
+ mnesia_tm:do_snmp(Tid, Snmp);
+
+ _ when Storage == disc_copies ->
+ disc_insert(Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy),
+ mnesia_tm:do_update_op(Tid, Storage, Item),
+ Snmp = mnesia_tm:prepare_snmp(Tab, Key, [Item]),
+ mnesia_tm:do_snmp(Tid, Snmp);
+
+ _ when Storage == disc_only_copies ->
+ mnesia_tm:do_update_op(Tid, Storage, Item),
+ Snmp = mnesia_tm:prepare_snmp(Tab, Key, [Item]),
+ mnesia_tm:do_snmp(Tid, Snmp);
+
+ _ when Storage == unknown ->
+ ignore
+ end.
+
+disc_delete_table(Tab, Storage) ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ if
+ Storage == disc_only_copies; Tab == schema ->
+ mnesia_monitor:unsafe_close_dets(Tab),
+ Dat = mnesia_lib:tab2dat(Tab),
+ file:delete(Dat);
+ true ->
+ DclFile = mnesia_lib:tab2dcl(Tab),
+ case get({?MODULE,Tab}) of
+ {opened_dumper, dcl} ->
+ del_opened_tab(Tab),
+ mnesia_log:unsafe_close_log(Tab);
+ _ ->
+ ok
+ end,
+ file:delete(DclFile),
+ DcdFile = mnesia_lib:tab2dcd(Tab),
+ file:delete(DcdFile),
+ ok
+ end,
+ erase({?MODULE, Tab});
+ false ->
+ ignore
+ end.
+
+disc_delete_indecies(_Tab, _Cs, Storage) when Storage /= disc_only_copies ->
+ ignore;
+disc_delete_indecies(Tab, Cs, disc_only_copies) ->
+ Indecies = Cs#cstruct.index,
+ mnesia_index:del_transient(Tab, Indecies, disc_only_copies).
+
+insert_op(Tid, Storage, {{Tab, Key}, Val, Op}, InPlace, InitBy) ->
+ %% Propagate to disc only
+ disc_insert(Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy);
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% NOTE that all operations below will only
+%% be performed if the dump is initiated by
+%% startup or fast_schema_update
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+insert_op(_Tid, schema_ops, _OP, _InPlace, Initby)
+ when Initby /= startup,
+ Initby /= fast_schema_update,
+ Initby /= schema_update ->
+ ignore;
+
+insert_op(Tid, _, {op, rec, Storage, Item}, InPlace, InitBy) ->
+ {{Tab, Key}, ValList, Op} = Item,
+ insert(Tid, Storage, Tab, Key, ValList, Op, InPlace, InitBy);
+
+insert_op(Tid, _, {op, change_table_copy_type, N, FromS, ToS, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ Val = mnesia_schema:insert_cstruct(Tid, Cs, true), % Update ram only
+ {schema, Tab, _} = Val,
+ if
+ InitBy /= startup ->
+ mnesia_controller:add_active_replica(Tab, N, Cs);
+ true ->
+ ignore
+ end,
+ if
+ N == node() ->
+ Dmp = mnesia_lib:tab2dmp(Tab),
+ Dat = mnesia_lib:tab2dat(Tab),
+ Dcd = mnesia_lib:tab2dcd(Tab),
+ Dcl = mnesia_lib:tab2dcl(Tab),
+ case {FromS, ToS} of
+ {ram_copies, disc_copies} when Tab == schema ->
+ ok = ensure_rename(Dmp, Dat);
+ {ram_copies, disc_copies} ->
+ file:delete(Dcl),
+ ok = ensure_rename(Dmp, Dcd);
+ {disc_copies, ram_copies} when Tab == schema ->
+ mnesia_lib:set(use_dir, false),
+ mnesia_monitor:unsafe_close_dets(Tab),
+ file:delete(Dat);
+ {disc_copies, ram_copies} ->
+ file:delete(Dcl),
+ file:delete(Dcd);
+ {ram_copies, disc_only_copies} ->
+ ok = ensure_rename(Dmp, Dat),
+ true = open_files(Tab, disc_only_copies, InPlace, InitBy),
+ %% ram_delete_table must be done before init_indecies;
+ %% it uses info which is reset in init_indecies.
+ %% That doesn't matter, because init_indecies doesn't use
+ %% the ram replica of the table when creating the disc
+ %% index. Could be improved :)
+ mnesia_schema:ram_delete_table(Tab, FromS),
+ PosList = Cs#cstruct.index,
+ mnesia_index:init_indecies(Tab, disc_only_copies, PosList);
+ {disc_only_copies, ram_copies} ->
+ mnesia_monitor:unsafe_close_dets(Tab),
+ disc_delete_indecies(Tab, Cs, disc_only_copies),
+ case InitBy of
+ startup ->
+ ignore;
+ _ ->
+ mnesia_controller:get_disc_copy(Tab)
+ end,
+ disc_delete_table(Tab, disc_only_copies);
+ {disc_copies, disc_only_copies} ->
+ ok = ensure_rename(Dmp, Dat),
+ true = open_files(Tab, disc_only_copies, InPlace, InitBy),
+ mnesia_schema:ram_delete_table(Tab, FromS),
+ PosList = Cs#cstruct.index,
+ mnesia_index:init_indecies(Tab, disc_only_copies, PosList),
+ file:delete(Dcl),
+ file:delete(Dcd);
+ {disc_only_copies, disc_copies} ->
+ mnesia_monitor:unsafe_close_dets(Tab),
+ disc_delete_indecies(Tab, Cs, disc_only_copies),
+ case InitBy of
+ startup ->
+ ignore;
+ _ ->
+ mnesia_log:ets2dcd(Tab),
+ mnesia_controller:get_disc_copy(Tab),
+ disc_delete_table(Tab, disc_only_copies)
+ end
+ end;
+ true ->
+ ignore
+ end,
+ S = val({schema, storage_type}),
+ disc_insert(Tid, S, schema, Tab, Val, write, InPlace, InitBy);
+
+insert_op(Tid, _, {op, transform, _Fun, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ case mnesia_lib:cs_to_storage_type(node(), Cs) of
+ disc_copies ->
+ open_dcl(Cs#cstruct.name);
+ _ ->
+ ignore
+ end,
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy);
+
+%%% Operations below this point are handled without using the log.
+
+insert_op(Tid, _, {op, restore_recreate, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ Type = Cs#cstruct.type,
+ Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
+ %% Delete all possibly existing files and tables
+ disc_delete_table(Tab, Storage),
+ disc_delete_indecies(Tab, Cs, Storage),
+ case InitBy of
+ startup ->
+ ignore;
+ _ ->
+ mnesia_schema:ram_delete_table(Tab, Storage),
+ mnesia_checkpoint:tm_del_copy(Tab, node())
+ end,
+ %% delete_cstruct(Tid, Cs, InPlace, InitBy),
+ %% And create new ones..
+ if
+ (InitBy == startup) or (Storage == unknown) ->
+ ignore;
+ Storage == ram_copies ->
+ Args = [{keypos, 2}, public, named_table, Type],
+ mnesia_monitor:mktab(Tab, Args);
+ Storage == disc_copies ->
+ Args = [{keypos, 2}, public, named_table, Type],
+ mnesia_monitor:mktab(Tab, Args),
+ File = mnesia_lib:tab2dcd(Tab),
+ FArg = [{file, File}, {name, {mnesia,create}},
+ {repair, false}, {mode, read_write}],
+ {ok, Log} = mnesia_monitor:open_log(FArg),
+ mnesia_monitor:unsafe_close_log(Log);
+ Storage == disc_only_copies ->
+ File = mnesia_lib:tab2dat(Tab),
+ file:delete(File),
+ Args = [{file, mnesia_lib:tab2dat(Tab)},
+ {type, mnesia_lib:disk_type(Tab, Type)},
+ {keypos, 2},
+ {repair, mnesia_monitor:get_env(auto_repair)}],
+ mnesia_monitor:open_dets(Tab, Args)
+ end,
+ insert_op(Tid, ignore, {op, create_table, TabDef}, InPlace, InitBy);
+
+insert_op(Tid, _, {op, create_table, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ insert_cstruct(Tid, Cs, false, InPlace, InitBy),
+ Tab = Cs#cstruct.name,
+ Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
+ case InitBy of
+ startup ->
+ case Storage of
+ unknown ->
+ ignore;
+ ram_copies ->
+ ignore;
+ disc_copies ->
+ Dcd = mnesia_lib:tab2dcd(Tab),
+ case mnesia_lib:exists(Dcd) of
+ true -> ignore;
+ false ->
+ mnesia_log:open_log(temp,
+ mnesia_log:dcl_log_header(),
+ Dcd,
+ false,
+ false,
+ read_write),
+ mnesia_log:unsafe_close_log(temp)
+ end;
+ _ ->
+ Args = [{file, mnesia_lib:tab2dat(Tab)},
+ {type, mnesia_lib:disk_type(Tab, Cs#cstruct.type)},
+ {keypos, 2},
+ {repair, mnesia_monitor:get_env(auto_repair)}],
+ case mnesia_monitor:open_dets(Tab, Args) of
+ {ok, _} ->
+ mnesia_monitor:unsafe_close_dets(Tab);
+ {error, Error} ->
+ exit({"Failed to create dets table", Error})
+ end
+ end;
+ _ ->
+ Copies = mnesia_lib:copy_holders(Cs),
+ Active = mnesia_lib:intersect(Copies, val({current, db_nodes})),
+ [mnesia_controller:add_active_replica(Tab, N, Cs) || N <- Active],
+
+ case Storage of
+ unknown ->
+ case Cs#cstruct.local_content of
+ true ->
+ ignore;
+ false ->
+ mnesia_lib:set_remote_where_to_read(Tab)
+ end;
+ _ ->
+ case Cs#cstruct.local_content of
+ true ->
+ mnesia_lib:set_local_content_whereabouts(Tab);
+ false ->
+ mnesia_lib:set({Tab, where_to_read}, node())
+ end,
+ case Storage of
+ ram_copies ->
+ ignore;
+ _ ->
+ %% Indices are still created by the loader
+ disc_delete_indecies(Tab, Cs, Storage)
+ %% disc_delete_table(Tab, Storage)
+ end,
+
+ %% Update whereabouts and create table
+ mnesia_controller:create_table(Tab)
+ end
+ end;
+
+insert_op(_Tid, _, {op, dump_table, Size, TabDef}, _InPlace, _InitBy) ->
+ case Size of
+ unknown ->
+ ignore;
+ _ ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ Dmp = mnesia_lib:tab2dmp(Tab),
+ Dat = mnesia_lib:tab2dcd(Tab),
+ case Size of
+ 0 ->
+ %% Assume that the table files are already closed
+ file:delete(Dmp),
+ file:delete(Dat);
+ _ ->
+ ok = ensure_rename(Dmp, Dat)
+ end
+ end;
+
+insert_op(Tid, _, {op, delete_table, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ case mnesia_lib:cs_to_storage_type(node(), Cs) of
+ unknown ->
+ ignore;
+ Storage ->
+ disc_delete_table(Tab, Storage),
+ disc_delete_indecies(Tab, Cs, Storage),
+ case InitBy of
+ startup ->
+ ignore;
+ _ ->
+ mnesia_schema:ram_delete_table(Tab, Storage),
+ mnesia_checkpoint:tm_del_copy(Tab, node())
+ end
+ end,
+ delete_cstruct(Tid, Cs, InPlace, InitBy);
+
+insert_op(Tid, _, {op, clear_table, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ case mnesia_lib:cs_to_storage_type(node(), Cs) of
+ unknown ->
+ ignore;
+ Storage ->
+ Oid = '_', %%val({Tab, wild_pattern}),
+ if Storage == disc_copies ->
+ open_dcl(Cs#cstruct.name);
+ true ->
+ ignore
+ end,
+ insert(Tid, Storage, Tab, '_', Oid, clear_table, InPlace, InitBy)
+ end;
+
+insert_op(Tid, _, {op, merge_schema, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ insert_cstruct(Tid, Cs, false, InPlace, InitBy);
+
+insert_op(Tid, _, {op, del_table_copy, Storage, Node, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ if
+ Tab == schema, Storage == ram_copies ->
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy);
+ Tab /= schema ->
+ mnesia_controller:del_active_replica(Tab, Node),
+ mnesia_lib:del({Tab, Storage}, Node),
+ if
+ Node == node() ->
+ case Cs#cstruct.local_content of
+ true -> mnesia_lib:set({Tab, where_to_read}, nowhere);
+ false -> mnesia_lib:set_remote_where_to_read(Tab)
+ end,
+ mnesia_lib:del({schema, local_tables}, Tab),
+ mnesia_lib:set({Tab, storage_type}, unknown),
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy),
+ disc_delete_table(Tab, Storage),
+ disc_delete_indecies(Tab, Cs, Storage),
+ mnesia_schema:ram_delete_table(Tab, Storage),
+ mnesia_checkpoint:tm_del_copy(Tab, Node);
+ true ->
+ case val({Tab, where_to_read}) of
+ Node ->
+ mnesia_lib:set_remote_where_to_read(Tab);
+ _ ->
+ ignore
+ end,
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy)
+ end
+ end;
+
+insert_op(Tid, _, {op, add_table_copy, _Storage, _Node, TabDef}, InPlace, InitBy) ->
+ %% During prepare commit, the files were created
+ %% and the replica was announced
+ Cs = mnesia_schema:list2cs(TabDef),
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy);
+
+insert_op(Tid, _, {op, add_snmp, _Us, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy);
+
+insert_op(Tid, _, {op, del_snmp, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
+ if
+ InitBy /= startup,
+ Storage /= unknown ->
+ case ?catch_val({Tab, {index, snmp}}) of
+ {'EXIT', _} ->
+ ignore;
+ Stab ->
+ mnesia_snmp_hook:delete_table(Tab, Stab),
+ mnesia_lib:unset({Tab, {index, snmp}})
+ end;
+ true ->
+ ignore
+ end,
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy);
+
+insert_op(Tid, _, {op, add_index, Pos, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ Tab = insert_cstruct(Tid, Cs, true, InPlace, InitBy),
+ Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
+ case InitBy of
+ startup when Storage == disc_only_copies ->
+ mnesia_index:init_indecies(Tab, Storage, [Pos]);
+ startup ->
+ ignore;
+ _ ->
+ mnesia_index:init_indecies(Tab, Storage, [Pos])
+ end;
+
+insert_op(Tid, _, {op, del_index, Pos, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
+ case InitBy of
+ startup when Storage == disc_only_copies ->
+ mnesia_index:del_index_table(Tab, Storage, Pos);
+ startup ->
+ ignore;
+ _ ->
+ mnesia_index:del_index_table(Tab, Storage, Pos)
+ end,
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy);
+
+insert_op(Tid, _, {op, change_table_access_mode,TabDef, _OldAccess, _Access}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ case InitBy of
+ startup -> ignore;
+ _ -> mnesia_controller:change_table_access_mode(Cs)
+ end,
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy);
+
+insert_op(Tid, _, {op, change_table_load_order, TabDef, _OldLevel, _Level}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy);
+
+insert_op(Tid, _, {op, delete_property, TabDef, PropKey}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ mnesia_lib:unset({Tab, user_property, PropKey}),
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy);
+
+insert_op(Tid, _, {op, write_property, TabDef, _Prop}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy);
+
+insert_op(Tid, _, {op, change_table_frag, _Change, TabDef}, InPlace, InitBy) ->
+ Cs = mnesia_schema:list2cs(TabDef),
+ insert_cstruct(Tid, Cs, true, InPlace, InitBy).
+
+open_files(Tab, Storage, UpdateInPlace, InitBy)
+ when Storage /= unknown, Storage /= ram_copies ->
+ case get({?MODULE, Tab}) of
+ undefined ->
+ case ?catch_val({Tab, setorbag}) of
+ {'EXIT', _} ->
+ false;
+ Type ->
+ case Storage of
+ disc_copies when Tab /= schema ->
+ Bool = open_disc_copies(Tab, InitBy),
+ Bool;
+ _ ->
+ Fname = prepare_open(Tab, UpdateInPlace),
+ Args = [{file, Fname},
+ {keypos, 2},
+ {repair, mnesia_monitor:get_env(auto_repair)},
+ {type, mnesia_lib:disk_type(Tab, Type)}],
+ {ok, _} = mnesia_monitor:open_dets(Tab, Args),
+ put({?MODULE, Tab}, {opened_dumper, dat}),
+ true
+ end
+ end;
+ already_dumped ->
+ false;
+ {opened_dumper, _} ->
+ true
+ end;
+open_files(_Tab, _Storage, _UpdateInPlace, _InitBy) ->
+ false.
+
+open_disc_copies(Tab, InitBy) ->
+ DclF = mnesia_lib:tab2dcl(Tab),
+ DumpEts =
+ case file:read_file_info(DclF) of
+ {error, enoent} ->
+ false;
+ {ok, DclInfo} ->
+ DcdF = mnesia_lib:tab2dcd(Tab),
+ case file:read_file_info(DcdF) of
+ {error, Reason} ->
+ mnesia_lib:dbg_out("File ~p info_error ~p ~n",
+ [DcdF, Reason]),
+ true;
+ {ok, DcdInfo} ->
+ DcdInfo#file_info.size =<
+ (DclInfo#file_info.size *
+ ?DumpToEtsMultiplier)
+ end
+ end,
+ if
+ DumpEts == false; InitBy == startup ->
+ mnesia_log:open_log({?MODULE,Tab},
+ mnesia_log:dcl_log_header(),
+ DclF,
+ mnesia_lib:exists(DclF),
+ mnesia_monitor:get_env(auto_repair),
+ read_write),
+ put({?MODULE, Tab}, {opened_dumper, dcl}),
+ true;
+ true ->
+ mnesia_log:ets2dcd(Tab),
+ put({?MODULE, Tab}, already_dumped),
+ false
+ end.
+
+%% Always opens the dcl file for writing, overriding the already_dumped
+%% mechanism; used for schema transactions.
+open_dcl(Tab) ->
+ case get({?MODULE, Tab}) of
+ {opened_dumper, _} ->
+ true;
+ _ -> %% undefined or already_dumped
+ DclF = mnesia_lib:tab2dcl(Tab),
+ mnesia_log:open_log({?MODULE,Tab},
+ mnesia_log:dcl_log_header(),
+ DclF,
+ mnesia_lib:exists(DclF),
+ mnesia_monitor:get_env(auto_repair),
+ read_write),
+ put({?MODULE, Tab}, {opened_dumper, dcl}),
+ true
+ end.
+
+prepare_open(Tab, UpdateInPlace) ->
+ Dat = mnesia_lib:tab2dat(Tab),
+ case UpdateInPlace of
+ true ->
+ Dat;
+ false ->
+ Tmp = mnesia_lib:tab2tmp(Tab),
+ case catch mnesia_lib:copy_file(Dat, Tmp) of
+ ok ->
+ Tmp;
+ Error ->
+ fatal("Cannot copy dets file ~p to ~p: ~p~n",
+ [Dat, Tmp, Error])
+ end
+ end.
+
+del_opened_tab(Tab) ->
+ erase({?MODULE, Tab}).
+
+close_files(UpdateInPlace, Outcome, InitBy) -> % Update in place
+ close_files(UpdateInPlace, Outcome, InitBy, get()).
+
+close_files(InPlace, Outcome, InitBy, [{{?MODULE, Tab}, already_dumped} | Tail]) ->
+ erase({?MODULE, Tab}),
+ close_files(InPlace, Outcome, InitBy, Tail);
+close_files(InPlace, Outcome, InitBy, [{{?MODULE, Tab}, {opened_dumper, Type}} | Tail]) ->
+ erase({?MODULE, Tab}),
+ case val({Tab, storage_type}) of
+ disc_only_copies when InitBy /= startup ->
+ ignore;
+ disc_copies when Tab /= schema ->
+ mnesia_log:close_log({?MODULE,Tab});
+ Storage ->
+ do_close(InPlace, Outcome, Tab, Type, Storage)
+ end,
+ close_files(InPlace, Outcome, InitBy, Tail);
+
+close_files(InPlace, Outcome, InitBy, [_ | Tail]) ->
+ close_files(InPlace, Outcome, InitBy, Tail);
+close_files(_, _, _InitBy, []) ->
+ ok.
+
+%% If the storage is unknown during close, clean up the files; this can happen
+%% when the timing is right and a dirty_write conflicts with schema operations.
+do_close(_, _, Tab, dcl, unknown) ->
+ mnesia_log:close_log({?MODULE,Tab}),
+ file:delete(mnesia_lib:tab2dcl(Tab));
+do_close(_, _, Tab, dcl, _) -> %% To be safe, can it happen?
+ mnesia_log:close_log({?MODULE,Tab});
+
+do_close(InPlace, Outcome, Tab, dat, Storage) ->
+ mnesia_monitor:close_dets(Tab),
+ if
+ Storage == unknown, InPlace == true ->
+ file:delete(mnesia_lib:tab2dat(Tab));
+ InPlace == true ->
+ %% Update in place
+ ok;
+ Outcome == ok, Storage /= unknown ->
+ %% Success: swap tmp files with dat files
+ TabDat = mnesia_lib:tab2dat(Tab),
+ ok = file:rename(mnesia_lib:tab2tmp(Tab), TabDat);
+ true ->
+ file:delete(mnesia_lib:tab2tmp(Tab))
+ end.
+
+
+ensure_rename(From, To) ->
+ case mnesia_lib:exists(From) of
+ true ->
+ file:rename(From, To);
+ false ->
+ case mnesia_lib:exists(To) of
+ true ->
+ ok;
+ false ->
+ {error, {rename_failed, From, To}}
+ end
+ end.
+
+insert_cstruct(Tid, Cs, KeepWhereabouts, InPlace, InitBy) ->
+ Val = mnesia_schema:insert_cstruct(Tid, Cs, KeepWhereabouts),
+ {schema, Tab, _} = Val,
+ S = val({schema, storage_type}),
+ disc_insert(Tid, S, schema, Tab, Val, write, InPlace, InitBy),
+ Tab.
+
+delete_cstruct(Tid, Cs, InPlace, InitBy) ->
+ Val = mnesia_schema:delete_cstruct(Tid, Cs),
+ {schema, Tab, _} = Val,
+ S = val({schema, storage_type}),
+ disc_insert(Tid, S, schema, Tab, Val, delete, InPlace, InitBy),
+ Tab.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Raw dump of table. Dumper must have unique access to the ets table.
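+%%
+%% Illustrative walk-through (hypothetical table name, derived from the code
+%% below): raw_named_dump_table(my_tab, dmp) locks my_tab, dumps its ets
+%% contents into a temporary dets file via dets:from_ets/2 and finally
+%% renames the temporary file onto the file given by mnesia_lib:tab2dmp/1.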
+
+raw_named_dump_table(Tab, Ftype) ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ mnesia_lib:lock_table(Tab),
+ TmpFname = mnesia_lib:tab2tmp(Tab),
+ Fname =
+ case Ftype of
+ dat -> mnesia_lib:tab2dat(Tab);
+ dmp -> mnesia_lib:tab2dmp(Tab)
+ end,
+ file:delete(TmpFname),
+ file:delete(Fname),
+ TabSize = ?ets_info(Tab, size),
+ TabRef = Tab,
+ DiskType = mnesia_lib:disk_type(Tab),
+ Args = [{file, TmpFname},
+ {keypos, 2},
+ %% {ram_file, true},
+ {estimated_no_objects, TabSize + 256},
+ {repair, mnesia_monitor:get_env(auto_repair)},
+ {type, DiskType}],
+ case mnesia_lib:dets_sync_open(TabRef, Args) of
+ {ok, TabRef} ->
+ Storage = ram_copies,
+ mnesia_lib:db_fixtable(Storage, Tab, true),
+
+ case catch raw_dump_table(TabRef, Tab) of
+ {'EXIT', Reason} ->
+ mnesia_lib:db_fixtable(Storage, Tab, false),
+ mnesia_lib:dets_sync_close(Tab),
+ file:delete(TmpFname),
+ mnesia_lib:unlock_table(Tab),
+ exit({"Dump of table to disc failed", Reason});
+ ok ->
+ mnesia_lib:db_fixtable(Storage, Tab, false),
+ mnesia_lib:dets_sync_close(Tab),
+ mnesia_lib:unlock_table(Tab),
+ ok = file:rename(TmpFname, Fname)
+ end;
+ {error, Reason} ->
+ mnesia_lib:unlock_table(Tab),
+ exit({"Open of file before dump to disc failed", Reason})
+ end;
+ false ->
+ exit({has_no_disc, node()})
+ end.
+
+raw_dump_table(DetsRef, EtsRef) ->
+ dets:from_ets(DetsRef, EtsRef).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Load regulator
+%%
+%% This is a poor man's substitute for a fair scheduler algorithm
+%% in the Erlang emulator. The mnesia_dumper process performs many
+%% costly BIF invocations and must pay for this. But since the
+%% emulator does not handle this properly, we must compensate with
+%% some form of load regulation of our own in order not to steal
+%% all computation power in the Erlang emulator and make other
+%% processes starve. Hopefully this is a temporary solution.
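+%%
+%% Usage sketch (the names are those defined below): the dumper keeps the
+%% pid returned by start_regulator() and calls regulate(RegulatorPid)
+%% between chunks of work, blocking until the low-priority regulator
+%% process replies with {regulated, RegulatorPid}. When regulation is
+%% disabled, start_regulator() returns nopid and regulate(nopid) is a no-op.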
+
+start_regulator() ->
+ case mnesia_monitor:get_env(dump_log_load_regulation) of
+ false ->
+ nopid;
+ true ->
+ N = ?REGULATOR_NAME,
+ case mnesia_monitor:start_proc(N, ?MODULE, regulator_init, [self()]) of
+ {ok, Pid} ->
+ Pid;
+ {error, Reason} ->
+ fatal("Failed to start ~p: ~p~n", [N, Reason])
+ end
+ end.
+
+regulator_init(Parent) ->
+ %% No need for trapping exits.
+ %% Running at low priority is what provides the regulation.
+ process_flag(priority, low),
+ register(?REGULATOR_NAME, self()),
+ proc_lib:init_ack(Parent, {ok, self()}),
+ regulator_loop().
+
+regulator_loop() ->
+ receive
+ {regulate, From} ->
+ From ! {regulated, self()},
+ regulator_loop();
+ {stop, From} ->
+ From ! {stopped, self()},
+ exit(normal)
+ end.
+
+regulate(nopid) ->
+ ok;
+regulate(RegulatorPid) ->
+ RegulatorPid ! {regulate, self()},
+ receive
+ {regulated, RegulatorPid} -> ok
+ end.
+
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason);
+ Value -> Value
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_event.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_event.erl
new file mode 100644
index 0000000000..6053179194
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_event.erl
@@ -0,0 +1,260 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_event.erl,v 1.1 2008/12/17 09:53:38 mikpe Exp $
+%%
+-module(mnesia_event).
+
+-behaviour(gen_event).
+%-behaviour(mnesia_event).
+
+%% gen_event callback interface
+-export([init/1,
+ handle_event/2,
+ handle_call/2,
+ handle_info/2,
+ terminate/2,
+ code_change/3]).
+
+-record(state, {nodes = [],
+ dumped_core = false, %% only dump fatal core once
+ args}).
+
+%%%----------------------------------------------------------------
+%%% Callback functions from gen_event
+%%%----------------------------------------------------------------
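+
+%% For context (an assumption based on standard Mnesia usage, not stated in
+%% this file): this module is typically installed as Mnesia's default event
+%% handler via the event_module configuration parameter, and the events it
+%% receives correspond to mnesia:subscribe/1 subscriptions such as system
+%% and {table, Tab}.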
+
+%%-----------------------------------------------------------------
+%% init(Args) ->
+%% {ok, State} | Error
+%%-----------------------------------------------------------------
+
+init(Args) ->
+ {ok, #state{args = Args}}.
+
+%%-----------------------------------------------------------------
+%% handle_event(Event, State) ->
+%% {ok, NewState} | remove_handler |
+%% {swap_handler, Args1, State1, Mod2, Args2}
+%%-----------------------------------------------------------------
+
+handle_event(Event, State) ->
+ handle_any_event(Event, State).
+
+%%-----------------------------------------------------------------
+%% handle_info(Msg, State) ->
+%% {ok, NewState} | remove_handler |
+%% {swap_handler, Args1, State1, Mod2, Args2}
+%%-----------------------------------------------------------------
+
+handle_info(Msg, State) ->
+ handle_any_event(Msg, State),
+ {ok, State}.
+
+%%-----------------------------------------------------------------
+%% handle_call(Event, State) ->
+%% {ok, Reply, NewState} | {remove_handler, Reply} |
+%% {swap_handler, Reply, Args1, State1, Mod2, Args2}
+%%-----------------------------------------------------------------
+
+handle_call(Msg, State) ->
+ Reply = ok,
+ case handle_any_event(Msg, State) of
+ {ok, NewState} ->
+ {ok, Reply, NewState};
+ remove_handler ->
+ {remove_handler, Reply};
+ {swap_handler,Args1, State1, Mod2, Args2} ->
+ {swap_handler, Reply, Args1, State1, Mod2, Args2}
+ end.
+
+%%-----------------------------------------------------------------
+%% terminate(Reason, State) ->
+%% AnyVal
+%%-----------------------------------------------------------------
+
+terminate(_Reason, _State) ->
+ ok.
+
+%%----------------------------------------------------------------------
+%% Func: code_change/3
+%% Purpose: Upgrade process when its code is to be changed
+%% Returns: {ok, NewState}
+%%----------------------------------------------------------------------
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+%%-----------------------------------------------------------------
+%% Internal functions
+%%-----------------------------------------------------------------
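+
+%% Illustrative event shapes (derived from the clauses below; the values are
+%% made up): a table event arrives wrapped as
+%% {mnesia_table_event, {write, {my_tab, Key, Val}, Tid}} and a system event
+%% as {mnesia_system_event, {mnesia_up, Node}}.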
+
+handle_any_event({mnesia_system_event, Event}, State) ->
+ handle_system_event(Event, State);
+handle_any_event({mnesia_table_event, Event}, State) ->
+ handle_table_event(Event, State);
+handle_any_event(Msg, State) ->
+ report_error("~p got unexpected event: ~p~n", [?MODULE, Msg]),
+ {ok, State}.
+
+handle_table_event({Oper, Record, TransId}, State) ->
+ report_info("~p performed by ~p on record:~n\t~p~n",
+ [Oper, TransId, Record]),
+ {ok, State}.
+
+handle_system_event({mnesia_checkpoint_activated, _Checkpoint}, State) ->
+ {ok, State};
+
+handle_system_event({mnesia_checkpoint_deactivated, _Checkpoint}, State) ->
+ {ok, State};
+
+handle_system_event({mnesia_up, Node}, State) ->
+ Nodes = [Node | State#state.nodes],
+ {ok, State#state{nodes = Nodes}};
+
+handle_system_event({mnesia_down, Node}, State) ->
+ case mnesia:system_info(fallback_activated) of
+ true ->
+ case mnesia_monitor:get_env(fallback_error_function) of
+ {mnesia, lkill} ->
+ Msg = "A fallback is installed and Mnesia "
+ "must be restarted. Forcing shutdown "
+ "after mnesia_down from ~p...~n",
+ report_fatal(Msg, [Node], nocore, State#state.dumped_core),
+ mnesia:lkill(),
+ exit(fatal);
+ {UserMod, UserFunc} ->
+ Msg = "Warning: A fallback is installed and Mnesia got mnesia_down "
+ "from ~p. ~n",
+ report_info(Msg, [Node]),
+ case catch apply(UserMod, UserFunc, [Node]) of
+ {'EXIT', {undef, _Reason}} ->
+ %% Backward compatibility
+ apply(UserMod, UserFunc, []);
+ {'EXIT', Reason} ->
+ exit(Reason);
+ _ ->
+ ok
+ end,
+ Nodes = lists:delete(Node, State#state.nodes),
+ {ok, State#state{nodes = Nodes}}
+ end;
+ false ->
+ Nodes = lists:delete(Node, State#state.nodes),
+ {ok, State#state{nodes = Nodes}}
+ end;
+
+handle_system_event({mnesia_overload, Details}, State) ->
+ report_warning("Mnesia is overloaded: ~p~n", [Details]),
+ {ok, State};
+
+handle_system_event({mnesia_info, Format, Args}, State) ->
+ report_info(Format, Args),
+ {ok, State};
+
+handle_system_event({mnesia_warning, Format, Args}, State) ->
+ report_warning(Format, Args),
+ {ok, State};
+
+handle_system_event({mnesia_error, Format, Args}, State) ->
+ report_error(Format, Args),
+ {ok, State};
+
+handle_system_event({mnesia_fatal, Format, Args, BinaryCore}, State) ->
+ report_fatal(Format, Args, BinaryCore, State#state.dumped_core),
+ {ok, State#state{dumped_core = true}};
+
+handle_system_event({inconsistent_database, Reason, Node}, State) ->
+ report_error("mnesia_event got {inconsistent_database, ~w, ~w}~n",
+ [Reason, Node]),
+ {ok, State};
+
+handle_system_event({mnesia_user, Event}, State) ->
+ report_info("User event: ~p~n", [Event]),
+ {ok, State};
+
+handle_system_event(Msg, State) ->
+ report_error("mnesia_event got unexpected system event: ~p~n", [Msg]),
+ {ok, State}.
+
+report_info(Format0, Args0) ->
+ Format = "Mnesia(~p): " ++ Format0,
+ Args = [node() | Args0],
+ case global:whereis_name(mnesia_global_logger) of
+ undefined ->
+ io:format(Format, Args);
+ Pid ->
+ io:format(Pid, Format, Args)
+ end.
+
+report_warning(Format0, Args0) ->
+ Format = "Mnesia(~p): ** WARNING ** " ++ Format0,
+ Args = [node() | Args0],
+ case erlang:function_exported(error_logger, warning_msg, 2) of
+ true ->
+ error_logger:warning_msg(Format, Args);
+ false ->
+ error_logger:format(Format, Args)
+ end,
+ case global:whereis_name(mnesia_global_logger) of
+ undefined ->
+ ok;
+ Pid ->
+ io:format(Pid, Format, Args)
+ end.
+
+report_error(Format0, Args0) ->
+ Format = "Mnesia(~p): ** ERROR ** " ++ Format0,
+ Args = [node() | Args0],
+ error_logger:format(Format, Args),
+ case global:whereis_name(mnesia_global_logger) of
+ undefined ->
+ ok;
+ Pid ->
+ io:format(Pid, Format, Args)
+ end.
+
+report_fatal(Format, Args, BinaryCore, CoreDumped) ->
+ UseDir = mnesia_monitor:use_dir(),
+ CoreDir = mnesia_monitor:get_env(core_dir),
+ if
+ list(CoreDir),CoreDumped == false,binary(BinaryCore) ->
+ core_file(CoreDir,BinaryCore,Format,Args);
+ (UseDir == true),CoreDumped == false,binary(BinaryCore) ->
+ core_file(CoreDir,BinaryCore,Format,Args);
+ true ->
+ report_error("(ignoring core) ** FATAL ** " ++ Format, Args)
+ end.
+
+core_file(CoreDir,BinaryCore,Format,Args) ->
+ %% Integers = tuple_to_list(date()) ++ tuple_to_list(time()),
+ Integers = tuple_to_list(now()),
+ Fun = fun(I) when I < 10 -> ["_0",I];
+ (I) -> ["_",I]
+ end,
+ List = lists:append([Fun(I) || I <- Integers]),
+ CoreFile = if list(CoreDir) ->
+ filename:absname(lists:concat(["MnesiaCore.", node()] ++ List),
+ CoreDir);
+ true ->
+ filename:absname(lists:concat(["MnesiaCore.", node()] ++ List))
+ end,
+ case file:write_file(CoreFile, BinaryCore) of
+ ok ->
+ report_error("(core dumped to file: ~p)~n ** FATAL ** " ++ Format,
+ [CoreFile] ++ Args);
+ {error, Reason} ->
+ report_error("(could not write core file: ~p)~n ** FATAL ** " ++ Format,
+ [Reason] ++ Args)
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_frag.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_frag.erl
new file mode 100644
index 0000000000..92ac51a0dc
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_frag.erl
@@ -0,0 +1,1201 @@
+%%% ``The contents of this file are subject to the Erlang Public License,
+%%% Version 1.1, (the "License"); you may not use this file except in
+%%% compliance with the License. You should have received a copy of the
+%%% Erlang Public License along with this software. If not, it can be
+%%% retrieved via the world wide web at http://www.erlang.org/.
+%%%
+%%% Software distributed under the License is distributed on an "AS IS"
+%%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%%% the License for the specific language governing rights and limitations
+%%% under the License.
+%%%
+%%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%%% AB. All Rights Reserved.''
+%%%
+%%% $Id: mnesia_frag.erl,v 1.1 2008/12/17 09:53:38 mikpe Exp $
+%%%
+%%%----------------------------------------------------------------------
+%%% Purpose : Support tables so large that they need
+%%% to be divided into several fragments.
+%%%----------------------------------------------------------------------
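+%%%
+%%% Naming note (an assumption based on common Mnesia conventions, see the
+%%% uses of n_to_frag_name/2 below): the first fragment keeps the base table
+%%% name Tab, while additional fragments get derived names such as Tab_frag2,
+%%% Tab_frag3 and so on.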
+
+%header_doc_include
+
+-module(mnesia_frag).
+-behaviour(mnesia_access).
+
+%% Callback functions when accessed within an activity
+-export([
+ lock/4,
+ write/5, delete/5, delete_object/5,
+ read/5, match_object/5, all_keys/4,
+ select/5,
+ index_match_object/6, index_read/6,
+ foldl/6, foldr/6,
+ table_info/4
+ ]).
+
+%header_doc_include
+
+-export([
+ change_table_frag/2,
+ remove_node/2,
+ expand_cstruct/1,
+ lookup_frag_hash/1,
+ lookup_foreigners/1,
+ frag_names/1,
+ set_frag_hash/2,
+ local_select/4,
+ remote_select/4
+ ]).
+
+-include("mnesia.hrl").
+
+-define(OLD_HASH_MOD, mnesia_frag_old_hash).
+-define(DEFAULT_HASH_MOD, mnesia_frag_hash).
+%%-define(DEFAULT_HASH_MOD, ?OLD_HASH_MOD). %% BUGBUG: New should be default
+
+-record(frag_state,
+ {foreign_key,
+ n_fragments,
+ hash_module,
+ hash_state}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Access functions
+
+%impl_doc_include
+
+%% Callback functions which provide transparent
+%% access to fragmented tables from any activity
+%% access context.
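+%%
+%% Hedged usage example (standard Mnesia API, assumed here and not defined
+%% in this file): these callbacks are invoked when mnesia_frag is selected
+%% as the access module, e.g.
+%%
+%%   mnesia:activity(transaction,
+%%                   fun() -> mnesia:read({my_frag_tab, Key}) end,
+%%                   [], mnesia_frag)
+%%
+%% where my_frag_tab and Key are placeholders.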
+
+lock(ActivityId, Opaque, {table , Tab}, LockKind) ->
+ case frag_names(Tab) of
+ [Tab] ->
+ mnesia:lock(ActivityId, Opaque, {table, Tab}, LockKind);
+ Frags ->
+ DeepNs = [mnesia:lock(ActivityId, Opaque, {table, F}, LockKind) ||
+ F <- Frags],
+ mnesia_lib:uniq(lists:append(DeepNs))
+ end;
+
+lock(ActivityId, Opaque, LockItem, LockKind) ->
+ mnesia:lock(ActivityId, Opaque, LockItem, LockKind).
+
+write(ActivityId, Opaque, Tab, Rec, LockKind) ->
+ Frag = record_to_frag_name(Tab, Rec),
+ mnesia:write(ActivityId, Opaque, Frag, Rec, LockKind).
+
+delete(ActivityId, Opaque, Tab, Key, LockKind) ->
+ Frag = key_to_frag_name(Tab, Key),
+ mnesia:delete(ActivityId, Opaque, Frag, Key, LockKind).
+
+delete_object(ActivityId, Opaque, Tab, Rec, LockKind) ->
+ Frag = record_to_frag_name(Tab, Rec),
+ mnesia:delete_object(ActivityId, Opaque, Frag, Rec, LockKind).
+
+read(ActivityId, Opaque, Tab, Key, LockKind) ->
+ Frag = key_to_frag_name(Tab, Key),
+ mnesia:read(ActivityId, Opaque, Frag, Key, LockKind).
+
+match_object(ActivityId, Opaque, Tab, HeadPat, LockKind) ->
+ MatchSpec = [{HeadPat, [], ['$_']}],
+ select(ActivityId, Opaque, Tab, MatchSpec, LockKind).
+
+select(ActivityId, Opaque, Tab, MatchSpec, LockKind) ->
+ do_select(ActivityId, Opaque, Tab, MatchSpec, LockKind).
+
+all_keys(ActivityId, Opaque, Tab, LockKind) ->
+ Match = [mnesia:all_keys(ActivityId, Opaque, Frag, LockKind)
+ || Frag <- frag_names(Tab)],
+ lists:append(Match).
+
+index_match_object(ActivityId, Opaque, Tab, Pat, Attr, LockKind) ->
+ Match =
+ [mnesia:index_match_object(ActivityId, Opaque, Frag, Pat, Attr, LockKind)
+ || Frag <- frag_names(Tab)],
+ lists:append(Match).
+
+index_read(ActivityId, Opaque, Tab, Key, Attr, LockKind) ->
+ Match =
+ [mnesia:index_read(ActivityId, Opaque, Frag, Key, Attr, LockKind)
+ || Frag <- frag_names(Tab)],
+ lists:append(Match).
+
+foldl(ActivityId, Opaque, Fun, Acc, Tab, LockKind) ->
+ Fun2 = fun(Frag, A) ->
+ mnesia:foldl(ActivityId, Opaque, Fun, A, Frag, LockKind)
+ end,
+ lists:foldl(Fun2, Acc, frag_names(Tab)).
+
+foldr(ActivityId, Opaque, Fun, Acc, Tab, LockKind) ->
+ Fun2 = fun(Frag, A) ->
+ mnesia:foldr(ActivityId, Opaque, Fun, A, Frag, LockKind)
+ end,
+ lists:foldr(Fun2, Acc, frag_names(Tab)).
+
+table_info(ActivityId, Opaque, {Tab, Key}, Item) ->
+ Frag = key_to_frag_name(Tab, Key),
+ table_info2(ActivityId, Opaque, Tab, Frag, Item);
+table_info(ActivityId, Opaque, Tab, Item) ->
+ table_info2(ActivityId, Opaque, Tab, Tab, Item).
+
+table_info2(ActivityId, Opaque, Tab, Frag, Item) ->
+ case Item of
+ size ->
+ SumFun = fun({_, Size}, Acc) -> Acc + Size end,
+ lists:foldl(SumFun, 0, frag_size(ActivityId, Opaque, Tab));
+ memory ->
+ SumFun = fun({_, Size}, Acc) -> Acc + Size end,
+ lists:foldl(SumFun, 0, frag_memory(ActivityId, Opaque, Tab));
+ base_table ->
+ lookup_prop(Tab, base_table);
+ node_pool ->
+ lookup_prop(Tab, node_pool);
+ n_fragments ->
+ FH = lookup_frag_hash(Tab),
+ FH#frag_state.n_fragments;
+ foreign_key ->
+ FH = lookup_frag_hash(Tab),
+ FH#frag_state.foreign_key;
+ foreigners ->
+ lookup_foreigners(Tab);
+ n_ram_copies ->
+ length(val({Tab, ram_copies}));
+ n_disc_copies ->
+ length(val({Tab, disc_copies}));
+ n_disc_only_copies ->
+ length(val({Tab, disc_only_copies}));
+
+ frag_names ->
+ frag_names(Tab);
+ frag_dist ->
+ frag_dist(Tab);
+ frag_size ->
+ frag_size(ActivityId, Opaque, Tab);
+ frag_memory ->
+ frag_memory(ActivityId, Opaque, Tab);
+ _ ->
+ mnesia:table_info(ActivityId, Opaque, Frag, Item)
+ end.
+%impl_doc_include
+
+frag_size(ActivityId, Opaque, Tab) ->
+ [{F, remote_table_info(ActivityId, Opaque, F, size)} || F <- frag_names(Tab)].
+
+frag_memory(ActivityId, Opaque, Tab) ->
+ [{F, remote_table_info(ActivityId, Opaque, F, memory)} || F <- frag_names(Tab)].
+
+
+
+remote_table_info(ActivityId, Opaque, Tab, Item) ->
+ N = val({Tab, where_to_read}),
+ case rpc:call(N, mnesia, table_info, [ActivityId, Opaque, Tab, Item]) of
+ {badrpc, _} ->
+ mnesia:abort({no_exists, Tab, Item});
+ Info ->
+ Info
+ end.
+
+do_select(ActivityId, Opaque, Tab, MatchSpec, LockKind) ->
+ case ?catch_val({Tab, frag_hash}) of
+ {'EXIT', _} ->
+ mnesia:select(ActivityId, Opaque, Tab, MatchSpec, LockKind);
+ FH ->
+ HashState = FH#frag_state.hash_state,
+ FragNumbers =
+ case FH#frag_state.hash_module of
+ HashMod when HashMod == ?DEFAULT_HASH_MOD ->
+ ?DEFAULT_HASH_MOD:match_spec_to_frag_numbers(HashState, MatchSpec);
+ HashMod ->
+ HashMod:match_spec_to_frag_numbers(HashState, MatchSpec)
+ end,
+ N = FH#frag_state.n_fragments,
+ VerifyFun = fun(F) when integer(F), F >= 1, F =< N -> false;
+ (_F) -> true
+ end,
+ case catch lists:filter(VerifyFun, FragNumbers) of
+ [] ->
+ Fun = fun(Num) ->
+ Name = n_to_frag_name(Tab, Num),
+ Node = val({Name, where_to_read}),
+ mnesia:lock(ActivityId, Opaque, {table, Name}, LockKind),
+ {Name, Node}
+ end,
+ NameNodes = lists:map(Fun, FragNumbers),
+ SelectAllFun =
+ fun(PatchedMatchSpec) ->
+ Match = [mnesia:dirty_select(Name, PatchedMatchSpec)
+ || {Name, _Node} <- NameNodes],
+ lists:append(Match)
+ end,
+ case [{Name, Node} || {Name, Node} <- NameNodes, Node /= node()] of
+ [] ->
+ %% All fragments are local
+ mnesia:fun_select(ActivityId, Opaque, Tab, MatchSpec, none, '_', SelectAllFun);
+ RemoteNameNodes ->
+ SelectFun =
+ fun(PatchedMatchSpec) ->
+ Ref = make_ref(),
+ Args = [self(), Ref, RemoteNameNodes, PatchedMatchSpec],
+ Pid = spawn_link(?MODULE, local_select, Args),
+ LocalMatch = [mnesia:dirty_select(Name, PatchedMatchSpec)
+ || {Name, Node} <- NameNodes, Node == node()],
+ OldSelectFun = fun() -> SelectAllFun(PatchedMatchSpec) end,
+ local_collect(Ref, Pid, lists:append(LocalMatch), OldSelectFun)
+ end,
+ mnesia:fun_select(ActivityId, Opaque, Tab, MatchSpec, none, '_', SelectFun)
+ end;
+ BadFrags ->
+ mnesia:abort({"match_spec_to_frag_numbers: Fragment numbers out of range",
+ BadFrags, {range, 1, N}})
+ end
+ end.
+
+local_select(ReplyTo, Ref, RemoteNameNodes, MatchSpec) ->
+ RemoteNodes = mnesia_lib:uniq([Node || {_Name, Node} <- RemoteNameNodes]),
+ Args = [ReplyTo, Ref, RemoteNameNodes, MatchSpec],
+ {Replies, BadNodes} = rpc:multicall(RemoteNodes, ?MODULE, remote_select, Args),
+ case mnesia_lib:uniq(Replies) -- [ok] of
+ [] when BadNodes == [] ->
+ ReplyTo ! {local_select, Ref, ok};
+ _ when BadNodes /= [] ->
+ ReplyTo ! {local_select, Ref, {error, {node_not_running, hd(BadNodes)}}};
+ [{badrpc, {'EXIT', Reason}} | _] ->
+ ReplyTo ! {local_select, Ref, {error, Reason}};
+ [Reason | _] ->
+ ReplyTo ! {local_select, Ref, {error, Reason}}
+ end,
+ unlink(ReplyTo),
+ exit(normal).
+
+remote_select(ReplyTo, Ref, NameNodes, MatchSpec) ->
+ do_remote_select(ReplyTo, Ref, NameNodes, MatchSpec).
+
+do_remote_select(ReplyTo, Ref, [{Name, Node} | NameNodes], MatchSpec) ->
+ if
+ Node == node() ->
+ Res = (catch {ok, mnesia:dirty_select(Name, MatchSpec)}),
+ ReplyTo ! {remote_select, Ref, Node, Res},
+ do_remote_select(ReplyTo, Ref, NameNodes, MatchSpec);
+ true ->
+ do_remote_select(ReplyTo, Ref, NameNodes, MatchSpec)
+ end;
+do_remote_select(_ReplyTo, _Ref, [], _MatchSpec) ->
+ ok.
+
+local_collect(Ref, Pid, LocalMatch, OldSelectFun) ->
+ receive
+ {local_select, Ref, LocalRes} ->
+ remote_collect(Ref, LocalRes, LocalMatch, OldSelectFun);
+ {'EXIT', Pid, Reason} ->
+ remote_collect(Ref, {error, Reason}, [], OldSelectFun)
+ end.
+
+remote_collect(Ref, LocalRes = ok, Acc, OldSelectFun) ->
+ receive
+ {remote_select, Ref, Node, RemoteRes} ->
+ case RemoteRes of
+ {ok, RemoteMatch} ->
+ remote_collect(Ref, LocalRes, RemoteMatch ++ Acc, OldSelectFun);
+ _ ->
+ remote_collect(Ref, {error, {node_not_running, Node}}, [], OldSelectFun)
+ end
+ after 0 ->
+ Acc
+ end;
+remote_collect(Ref, LocalRes = {error, Reason}, _Acc, OldSelectFun) ->
+ receive
+ {remote_select, Ref, _Node, _RemoteRes} ->
+ remote_collect(Ref, LocalRes, [], OldSelectFun)
+ after 0 ->
+ mnesia:abort(Reason)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Returns a list of cstructs
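+%%
+%% Sketch of the result (derived from expand_frag_cstructs/9 below): for a
+%% table created with n_fragments = N, the returned list holds one cstruct
+%% for the base table plus one per additional fragment, with the replica
+%% nodes picked from node_pool according to n_ram_copies, n_disc_copies and
+%% n_disc_only_copies.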
+
+expand_cstruct(Cs) ->
+ expand_cstruct(Cs, create).
+
+expand_cstruct(Cs, Mode) ->
+ Tab = Cs#cstruct.name,
+ Props = Cs#cstruct.frag_properties,
+ mnesia_schema:verify({alt, [nil, list]}, mnesia_lib:etype(Props),
+ {badarg, Tab, Props}),
+ %% Verify keys
+ ValidKeys = [foreign_key, n_fragments, node_pool,
+ n_ram_copies, n_disc_copies, n_disc_only_copies,
+ hash_module, hash_state],
+ Keys = mnesia_schema:check_keys(Tab, Props, ValidKeys),
+ mnesia_schema:check_duplicates(Tab, Keys),
+
+ %% Pick fragmentation props
+ ForeignKey = mnesia_schema:pick(Tab, foreign_key, Props, undefined),
+ {ForeignKey2, N, Pool, DefaultNR, DefaultND, DefaultNDO} =
+ pick_props(Tab, Cs, ForeignKey),
+
+ %% Verify node_pool
+ BadPool = {bad_type, Tab, {node_pool, Pool}},
+ mnesia_schema:verify(list, mnesia_lib:etype(Pool), BadPool),
+ NotAtom = fun(A) when atom(A) -> false;
+ (_A) -> true
+ end,
+ mnesia_schema:verify([], [P || P <- Pool, NotAtom(P)], BadPool),
+
+ NR = mnesia_schema:pick(Tab, n_ram_copies, Props, 0),
+ ND = mnesia_schema:pick(Tab, n_disc_copies, Props, 0),
+ NDO = mnesia_schema:pick(Tab, n_disc_only_copies, Props, 0),
+
+ PosInt = fun(I) when integer(I), I >= 0 -> true;
+ (_I) -> false
+ end,
+ mnesia_schema:verify(true, PosInt(NR),
+ {bad_type, Tab, {n_ram_copies, NR}}),
+ mnesia_schema:verify(true, PosInt(ND),
+ {bad_type, Tab, {n_disc_copies, ND}}),
+ mnesia_schema:verify(true, PosInt(NDO),
+ {bad_type, Tab, {n_disc_only_copies, NDO}}),
+
+ %% Verify n_fragments
+ Cs2 = verify_n_fragments(N, Cs, Mode),
+
+ %% Verify hash callback
+ HashMod = mnesia_schema:pick(Tab, hash_module, Props, ?DEFAULT_HASH_MOD),
+ HashState = mnesia_schema:pick(Tab, hash_state, Props, undefined),
+ HashState2 = HashMod:init_state(Tab, HashState), %% BUGBUG: Catch?
+
+ FH = #frag_state{foreign_key = ForeignKey2,
+ n_fragments = 1,
+ hash_module = HashMod,
+ hash_state = HashState2},
+ if
+ NR == 0, ND == 0, NDO == 0 ->
+ do_expand_cstruct(Cs2, FH, N, Pool, DefaultNR, DefaultND, DefaultNDO, Mode);
+ true ->
+ do_expand_cstruct(Cs2, FH, N, Pool, NR, ND, NDO, Mode)
+ end.
+
+do_expand_cstruct(Cs, FH, N, Pool, NR, ND, NDO, Mode) ->
+ Tab = Cs#cstruct.name,
+
+ LC = Cs#cstruct.local_content,
+ mnesia_schema:verify(false, LC,
+ {combine_error, Tab, {local_content, LC}}),
+
+ Snmp = Cs#cstruct.snmp,
+ mnesia_schema:verify([], Snmp,
+ {combine_error, Tab, {snmp, Snmp}}),
+
+ %% Add empty fragments
+ CommonProps = [{base_table, Tab}],
+ Cs2 = Cs#cstruct{frag_properties = lists:sort(CommonProps)},
+ expand_frag_cstructs(N, NR, ND, NDO, Cs2, Pool, Pool, FH, Mode).
+
+verify_n_fragments(N, Cs, Mode) when integer(N), N >= 1 ->
+ case Mode of
+ create ->
+ Cs#cstruct{ram_copies = [],
+ disc_copies = [],
+ disc_only_copies = []};
+ activate ->
+ Reason = {combine_error, Cs#cstruct.name, {n_fragments, N}},
+ mnesia_schema:verify(1, N, Reason),
+ Cs
+ end;
+verify_n_fragments(N, Cs, _Mode) ->
+ mnesia:abort({bad_type, Cs#cstruct.name, {n_fragments, N}}).
+
+pick_props(Tab, Cs, {ForeignTab, Attr}) ->
+ mnesia_schema:verify(true, ForeignTab /= Tab,
+ {combine_error, Tab, {ForeignTab, Attr}}),
+ Props = Cs#cstruct.frag_properties,
+ Attrs = Cs#cstruct.attributes,
+
+ ForeignKey = lookup_prop(ForeignTab, foreign_key),
+ ForeignN = lookup_prop(ForeignTab, n_fragments),
+ ForeignPool = lookup_prop(ForeignTab, node_pool),
+ N = mnesia_schema:pick(Tab, n_fragments, Props, ForeignN),
+ Pool = mnesia_schema:pick(Tab, node_pool, Props, ForeignPool),
+
+ mnesia_schema:verify(ForeignN, N,
+ {combine_error, Tab, {n_fragments, N},
+ ForeignTab, {n_fragments, ForeignN}}),
+
+ mnesia_schema:verify(ForeignPool, Pool,
+ {combine_error, Tab, {node_pool, Pool},
+ ForeignTab, {node_pool, ForeignPool}}),
+
+ mnesia_schema:verify(undefined, ForeignKey,
+ {combine_error, Tab,
+ "Multiple levels of foreign_key dependencies",
+ {ForeignTab, Attr}, ForeignKey}),
+
+ Key = {ForeignTab, mnesia_schema:attr_to_pos(Attr, Attrs)},
+ DefaultNR = length(val({ForeignTab, ram_copies})),
+ DefaultND = length(val({ForeignTab, disc_copies})),
+ DefaultNDO = length(val({ForeignTab, disc_only_copies})),
+ {Key, N, Pool, DefaultNR, DefaultND, DefaultNDO};
+pick_props(Tab, Cs, undefined) ->
+ Props = Cs#cstruct.frag_properties,
+ DefaultN = 1,
+ DefaultPool = mnesia:system_info(db_nodes),
+ N = mnesia_schema:pick(Tab, n_fragments, Props, DefaultN),
+ Pool = mnesia_schema:pick(Tab, node_pool, Props, DefaultPool),
+ DefaultNR = 1,
+ DefaultND = 0,
+ DefaultNDO = 0,
+ {undefined, N, Pool, DefaultNR, DefaultND, DefaultNDO};
+pick_props(Tab, _Cs, BadKey) ->
+ mnesia:abort({bad_type, Tab, {foreign_key, BadKey}}).
+
+expand_frag_cstructs(N, NR, ND, NDO, CommonCs, Dist, Pool, FH, Mode)
+ when N > 1, Mode == create ->
+ Frag = n_to_frag_name(CommonCs#cstruct.name, N),
+ Cs = CommonCs#cstruct{name = Frag},
+ {Cs2, RevModDist, RestDist} = set_frag_nodes(NR, ND, NDO, Cs, Dist, []),
+ ModDist = lists:reverse(RevModDist),
+ Dist2 = rearrange_dist(Cs, ModDist, RestDist, Pool),
+ %% Adjusts backwards, but it doesn't matter.
+ {FH2, _FromFrags, _AdditionalWriteFrags} = adjust_before_split(FH),
+ CsList = expand_frag_cstructs(N - 1, NR, ND, NDO, CommonCs, Dist2, Pool, FH2, Mode),
+ [Cs2 | CsList];
+expand_frag_cstructs(1, NR, ND, NDO, CommonCs, Dist, Pool, FH, Mode) ->
+ BaseProps = CommonCs#cstruct.frag_properties ++
+ [{foreign_key, FH#frag_state.foreign_key},
+ {hash_module, FH#frag_state.hash_module},
+ {hash_state, FH#frag_state.hash_state},
+ {n_fragments, FH#frag_state.n_fragments},
+ {node_pool, Pool}
+ ],
+ BaseCs = CommonCs#cstruct{frag_properties = lists:sort(BaseProps)},
+ case Mode of
+ activate ->
+ [BaseCs];
+ create ->
+ {BaseCs2, _, _} = set_frag_nodes(NR, ND, NDO, BaseCs, Dist, []),
+ [BaseCs2]
+ end.
+
+set_frag_nodes(NR, ND, NDO, Cs, [Head | Tail], Acc) when NR > 0 ->
+ Pos = #cstruct.ram_copies,
+ {Cs2, Head2} = set_frag_node(Cs, Pos, Head),
+ set_frag_nodes(NR - 1, ND, NDO, Cs2, Tail, [Head2 | Acc]);
+set_frag_nodes(NR, ND, NDO, Cs, [Head | Tail], Acc) when ND > 0 ->
+ Pos = #cstruct.disc_copies,
+ {Cs2, Head2} = set_frag_node(Cs, Pos, Head),
+ set_frag_nodes(NR, ND - 1, NDO, Cs2, Tail, [Head2 | Acc]);
+set_frag_nodes(NR, ND, NDO, Cs, [Head | Tail], Acc) when NDO > 0 ->
+ Pos = #cstruct.disc_only_copies,
+ {Cs2, Head2} = set_frag_node(Cs, Pos, Head),
+ set_frag_nodes(NR, ND, NDO - 1, Cs2, Tail, [Head2 | Acc]);
+set_frag_nodes(0, 0, 0, Cs, RestDist, ModDist) ->
+ {Cs, ModDist, RestDist};
+set_frag_nodes(_, _, _, Cs, [], _) ->
+ mnesia:abort({combine_error, Cs#cstruct.name, "Too few nodes in node_pool"}).
+
+set_frag_node(Cs, Pos, Head) ->
+ Ns = element(Pos, Cs),
+ {Node, Count2} =
+ case Head of
+ {N, Count} when atom(N), integer(Count), Count >= 0 ->
+ {N, Count + 1};
+ N when atom(N) ->
+ {N, 1};
+ BadNode ->
+ mnesia:abort({bad_type, Cs#cstruct.name, BadNode})
+ end,
+ Cs2 = setelement(Pos, Cs, [Node | Ns]),
+ {Cs2, {Node, Count2}}.
+
+rearrange_dist(Cs, [{Node, Count} | ModDist], Dist, Pool) ->
+ Dist2 = insert_dist(Cs, Node, Count, Dist, Pool),
+ rearrange_dist(Cs, ModDist, Dist2, Pool);
+rearrange_dist(_Cs, [], Dist, _) ->
+ Dist.
+
+insert_dist(Cs, Node, Count, [Head | Tail], Pool) ->
+ case Head of
+ {Node2, Count2} when atom(Node2), integer(Count2), Count2 >= 0 ->
+ case node_diff(Node, Count, Node2, Count2, Pool) of
+ less ->
+ [{Node, Count}, Head | Tail];
+ greater ->
+ [Head | insert_dist(Cs, Node, Count, Tail, Pool)]
+ end;
+ Node2 when atom(Node2) ->
+ insert_dist(Cs, Node, Count, [{Node2, 0} | Tail], Pool);
+ BadNode ->
+ mnesia:abort({bad_type, Cs#cstruct.name, BadNode})
+ end;
+insert_dist(_Cs, Node, Count, [], _Pool) ->
+ [{Node, Count}];
+insert_dist(_Cs, _Node, _Count, Dist, _Pool) ->
+ mnesia:abort({bad_type, Dist}).
+
+node_diff(_Node, Count, _Node2, Count2, _Pool) when Count < Count2 ->
+ less;
+node_diff(Node, Count, Node2, Count2, Pool) when Count == Count2 ->
+ Pos = list_pos(Node, Pool, 1),
+ Pos2 = list_pos(Node2, Pool, 1),
+ if
+ Pos < Pos2 ->
+ less;
+ Pos > Pos2 ->
+ greater
+ end;
+node_diff(_Node, Count, _Node2, Count2, _Pool) when Count > Count2 ->
+ greater.
+
+%% Returns position of element in list
+list_pos(H, [H | _T], Pos) ->
+ Pos;
+list_pos(E, [_H | T], Pos) ->
+ list_pos(E, T, Pos + 1).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Switch function for changing table fragmentation
+%%
+%% Returns a list of lists of schema ops
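+%%
+%% Illustrative calls (assuming the standard mnesia:change_table_frag/2 API
+%% as the front-end; table and node names are placeholders):
+%%
+%%   mnesia:change_table_frag(my_tab, {add_frag, [node()]})
+%%   mnesia:change_table_frag(my_tab, del_frag)
+%%   mnesia:change_table_frag(my_tab, {add_node, 'other@host'})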
+
+change_table_frag(Tab, {activate, FragProps}) ->
+ make_activate(Tab, FragProps);
+change_table_frag(Tab, deactivate) ->
+ make_deactivate(Tab);
+change_table_frag(Tab, {add_frag, SortedNodes}) ->
+ make_multi_add_frag(Tab, SortedNodes);
+change_table_frag(Tab, del_frag) ->
+ make_multi_del_frag(Tab);
+change_table_frag(Tab, {add_node, Node}) ->
+ make_multi_add_node(Tab, Node);
+change_table_frag(Tab, {del_node, Node}) ->
+ make_multi_del_node(Tab, Node);
+change_table_frag(Tab, Change) ->
+ mnesia:abort({bad_type, Tab, Change}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Turn a normal table into a fragmented table
+%%
+%% The storage type must be the same on all nodes
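+%%
+%% Hedged example (placeholders; the property list is validated by
+%% expand_cstruct/2 above): activating fragmentation on an existing table
+%% could look like
+%%
+%%   mnesia:change_table_frag(my_tab, {activate, [{node_pool, [node()]}]})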
+
+make_activate(Tab, Props) ->
+ Cs = mnesia_schema:incr_version(val({Tab, cstruct})),
+ mnesia_schema:ensure_active(Cs),
+ case Cs#cstruct.frag_properties of
+ [] ->
+ Cs2 = Cs#cstruct{frag_properties = Props},
+ [Cs3] = expand_cstruct(Cs2, activate),
+ TabDef = mnesia_schema:cs2list(Cs3),
+ Op = {op, change_table_frag, activate, TabDef},
+ [[Op]];
+ BadProps ->
+ mnesia:abort({already_exists, Tab, {frag_properties, BadProps}})
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Turn a table into a normal defragmented table
+
+make_deactivate(Tab) ->
+ Cs = mnesia_schema:incr_version(val({Tab, cstruct})),
+ mnesia_schema:ensure_active(Cs),
+ Foreigners = lookup_foreigners(Tab),
+ BaseTab = lookup_prop(Tab, base_table),
+ FH = lookup_frag_hash(Tab),
+ if
+ BaseTab /= Tab ->
+ mnesia:abort({combine_error, Tab, "Not a base table"});
+ Foreigners /= [] ->
+ mnesia:abort({combine_error, Tab, "Too many foreigners", Foreigners});
+ FH#frag_state.n_fragments > 1 ->
+ mnesia:abort({combine_error, Tab, "Too many fragments"});
+ true ->
+ Cs2 = Cs#cstruct{frag_properties = []},
+ TabDef = mnesia_schema:cs2list(Cs2),
+ Op = {op, change_table_frag, deactivate, TabDef},
+ [[Op]]
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Add a fragment to a fragmented table and fill it with half of
+%% the records from one of the old fragments
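+%%
+%% How the split works (summary of make_add_frag/2 and split/5 below): the
+%% hash module decides which existing fragments must be rehashed
+%% (adjust_before_split/1); every record in those fragments is rehashed with
+%% key_to_n/2 and, if it now maps to the new fragment, a write op for the
+%% new fragment and a delete op for the old one are emitted.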
+
+make_multi_add_frag(Tab, SortedNs) when list(SortedNs) ->
+ verify_multi(Tab),
+ Ops = make_add_frag(Tab, SortedNs),
+
+ %% Propagate to foreigners
+ MoreOps = [make_add_frag(T, SortedNs) || T <- lookup_foreigners(Tab)],
+ [Ops | MoreOps];
+make_multi_add_frag(Tab, SortedNs) ->
+ mnesia:abort({bad_type, Tab, SortedNs}).
+
+verify_multi(Tab) ->
+ FH = lookup_frag_hash(Tab),
+ ForeignKey = FH#frag_state.foreign_key,
+ mnesia_schema:verify(undefined, ForeignKey,
+ {combine_error, Tab,
+ "Op only allowed via foreign table",
+ {foreign_key, ForeignKey}}).
+
+make_frag_names_and_acquire_locks(Tab, N, FragIndecies, DoNotLockN) ->
+ mnesia_schema:get_tid_ts_and_lock(Tab, write),
+ Fun = fun(Index, FN) ->
+ if
+ DoNotLockN == true, Index == N ->
+ Name = n_to_frag_name(Tab, Index),
+ setelement(Index, FN, Name);
+ true ->
+ Name = n_to_frag_name(Tab, Index),
+ mnesia_schema:get_tid_ts_and_lock(Name, write),
+ setelement(Index , FN, Name)
+ end
+ end,
+ FragNames = erlang:make_tuple(N, undefined),
+ lists:foldl(Fun, FragNames, FragIndecies).
+
+make_add_frag(Tab, SortedNs) ->
+ Cs = mnesia_schema:incr_version(val({Tab, cstruct})),
+ mnesia_schema:ensure_active(Cs),
+ FH = lookup_frag_hash(Tab),
+ {FH2, FromIndecies, WriteIndecies} = adjust_before_split(FH),
+ N = FH2#frag_state.n_fragments,
+ FragNames = make_frag_names_and_acquire_locks(Tab, N, WriteIndecies, true),
+ NewFrag = element(N, FragNames),
+
+ NR = length(Cs#cstruct.ram_copies),
+ ND = length(Cs#cstruct.disc_copies),
+ NDO = length(Cs#cstruct.disc_only_copies),
+ NewCs = Cs#cstruct{name = NewFrag,
+ frag_properties = [{base_table, Tab}],
+ ram_copies = [],
+ disc_copies = [],
+ disc_only_copies = []},
+ {NewCs2, _, _} = set_frag_nodes(NR, ND, NDO, NewCs, SortedNs, []),
+ [NewOp] = mnesia_schema:make_create_table(NewCs2),
+
+ SplitOps = split(Tab, FH2, FromIndecies, FragNames, []),
+
+ Cs2 = replace_frag_hash(Cs, FH2),
+ TabDef = mnesia_schema:cs2list(Cs2),
+ BaseOp = {op, change_table_frag, {add_frag, SortedNs}, TabDef},
+
+ [BaseOp, NewOp | SplitOps].
+
+replace_frag_hash(Cs, FH) when record(FH, frag_state) ->
+ Fun = fun(Prop) ->
+ case Prop of
+ {n_fragments, _} ->
+ {true, {n_fragments, FH#frag_state.n_fragments}};
+ {hash_module, _} ->
+ {true, {hash_module, FH#frag_state.hash_module}};
+ {hash_state, _} ->
+ {true, {hash_state, FH#frag_state.hash_state}};
+ {next_n_to_split, _} ->
+ false;
+ {n_doubles, _} ->
+ false;
+ _ ->
+ true
+ end
+ end,
+ Props = lists:zf(Fun, Cs#cstruct.frag_properties),
+ Cs#cstruct{frag_properties = Props}.
+
+%% Adjust table info before split
+adjust_before_split(FH) ->
+ HashState = FH#frag_state.hash_state,
+ {HashState2, FromFrags, AdditionalWriteFrags} =
+ case FH#frag_state.hash_module of
+ HashMod when HashMod == ?DEFAULT_HASH_MOD ->
+ ?DEFAULT_HASH_MOD:add_frag(HashState);
+ HashMod ->
+ HashMod:add_frag(HashState)
+ end,
+ N = FH#frag_state.n_fragments + 1,
+ FromFrags2 = (catch lists:sort(FromFrags)),
+ UnionFrags = (catch lists:merge(FromFrags2, lists:sort(AdditionalWriteFrags))),
+ VerifyFun = fun(F) when integer(F), F >= 1, F =< N -> false;
+ (_F) -> true
+ end,
+ case catch lists:filter(VerifyFun, UnionFrags) of
+ [] ->
+ FH2 = FH#frag_state{n_fragments = N,
+ hash_state = HashState2},
+ {FH2, FromFrags2, UnionFrags};
+ BadFrags ->
+ mnesia:abort({"add_frag: Fragment numbers out of range",
+ BadFrags, {range, 1, N}})
+ end.
+
+split(Tab, FH, [SplitN | SplitNs], FragNames, Ops) ->
+ SplitFrag = element(SplitN, FragNames),
+ Pat = mnesia:table_info(SplitFrag, wild_pattern),
+ {_Mod, Tid, Ts} = mnesia_schema:get_tid_ts_and_lock(Tab, none),
+ Recs = mnesia:match_object(Tid, Ts, SplitFrag, Pat, read),
+ Ops2 = do_split(FH, SplitN, FragNames, Recs, Ops),
+ split(Tab, FH, SplitNs, FragNames, Ops2);
+split(_Tab, _FH, [], _FragNames, Ops) ->
+ Ops.
+
+%% Perform the split of the table
+do_split(FH, OldN, FragNames, [Rec | Recs], Ops) ->
+ Pos = key_pos(FH),
+ HashKey = element(Pos, Rec),
+ case key_to_n(FH, HashKey) of
+ NewN when NewN == OldN ->
+ %% Keep record in the same fragment. No need to move it.
+ do_split(FH, OldN, FragNames, Recs, Ops);
+ NewN ->
+ case element(NewN, FragNames) of
+ NewFrag when NewFrag /= undefined ->
+ OldFrag = element(OldN, FragNames),
+ Key = element(2, Rec),
+ NewOid = {NewFrag, Key},
+ OldOid = {OldFrag, Key},
+ Ops2 = [{op, rec, unknown, {NewOid, [Rec], write}},
+ {op, rec, unknown, {OldOid, [OldOid], delete}} | Ops],
+ do_split(FH, OldN, FragNames, Recs, Ops2);
+ _NewFrag ->
+ %% Tried to move a record to a fragment that is not locked
+ mnesia:abort({"add_frag: Fragment not locked", NewN})
+ end
+ end;
+do_split(_FH, _OldN, _FragNames, [], Ops) ->
+ Ops.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Delete a fragment from a fragmented table
+%% and merge its records with another fragment
+
+make_multi_del_frag(Tab) ->
+ verify_multi(Tab),
+ Ops = make_del_frag(Tab),
+
+ %% Propagate to foreigners
+ MoreOps = [make_del_frag(T) || T <- lookup_foreigners(Tab)],
+ [Ops | MoreOps].
+
+make_del_frag(Tab) ->
+ FH = lookup_frag_hash(Tab),
+ case FH#frag_state.n_fragments of
+ N when N > 1 ->
+ Cs = mnesia_schema:incr_version(val({Tab, cstruct})),
+ mnesia_schema:ensure_active(Cs),
+ {FH2, FromIndecies, WriteIndecies} = adjust_before_merge(FH),
+ FragNames = make_frag_names_and_acquire_locks(Tab, N, WriteIndecies, false),
+
+ MergeOps = merge(Tab, FH2, FromIndecies, FragNames, []),
+ LastFrag = element(N, FragNames),
+ [LastOp] = mnesia_schema:make_delete_table(LastFrag, single_frag),
+ Cs2 = replace_frag_hash(Cs, FH2),
+ TabDef = mnesia_schema:cs2list(Cs2),
+ BaseOp = {op, change_table_frag, del_frag, TabDef},
+ [BaseOp, LastOp | MergeOps];
+ _ ->
+ %% Cannot remove the last fragment
+ mnesia:abort({no_exists, Tab})
+ end.
+
+%% Adjust tab info before merge
+adjust_before_merge(FH) ->
+ HashState = FH#frag_state.hash_state,
+ {HashState2, FromFrags, AdditionalWriteFrags} =
+ case FH#frag_state.hash_module of
+ HashMod when HashMod == ?DEFAULT_HASH_MOD ->
+ ?DEFAULT_HASH_MOD:del_frag(HashState);
+ HashMod ->
+ HashMod:del_frag(HashState)
+ end,
+ N = FH#frag_state.n_fragments,
+ FromFrags2 = (catch lists:sort(FromFrags)),
+ UnionFrags = (catch lists:merge(FromFrags2, lists:sort(AdditionalWriteFrags))),
+ VerifyFun = fun(F) when integer(F), F >= 1, F =< N -> false;
+ (_F) -> true
+ end,
+ case catch lists:filter(VerifyFun, UnionFrags) of
+ [] ->
+ case lists:member(N, FromFrags2) of
+ true ->
+ FH2 = FH#frag_state{n_fragments = N - 1,
+ hash_state = HashState2},
+ {FH2, FromFrags2, UnionFrags};
+ false ->
+ mnesia:abort({"del_frag: Last fragment number not included", N})
+ end;
+ BadFrags ->
+ mnesia:abort({"del_frag: Fragment numbers out of range",
+ BadFrags, {range, 1, N}})
+ end.
+
+merge(Tab, FH, [FromN | FromNs], FragNames, Ops) ->
+ FromFrag = element(FromN, FragNames),
+ Pat = mnesia:table_info(FromFrag, wild_pattern),
+ {_Mod, Tid, Ts} = mnesia_schema:get_tid_ts_and_lock(Tab, none),
+ Recs = mnesia:match_object(Tid, Ts, FromFrag, Pat, read),
+ Ops2 = do_merge(FH, FromN, FragNames, Recs, Ops),
+ merge(Tab, FH, FromNs, FragNames, Ops2);
+merge(_Tab, _FH, [], _FragNames, Ops) ->
+ Ops.
+
+%% Perform the merge of the table
+do_merge(FH, OldN, FragNames, [Rec | Recs], Ops) ->
+ Pos = key_pos(FH),
+ LastN = FH#frag_state.n_fragments + 1,
+ HashKey = element(Pos, Rec),
+ case key_to_n(FH, HashKey) of
+ NewN when NewN == LastN ->
+ %% Tried to leave a record in the fragment that is to be deleted
+ mnesia:abort({"del_frag: Fragment number out of range",
+ NewN, {range, 1, LastN}});
+ NewN when NewN == OldN ->
+ %% Keep record in the same fragment. No need to move it.
+ do_merge(FH, OldN, FragNames, Recs, Ops);
+ NewN when OldN == LastN ->
+ %% Move record from the fragment that is to be deleted
+ %% No need to create a delete op for each record.
+ case element(NewN, FragNames) of
+ NewFrag when NewFrag /= undefined ->
+ Key = element(2, Rec),
+ NewOid = {NewFrag, Key},
+ Ops2 = [{op, rec, unknown, {NewOid, [Rec], write}} | Ops],
+ do_merge(FH, OldN, FragNames, Recs, Ops2);
+ _NewFrag ->
+ %% Tried to move a record to a fragment that is not locked
+ mnesia:abort({"del_frag: Fragment not locked", NewN})
+ end;
+ NewN ->
+ case element(NewN, FragNames) of
+ NewFrag when NewFrag /= undefined ->
+ OldFrag = element(OldN, FragNames),
+ Key = element(2, Rec),
+ NewOid = {NewFrag, Key},
+ OldOid = {OldFrag, Key},
+ Ops2 = [{op, rec, unknown, {NewOid, [Rec], write}},
+ {op, rec, unknown, {OldOid, [OldOid], delete}} | Ops],
+ do_merge(FH, OldN, FragNames, Recs, Ops2);
+ _NewFrag ->
+ %% Tried to move a record to a fragment that is not locked
+ mnesia:abort({"del_frag: Fragment not locked", NewN})
+ end
+ end;
+do_merge(_FH, _OldN, _FragNames, [], Ops) ->
+ Ops.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Add a node to the node pool of a fragmented table
+
+make_multi_add_node(Tab, Node) ->
+ verify_multi(Tab),
+ Ops = make_add_node(Tab, Node),
+
+ %% Propagate to foreigners
+ MoreOps = [make_add_node(T, Node) || T <- lookup_foreigners(Tab)],
+ [Ops | MoreOps].
+
+make_add_node(Tab, Node) when atom(Node) ->
+ Pool = lookup_prop(Tab, node_pool),
+ case lists:member(Node, Pool) of
+ false ->
+ Cs = mnesia_schema:incr_version(val({Tab, cstruct})),
+ Pool2 = Pool ++ [Node],
+ Props = Cs#cstruct.frag_properties,
+ Props2 = lists:keyreplace(node_pool, 1, Props, {node_pool, Pool2}),
+ Cs2 = Cs#cstruct{frag_properties = Props2},
+ TabDef = mnesia_schema:cs2list(Cs2),
+ Op = {op, change_table_frag, {add_node, Node}, TabDef},
+ [Op];
+ true ->
+ mnesia:abort({already_exists, Tab, Node})
+ end;
+make_add_node(Tab, Node) ->
+ mnesia:abort({bad_type, Tab, Node}).
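+
+%% Illustrative sketch (not part of the original module): these schema ops
+%% are normally produced via the user-level API, e.g.
+%%
+%%   %% add hypothetical node 'b@host' to the pool of the fragmented
+%%   %% table 'person' (both names are made up for the example)
+%%   {atomic, ok} = mnesia:change_table_frag(person, {add_node, 'b@host'}),
+%%
+%% the kind of call that drives make_multi_add_node/2 above.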
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Delete a node from the node pool of a fragmented table
+
+make_multi_del_node(Tab, Node) ->
+ verify_multi(Tab),
+ Ops = make_del_node(Tab, Node),
+
+ %% Propagate to foreigners
+ MoreOps = [make_del_node(T, Node) || T <- lookup_foreigners(Tab)],
+ [Ops | MoreOps].
+
+make_del_node(Tab, Node) when atom(Node) ->
+ Cs = mnesia_schema:incr_version(val({Tab, cstruct})),
+ mnesia_schema:ensure_active(Cs),
+ Pool = lookup_prop(Tab, node_pool),
+ case lists:member(Node, Pool) of
+ true ->
+ Pool2 = Pool -- [Node],
+ Props = lists:keyreplace(node_pool, 1, Cs#cstruct.frag_properties, {node_pool, Pool2}),
+ Cs2 = Cs#cstruct{frag_properties = Props},
+ TabDef = mnesia_schema:cs2list(Cs2),
+ Op = {op, change_table_frag, {del_node, Node}, TabDef},
+ [Op];
+ false ->
+ mnesia:abort({no_exists, Tab, Node})
+ end;
+make_del_node(Tab, Node) ->
+ mnesia:abort({bad_type, Tab, Node}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Special case used to remove all references to a node during
+%% mnesia:del_table_copy(schema, Node)
+
+remove_node(Node, Cs) ->
+ Tab = Cs#cstruct.name,
+ case is_top_frag(Tab) of
+ false ->
+ {Cs, false};
+ true ->
+ Pool = lookup_prop(Tab, node_pool),
+ case lists:member(Node, Pool) of
+ true ->
+ Pool2 = Pool -- [Node],
+ Props = lists:keyreplace(node_pool, 1,
+ Cs#cstruct.frag_properties,
+ {node_pool, Pool2}),
+ {Cs#cstruct{frag_properties = Props}, true};
+ false ->
+ {Cs, false}
+ end
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Helpers
+
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason);
+ Value -> Value
+ end.
+
+set_frag_hash(Tab, Props) ->
+ case props_to_frag_hash(Tab, Props) of
+ FH when record(FH, frag_state) ->
+ mnesia_lib:set({Tab, frag_hash}, FH);
+ no_hash ->
+ mnesia_lib:unset({Tab, frag_hash})
+ end.
+
+props_to_frag_hash(_Tab, []) ->
+ no_hash;
+props_to_frag_hash(Tab, Props) ->
+ case mnesia_schema:pick(Tab, base_table, Props, undefined) of
+ T when T == Tab ->
+ Foreign = mnesia_schema:pick(Tab, foreign_key, Props, must),
+ N = mnesia_schema:pick(Tab, n_fragments, Props, must),
+
+ case mnesia_schema:pick(Tab, hash_module, Props, undefined) of
+ undefined ->
+ Split = mnesia_schema:pick(Tab, next_n_to_split, Props, must),
+ Doubles = mnesia_schema:pick(Tab, n_doubles, Props, must),
+ FH = {frag_hash, Foreign, N, Split, Doubles},
+ HashState = ?OLD_HASH_MOD:init_state(Tab, FH),
+ #frag_state{foreign_key = Foreign,
+ n_fragments = N,
+ hash_module = ?OLD_HASH_MOD,
+ hash_state = HashState};
+ HashMod ->
+ HashState = mnesia_schema:pick(Tab, hash_state, Props, must),
+ #frag_state{foreign_key = Foreign,
+ n_fragments = N,
+ hash_module = HashMod,
+ hash_state = HashState}
+ %% Old style. Kept for backwards compatibility.
+ end;
+ _ ->
+ no_hash
+ end.
+
+lookup_prop(Tab, Prop) ->
+ Props = val({Tab, frag_properties}),
+ case lists:keysearch(Prop, 1, Props) of
+ {value, {Prop, Val}} ->
+ Val;
+ false ->
+ mnesia:abort({no_exists, Tab, Prop, {frag_properties, Props}})
+ end.
+
+lookup_frag_hash(Tab) ->
+ case ?catch_val({Tab, frag_hash}) of
+ FH when record(FH, frag_state) ->
+ FH;
+ {frag_hash, K, N, _S, _D} = FH ->
+ %% Old style. Kept for backwards compatibility.
+ HashState = ?OLD_HASH_MOD:init_state(Tab, FH),
+ #frag_state{foreign_key = K,
+ n_fragments = N,
+ hash_module = ?OLD_HASH_MOD,
+ hash_state = HashState};
+ {'EXIT', _} ->
+ mnesia:abort({no_exists, Tab, frag_properties, frag_hash})
+ end.
+
+is_top_frag(Tab) ->
+ case ?catch_val({Tab, frag_hash}) of
+ {'EXIT', _} ->
+ false;
+ _ ->
+ [] == lookup_foreigners(Tab)
+ end.
+
+%% Returns a list of tables
+lookup_foreigners(Tab) ->
+ %% First field in HashPat is either frag_hash or frag_state
+ HashPat = {'_', {Tab, '_'}, '_', '_', '_'},
+ [T || [T] <- ?ets_match(mnesia_gvar, {{'$1', frag_hash}, HashPat})].
+
+%% Returns name of fragment table
+record_to_frag_name(Tab, Rec) ->
+ case ?catch_val({Tab, frag_hash}) of
+ {'EXIT', _} ->
+ Tab;
+ FH ->
+ Pos = key_pos(FH),
+ Key = element(Pos, Rec),
+ N = key_to_n(FH, Key),
+ n_to_frag_name(Tab, N)
+ end.
+
+key_pos(FH) ->
+ case FH#frag_state.foreign_key of
+ undefined ->
+ 2;
+ {_ForeignTab, Pos} ->
+ Pos
+ end.
+
+%% Returns name of fragment table
+key_to_frag_name({BaseTab, _} = Tab, Key) ->
+ N = key_to_frag_number(Tab, Key),
+ n_to_frag_name(BaseTab, N);
+key_to_frag_name(Tab, Key) ->
+ N = key_to_frag_number(Tab, Key),
+ n_to_frag_name(Tab, N).
+
+%% Returns name of fragment table
+n_to_frag_name(Tab, 1) ->
+ Tab;
+n_to_frag_name(Tab, N) when atom(Tab), integer(N) ->
+ list_to_atom(atom_to_list(Tab) ++ "_frag" ++ integer_to_list(N));
+n_to_frag_name(Tab, N) ->
+ mnesia:abort({bad_type, Tab, N}).
+
+%% Returns the fragment number for a key
+key_to_frag_number({Tab, ForeignKey}, _Key) ->
+ FH = val({Tab, frag_hash}),
+ case FH#frag_state.foreign_key of
+ {_ForeignTab, _Pos} ->
+ key_to_n(FH, ForeignKey);
+ undefined ->
+ mnesia:abort({combine_error, Tab, frag_properties,
+ {foreign_key, undefined}})
+ end;
+key_to_frag_number(Tab, Key) ->
+ case ?catch_val({Tab, frag_hash}) of
+ {'EXIT', _} ->
+ 1;
+ FH ->
+ key_to_n(FH, Key)
+ end.
+
+%% Returns fragment number
+key_to_n(FH, Key) ->
+ HashState = FH#frag_state.hash_state,
+ N =
+ case FH#frag_state.hash_module of
+ HashMod when HashMod == ?DEFAULT_HASH_MOD ->
+ ?DEFAULT_HASH_MOD:key_to_frag_number(HashState, Key);
+ HashMod ->
+ HashMod:key_to_frag_number(HashState, Key)
+ end,
+ if
+ integer(N), N >= 1, N =< FH#frag_state.n_fragments ->
+ N;
+ true ->
+ mnesia:abort({"key_to_frag_number: Fragment number out of range",
+ N, {range, 1, FH#frag_state.n_fragments}})
+ end.
+
+%% Returns a list of fragment table names
+frag_names(Tab) ->
+ case ?catch_val({Tab, frag_hash}) of
+ {'EXIT', _} ->
+ [Tab];
+ FH ->
+ N = FH#frag_state.n_fragments,
+ frag_names(Tab, N, [])
+ end.
+
+frag_names(Tab, 1, Acc) ->
+ [Tab | Acc];
+frag_names(Tab, N, Acc) ->
+ Frag = n_to_frag_name(Tab, N),
+ frag_names(Tab, N - 1, [Frag | Acc]).
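+
+%% Illustrative sketch (hypothetical table 'person' with 3 fragments):
+%%
+%%   frag_names(person) -> [person, person_frag2, person_frag3]
+%%
+%% i.e. the first fragment keeps the base table name and the others get a
+%% "_fragN" suffix, as built by n_to_frag_name/2 above.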
+
+%% Returns a list of {Node, FragCount} tuples
+%% sorted on FragCounts
+frag_dist(Tab) ->
+ Pool = lookup_prop(Tab, node_pool),
+ Dist = [{good, Node, 0} || Node <- Pool],
+ Dist2 = count_frag(frag_names(Tab), Dist),
+ sort_dist(Dist2).
+
+count_frag([Frag | Frags], Dist) ->
+ Dist2 = incr_nodes(val({Frag, ram_copies}), Dist),
+ Dist3 = incr_nodes(val({Frag, disc_copies}), Dist2),
+ Dist4 = incr_nodes(val({Frag, disc_only_copies}), Dist3),
+ count_frag(Frags, Dist4);
+count_frag([], Dist) ->
+ Dist.
+
+incr_nodes([Node | Nodes], Dist) ->
+ Dist2 = incr_node(Node, Dist),
+ incr_nodes(Nodes, Dist2);
+incr_nodes([], Dist) ->
+ Dist.
+
+incr_node(Node, [{Kind, Node, Count} | Tail]) ->
+ [{Kind, Node, Count + 1} | Tail];
+incr_node(Node, [Head | Tail]) ->
+ [Head | incr_node(Node, Tail)];
+incr_node(Node, []) ->
+ [{bad, Node, 1}].
+
+%% Sorts dist in increasing count order (nodes outside the pool come last)
+sort_dist(Dist) ->
+ Dist2 = deep_dist(Dist, []),
+ Dist3 = lists:keysort(1, Dist2),
+ shallow_dist(Dist3).
+
+deep_dist([Head | Tail], Deep) ->
+ {Kind, _Node, Count} = Head,
+ {Tag, Same, Other} = pick_count(Kind, Count, [Head | Tail]),
+ deep_dist(Other, [{Tag, Same} | Deep]);
+deep_dist([], Deep) ->
+ Deep.
+
+pick_count(Kind, Count, [{Kind2, Node2, Count2} | Tail]) ->
+ Head = {Node2, Count2},
+ {_, Same, Other} = pick_count(Kind, Count, Tail),
+ if
+ Kind == bad ->
+ {bad, [Head | Same], Other};
+ Kind2 == bad ->
+ {Count, Same, [{Kind2, Node2, Count2} | Other]};
+ Count == Count2 ->
+ {Count, [Head | Same], Other};
+ true ->
+ {Count, Same, [{Kind2, Node2, Count2} | Other]}
+ end;
+pick_count(_Kind, Count, []) ->
+ {Count, [], []}.
+
+shallow_dist([{_Tag, Shallow} | Deep]) ->
+ Shallow ++ shallow_dist(Deep);
+shallow_dist([]) ->
+ [].
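+
+%% Illustrative sketch (hypothetical nodes 'a@host' and 'b@host'): if
+%% 'a@host' holds two fragment replicas and 'b@host' holds one, then
+%%
+%%   frag_dist(person) -> [{'b@host', 1}, {'a@host', 2}]
+%%
+%% i.e. the least loaded pool nodes come first; nodes holding replicas
+%% without being members of the pool are tagged as bad and end up last.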
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_frag_hash.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_frag_hash.erl
new file mode 100644
index 0000000000..591f2ce9c8
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_frag_hash.erl
@@ -0,0 +1,118 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_frag_hash.erl,v 1.1 2008/12/17 09:53:38 mikpe Exp $
+%%
+%%%----------------------------------------------------------------------
+%%% Purpose : Implements hashing functionality for fragmented tables
+%%%----------------------------------------------------------------------
+
+%header_doc_include
+-module(mnesia_frag_hash).
+-behaviour(mnesia_frag_hash).
+
+%% Fragmented Table Hashing callback functions
+-export([
+ init_state/2,
+ add_frag/1,
+ del_frag/1,
+ key_to_frag_number/2,
+ match_spec_to_frag_numbers/2
+ ]).
+
+%header_doc_include
+
+%impl_doc_include
+-record(hash_state, {n_fragments, next_n_to_split, n_doubles}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+init_state(_Tab, State) when State == undefined ->
+ #hash_state{n_fragments = 1,
+ next_n_to_split = 1,
+ n_doubles = 0}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+add_frag(State) when record(State, hash_state) ->
+ SplitN = State#hash_state.next_n_to_split,
+ P = SplitN + 1,
+ L = State#hash_state.n_doubles,
+ NewN = State#hash_state.n_fragments + 1,
+ State2 = case trunc(math:pow(2, L)) + 1 of
+ P2 when P2 == P ->
+ State#hash_state{n_fragments = NewN,
+ n_doubles = L + 1,
+ next_n_to_split = 1};
+ _ ->
+ State#hash_state{n_fragments = NewN,
+ next_n_to_split = P}
+ end,
+ {State2, [SplitN], [NewN]}.
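+
+%% Worked example (a sketch, not part of the original module): starting from
+%% init_state/2 (1 fragment, next_n_to_split = 1, n_doubles = 0), successive
+%% add_frag/1 calls split fragments in linear-hashing order, returning
+%% {NewState, FragsToRehash, NewFrags}:
+%%
+%%   add_frag -> {#hash_state{n_fragments = 2, next_n_to_split = 1, n_doubles = 1}, [1], [2]}
+%%   add_frag -> {#hash_state{n_fragments = 3, next_n_to_split = 2, n_doubles = 1}, [1], [3]}
+%%   add_frag -> {#hash_state{n_fragments = 4, next_n_to_split = 1, n_doubles = 2}, [2], [4]}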
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+del_frag(State) when record(State, hash_state) ->
+ P = State#hash_state.next_n_to_split - 1,
+ L = State#hash_state.n_doubles,
+ N = State#hash_state.n_fragments,
+ if
+ P < 1 ->
+ L2 = L - 1,
+ MergeN = trunc(math:pow(2, L2)),
+ State2 = State#hash_state{n_fragments = N - 1,
+ next_n_to_split = MergeN,
+ n_doubles = L2},
+ {State2, [N], [MergeN]};
+ true ->
+ MergeN = P,
+ State2 = State#hash_state{n_fragments = N - 1,
+ next_n_to_split = MergeN},
+ {State2, [N], [MergeN]}
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+key_to_frag_number(State, Key) when record(State, hash_state) ->
+ L = State#hash_state.n_doubles,
+ A = erlang:phash(Key, trunc(math:pow(2, L))),
+ P = State#hash_state.next_n_to_split,
+ if
+ A < P ->
+ erlang:phash(Key, trunc(math:pow(2, L + 1)));
+ true ->
+ A
+ end.
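+
+%% Illustrative sketch (not part of the original module): with a state of
+%% 3 fragments (n_doubles = 1, next_n_to_split = 2) a key is first hashed
+%% into 2^1 = 2 buckets; if it lands in the already split bucket (A < 2) it
+%% is rehashed into 2^2 = 4 buckets, which for such keys yields 1 or 3, so
+%% the result is always a valid fragment number in 1..3.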
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+match_spec_to_frag_numbers(State, MatchSpec) when record(State, hash_state) ->
+ case MatchSpec of
+ [{HeadPat, _, _}] when tuple(HeadPat), size(HeadPat) > 2 ->
+ KeyPat = element(2, HeadPat),
+ case has_var(KeyPat) of
+ false ->
+ [key_to_frag_number(State, KeyPat)];
+ true ->
+ lists:seq(1, State#hash_state.n_fragments)
+ end;
+ _ ->
+ lists:seq(1, State#hash_state.n_fragments)
+ end.
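+
+%% Illustrative sketch (hypothetical record 'person'): a select with a bound
+%% key, e.g. [{{person, kalle, '_'}, [], ['$_']}], maps to exactly one
+%% fragment, whereas an unbound key pattern such as {person, '$1', '_'}
+%% maps to lists:seq(1, NFragments), i.e. every fragment must be searched.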
+
+%impl_doc_include
+
+has_var(Pat) ->
+ mnesia:has_var(Pat).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_frag_old_hash.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_frag_old_hash.erl
new file mode 100644
index 0000000000..8dc128a42e
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_frag_old_hash.erl
@@ -0,0 +1,127 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_frag_old_hash.erl,v 1.1 2008/12/17 09:53:38 mikpe Exp $
+%%
+%%%----------------------------------------------------------------------
+%%% Purpose : Implements hashing functionality for fragmented tables
+%%%----------------------------------------------------------------------
+
+-module(mnesia_frag_old_hash).
+-behaviour(mnesia_frag_hash).
+
+%% Hashing callback functions
+-export([
+ init_state/2,
+ add_frag/1,
+ del_frag/1,
+ key_to_frag_number/2,
+ match_spec_to_frag_numbers/2
+ ]).
+
+-record(old_hash_state,
+ {n_fragments,
+ next_n_to_split,
+ n_doubles}).
+
+%% Old style. Kept for backwards compatibility.
+-record(frag_hash,
+ {foreign_key,
+ n_fragments,
+ next_n_to_split,
+ n_doubles}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+init_state(_Tab, InitialState) when InitialState == undefined ->
+ #old_hash_state{n_fragments = 1,
+ next_n_to_split = 1,
+ n_doubles = 0};
+init_state(_Tab, FH) when record(FH, frag_hash) ->
+ %% Old style. Kept for backwards compatibility.
+ #old_hash_state{n_fragments = FH#frag_hash.n_fragments,
+ next_n_to_split = FH#frag_hash.next_n_to_split,
+ n_doubles = FH#frag_hash.n_doubles}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+add_frag(State) when record(State, old_hash_state) ->
+ SplitN = State#old_hash_state.next_n_to_split,
+ P = SplitN + 1,
+ L = State#old_hash_state.n_doubles,
+ NewN = State#old_hash_state.n_fragments + 1,
+ State2 = case trunc(math:pow(2, L)) + 1 of
+ P2 when P2 == P ->
+ State#old_hash_state{n_fragments = NewN,
+ next_n_to_split = 1,
+ n_doubles = L + 1};
+ _ ->
+ State#old_hash_state{n_fragments = NewN,
+ next_n_to_split = P}
+ end,
+ {State2, [SplitN], [NewN]}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+del_frag(State) when record(State, old_hash_state) ->
+ P = State#old_hash_state.next_n_to_split - 1,
+ L = State#old_hash_state.n_doubles,
+ N = State#old_hash_state.n_fragments,
+ if
+ P < 1 ->
+ L2 = L - 1,
+ MergeN = trunc(math:pow(2, L2)),
+ State2 = State#old_hash_state{n_fragments = N - 1,
+ next_n_to_split = MergeN,
+ n_doubles = L2},
+ {State2, [N], [MergeN]};
+ true ->
+ MergeN = P,
+ State2 = State#old_hash_state{n_fragments = N - 1,
+ next_n_to_split = MergeN},
+ {State2, [N], [MergeN]}
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+key_to_frag_number(State, Key) when record(State, old_hash_state) ->
+ L = State#old_hash_state.n_doubles,
+ A = erlang:hash(Key, trunc(math:pow(2, L))),
+ P = State#old_hash_state.next_n_to_split,
+ if
+ A < P ->
+ erlang:hash(Key, trunc(math:pow(2, L + 1)));
+ true ->
+ A
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+match_spec_to_frag_numbers(State, MatchSpec) when record(State, old_hash_state) ->
+ case MatchSpec of
+ [{HeadPat, _, _}] when tuple(HeadPat), size(HeadPat) > 2 ->
+ KeyPat = element(2, HeadPat),
+ case has_var(KeyPat) of
+ false ->
+ [key_to_frag_number(State, KeyPat)];
+ true ->
+ lists:seq(1, State#old_hash_state.n_fragments)
+ end;
+ _ ->
+ lists:seq(1, State#old_hash_state.n_fragments)
+ end.
+
+has_var(Pat) ->
+ mnesia:has_var(Pat).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_index.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_index.erl
new file mode 100644
index 0000000000..650a2d1d3c
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_index.erl
@@ -0,0 +1,379 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_index.erl,v 1.1 2008/12/17 09:53:38 mikpe Exp $
+%%
+%% Purpose: Handles index functionality in mnesia
+
+-module(mnesia_index).
+-export([read/5,
+ add_index/5,
+ delete_index/3,
+ del_object_index/5,
+ clear_index/4,
+ dirty_match_object/3,
+ dirty_select/3,
+ dirty_read/3,
+ dirty_read2/3,
+
+ db_put/2,
+ db_get/2,
+ db_match_erase/2,
+ get_index_table/2,
+ get_index_table/3,
+
+ tab2filename/2,
+ tab2tmp_filename/2,
+ init_index/2,
+ init_indecies/3,
+ del_transient/2,
+ del_transient/3,
+ del_index_table/3]).
+
+-import(mnesia_lib, [verbose/2]).
+-include("mnesia.hrl").
+
+-record(index, {setorbag, pos_list}).
+
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', _ReASoN_} -> mnesia_lib:other_val(Var, _ReASoN_);
+ _VaLuE_ -> _VaLuE_
+ end.
+
+%% Read an object list through its index table.
+%% We assume that table Tab has an index on attribute number Pos.
+
+read(Tid, Store, Tab, IxKey, Pos) ->
+ ResList = mnesia_locker:ixrlock(Tid, Store, Tab, IxKey, Pos),
+ %% Remove all tuples which don't include IxKey; happens when Tab is a bag
+ case val({Tab, setorbag}) of
+ bag ->
+ mnesia_lib:key_search_all(IxKey, Pos, ResList);
+ _ ->
+ ResList
+ end.
+
+add_index(Index, Tab, Key, Obj, Old) ->
+ add_index2(Index#index.pos_list, Index#index.setorbag, Tab, Key, Obj, Old).
+
+add_index2([{Pos, Ixt} |Tail], bag, Tab, K, Obj, OldRecs) ->
+ db_put(Ixt, {element(Pos, Obj), K}),
+ add_index2(Tail, bag, Tab, K, Obj, OldRecs);
+add_index2([{Pos, Ixt} |Tail], Type, Tab, K, Obj, OldRecs) ->
+ %% Remove old tuples in index if Tab is updated
+ case OldRecs of
+ undefined ->
+ Old = mnesia_lib:db_get(Tab, K),
+ del_ixes(Ixt, Old, Pos, K);
+ Old ->
+ del_ixes(Ixt, Old, Pos, K)
+ end,
+ db_put(Ixt, {element(Pos, Obj), K}),
+ add_index2(Tail, Type, Tab, K, Obj, OldRecs);
+add_index2([], _, _Tab, _K, _Obj, _) -> ok.
+
+delete_index(Index, Tab, K) ->
+ delete_index2(Index#index.pos_list, Tab, K).
+
+delete_index2([{Pos, Ixt} | Tail], Tab, K) ->
+ DelObjs = mnesia_lib:db_get(Tab, K),
+ del_ixes(Ixt, DelObjs, Pos, K),
+ delete_index2(Tail, Tab, K);
+delete_index2([], _Tab, _K) -> ok.
+
+
+del_ixes(_Ixt, [], _Pos, _L) -> ok;
+del_ixes(Ixt, [Obj | Tail], Pos, Key) ->
+ db_match_erase(Ixt, {element(Pos, Obj), Key}),
+ del_ixes(Ixt, Tail, Pos, Key).
+
+del_object_index(Index, Tab, K, Obj, Old) ->
+ del_object_index2(Index#index.pos_list, Index#index.setorbag, Tab, K, Obj, Old).
+
+del_object_index2([], _, _Tab, _K, _Obj, _Old) -> ok;
+del_object_index2([{Pos, Ixt} | Tail], SoB, Tab, K, Obj, Old) ->
+ case SoB of
+ bag ->
+ del_object_bag(Tab, K, Obj, Pos, Ixt, Old);
+ _ -> %% If set remove the tuple in index table
+ del_ixes(Ixt, [Obj], Pos, K)
+ end,
+ del_object_index2(Tail, SoB, Tab, K, Obj, Old).
+
+del_object_bag(Tab, Key, Obj, Pos, Ixt, undefined) ->
+ Old = mnesia_lib:db_get(Tab, Key),
+ del_object_bag(Tab, Key, Obj, Pos, Ixt, Old);
+%% If the Tab type is bag we need to remove the index identifier when Tab
+%% contains fewer than 2 elements.
+del_object_bag(_Tab, Key, Obj, Pos, Ixt, Old) when length(Old) < 2 ->
+ del_ixes(Ixt, [Obj], Pos, Key);
+del_object_bag(_Tab, _Key, _Obj, _Pos, _Ixt, _Old) -> ok.
+
+clear_index(Index, Tab, K, Obj) ->
+ clear_index2(Index#index.pos_list, Tab, K, Obj).
+
+clear_index2([], _Tab, _K, _Obj) -> ok;
+clear_index2([{_Pos, Ixt} | Tail], Tab, K, Obj) ->
+ db_match_erase(Ixt, Obj),
+ clear_index2(Tail, Tab, K, Obj).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+dirty_match_object(Tab, Pat, Pos) ->
+ %% Assume that we are on the node where the replica is
+ case element(2, Pat) of
+ '_' ->
+ IxKey = element(Pos, Pat),
+ RealKeys = realkeys(Tab, Pos, IxKey),
+ merge(RealKeys, Tab, Pat, []);
+ _Else ->
+ mnesia_lib:db_match_object(Tab, Pat)
+ end.
+
+merge([{_IxKey, RealKey} | Tail], Tab, Pat, Ack) ->
+ %% Assume that we are on the node where the replica is
+ Pat2 = setelement(2, Pat, RealKey),
+ Recs = mnesia_lib:db_match_object(Tab, Pat2),
+ merge(Tail, Tab, Pat, Recs ++ Ack);
+merge([], _, _, Ack) ->
+ Ack.
+
+realkeys(Tab, Pos, IxKey) ->
+ Index = get_index_table(Tab, Pos),
+ db_get(Index, IxKey). % a list on the form [{IxKey, RealKey1} , ....
+
+dirty_select(Tab, Spec, Pos) ->
+ %% Assume that we are on the node where the replica is
+ %% Returns the records without applying the match spec
+ %% The actual filtering is handled by the caller
+ IxKey = element(Pos, Spec),
+ RealKeys = realkeys(Tab, Pos, IxKey),
+ StorageType = val({Tab, storage_type}),
+ lists:append([mnesia_lib:db_get(StorageType, Tab, Key) || Key <- RealKeys]).
+
+dirty_read(Tab, IxKey, Pos) ->
+ ResList = mnesia:dirty_rpc(Tab, ?MODULE, dirty_read2,
+ [Tab, IxKey, Pos]),
+ case val({Tab, setorbag}) of
+ bag ->
+ %% Remove all tuples which don't include IxKey
+ mnesia_lib:key_search_all(IxKey, Pos, ResList);
+ _ ->
+ ResList
+ end.
+
+dirty_read2(Tab, IxKey, Pos) ->
+ Ix = get_index_table(Tab, Pos),
+ Keys = db_match(Ix, {IxKey, '$1'}),
+ r_keys(Keys, Tab, []).
+
+r_keys([[H]|T],Tab,Ack) ->
+ V = mnesia_lib:db_get(Tab, H),
+ r_keys(T, Tab, V ++ Ack);
+r_keys([], _, Ack) ->
+ Ack.
+
+
+%%%%%%% Creation, Init and deletion routines for index tables
+%% We can have several indexes on the same table
+%% this can be a fairly costly operation if table is *very* large
+
+tab2filename(Tab, Pos) ->
+ mnesia_lib:dir(Tab) ++ "_" ++ integer_to_list(Pos) ++ ".DAT".
+
+tab2tmp_filename(Tab, Pos) ->
+ mnesia_lib:dir(Tab) ++ "_" ++ integer_to_list(Pos) ++ ".TMP".
+
+init_index(Tab, Storage) ->
+ PosList = val({Tab, index}),
+ init_indecies(Tab, Storage, PosList).
+
+init_indecies(Tab, Storage, PosList) ->
+ case Storage of
+ unknown ->
+ ignore;
+ disc_only_copies ->
+ init_disc_index(Tab, PosList);
+ ram_copies ->
+ make_ram_index(Tab, PosList);
+ disc_copies ->
+ make_ram_index(Tab, PosList)
+ end.
+
+%% works for both ram and disc indexes
+
+del_index_table(_, unknown, _) ->
+ ignore;
+del_index_table(Tab, Storage, Pos) ->
+ delete_transient_index(Tab, Pos, Storage),
+ mnesia_lib:del({Tab, index}, Pos).
+
+del_transient(Tab, Storage) ->
+ PosList = val({Tab, index}),
+ del_transient(Tab, PosList, Storage).
+
+del_transient(_, [], _) -> done;
+del_transient(Tab, [Pos | Tail], Storage) ->
+ delete_transient_index(Tab, Pos, Storage),
+ del_transient(Tab, Tail, Storage).
+
+delete_transient_index(Tab, Pos, disc_only_copies) ->
+ Tag = {Tab, index, Pos},
+ mnesia_monitor:unsafe_close_dets(Tag),
+ file:delete(tab2filename(Tab, Pos)),
+ del_index_info(Tab, Pos), %% Uses val(..)
+ mnesia_lib:unset({Tab, {index, Pos}});
+
+delete_transient_index(Tab, Pos, _Storage) ->
+ Ixt = val({Tab, {index, Pos}}),
+ ?ets_delete_table(Ixt),
+ del_index_info(Tab, Pos),
+ mnesia_lib:unset({Tab, {index, Pos}}).
+
+%%%%% misc functions for the index create/init/delete functions above
+
+%% assuming that the file exists.
+init_disc_index(_Tab, []) ->
+ done;
+init_disc_index(Tab, [Pos | Tail]) when integer(Pos) ->
+ Fn = tab2filename(Tab, Pos),
+ IxTag = {Tab, index, Pos},
+ file:delete(Fn),
+ Args = [{file, Fn}, {keypos, 1}, {type, bag}],
+ mnesia_monitor:open_dets(IxTag, Args),
+ Storage = disc_only_copies,
+ Key = mnesia_lib:db_first(Storage, Tab),
+ Recs = mnesia_lib:db_get(Storage, Tab, Key),
+ BinSize = size(term_to_binary(Recs)),
+ KeysPerChunk = (4000 div BinSize) + 1,
+ Init = {start, KeysPerChunk},
+ mnesia_lib:db_fixtable(Storage, Tab, true),
+ ok = dets:init_table(IxTag, create_fun(Init, Tab, Pos)),
+ mnesia_lib:db_fixtable(Storage, Tab, false),
+ mnesia_lib:set({Tab, {index, Pos}}, IxTag),
+ add_index_info(Tab, val({Tab, setorbag}), {Pos, {dets, IxTag}}),
+ init_disc_index(Tab, Tail).
+
+create_fun(Cont, Tab, Pos) ->
+ fun(read) ->
+ Data =
+ case Cont of
+ {start, KeysPerChunk} ->
+ mnesia_lib:db_init_chunk(disc_only_copies, Tab, KeysPerChunk);
+ '$end_of_table' ->
+ '$end_of_table';
+ _Else ->
+ mnesia_lib:db_chunk(disc_only_copies, Cont)
+ end,
+ case Data of
+ '$end_of_table' ->
+ end_of_input;
+ {Recs, Next} ->
+ IdxElems = [{element(Pos, Obj), element(2, Obj)} || Obj <- Recs],
+ {IdxElems, create_fun(Next, Tab, Pos)}
+ end;
+ (close) ->
+ ok
+ end.
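+
+%% A minimal sketch of the continuation protocol used above (the list and
+%% the helper name are made up): dets:init_table/2 repeatedly calls the fun
+%% with 'read' and expects {Objects, NextFun} or end_of_input, e.g.
+%%
+%%   list_fun([]) -> fun(read) -> end_of_input; (close) -> ok end;
+%%   list_fun([H | T]) -> fun(read) -> {[H], list_fun(T)}; (close) -> ok end.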
+
+make_ram_index(_, []) ->
+ done;
+make_ram_index(Tab, [Pos | Tail]) ->
+ add_ram_index(Tab, Pos),
+ make_ram_index(Tab, Tail).
+
+add_ram_index(Tab, Pos) when integer(Pos) ->
+ verbose("Creating index for ~w ~n", [Tab]),
+ Index = mnesia_monitor:mktab(mnesia_index, [bag, public]),
+ Insert = fun(Rec, _Acc) ->
+ true = ?ets_insert(Index, {element(Pos, Rec), element(2, Rec)})
+ end,
+ mnesia_lib:db_fixtable(ram_copies, Tab, true),
+ true = ets:foldl(Insert, true, Tab),
+ mnesia_lib:db_fixtable(ram_copies, Tab, false),
+ mnesia_lib:set({Tab, {index, Pos}}, Index),
+ add_index_info(Tab, val({Tab, setorbag}), {Pos, {ram, Index}});
+add_ram_index(_Tab, snmp) ->
+ ok.
+
+add_index_info(Tab, Type, IxElem) ->
+ Commit = val({Tab, commit_work}),
+ case lists:keysearch(index, 1, Commit) of
+ false ->
+ Index = #index{setorbag = Type,
+ pos_list = [IxElem]},
+ %% Check later if mnesia_tm is sensitive about the order
+ mnesia_lib:set({Tab, commit_work},
+ mnesia_lib:sort_commit([Index | Commit]));
+ {value, Old} ->
+ %% We could check for consistency here
+ Index = Old#index{pos_list = [IxElem | Old#index.pos_list]},
+ NewC = lists:keyreplace(index, 1, Commit, Index),
+ mnesia_lib:set({Tab, commit_work},
+ mnesia_lib:sort_commit(NewC))
+ end.
+
+del_index_info(Tab, Pos) ->
+ Commit = val({Tab, commit_work}),
+ case lists:keysearch(index, 1, Commit) of
+ false ->
+ %% Something is wrong ignore
+ skip;
+ {value, Old} ->
+ case lists:keydelete(Pos, 1, Old#index.pos_list) of
+ [] ->
+ NewC = lists:keydelete(index, 1, Commit),
+ mnesia_lib:set({Tab, commit_work},
+ mnesia_lib:sort_commit(NewC));
+ New ->
+ Index = Old#index{pos_list = New},
+ NewC = lists:keyreplace(index, 1, Commit, Index),
+ mnesia_lib:set({Tab, commit_work},
+ mnesia_lib:sort_commit(NewC))
+ end
+ end.
+
+db_put({ram, Ixt}, V) ->
+ true = ?ets_insert(Ixt, V);
+db_put({dets, Ixt}, V) ->
+ ok = dets:insert(Ixt, V).
+
+db_get({ram, Ixt}, K) ->
+ ?ets_lookup(Ixt, K);
+db_get({dets, Ixt}, K) ->
+ dets:lookup(Ixt, K).
+
+db_match_erase({ram, Ixt}, Pat) ->
+ true = ?ets_match_delete(Ixt, Pat);
+db_match_erase({dets, Ixt}, Pat) ->
+ ok = dets:match_delete(Ixt, Pat).
+
+db_match({ram, Ixt}, Pat) ->
+ ?ets_match(Ixt, Pat);
+db_match({dets, Ixt}, Pat) ->
+ dets:match(Ixt, Pat).
+
+get_index_table(Tab, Pos) ->
+ get_index_table(Tab, val({Tab, storage_type}), Pos).
+
+get_index_table(Tab, ram_copies, Pos) ->
+ {ram, val({Tab, {index, Pos}})};
+get_index_table(Tab, disc_copies, Pos) ->
+ {ram, val({Tab, {index, Pos}})};
+get_index_table(Tab, disc_only_copies, Pos) ->
+ {dets, val({Tab, {index, Pos}})};
+get_index_table(_Tab, unknown, _Pos) ->
+ unknown.
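+
+%% Illustrative sketch (hypothetical table 'person', index position 3): the
+%% handle returned by get_index_table/2 is tagged with its storage type, so
+%% callers stay storage independent, e.g.
+%%
+%%   Ixt = get_index_table(person, 3),            %% {ram, Ets} | {dets, Tag}
+%%   db_put(Ixt, {SecondaryKey, PrimaryKey}),
+%%   db_get(Ixt, SecondaryKey).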
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_kernel_sup.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_kernel_sup.erl
new file mode 100644
index 0000000000..015a42c749
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_kernel_sup.erl
@@ -0,0 +1,60 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_kernel_sup.erl,v 1.1 2008/12/17 09:53:38 mikpe Exp $
+%%
+-module(mnesia_kernel_sup).
+
+-behaviour(supervisor).
+
+-export([start/0, init/1, supervisor_timeout/1]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% top supervisor callback functions
+
+start() ->
+ supervisor:start_link({local, mnesia_kernel_sup}, ?MODULE, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% sub supervisor callback functions
+
+init([]) ->
+ ProcLib = [mnesia_monitor, proc_lib],
+ Flags = {one_for_all, 0, timer:hours(24)}, % Trust the top supervisor
+ Workers = [worker_spec(mnesia_monitor, timer:seconds(3), [gen_server]),
+ worker_spec(mnesia_subscr, timer:seconds(3), [gen_server]),
+ worker_spec(mnesia_locker, timer:seconds(3), ProcLib),
+ worker_spec(mnesia_recover, timer:minutes(3), [gen_server]),
+ worker_spec(mnesia_tm, timer:seconds(30), ProcLib),
+ supervisor_spec(mnesia_checkpoint_sup),
+ supervisor_spec(mnesia_snmp_sup),
+ worker_spec(mnesia_controller, timer:seconds(3), [gen_server]),
+ worker_spec(mnesia_late_loader, timer:seconds(3), ProcLib)
+ ],
+ {ok, {Flags, Workers}}.
+
+worker_spec(Name, KillAfter, Modules) ->
+ KA = supervisor_timeout(KillAfter),
+ {Name, {Name, start, []}, permanent, KA, worker, [Name] ++ Modules}.
+
+supervisor_spec(Name) ->
+ {Name, {Name, start, []}, permanent, infinity, supervisor,
+ [Name, supervisor]}.
+
+-ifdef(debug_shutdown).
+supervisor_timeout(_KillAfter) -> timer:hours(24).
+-else.
+supervisor_timeout(KillAfter) -> KillAfter.
+-endif.
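+
+%% Illustrative sketch (not part of the original module): worker_spec/3
+%% builds ordinary supervisor child specs, e.g.
+%%
+%%   worker_spec(mnesia_monitor, timer:seconds(3), [gen_server]) ->
+%%       {mnesia_monitor, {mnesia_monitor, start, []},
+%%        permanent, 3000, worker, [mnesia_monitor, gen_server]}
+%%
+%% assuming debug_shutdown is not defined, so supervisor_timeout/1 is the
+%% identity function.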
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_late_loader.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_late_loader.erl
new file mode 100644
index 0000000000..b49cf22fd9
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_late_loader.erl
@@ -0,0 +1,95 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_late_loader.erl,v 1.1 2008/12/17 09:53:38 mikpe Exp $
+%%
+-module(mnesia_late_loader).
+
+-export([
+ async_late_disc_load/3,
+ maybe_async_late_disc_load/3,
+ init/1,
+ start/0
+ ]).
+
+%% sys callback functions
+-export([
+ system_continue/3,
+ system_terminate/4,
+ system_code_change/4
+ ]).
+
+-define(SERVER_NAME, ?MODULE).
+
+-record(state, {supervisor}).
+
+async_late_disc_load(Node, Tabs, Reason) ->
+ Msg = {async_late_disc_load, Tabs, Reason},
+ catch ({?SERVER_NAME, Node} ! {self(), Msg}).
+
+maybe_async_late_disc_load(Node, Tabs, Reason) ->
+ Msg = {maybe_async_late_disc_load, Tabs, Reason},
+ catch ({?SERVER_NAME, Node} ! {self(), Msg}).
+
+start() ->
+ mnesia_monitor:start_proc(?SERVER_NAME, ?MODULE, init, [self()]).
+
+init(Parent) ->
+ %% Trap exit omitted intentionally
+ register(?SERVER_NAME, self()),
+ link(whereis(mnesia_controller)), %% We may not hang
+ mnesia_controller:merge_schema(),
+ unlink(whereis(mnesia_controller)),
+ mnesia_lib:set(mnesia_status, running),
+ proc_lib:init_ack(Parent, {ok, self()}),
+ loop(#state{supervisor = Parent}).
+
+loop(State) ->
+ receive
+ {_From, {async_late_disc_load, Tabs, Reason}} ->
+ mnesia_controller:schedule_late_disc_load(Tabs, Reason),
+ loop(State);
+
+ {_From, {maybe_async_late_disc_load, Tabs, Reason}} ->
+ GoodTabs =
+ [T || T <- Tabs,
+ lists:member(node(),
+ mnesia_recover:get_master_nodes(T))],
+ mnesia_controller:schedule_late_disc_load(GoodTabs, Reason),
+ loop(State);
+
+ {system, From, Msg} ->
+ mnesia_lib:dbg_out("~p got {system, ~p, ~p}~n",
+ [?SERVER_NAME, From, Msg]),
+ Parent = State#state.supervisor,
+ sys:handle_system_msg(Msg, From, Parent, ?MODULE, [], State);
+
+ Msg ->
+ mnesia_lib:error("~p got unexpected message: ~p~n",
+ [?SERVER_NAME, Msg]),
+ loop(State)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% System upgrade
+
+system_continue(_Parent, _Debug, State) ->
+ loop(State).
+
+system_terminate(Reason, _Parent, _Debug, _State) ->
+ exit(Reason).
+
+system_code_change(State, _Module, _OldVsn, _Extra) ->
+ {ok, State}.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_lib.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_lib.erl
new file mode 100644
index 0000000000..29a54936d4
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_lib.erl
@@ -0,0 +1,1276 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_lib.erl,v 1.3 2009/07/01 15:45:40 kostis Exp $
+%%
+%% This module contains all sorts of miscellaneous functions which don't fit
+%% anywhere else. Basically everything is exported.
+
+-module(mnesia_lib).
+
+-include("mnesia.hrl").
+-include_lib("kernel/include/file.hrl").
+
+-export([core_file/0]).
+
+-export([
+ active_tables/0,
+ add/2,
+ add_list/2,
+ all_nodes/0,
+%% catch_val/1,
+ cleanup_tmp_files/1,
+ copy_file/2,
+ copy_holders/1,
+ coredump/0,
+ coredump/1,
+ create_counter/1,
+ cs_to_nodes/1,
+ cs_to_storage_type/2,
+ dets_to_ets/6,
+ db_chunk/2,
+ db_init_chunk/1,
+ db_init_chunk/2,
+ db_init_chunk/3,
+ db_erase/2,
+ db_erase/3,
+ db_erase_tab/1,
+ db_erase_tab/2,
+ db_first/1,
+ db_first/2,
+ db_last/1,
+ db_last/2,
+ db_fixtable/3,
+ db_get/2,
+ db_get/3,
+ db_match_erase/2,
+ db_match_erase/3,
+ db_match_object/2,
+ db_match_object/3,
+ db_next_key/2,
+ db_next_key/3,
+ db_prev_key/2,
+ db_prev_key/3,
+ db_put/2,
+ db_put/3,
+ db_select/2,
+ db_select/3,
+ db_slot/2,
+ db_slot/3,
+ db_update_counter/3,
+ db_update_counter/4,
+ dbg_out/2,
+ del/2,
+ dets_sync_close/1,
+ dets_sync_open/2,
+ dets_sync_open/3,
+ dir/0,
+ dir/1,
+ dir_info/0,
+ dirty_rpc_error_tag/1,
+ dist_coredump/0,
+ disk_type/1,
+ disk_type/2,
+ elems/2,
+ ensure_loaded/1,
+ error/2,
+ error_desc/1,
+ etype/1,
+ exists/1,
+ fatal/2,
+ get_node_number/0,
+ fix_error/1,
+ important/2,
+ incr_counter/1,
+ incr_counter/2,
+ intersect/2,
+ is_running/0,
+ is_running/1,
+ is_running_remote/0,
+ is_string/1,
+ key_search_delete/3,
+ key_search_all/3,
+ last_error/0,
+ local_active_tables/0,
+ lock_table/1,
+ mkcore/1,
+ not_active_here/1,
+ other_val/2,
+ pad_name/3,
+ random_time/2,
+ read_counter/1,
+ readable_indecies/1,
+ remote_copy_holders/1,
+ report_fatal/2,
+ report_system_event/1,
+ running_nodes/0,
+ running_nodes/1,
+ schema_cs_to_storage_type/2,
+ search_delete/2,
+ set/2,
+ set_counter/2,
+ set_local_content_whereabouts/1,
+ set_remote_where_to_read/1,
+ set_remote_where_to_read/2,
+ show/1,
+ show/2,
+ sort_commit/1,
+ storage_type_at_node/2,
+ swap_tmp_files/1,
+ tab2dat/1,
+ tab2dmp/1,
+ tab2tmp/1,
+ tab2dcd/1,
+ tab2dcl/1,
+ to_list/1,
+ union/2,
+ uniq/1,
+ unlock_table/1,
+ unset/1,
+ update_counter/2,
+ val/1,
+ vcore/0,
+ vcore/1,
+ verbose/2,
+ view/0,
+ view/1,
+ view/2,
+ warning/2,
+
+ is_debug_compiled/0,
+ activate_debug_fun/5,
+ deactivate_debug_fun/3,
+ eval_debug_fun/4,
+ scratch_debug_fun/0
+ ]).
+
+
+search_delete(Obj, List) ->
+ search_delete(Obj, List, [], none).
+search_delete(Obj, [Obj|Tail], Ack, _Res) ->
+ search_delete(Obj, Tail, Ack, Obj);
+search_delete(Obj, [H|T], Ack, Res) ->
+ search_delete(Obj, T, [H|Ack], Res);
+search_delete(_, [], Ack, Res) ->
+ {Res, Ack}.
+
+key_search_delete(Key, Pos, TupleList) ->
+ key_search_delete(Key, Pos, TupleList, none, []).
+key_search_delete(Key, Pos, [H|T], _Obj, Ack) when element(Pos, H) == Key ->
+ key_search_delete(Key, Pos, T, H, Ack);
+key_search_delete(Key, Pos, [H|T], Obj, Ack) ->
+ key_search_delete(Key, Pos, T, Obj, [H|Ack]);
+key_search_delete(_, _, [], Obj, Ack) ->
+ {Obj, Ack}.
+
+key_search_all(Key, Pos, TupleList) ->
+ key_search_all(Key, Pos, TupleList, []).
+key_search_all(Key, N, [H|T], Ack) when element(N, H) == Key ->
+ key_search_all(Key, N, T, [H|Ack]);
+key_search_all(Key, N, [_|T], Ack) ->
+ key_search_all(Key, N, T, Ack);
+key_search_all(_, _, [], Ack) -> Ack.
+
+intersect(L1, L2) ->
+ L2 -- (L2 -- L1).
+
+elems(I, [H|T]) ->
+ [element(I, H) | elems(I, T)];
+elems(_, []) ->
+ [].
+
+%% sort_commit sees to it that checkpoint info always comes first in the
+%% commit_work structure; the other info does not need to be sorted.
+sort_commit(List) ->
+ sort_commit2(List, []).
+
+sort_commit2([{checkpoints, ChkpL}| Rest], Acc) ->
+ [{checkpoints, ChkpL}| Rest] ++ Acc;
+sort_commit2([H | R], Acc) ->
+ sort_commit2(R, [H | Acc]);
+sort_commit2([], Acc) -> Acc.
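+
+%% Illustrative sketch (the 'foo' and 'bar' entries are made up):
+%%
+%%   sort_commit([{foo, 1}, {checkpoints, Chkp}, {bar, 2}]) ->
+%%       [{checkpoints, Chkp}, {bar, 2}, {foo, 1}]
+%%
+%% i.e. only the checkpoint entry is moved to the front; the relative order
+%% of the rest is not significant.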
+
+is_string([H|T]) ->
+ if
+ 0 =< H, H < 256, integer(H) -> is_string(T);
+ true -> false
+ end;
+is_string([]) -> true.
+
+%%%
+
+union([H|L1], L2) ->
+ case lists:member(H, L2) of
+ true -> union(L1, L2);
+ false -> [H | union(L1, L2)]
+ end;
+union([], L2) -> L2.
+
+uniq([]) ->
+ [];
+uniq(List) ->
+ [H|T] = lists:sort(List),
+ uniq1(H, T, []).
+
+uniq1(H, [H|R], Ack) ->
+ uniq1(H, R, Ack);
+uniq1(Old, [H|R], Ack) ->
+ uniq1(H, R, [Old|Ack]);
+uniq1(Old, [], Ack) ->
+ [Old| Ack].
+
+to_list(X) when list(X) -> X;
+to_list(X) -> atom_to_list(X).
+
+all_nodes() ->
+ Ns = mnesia:system_info(db_nodes) ++
+ mnesia:system_info(extra_db_nodes),
+ mnesia_lib:uniq(Ns).
+
+running_nodes() ->
+ running_nodes(all_nodes()).
+
+running_nodes(Ns) ->
+ {Replies, _BadNs} = rpc:multicall(Ns, ?MODULE, is_running_remote, []),
+ [N || {GoodState, N} <- Replies, GoodState == true].
+
+is_running_remote() ->
+ IsRunning = is_running(),
+ {IsRunning == yes, node()}.
+
+is_running(Node) when atom(Node) ->
+ case rpc:call(Node, ?MODULE, is_running, []) of
+ {badrpc, _} -> no;
+ X -> X
+ end.
+
+is_running() ->
+ case ?catch_val(mnesia_status) of
+ {'EXIT', _} -> no;
+ running -> yes;
+ starting -> starting;
+ stopping -> stopping
+ end.
+
+show(X) ->
+ show(X, []).
+show(F, A) ->
+ io:format(user, F, A).
+
+
+pad_name([Char | Chars], Len, Tail) ->
+ [Char | pad_name(Chars, Len - 1, Tail)];
+pad_name([], Len, Tail) when Len =< 0 ->
+ Tail;
+pad_name([], Len, Tail) ->
+ [$ | pad_name([], Len - 1, Tail)].
+
+%% Some utility functions .....
+active_here(Tab) ->
+ case val({Tab, where_to_read}) of
+ Node when Node == node() -> true;
+ _ -> false
+ end.
+
+not_active_here(Tab) ->
+ not active_here(Tab).
+
+exists(Fname) ->
+ case file:open(Fname, [raw,read]) of
+ {ok, F} ->file:close(F), true;
+ _ -> false
+ end.
+
+dir() -> mnesia_monitor:get_env(dir).
+
+dir(Fname) ->
+ filename:join([dir(), to_list(Fname)]).
+
+tab2dat(Tab) -> %% DETS files
+ dir(lists:concat([Tab, ".DAT"])).
+
+tab2tmp(Tab) ->
+ dir(lists:concat([Tab, ".TMP"])).
+
+tab2dmp(Tab) -> %% Dumped ets tables
+ dir(lists:concat([Tab, ".DMP"])).
+
+tab2dcd(Tab) -> %% Disc copies data
+ dir(lists:concat([Tab, ".DCD"])).
+
+tab2dcl(Tab) -> %% Disc copies log
+ dir(lists:concat([Tab, ".DCL"])).
+
+storage_type_at_node(Node, Tab) ->
+ search_key(Node, [{disc_copies, val({Tab, disc_copies})},
+ {ram_copies, val({Tab, ram_copies})},
+ {disc_only_copies, val({Tab, disc_only_copies})}]).
+
+cs_to_storage_type(Node, Cs) ->
+ search_key(Node, [{disc_copies, Cs#cstruct.disc_copies},
+ {ram_copies, Cs#cstruct.ram_copies},
+ {disc_only_copies, Cs#cstruct.disc_only_copies}]).
+
+schema_cs_to_storage_type(Node, Cs) ->
+ case cs_to_storage_type(Node, Cs) of
+ unknown when Cs#cstruct.name == schema -> ram_copies;
+ Other -> Other
+ end.
+
+
+search_key(Key, [{Val, List} | Tail]) ->
+ case lists:member(Key, List) of
+ true -> Val;
+ false -> search_key(Key, Tail)
+ end;
+search_key(_Key, []) ->
+ unknown.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% ops, we've got some global variables here :-)
+
+%% They are
+%%
+%% {Tab, setorbag}, -> set | bag
+%% {Tab, storage_type} -> disc_copies |ram_copies | unknown (**)
+%% {Tab, disc_copies} -> node list (from schema)
+%% {Tab, ram_copies}, -> node list (from schema)
+%% {Tab, arity}, -> number
+%% {Tab, attributes}, -> atom list
+%% {Tab, wild_pattern}, -> record tuple with '_'s
+%% {Tab, {index, Pos}} -> ets table
+%% {Tab, index} -> integer list
+%% {Tab, cstruct} -> cstruct structure
+%%
+
+%% The following fields are dynamic according to the
+%% current node/table situation
+
+%% {Tab, where_to_write} -> node list
+%% {Tab, where_to_read} -> node | nowhere
+%%
+%% {schema, tables} -> tab list
+%% {schema, local_tables} -> tab list (**)
+%%
+%% {current, db_nodes} -> node list
+%%
+%% dir -> directory path (**)
+%% mnesia_status -> status | running | stopping (**)
+%% (**) == (Different on all nodes)
+%%
+
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', _ReASoN_} -> mnesia_lib:other_val(Var, _ReASoN_);
+ _VaLuE_ -> _VaLuE_
+ end.
+
+set(Var, Val) ->
+ ?ets_insert(mnesia_gvar, {Var, Val}).
+
+unset(Var) ->
+ ?ets_delete(mnesia_gvar, Var).
+
+other_val(Var, Other) ->
+ case Var of
+ {_, where_to_read} -> nowhere;
+ {_, where_to_write} -> [];
+ {_, active_replicas} -> [];
+ _ ->
+ pr_other(Var, Other)
+ end.
+
+pr_other(Var, Other) ->
+ Why =
+ case is_running() of
+ no -> {node_not_running, node()};
+ _ -> {no_exists, Var}
+ end,
+ verbose("~p (~p) val(mnesia_gvar, ~w) -> ~p ~p ~n",
+ [self(), process_info(self(), registered_name),
+ Var, Other, Why]),
+ case Other of
+ {badarg, [{ets, lookup_element, _}|_]} ->
+ exit(Why);
+ _ ->
+ erlang:error(Why)
+ end.
+
+%% Some functions for list valued variables
+add(Var, Val) ->
+ L = val(Var),
+ set(Var, [Val | lists:delete(Val, L)]).
+
+add_list(Var, List) ->
+ L = val(Var),
+ set(Var, union(L, List)).
+
+del(Var, Val) ->
+ L = val(Var),
+ set(Var, lists:delete(Val, L)).
+
+%% This function is needed due to the fact
+%% that the application_controller enters
+%% a deadlock now and then. ac is implemented
+%% as a rather naive server.
+ensure_loaded(Appl) ->
+ case application_controller:get_loaded(Appl) of
+ {true, _} ->
+ ok;
+ false ->
+ case application:load(Appl) of
+ ok ->
+ ok;
+ {error, {already_loaded, Appl}} ->
+ ok;
+ {error, Reason} ->
+ {error, {application_load_error, Reason}}
+ end
+ end.
+
+local_active_tables() ->
+ Tabs = val({schema, local_tables}),
+ lists:zf(fun(Tab) -> active_here(Tab) end, Tabs).
+
+active_tables() ->
+ Tabs = val({schema, tables}),
+ F = fun(Tab) ->
+ case val({Tab, where_to_read}) of
+ nowhere -> false;
+ _ -> {true, Tab}
+ end
+ end,
+ lists:zf(F, Tabs).
+
+etype(X) when integer(X) -> integer;
+etype([]) -> nil;
+etype(X) when list(X) -> list;
+etype(X) when tuple(X) -> tuple;
+etype(X) when atom(X) -> atom;
+etype(_) -> othertype.
+
+remote_copy_holders(Cs) ->
+ copy_holders(Cs) -- [node()].
+
+copy_holders(Cs) when Cs#cstruct.local_content == false ->
+ cs_to_nodes(Cs);
+copy_holders(Cs) when Cs#cstruct.local_content == true ->
+ case lists:member(node(), cs_to_nodes(Cs)) of
+ true -> [node()];
+ false -> []
+ end.
+
+
+set_remote_where_to_read(Tab) ->
+ set_remote_where_to_read(Tab, []).
+
+set_remote_where_to_read(Tab, Ignore) ->
+ Active = val({Tab, active_replicas}),
+ Valid =
+ case mnesia_recover:get_master_nodes(Tab) of
+ [] -> Active;
+ Masters -> mnesia_lib:intersect(Masters, Active)
+ end,
+ Available = mnesia_lib:intersect(val({current, db_nodes}), Valid -- Ignore),
+ DiscOnlyC = val({Tab, disc_only_copies}),
+ Prefered = Available -- DiscOnlyC,
+ if
+ Prefered /= [] ->
+ set({Tab, where_to_read}, hd(Prefered));
+ Available /= [] ->
+ set({Tab, where_to_read}, hd(Available));
+ true ->
+ set({Tab, where_to_read}, nowhere)
+ end.
+
+%%% Local only
+set_local_content_whereabouts(Tab) ->
+ add({schema, local_tables}, Tab),
+ add({Tab, active_replicas}, node()),
+ set({Tab, where_to_write}, [node()]),
+ set({Tab, where_to_read}, node()).
+
+%%% counter routines
+
+create_counter(Name) ->
+ set_counter(Name, 0).
+
+set_counter(Name, Val) ->
+ ?ets_insert(mnesia_gvar, {Name, Val}).
+
+incr_counter(Name) ->
+ ?ets_update_counter(mnesia_gvar, Name, 1).
+
+incr_counter(Name, I) ->
+ ?ets_update_counter(mnesia_gvar, Name, I).
+
+update_counter(Name, Val) ->
+ ?ets_update_counter(mnesia_gvar, Name, Val).
+
+read_counter(Name) ->
+ ?ets_lookup_element(mnesia_gvar, Name, 2).
+
+cs_to_nodes(Cs) ->
+ Cs#cstruct.disc_only_copies ++
+ Cs#cstruct.disc_copies ++
+ Cs#cstruct.ram_copies.
+
+dist_coredump() ->
+ dist_coredump(all_nodes()).
+dist_coredump(Ns) ->
+ {Replies, _} = rpc:multicall(Ns, ?MODULE, coredump, []),
+ Replies.
+
+coredump() ->
+ coredump({crashinfo, {"user initiated~n", []}}).
+coredump(CrashInfo) ->
+ Core = mkcore(CrashInfo),
+ Out = core_file(),
+ important("Writing Mnesia core to file: ~p...~p~n", [Out, CrashInfo]),
+ file:write_file(Out, Core),
+ Out.
+
+core_file() ->
+ Integers = tuple_to_list(date()) ++ tuple_to_list(time()),
+ Fun = fun(I) when I < 10 -> ["_0", I];
+ (I) -> ["_", I]
+ end,
+ List = lists:append([Fun(I) || I <- Integers]),
+ filename:absname(lists:concat(["MnesiaCore.", node()] ++ List)).
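+
+%% Illustrative sketch: core_file/0 builds an absolute name such as
+%% "MnesiaCore.mynode@myhost_2009_07_01_15_45_40" (node and timestamp are
+%% hypothetical), padding single-digit date/time components with a zero.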
+
+mkcore(CrashInfo) ->
+% dbg_out("Making a Mnesia core dump...~p~n", [CrashInfo]),
+ Nodes = [node() |nodes()],
+ TidLocks = (catch ets:tab2list(mnesia_tid_locks)),
+ Core = [
+ CrashInfo,
+ {time, {date(), time()}},
+ {self, catch process_info(self())},
+ {nodes, catch rpc:multicall(Nodes, ?MODULE, get_node_number, [])},
+ {applications, catch lists:sort(application:loaded_applications())},
+ {flags, catch init:get_arguments()},
+ {code_path, catch code:get_path()},
+ {code_loaded, catch lists:sort(code:all_loaded())},
+ {etsinfo, catch ets_info(ets:all())},
+
+ {version, catch mnesia:system_info(version)},
+ {schema, catch ets:tab2list(schema)},
+ {gvar, catch ets:tab2list(mnesia_gvar)},
+ {master_nodes, catch mnesia_recover:get_master_node_info()},
+
+ {processes, catch procs()},
+ {relatives, catch relatives()},
+ {workers, catch workers(mnesia_controller:get_workers(2000))},
+ {locking_procs, catch locking_procs(TidLocks)},
+
+ {held_locks, catch mnesia:system_info(held_locks)},
+ {tid_locks, TidLocks},
+ {lock_queue, catch mnesia:system_info(lock_queue)},
+ {load_info, catch mnesia_controller:get_info(2000)},
+ {trans_info, catch mnesia_tm:get_info(2000)},
+
+ {schema_file, catch file:read_file(tab2dat(schema))},
+ {dir_info, catch dir_info()},
+ {logfile, catch {ok, read_log_files()}}
+ ],
+ term_to_binary(Core).
+
+procs() ->
+ Fun = fun(P) -> {P, (catch lists:zf(fun proc_info/1, process_info(P)))} end,
+ lists:map(Fun, processes()).
+
+proc_info({registered_name, Val}) -> {true, Val};
+proc_info({message_queue_len, Val}) -> {true, Val};
+proc_info({status, Val}) -> {true, Val};
+proc_info({current_function, Val}) -> {true, Val};
+proc_info(_) -> false.
+
+get_node_number() ->
+ {node(), self()}.
+
+read_log_files() ->
+ [{F, catch file:read_file(F)} || F <- mnesia_log:log_files()].
+
+dir_info() ->
+ {ok, Cwd} = file:get_cwd(),
+ Dir = dir(),
+ [{cwd, Cwd, file:read_file_info(Cwd)},
+ {mnesia_dir, Dir, file:read_file_info(Dir)}] ++
+ case file:list_dir(Dir) of
+ {ok, Files} ->
+ [{mnesia_file, F, catch file:read_file_info(dir(F))} || F <- Files];
+ Other ->
+ [Other]
+ end.
+
+ets_info([H|T]) ->
+ [{table, H, ets:info(H)} | ets_info(T)];
+ets_info([]) -> [].
+
+relatives() ->
+ Info = fun(Name) ->
+ case whereis(Name) of
+ undefined -> false;
+ Pid -> {true, {Name, Pid, catch process_info(Pid)}}
+ end
+ end,
+ lists:zf(Info, mnesia:ms()).
+
+workers({workers, Loader, Sender, Dumper}) ->
+ Info = fun({Name, Pid}) ->
+ case Pid of
+ undefined -> false;
+ Pid -> {true, {Name, Pid, catch process_info(Pid)}}
+ end
+ end,
+ lists:zf(Info, [{loader, Loader}, {sender, Sender}, {dumper, Dumper}]).
+
+locking_procs(LockList) when list(LockList) ->
+ Tids = [element(1, Lock) || Lock <- LockList],
+ UT = uniq(Tids),
+ Info = fun(Tid) ->
+ Pid = Tid#tid.pid,
+ case node(Pid) == node() of
+ true ->
+ {true, {Pid, catch process_info(Pid)}};
+ _ ->
+ false
+ end
+ end,
+ lists:zf(Info, UT).
+
+view() ->
+ Bin = mkcore({crashinfo, {"view only~n", []}}),
+ vcore(Bin).
+
+%% Displays a Mnesia file on the tty. The file may be repaired.
+view(File) ->
+ case suffix([".DAT", ".RET", ".DMP", ".TMP"], File) of
+ true ->
+ view(File, dat);
+ false ->
+ case suffix([".LOG", ".BUP", ".ETS"], File) of
+ true ->
+ view(File, log);
+ false ->
+ case lists:prefix("MnesiaCore.", File) of
+ true ->
+ view(File, core);
+ false ->
+ {error, "Unknown file name"}
+ end
+ end
+ end.
+
+view(File, dat) ->
+ dets:view(File);
+view(File, log) ->
+ mnesia_log:view(File);
+view(File, core) ->
+ vcore(File).
+
+suffix(Suffixes, File) ->
+ Fun = fun(S) -> lists:suffix(S, File) end,
+ lists:any(Fun, Suffixes).
+
+%% View a core file
+
+vcore() ->
+ Prefix = lists:concat(["MnesiaCore.", node()]),
+ Filter = fun(F) -> lists:prefix(Prefix, F) end,
+ {ok, Cwd} = file:get_cwd(),
+ case file:list_dir(Cwd) of
+ {ok, Files}->
+ CoreFiles = lists:sort(lists:zf(Filter, Files)),
+ show("Mnesia core files: ~p~n", [CoreFiles]),
+ vcore(lists:last(CoreFiles));
+ Error ->
+ Error
+ end.
+
+vcore(Bin) when binary(Bin) ->
+ Core = binary_to_term(Bin),
+ Fun = fun({Item, Info}) ->
+ show("***** ~p *****~n", [Item]),
+ case catch vcore_elem({Item, Info}) of
+ {'EXIT', Reason} ->
+ show("{'EXIT', ~p}~n", [Reason]);
+ _ -> ok
+ end
+ end,
+ lists:foreach(Fun, Core);
+
+vcore(File) ->
+ show("~n***** Mnesia core: ~p *****~n", [File]),
+ case file:read_file(File) of
+ {ok, Bin} ->
+ vcore(Bin);
+ _ ->
+ nocore
+ end.
+
+vcore_elem({schema_file, {ok, B}}) ->
+ Fname = "/tmp/schema.DAT",
+ file:write_file(Fname, B),
+ dets:view(Fname),
+ file:delete(Fname);
+
+vcore_elem({logfile, {ok, BinList}}) ->
+ Fun = fun({F, Info}) ->
+ show("----- logfile: ~p -----~n", [F]),
+ case Info of
+ {ok, B} ->
+ Fname = "/tmp/mnesia_vcore_elem.TMP",
+ file:write_file(Fname, B),
+ mnesia_log:view(Fname),
+ file:delete(Fname);
+ _ ->
+ show("~p~n", [Info])
+ end
+ end,
+ lists:foreach(Fun, BinList);
+
+vcore_elem({crashinfo, {Format, Args}}) ->
+ show(Format, Args);
+vcore_elem({gvar, L}) ->
+ show("~p~n", [lists:sort(L)]);
+vcore_elem({transactions, Info}) ->
+ mnesia_tm:display_info(user, Info);
+
+vcore_elem({_Item, Info}) ->
+ show("~p~n", [Info]).
+
+fix_error(X) ->
+ set(last_error, X), %% for debuggability
+ case X of
+ {aborted, Reason} -> Reason;
+ {abort, Reason} -> Reason;
+ Y when atom(Y) -> Y;
+ {'EXIT', {_Reason, {Mod, _, _}}} when atom(Mod) ->
+ save(X),
+ case atom_to_list(Mod) of
+ [$m, $n, $e|_] -> badarg;
+ _ -> X
+ end;
+ _ -> X
+ end.
+
+last_error() ->
+ val(last_error).
+
+%% The following is a list of possible mnesia errors and what they
+%% actually mean
+
+error_desc(nested_transaction) -> "Nested transactions are not allowed";
+error_desc(badarg) -> "Bad or invalid argument, possibly bad type";
+error_desc(no_transaction) -> "Operation not allowed outside transactions";
+error_desc(combine_error) -> "Table options were illegally combined";
+error_desc(bad_index) -> "Index already exists or was out of bounds";
+error_desc(already_exists) -> "Some schema option we try to set is already on";
+error_desc(index_exists)-> "Some ops can not be performed on tabs with index";
+error_desc(no_exists)-> "Tried to perform op on non-existing (non alive) item";
+error_desc(system_limit) -> "Some system_limit was exhausted";
+error_desc(mnesia_down) -> "A transaction involving objects at some remote "
+ "node which died while transaction was executing "
+ "*and* object(s) are no longer available elsewhere "
+ "in the network";
+error_desc(not_a_db_node) -> "A node which is non-existent in "
+ "the schema was mentioned";
+error_desc(bad_type) -> "Bad type on some provided arguments";
+error_desc(node_not_running) -> "Node not running";
+error_desc(truncated_binary_file) -> "Truncated binary in file";
+error_desc(active) -> "Some delete ops require that "
+ "all active objects are removed";
+error_desc(illegal) -> "Operation not supported on object";
+error_desc({'EXIT', Reason}) ->
+ error_desc(Reason);
+error_desc({error, Reason}) ->
+ error_desc(Reason);
+error_desc({aborted, Reason}) ->
+ error_desc(Reason);
+error_desc(Reason) when tuple(Reason), size(Reason) > 0 ->
+ setelement(1, Reason, error_desc(element(1, Reason)));
+error_desc(Reason) ->
+ Reason.
+
+dirty_rpc_error_tag(Reason) ->
+ case Reason of
+ {'EXIT', _} -> badarg;
+ no_variable -> badarg;
+ _ -> no_exists
+ end.
+
+fatal(Format, Args) ->
+ catch set(mnesia_status, stopping),
+ Core = mkcore({crashinfo, {Format, Args}}),
+ report_fatal(Format, Args, Core),
+ timer:sleep(10000), % Enough to write the core dump to disc?
+ mnesia:lkill(),
+ exit(fatal).
+
+report_fatal(Format, Args) ->
+ report_fatal(Format, Args, nocore).
+
+report_fatal(Format, Args, Core) ->
+ report_system_event({mnesia_fatal, Format, Args, Core}),
+ catch exit(whereis(mnesia_monitor), fatal).
+
+%% We sleep longer and longer the more we try.
+%% Did some testing and came up with the following constants.
+random_time(Retries, _Counter0) ->
+% UpperLimit = 2000,
+% MaxIntv = trunc(UpperLimit * (1-(4/((Retries*Retries)+4)))),
+ UpperLimit = 500,
+ Dup = Retries * Retries,
+ MaxIntv = trunc(UpperLimit * (1-(50/((Dup)+50)))),
+
+ case get(random_seed) of
+ undefined ->
+ {X, Y, Z} = erlang:now(), %% time()
+ random:seed(X, Y, Z),
+ Time = Dup + random:uniform(MaxIntv),
+ %% dbg_out("---random_test rs ~w max ~w val ~w---~n", [Retries, MaxIntv, Time]),
+ Time;
+ _ ->
+ Time = Dup + random:uniform(MaxIntv),
+ %% dbg_out("---random_test rs ~w max ~w val ~w---~n", [Retries, MaxIntv, Time]),
+ Time
+ end.
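+
+%% Worked example (sketch): with Retries = 5 we get Dup = 25 and
+%% MaxIntv = trunc(500 * (1 - 50/(25 + 50))) = trunc(500 / 3) = 166,
+%% so the returned sleep time is 25 + random:uniform(166), i.e. 26..191 ms.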
+
+report_system_event(Event0) ->
+ Event = {mnesia_system_event, Event0},
+ report_system_event(catch_notify(Event), Event),
+ case ?catch_val(subscribers) of
+ {'EXIT', _} -> ignore;
+ Pids -> lists:foreach(fun(Pid) -> Pid ! Event end, Pids)
+ end,
+ ok.
+
+catch_notify(Event) ->
+ case whereis(mnesia_event) of
+ undefined ->
+ {'EXIT', {badarg, {mnesia_event, Event}}};
+ Pid ->
+ gen_event:notify(Pid, Event)
+ end.
+
+report_system_event({'EXIT', Reason}, Event) ->
+ Mod = mnesia_monitor:get_env(event_module),
+ case mnesia_sup:start_event() of
+ {ok, Pid} ->
+ link(Pid),
+ gen_event:call(mnesia_event, Mod, Event, infinity),
+ unlink(Pid),
+
+ %% We get an exit signal if server dies
+ receive
+ {'EXIT', Pid, _Reason} ->
+ {error, {node_not_running, node()}}
+ after 0 ->
+ gen_event:stop(mnesia_event),
+ ok
+ end;
+
+ Error ->
+ Msg = "Mnesia(~p): Cannot report event ~p: ~p (~p)~n",
+ error_logger:format(Msg, [node(), Event, Reason, Error])
+ end;
+report_system_event(_Res, _Event) ->
+ ignore.
+
+%% important messages are reported regardless of debug level
+important(Format, Args) ->
+ save({Format, Args}),
+ report_system_event({mnesia_info, Format, Args}).
+
+%% Warning messages are reported regardless of debug level
+warning(Format, Args) ->
+ save({Format, Args}),
+ report_system_event({mnesia_warning, Format, Args}).
+
+%% error messages are reported regardless of debug level
+error(Format, Args) ->
+ save({Format, Args}),
+ report_system_event({mnesia_error, Format, Args}).
+
+%% verbose messages are reported if debug level == verbose, debug or trace
+verbose(Format, Args) ->
+ case mnesia_monitor:get_env(debug) of
+ none -> save({Format, Args});
+ verbose -> important(Format, Args);
+ debug -> important(Format, Args);
+ trace -> important(Format, Args)
+ end.
+
+%% debug messages are displayed if debug level == debug or trace
+dbg_out(Format, Args) ->
+ case mnesia_monitor:get_env(debug) of
+ none -> ignore;
+ verbose -> save({Format, Args});
+ _ -> report_system_event({mnesia_info, Format, Args})
+ end.
+
+%% Keep a small ring buffer of the most recent debug printouts
+save(DbgInfo) ->
+ catch save2(DbgInfo).
+
+save2(DbgInfo) ->
+ Key = {'$$$_report', current_pos},
+ P =
+ case ?ets_lookup_element(mnesia_gvar, Key, 2) of
+ 30 -> -1;
+ I -> I
+ end,
+ set({'$$$_report', current_pos}, P+1),
+ set({'$$$_report', P+1}, {date(), time(), DbgInfo}).
+
+copy_file(From, To) ->
+ case file:open(From, [raw, binary, read]) of
+ {ok, F} ->
+ case file:open(To, [raw, binary, write]) of
+ {ok, T} ->
+ Res = copy_file_loop(F, T, 8000),
+ file:close(F),
+ file:close(T),
+ Res;
+ {error, Reason} ->
+ {error, Reason}
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+copy_file_loop(F, T, ChunkSize) ->
+ case file:read(F, ChunkSize) of
+ {ok, {0, _}} ->
+ ok;
+ {ok, {_, Bin}} ->
+ file:write(T, Bin),
+ copy_file_loop(F, T, ChunkSize);
+ {ok, Bin} ->
+ file:write(T, Bin),
+ copy_file_loop(F, T, ChunkSize);
+ eof ->
+ ok;
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+
+%%%%%%%%%%%%
+%% Versions of all the low-level db functions that determine whether we
+%% shall go to disc or RAM to do the actual operation.
+
+db_get(Tab, Key) ->
+ db_get(val({Tab, storage_type}), Tab, Key).
+db_get(ram_copies, Tab, Key) -> ?ets_lookup(Tab, Key);
+db_get(disc_copies, Tab, Key) -> ?ets_lookup(Tab, Key);
+db_get(disc_only_copies, Tab, Key) -> dets:lookup(Tab, Key).
+
+db_init_chunk(Tab) ->
+ db_init_chunk(val({Tab, storage_type}), Tab, 1000).
+db_init_chunk(Tab, N) ->
+ db_init_chunk(val({Tab, storage_type}), Tab, N).
+
+db_init_chunk(disc_only_copies, Tab, N) ->
+ dets:select(Tab, [{'_', [], ['$_']}], N);
+db_init_chunk(_, Tab, N) ->
+ ets:select(Tab, [{'_', [], ['$_']}], N).
+
+db_chunk(disc_only_copies, State) ->
+ dets:select(State);
+db_chunk(_, State) ->
+ ets:select(State).
+
+db_put(Tab, Val) ->
+ db_put(val({Tab, storage_type}), Tab, Val).
+
+db_put(ram_copies, Tab, Val) -> ?ets_insert(Tab, Val), ok;
+db_put(disc_copies, Tab, Val) -> ?ets_insert(Tab, Val), ok;
+db_put(disc_only_copies, Tab, Val) -> dets:insert(Tab, Val).
+
+db_match_object(Tab, Pat) ->
+ db_match_object(val({Tab, storage_type}), Tab, Pat).
+db_match_object(Storage, Tab, Pat) ->
+ db_fixtable(Storage, Tab, true),
+ Res = catch_match_object(Storage, Tab, Pat),
+ db_fixtable(Storage, Tab, false),
+ case Res of
+ {'EXIT', Reason} -> exit(Reason);
+ _ -> Res
+ end.
+
+catch_match_object(disc_only_copies, Tab, Pat) ->
+ catch dets:match_object(Tab, Pat);
+catch_match_object(_, Tab, Pat) ->
+ catch ets:match_object(Tab, Pat).
+
+db_select(Tab, Pat) ->
+ db_select(val({Tab, storage_type}), Tab, Pat).
+
+db_select(Storage, Tab, Pat) ->
+ db_fixtable(Storage, Tab, true),
+ Res = catch_select(Storage, Tab, Pat),
+ db_fixtable(Storage, Tab, false),
+ case Res of
+ {'EXIT', Reason} -> exit(Reason);
+ _ -> Res
+ end.
+
+catch_select(disc_only_copies, Tab, Pat) ->
+ dets:select(Tab, Pat);
+catch_select(_, Tab, Pat) ->
+ ets:select(Tab, Pat).
+
+db_fixtable(ets, Tab, Bool) ->
+ ets:safe_fixtable(Tab, Bool);
+db_fixtable(ram_copies, Tab, Bool) ->
+ ets:safe_fixtable(Tab, Bool);
+db_fixtable(disc_copies, Tab, Bool) ->
+ ets:safe_fixtable(Tab, Bool);
+db_fixtable(dets, Tab, Bool) ->
+ dets:safe_fixtable(Tab, Bool);
+db_fixtable(disc_only_copies, Tab, Bool) ->
+ dets:safe_fixtable(Tab, Bool).
+
+db_erase(Tab, Key) ->
+ db_erase(val({Tab, storage_type}), Tab, Key).
+db_erase(ram_copies, Tab, Key) -> ?ets_delete(Tab, Key), ok;
+db_erase(disc_copies, Tab, Key) -> ?ets_delete(Tab, Key), ok;
+db_erase(disc_only_copies, Tab, Key) -> dets:delete(Tab, Key).
+
+db_match_erase(Tab, Pat) ->
+ db_match_erase(val({Tab, storage_type}), Tab, Pat).
+db_match_erase(ram_copies, Tab, Pat) -> ?ets_match_delete(Tab, Pat), ok;
+db_match_erase(disc_copies, Tab, Pat) -> ?ets_match_delete(Tab, Pat), ok;
+db_match_erase(disc_only_copies, Tab, Pat) -> dets:match_delete(Tab, Pat).
+
+db_first(Tab) ->
+ db_first(val({Tab, storage_type}), Tab).
+db_first(ram_copies, Tab) -> ?ets_first(Tab);
+db_first(disc_copies, Tab) -> ?ets_first(Tab);
+db_first(disc_only_copies, Tab) -> dets:first(Tab).
+
+db_next_key(Tab, Key) ->
+ db_next_key(val({Tab, storage_type}), Tab, Key).
+db_next_key(ram_copies, Tab, Key) -> ?ets_next(Tab, Key);
+db_next_key(disc_copies, Tab, Key) -> ?ets_next(Tab, Key);
+db_next_key(disc_only_copies, Tab, Key) -> dets:next(Tab, Key).
+
+db_last(Tab) ->
+ db_last(val({Tab, storage_type}), Tab).
+db_last(ram_copies, Tab) -> ?ets_last(Tab);
+db_last(disc_copies, Tab) -> ?ets_last(Tab);
+db_last(disc_only_copies, Tab) -> dets:first(Tab). %% Dets tables are unordered
+
+db_prev_key(Tab, Key) ->
+ db_prev_key(val({Tab, storage_type}), Tab, Key).
+db_prev_key(ram_copies, Tab, Key) -> ?ets_prev(Tab, Key);
+db_prev_key(disc_copies, Tab, Key) -> ?ets_prev(Tab, Key);
+db_prev_key(disc_only_copies, Tab, Key) -> dets:next(Tab, Key). %% Dets tables are unordered
+
+db_slot(Tab, Pos) ->
+ db_slot(val({Tab, storage_type}), Tab, Pos).
+db_slot(ram_copies, Tab, Pos) -> ?ets_slot(Tab, Pos);
+db_slot(disc_copies, Tab, Pos) -> ?ets_slot(Tab, Pos);
+db_slot(disc_only_copies, Tab, Pos) -> dets:slot(Tab, Pos).
+
+db_update_counter(Tab, C, Val) ->
+ db_update_counter(val({Tab, storage_type}), Tab, C, Val).
+db_update_counter(ram_copies, Tab, C, Val) ->
+ ?ets_update_counter(Tab, C, Val);
+db_update_counter(disc_copies, Tab, C, Val) ->
+ ?ets_update_counter(Tab, C, Val);
+db_update_counter(disc_only_copies, Tab, C, Val) ->
+ dets:update_counter(Tab, C, Val).
+
+db_erase_tab(Tab) ->
+ db_erase_tab(val({Tab, storage_type}), Tab).
+db_erase_tab(ram_copies, Tab) -> ?ets_delete_table(Tab);
+db_erase_tab(disc_copies, Tab) -> ?ets_delete_table(Tab);
+db_erase_tab(disc_only_copies, _Tab) -> ignore.
+
+%% assuming that Tab is a valid ets-table
+dets_to_ets(Tabname, Tab, File, Type, Rep, Lock) ->
+ {Open, Close} = mkfuns(Lock),
+ case Open(Tabname, [{file, File}, {type, disk_type(Tab, Type)},
+ {keypos, 2}, {repair, Rep}]) of
+ {ok, Tabname} ->
+ Res = dets:to_ets(Tabname, Tab),
+ Close(Tabname),
+ trav_ret(Res, Tab);
+ Other ->
+ Other
+ end.
+
+trav_ret(Tabname, Tabname) -> loaded;
+trav_ret(Other, _Tabname) -> Other.
+
+mkfuns(yes) ->
+ {fun(Tab, Args) -> dets_sync_open(Tab, Args) end,
+ fun(Tab) -> dets_sync_close(Tab) end};
+mkfuns(no) ->
+ {fun(Tab, Args) -> dets:open_file(Tab, Args) end,
+ fun(Tab) -> dets:close(Tab) end}.
+
+disk_type(Tab) ->
+ disk_type(Tab, val({Tab, setorbag})).
+
+disk_type(_Tab, ordered_set) ->
+ set;
+disk_type(_, Type) ->
+ Type.
+
+dets_sync_open(Tab, Ref, File) ->
+ Args = [{file, File},
+ {keypos, 2},
+ {repair, mnesia_monitor:get_env(auto_repair)},
+ {type, disk_type(Tab)}],
+ dets_sync_open(Ref, Args).
+
+lock_table(Tab) ->
+ global:set_lock({{mnesia_table_lock, Tab}, self()}, [node()], infinity).
+% dbg_out("dets_sync_open: ~p ~p~n", [T, self()]),
+
+unlock_table(Tab) ->
+ global:del_lock({{mnesia_table_lock, Tab}, self()}, [node()]).
+% dbg_out("unlock_table: ~p ~p~n", [T, self()]),
+
+dets_sync_open(Tab, Args) ->
+ lock_table(Tab),
+ case dets:open_file(Tab, Args) of
+ {ok, Tab} ->
+ {ok, Tab};
+ Other ->
+ dets_sync_close(Tab),
+ Other
+ end.
+
+dets_sync_close(Tab) ->
+ catch dets:close(Tab),
+ unlock_table(Tab),
+ ok.
+
+cleanup_tmp_files([Tab | Tabs]) ->
+ dets_sync_close(Tab),
+ file:delete(tab2tmp(Tab)),
+ cleanup_tmp_files(Tabs);
+cleanup_tmp_files([]) ->
+ ok.
+
+%% Returns a list of bad tables
+swap_tmp_files([Tab | Tabs]) ->
+ dets_sync_close(Tab),
+ Tmp = tab2tmp(Tab),
+ Dat = tab2dat(Tab),
+ case file:rename(Tmp, Dat) of
+ ok ->
+ swap_tmp_files(Tabs);
+ _ ->
+ file:delete(Tmp),
+ [Tab | swap_tmp_files(Tabs)]
+ end;
+swap_tmp_files([]) ->
+ [].
+
+readable_indecies(Tab) ->
+ val({Tab, index}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Managing conditional debug functions
+%%
+%% The main idea with the debug_funs is to allow test programs
+%% to control the internal behaviour of Mnesia. This is needed
+%% to make the test programs independent of system load, swapping
+%% and other circumstances that may affect the behaviour of Mnesia.
+%%
+%% First, calls to ?eval_debug_fun should be inserted at well-defined
+%% places in Mnesia's code, e.g. in critical situations during
+%% startup, transaction commit, backups etc.
+%%
+%% Then compile Mnesia with the compiler option 'debug'.
+%%
+%% In test programs ?activate_debug_fun should be called
+%% in order to bind a fun to the debug identifier stated
+%% in the call to ?eval_debug_fun.
+%%
+%% If eval_debug_fun finds that the fun is activated it
+%% invokes the fun as NewContext = Fun(PreviousContext, EvalContext)
+%% and replaces the PreviousContext with the NewContext.
+%% The initial context of a debug_fun is given as argument to
+%% activate_debug_fun.
+
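+%% Illustrative usage sketch (not part of the original module): assuming the
+%% ?activate_debug_fun / ?eval_debug_fun macros wrap the functions defined
+%% below when compiled with the 'debug' option, a test program could bind a
+%% counter fun to a (hypothetical) identifier and have Mnesia bump it at the
+%% instrumented spot:
+%%
+%%   activate_debug_fun(my_hook,
+%%                      fun(Count, _EvalContext) -> Count + 1 end,
+%%                      0, ?FILE, ?LINE),
+%%   %% ... later, somewhere inside Mnesia's code:
+%%   eval_debug_fun(my_hook, {commit, node()}, ?FILE, ?LINE).
+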
+-define(DEBUG_TAB, mnesia_debug).
+-record(debug_info, {id, function, context, file, line}).
+
+scratch_debug_fun() ->
+ dbg_out("scratch_debug_fun(): ~p~n", [?DEBUG_TAB]),
+ (catch ?ets_delete_table(?DEBUG_TAB)),
+ ?ets_new_table(?DEBUG_TAB, [set, public, named_table, {keypos, 2}]).
+
+activate_debug_fun(FunId, Fun, InitialContext, File, Line) ->
+ Info = #debug_info{id = FunId,
+ function = Fun,
+ context = InitialContext,
+ file = File,
+ line = Line
+ },
+ update_debug_info(Info).
+
+update_debug_info(Info) ->
+ case catch ?ets_insert(?DEBUG_TAB, Info) of
+ {'EXIT', _} ->
+ scratch_debug_fun(),
+ ?ets_insert(?DEBUG_TAB, Info);
+ _ ->
+ ok
+ end,
+ dbg_out("update_debug_info(~p)~n", [Info]),
+ ok.
+
+deactivate_debug_fun(FunId, _File, _Line) ->
+ catch ?ets_delete(?DEBUG_TAB, FunId),
+ ok.
+
+eval_debug_fun(FunId, EvalContext, EvalFile, EvalLine) ->
+ case catch ?ets_lookup(?DEBUG_TAB, FunId) of
+ [] ->
+ ok;
+ [Info] ->
+ OldContext = Info#debug_info.context,
+ dbg_out("~s(~p): ~w "
+ "activated in ~s(~p)~n "
+ "eval_debug_fun(~w, ~w)~n",
+ [filename:basename(EvalFile), EvalLine, Info#debug_info.id,
+ filename:basename(Info#debug_info.file), Info#debug_info.line,
+ OldContext, EvalContext]),
+ Fun = Info#debug_info.function,
+ NewContext = Fun(OldContext, EvalContext),
+
+ case catch ?ets_lookup(?DEBUG_TAB, FunId) of
+ [Info] when NewContext /= OldContext ->
+ NewInfo = Info#debug_info{context = NewContext},
+ update_debug_info(NewInfo);
+ _ ->
+ ok
+ end;
+ {'EXIT', _} -> ok
+ end.
+
+-ifdef(debug).
+ is_debug_compiled() -> true.
+-else.
+ is_debug_compiled() -> false.
+-endif.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl
new file mode 100644
index 0000000000..f21a8240aa
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl
@@ -0,0 +1,805 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_loader.erl,v 1.2 2010/03/04 13:54:19 maria Exp $
+%%
+%%% Purpose : Loads tables from local disc or from remote node
+
+-module(mnesia_loader).
+
+%% Mnesia internal stuff
+-export([disc_load_table/2,
+ net_load_table/4,
+ send_table/3]).
+
+-export([old_node_init_table/6]). %% Spawned old node protocol conversion hack
+-export([spawned_receiver/8]). %% Spawned lock taking process
+
+-import(mnesia_lib, [set/2, fatal/2, verbose/2, dbg_out/2]).
+
+-include("mnesia.hrl").
+
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason);
+ Value -> Value
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Load a table from local disc
+
+disc_load_table(Tab, Reason) ->
+ Storage = val({Tab, storage_type}),
+ Type = val({Tab, setorbag}),
+ dbg_out("Getting table ~p (~p) from disc: ~p~n",
+ [Tab, Storage, Reason]),
+ ?eval_debug_fun({?MODULE, do_get_disc_copy},
+ [{tab, Tab},
+ {reason, Reason},
+ {storage, Storage},
+ {type, Type}]),
+ do_get_disc_copy2(Tab, Reason, Storage, Type).
+
+do_get_disc_copy2(Tab, _Reason, Storage, _Type) when Storage == unknown ->
+ verbose("Local table copy of ~p has recently been deleted, ignored.~n",
+ [Tab]),
+ {loaded, ok}; %% ?
+do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == disc_copies ->
+ %% NOW we create the actual table
+ Repair = mnesia_monitor:get_env(auto_repair),
+ Args = [{keypos, 2}, public, named_table, Type],
+ case Reason of
+	{dumper, _} -> %% Resources already allocated
+ ignore;
+ _ ->
+ mnesia_monitor:mktab(Tab, Args),
+ Count = mnesia_log:dcd2ets(Tab, Repair),
+ case ets:info(Tab, size) of
+ X when X < Count * 4 ->
+ ok = mnesia_log:ets2dcd(Tab);
+ _ ->
+ ignore
+ end
+ end,
+ mnesia_index:init_index(Tab, Storage),
+ snmpify(Tab, Storage),
+ set({Tab, load_node}, node()),
+ set({Tab, load_reason}, Reason),
+ {loaded, ok};
+
+do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == ram_copies ->
+ Args = [{keypos, 2}, public, named_table, Type],
+ case Reason of
+	{dumper, _} -> %% Resources already allocated
+ ignore;
+ _ ->
+ mnesia_monitor:mktab(Tab, Args),
+ Fname = mnesia_lib:tab2dcd(Tab),
+ Datname = mnesia_lib:tab2dat(Tab),
+ Repair = mnesia_monitor:get_env(auto_repair),
+ case mnesia_monitor:use_dir() of
+ true ->
+ case mnesia_lib:exists(Fname) of
+ true -> mnesia_log:dcd2ets(Tab, Repair);
+ false ->
+ case mnesia_lib:exists(Datname) of
+ true ->
+ mnesia_lib:dets_to_ets(Tab, Tab, Datname,
+ Type, Repair, no);
+ false ->
+ false
+ end
+ end;
+ false ->
+ false
+ end
+ end,
+ mnesia_index:init_index(Tab, Storage),
+ snmpify(Tab, Storage),
+ set({Tab, load_node}, node()),
+ set({Tab, load_reason}, Reason),
+ {loaded, ok};
+
+do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == disc_only_copies ->
+ Args = [{file, mnesia_lib:tab2dat(Tab)},
+ {type, mnesia_lib:disk_type(Tab, Type)},
+ {keypos, 2},
+ {repair, mnesia_monitor:get_env(auto_repair)}],
+ case Reason of
+ {dumper, _} ->
+ mnesia_index:init_index(Tab, Storage),
+ snmpify(Tab, Storage),
+ set({Tab, load_node}, node()),
+ set({Tab, load_reason}, Reason),
+ {loaded, ok};
+ _ ->
+ case mnesia_monitor:open_dets(Tab, Args) of
+ {ok, _} ->
+ mnesia_index:init_index(Tab, Storage),
+ snmpify(Tab, Storage),
+ set({Tab, load_node}, node()),
+ set({Tab, load_reason}, Reason),
+ {loaded, ok};
+ {error, Error} ->
+ {not_loaded, {"Failed to create dets table", Error}}
+ end
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Load a table from a remote node
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Receiver Sender
+%% -------- ------
+%% Grab schema lock on table
+%% Determine table size
+%% Create empty pre-grown table
+%% Grab read lock on table
+%% Let receiver subscribe on updates done on sender node
+%% Disable rehashing of table
+%% Release read lock on table
+%% Send table to receiver in chunks
+%%
+%% Grab read lock on table
+%% Block dirty updates
+%% Update whereabouts
+%%
+%% Cancel the update subscription
+%% Process the subscription events
+%% Optionally dump to disc
+%% Unblock dirty updates
+%% Release read lock on table
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
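+%% Rough message sketch (not part of the original module), derived from
+%% send_table/3, send_packet/5, get_data/2 and the finish_copy functions below:
+%%
+%%   Sender   -> Receiver : {SenderPid, {first, TabSize}}   (or {first, TabSize, DetsData})
+%%   Receiver -> Sender   : {ReceiverPid, more}
+%%   Sender   -> Receiver : {SenderPid, {more, Recs}}       (repeated; receiver answers 'more')
+%%   Sender   -> Receiver : {SenderPid, no_more}            (end of data chunks)
+%%   Sender   -> Receiver : {SenderPid, {no_more, DatBin}}  (from the sender's finish_copy)
+%%   Receiver -> Sender   : {OrigTabRec, no_more}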
+
+-define(MAX_TRANSFER_SIZE, 7500).
+-define(MAX_RAM_FILE_SIZE, 1000000).
+-define(MAX_RAM_TRANSFERS, (?MAX_RAM_FILE_SIZE div ?MAX_TRANSFER_SIZE) + 1).
+-define(MAX_NOPACKETS, 20).
+
+net_load_table(Tab, Reason, Ns, Cs)
+ when Reason == {dumper,add_table_copy} ->
+ try_net_load_table(Tab, Reason, Ns, Cs);
+net_load_table(Tab, Reason, Ns, _Cs) ->
+ try_net_load_table(Tab, Reason, Ns, val({Tab, cstruct})).
+
+try_net_load_table(Tab, _Reason, [], _Cs) ->
+ verbose("Copy failed. No active replicas of ~p are available.~n", [Tab]),
+ {not_loaded, none_active};
+try_net_load_table(Tab, Reason, Ns, Cs) ->
+ Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
+ do_get_network_copy(Tab, Reason, Ns, Storage, Cs).
+
+do_get_network_copy(Tab, _Reason, _Ns, unknown, _Cs) ->
+ verbose("Local table copy of ~p has recently been deleted, ignored.~n", [Tab]),
+ {not_loaded, storage_unknown};
+do_get_network_copy(Tab, Reason, Ns, Storage, Cs) ->
+ [Node | Tail] = Ns,
+ dbg_out("Getting table ~p (~p) from node ~p: ~p~n",
+ [Tab, Storage, Node, Reason]),
+ ?eval_debug_fun({?MODULE, do_get_network_copy},
+ [{tab, Tab}, {reason, Reason},
+ {nodes, Ns}, {storage, Storage}]),
+ mnesia_controller:start_remote_sender(Node, Tab, self(), Storage),
+ put(mnesia_table_sender_node, {Tab, Node}),
+ case init_receiver(Node, Tab, Storage, Cs, Reason) of
+ ok ->
+ set({Tab, load_node}, Node),
+ set({Tab, load_reason}, Reason),
+ mnesia_controller:i_have_tab(Tab),
+ dbg_out("Table ~p copied from ~p to ~p~n", [Tab, Node, node()]),
+ {loaded, ok};
+ Err = {error, _} when element(1, Reason) == dumper ->
+ {not_loaded,Err};
+ restart ->
+ try_net_load_table(Tab, Reason, Tail, Cs);
+ down ->
+ try_net_load_table(Tab, Reason, Tail, Cs)
+ end.
+
+snmpify(Tab, Storage) ->
+ do_snmpify(Tab, val({Tab, snmp}), Storage).
+
+do_snmpify(_Tab, [], _Storage) ->
+ ignore;
+do_snmpify(Tab, Us, Storage) ->
+ Snmp = mnesia_snmp_hook:create_table(Us, Tab, Storage),
+ set({Tab, {index, snmp}}, Snmp).
+
+%% Start the receiver.
+%% The sender should be started first, so we don't hold the schema-read
+%% lock too long (or get stuck in a deadlock).
+init_receiver(Node, Tab, Storage, Cs, Reason) ->
+ receive
+ {SenderPid, {first, TabSize}} ->
+ spawn_receiver(Tab,Storage,Cs,SenderPid,
+ TabSize,false,Reason);
+ {SenderPid, {first, TabSize, DetsData}} ->
+ spawn_receiver(Tab,Storage,Cs,SenderPid,
+ TabSize,DetsData,Reason);
+ %% Protocol conversion hack
+ {copier_done, Node} ->
+ dbg_out("Sender of table ~p crashed on node ~p ~n", [Tab, Node]),
+ down(Tab, Storage)
+ end.
+
+
+table_init_fun(SenderPid) ->
+ PConv = mnesia_monitor:needs_protocol_conversion(node(SenderPid)),
+ MeMyselfAndI = self(),
+ fun(read) ->
+ Receiver =
+ if
+ PConv == true ->
+ MeMyselfAndI ! {actual_tabrec, self()},
+ MeMyselfAndI; %% Old mnesia
+ PConv == false -> self()
+ end,
+ SenderPid ! {Receiver, more},
+ get_data(SenderPid, Receiver)
+ end.
+
+
+%% Add_table_copy gets its own locks.
+spawn_receiver(Tab,Storage,Cs,SenderPid,TabSize,DetsData,{dumper,add_table_copy}) ->
+ Init = table_init_fun(SenderPid),
+ case do_init_table(Tab,Storage,Cs,SenderPid,TabSize,DetsData,self(), Init) of
+ Err = {error, _} ->
+ SenderPid ! {copier_done, node()},
+ Err;
+ Else ->
+ Else
+ end;
+
+spawn_receiver(Tab,Storage,Cs,SenderPid,
+ TabSize,DetsData,Reason) ->
+    %% Grab a schema lock to avoid deadlock between table_loader and schema_commit dumping.
+    %% Both may grab table locks in a different order.
+ Load = fun() ->
+ {_,Tid,Ts} = get(mnesia_activity_state),
+ mnesia_locker:rlock(Tid, Ts#tidstore.store,
+ {schema, Tab}),
+ Init = table_init_fun(SenderPid),
+ Pid = spawn_link(?MODULE, spawned_receiver,
+ [self(),Tab,Storage,Cs,
+ SenderPid,TabSize,DetsData,
+ Init]),
+ put(mnesia_real_loader, Pid),
+ wait_on_load_complete(Pid)
+ end,
+ Res = case mnesia:transaction(Load, 20) of
+ {'atomic', {error,Result}} when element(1,Reason) == dumper ->
+ SenderPid ! {copier_done, node()},
+ {error,Result};
+ {'atomic', {error,Result}} ->
+ SenderPid ! {copier_done, node()},
+ fatal("Cannot create table ~p: ~p~n",
+ [[Tab, Storage], Result]);
+ {'atomic', Result} -> Result;
+ {aborted, nomore} ->
+ SenderPid ! {copier_done, node()},
+ restart;
+ {aborted, _ } ->
+ SenderPid ! {copier_done, node()},
+ down %% either this node or sender is dying
+ end,
+ unlink(whereis(mnesia_tm)), %% Avoid late unlink from tm
+ Res.
+
+spawned_receiver(ReplyTo,Tab,Storage,Cs,
+ SenderPid,TabSize,DetsData, Init) ->
+ process_flag(trap_exit, true),
+ Done = do_init_table(Tab,Storage,Cs,
+ SenderPid,TabSize,DetsData,
+ ReplyTo, Init),
+ ReplyTo ! {self(),Done},
+ unlink(ReplyTo),
+ unlink(whereis(mnesia_controller)),
+ exit(normal).
+
+wait_on_load_complete(Pid) ->
+ receive
+ {Pid, Res} ->
+ Res;
+ {'EXIT', Pid, Reason} ->
+ exit(Reason);
+ Else ->
+ Pid ! Else,
+ wait_on_load_complete(Pid)
+ end.
+
+tab_receiver(Node, Tab, Storage, Cs, PConv, OrigTabRec) ->
+ receive
+ {SenderPid, {no_more, DatBin}} when PConv == false ->
+ finish_copy(Storage,Tab,Cs,SenderPid,DatBin,OrigTabRec);
+
+ %% Protocol conversion hack
+ {SenderPid, {no_more, DatBin}} when pid(PConv) ->
+ PConv ! {SenderPid, no_more},
+ receive
+ {old_init_table_complete, ok} ->
+ finish_copy(Storage, Tab, Cs, SenderPid, DatBin,OrigTabRec);
+ {old_init_table_complete, Reason} ->
+ Msg = "OLD: [d]ets:init table failed",
+ dbg_out("~s: ~p: ~p~n", [Msg, Tab, Reason]),
+ down(Tab, Storage)
+ end;
+
+ {actual_tabrec, Pid} ->
+ tab_receiver(Node, Tab, Storage, Cs, Pid,OrigTabRec);
+
+ {SenderPid, {more, [Recs]}} when pid(PConv) ->
+ PConv ! {SenderPid, {more, Recs}}, %% Forward Msg to OldNodes
+ tab_receiver(Node, Tab, Storage, Cs, PConv,OrigTabRec);
+
+ {'EXIT', PConv, Reason} -> %% [d]ets:init process crashed
+ Msg = "Receiver crashed",
+ dbg_out("~s: ~p: ~p~n", [Msg, Tab, Reason]),
+ down(Tab, Storage);
+
+ %% Protocol conversion hack
+ {copier_done, Node} ->
+ dbg_out("Sender of table ~p crashed on node ~p ~n", [Tab, Node]),
+ down(Tab, Storage);
+
+ {'EXIT', Pid, Reason} ->
+ handle_exit(Pid, Reason),
+ tab_receiver(Node, Tab, Storage, Cs, PConv,OrigTabRec)
+ end.
+
+create_table(Tab, TabSize, Storage, Cs) ->
+ if
+ Storage == disc_only_copies ->
+ mnesia_lib:lock_table(Tab),
+ Tmp = mnesia_lib:tab2tmp(Tab),
+ Size = lists:max([TabSize, 256]),
+ Args = [{file, Tmp},
+ {keypos, 2},
+%% {ram_file, true},
+ {estimated_no_objects, Size},
+ {repair, mnesia_monitor:get_env(auto_repair)},
+ {type, mnesia_lib:disk_type(Tab, Cs#cstruct.type)}],
+ file:delete(Tmp),
+ case mnesia_lib:dets_sync_open(Tab, Args) of
+ {ok, _} ->
+ mnesia_lib:unlock_table(Tab),
+ {Storage, Tab};
+ Else ->
+ mnesia_lib:unlock_table(Tab),
+ Else
+ end;
+ (Storage == ram_copies) or (Storage == disc_copies) ->
+ Args = [{keypos, 2}, public, named_table, Cs#cstruct.type],
+ case mnesia_monitor:unsafe_mktab(Tab, Args) of
+ Tab ->
+ {Storage, Tab};
+ Else ->
+ Else
+ end
+ end.
+
+do_init_table(Tab,Storage,Cs,SenderPid,
+ TabSize,DetsInfo,OrigTabRec,Init) ->
+ case create_table(Tab, TabSize, Storage, Cs) of
+ {Storage,Tab} ->
+ %% Debug info
+ Node = node(SenderPid),
+ put(mnesia_table_receiver, {Tab, Node, SenderPid}),
+ mnesia_tm:block_tab(Tab),
+ PConv = mnesia_monitor:needs_protocol_conversion(Node),
+
+ case init_table(Tab,Storage,Init,PConv,DetsInfo,SenderPid) of
+ ok ->
+ tab_receiver(Node,Tab,Storage,Cs,PConv,OrigTabRec);
+ Reason ->
+ Msg = "[d]ets:init table failed",
+ dbg_out("~s: ~p: ~p~n", [Msg, Tab, Reason]),
+ down(Tab, Storage)
+ end;
+ Error ->
+ Error
+ end.
+
+make_table_fun(Pid, TabRec) ->
+ fun(close) ->
+ ok;
+ (read) ->
+ get_data(Pid, TabRec)
+ end.
+
+get_data(Pid, TabRec) ->
+ receive
+ {Pid, {more, Recs}} ->
+ Pid ! {TabRec, more},
+ {Recs, make_table_fun(Pid,TabRec)};
+ {Pid, no_more} ->
+ end_of_input;
+ {copier_done, Node} ->
+ case node(Pid) of
+ Node ->
+ {copier_done, Node};
+ _ ->
+ get_data(Pid, TabRec)
+ end;
+ {'EXIT', Pid, Reason} ->
+ handle_exit(Pid, Reason),
+ get_data(Pid, TabRec)
+ end.
+
+init_table(Tab, disc_only_copies, Fun, false, DetsInfo,Sender) ->
+ ErtsVer = erlang:system_info(version),
+ case DetsInfo of
+ {ErtsVer, DetsData} ->
+ Res = (catch dets:is_compatible_bchunk_format(Tab, DetsData)),
+ case Res of
+ {'EXIT',{undef,[{dets,_,_}|_]}} ->
+ Sender ! {self(), {old_protocol, Tab}},
+ dets:init_table(Tab, Fun); %% Old dets version
+ {'EXIT', What} ->
+ exit(What);
+ false ->
+ Sender ! {self(), {old_protocol, Tab}},
+ dets:init_table(Tab, Fun); %% Old dets version
+ true ->
+ dets:init_table(Tab, Fun, [{format, bchunk}])
+ end;
+ Old when Old /= false ->
+ Sender ! {self(), {old_protocol, Tab}},
+ dets:init_table(Tab, Fun); %% Old dets version
+ _ ->
+ dets:init_table(Tab, Fun)
+ end;
+init_table(Tab, _, Fun, false, _DetsInfo,_) ->
+ case catch ets:init_table(Tab, Fun) of
+ true ->
+ ok;
+ {'EXIT', Else} -> Else
+ end;
+init_table(Tab, Storage, Fun, true, _DetsInfo, Sender) -> %% Old Nodes
+ spawn_link(?MODULE, old_node_init_table,
+ [Tab, Storage, Fun, self(), false, Sender]),
+ ok.
+
+old_node_init_table(Tab, Storage, Fun, TabReceiver, DetsInfo,Sender) ->
+ Res = init_table(Tab, Storage, Fun, false, DetsInfo,Sender),
+ TabReceiver ! {old_init_table_complete, Res},
+ unlink(TabReceiver),
+ ok.
+
+finish_copy(Storage,Tab,Cs,SenderPid,DatBin,OrigTabRec) ->
+ TabRef = {Storage, Tab},
+ subscr_receiver(TabRef, Cs#cstruct.record_name),
+ case handle_last(TabRef, Cs#cstruct.type, DatBin) of
+ ok ->
+ mnesia_index:init_index(Tab, Storage),
+ snmpify(Tab, Storage),
+ %% OrigTabRec must not be the spawned tab-receiver
+ %% due to old protocol.
+ SenderPid ! {OrigTabRec, no_more},
+ mnesia_tm:unblock_tab(Tab),
+ ok;
+ {error, Reason} ->
+ Msg = "Failed to handle last",
+ dbg_out("~s: ~p: ~p~n", [Msg, Tab, Reason]),
+ down(Tab, Storage)
+ end.
+
+subscr_receiver(TabRef = {_, Tab}, RecName) ->
+ receive
+ {mnesia_table_event, {Op, Val, _Tid}} ->
+ if
+ Tab == RecName ->
+ handle_event(TabRef, Op, Val);
+ true ->
+ handle_event(TabRef, Op, setelement(1, Val, RecName))
+ end,
+ subscr_receiver(TabRef, RecName);
+
+ {'EXIT', Pid, Reason} ->
+ handle_exit(Pid, Reason),
+ subscr_receiver(TabRef, RecName)
+ after 0 ->
+ ok
+ end.
+
+handle_event(TabRef, write, Rec) ->
+ db_put(TabRef, Rec);
+handle_event(TabRef, delete, {_Tab, Key}) ->
+ db_erase(TabRef, Key);
+handle_event(TabRef, delete_object, OldRec) ->
+ db_match_erase(TabRef, OldRec);
+handle_event(TabRef, clear_table, {_Tab, _Key}) ->
+ db_match_erase(TabRef, '_').
+
+handle_last({disc_copies, Tab}, _Type, nobin) ->
+ Ret = mnesia_log:ets2dcd(Tab),
+ Fname = mnesia_lib:tab2dat(Tab),
+ case mnesia_lib:exists(Fname) of
+ true -> %% Remove old .DAT files.
+ file:delete(Fname);
+ false ->
+ ok
+ end,
+ Ret;
+
+handle_last({disc_only_copies, Tab}, Type, nobin) ->
+ case mnesia_lib:swap_tmp_files([Tab]) of
+ [] ->
+ Args = [{file, mnesia_lib:tab2dat(Tab)},
+ {type, mnesia_lib:disk_type(Tab, Type)},
+ {keypos, 2},
+ {repair, mnesia_monitor:get_env(auto_repair)}],
+ mnesia_monitor:open_dets(Tab, Args),
+ ok;
+ L when list(L) ->
+ {error, {"Cannot swap tmp files", Tab, L}}
+ end;
+
+handle_last({ram_copies, _Tab}, _Type, nobin) ->
+ ok;
+handle_last({ram_copies, Tab}, _Type, DatBin) ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ mnesia_lib:lock_table(Tab),
+ Tmp = mnesia_lib:tab2tmp(Tab),
+ ok = file:write_file(Tmp, DatBin),
+ ok = file:rename(Tmp, mnesia_lib:tab2dcd(Tab)),
+ mnesia_lib:unlock_table(Tab),
+ ok;
+ false ->
+ ok
+ end.
+
+down(Tab, Storage) ->
+ case Storage of
+ ram_copies ->
+ catch ?ets_delete_table(Tab);
+ disc_copies ->
+ catch ?ets_delete_table(Tab);
+ disc_only_copies ->
+ mnesia_lib:cleanup_tmp_files([Tab])
+ end,
+ mnesia_checkpoint:tm_del_copy(Tab, node()),
+ mnesia_controller:sync_del_table_copy_whereabouts(Tab, node()),
+ mnesia_tm:unblock_tab(Tab),
+ flush_subcrs(),
+ down.
+
+flush_subcrs() ->
+ receive
+ {mnesia_table_event, _} ->
+ flush_subcrs();
+
+ {'EXIT', Pid, Reason} ->
+ handle_exit(Pid, Reason),
+ flush_subcrs()
+ after 0 ->
+ done
+ end.
+
+db_erase({ram_copies, Tab}, Key) ->
+ true = ?ets_delete(Tab, Key);
+db_erase({disc_copies, Tab}, Key) ->
+ true = ?ets_delete(Tab, Key);
+db_erase({disc_only_copies, Tab}, Key) ->
+ ok = dets:delete(Tab, Key).
+
+db_match_erase({ram_copies, Tab} , Pat) ->
+ true = ?ets_match_delete(Tab, Pat);
+db_match_erase({disc_copies, Tab} , Pat) ->
+ true = ?ets_match_delete(Tab, Pat);
+db_match_erase({disc_only_copies, Tab}, Pat) ->
+ ok = dets:match_delete(Tab, Pat).
+
+db_put({ram_copies, Tab}, Val) ->
+ true = ?ets_insert(Tab, Val);
+db_put({disc_copies, Tab}, Val) ->
+ true = ?ets_insert(Tab, Val);
+db_put({disc_only_copies, Tab}, Val) ->
+ ok = dets:insert(Tab, Val).
+
+%% This code executes at the remote site where the data resides
+%% and runs in a special copier process.
+
+calc_nokeys(Storage, Tab) ->
+ %% Calculate #keys per transfer
+ Key = mnesia_lib:db_first(Storage, Tab),
+ Recs = mnesia_lib:db_get(Storage, Tab, Key),
+ BinSize = size(term_to_binary(Recs)),
+ (?MAX_TRANSFER_SIZE div BinSize) + 1.
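+%% Worked example (not part of the original module): with ?MAX_TRANSFER_SIZE
+%% of 7500 bytes, a table whose records under the first key serialize to
+%% about 100 bytes yields 7500 div 100 + 1 = 76 keys per transferred chunk.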
+
+send_table(Pid, Tab, RemoteS) ->
+ case ?catch_val({Tab, storage_type}) of
+ {'EXIT', _} ->
+ {error, {no_exists, Tab}};
+ unknown ->
+ {error, {no_exists, Tab}};
+ Storage ->
+ %% Send first
+ TabSize = mnesia:table_info(Tab, size),
+ Pconvert = mnesia_monitor:needs_protocol_conversion(node(Pid)),
+ KeysPerTransfer = calc_nokeys(Storage, Tab),
+ ChunkData = dets:info(Tab, bchunk_format),
+
+ UseDetsChunk =
+ Storage == RemoteS andalso
+ Storage == disc_only_copies andalso
+ ChunkData /= undefined andalso
+ Pconvert == false,
+ if
+ UseDetsChunk == true ->
+ DetsInfo = erlang:system_info(version),
+ Pid ! {self(), {first, TabSize, {DetsInfo, ChunkData}}};
+ true ->
+ Pid ! {self(), {first, TabSize}}
+ end,
+
+ %% Debug info
+ put(mnesia_table_sender, {Tab, node(Pid), Pid}),
+ {Init, Chunk} = reader_funcs(UseDetsChunk, Tab, Storage, KeysPerTransfer),
+
+ SendIt = fun() ->
+ prepare_copy(Pid, Tab, Storage),
+ send_more(Pid, 1, Chunk, Init(), Tab, Pconvert),
+ finish_copy(Pid, Tab, Storage, RemoteS)
+ end,
+
+ case catch SendIt() of
+ receiver_died ->
+ cleanup_tab_copier(Pid, Storage, Tab),
+ unlink(whereis(mnesia_tm)),
+ ok;
+ {_, receiver_died} ->
+ unlink(whereis(mnesia_tm)),
+ ok;
+ {'atomic', no_more} ->
+ unlink(whereis(mnesia_tm)),
+ ok;
+ Reason ->
+ cleanup_tab_copier(Pid, Storage, Tab),
+ unlink(whereis(mnesia_tm)),
+ {error, Reason}
+ end
+ end.
+
+prepare_copy(Pid, Tab, Storage) ->
+ Trans =
+ fun() ->
+ mnesia:write_lock_table(Tab),
+ mnesia_subscr:subscribe(Pid, {table, Tab}),
+ update_where_to_write(Tab, node(Pid)),
+ mnesia_lib:db_fixtable(Storage, Tab, true),
+ ok
+ end,
+ case mnesia:transaction(Trans) of
+ {'atomic', ok} ->
+ ok;
+ {aborted, Reason} ->
+ exit({tab_copier_prepare, Tab, Reason})
+ end.
+
+update_where_to_write(Tab, Node) ->
+ case val({Tab, access_mode}) of
+ read_only ->
+ ignore;
+ read_write ->
+ Current = val({current, db_nodes}),
+ Ns =
+ case lists:member(Node, Current) of
+ true -> Current;
+ false -> [Node | Current]
+ end,
+ update_where_to_write(Ns, Tab, Node)
+ end.
+
+update_where_to_write([], _, _) ->
+ ok;
+update_where_to_write([H|T], Tab, AddNode) ->
+ rpc:call(H, mnesia_controller, call,
+ [{update_where_to_write, [add, Tab, AddNode], self()}]),
+ update_where_to_write(T, Tab, AddNode).
+
+send_more(Pid, N, Chunk, DataState, Tab, OldNode) ->
+ receive
+ {NewPid, more} ->
+ case send_packet(N - 1, NewPid, Chunk, DataState, OldNode) of
+ New when integer(New) ->
+ New - 1;
+ NewData ->
+ send_more(NewPid, ?MAX_NOPACKETS, Chunk, NewData, Tab, OldNode)
+ end;
+ {_NewPid, {old_protocol, Tab}} ->
+ Storage = val({Tab, storage_type}),
+ {Init, NewChunk} =
+ reader_funcs(false, Tab, Storage, calc_nokeys(Storage, Tab)),
+ send_more(Pid, 1, NewChunk, Init(), Tab, OldNode);
+
+ {copier_done, Node} when Node == node(Pid)->
+ verbose("Receiver of table ~p crashed on ~p (more)~n", [Tab, Node]),
+ throw(receiver_died)
+ end.
+
+reader_funcs(UseDetsChunk, Tab, Storage, KeysPerTransfer) ->
+ case UseDetsChunk of
+ false ->
+ {fun() -> mnesia_lib:db_init_chunk(Storage, Tab, KeysPerTransfer) end,
+ fun(Cont) -> mnesia_lib:db_chunk(Storage, Cont) end};
+ true ->
+ {fun() -> dets_bchunk(Tab, start) end,
+ fun(Cont) -> dets_bchunk(Tab, Cont) end}
+ end.
+
+dets_bchunk(Tab, Chunk) -> %% Arrg
+ case dets:bchunk(Tab, Chunk) of
+ {Cont, Data} -> {Data, Cont};
+ Else -> Else
+ end.
+
+send_packet(N, Pid, _Chunk, '$end_of_table', OldNode) ->
+ case OldNode of
+ true -> ignore; %% Old nodes can't handle the new no_more
+ false -> Pid ! {self(), no_more}
+ end,
+ N;
+send_packet(N, Pid, Chunk, {[], Cont}, OldNode) ->
+ send_packet(N, Pid, Chunk, Chunk(Cont), OldNode);
+send_packet(N, Pid, Chunk, {Recs, Cont}, OldNode) when N < ?MAX_NOPACKETS ->
+ case OldNode of
+	true -> Pid ! {self(), {more, [Recs]}}; %% Old nodes need a wrapping list
+ false -> Pid ! {self(), {more, Recs}}
+ end,
+ send_packet(N+1, Pid, Chunk, Chunk(Cont), OldNode);
+send_packet(_N, _Pid, _Chunk, DataState, _OldNode) ->
+ DataState.
+
+finish_copy(Pid, Tab, Storage, RemoteS) ->
+ RecNode = node(Pid),
+ DatBin = dat2bin(Tab, Storage, RemoteS),
+ Trans =
+ fun() ->
+ mnesia:read_lock_table(Tab),
+ A = val({Tab, access_mode}),
+ mnesia_controller:sync_and_block_table_whereabouts(Tab, RecNode, RemoteS, A),
+ cleanup_tab_copier(Pid, Storage, Tab),
+ mnesia_checkpoint:tm_add_copy(Tab, RecNode),
+ Pid ! {self(), {no_more, DatBin}},
+ receive
+		  {Pid, no_more} -> % Don't bother about the spurious 'more' message
+ no_more;
+ {copier_done, Node} when Node == node(Pid)->
+ verbose("Tab receiver ~p crashed (more): ~p~n", [Tab, Node]),
+ receiver_died
+ end
+ end,
+ mnesia:transaction(Trans).
+
+cleanup_tab_copier(Pid, Storage, Tab) ->
+ mnesia_lib:db_fixtable(Storage, Tab, false),
+ mnesia_subscr:unsubscribe(Pid, {table, Tab}).
+
+dat2bin(Tab, ram_copies, ram_copies) ->
+ mnesia_lib:lock_table(Tab),
+ Res = file:read_file(mnesia_lib:tab2dcd(Tab)),
+ mnesia_lib:unlock_table(Tab),
+ case Res of
+ {ok, DatBin} -> DatBin;
+ _ -> nobin
+ end;
+dat2bin(_Tab, _LocalS, _RemoteS) ->
+ nobin.
+
+handle_exit(Pid, Reason) when node(Pid) == node() ->
+ exit(Reason);
+handle_exit(_Pid, _Reason) -> %% Not from our node, this will be handled by
+ ignore. %% mnesia_down soon.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl
new file mode 100644
index 0000000000..c24ccc5518
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl
@@ -0,0 +1,1021 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_locker.erl,v 1.2 2009/07/01 15:45:40 kostis Exp $
+%%
+-module(mnesia_locker).
+
+-export([
+ get_held_locks/0,
+ get_lock_queue/0,
+ global_lock/5,
+ ixrlock/5,
+ init/1,
+ mnesia_down/2,
+ release_tid/1,
+ async_release_tid/2,
+ send_release_tid/2,
+ receive_release_tid_acc/2,
+ rlock/3,
+ rlock_table/3,
+ rwlock/3,
+ sticky_rwlock/3,
+ start/0,
+ sticky_wlock/3,
+ sticky_wlock_table/3,
+ wlock/3,
+ wlock_no_exist/4,
+ wlock_table/3
+ ]).
+
+%% sys callback functions
+-export([system_continue/3,
+ system_terminate/4,
+ system_code_change/4
+ ]).
+
+-include("mnesia.hrl").
+-import(mnesia_lib, [dbg_out/2, error/2, verbose/2]).
+
+-define(dbg(S,V), ok).
+%-define(dbg(S,V), dbg_out("~p:~p: " ++ S, [?MODULE, ?LINE] ++ V)).
+
+-define(ALL, '______WHOLETABLE_____').
+-define(STICK, '______STICK_____').
+-define(GLOBAL, '______GLOBAL_____').
+
+-record(state, {supervisor}).
+
+-record(queue, {oid, tid, op, pid, lucky}).
+
+%% mnesia_held_locks: contain {Oid, Op, Tid} entries (bag)
+-define(match_oid_held_locks(Oid), {Oid, '_', '_'}).
+%% mnesia_tid_locks: contain {Tid, Oid, Op} entries (bag)
+-define(match_oid_tid_locks(Tid), {Tid, '_', '_'}).
+%% mnesia_sticky_locks: contain {Oid, Node} entries and {Tab, Node} entries (set)
+-define(match_oid_sticky_locks(Oid),{Oid, '_'}).
+%% mnesia_lock_queue: contain {queue, Oid, Tid, Op, ReplyTo, WaitForTid} entries (ordered_set)
+-define(match_oid_lock_queue(Oid), #queue{oid=Oid, tid='_', op = '_', pid = '_', lucky = '_'}).
+%% mnesia_lock_counter: {{write, Tab}, Number} &&
+%% {{read, Tab}, Number} entries (set)
+
+start() ->
+ mnesia_monitor:start_proc(?MODULE, ?MODULE, init, [self()]).
+
+init(Parent) ->
+ register(?MODULE, self()),
+ process_flag(trap_exit, true),
+ proc_lib:init_ack(Parent, {ok, self()}),
+ loop(#state{supervisor = Parent}).
+
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', _ReASoN_} -> mnesia_lib:other_val(Var, _ReASoN_);
+ _VaLuE_ -> _VaLuE_
+ end.
+
+reply(From, R) ->
+ From ! {?MODULE, node(), R}.
+
+l_request(Node, X, Store) ->
+ {?MODULE, Node} ! {self(), X},
+ l_req_rec(Node, Store).
+
+l_req_rec(Node, Store) ->
+ ?ets_insert(Store, {nodes, Node}),
+ receive
+ {?MODULE, Node, {switch, Node2, Req}} ->
+ ?ets_insert(Store, {nodes, Node2}),
+ {?MODULE, Node2} ! Req,
+ {switch, Node2, Req};
+ {?MODULE, Node, Reply} ->
+ Reply;
+ {mnesia_down, Node} ->
+ {not_granted, {node_not_running, Node}}
+ end.
+
+release_tid(Tid) ->
+ ?MODULE ! {release_tid, Tid}.
+
+async_release_tid(Nodes, Tid) ->
+ rpc:abcast(Nodes, ?MODULE, {release_tid, Tid}).
+
+send_release_tid(Nodes, Tid) ->
+ rpc:abcast(Nodes, ?MODULE, {self(), {sync_release_tid, Tid}}).
+
+receive_release_tid_acc([Node | Nodes], Tid) ->
+ receive
+ {?MODULE, Node, {tid_released, Tid}} ->
+ receive_release_tid_acc(Nodes, Tid);
+ {mnesia_down, Node} ->
+ receive_release_tid_acc(Nodes, Tid)
+ end;
+receive_release_tid_acc([], _Tid) ->
+ ok.
+
+loop(State) ->
+ receive
+ {From, {write, Tid, Oid}} ->
+ try_sticky_lock(Tid, write, From, Oid),
+ loop(State);
+
+ %% If Key == ?ALL it's a request to lock the entire table
+ %%
+
+ {From, {read, Tid, Oid}} ->
+ try_sticky_lock(Tid, read, From, Oid),
+ loop(State);
+
+ %% Really do a read, but get hold of a write lock
+ %% used by mnesia:wread(Oid).
+
+ {From, {read_write, Tid, Oid}} ->
+ try_sticky_lock(Tid, read_write, From, Oid),
+ loop(State);
+
+ %% Tid has somehow terminated, clear up everything
+ %% and pass locks on to queued processes.
+ %% This is the purpose of the mnesia_tid_locks table
+
+ {release_tid, Tid} ->
+ do_release_tid(Tid),
+ loop(State);
+
+ %% stick lock, first tries this to the where_to_read Node
+ {From, {test_set_sticky, Tid, {Tab, _} = Oid, Lock}} ->
+ case ?ets_lookup(mnesia_sticky_locks, Tab) of
+ [] ->
+ reply(From, not_stuck),
+ loop(State);
+ [{_,Node}] when Node == node() ->
+ %% Lock is stuck here, see now if we can just set
+ %% a regular write lock
+ try_lock(Tid, Lock, From, Oid),
+ loop(State);
+ [{_,Node}] ->
+ reply(From, {stuck_elsewhere, Node}),
+ loop(State)
+ end;
+
+	%% If test_set_sticky fails, we send this to all nodes
+	%% after acquiring a real write lock on Oid
+
+ {stick, {Tab, _}, N} ->
+ ?ets_insert(mnesia_sticky_locks, {Tab, N}),
+ loop(State);
+
+	%% The caller which sends this message must have first
+	%% acquired a write lock on the entire table
+ {unstick, Tab} ->
+ ?ets_delete(mnesia_sticky_locks, Tab),
+ loop(State);
+
+ {From, {ix_read, Tid, Tab, IxKey, Pos}} ->
+ case catch mnesia_index:get_index_table(Tab, Pos) of
+ {'EXIT', _} ->
+ reply(From, {not_granted, {no_exists, Tab, {index, [Pos]}}}),
+ loop(State);
+ Index ->
+ Rk = mnesia_lib:elems(2,mnesia_index:db_get(Index, IxKey)),
+ %% list of real keys
+ case ?ets_lookup(mnesia_sticky_locks, Tab) of
+ [] ->
+ set_read_lock_on_all_keys(Tid, From,Tab,Rk,Rk,
+ []),
+ loop(State);
+ [{_,N}] when N == node() ->
+ set_read_lock_on_all_keys(Tid, From,Tab,Rk,Rk,
+ []),
+ loop(State);
+ [{_,N}] ->
+ Req = {From, {ix_read, Tid, Tab, IxKey, Pos}},
+ From ! {?MODULE, node(), {switch, N, Req}},
+ loop(State)
+ end
+ end;
+
+ {From, {sync_release_tid, Tid}} ->
+ do_release_tid(Tid),
+ reply(From, {tid_released, Tid}),
+ loop(State);
+
+ {release_remote_non_pending, Node, Pending} ->
+ release_remote_non_pending(Node, Pending),
+ mnesia_monitor:mnesia_down(?MODULE, Node),
+ loop(State);
+
+ {'EXIT', Pid, _} when Pid == State#state.supervisor ->
+ do_stop();
+
+ {system, From, Msg} ->
+ verbose("~p got {system, ~p, ~p}~n", [?MODULE, From, Msg]),
+ Parent = State#state.supervisor,
+ sys:handle_system_msg(Msg, From, Parent, ?MODULE, [], State);
+
+ Msg ->
+ error("~p got unexpected message: ~p~n", [?MODULE, Msg]),
+ loop(State)
+ end.
+
+set_lock(Tid, Oid, Op) ->
+ ?dbg("Granted ~p ~p ~p~n", [Tid,Oid,Op]),
+ ?ets_insert(mnesia_held_locks, {Oid, Op, Tid}),
+ ?ets_insert(mnesia_tid_locks, {Tid, Oid, Op}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Acquire locks
+
+try_sticky_lock(Tid, Op, Pid, {Tab, _} = Oid) ->
+ case ?ets_lookup(mnesia_sticky_locks, Tab) of
+ [] ->
+ try_lock(Tid, Op, Pid, Oid);
+ [{_,N}] when N == node() ->
+ try_lock(Tid, Op, Pid, Oid);
+ [{_,N}] ->
+ Req = {Pid, {Op, Tid, Oid}},
+ Pid ! {?MODULE, node(), {switch, N, Req}}
+ end.
+
+try_lock(Tid, read_write, Pid, Oid) ->
+ try_lock(Tid, read_write, read, write, Pid, Oid);
+try_lock(Tid, Op, Pid, Oid) ->
+ try_lock(Tid, Op, Op, Op, Pid, Oid).
+
+try_lock(Tid, Op, SimpleOp, Lock, Pid, Oid) ->
+ case can_lock(Tid, Lock, Oid, {no, bad_luck}) of
+ yes ->
+ Reply = grant_lock(Tid, SimpleOp, Lock, Oid),
+ reply(Pid, Reply);
+ {no, Lucky} ->
+ C = #cyclic{op = SimpleOp, lock = Lock, oid = Oid, lucky = Lucky},
+ ?dbg("Rejected ~p ~p ~p ~p ~n", [Tid, Oid, Lock, Lucky]),
+ reply(Pid, {not_granted, C});
+ {queue, Lucky} ->
+ ?dbg("Queued ~p ~p ~p ~p ~n", [Tid, Oid, Lock, Lucky]),
+ %% Append to queue: Nice place for trace output
+ ?ets_insert(mnesia_lock_queue,
+ #queue{oid = Oid, tid = Tid, op = Op,
+ pid = Pid, lucky = Lucky}),
+ ?ets_insert(mnesia_tid_locks, {Tid, Oid, {queued, Op}})
+ end.
+
+grant_lock(Tid, read, Lock, {Tab, Key})
+ when Key /= ?ALL, Tab /= ?GLOBAL ->
+ case node(Tid#tid.pid) == node() of
+ true ->
+ set_lock(Tid, {Tab, Key}, Lock),
+ {granted, lookup_in_client};
+ false ->
+ case catch mnesia_lib:db_get(Tab, Key) of %% lookup as well
+ {'EXIT', _Reason} ->
+ %% Table has been deleted from this node,
+ %% restart the transaction.
+ C = #cyclic{op = read, lock = Lock, oid = {Tab, Key},
+ lucky = nowhere},
+ {not_granted, C};
+ Val ->
+ set_lock(Tid, {Tab, Key}, Lock),
+ {granted, Val}
+ end
+ end;
+grant_lock(Tid, read, Lock, Oid) ->
+ set_lock(Tid, Oid, Lock),
+ {granted, ok};
+grant_lock(Tid, write, Lock, Oid) ->
+ set_lock(Tid, Oid, Lock),
+ granted.
+
+%% 1) Impose an ordering on all transactions that favours old (low tid)
+%%    transactions; newer (higher tid) transactions may never wait on older ones.
+%% 2) When releasing tids from the queue, always begin with the youngest (high tid);
+%%    because of 1) this avoids deadlocks.
+%% 3) Table locks are the problem :-) They should neither starve nor deadlock;
+%%    handle table locks in the queue as if they had locks on the unlocked records.
+
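+%% Illustrative example (not part of the original module), treating tids as
+%% plain integers: if the younger transaction 7 holds a write lock and the
+%% older transaction 3 requests it, 3 is queued ({queue, 7} in check_lock/7
+%% below); if 3 holds the lock and 7 requests it, the answer is {no, 3} and
+%% 7 gets a {not_granted, #cyclic{}} reply (typically causing it to be
+%% restarted), so a newer transaction never waits on an older one.
+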
+can_lock(Tid, read, {Tab, Key}, AlreadyQ) when Key /= ?ALL ->
+ %% The key is bound, no need for the other BIF
+ Oid = {Tab, Key},
+ ObjLocks = ?ets_match_object(mnesia_held_locks, {Oid, write, '_'}),
+ TabLocks = ?ets_match_object(mnesia_held_locks, {{Tab, ?ALL}, write, '_'}),
+ check_lock(Tid, Oid, ObjLocks, TabLocks, yes, AlreadyQ, read);
+
+can_lock(Tid, read, Oid, AlreadyQ) -> % Whole tab
+ Tab = element(1, Oid),
+ ObjLocks = ?ets_match_object(mnesia_held_locks, {{Tab, '_'}, write, '_'}),
+ check_lock(Tid, Oid, ObjLocks, [], yes, AlreadyQ, read);
+
+can_lock(Tid, write, {Tab, Key}, AlreadyQ) when Key /= ?ALL ->
+ Oid = {Tab, Key},
+ ObjLocks = ?ets_lookup(mnesia_held_locks, Oid),
+ TabLocks = ?ets_lookup(mnesia_held_locks, {Tab, ?ALL}),
+ check_lock(Tid, Oid, ObjLocks, TabLocks, yes, AlreadyQ, write);
+
+can_lock(Tid, write, Oid, AlreadyQ) -> % Whole tab
+ Tab = element(1, Oid),
+ ObjLocks = ?ets_match_object(mnesia_held_locks, ?match_oid_held_locks({Tab, '_'})),
+ check_lock(Tid, Oid, ObjLocks, [], yes, AlreadyQ, write).
+
+%% Check held locks for conflicting locks
+check_lock(Tid, Oid, [Lock | Locks], TabLocks, X, AlreadyQ, Type) ->
+ case element(3, Lock) of
+ Tid ->
+ check_lock(Tid, Oid, Locks, TabLocks, X, AlreadyQ, Type);
+ WaitForTid when WaitForTid > Tid -> % Important order
+ check_lock(Tid, Oid, Locks, TabLocks, {queue, WaitForTid}, AlreadyQ, Type);
+ WaitForTid when Tid#tid.pid == WaitForTid#tid.pid ->
+ dbg_out("Spurious lock conflict ~w ~w: ~w -> ~w~n",
+ [Oid, Lock, Tid, WaitForTid]),
+%% check_lock(Tid, Oid, Locks, TabLocks, {queue, WaitForTid}, AlreadyQ);
+ %% BUGBUG Fix this if possible
+ {no, WaitForTid};
+ WaitForTid ->
+ {no, WaitForTid}
+ end;
+
+check_lock(_, _, [], [], X, {queue, bad_luck}, _) ->
+ X; %% The queue should be correct already no need to check it again
+
+check_lock(_, _, [], [], X = {queue, _Tid}, _AlreadyQ, _) ->
+ X;
+
+check_lock(Tid, Oid, [], [], X, AlreadyQ, Type) ->
+ {Tab, Key} = Oid,
+ if
+ Type == write ->
+ check_queue(Tid, Tab, X, AlreadyQ);
+ Key == ?ALL ->
+ %% hmm should be solvable by a clever select expr but not today...
+ check_queue(Tid, Tab, X, AlreadyQ);
+ true ->
+ %% If there is a queue on that object, read_lock shouldn't be granted
+ ObjLocks = ets:lookup(mnesia_lock_queue, Oid),
+ Greatest = max(ObjLocks),
+ case Greatest of
+ empty ->
+ check_queue(Tid, Tab, X, AlreadyQ);
+ ObjL when Tid > ObjL ->
+ {no, ObjL}; %% Starvation Preemption (write waits for read)
+ ObjL ->
+ check_queue(Tid, Tab, {queue, ObjL}, AlreadyQ)
+ end
+ end;
+
+check_lock(Tid, Oid, [], TabLocks, X, AlreadyQ, Type) ->
+ check_lock(Tid, Oid, TabLocks, [], X, AlreadyQ, Type).
+
+%% Check queue for conflicting locks
+%% Assume that all queued locks belong to other tids
+
+check_queue(Tid, Tab, X, AlreadyQ) ->
+ TabLocks = ets:lookup(mnesia_lock_queue, {Tab,?ALL}),
+ Greatest = max(TabLocks),
+ case Greatest of
+ empty ->
+ X;
+ Tid ->
+ X;
+ WaitForTid when WaitForTid#queue.tid > Tid -> % Important order
+ {queue, WaitForTid};
+ WaitForTid ->
+ case AlreadyQ of
+ {no, bad_luck} -> {no, WaitForTid};
+ _ ->
+ erlang:error({mnesia_locker, assert, AlreadyQ})
+ end
+ end.
+
+max([]) ->
+ empty;
+max([H|R]) ->
+ max(R, H#queue.tid).
+
+max([H|R], Tid) when H#queue.tid > Tid ->
+ max(R, H#queue.tid);
+max([_|R], Tid) ->
+ max(R, Tid);
+max([], Tid) ->
+ Tid.
+
+%% We can't queue the ixlock requests since it
+%% becomes too complicated for little me :-)
+%% If we encounter an object with a wlock we reject the
+%% entire lock request
+%%
+%% BUGBUG: this is actually a bug since we may starve
+
+set_read_lock_on_all_keys(Tid, From, Tab, [RealKey | Tail], Orig, Ack) ->
+ Oid = {Tab, RealKey},
+ case can_lock(Tid, read, Oid, {no, bad_luck}) of
+ yes ->
+ {granted, Val} = grant_lock(Tid, read, read, Oid),
+ case opt_lookup_in_client(Val, Oid, read) of % Ought to be invoked
+ C when record(C, cyclic) -> % in the client
+ reply(From, {not_granted, C});
+ Val2 ->
+ Ack2 = lists:append(Val2, Ack),
+ set_read_lock_on_all_keys(Tid, From, Tab, Tail, Orig, Ack2)
+ end;
+ {no, Lucky} ->
+ C = #cyclic{op = read, lock = read, oid = Oid, lucky = Lucky},
+ reply(From, {not_granted, C});
+ {queue, Lucky} ->
+ C = #cyclic{op = read, lock = read, oid = Oid, lucky = Lucky},
+ reply(From, {not_granted, C})
+ end;
+set_read_lock_on_all_keys(_Tid, From, _Tab, [], Orig, Ack) ->
+ reply(From, {granted, Ack, Orig}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Release of locks
+
+%% Release remote non-pending nodes
+release_remote_non_pending(Node, Pending) ->
+ %% Clear the mnesia_sticky_locks table first, to avoid
+ %% unnecessary requests to the failing node
+ ?ets_match_delete(mnesia_sticky_locks, {'_' , Node}),
+
+ %% Then we have to release all locks held by processes
+ %% running at the failed node and also simply remove all
+    %% queued requests back to the failed node
+
+ AllTids = ?ets_match(mnesia_tid_locks, {'$1', '_', '_'}),
+ Tids = [T || [T] <- AllTids, Node == node(T#tid.pid), not lists:member(T, Pending)],
+ do_release_tids(Tids).
+
+do_release_tids([Tid | Tids]) ->
+ do_release_tid(Tid),
+ do_release_tids(Tids);
+do_release_tids([]) ->
+ ok.
+
+do_release_tid(Tid) ->
+ Locks = ?ets_lookup(mnesia_tid_locks, Tid),
+ ?dbg("Release ~p ~p ~n", [Tid, Locks]),
+ ?ets_delete(mnesia_tid_locks, Tid),
+ release_locks(Locks),
+    %% Re-examine the lock queue for the objects this tid had locks on
+ UniqueLocks = keyunique(lists:sort(Locks),[]),
+ rearrange_queue(UniqueLocks).
+
+keyunique([{_Tid, Oid, _Op}|R], Acc = [{_, Oid, _}|_]) ->
+ keyunique(R, Acc);
+keyunique([H|R], Acc) ->
+ keyunique(R, [H|Acc]);
+keyunique([], Acc) ->
+ Acc.
+
+release_locks([Lock | Locks]) ->
+ release_lock(Lock),
+ release_locks(Locks);
+release_locks([]) ->
+ ok.
+
+release_lock({Tid, Oid, {queued, _}}) ->
+ ?ets_match_delete(mnesia_lock_queue,
+ #queue{oid=Oid, tid = Tid, op = '_',
+ pid = '_', lucky = '_'});
+release_lock({Tid, Oid, Op}) ->
+ if
+ Op == write ->
+ ?ets_delete(mnesia_held_locks, Oid);
+ Op == read ->
+ ?ets_match_delete(mnesia_held_locks, {Oid, Op, Tid})
+ end.
+
+rearrange_queue([{_Tid, {Tab, Key}, _} | Locks]) ->
+ if
+ Key /= ?ALL->
+ Queue =
+ ets:lookup(mnesia_lock_queue, {Tab, ?ALL}) ++
+ ets:lookup(mnesia_lock_queue, {Tab, Key}),
+ case Queue of
+ [] ->
+ ok;
+ _ ->
+ Sorted = lists:reverse(lists:keysort(#queue.tid, Queue)),
+ try_waiters_obj(Sorted)
+ end;
+ true ->
+ Pat = ?match_oid_lock_queue({Tab, '_'}),
+ Queue = ?ets_match_object(mnesia_lock_queue, Pat),
+ Sorted = lists:reverse(lists:keysort(#queue.tid, Queue)),
+ try_waiters_tab(Sorted)
+ end,
+ ?dbg("RearrQ ~p~n", [Queue]),
+ rearrange_queue(Locks);
+rearrange_queue([]) ->
+ ok.
+
+try_waiters_obj([W | Waiters]) ->
+ case try_waiter(W) of
+ queued ->
+ no;
+ _ ->
+ try_waiters_obj(Waiters)
+ end;
+try_waiters_obj([]) ->
+ ok.
+
+try_waiters_tab([W | Waiters]) ->
+ case W#queue.oid of
+ {_Tab, ?ALL} ->
+ case try_waiter(W) of
+ queued ->
+ no;
+ _ ->
+ try_waiters_tab(Waiters)
+ end;
+ Oid ->
+ case try_waiter(W) of
+ queued ->
+ Rest = key_delete_all(Oid, #queue.oid, Waiters),
+ try_waiters_tab(Rest);
+ _ ->
+ try_waiters_tab(Waiters)
+ end
+ end;
+try_waiters_tab([]) ->
+ ok.
+
+try_waiter({queue, Oid, Tid, read_write, ReplyTo, _}) ->
+ try_waiter(Oid, read_write, read, write, ReplyTo, Tid);
+try_waiter({queue, Oid, Tid, Op, ReplyTo, _}) ->
+ try_waiter(Oid, Op, Op, Op, ReplyTo, Tid).
+
+try_waiter(Oid, Op, SimpleOp, Lock, ReplyTo, Tid) ->
+ case can_lock(Tid, Lock, Oid, {queue, bad_luck}) of
+ yes ->
+ %% Delete from queue: Nice place for trace output
+ ?ets_match_delete(mnesia_lock_queue,
+ #queue{oid=Oid, tid = Tid, op = Op,
+ pid = ReplyTo, lucky = '_'}),
+ Reply = grant_lock(Tid, SimpleOp, Lock, Oid),
+ ReplyTo ! {?MODULE, node(), Reply},
+ locked;
+ {queue, _Why} ->
+ ?dbg("Keep ~p ~p ~p ~p~n", [Tid, Oid, Lock, _Why]),
+ queued; % Keep waiter in queue
+ {no, Lucky} ->
+ C = #cyclic{op = SimpleOp, lock = Lock, oid = Oid, lucky = Lucky},
+ verbose("** WARNING ** Restarted transaction, possible deadlock in lock queue ~w: cyclic = ~w~n",
+ [Tid, C]),
+ ?ets_match_delete(mnesia_lock_queue,
+ #queue{oid=Oid, tid = Tid, op = Op,
+ pid = ReplyTo, lucky = '_'}),
+ Reply = {not_granted, C},
+ ReplyTo ! {?MODULE, node(), Reply},
+ removed
+ end.
+
+key_delete_all(Key, Pos, TupleList) ->
+ key_delete_all(Key, Pos, TupleList, []).
+key_delete_all(Key, Pos, [H|T], Ack) when element(Pos, H) == Key ->
+ key_delete_all(Key, Pos, T, Ack);
+key_delete_all(Key, Pos, [H|T], Ack) ->
+ key_delete_all(Key, Pos, T, [H|Ack]);
+key_delete_all(_, _, [], Ack) ->
+ lists:reverse(Ack).
+
+
+%% ********************* end server code ********************
+%% The following code executes at the client side of a transaction
+
+mnesia_down(N, Pending) ->
+ case whereis(?MODULE) of
+ undefined ->
+ %% Takes care of mnesia_down's in early startup
+ mnesia_monitor:mnesia_down(?MODULE, N);
+ Pid ->
+	    %% A synchronous call is needed in order to avoid
+ %% race with mnesia_tm's coordinator processes
+ %% that may restart and acquire new locks.
+ %% mnesia_monitor ensures the sync.
+ Pid ! {release_remote_non_pending, N, Pending}
+ end.
+
+%% Acquire a write lock, but do a read; used by
+%% mnesia:wread/1
+
+rwlock(Tid, Store, Oid) ->
+ {Tab, Key} = Oid,
+ case val({Tab, where_to_read}) of
+ nowhere ->
+ mnesia:abort({no_exists, Tab});
+ Node ->
+ Lock = write,
+ case need_lock(Store, Tab, Key, Lock) of
+ yes ->
+ Ns = w_nodes(Tab),
+ Res = get_rwlocks_on_nodes(Ns, Ns, Node, Store, Tid, Oid),
+ ?ets_insert(Store, {{locks, Tab, Key}, Lock}),
+ Res;
+ no ->
+ if
+ Key == ?ALL ->
+ w_nodes(Tab);
+ Tab == ?GLOBAL ->
+ w_nodes(Tab);
+ true ->
+ dirty_rpc(Node, Tab, Key, Lock)
+ end
+ end
+ end.
+
+get_rwlocks_on_nodes([Node | Tail], Orig, Node, Store, Tid, Oid) ->
+ Op = {self(), {read_write, Tid, Oid}},
+ {?MODULE, Node} ! Op,
+ ?ets_insert(Store, {nodes, Node}),
+ add_debug(Node),
+ get_rwlocks_on_nodes(Tail, Orig, Node, Store, Tid, Oid);
+get_rwlocks_on_nodes([Node | Tail], Orig, OtherNode, Store, Tid, Oid) ->
+ Op = {self(), {write, Tid, Oid}},
+ {?MODULE, Node} ! Op,
+ add_debug(Node),
+ ?ets_insert(Store, {nodes, Node}),
+ get_rwlocks_on_nodes(Tail, Orig, OtherNode, Store, Tid, Oid);
+get_rwlocks_on_nodes([], Orig, _Node, Store, _Tid, Oid) ->
+ receive_wlocks(Orig, read_write_lock, Store, Oid).
+
+%% Return a list of nodes or abort transaction
+%% We also insert any additional where_to_write nodes
+%% in the local store under the key == nodes
+
+w_nodes(Tab) ->
+ Nodes = ?catch_val({Tab, where_to_write}),
+ case Nodes of
+ [_ | _] -> Nodes;
+ _ -> mnesia:abort({no_exists, Tab})
+ end.
+
+%% Acquire a sticky wlock. A sticky lock is a lock
+%% which remains at this node after the termination of the
+%% transaction.
+
+sticky_wlock(Tid, Store, Oid) ->
+ sticky_lock(Tid, Store, Oid, write).
+
+sticky_rwlock(Tid, Store, Oid) ->
+ sticky_lock(Tid, Store, Oid, read_write).
+
+sticky_lock(Tid, Store, {Tab, Key} = Oid, Lock) ->
+ N = val({Tab, where_to_read}),
+ if
+ node() == N ->
+ case need_lock(Store, Tab, Key, write) of
+ yes ->
+ do_sticky_lock(Tid, Store, Oid, Lock);
+ no ->
+ dirty_sticky_lock(Tab, Key, [N], Lock)
+ end;
+ true ->
+ mnesia:abort({not_local, Tab})
+ end.
+
+do_sticky_lock(Tid, Store, {Tab, Key} = Oid, Lock) ->
+ ?MODULE ! {self(), {test_set_sticky, Tid, Oid, Lock}},
+ receive
+ {?MODULE, _N, granted} ->
+ ?ets_insert(Store, {{locks, Tab, Key}, write}),
+ granted;
+ {?MODULE, _N, {granted, Val}} -> %% for rwlocks
+ case opt_lookup_in_client(Val, Oid, write) of
+ C when record(C, cyclic) ->
+ exit({aborted, C});
+ Val2 ->
+ ?ets_insert(Store, {{locks, Tab, Key}, write}),
+ Val2
+ end;
+ {?MODULE, _N, {not_granted, Reason}} ->
+ exit({aborted, Reason});
+ {?MODULE, N, not_stuck} ->
+ not_stuck(Tid, Store, Tab, Key, Oid, Lock, N),
+ dirty_sticky_lock(Tab, Key, [N], Lock);
+ {mnesia_down, N} ->
+ exit({aborted, {node_not_running, N}});
+ {?MODULE, N, {stuck_elsewhere, _N2}} ->
+ stuck_elsewhere(Tid, Store, Tab, Key, Oid, Lock),
+ dirty_sticky_lock(Tab, Key, [N], Lock)
+ end.
+
+not_stuck(Tid, Store, Tab, _Key, Oid, _Lock, N) ->
+ rlock(Tid, Store, {Tab, ?ALL}), %% needed?
+ wlock(Tid, Store, Oid), %% perfect sync
+ wlock(Tid, Store, {Tab, ?STICK}), %% max one sticker/table
+ Ns = val({Tab, where_to_write}),
+ rpc:abcast(Ns, ?MODULE, {stick, Oid, N}).
+
+stuck_elsewhere(Tid, Store, Tab, _Key, Oid, _Lock) ->
+ rlock(Tid, Store, {Tab, ?ALL}), %% needed?
+ wlock(Tid, Store, Oid), %% perfect sync
+ wlock(Tid, Store, {Tab, ?STICK}), %% max one sticker/table
+ Ns = val({Tab, where_to_write}),
+ rpc:abcast(Ns, ?MODULE, {unstick, Tab}).
+
+dirty_sticky_lock(Tab, Key, Nodes, Lock) ->
+ if
+ Lock == read_write ->
+ mnesia_lib:db_get(Tab, Key);
+ Key == ?ALL ->
+ Nodes;
+ Tab == ?GLOBAL ->
+ Nodes;
+ true ->
+ ok
+ end.
+
+sticky_wlock_table(Tid, Store, Tab) ->
+ sticky_lock(Tid, Store, {Tab, ?ALL}, write).
+
+%% Acquire a wlock on Oid.
+%% We store a {Tabname, write, Tid} in all locktables
+%% on all nodes containing a copy of Tabname.
+%% We also store an item {{locks, Tab, Key}, write} in the
+%% local store when we have acquired the lock.
+%%
+wlock(Tid, Store, Oid) ->
+ {Tab, Key} = Oid,
+ case need_lock(Store, Tab, Key, write) of
+ yes ->
+ Ns = w_nodes(Tab),
+ Op = {self(), {write, Tid, Oid}},
+ ?ets_insert(Store, {{locks, Tab, Key}, write}),
+ get_wlocks_on_nodes(Ns, Ns, Store, Op, Oid);
+ no when Key /= ?ALL, Tab /= ?GLOBAL ->
+ [];
+ no ->
+ w_nodes(Tab)
+ end.
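+
+%% Illustrative sketch, not part of the original module: the shape of the
+%% terms produced by wlock/3 above for a hypothetical Oid {foo, 42}: the
+%% request sent to the lock manager on each node and the entry written to
+%% the local transaction store.
+example_wlock_terms(Tid) ->
+ Oid = {foo, 42},
+ Request = {self(), {write, Tid, Oid}},
+ StoreEntry = {{locks, foo, 42}, write},
+ {Request, StoreEntry}.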
+
+wlock_table(Tid, Store, Tab) ->
+ wlock(Tid, Store, {Tab, ?ALL}).
+
+%% Write lock even if the table does not exist
+
+wlock_no_exist(Tid, Store, Tab, Ns) ->
+ Oid = {Tab, ?ALL},
+ Op = {self(), {write, Tid, Oid}},
+ get_wlocks_on_nodes(Ns, Ns, Store, Op, Oid).
+
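+%% A matching whole-table entry ({locks, Tab, ?ALL}) in the local store
+%% makes any per-key check redundant: we only need to take a new lock if
+%% neither a matching table-wide entry nor a matching entry for this
+%% particular key is already present.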
+need_lock(Store, Tab, Key, LockPattern) ->
+ TabL = ?ets_match_object(Store, {{locks, Tab, ?ALL}, LockPattern}),
+ if
+ TabL == [] ->
+ KeyL = ?ets_match_object(Store, {{locks, Tab, Key}, LockPattern}),
+ if
+ KeyL == [] ->
+ yes;
+ true ->
+ no
+ end;
+ true ->
+ no
+ end.
+
+add_debug(Node) -> % Use process dictionary for debug info
+ case get(mnesia_wlock_nodes) of
+ undefined ->
+ put(mnesia_wlock_nodes, [Node]);
+ NodeList ->
+ put(mnesia_wlock_nodes, [Node|NodeList])
+ end.
+
+del_debug(Node) ->
+ case get(mnesia_wlock_nodes) of
+ undefined -> % Shouldn't happen
+ ignore;
+ [Node] ->
+ erase(mnesia_wlock_nodes);
+ List ->
+ put(mnesia_wlock_nodes, lists:delete(Node, List))
+ end.
+
+%% We first send lock requests to the lockmanagers on all
+%% nodes holding a copy of the table
+
+get_wlocks_on_nodes([Node | Tail], Orig, Store, Request, Oid) ->
+ {?MODULE, Node} ! Request,
+ ?ets_insert(Store, {nodes, Node}),
+ add_debug(Node),
+ get_wlocks_on_nodes(Tail, Orig, Store, Request, Oid);
+get_wlocks_on_nodes([], Orig, Store, _Request, Oid) ->
+ receive_wlocks(Orig, Orig, Store, Oid).
+
+receive_wlocks([Node | Tail], Res, Store, Oid) ->
+ receive
+ {?MODULE, Node, granted} ->
+ del_debug(Node),
+ receive_wlocks(Tail, Res, Store, Oid);
+ {?MODULE, Node, {granted, Val}} -> %% for rwlocks
+ del_debug(Node),
+ case opt_lookup_in_client(Val, Oid, write) of
+ C when record(C, cyclic) ->
+ flush_remaining(Tail, Node, {aborted, C});
+ Val2 ->
+ receive_wlocks(Tail, Val2, Store, Oid)
+ end;
+ {?MODULE, Node, {not_granted, Reason}} ->
+ del_debug(Node),
+ Reason1 = {aborted, Reason},
+ flush_remaining(Tail, Node, Reason1);
+ {mnesia_down, Node} ->
+ del_debug(Node),
+ Reason1 = {aborted, {node_not_running, Node}},
+ flush_remaining(Tail, Node, Reason1);
+ {?MODULE, Node, {switch, Node2, Req}} -> %% for rwlocks
+ del_debug(Node),
+ add_debug(Node2),
+ ?ets_insert(Store, {nodes, Node2}),
+ {?MODULE, Node2} ! Req,
+ receive_wlocks([Node2 | Tail], Res, Store, Oid)
+ end;
+
+receive_wlocks([], Res, _Store, _Oid) ->
+ Res.
+
+flush_remaining([], _SkipNode, Res) ->
+ exit(Res);
+flush_remaining([SkipNode | Tail ], SkipNode, Res) ->
+ del_debug(SkipNode),
+ flush_remaining(Tail, SkipNode, Res);
+flush_remaining([Node | Tail], SkipNode, Res) ->
+ receive
+ {?MODULE, Node, _} ->
+ del_debug(Node),
+ flush_remaining(Tail, SkipNode, Res);
+ {mnesia_down, Node} ->
+ del_debug(Node),
+ flush_remaining(Tail, SkipNode, {aborted, {node_not_running, Node}})
+ end.
+
+opt_lookup_in_client(lookup_in_client, Oid, Lock) ->
+ {Tab, Key} = Oid,
+ case catch mnesia_lib:db_get(Tab, Key) of
+ {'EXIT', _} ->
+ %% Table has been deleted from this node,
+ %% restart the transaction.
+ #cyclic{op = read, lock = Lock, oid = Oid, lucky = nowhere};
+ Val ->
+ Val
+ end;
+opt_lookup_in_client(Val, _Oid, _Lock) ->
+ Val.
+
+return_granted_or_nodes({_, ?ALL}, Nodes) -> Nodes;
+return_granted_or_nodes({?GLOBAL, _}, Nodes) -> Nodes;
+return_granted_or_nodes(_, _Nodes) -> granted.
+
+%% We store a {Tab, read, From} item in the
+%% locks table on the node where we actually pick up the object,
+%% and we also store an item {lock, Oid, read} in our local store
+%% so that we can release any locks we hold when we commit.
+%% This function not only acquires a read lock, but also reads the object.
+
+%% Oid's are always {Tab, Key} tuples
+rlock(Tid, Store, Oid) ->
+ {Tab, Key} = Oid,
+ case val({Tab, where_to_read}) of
+ nowhere ->
+ mnesia:abort({no_exists, Tab});
+ Node ->
+ case need_lock(Store, Tab, Key, '_') of
+ yes ->
+ R = l_request(Node, {read, Tid, Oid}, Store),
+ rlock_get_reply(Node, Store, Oid, R);
+ no ->
+ if
+ Key == ?ALL ->
+ [Node];
+ Tab == ?GLOBAL ->
+ [Node];
+ true ->
+ dirty_rpc(Node, Tab, Key, read)
+ end
+ end
+ end.
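+
+%% Illustrative sketch, not part of the original module: a transactional
+%% read on a hypothetical table foo ends up in rlock/3 above via the
+%% transaction access module, acquiring the read lock and returning the
+%% matching objects.
+rlock_read_example(Key) ->
+ mnesia:transaction(fun() -> mnesia:read({foo, Key}) end).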
+
+dirty_rpc(nowhere, Tab, Key, _Lock) ->
+ mnesia:abort({no_exists, {Tab, Key}});
+dirty_rpc(Node, _Tab, ?ALL, _Lock) ->
+ [Node];
+dirty_rpc(Node, ?GLOBAL, _Key, _Lock) ->
+ [Node];
+dirty_rpc(Node, Tab, Key, Lock) ->
+ Args = [Tab, Key],
+ case rpc:call(Node, mnesia_lib, db_get, Args) of
+ {badrpc, Reason} ->
+ case val({Tab, where_to_read}) of
+ Node ->
+ ErrorTag = mnesia_lib:dirty_rpc_error_tag(Reason),
+ mnesia:abort({ErrorTag, Args});
+ _NewNode ->
+ %% Table has been deleted from the node,
+ %% restart the transaction.
+ C = #cyclic{op = read, lock = Lock, oid = {Tab, Key}, lucky = nowhere},
+ exit({aborted, C})
+ end;
+ Other ->
+ Other
+ end.
+
+rlock_get_reply(Node, Store, Oid, {granted, V}) ->
+ {Tab, Key} = Oid,
+ ?ets_insert(Store, {{locks, Tab, Key}, read}),
+ ?ets_insert(Store, {nodes, Node}),
+ case opt_lookup_in_client(V, Oid, read) of
+ C when record(C, cyclic) ->
+ mnesia:abort(C);
+ Val ->
+ Val
+ end;
+rlock_get_reply(Node, Store, Oid, granted) ->
+ {Tab, Key} = Oid,
+ ?ets_insert(Store, {{locks, Tab, Key}, read}),
+ ?ets_insert(Store, {nodes, Node}),
+ return_granted_or_nodes(Oid, [Node]);
+rlock_get_reply(Node, Store, Tab, {granted, V, RealKeys}) ->
+ L = fun(K) -> ?ets_insert(Store, {{locks, Tab, K}, read}) end,
+ lists:foreach(L, RealKeys),
+ ?ets_insert(Store, {nodes, Node}),
+ V;
+rlock_get_reply(_Node, _Store, _Oid, {not_granted, Reason}) ->
+ exit({aborted, Reason});
+
+rlock_get_reply(_Node, Store, Oid, {switch, N2, Req}) ->
+ ?ets_insert(Store, {nodes, N2}),
+ {?MODULE, N2} ! Req,
+ rlock_get_reply(N2, Store, Oid, l_req_rec(N2, Store)).
+
+
+rlock_table(Tid, Store, Tab) ->
+ rlock(Tid, Store, {Tab, ?ALL}).
+
+ixrlock(Tid, Store, Tab, IxKey, Pos) ->
+ case val({Tab, where_to_read}) of
+ nowhere ->
+ mnesia:abort({no_exists, Tab});
+ Node ->
+ R = l_request(Node, {ix_read, Tid, Tab, IxKey, Pos}, Store),
+ rlock_get_reply(Node, Store, Tab, R)
+ end.
+
+%% Grabs the locks or exits
+global_lock(Tid, Store, Item, write, Ns) ->
+ Oid = {?GLOBAL, Item},
+ Op = {self(), {write, Tid, Oid}},
+ get_wlocks_on_nodes(Ns, Ns, Store, Op, Oid);
+global_lock(Tid, Store, Item, read, Ns) ->
+ Oid = {?GLOBAL, Item},
+ send_requests(Ns, {read, Tid, Oid}),
+ rec_requests(Ns, Oid, Store),
+ Ns.
+
+send_requests([Node | Nodes], X) ->
+ {?MODULE, Node} ! {self(), X},
+ send_requests(Nodes, X);
+send_requests([], _X) ->
+ ok.
+
+rec_requests([Node | Nodes], Oid, Store) ->
+ Res = l_req_rec(Node, Store),
+ case catch rlock_get_reply(Node, Store, Oid, Res) of
+ {'EXIT', Reason} ->
+ flush_remaining(Nodes, Node, Reason);
+ _ ->
+ rec_requests(Nodes, Oid, Store)
+ end;
+rec_requests([], _Oid, _Store) ->
+ ok.
+
+get_held_locks() ->
+ ?ets_match_object(mnesia_held_locks, '_').
+
+get_lock_queue() ->
+ Q = ?ets_match_object(mnesia_lock_queue, '_'),
+ [{Oid, Op, Pid, Tid, WFT} || {queue, Oid, Tid, Op, Pid, WFT} <- Q].
+
+do_stop() ->
+ exit(shutdown).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% System upgrade
+
+system_continue(_Parent, _Debug, State) ->
+ loop(State).
+
+system_terminate(_Reason, _Parent, _Debug, _State) ->
+ do_stop().
+
+system_code_change(State, _Module, _OldVsn, _Extra) ->
+ {ok, State}.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_log.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_log.erl
new file mode 100644
index 0000000000..47e8be32c0
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_log.erl
@@ -0,0 +1,1019 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_log.erl,v 1.2 2009/07/01 15:45:40 kostis Exp $
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% This module administers three kinds of log files:
+%%
+%% 1 The transaction log
+%% mnesia_tm appends to the log (via mnesia_log) at the
+%% end of each transaction (or dirty write) and
+%% mnesia_dumper reads the log and performs the ops in
+%% the dat files. The dump_log is done at startup and
+%% at intervals controlled by the user.
+%%
+%% 2 The mnesia_down log
+%% mnesia_tm appends to the log (via mnesia_log) when it
+%% realizes that mnesia goes up or down on another node.
+%% mnesia_init reads the log (via mnesia_log) at startup.
+%%
+%% 3 The backup log
+%% mnesia_schema produces one tiny log when the schema is
+%% initially created. mnesia_schema also reads the log
+%% when the user wants tables (possibly including the schema)
+%% to be restored. mnesia_log appends to the log when the
+%% user wants to produce a real backup.
+%%
+%% The actual access to the backup media is performed via the
+%% mnesia_backup module for both read and write. mnesia_backup
+%% uses disk_log (*), BUT the user may write their own module
+%% with the same interface as mnesia_backup and configure
+%% Mnesia so that the alternate module performs the actual accesses
+%% to the backup media. This means that the user may put the
+%% backup on media that Mnesia does not know about, possibly on
+%% hosts where Erlang is not running.
+%%
+%% All these logs have, to some extent, a common structure.
+%% They all use the disk_log module (*) for the basic
+%% file structure. disk_log has a repair feature that
+%% can be used to skip erroneous log records if one comes to
+%% the conclusion that it is more important to reuse some
+%% of the log records than to avoid the risk of obtaining
+%% inconsistent data. If the data becomes inconsistent, it is
+%% solely up to the application to make it consistent again.
+%% The automatic repair of the disk_log is very powerful, but
+%% use it with extreme care.
+%%
+%% First in every Mnesia log file is a mnesia log header.
+%% It contains a list with a log_header record as its single
+%% element. The structure of the log_header may never be
+%% changed since it may be written to very old backup files.
+%% By keeping this record definition stable we are able to
+%% comprehend backups from time zero. It also allows us to
+%% use the backup format as an interchange format between
+%% Mnesia releases.
+%%
+%% An op-list is a list of tuples with arity 3. Each tuple
+%% has this structure: {Oid, Recs, Op} where Oid is the tuple
+%% {Tab, Key}, Recs is a (possibly empty) list of records and
+%% Op is an atom.
+%%
+%% The log file structure for the transaction log is as follows.
+%%
+%% After the mnesia log section follows an extended record section
+%% containing op-lists. There are several values that Op may
+%% have, such as write, delete, update_counter, delete_object,
+%% and replace. There is no special end of section marker.
+%%
+%% +-----------------+
+%% | mnesia log head |
+%% +-----------------+
+%% | extended record |
+%% | section |
+%% +-----------------+
+%%
+%% The log file structure for the mnesia_down log is as follows.
+%%
+%% After the mnesia log section follows a mnesia_down section
+%% containing lists with a yoyo record as their single element.
+%%
+%% +-----------------+
+%% | mnesia log head |
+%% +-----------------+
+%% | mnesia_down |
+%% | section |
+%% +-----------------+
+%%
+%% The log file structure for the backup log is as follows.
+%%
+%% After the mnesia log section follows a schema section
+%% containing record lists. A record list is a list of tuples
+%% where {schema, Tab} is interpreted as a delete_table(Tab) and
+%% {schema, Tab, CreateList} is interpreted as a create_table.
+%%
+%% The record section also contains record lists. In this section
+%% {Tab, Key} is interpreted as delete({Tab, Key}) and other tuples
+%% as write(Tuple). There is no special end of section marker.
+%%
+%% +-----------------+
+%% | mnesia log head |
+%% +-----------------+
+%% | schema section |
+%% +-----------------+
+%% | record section |
+%% +-----------------+
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+-module(mnesia_log).
+
+-export([
+ append/2,
+ backup/1,
+ backup/2,
+ backup_checkpoint/2,
+ backup_checkpoint/3,
+ backup_log_header/0,
+ backup_master/2,
+ chunk_decision_log/1,
+ chunk_decision_tab/1,
+ chunk_log/1,
+ chunk_log/2,
+ close_decision_log/0,
+ close_decision_tab/0,
+ close_log/1,
+ unsafe_close_log/1,
+ confirm_log_dump/1,
+ confirm_decision_log_dump/0,
+ previous_log_file/0,
+ previous_decision_log_file/0,
+ latest_log_file/0,
+ decision_log_version/0,
+ decision_log_file/0,
+ decision_tab_file/0,
+ decision_tab_version/0,
+ dcl_version/0,
+ dcd_version/0,
+ ets2dcd/1,
+ ets2dcd/2,
+ dcd2ets/1,
+ dcd2ets/2,
+ init/0,
+ init_log_dump/0,
+ log/1,
+ slog/1,
+ log_decision/1,
+ log_files/0,
+ open_decision_log/0,
+ trans_log_header/0,
+ open_decision_tab/0,
+ dcl_log_header/0,
+ dcd_log_header/0,
+ open_log/4,
+ open_log/6,
+ prepare_decision_log_dump/0,
+ prepare_log_dump/1,
+ save_decision_tab/1,
+ purge_all_logs/0,
+ purge_some_logs/0,
+ stop/0,
+ tab_copier/3,
+ version/0,
+ view/0,
+ view/1,
+ write_trans_log_header/0
+ ]).
+
+
+-include("mnesia.hrl").
+-import(mnesia_lib, [val/1, dir/1]).
+-import(mnesia_lib, [exists/1, fatal/2, error/2, dbg_out/2]).
+
+trans_log_header() -> log_header(trans_log, version()).
+backup_log_header() -> log_header(backup_log, "1.2").
+decision_log_header() -> log_header(decision_log, decision_log_version()).
+decision_tab_header() -> log_header(decision_tab, decision_tab_version()).
+dcl_log_header() -> log_header(dcl_log, dcl_version()).
+dcd_log_header() -> log_header(dcd_log, dcd_version()).
+
+log_header(Kind, Version) ->
+ #log_header{log_version=Version,
+ log_kind=Kind,
+ mnesia_version=mnesia:system_info(version),
+ node=node(),
+ now=now()}.
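+
+%% Illustrative sketch, not part of the original module: the first term of
+%% every Mnesia log file is a list with one log_header record, and the
+%% transaction log then continues with op-lists of {Oid, Recs, Op} tuples
+%% as described in the module header. The table name person and the record
+%% {person, 1, "Ann"} are made up for the example.
+example_log_terms() ->
+ Header = [trans_log_header()],
+ OpList = [{{person, 1}, [{person, 1, "Ann"}], write},
+ {{person, 2}, [], delete}],
+ {Header, OpList}.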
+
+version() -> "4.3".
+
+decision_log_version() -> "3.0".
+
+decision_tab_version() -> "1.0".
+
+dcl_version() -> "1.0".
+dcd_version() -> "1.0".
+
+append(Log, Bin) when binary(Bin) ->
+ disk_log:balog(Log, Bin);
+append(Log, Term) ->
+ disk_log:alog(Log, Term).
+
+%% Synced append
+sappend(Log, Bin) when binary(Bin) ->
+ ok = disk_log:blog(Log, Bin);
+sappend(Log, Term) ->
+ ok = disk_log:log(Log, Term).
+
+%% Write commit records to the latest_log
+log(C) when C#commit.disc_copies == [],
+ C#commit.disc_only_copies == [],
+ C#commit.schema_ops == [] ->
+ ignore;
+log(C) ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ if
+ record(C, commit) ->
+ C2 = C#commit{ram_copies = [], snmp = []},
+ append(latest_log, C2);
+ true ->
+ %% Either a commit record as binary
+ %% or some decision related info
+ append(latest_log, C)
+ end,
+ mnesia_dumper:incr_log_writes();
+ false ->
+ ignore
+ end.
+
+%% Synced
+
+slog(C) when C#commit.disc_copies == [],
+ C#commit.disc_only_copies == [],
+ C#commit.schema_ops == [] ->
+ ignore;
+slog(C) ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ if
+ record(C, commit) ->
+ C2 = C#commit{ram_copies = [], snmp = []},
+ sappend(latest_log, C2);
+ true ->
+ %% Either a commit record as binary
+ %% or some decision related info
+ sappend(latest_log, C)
+ end,
+ mnesia_dumper:incr_log_writes();
+ false ->
+ ignore
+ end.
+
+
+%% Stuff related to the file LOG
+
+%% Returns a list of logfiles. The oldest is first.
+log_files() -> [previous_log_file(),
+ latest_log_file(),
+ decision_tab_file()
+ ].
+
+latest_log_file() -> dir(latest_log_name()).
+
+previous_log_file() -> dir("PREVIOUS.LOG").
+
+decision_log_file() -> dir(decision_log_name()).
+
+decision_tab_file() -> dir(decision_tab_name()).
+
+previous_decision_log_file() -> dir("PDECISION.LOG").
+
+latest_log_name() -> "LATEST.LOG".
+
+decision_log_name() -> "DECISION.LOG".
+
+decision_tab_name() -> "DECISION_TAB.LOG".
+
+init() ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ Prev = previous_log_file(),
+ verify_no_exists(Prev),
+
+ Latest = latest_log_file(),
+ verify_no_exists(Latest),
+
+ Header = trans_log_header(),
+ open_log(latest_log, Header, Latest);
+ false ->
+ ok
+ end.
+
+verify_no_exists(Fname) ->
+ case exists(Fname) of
+ false ->
+ ok;
+ true ->
+ fatal("Log file exists: ~p~n", [Fname])
+ end.
+
+open_log(Name, Header, Fname) ->
+ Exists = exists(Fname),
+ open_log(Name, Header, Fname, Exists).
+
+open_log(Name, Header, Fname, Exists) ->
+ Repair = mnesia_monitor:get_env(auto_repair),
+ open_log(Name, Header, Fname, Exists, Repair).
+
+open_log(Name, Header, Fname, Exists, Repair) ->
+ case Name == previous_log of
+ true ->
+ open_log(Name, Header, Fname, Exists, Repair, read_only);
+ false ->
+ open_log(Name, Header, Fname, Exists, Repair, read_write)
+ end.
+
+open_log(Name, Header, Fname, Exists, Repair, Mode) ->
+ Args = [{file, Fname}, {name, Name}, {repair, Repair}, {mode, Mode}],
+%% io:format("~p:open_log: ~p ~p~n", [?MODULE, Name, Fname]),
+ case mnesia_monitor:open_log(Args) of
+ {ok, Log} when Exists == true ->
+ Log;
+ {ok, Log} ->
+ write_header(Log, Header),
+ Log;
+ {repaired, Log, _, {badbytes, 0}} when Exists == true ->
+ Log;
+ {repaired, Log, _, {badbytes, 0}} ->
+ write_header(Log, Header),
+ Log;
+ {repaired, Log, _Recover, BadBytes} ->
+ mnesia_lib:important("Data may be missing, log ~p repaired: Lost ~p bytes~n",
+ [Fname, BadBytes]),
+ Log;
+ {error, Reason} when Repair == true ->
+ file:delete(Fname),
+ mnesia_lib:important("Data may be missing, Corrupt logfile deleted: ~p, ~p ~n",
+ [Fname, Reason]),
+ %% Create a new
+ open_log(Name, Header, Fname, false, false, read_write);
+ {error, Reason} ->
+ fatal("Cannot open log file ~p: ~p~n", [Fname, Reason])
+ end.
+
+write_header(Log, Header) ->
+ append(Log, Header).
+
+write_trans_log_header() ->
+ write_header(latest_log, trans_log_header()).
+
+stop() ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ close_log(latest_log);
+ false ->
+ ok
+ end.
+
+close_log(Log) ->
+%% io:format("mnesia_log:close_log ~p~n", [Log]),
+%% io:format("mnesia_log:close_log ~p~n", [Log]),
+ case disk_log:sync(Log) of
+ ok -> ok;
+ {error, {read_only_mode, Log}} ->
+ ok;
+ {error, Reason} ->
+ mnesia_lib:important("Failed syncing ~p to_disk reason ~p ~n",
+ [Log, Reason])
+ end,
+ mnesia_monitor:close_log(Log).
+
+unsafe_close_log(Log) ->
+%% io:format("mnesia_log:close_log ~p~n", [Log]),
+ mnesia_monitor:unsafe_close_log(Log).
+
+
+purge_some_logs() ->
+ mnesia_monitor:unsafe_close_log(latest_log),
+ file:delete(latest_log_file()),
+ file:delete(decision_tab_file()).
+
+purge_all_logs() ->
+ file:delete(previous_log_file()),
+ file:delete(latest_log_file()),
+ file:delete(decision_tab_file()).
+
+%% Prepare dump by renaming the open logfile if possible.
+%% Returns already_dumped, {needs_dump, NumberOfLogWrites}
+%% or {error, Reason}.
+prepare_log_dump(InitBy) ->
+ Diff = mnesia_dumper:get_log_writes() -
+ mnesia_lib:read_counter(trans_log_writes_prev),
+ if
+ Diff == 0, InitBy /= startup ->
+ already_dumped;
+ true ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ Prev = previous_log_file(),
+ prepare_prev(Diff, InitBy, Prev, exists(Prev));
+ false ->
+ already_dumped
+ end
+ end.
+
+prepare_prev(Diff, _, _, true) ->
+ {needs_dump, Diff};
+prepare_prev(Diff, startup, Prev, false) ->
+ Latest = latest_log_file(),
+ case exists(Latest) of
+ true ->
+ case file:rename(Latest, Prev) of
+ ok ->
+ {needs_dump, Diff};
+ {error, Reason} ->
+ {error, Reason}
+ end;
+ false ->
+ already_dumped
+ end;
+prepare_prev(Diff, _InitBy, Prev, false) ->
+ Head = trans_log_header(),
+ case mnesia_monitor:reopen_log(latest_log, Prev, Head) of
+ ok ->
+ {needs_dump, Diff};
+ {error, Reason} ->
+ Latest = latest_log_file(),
+ {error, {"Cannot rename log file",
+ [Latest, Prev, Reason]}}
+ end.
+
+%% Init dump and return PrevLogFileDesc or exit.
+init_log_dump() ->
+ Fname = previous_log_file(),
+ open_log(previous_log, trans_log_header(), Fname),
+ start.
+
+
+chunk_log(Cont) ->
+ chunk_log(previous_log, Cont).
+
+chunk_log(_Log, eof) ->
+ eof;
+chunk_log(Log, Cont) ->
+ case catch disk_log:chunk(Log, Cont) of
+ {error, Reason} ->
+ fatal("Possibly truncated ~p file: ~p~n",
+ [Log, Reason]);
+ {C2, Chunk, _BadBytes} ->
+ %% Read_only case, should we warn about the bad log file?
+ %% BUGBUG Should we crash if Repair == false ??
+ %% We got to check this !!
+ mnesia_lib:important("~p repaired, lost ~p bad bytes~n", [Log, _BadBytes]),
+ {C2, Chunk};
+ Other ->
+ Other
+ end.
+
+%% Confirms the dump by closing the previous log and deleting the file
+confirm_log_dump(Updates) ->
+ case mnesia_monitor:close_log(previous_log) of
+ ok ->
+ file:delete(previous_log_file()),
+ mnesia_lib:incr_counter(trans_log_writes_prev, Updates),
+ dumped;
+ {error, Reason} ->
+ {error, Reason}
+ end.
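+
+%% Illustrative sketch, not part of the original module: the dump cycle
+%% that a caller such as mnesia_dumper would drive with the functions
+%% above; prepare the dump, open the previous log, read it chunk by chunk
+%% and finally confirm. What is done with each chunk is left out, and the
+%% helper names are made up.
+example_dump_cycle(InitBy) ->
+ case prepare_log_dump(InitBy) of
+ {needs_dump, Updates} ->
+ Cont = init_log_dump(),
+ drain_chunks(chunk_log(Cont)),
+ confirm_log_dump(Updates);
+ Other ->
+ Other
+ end.
+
+drain_chunks(eof) ->
+ ok;
+drain_chunks({Cont, _Terms}) ->
+ drain_chunks(chunk_log(Cont)).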
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Decision log
+
+open_decision_log() ->
+ Latest = decision_log_file(),
+ open_log(decision_log, decision_log_header(), Latest),
+ start.
+
+prepare_decision_log_dump() ->
+ Prev = previous_decision_log_file(),
+ prepare_decision_log_dump(exists(Prev), Prev).
+
+prepare_decision_log_dump(false, Prev) ->
+ Head = decision_log_header(),
+ case mnesia_monitor:reopen_log(decision_log, Prev, Head) of
+ ok ->
+ prepare_decision_log_dump(true, Prev);
+ {error, Reason} ->
+ fatal("Cannot rename decision log file ~p -> ~p: ~p~n",
+ [decision_log_file(), Prev, Reason])
+ end;
+prepare_decision_log_dump(true, Prev) ->
+ open_log(previous_decision_log, decision_log_header(), Prev),
+ start.
+
+chunk_decision_log(Cont) ->
+ %% dbg_out("chunk log ~p~n", [Cont]),
+ chunk_log(previous_decision_log, Cont).
+
+%% Confirms dump of the decision log
+confirm_decision_log_dump() ->
+ case mnesia_monitor:close_log(previous_decision_log) of
+ ok ->
+ file:delete(previous_decision_log_file());
+ {error, Reason} ->
+ fatal("Cannot confirm decision log dump: ~p~n",
+ [Reason])
+ end.
+
+save_decision_tab(Decisions) ->
+ Log = decision_tab,
+ Tmp = mnesia_lib:dir("DECISION_TAB.TMP"),
+ file:delete(Tmp),
+ open_log(Log, decision_tab_header(), Tmp),
+ append(Log, Decisions),
+ close_log(Log),
+ TabFile = decision_tab_file(),
+ ok = file:rename(Tmp, TabFile).
+
+open_decision_tab() ->
+ TabFile = decision_tab_file(),
+ open_log(decision_tab, decision_tab_header(), TabFile),
+ start.
+
+close_decision_tab() ->
+ close_log(decision_tab).
+
+chunk_decision_tab(Cont) ->
+ %% dbg_out("chunk tab ~p~n", [Cont]),
+ chunk_log(decision_tab, Cont).
+
+close_decision_log() ->
+ close_log(decision_log).
+
+log_decision(Decision) ->
+ append(decision_log, Decision).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Debug functions
+
+view() ->
+ lists:foreach(fun(F) -> view(F) end, log_files()).
+
+view(File) ->
+ mnesia_lib:show("***** ~p ***** ~n", [File]),
+ case exists(File) of
+ false ->
+ nolog;
+ true ->
+ N = view_only,
+ Args = [{file, File}, {name, N}, {mode, read_only}],
+ case disk_log:open(Args) of
+ {ok, N} ->
+ view_file(start, N);
+ {repaired, _, _, _} ->
+ view_file(start, N);
+ {error, Reason} ->
+ error("Cannot open log ~p: ~p~n", [File, Reason])
+ end
+ end.
+
+view_file(C, Log) ->
+ case disk_log:chunk(Log, C) of
+ {error, Reason} ->
+ error("** Possibly truncated FILE ~p~n", [Reason]),
+ error;
+ eof ->
+ disk_log:close(Log),
+ eof;
+ {C2, Terms, _BadBytes} ->
+ dbg_out("Lost ~p bytes in ~p ~n", [_BadBytes, Log]),
+ lists:foreach(fun(X) -> mnesia_lib:show("~p~n", [X]) end,
+ Terms),
+ view_file(C2, Log);
+ {C2, Terms} ->
+ lists:foreach(fun(X) -> mnesia_lib:show("~p~n", [X]) end,
+ Terms),
+ view_file(C2, Log)
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Backup
+
+-record(backup_args, {name, module, opaque, scope, prev_name, tables, cookie}).
+
+backup(Opaque) ->
+ backup(Opaque, []).
+
+backup(Opaque, Mod) when atom(Mod) ->
+ backup(Opaque, [{module, Mod}]);
+backup(Opaque, Args) when list(Args) ->
+ %% Backup all tables with max redundancy
+ CpArgs = [{ram_overrides_dump, false}, {max, val({schema, tables})}],
+ case mnesia_checkpoint:activate(CpArgs) of
+ {ok, Name, _Nodes} ->
+ Res = backup_checkpoint(Name, Opaque, Args),
+ mnesia_checkpoint:deactivate(Name),
+ Res;
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+backup_checkpoint(Name, Opaque) ->
+ backup_checkpoint(Name, Opaque, []).
+
+backup_checkpoint(Name, Opaque, Mod) when atom(Mod) ->
+ backup_checkpoint(Name, Opaque, [{module, Mod}]);
+backup_checkpoint(Name, Opaque, Args) when list(Args) ->
+ DefaultMod = mnesia_monitor:get_env(backup_module),
+ B = #backup_args{name = Name,
+ module = DefaultMod,
+ opaque = Opaque,
+ scope = global,
+ tables = all,
+ prev_name = Name},
+ case check_backup_args(Args, B) of
+ {ok, B2} ->
+ %% Decentralized backup
+ %% Incremental
+
+ Self = self(),
+ Pid = spawn_link(?MODULE, backup_master, [Self, B2]),
+ receive
+ {Pid, Self, Res} -> Res
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end.
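+
+%% Illustrative usage sketch, not part of the original module: a backup of
+%% an already activated checkpoint written via the default backup module.
+%% The checkpoint name my_cp and the target file name are made up.
+backup_checkpoint_example() ->
+ backup_checkpoint(my_cp, "/tmp/my_cp.BUP",
+ [{module, mnesia_backup}, {scope, global}]).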
+
+check_backup_args([Arg | Tail], B) ->
+ case catch check_backup_arg_type(Arg, B) of
+ {'EXIT', _Reason} ->
+ {error, {badarg, Arg}};
+ B2 ->
+ check_backup_args(Tail, B2)
+ end;
+
+check_backup_args([], B) ->
+ {ok, B}.
+
+check_backup_arg_type(Arg, B) ->
+ case Arg of
+ {scope, global} ->
+ B#backup_args{scope = global};
+ {scope, local} ->
+ B#backup_args{scope = local};
+ {module, Mod} ->
+ Mod2 = mnesia_monitor:do_check_type(backup_module, Mod),
+ B#backup_args{module = Mod2};
+ {incremental, Name} ->
+ B#backup_args{prev_name = Name};
+ {tables, Tabs} when list(Tabs) ->
+ B#backup_args{tables = Tabs}
+ end.
+
+backup_master(ClientPid, B) ->
+ process_flag(trap_exit, true),
+ case catch do_backup_master(B) of
+ {'EXIT', Reason} ->
+ ClientPid ! {self(), ClientPid, {error, {'EXIT', Reason}}};
+ Res ->
+ ClientPid ! {self(), ClientPid, Res}
+ end,
+ unlink(ClientPid),
+ exit(normal).
+
+do_backup_master(B) ->
+ Name = B#backup_args.name,
+ B2 = safe_apply(B, open_write, [B#backup_args.opaque]),
+ B3 = safe_write(B2, [backup_log_header()]),
+ case mnesia_checkpoint:tables_and_cookie(Name) of
+ {ok, AllTabs, Cookie} ->
+ Tabs = select_tables(AllTabs, B3),
+ B4 = B3#backup_args{cookie = Cookie},
+ %% Always put schema first in backup file
+ B5 = backup_schema(B4, Tabs),
+ B6 = lists:foldl(fun backup_tab/2, B5, Tabs -- [schema]),
+ safe_apply(B6, commit_write, [B6#backup_args.opaque]),
+ ok;
+ {error, Reason} ->
+ abort_write(B3, {?MODULE, backup_master}, [B], {error, Reason})
+ end.
+
+select_tables(AllTabs, B) ->
+ Tabs =
+ case B#backup_args.tables of
+ all -> AllTabs;
+ SomeTabs when list(SomeTabs) -> SomeTabs
+ end,
+ case B#backup_args.scope of
+ global ->
+ Tabs;
+ local ->
+ Name = B#backup_args.name,
+ [T || T <- Tabs, mnesia_checkpoint:most_local_node(Name, T) == node()]
+ end.
+
+safe_write(B, []) ->
+ B;
+safe_write(B, Recs) ->
+ safe_apply(B, write, [B#backup_args.opaque, Recs]).
+
+backup_schema(B, Tabs) ->
+ case lists:member(schema, Tabs) of
+ true ->
+ backup_tab(schema, B);
+ false ->
+ Defs = [{schema, T, mnesia_schema:get_create_list(T)} || T <- Tabs],
+ safe_write(B, Defs)
+ end.
+
+safe_apply(B, write, [_, Items]) when Items == [] ->
+ B;
+safe_apply(B, What, Args) ->
+ Abort = fun(R) -> abort_write(B, What, Args, R) end,
+ receive
+ {'EXIT', Pid, R} -> Abort({'EXIT', Pid, R})
+ after 0 ->
+ Mod = B#backup_args.module,
+ case catch apply(Mod, What, Args) of
+ {ok, Opaque} -> B#backup_args{opaque=Opaque};
+ {error, R} -> Abort(R);
+ R -> Abort(R)
+ end
+ end.
+
+abort_write(B, What, Args, Reason) ->
+ Mod = B#backup_args.module,
+ Opaque = B#backup_args.opaque,
+ dbg_out("Failed to perform backup. M=~p:F=~p:A=~p -> ~p~n",
+ [Mod, What, Args, Reason]),
+ case catch apply(Mod, abort_write, [Opaque]) of
+ {ok, _Res} ->
+ throw({error, Reason});
+ Other ->
+ error("Failed to abort backup. ~p:~p~p -> ~p~n",
+ [Mod, abort_write, [Opaque], Other]),
+ throw({error, Reason})
+ end.
+
+backup_tab(Tab, B) ->
+ Name = B#backup_args.name,
+ case mnesia_checkpoint:most_local_node(Name, Tab) of
+ {ok, Node} when Node == node() ->
+ tab_copier(self(), B, Tab);
+ {ok, Node} ->
+ RemoteB = B,
+ Pid = spawn_link(Node, ?MODULE, tab_copier, [self(), RemoteB, Tab]),
+ RecName = val({Tab, record_name}),
+ tab_receiver(Pid, B, Tab, RecName, 0);
+ {error, Reason} ->
+ abort_write(B, {?MODULE, backup_tab}, [Tab, B], {error, Reason})
+ end.
+
+tab_copier(Pid, B, Tab) when record(B, backup_args) ->
+ %% Intentional crash at exit
+ Name = B#backup_args.name,
+ PrevName = B#backup_args.prev_name,
+ {FirstName, FirstSource} = select_source(Tab, Name, PrevName),
+
+ ?eval_debug_fun({?MODULE, tab_copier, pre}, [{name, Name}, {tab, Tab}]),
+ Res = handle_more(Pid, B, Tab, FirstName, FirstSource, Name),
+ ?eval_debug_fun({?MODULE, tab_copier, post}, [{name, Name}, {tab, Tab}]),
+
+ handle_last(Pid, Res).
+
+select_source(Tab, Name, PrevName) ->
+ if
+ Tab == schema ->
+ %% Always full backup of schema
+ {Name, table};
+ Name == PrevName ->
+ %% Full backup
+ {Name, table};
+ true ->
+ %% Wants incremental backup
+ case mnesia_checkpoint:most_local_node(PrevName, Tab) of
+ {ok, Node} when Node == node() ->
+ %% Accept incremental backup
+ {PrevName, retainer};
+ _ ->
+ %% Do a full backup anyway
+ dbg_out("Incremental backup escalated to full backup: ~p~n", [Tab]),
+ {Name, table}
+ end
+ end.
+
+handle_more(Pid, B, Tab, FirstName, FirstSource, Name) ->
+ Acc = {0, B},
+ case {mnesia_checkpoint:really_retain(Name, Tab),
+ mnesia_checkpoint:really_retain(FirstName, Tab)} of
+ {true, true} ->
+ Acc2 = iterate(B, FirstName, Tab, Pid, FirstSource, latest, first, Acc),
+ iterate(B, Name, Tab, Pid, retainer, checkpoint, last, Acc2);
+ {false, false}->
+ %% Put the dumped file in the backup
+ %% instead of the ram table. This
+ %% only applies to ram_copies.
+ iterate(B, Name, Tab, Pid, retainer, checkpoint, last, Acc);
+ Bad ->
+ Reason = {"Checkpoints for incremental backup must have same "
+ "setting of ram_overrides_dump",
+ Tab, Name, FirstName, Bad},
+ abort_write(B, {?MODULE, backup_tab}, [Tab, B], {error, Reason})
+ end.
+
+handle_last(Pid, {_Count, B}) when Pid == self() ->
+ B;
+handle_last(Pid, _Acc) ->
+ unlink(Pid),
+ Pid ! {self(), {last, {ok, dummy}}},
+ exit(normal).
+
+iterate(B, Name, Tab, Pid, Source, Age, Pass, Acc) ->
+ Fun =
+ if
+ Pid == self() ->
+ RecName = val({Tab, record_name}),
+ fun(Recs, A) -> copy_records(RecName, Tab, Recs, A) end;
+ true ->
+ fun(Recs, A) -> send_records(Pid, Tab, Recs, Pass, A) end
+ end,
+ case mnesia_checkpoint:iterate(Name, Tab, Fun, Acc, Source, Age) of
+ {ok, Acc2} ->
+ Acc2;
+ {error, Reason} ->
+ R = {error, {"Tab copier iteration failed", Reason}},
+ abort_write(B, {?MODULE, iterate}, [self(), B, Tab], R)
+ end.
+
+copy_records(_RecName, _Tab, [], Acc) ->
+ Acc;
+copy_records(RecName, Tab, Recs, {Count, B}) ->
+ Recs2 = rec_filter(B, Tab, RecName, Recs),
+ B2 = safe_write(B, Recs2),
+ {Count + 1, B2}.
+
+send_records(Pid, Tab, Recs, Pass, {Count, B}) ->
+ receive
+ {Pid, more, Count} ->
+ if
+ Pass == last, Recs == [] ->
+ {Count, B};
+ true ->
+ Next = Count + 1,
+ Pid ! {self(), {more, Next, Recs}},
+ {Next, B}
+ end;
+ Msg ->
+ exit({send_records_unexpected_msg, Tab, Msg})
+ end.
+
+tab_receiver(Pid, B, Tab, RecName, Slot) ->
+ Pid ! {self(), more, Slot},
+ receive
+ {Pid, {more, Next, Recs}} ->
+ Recs2 = rec_filter(B, Tab, RecName, Recs),
+ B2 = safe_write(B, Recs2),
+ tab_receiver(Pid, B2, Tab, RecName, Next);
+
+ {Pid, {last, {ok,_}}} ->
+ B;
+
+ {'EXIT', Pid, {error, R}} ->
+ Reason = {error, {"Tab copier crashed", R}},
+ abort_write(B, {?MODULE, remote_tab_sender}, [self(), B, Tab], Reason);
+ {'EXIT', Pid, R} ->
+ Reason = {error, {"Tab copier crashed", {'EXIT', R}}},
+ abort_write(B, {?MODULE, remote_tab_sender}, [self(), B, Tab], Reason);
+ Msg ->
+ R = {error, {"Tab receiver got unexpected msg", Msg}},
+ abort_write(B, {?MODULE, remote_tab_sender}, [self(), B, Tab], R)
+ end.
+
+rec_filter(B, schema, _RecName, Recs) ->
+ case catch mnesia_bup:refresh_cookie(Recs, B#backup_args.cookie) of
+ Recs2 when list(Recs2) ->
+ Recs2;
+ {error, _Reason} ->
+ %% No schema table cookie
+ Recs
+ end;
+rec_filter(_B, Tab, Tab, Recs) ->
+ Recs;
+rec_filter(_B, Tab, _RecName, Recs) ->
+ [setelement(1, Rec, Tab) || Rec <- Recs].
+
+ets2dcd(Tab) ->
+ ets2dcd(Tab, dcd).
+
+ets2dcd(Tab, Ftype) ->
+ Fname =
+ case Ftype of
+ dcd -> mnesia_lib:tab2dcd(Tab);
+ dmp -> mnesia_lib:tab2dmp(Tab)
+ end,
+ TmpF = mnesia_lib:tab2tmp(Tab),
+ file:delete(TmpF),
+ Log = open_log({Tab, ets2dcd}, dcd_log_header(), TmpF, false),
+ mnesia_lib:db_fixtable(ram_copies, Tab, true),
+ ok = ets2dcd(mnesia_lib:db_init_chunk(ram_copies, Tab, 1000), Tab, Log),
+ mnesia_lib:db_fixtable(ram_copies, Tab, false),
+ close_log(Log),
+ ok = file:rename(TmpF, Fname),
+ %% Remove old log data which is now in the new dcd.
+ %% No one else should be accessing this file!
+ file:delete(mnesia_lib:tab2dcl(Tab)),
+ ok.
+
+ets2dcd('$end_of_table', _Tab, _Log) ->
+ ok;
+ets2dcd({Recs, Cont}, Tab, Log) ->
+ ok = disk_log:alog_terms(Log, Recs),
+ ets2dcd(mnesia_lib:db_chunk(ram_copies, Cont), Tab, Log).
+
+dcd2ets(Tab) ->
+ dcd2ets(Tab, mnesia_monitor:get_env(auto_repair)).
+
+dcd2ets(Tab, Rep) ->
+ Dcd = mnesia_lib:tab2dcd(Tab),
+ case mnesia_lib:exists(Dcd) of
+ true ->
+ Log = open_log({Tab, dcd2ets}, dcd_log_header(), Dcd,
+ true, Rep, read_only),
+ Data = chunk_log(Log, start),
+ ok = insert_dcdchunk(Data, Log, Tab),
+ close_log(Log),
+ load_dcl(Tab, Rep);
+ false -> %% Handle old dets files, and conversion from disc_only to disc.
+ Fname = mnesia_lib:tab2dat(Tab),
+ Type = val({Tab, setorbag}),
+ case mnesia_lib:dets_to_ets(Tab, Tab, Fname, Type, Rep, yes) of
+ loaded ->
+ ets2dcd(Tab),
+ file:delete(Fname),
+ 0;
+ {error, Error} ->
+ erlang:error({"Failed to load table from disc", [Tab, Error]})
+ end
+ end.
+
+insert_dcdchunk({Cont, [LogH | Rest]}, Log, Tab)
+ when record(LogH, log_header),
+ LogH#log_header.log_kind == dcd_log,
+ LogH#log_header.log_version >= "1.0" ->
+ insert_dcdchunk({Cont, Rest}, Log, Tab);
+
+insert_dcdchunk({Cont, Recs}, Log, Tab) ->
+ true = ets:insert(Tab, Recs),
+ insert_dcdchunk(chunk_log(Log, Cont), Log, Tab);
+insert_dcdchunk(eof, _Log, _Tab) ->
+ ok.
+
+load_dcl(Tab, Rep) ->
+ FName = mnesia_lib:tab2dcl(Tab),
+ case mnesia_lib:exists(FName) of
+ true ->
+ Name = {load_dcl,Tab},
+ open_log(Name,
+ dcl_log_header(),
+ FName,
+ true,
+ Rep,
+ read_only),
+ FirstChunk = chunk_log(Name, start),
+ N = insert_logchunk(FirstChunk, Name, 0),
+ close_log(Name),
+ N;
+ false ->
+ 0
+ end.
+
+insert_logchunk({C2, Recs}, Tab, C) ->
+ N = add_recs(Recs, C),
+ insert_logchunk(chunk_log(Tab, C2), Tab, C+N);
+insert_logchunk(eof, _Tab, C) ->
+ C.
+
+add_recs([{{Tab, _Key}, Val, write} | Rest], N) ->
+ true = ets:insert(Tab, Val),
+ add_recs(Rest, N+1);
+add_recs([{{Tab, Key}, _Val, delete} | Rest], N) ->
+ true = ets:delete(Tab, Key),
+ add_recs(Rest, N+1);
+add_recs([{{Tab, _Key}, Val, delete_object} | Rest], N) ->
+ true = ets:match_delete(Tab, Val),
+ add_recs(Rest, N+1);
+add_recs([{{Tab, Key}, Val, update_counter} | Rest], N) ->
+ {RecName, Incr} = Val,
+ case catch ets:update_counter(Tab, Key, Incr) of
+ CounterVal when integer(CounterVal) ->
+ ok;
+ _ ->
+ Zero = {RecName, Key, 0},
+ true = ets:insert(Tab, Zero)
+ end,
+ add_recs(Rest, N+1);
+add_recs([LogH|Rest], N)
+ when record(LogH, log_header),
+ LogH#log_header.log_kind == dcl_log,
+ LogH#log_header.log_version >= "1.0" ->
+ add_recs(Rest, N);
+add_recs([{{Tab, _Key}, _Val, clear_table} | Rest], N) ->
+ true = ets:match_delete(Tab, '_'),
+ add_recs(Rest, N+ets:info(Tab, size));
+add_recs([], N) ->
+ N.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl
new file mode 100644
index 0000000000..b64419d5a8
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl
@@ -0,0 +1,776 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_monitor.erl,v 1.1 2008/12/17 09:53:38 mikpe Exp $
+%%
+-module(mnesia_monitor).
+
+-behaviour(gen_server).
+
+%% Public exports
+-export([
+ close_dets/1,
+ close_log/1,
+ detect_inconcistency/2,
+ get_env/1,
+ init/0,
+ mktab/2,
+ unsafe_mktab/2,
+ mnesia_down/2,
+ needs_protocol_conversion/1,
+ negotiate_protocol/1,
+ disconnect/1,
+ open_dets/2,
+ unsafe_open_dets/2,
+ open_log/1,
+ patch_env/2,
+ protocol_version/0,
+ reopen_log/3,
+ set_env/2,
+ start/0,
+ start_proc/4,
+ terminate_proc/3,
+ unsafe_close_dets/1,
+ unsafe_close_log/1,
+ use_dir/0,
+ do_check_type/2
+ ]).
+
+%% gen_server callbacks
+-export([
+ init/1,
+ handle_call/3,
+ handle_cast/2,
+ handle_info/2,
+ terminate/2,
+ code_change/3
+ ]).
+
+%% Internal exports
+-export([
+ call/1,
+ cast/1,
+ detect_partitioned_network/2,
+ has_remote_mnesia_down/1
+ ]).
+
+-import(mnesia_lib, [dbg_out/2, verbose/2, error/2, fatal/2, set/2]).
+
+-include("mnesia.hrl").
+
+-record(state, {supervisor, pending_negotiators = [],
+ going_down = [], tm_started = false, early_connects = []}).
+
+-define(current_protocol_version, {7,6}).
+
+-define(previous_protocol_version, {7,5}).
+
+start() ->
+ gen_server:start_link({local, ?MODULE}, ?MODULE,
+ [self()], [{timeout, infinity}
+ %% ,{debug, [trace]}
+ ]).
+
+init() ->
+ call(init).
+
+mnesia_down(From, Node) ->
+ cast({mnesia_down, From, Node}).
+
+mktab(Tab, Args) ->
+ unsafe_call({mktab, Tab, Args}).
+unsafe_mktab(Tab, Args) ->
+ unsafe_call({unsafe_mktab, Tab, Args}).
+
+open_dets(Tab, Args) ->
+ unsafe_call({open_dets, Tab, Args}).
+unsafe_open_dets(Tab, Args) ->
+ unsafe_call({unsafe_open_dets, Tab, Args}).
+
+close_dets(Tab) ->
+ unsafe_call({close_dets, Tab}).
+
+unsafe_close_dets(Name) ->
+ unsafe_call({unsafe_close_dets, Name}).
+
+open_log(Args) ->
+ unsafe_call({open_log, Args}).
+
+reopen_log(Name, Fname, Head) ->
+ unsafe_call({reopen_log, Name, Fname, Head}).
+
+close_log(Name) ->
+ unsafe_call({close_log, Name}).
+
+unsafe_close_log(Name) ->
+ unsafe_call({unsafe_close_log, Name}).
+
+
+disconnect(Node) ->
+ cast({disconnect, Node}).
+
+%% Returns GoodNodes.
+%% Creates a link to each compatible monitor and sets
+%% protocol_version to the agreed version upon success.
+
+negotiate_protocol(Nodes) ->
+ Version = mnesia:system_info(version),
+ Protocols = acceptable_protocol_versions(),
+ MonitorPid = whereis(?MODULE),
+ Msg = {negotiate_protocol, MonitorPid, Version, Protocols},
+ {Replies, _BadNodes} = multicall(Nodes, Msg),
+ check_protocol(Replies, Protocols).
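+
+%% Illustrative sketch, not part of the original module: after a
+%% successful negotiation check_protocol/2 below stores, under the key
+%% {protocol, Node}, a pair {AgreedProtocol, NeedsConversion}. For the
+%% common case where both sides run the current protocol it looks like:
+example_protocol_entry() ->
+ {?current_protocol_version, false}.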
+
+check_protocol([{Node, {accept, Mon, _Version, Protocol}} | Tail], Protocols) ->
+ case lists:member(Protocol, Protocols) of
+ true ->
+ case Protocol == protocol_version() of
+ true ->
+ set({protocol, Node}, {Protocol, false});
+ false ->
+ set({protocol, Node}, {Protocol, true})
+ end,
+ [node(Mon) | check_protocol(Tail, Protocols)];
+ false ->
+ unlink(Mon), % Get rid of unnecessary link
+ check_protocol(Tail, Protocols)
+ end;
+check_protocol([{Node, {reject, _Mon, Version, Protocol}} | Tail], Protocols) ->
+ verbose("Failed to connect with ~p. ~p protocols rejected. "
+ "expected version = ~p, expected protocol = ~p~n",
+ [Node, Protocols, Version, Protocol]),
+ check_protocol(Tail, Protocols);
+check_protocol([{error, _Reason} | Tail], Protocols) ->
+ check_protocol(Tail, Protocols);
+check_protocol([{badrpc, _Reason} | Tail], Protocols) ->
+ check_protocol(Tail, Protocols);
+check_protocol([], [Protocol | _Protocols]) ->
+ set(protocol_version, Protocol),
+ [];
+check_protocol([], []) ->
+ set(protocol_version, protocol_version()),
+ [].
+
+protocol_version() ->
+ case ?catch_val(protocol_version) of
+ {'EXIT', _} -> ?current_protocol_version;
+ Version -> Version
+ end.
+
+%% A sorted list of acceptable protocols; the
+%% preferred protocols come first in the list.
+acceptable_protocol_versions() ->
+ [protocol_version(), ?previous_protocol_version].
+
+needs_protocol_conversion(Node) ->
+ case {?catch_val({protocol, Node}), protocol_version()} of
+ {{'EXIT', _}, _} ->
+ false;
+ {{_, Bool}, ?current_protocol_version} ->
+ Bool;
+ {{_, Bool}, _} ->
+ not Bool
+ end.
+
+cast(Msg) ->
+ case whereis(?MODULE) of
+ undefined -> ignore;
+ Pid -> gen_server:cast(Pid, Msg)
+ end.
+
+unsafe_call(Msg) ->
+ case whereis(?MODULE) of
+ undefined -> {error, {node_not_running, node()}};
+ Pid -> gen_server:call(Pid, Msg, infinity)
+ end.
+
+call(Msg) ->
+ case whereis(?MODULE) of
+ undefined ->
+ {error, {node_not_running, node()}};
+ Pid ->
+ link(Pid),
+ Res = gen_server:call(Pid, Msg, infinity),
+ unlink(Pid),
+
+ %% We get an exit signal if server dies
+ receive
+ {'EXIT', Pid, _Reason} ->
+ {error, {node_not_running, node()}}
+ after 0 ->
+ ignore
+ end,
+ Res
+ end.
+
+multicall(Nodes, Msg) ->
+ rpc:multicall(Nodes, ?MODULE, call, [Msg]).
+
+start_proc(Who, Mod, Fun, Args) ->
+ Args2 = [Who, Mod, Fun, Args],
+ proc_lib:start_link(mnesia_sp, init_proc, Args2, infinity).
+
+terminate_proc(Who, R, State) when R /= shutdown, R /= killed ->
+ fatal("~p crashed: ~p state: ~p~n", [Who, R, State]);
+
+terminate_proc(Who, Reason, _State) ->
+ mnesia_lib:verbose("~p terminated: ~p~n", [Who, Reason]),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Callback functions from gen_server
+
+%%----------------------------------------------------------------------
+%% Func: init/1
+%% Returns: {ok, State} |
+%% {ok, State, Timeout} |
+%% {stop, Reason}
+%%----------------------------------------------------------------------
+init([Parent]) ->
+ process_flag(trap_exit, true),
+ ?ets_new_table(mnesia_gvar, [set, public, named_table]),
+ set(subscribers, []),
+ mnesia_lib:verbose("~p starting: ~p~n", [?MODULE, self()]),
+ Version = mnesia:system_info(version),
+ set(version, Version),
+ dbg_out("Version: ~p~n", [Version]),
+
+ case catch process_config_args(env()) of
+ ok ->
+ mnesia_lib:set({'$$$_report', current_pos}, 0),
+ Level = mnesia_lib:val(debug),
+ mnesia_lib:verbose("Mnesia debug level set to ~p\n", [Level]),
+ set(mnesia_status, starting), %% set start status
+ set({current, db_nodes}, [node()]),
+ set(use_dir, use_dir()),
+ mnesia_lib:create_counter(trans_aborts),
+ mnesia_lib:create_counter(trans_commits),
+ mnesia_lib:create_counter(trans_log_writes),
+ Left = get_env(dump_log_write_threshold),
+ mnesia_lib:set_counter(trans_log_writes_left, Left),
+ mnesia_lib:create_counter(trans_log_writes_prev),
+ mnesia_lib:create_counter(trans_restarts),
+ mnesia_lib:create_counter(trans_failures),
+ ?ets_new_table(mnesia_held_locks, [bag, public, named_table]),
+ ?ets_new_table(mnesia_tid_locks, [bag, public, named_table]),
+ ?ets_new_table(mnesia_sticky_locks, [set, public, named_table]),
+ ?ets_new_table(mnesia_lock_queue,
+ [bag, public, named_table, {keypos, 2}]),
+ ?ets_new_table(mnesia_lock_counter, [set, public, named_table]),
+ set(checkpoints, []),
+ set(pending_checkpoints, []),
+ set(pending_checkpoint_pids, []),
+
+ {ok, #state{supervisor = Parent}};
+ {'EXIT', Reason} ->
+ mnesia_lib:report_fatal("Bad configuration: ~p~n", [Reason]),
+ {stop, {bad_config, Reason}}
+ end.
+
+use_dir() ->
+ case ?catch_val(use_dir) of
+ {'EXIT', _} ->
+ case get_env(schema_location) of
+ disc -> true;
+ opt_disc -> non_empty_dir();
+ ram -> false
+ end;
+ Bool ->
+ Bool
+ end.
+
+%% Returns true if the Mnesia directory contains
+%% important files
+non_empty_dir() ->
+ mnesia_lib:exists(mnesia_bup:fallback_bup()) or
+ mnesia_lib:exists(mnesia_lib:tab2dmp(schema)) or
+ mnesia_lib:exists(mnesia_lib:tab2dat(schema)).
+
+%%----------------------------------------------------------------------
+%% Func: handle_call/3
+%% Returns: {reply, Reply, State} |
+%% {reply, Reply, State, Timeout} |
+%% {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, Reply, State} | (terminate/2 is called)
+%%----------------------------------------------------------------------
+
+handle_call({mktab, Tab, Args}, _From, State) ->
+ case catch ?ets_new_table(Tab, Args) of
+ {'EXIT', ExitReason} ->
+ Msg = "Cannot create ets table",
+ Reason = {system_limit, Msg, Tab, Args, ExitReason},
+ fatal("~p~n", [Reason]),
+ {noreply, State};
+ Reply ->
+ {reply, Reply, State}
+ end;
+
+handle_call({unsafe_mktab, Tab, Args}, _From, State) ->
+ case catch ?ets_new_table(Tab, Args) of
+ {'EXIT', ExitReason} ->
+ {reply, {error, ExitReason}, State};
+ Reply ->
+ {reply, Reply, State}
+ end;
+
+
+handle_call({open_dets, Tab, Args}, _From, State) ->
+ case mnesia_lib:dets_sync_open(Tab, Args) of
+ {ok, Tab} ->
+ {reply, {ok, Tab}, State};
+
+ {error, Reason} ->
+ Msg = "Cannot open dets table",
+ Error = {error, {Msg, Tab, Args, Reason}},
+ fatal("~p~n", [Error]),
+ {noreply, State}
+ end;
+
+handle_call({unsafe_open_dets, Tab, Args}, _From, State) ->
+ case mnesia_lib:dets_sync_open(Tab, Args) of
+ {ok, Tab} ->
+ {reply, {ok, Tab}, State};
+ {error, Reason} ->
+ {reply, {error,Reason}, State}
+ end;
+
+handle_call({close_dets, Tab}, _From, State) ->
+ case mnesia_lib:dets_sync_close(Tab) of
+ ok ->
+ {reply, ok, State};
+ {error, Reason} ->
+ Msg = "Cannot close dets table",
+ Error = {error, {Msg, Tab, Reason}},
+ fatal("~p~n", [Error]),
+ {noreply, State}
+ end;
+
+handle_call({unsafe_close_dets, Tab}, _From, State) ->
+ mnesia_lib:dets_sync_close(Tab),
+ {reply, ok, State};
+
+handle_call({open_log, Args}, _From, State) ->
+ Res = disk_log:open([{notify, true}|Args]),
+ {reply, Res, State};
+
+handle_call({reopen_log, Name, Fname, Head}, _From, State) ->
+ case disk_log:reopen(Name, Fname, Head) of
+ ok ->
+ {reply, ok, State};
+
+ {error, Reason} ->
+ Msg = "Cannot rename disk_log file",
+ Error = {error, {Msg, Name, Fname, Head, Reason}},
+ fatal("~p~n", [Error]),
+ {noreply, State}
+ end;
+
+handle_call({close_log, Name}, _From, State) ->
+ case disk_log:close(Name) of
+ ok ->
+ {reply, ok, State};
+
+ {error, Reason} ->
+ Msg = "Cannot close disk_log file",
+ Error = {error, {Msg, Name, Reason}},
+ fatal("~p~n", [Error]),
+ {noreply, State}
+ end;
+
+handle_call({unsafe_close_log, Name}, _From, State) ->
+ disk_log:close(Name),
+ {reply, ok, State};
+
+handle_call({negotiate_protocol, Mon, _Version, _Protocols}, _From, State)
+ when State#state.tm_started == false ->
+ State2 = State#state{early_connects = [node(Mon) | State#state.early_connects]},
+ {reply, {node(), {reject, self(), uninitialized, uninitialized}}, State2};
+
+handle_call({negotiate_protocol, Mon, Version, Protocols}, From, State)
+ when node(Mon) /= node() ->
+ Protocol = protocol_version(),
+ MyVersion = mnesia:system_info(version),
+ case lists:member(Protocol, Protocols) of
+ true ->
+ accept_protocol(Mon, MyVersion, Protocol, From, State);
+ false ->
+ %% in this release we should be able to handle the previous
+ %% protocol
+ case hd(Protocols) of
+ ?previous_protocol_version ->
+ accept_protocol(Mon, MyVersion, ?previous_protocol_version, From, State);
+ _ ->
+ verbose("Connection with ~p rejected. "
+ "version = ~p, protocols = ~p, "
+ "expected version = ~p, expected protocol = ~p~n",
+ [node(Mon), Version, Protocols, MyVersion, Protocol]),
+ {reply, {node(), {reject, self(), MyVersion, Protocol}}, State}
+ end
+ end;
+
+handle_call(init, _From, State) ->
+ net_kernel:monitor_nodes(true),
+ EarlyNodes = State#state.early_connects,
+ State2 = State#state{tm_started = true},
+ {reply, EarlyNodes, State2};
+
+handle_call(Msg, _From, State) ->
+ error("~p got unexpected call: ~p~n", [?MODULE, Msg]),
+ {noreply, State}.
+
+accept_protocol(Mon, Version, Protocol, From, State) ->
+ Reply = {node(), {accept, self(), Version, Protocol}},
+ Node = node(Mon),
+ Pending0 = State#state.pending_negotiators,
+ Pending = lists:keydelete(Node, 1, Pending0),
+ case lists:member(Node, State#state.going_down) of
+ true ->
+ %% Wait for the mnesia_down to be processed,
+ %% before we reply
+ P = Pending ++ [{Node, Mon, From, Reply}],
+ {noreply, State#state{pending_negotiators = P}};
+ false ->
+ %% No need for wait
+ link(Mon), %% link to remote Monitor
+ case Protocol == protocol_version() of
+ true ->
+ set({protocol, Node}, {Protocol, false});
+ false ->
+ set({protocol, Node}, {Protocol, true})
+ end,
+ {reply, Reply, State#state{pending_negotiators = Pending}}
+ end.
+
+%%----------------------------------------------------------------------
+%% Func: handle_cast/2
+%% Returns: {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State} (terminate/2 is called)
+%%----------------------------------------------------------------------
+
+handle_cast({mnesia_down, mnesia_controller, Node}, State) ->
+ mnesia_tm:mnesia_down(Node),
+ {noreply, State};
+
+handle_cast({mnesia_down, mnesia_tm, {Node, Pending}}, State) ->
+ mnesia_locker:mnesia_down(Node, Pending),
+ {noreply, State};
+
+handle_cast({mnesia_down, mnesia_locker, Node}, State) ->
+ Down = {mnesia_down, Node},
+ mnesia_lib:report_system_event(Down),
+ GoingDown = lists:delete(Node, State#state.going_down),
+ State2 = State#state{going_down = GoingDown},
+ Pending = State#state.pending_negotiators,
+ case lists:keysearch(Node, 1, Pending) of
+ {value, {Node, Mon, ReplyTo, Reply}} ->
+ %% Late reply to remote monitor
+ link(Mon), %% link to remote Monitor
+ gen_server:reply(ReplyTo, Reply),
+ P2 = lists:keydelete(Node, 1,Pending),
+ State3 = State2#state{pending_negotiators = P2},
+ {noreply, State3};
+ false ->
+ %% No pending remote monitors
+ {noreply, State2}
+ end;
+
+handle_cast({disconnect, Node}, State) ->
+ case rpc:call(Node, erlang, whereis, [?MODULE]) of
+ {badrpc, _} ->
+ ignore;
+ RemoteMon when pid(RemoteMon) ->
+ unlink(RemoteMon)
+ end,
+ {noreply, State};
+
+handle_cast({inconsistent_database, Context, Node}, State) ->
+ Msg = {inconsistent_database, Context, Node},
+ mnesia_lib:report_system_event(Msg),
+ {noreply, State};
+
+handle_cast(Msg, State) ->
+ error("~p got unexpected cast: ~p~n", [?MODULE, Msg]),
+ {noreply, State}.
+
+%%----------------------------------------------------------------------
+%% Func: handle_info/2
+%% Returns: {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State} (terminate/2 is called)
+%%----------------------------------------------------------------------
+
+handle_info({'EXIT', Pid, R}, State) when Pid == State#state.supervisor ->
+ dbg_out("~p was ~p by supervisor~n",[?MODULE, R]),
+ {stop, R, State};
+
+handle_info({'EXIT', Pid, fatal}, State) when node(Pid) == node() ->
+ dbg_out("~p got FATAL ERROR from: ~p~n",[?MODULE, Pid]),
+ exit(State#state.supervisor, shutdown),
+ {noreply, State};
+
+handle_info({'EXIT', Pid, Reason}, State) ->
+ Node = node(Pid),
+ if
+ Node /= node() ->
+ %% Remotely linked process died; assume that it was a mnesia_monitor
+ mnesia_recover:mnesia_down(Node),
+ mnesia_controller:mnesia_down(Node),
+ {noreply, State#state{going_down = [Node | State#state.going_down]}};
+ true ->
+ %% We have probably got an exit signal from
+ %% disk_log or dets
+ Hint = "Hint: check that the disk still is writable",
+ Msg = {'EXIT', Pid, Reason},
+ fatal("~p got unexpected info: ~p; ~p~n",
+ [?MODULE, Msg, Hint])
+ end;
+
+handle_info({nodeup, Node}, State) ->
+ %% Ok, we are connected to yet another Erlang node
+ %% Let's check if Mnesia is running there in order
+ %% to detect if the network has been partitioned
+ %% due to communication failure.
+
+ HasDown = mnesia_recover:has_mnesia_down(Node),
+ ImRunning = mnesia_lib:is_running(),
+
+ if
+ %% If I'm not running, the test will be made later.
+ HasDown == true, ImRunning == yes ->
+ spawn_link(?MODULE, detect_partitioned_network, [self(), Node]);
+ true ->
+ ignore
+ end,
+ {noreply, State};
+
+handle_info({nodedown, _Node}, State) ->
+ %% Ignore, we are only caring about nodeup's
+ {noreply, State};
+
+handle_info({disk_log, _Node, Log, Info}, State) ->
+ case Info of
+ {truncated, _No} ->
+ ok;
+ _ ->
+ mnesia_lib:important("Warning Log file ~p error reason ~s~n",
+ [Log, disk_log:format_error(Info)])
+ end,
+ {noreply, State};
+
+handle_info(Msg, State) ->
+ error("~p got unexpected info (~p): ~p~n", [?MODULE, State, Msg]).
+
+%%----------------------------------------------------------------------
+%% Func: terminate/2
+%% Purpose: Shutdown the server
+%% Returns: any (ignored by gen_server)
+%%----------------------------------------------------------------------
+terminate(Reason, State) ->
+ terminate_proc(?MODULE, Reason, State).
+
+%%----------------------------------------------------------------------
+%% Func: code_change/3
+%% Purpose: Upgrade process when its code is to be changed
+%% Returns: {ok, NewState}
+%%----------------------------------------------------------------------
+
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+%%%----------------------------------------------------------------------
+%%% Internal functions
+%%%----------------------------------------------------------------------
+
+process_config_args([]) ->
+ ok;
+process_config_args([C|T]) ->
+ V = get_env(C),
+ dbg_out("Env ~p: ~p~n", [C, V]),
+ mnesia_lib:set(C, V),
+ process_config_args(T).
+
+set_env(E,Val) ->
+ mnesia_lib:set(E, check_type(E,Val)),
+ ok.
+
+get_env(E) ->
+ case ?catch_val(E) of
+ {'EXIT', _} ->
+ case application:get_env(mnesia, E) of
+ {ok, Val} ->
+ check_type(E, Val);
+ undefined ->
+ check_type(E, default_env(E))
+ end;
+ Val ->
+ Val
+ end.
+
+env() ->
+ [
+ access_module,
+ auto_repair,
+ backup_module,
+ debug,
+ dir,
+ dump_log_load_regulation,
+ dump_log_time_threshold,
+ dump_log_update_in_place,
+ dump_log_write_threshold,
+ embedded_mnemosyne,
+ event_module,
+ extra_db_nodes,
+ ignore_fallback_at_startup,
+ fallback_error_function,
+ max_wait_for_decision,
+ schema_location,
+ core_dir
+ ].
+
+default_env(access_module) ->
+ mnesia;
+default_env(auto_repair) ->
+ true;
+default_env(backup_module) ->
+ mnesia_backup;
+default_env(debug) ->
+ none;
+default_env(dir) ->
+ Name = lists:concat(["Mnesia.", node()]),
+ filename:absname(Name);
+default_env(dump_log_load_regulation) ->
+ false;
+default_env(dump_log_time_threshold) ->
+ timer:minutes(3);
+default_env(dump_log_update_in_place) ->
+ true;
+default_env(dump_log_write_threshold) ->
+ 1000;
+default_env(embedded_mnemosyne) ->
+ false;
+default_env(event_module) ->
+ mnesia_event;
+default_env(extra_db_nodes) ->
+ [];
+default_env(ignore_fallback_at_startup) ->
+ false;
+default_env(fallback_error_function) ->
+ {mnesia, lkill};
+default_env(max_wait_for_decision) ->
+ infinity;
+default_env(schema_location) ->
+ opt_disc;
+default_env(core_dir) ->
+ false.
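+
+%% Illustrative sketch, not part of the original module: reading a
+%% parameter goes through the cached value, then the application
+%% environment, then default_env/1 above, so with nothing configured or
+%% cached this returns the default write threshold of 1000.
+example_default_threshold() ->
+ get_env(dump_log_write_threshold).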
+
+check_type(Env, Val) ->
+ case catch do_check_type(Env, Val) of
+ {'EXIT', _Reason} ->
+ exit({bad_config, Env, Val});
+ NewVal ->
+ NewVal
+ end.
+
+do_check_type(access_module, A) when atom(A) -> A;
+do_check_type(auto_repair, B) -> bool(B);
+do_check_type(backup_module, B) when atom(B) -> B;
+do_check_type(debug, debug) -> debug;
+do_check_type(debug, false) -> none;
+do_check_type(debug, none) -> none;
+do_check_type(debug, trace) -> trace;
+do_check_type(debug, true) -> debug;
+do_check_type(debug, verbose) -> verbose;
+do_check_type(dir, V) -> filename:absname(V);
+do_check_type(dump_log_load_regulation, B) -> bool(B);
+do_check_type(dump_log_time_threshold, I) when integer(I), I > 0 -> I;
+do_check_type(dump_log_update_in_place, B) -> bool(B);
+do_check_type(dump_log_write_threshold, I) when integer(I), I > 0 -> I;
+do_check_type(event_module, A) when atom(A) -> A;
+do_check_type(ignore_fallback_at_startup, B) -> bool(B);
+do_check_type(fallback_error_function, {Mod, Func})
+ when atom(Mod), atom(Func) -> {Mod, Func};
+do_check_type(embedded_mnemosyne, B) -> bool(B);
+do_check_type(extra_db_nodes, L) when list(L) ->
+ Fun = fun(N) when N == node() -> false;
+ (A) when atom(A) -> true
+ end,
+ lists:filter(Fun, L);
+do_check_type(max_wait_for_decision, infinity) -> infinity;
+do_check_type(max_wait_for_decision, I) when integer(I), I > 0 -> I;
+do_check_type(schema_location, M) -> media(M);
+do_check_type(core_dir, "false") -> false;
+do_check_type(core_dir, false) -> false;
+do_check_type(core_dir, Dir) when list(Dir) -> Dir.
+
+
+bool(true) -> true;
+bool(false) -> false.
+
+media(disc) -> disc;
+media(opt_disc) -> opt_disc;
+media(ram) -> ram.
+
+patch_env(Env, Val) ->
+ case catch do_check_type(Env, Val) of
+ {'EXIT', _Reason} ->
+ {error, {bad_type, Env, Val}};
+ NewVal ->
+ application_controller:set_env(mnesia, Env, NewVal),
+ NewVal
+ end.
+
+detect_partitioned_network(Mon, Node) ->
+ GoodNodes = negotiate_protocol([Node]),
+ detect_inconcistency(GoodNodes, running_partitioned_network),
+ unlink(Mon),
+ exit(normal).
+
+detect_inconcistency([], _Context) ->
+ ok;
+detect_inconcistency(Nodes, Context) ->
+ Downs = [N || N <- Nodes, mnesia_recover:has_mnesia_down(N)],
+ {Replies, _BadNodes} =
+ rpc:multicall(Downs, ?MODULE, has_remote_mnesia_down, [node()]),
+ report_inconsistency(Replies, Context, ok).
+
+has_remote_mnesia_down(Node) ->
+ HasDown = mnesia_recover:has_mnesia_down(Node),
+ Master = mnesia_recover:get_master_nodes(schema),
+ if
+ HasDown == true, Master == [] ->
+ {true, node()};
+ true ->
+ {false, node()}
+ end.
+
+report_inconsistency([{true, Node} | Replies], Context, _Status) ->
+ %% Oops, Mnesia is already running on the
+ %% other node AND we both regard each
+ %% other as down. The database is
+ %% potentially inconsistent and we have to
+ %% tell the applications about it, so
+ %% they may perform some clever recovery
+ %% action.
+ Msg = {inconsistent_database, Context, Node},
+ mnesia_lib:report_system_event(Msg),
+ report_inconsistency(Replies, Context, inconsistent_database);
+report_inconsistency([{false, _Node} | Replies], Context, Status) ->
+ report_inconsistency(Replies, Context, Status);
+report_inconsistency([{badrpc, _Reason} | Replies], Context, Status) ->
+ report_inconsistency(Replies, Context, Status);
+report_inconsistency([], _Context, Status) ->
+ Status.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_recover.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_recover.erl
new file mode 100644
index 0000000000..cbb110fa6c
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_recover.erl
@@ -0,0 +1,1174 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_recover.erl,v 1.1 2008/12/17 09:53:39 mikpe Exp $
+%%
+-module(mnesia_recover).
+
+-behaviour(gen_server).
+
+-export([
+ allow_garb/0,
+ call/1,
+ connect_nodes/1,
+ disconnect/1,
+ dump_decision_tab/0,
+ get_master_node_info/0,
+ get_master_node_tables/0,
+ get_master_nodes/1,
+ get_mnesia_downs/0,
+ has_mnesia_down/1,
+ incr_trans_tid_serial/0,
+ init/0,
+ log_decision/1,
+ log_master_nodes/3,
+ log_mnesia_down/1,
+ log_mnesia_up/1,
+ mnesia_down/1,
+ note_decision/2,
+ note_log_decision/2,
+ outcome/2,
+ start/0,
+ start_garb/0,
+ still_pending/1,
+ sync_trans_tid_serial/1,
+ wait_for_decision/2,
+ what_happened/3
+ ]).
+
+%% gen_server callbacks
+-export([init/1,
+ handle_call/3,
+ handle_cast/2,
+ handle_info/2,
+ terminate/2,
+ code_change/3
+ ]).
+
+
+-include("mnesia.hrl").
+-import(mnesia_lib, [set/2, verbose/2, error/2, fatal/2]).
+
+-record(state, {supervisor,
+ unclear_pid,
+ unclear_decision,
+ unclear_waitfor,
+ tm_queue_len = 0,
+ initiated = false,
+ early_msgs = []
+ }).
+
+%%-define(DBG(F, A), mnesia:report_event(list_to_atom(lists:flatten(io_lib:format(F, A))))).
+%%-define(DBG(F, A), io:format("DBG: " ++ F, A)).
+
+-record(transient_decision, {tid, outcome}).
+
+start() ->
+ gen_server:start_link({local, ?MODULE}, ?MODULE, [self()],
+ [{timeout, infinity}
+ %%, {debug, [trace]}
+ ]).
+
+init() ->
+ call(init).
+
+start_garb() ->
+ Pid = whereis(mnesia_recover),
+ {ok, _} = timer:send_interval(timer:minutes(2), Pid, garb_decisions),
+ {ok, _} = timer:send_interval(timer:seconds(10), Pid, check_overload).
+
+allow_garb() ->
+ cast(allow_garb).
+
+
+%% The transaction log has either been switched (latest -> previous) or
+%% there is nothing to be dumped. This means that the previous
+%% transaction log may only contain commit records which refer to
+%% transactions noted in the last two of the 'Prev' tables. All other
+%% tables may now be garbed by 'garb_decisions' (after 2 minutes).
+%% Max 10 tables are kept.
+do_allow_garb() ->
+ %% The order of the following stuff is important!
+ Curr = val(latest_transient_decision),
+ Old = val(previous_transient_decisions),
+ Next = create_transient_decision(),
+ {Prev, ReallyOld} = sublist([Curr | Old], 10, []),
+ [?ets_delete_table(Tab) || Tab <- ReallyOld],
+ set(previous_transient_decisions, Prev),
+ set(latest_transient_decision, Next).
+
+sublist([H|R], N, Acc) when N > 0 ->
+ sublist(R, N-1, [H| Acc]);
+sublist(List, _N, Acc) ->
+ {lists:reverse(Acc), List}.
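+
+%% Worked example (editorial comment): sublist/3 splits a list after at
+%% most N elements, e.g. sublist([a,b,c], 2, []) returns {[a,b], [c]}.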
+
+do_garb_decisions() ->
+ case val(previous_transient_decisions) of
+ [First, Second | Rest] ->
+ set(previous_transient_decisions, [First, Second]),
+ [?ets_delete_table(Tab) || Tab <- Rest];
+ _ ->
+ ignore
+ end.
+
+connect_nodes([]) ->
+ [];
+connect_nodes(Ns) ->
+ %% Determine which nodes we should try to connect to
+ AlreadyConnected = val(recover_nodes),
+ {_, Nodes} = mnesia_lib:search_delete(node(), Ns),
+ Check = Nodes -- AlreadyConnected,
+ GoodNodes = mnesia_monitor:negotiate_protocol(Check),
+ if
+ GoodNodes == [] ->
+ %% No good nodes to connect to
+ ignore;
+ true ->
+ %% Now we have agreed upon a protocol with some new nodes
+ %% and we may use them when we recover transactions
+ mnesia_lib:add_list(recover_nodes, GoodNodes),
+ cast({announce_all, GoodNodes}),
+ case get_master_nodes(schema) of
+ [] ->
+ Context = starting_partitioned_network,
+ mnesia_monitor:detect_inconcistency(GoodNodes, Context);
+ _ -> %% If master_nodes is set ignore old inconsistencies
+ ignore
+ end
+ end,
+ {GoodNodes, AlreadyConnected}.
+
+disconnect(Node) ->
+ mnesia_monitor:disconnect(Node),
+ mnesia_lib:del(recover_nodes, Node).
+
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason);
+ Value -> Value
+ end.
+
+call(Msg) ->
+ Pid = whereis(?MODULE),
+ case Pid of
+ undefined ->
+ {error, {node_not_running, node()}};
+ Pid ->
+ link(Pid),
+ Res = gen_server:call(Pid, Msg, infinity),
+ unlink(Pid),
+
+ %% We get an exit signal if server dies
+ receive
+ {'EXIT', Pid, _Reason} ->
+ {error, {node_not_running, node()}}
+ after 0 ->
+ ignore
+ end,
+ Res
+ end.
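+
+%% Usage sketch (editorial, values invented): call({log_mnesia_down, N})
+%% returns ok while the server is registered, and
+%% {error, {node_not_running, node()}} when mnesia_recover is not running.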
+
+multicall(Nodes, Msg) ->
+ rpc:multicall(Nodes, ?MODULE, call, [Msg]).
+
+cast(Msg) ->
+ case whereis(?MODULE) of
+ undefined -> ignore;
+ Pid -> gen_server:cast(Pid, Msg)
+ end.
+
+abcast(Nodes, Msg) ->
+ gen_server:abcast(Nodes, ?MODULE, Msg).
+
+note_decision(Tid, Outcome) ->
+ Tab = val(latest_transient_decision),
+ ?ets_insert(Tab, #transient_decision{tid = Tid, outcome = Outcome}).
+
+note_up(Node, _Date, _Time) ->
+ ?ets_delete(mnesia_decision, Node).
+
+note_down(Node, Date, Time) ->
+ ?ets_insert(mnesia_decision, {mnesia_down, Node, Date, Time}).
+
+note_master_nodes(Tab, []) ->
+ ?ets_delete(mnesia_decision, Tab);
+note_master_nodes(Tab, Nodes) when list(Nodes) ->
+ Master = {master_nodes, Tab, Nodes},
+ ?ets_insert(mnesia_decision, Master).
+
+note_outcome(D) when D#decision.disc_nodes == [] ->
+%% ?DBG("~w: note_tmp_decision: ~w~n", [node(), D]),
+ note_decision(D#decision.tid, filter_outcome(D#decision.outcome)),
+ ?ets_delete(mnesia_decision, D#decision.tid);
+note_outcome(D) when D#decision.disc_nodes /= [] ->
+%% ?DBG("~w: note_decision: ~w~n", [node(), D]),
+ ?ets_insert(mnesia_decision, D).
+
+log_decision(D) when D#decision.outcome /= unclear ->
+ OldD = decision(D#decision.tid),
+ MergedD = merge_decisions(node(), OldD, D),
+ do_log_decision(MergedD, true);
+log_decision(D) ->
+ do_log_decision(D, false).
+
+do_log_decision(D, DoTell) ->
+ RamNs = D#decision.ram_nodes,
+ DiscNs = D#decision.disc_nodes -- [node()],
+ Outcome = D#decision.outcome,
+ D2 =
+ case Outcome of
+ aborted -> D#decision{disc_nodes = DiscNs};
+ committed -> D#decision{disc_nodes = DiscNs};
+ _ -> D
+ end,
+ note_outcome(D2),
+ case mnesia_monitor:use_dir() of
+ true ->
+ mnesia_log:append(latest_log, D2),
+ if
+ DoTell == true, Outcome /= unclear ->
+ tell_im_certain(DiscNs, D2),
+ tell_im_certain(RamNs, D2);
+ true ->
+ ignore
+ end;
+ false ->
+ ignore
+ end.
+
+tell_im_certain([], _D) ->
+ ignore;
+tell_im_certain(Nodes, D) ->
+ Msg = {im_certain, node(), D},
+%% ?DBG("~w: ~w: tell: ~w~n", [node(), Msg, Nodes]),
+ abcast(Nodes, Msg).
+
+log_mnesia_up(Node) ->
+ call({log_mnesia_up, Node}).
+
+log_mnesia_down(Node) ->
+ call({log_mnesia_down, Node}).
+
+get_mnesia_downs() ->
+ Tab = mnesia_decision,
+ Pat = {mnesia_down, '_', '_', '_'},
+ Downs = ?ets_match_object(Tab, Pat),
+ [Node || {mnesia_down, Node, _Date, _Time} <- Downs].
+
+%% Check if we have got a mnesia_down from Node
+has_mnesia_down(Node) ->
+ case ?ets_lookup(mnesia_decision, Node) of
+ [{mnesia_down, Node, _Date, _Time}] ->
+ true;
+ [] ->
+ false
+ end.
+
+mnesia_down(Node) ->
+ case ?catch_val(recover_nodes) of
+ {'EXIT', _} ->
+ %% Not started yet
+ ignore;
+ _ ->
+ mnesia_lib:del(recover_nodes, Node),
+ cast({mnesia_down, Node})
+ end.
+
+log_master_nodes(Args, UseDir, IsRunning) ->
+ if
+ IsRunning == yes ->
+ log_master_nodes2(Args, UseDir, IsRunning, ok);
+ UseDir == false ->
+ ok;
+ true ->
+ Name = latest_log,
+ Fname = mnesia_log:latest_log_file(),
+ Exists = mnesia_lib:exists(Fname),
+ Repair = mnesia:system_info(auto_repair),
+ OpenArgs = [{file, Fname}, {name, Name}, {repair, Repair}],
+ case disk_log:open(OpenArgs) of
+ {ok, Name} ->
+ log_master_nodes2(Args, UseDir, IsRunning, ok);
+ {repaired, Name, {recovered, _R}, {badbytes, _B}}
+ when Exists == true ->
+ log_master_nodes2(Args, UseDir, IsRunning, ok);
+ {repaired, Name, {recovered, _R}, {badbytes, _B}}
+ when Exists == false ->
+ mnesia_log:write_trans_log_header(),
+ log_master_nodes2(Args, UseDir, IsRunning, ok);
+ {error, Reason} ->
+ {error, Reason}
+ end
+ end.
+
+log_master_nodes2([{Tab, Nodes} | Tail], UseDir, IsRunning, WorstRes) ->
+ Res =
+ case IsRunning of
+ yes ->
+ R = call({log_master_nodes, Tab, Nodes, UseDir, IsRunning}),
+ mnesia_controller:master_nodes_updated(Tab, Nodes),
+ R;
+ _ ->
+ do_log_master_nodes(Tab, Nodes, UseDir, IsRunning)
+ end,
+ case Res of
+ ok ->
+ log_master_nodes2(Tail, UseDir, IsRunning, WorstRes);
+ {error, Reason} ->
+ log_master_nodes2(Tail, UseDir, IsRunning, {error, Reason})
+ end;
+log_master_nodes2([], _UseDir, IsRunning, WorstRes) ->
+ case IsRunning of
+ yes ->
+ WorstRes;
+ _ ->
+ disk_log:close(latest_log),
+ WorstRes
+ end.
+
+get_master_node_info() ->
+ Tab = mnesia_decision,
+ Pat = {master_nodes, '_', '_'},
+ case catch mnesia_lib:db_match_object(ram_copies,Tab, Pat) of
+ {'EXIT', _} ->
+ [];
+ Masters ->
+ Masters
+ end.
+
+get_master_node_tables() ->
+ Masters = get_master_node_info(),
+ [Tab || {master_nodes, Tab, _Nodes} <- Masters].
+
+get_master_nodes(Tab) ->
+ case catch ?ets_lookup_element(mnesia_decision, Tab, 3) of
+ {'EXIT', _} -> [];
+ Nodes -> Nodes
+ end.
+
+%% Determine what has happened to the transaction
+what_happened(Tid, Protocol, Nodes) ->
+ Default =
+ case Protocol of
+ asym_trans -> aborted;
+ _ -> unclear %% sym_trans and sync_sym_trans
+ end,
+ This = node(),
+ case lists:member(This, Nodes) of
+ true ->
+ {ok, Outcome} = call({what_happened, Default, Tid}),
+ Others = Nodes -- [This],
+ case filter_outcome(Outcome) of
+ unclear -> what_happened_remotely(Tid, Default, Others);
+ aborted -> aborted;
+ committed -> committed
+ end;
+ false ->
+ what_happened_remotely(Tid, Default, Nodes)
+ end.
+
+what_happened_remotely(Tid, Default, Nodes) ->
+ {Replies, _} = multicall(Nodes, {what_happened, Default, Tid}),
+ check_what_happened(Replies, 0, 0).
+
+check_what_happened([H | T], Aborts, Commits) ->
+ case H of
+ {ok, R} ->
+ case filter_outcome(R) of
+ committed ->
+ check_what_happened(T, Aborts, Commits + 1);
+ aborted ->
+ check_what_happened(T, Aborts + 1, Commits);
+ unclear ->
+ check_what_happened(T, Aborts, Commits)
+ end;
+ {error, _} ->
+ check_what_happened(T, Aborts, Commits);
+ {badrpc, _} ->
+ check_what_happened(T, Aborts, Commits)
+ end;
+check_what_happened([], Aborts, Commits) ->
+ if
+ Aborts == 0, Commits == 0 -> aborted; % None of the active nodes knows
+ Aborts > 0 -> aborted; % Somebody has aborted
+ Aborts == 0, Commits > 0 -> committed % All have committed
+ end.
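+
+%% Illustration with assumed replies (not from the original code): given
+%% [{ok, committed}, {ok, unclear}, {badrpc, nodedown}] the counters end
+%% up as Aborts = 0 and Commits = 1, so the verdict is committed.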
+
+%% Determine what has happened to the transaction
+%% and possibly wait forever for the decision.
+wait_for_decision(presume_commit, _InitBy) ->
+ %% sym_trans
+ {{presume_commit, self()}, committed};
+
+wait_for_decision(D, InitBy) when D#decision.outcome == presume_abort ->
+ %% asym_trans
+ Tid = D#decision.tid,
+ Outcome = filter_outcome(outcome(Tid, D#decision.outcome)),
+ if
+ Outcome /= unclear ->
+ {Tid, Outcome};
+
+ InitBy /= startup ->
+ %% Wait a while for active transactions
+ %% to end and try again
+ timer:sleep(200),
+ wait_for_decision(D, InitBy);
+
+ InitBy == startup ->
+ {ok, Res} = call({wait_for_decision, D}),
+ {Tid, Res}
+ end.
+
+still_pending([Tid | Pending]) ->
+ case filter_outcome(outcome(Tid, unclear)) of
+ unclear -> [Tid | still_pending(Pending)];
+ _ -> still_pending(Pending)
+ end;
+still_pending([]) ->
+ [].
+
+load_decision_tab() ->
+ Cont = mnesia_log:open_decision_tab(),
+ load_decision_tab(Cont, load_decision_tab),
+ mnesia_log:close_decision_tab().
+
+load_decision_tab(eof, _InitBy) ->
+ ok;
+load_decision_tab(Cont, InitBy) ->
+ case mnesia_log:chunk_decision_tab(Cont) of
+ {Cont2, Decisions} ->
+ note_log_decisions(Decisions, InitBy),
+ load_decision_tab(Cont2, InitBy);
+ eof ->
+ ok
+ end.
+
+%% Dumps DECISION.LOG and PDECISION.LOG and removes them.
+%% From now on all decisions are logged in the transaction log file
+convert_old() ->
+ HasOldStuff =
+ mnesia_lib:exists(mnesia_log:previous_decision_log_file()) or
+ mnesia_lib:exists(mnesia_log:decision_log_file()),
+ case HasOldStuff of
+ true ->
+ mnesia_log:open_decision_log(),
+ dump_decision_log(startup),
+ dump_decision_log(startup),
+ mnesia_log:close_decision_log(),
+ Latest = mnesia_log:decision_log_file(),
+ ok = file:delete(Latest);
+ false ->
+ ignore
+ end.
+
+dump_decision_log(InitBy) ->
+ %% Assumed to be run in transaction log dumper process
+ Cont = mnesia_log:prepare_decision_log_dump(),
+ perform_dump_decision_log(Cont, InitBy).
+
+perform_dump_decision_log(eof, _InitBy) ->
+ confirm_decision_log_dump();
+perform_dump_decision_log(Cont, InitBy) when InitBy == startup ->
+ case mnesia_log:chunk_decision_log(Cont) of
+ {Cont2, Decisions} ->
+ note_log_decisions(Decisions, InitBy),
+ perform_dump_decision_log(Cont2, InitBy);
+ eof ->
+ confirm_decision_log_dump()
+ end;
+perform_dump_decision_log(_Cont, _InitBy) ->
+ confirm_decision_log_dump().
+
+confirm_decision_log_dump() ->
+ dump_decision_tab(),
+ mnesia_log:confirm_decision_log_dump().
+
+dump_decision_tab() ->
+ Tab = mnesia_decision,
+ All = mnesia_lib:db_match_object(ram_copies,Tab, '_'),
+ mnesia_log:save_decision_tab({decision_list, All}).
+
+note_log_decisions([What | Tail], InitBy) ->
+ note_log_decision(What, InitBy),
+ note_log_decisions(Tail, InitBy);
+note_log_decisions([], _InitBy) ->
+ ok.
+
+note_log_decision(NewD, InitBy) when NewD#decision.outcome == pre_commit ->
+ note_log_decision(NewD#decision{outcome = unclear}, InitBy);
+
+note_log_decision(NewD, _InitBy) when record(NewD, decision) ->
+ Tid = NewD#decision.tid,
+ sync_trans_tid_serial(Tid),
+ OldD = decision(Tid),
+ MergedD = merge_decisions(node(), OldD, NewD),
+ note_outcome(MergedD);
+
+note_log_decision({trans_tid, serial, _Serial}, startup) ->
+ ignore;
+
+note_log_decision({trans_tid, serial, Serial}, _InitBy) ->
+ sync_trans_tid_serial(Serial);
+
+note_log_decision({mnesia_up, Node, Date, Time}, _InitBy) ->
+ note_up(Node, Date, Time);
+
+note_log_decision({mnesia_down, Node, Date, Time}, _InitBy) ->
+ note_down(Node, Date, Time);
+
+note_log_decision({master_nodes, Tab, Nodes}, _InitBy) ->
+ note_master_nodes(Tab, Nodes);
+
+note_log_decision(H, _InitBy) when H#log_header.log_kind == decision_log ->
+ V = mnesia_log:decision_log_version(),
+ if
+ H#log_header.log_version == V->
+ ok;
+ H#log_header.log_version == "2.0" ->
+ verbose("Accepting an old version format of decision log: ~p~n",
+ [V]),
+ ok;
+ true ->
+ fatal("Bad version of decision log: ~p~n", [H])
+ end;
+
+note_log_decision(H, _InitBy) when H#log_header.log_kind == decision_tab ->
+ V = mnesia_log:decision_tab_version(),
+ if
+ V == H#log_header.log_version ->
+ ok;
+ true ->
+ fatal("Bad version of decision tab: ~p~n", [H])
+ end;
+note_log_decision({decision_list, ItemList}, InitBy) ->
+ note_log_decisions(ItemList, InitBy);
+note_log_decision(BadItem, InitBy) ->
+ exit({"Bad decision log item", BadItem, InitBy}).
+
+trans_tid_serial() ->
+ ?ets_lookup_element(mnesia_decision, serial, 3).
+
+set_trans_tid_serial(Val) ->
+ ?ets_insert(mnesia_decision, {trans_tid, serial, Val}).
+
+incr_trans_tid_serial() ->
+ ?ets_update_counter(mnesia_decision, serial, 1).
+
+sync_trans_tid_serial(ThatCounter) when integer(ThatCounter) ->
+ ThisCounter = trans_tid_serial(),
+ if
+ ThatCounter > ThisCounter ->
+ set_trans_tid_serial(ThatCounter + 1);
+ true ->
+ ignore
+ end;
+sync_trans_tid_serial(Tid) ->
+ sync_trans_tid_serial(Tid#tid.counter).
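+
+%% Sketch of the counter semantics (editorial comment): if the local
+%% serial is 7 and a remote counter of 10 arrives, the local serial is
+%% bumped to 11; a remote counter of 5 leaves it untouched.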
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Callback functions from gen_server
+
+%%----------------------------------------------------------------------
+%% Func: init/1
+%% Returns: {ok, State} |
+%% {ok, State, Timeout} |
+%% {stop, Reason}
+%%----------------------------------------------------------------------
+init([Parent]) ->
+ process_flag(trap_exit, true),
+ mnesia_lib:verbose("~p starting: ~p~n", [?MODULE, self()]),
+ set(latest_transient_decision, create_transient_decision()),
+ set(previous_transient_decisions, []),
+ set(recover_nodes, []),
+ State = #state{supervisor = Parent},
+ {ok, State}.
+
+create_transient_decision() ->
+ ?ets_new_table(mnesia_transient_decision, [{keypos, 2}, set, public]).
+
+%%----------------------------------------------------------------------
+%% Func: handle_call/3
+%% Returns: {reply, Reply, State} |
+%% {reply, Reply, State, Timeout} |
+%% {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, Reply, State} | (terminate/2 is called)
+%%----------------------------------------------------------------------
+
+handle_call(init, From, State) when State#state.initiated == false ->
+ Args = [{keypos, 2}, set, public, named_table],
+ case mnesia_monitor:use_dir() of
+ true ->
+ ?ets_new_table(mnesia_decision, Args),
+ set_trans_tid_serial(0),
+ TabFile = mnesia_log:decision_tab_file(),
+ case mnesia_lib:exists(TabFile) of
+ true ->
+ load_decision_tab();
+ false ->
+ ignore
+ end,
+ convert_old(),
+ mnesia_dumper:opt_dump_log(scan_decisions);
+ false ->
+ ?ets_new_table(mnesia_decision, Args),
+ set_trans_tid_serial(0)
+ end,
+ handle_early_msgs(State, From);
+
+handle_call(Msg, From, State) when State#state.initiated == false ->
+ %% Buffer early messages
+ Msgs = State#state.early_msgs,
+ {noreply, State#state{early_msgs = [{call, Msg, From} | Msgs]}};
+
+handle_call({what_happened, Default, Tid}, _From, State) ->
+ sync_trans_tid_serial(Tid),
+ Outcome = outcome(Tid, Default),
+ {reply, {ok, Outcome}, State};
+
+handle_call({wait_for_decision, D}, From, State) ->
+ Recov = val(recover_nodes),
+ AliveRam = (mnesia_lib:intersect(D#decision.ram_nodes, Recov) -- [node()]),
+ RemoteDisc = D#decision.disc_nodes -- [node()],
+ if
+ AliveRam == [], RemoteDisc == [] ->
+ %% No one else to wait for, and we may safely abort
+ {reply, {ok, aborted}, State};
+ true ->
+ verbose("Transaction ~p is unclear. "
+ "Wait for disc nodes: ~w ram: ~w~n",
+ [D#decision.tid, RemoteDisc, AliveRam]),
+ AliveDisc = mnesia_lib:intersect(RemoteDisc, Recov),
+ Msg = {what_decision, node(), D},
+ abcast(AliveRam, Msg),
+ abcast(AliveDisc, Msg),
+ case val(max_wait_for_decision) of
+ infinity ->
+ ignore;
+ MaxWait ->
+ ForceMsg = {force_decision, D#decision.tid},
+ {ok, _} = timer:send_after(MaxWait, ForceMsg)
+ end,
+ State2 = State#state{unclear_pid = From,
+ unclear_decision = D,
+ unclear_waitfor = (RemoteDisc ++ AliveRam)},
+ {noreply, State2}
+ end;
+
+handle_call({log_mnesia_up, Node}, _From, State) ->
+ do_log_mnesia_up(Node),
+ {reply, ok, State};
+
+handle_call({log_mnesia_down, Node}, _From, State) ->
+ do_log_mnesia_down(Node),
+ {reply, ok, State};
+
+handle_call({log_master_nodes, Tab, Nodes, UseDir, IsRunning}, _From, State) ->
+ do_log_master_nodes(Tab, Nodes, UseDir, IsRunning),
+ {reply, ok, State};
+
+handle_call(Msg, _From, State) ->
+ error("~p got unexpected call: ~p~n", [?MODULE, Msg]),
+ {noreply, State}.
+
+do_log_mnesia_up(Node) ->
+ Yoyo = {mnesia_up, Node, Date = date(), Time = time()},
+ case mnesia_monitor:use_dir() of
+ true ->
+ mnesia_log:append(latest_log, Yoyo),
+ disk_log:sync(latest_log);
+ false ->
+ ignore
+ end,
+ note_up(Node, Date, Time).
+
+do_log_mnesia_down(Node) ->
+ Yoyo = {mnesia_down, Node, Date = date(), Time = time()},
+ case mnesia_monitor:use_dir() of
+ true ->
+ mnesia_log:append(latest_log, Yoyo),
+ disk_log:sync(latest_log);
+ false ->
+ ignore
+ end,
+ note_down(Node, Date, Time).
+
+do_log_master_nodes(Tab, Nodes, UseDir, IsRunning) ->
+ Master = {master_nodes, Tab, Nodes},
+ Res =
+ case UseDir of
+ true ->
+ LogRes = mnesia_log:append(latest_log, Master),
+ disk_log:sync(latest_log),
+ LogRes;
+ false ->
+ ok
+ end,
+ case IsRunning of
+ yes ->
+ note_master_nodes(Tab, Nodes);
+ _NotRunning ->
+ ignore
+ end,
+ Res.
+
+%%----------------------------------------------------------------------
+%% Func: handle_cast/2
+%% Returns: {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State} (terminate/2 is called)
+%%----------------------------------------------------------------------
+
+handle_cast(Msg, State) when State#state.initiated == false ->
+ %% Buffer early messages
+ Msgs = State#state.early_msgs,
+ {noreply, State#state{early_msgs = [{cast, Msg} | Msgs]}};
+
+handle_cast({im_certain, Node, NewD}, State) ->
+ OldD = decision(NewD#decision.tid),
+ MergedD = merge_decisions(Node, OldD, NewD),
+ do_log_decision(MergedD, false),
+ {noreply, State};
+
+handle_cast(allow_garb, State) ->
+ do_allow_garb(),
+ {noreply, State};
+
+handle_cast({decisions, Node, Decisions}, State) ->
+ mnesia_lib:add(recover_nodes, Node),
+ State2 = add_remote_decisions(Node, Decisions, State),
+ {noreply, State2};
+
+handle_cast({what_decision, Node, OtherD}, State) ->
+ Tid = OtherD#decision.tid,
+ sync_trans_tid_serial(Tid),
+ Decision =
+ case decision(Tid) of
+ no_decision -> OtherD;
+ MyD when record(MyD, decision) -> MyD
+ end,
+ announce([Node], [Decision], [], true),
+ {noreply, State};
+
+handle_cast({mnesia_down, Node}, State) ->
+ case State#state.unclear_decision of
+ undefined ->
+ {noreply, State};
+ D ->
+ case lists:member(Node, D#decision.ram_nodes) of
+ false ->
+ {noreply, State};
+ true ->
+ State2 = add_remote_decision(Node, D, State),
+ {noreply, State2}
+ end
+ end;
+
+handle_cast({announce_all, Nodes}, State) ->
+ announce_all(Nodes, tabs()),
+ {noreply, State};
+
+handle_cast(Msg, State) ->
+ error("~p got unexpected cast: ~p~n", [?MODULE, Msg]),
+ {noreply, State}.
+
+%%----------------------------------------------------------------------
+%% Func: handle_info/2
+%% Returns: {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State} (terminate/2 is called)
+%%----------------------------------------------------------------------
+
+%% No need for buffering
+%% handle_info(Msg, State) when State#state.initiated == false ->
+%% %% Buffer early messages
+%% Msgs = State#state.early_msgs,
+%% {noreply, State#state{early_msgs = [{info, Msg} | Msgs]}};
+
+handle_info(check_overload, S) ->
+ %% Time to check if mnesia_tm is overloaded
+ case whereis(mnesia_tm) of
+ Pid when pid(Pid) ->
+
+ Threshold = 100,
+ Prev = S#state.tm_queue_len,
+ {message_queue_len, Len} =
+ process_info(Pid, message_queue_len),
+ if
+ Len > Threshold, Prev > Threshold ->
+ What = {mnesia_tm, message_queue_len, [Prev, Len]},
+ mnesia_lib:report_system_event({mnesia_overload, What}),
+ {noreply, S#state{tm_queue_len = 0}};
+
+ Len > Threshold ->
+ {noreply, S#state{tm_queue_len = Len}};
+
+ true ->
+ {noreply, S#state{tm_queue_len = 0}}
+ end;
+ undefined ->
+ {noreply, S}
+ end;
+
+handle_info(garb_decisions, State) ->
+ do_garb_decisions(),
+ {noreply, State};
+
+handle_info({force_decision, Tid}, State) ->
+ %% Enforce a transaction recovery decision,
+ %% if we still are waiting for the outcome
+
+ case State#state.unclear_decision of
+ U when U#decision.tid == Tid ->
+ verbose("Decided to abort transaction ~p since "
+ "max_wait_for_decision has been exceeded~n",
+ [Tid]),
+ D = U#decision{outcome = aborted},
+ State2 = add_remote_decision(node(), D, State),
+ {noreply, State2};
+ _ ->
+ {noreply, State}
+ end;
+
+handle_info({'EXIT', Pid, R}, State) when Pid == State#state.supervisor ->
+ mnesia_lib:dbg_out("~p was ~p~n",[?MODULE, R]),
+ {stop, shutdown, State};
+
+handle_info(Msg, State) ->
+ error("~p got unexpected info: ~p~n", [?MODULE, Msg]),
+ {noreply, State}.
+
+%%----------------------------------------------------------------------
+%% Func: terminate/2
+%% Purpose: Shutdown the server
+%% Returns: any (ignored by gen_server)
+%%----------------------------------------------------------------------
+
+terminate(Reason, State) ->
+ mnesia_monitor:terminate_proc(?MODULE, Reason, State).
+
+%%----------------------------------------------------------------------
+%% Func: code_change/3
+%% Purpose: Upgrade process when its code is to be changed
+%% Returns: {ok, NewState}
+%%----------------------------------------------------------------------
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+%%%----------------------------------------------------------------------
+%%% Internal functions
+%%%----------------------------------------------------------------------
+
+handle_early_msgs(State, From) ->
+ Res = do_handle_early_msgs(State#state.early_msgs,
+ State#state{early_msgs = [],
+ initiated = true}),
+ gen_server:reply(From, ok),
+ Res.
+
+do_handle_early_msgs([Msg | Msgs], State) ->
+ %% The messages are in reversed order
+ case do_handle_early_msgs(Msgs, State) of
+ {stop, Reason, Reply, State2} ->
+ {stop, Reason, Reply, State2};
+ {stop, Reason, State2} ->
+ {stop, Reason, State2};
+ {noreply, State2} ->
+ handle_early_msg(Msg, State2)
+ end;
+
+do_handle_early_msgs([], State) ->
+ {noreply, State}.
+
+handle_early_msg({call, Msg, From}, State) ->
+ case handle_call(Msg, From, State) of
+ {reply, R, S} ->
+ gen_server:reply(From, R),
+ {noreply, S};
+ Other ->
+ Other
+ end;
+handle_early_msg({cast, Msg}, State) ->
+ handle_cast(Msg, State);
+handle_early_msg({info, Msg}, State) ->
+ handle_info(Msg, State).
+
+tabs() ->
+ Curr = val(latest_transient_decision), % Do not miss any trans even
+ Prev = val(previous_transient_decisions), % if the tabs are switched
+ [Curr, mnesia_decision | Prev]. % Ordered by hit probability
+
+decision(Tid) ->
+ decision(Tid, tabs()).
+
+decision(Tid, [Tab | Tabs]) ->
+ case catch ?ets_lookup(Tab, Tid) of
+ [D] when record(D, decision) ->
+ D;
+ [C] when record(C, transient_decision) ->
+ #decision{tid = C#transient_decision.tid,
+ outcome = C#transient_decision.outcome,
+ disc_nodes = [],
+ ram_nodes = []
+ };
+ [] ->
+ decision(Tid, Tabs);
+ {'EXIT', _} ->
+ %% Recently switched transient decision table
+ decision(Tid, Tabs)
+ end;
+decision(_Tid, []) ->
+ no_decision.
+
+outcome(Tid, Default) ->
+ outcome(Tid, Default, tabs()).
+
+outcome(Tid, Default, [Tab | Tabs]) ->
+ case catch ?ets_lookup_element(Tab, Tid, 3) of
+ {'EXIT', _} ->
+ outcome(Tid, Default, Tabs);
+ Val ->
+ Val
+ end;
+outcome(_Tid, Default, []) ->
+ Default.
+
+filter_outcome(Val) ->
+ case Val of
+ unclear -> unclear;
+ aborted -> aborted;
+ presume_abort -> aborted;
+ committed -> committed;
+ pre_commit -> unclear
+ end.
+
+filter_aborted(D) when D#decision.outcome == presume_abort ->
+ D#decision{outcome = aborted};
+filter_aborted(D) ->
+ D.
+
+%% Merge old decision D with new (probably remote) decision
+merge_decisions(Node, D, NewD0) ->
+ NewD = filter_aborted(NewD0),
+ if
+ D == no_decision, node() /= Node ->
+ %% We did not know anything about this txn
+ NewD#decision{disc_nodes = []};
+ D == no_decision ->
+ NewD;
+ record(D, decision) ->
+ DiscNs = D#decision.disc_nodes -- ([node(), Node]),
+ OldD = filter_aborted(D#decision{disc_nodes = DiscNs}),
+%% mnesia_lib:dbg_out("merge ~w: NewD = ~w~n D = ~w~n OldD = ~w~n",
+%% [Node, NewD, D, OldD]),
+ if
+ OldD#decision.outcome == unclear,
+ NewD#decision.outcome == unclear ->
+ D;
+
+ OldD#decision.outcome == NewD#decision.outcome ->
+ %% We have come to the same decision
+ OldD;
+
+ OldD#decision.outcome == committed,
+ NewD#decision.outcome == aborted ->
+ %% Interesting! We have already committed,
+ %% but someone else has aborted. Now we
+ %% have a nice little inconsistency. The
+ %% other guy (or someone else) has
+ %% enforced a recovery decision when
+ %% max_wait_for_decision was exceeded.
+ %% We will pretend that we have obeyed
+ %% the forced recovery decision, but we
+ %% will also generate an event in case the
+ %% application wants to do something clever.
+ Msg = {inconsistent_database, bad_decision, Node},
+ mnesia_lib:report_system_event(Msg),
+ OldD#decision{outcome = aborted};
+
+ OldD#decision.outcome == aborted ->
+ %% aborted overrides anything
+ OldD#decision{outcome = aborted};
+
+ NewD#decision.outcome == aborted ->
+ %% aborted overrides anything
+ OldD#decision{outcome = aborted};
+
+ OldD#decision.outcome == committed,
+ NewD#decision.outcome == unclear ->
+ %% committed overrides unclear
+ OldD#decision{outcome = committed};
+
+ OldD#decision.outcome == unclear,
+ NewD#decision.outcome == committed ->
+ %% committed overrides unclear
+ OldD#decision{outcome = committed}
+ end
+ end.
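+
+%% Hedged summary of the merge rules above (values invented): an unclear
+%% local outcome merged with a committed remote outcome yields committed;
+%% an aborted outcome on either side wins, except that a local committed
+%% merged with a remote aborted also raises the inconsistent_database event.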
+
+add_remote_decisions(Node, [D | Tail], State) when record(D, decision) ->
+ State2 = add_remote_decision(Node, D, State),
+ add_remote_decisions(Node, Tail, State2);
+
+add_remote_decisions(Node, [C | Tail], State)
+ when record(C, transient_decision) ->
+ D = #decision{tid = C#transient_decision.tid,
+ outcome = C#transient_decision.outcome,
+ disc_nodes = [],
+ ram_nodes = []},
+ State2 = add_remote_decision(Node, D, State),
+ add_remote_decisions(Node, Tail, State2);
+
+add_remote_decisions(Node, [{mnesia_down, _, _, _} | Tail], State) ->
+ add_remote_decisions(Node, Tail, State);
+
+add_remote_decisions(Node, [{trans_tid, serial, Serial} | Tail], State) ->
+ sync_trans_tid_serial(Serial),
+ case State#state.unclear_decision of
+ undefined ->
+ ignored;
+ D ->
+ case lists:member(Node, D#decision.ram_nodes) of
+ true ->
+ ignore;
+ false ->
+ abcast([Node], {what_decision, node(), D})
+ end
+ end,
+ add_remote_decisions(Node, Tail, State);
+
+add_remote_decisions(_Node, [], State) ->
+ State.
+
+add_remote_decision(Node, NewD, State) ->
+ Tid = NewD#decision.tid,
+ OldD = decision(Tid),
+ D = merge_decisions(Node, OldD, NewD),
+ do_log_decision(D, false),
+ Outcome = D#decision.outcome,
+ if
+ OldD == no_decision ->
+ ignore;
+ Outcome == unclear ->
+ ignore;
+ true ->
+ case lists:member(node(), NewD#decision.disc_nodes) or
+ lists:member(node(), NewD#decision.ram_nodes) of
+ true ->
+ tell_im_certain([Node], D);
+ false ->
+ ignore
+ end
+ end,
+ case State#state.unclear_decision of
+ U when U#decision.tid == Tid ->
+ WaitFor = State#state.unclear_waitfor -- [Node],
+ if
+ Outcome == unclear, WaitFor == [] ->
+ %% Everybody is uncertain, let's abort
+ NewOutcome = aborted,
+ CertainD = D#decision{outcome = NewOutcome,
+ disc_nodes = [],
+ ram_nodes = []},
+ tell_im_certain(D#decision.disc_nodes, CertainD),
+ tell_im_certain(D#decision.ram_nodes, CertainD),
+ do_log_decision(CertainD, false),
+ verbose("Decided to abort transaction ~p "
+ "since everybody are uncertain ~p~n",
+ [Tid, CertainD]),
+ gen_server:reply(State#state.unclear_pid, {ok, NewOutcome}),
+ State#state{unclear_pid = undefined,
+ unclear_decision = undefined,
+ unclear_waitfor = undefined};
+ Outcome /= unclear ->
+ verbose("~p told us that transaction ~p was ~p~n",
+ [Node, Tid, Outcome]),
+ gen_server:reply(State#state.unclear_pid, {ok, Outcome}),
+ State#state{unclear_pid = undefined,
+ unclear_decision = undefined,
+ unclear_waitfor = undefined};
+ Outcome == unclear ->
+ State#state{unclear_waitfor = WaitFor}
+ end;
+ _ ->
+ State
+ end.
+
+announce_all([], _Tabs) ->
+ ok;
+announce_all(ToNodes, [Tab | Tabs]) ->
+ case catch mnesia_lib:db_match_object(ram_copies, Tab, '_') of
+ {'EXIT', _} ->
+ %% Oops, we are in the middle of a 'garb_decisions'
+ announce_all(ToNodes, Tabs);
+ List ->
+ announce(ToNodes, List, [], false),
+ announce_all(ToNodes, Tabs)
+ end;
+announce_all(_ToNodes, []) ->
+ ok.
+
+announce(ToNodes, [Head | Tail], Acc, ForceSend) ->
+ Acc2 = arrange(ToNodes, Head, Acc, ForceSend),
+ announce(ToNodes, Tail, Acc2, ForceSend);
+
+announce(_ToNodes, [], Acc, _ForceSend) ->
+ send_decisions(Acc).
+
+send_decisions([{Node, Decisions} | Tail]) ->
+ abcast([Node], {decisions, node(), Decisions}),
+ send_decisions(Tail);
+send_decisions([]) ->
+ ok.
+
+arrange([To | ToNodes], D, Acc, ForceSend) when record(D, decision) ->
+ NeedsAdd = (ForceSend or
+ lists:member(To, D#decision.disc_nodes) or
+ lists:member(To, D#decision.ram_nodes)),
+ case NeedsAdd of
+ true ->
+ Acc2 = add_decision(To, D, Acc),
+ arrange(ToNodes, D, Acc2, ForceSend);
+ false ->
+ arrange(ToNodes, D, Acc, ForceSend)
+ end;
+
+arrange([To | ToNodes], C, Acc, ForceSend) when record(C, transient_decision) ->
+ Acc2 = add_decision(To, C, Acc),
+ arrange(ToNodes, C, Acc2, ForceSend);
+
+arrange([_To | _ToNodes], {mnesia_down, _Node, _Date, _Time}, Acc, _ForceSend) ->
+ %% The others have their own info about this
+ Acc;
+
+arrange([_To | _ToNodes], {master_nodes, _Tab, _Nodes}, Acc, _ForceSend) ->
+ %% The others have their own info about this
+ Acc;
+
+arrange([To | ToNodes], {trans_tid, serial, Serial}, Acc, ForceSend) ->
+ %% Do the Lamport thing plus release the others
+ %% from uncertainty.
+ Acc2 = add_decision(To, {trans_tid, serial, Serial}, Acc),
+ arrange(ToNodes, {trans_tid, serial, Serial}, Acc2, ForceSend);
+
+arrange([], _Decision, Acc, _ForceSend) ->
+ Acc.
+
+add_decision(Node, Decision, [{Node, Decisions} | Tail]) ->
+ [{Node, [Decision | Decisions]} | Tail];
+add_decision(Node, Decision, [Head | Tail]) ->
+ [Head | add_decision(Node, Decision, Tail)];
+add_decision(Node, Decision, []) ->
+ [{Node, [Decision]}].
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_registry.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_registry.erl
new file mode 100644
index 0000000000..a7e65506fa
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_registry.erl
@@ -0,0 +1,276 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_registry.erl,v 1.2 2010/03/04 13:54:19 maria Exp $
+%%
+-module(mnesia_registry).
+
+%%%----------------------------------------------------------------------
+%%% File : mnesia_registry.erl
+%%% Purpose : Support dump and restore of a registry on a C-node
+%%% This is an OTP internal module and is not publicly available.
+%%%
+%%% Example : Dump some hardcoded records into the Mnesia table Tab
+%%%
+%%% case rpc:call(Node, mnesia_registry, start_dump, [Tab, self()]) of
+%%% Pid when pid(Pid) ->
+%%% Pid ! {write, key1, key_size1, val_type1, val_size1, val1},
+%%% Pid ! {delete, key3},
+%%% Pid ! {write, key2, key_size2, val_type2, val_size2, val2},
+%%% Pid ! {write, key4, key_size4, val_type4, val_size4, val4},
+%%% Pid ! {commit, self()},
+%%% receive
+%%% {ok, Pid} ->
+%%% ok;
+%%% {'EXIT', Pid, Reason} ->
+%%% exit(Reason)
+%%% end;
+%%% {badrpc, Reason} ->
+%%% exit(Reason)
+%%% end.
+%%%
+%%% Example : Restore the corresponding Mnesia table Tab
+%%%
+%%% case rpc:call(Node, mnesia_registry, start_restore, [Tab, self()]) of
+%%% {size, Pid, N, LargestKey, LargestVal} ->
+%%% Pid ! {send_records, self()},
+%%% Fun = fun() ->
+%%% receive
+%%% {restore, KeySize, ValSize, ValType, Key, Val} ->
+%%% {Key, Val};
+%%% {'EXIT', Pid, Reason} ->
+%%% exit(Reason)
+%%% end
+%%% end,
+%%% lists:map(Fun, lists:seq(1, N));
+%%% {badrpc, Reason} ->
+%%% exit(Reason)
+%%% end.
+%%%
+%%%----------------------------------------------------------------------
+
+%% External exports
+-export([start_dump/2, start_restore/2]).
+-export([create_table/1, create_table/2]).
+
+%% Internal exports
+-export([init/4]).
+
+-record(state, {table, ops = [], link_to}).
+
+-record(registry_entry, {key, key_size, val_type, val_size, val}).
+
+-record(size, {pid = self(), n_values = 0, largest_key = 0, largest_val = 0}).
+
+%%%----------------------------------------------------------------------
+%%% Client
+%%%----------------------------------------------------------------------
+
+start(Type, Tab, LinkTo) ->
+ Starter = self(),
+ Args = [Type, Starter, LinkTo, Tab],
+ Pid = spawn_link(?MODULE, init, Args),
+ %% The receiver process may unlink the current process
+ receive
+ {ok, Res} ->
+ Res;
+ {'EXIT', Pid, Reason} when LinkTo == Starter ->
+ exit(Reason)
+ end.
+
+%% Starts a receiver process and optionally creates a Mnesia table
+%% with suitable default values. Returns the Pid of the receiver process
+%%
+%% The receiver process accumulates Mnesia operations and performs
+%% all operations or none at commit. The understood messages are:
+%%
+%% {write, Key, KeySize, ValType, ValSize, Val} ->
+%% accumulates mnesia:write({Tab, Key, KeySize, ValType, ValSize, Val})
+%% (no reply)
+%% {delete, Key} ->
+%% accumulates mnesia:delete({Tab, Key}) (no reply)
+%% {commit, ReplyTo} ->
+%% commits all accumulated operations
+%% and stops the process (replies {ok, Pid})
+%% abort ->
+%% stops the process (no reply)
+%%
+%% The receiver process is linked to the process with the process identifier
+%% LinkTo. If some error occurs the receiver process will invoke exit(Reason)
+%% and it is up to the LinkTo process to act properly when it receives an exit
+%% signal.
+
+start_dump(Tab, LinkTo) ->
+ start(dump, Tab, LinkTo).
+
+%% Starts a sender process which sends restore messages back to the
+%% LinkTo process. But first, some statistics about the table are
+%% determined and returned as a 5-tuple:
+%%
+%% {size, SenderPid, N, LargestKeySize, LargestValSize}
+%%
+%% where N is the number of records in the table. Then the sender process
+%% waits for a 2-tuple message:
+%%
+%% {send_records, ReplyTo}
+%%
+%% Finally, N 6-tuple messages are sent to the ReplyTo process:
+%%
+%% ReplyTo ! {restore, KeySize, ValSize, ValType, Key, Val}
+%%
+%% If some error occurs the receiver process will invoke exit(Reason)
+%% and it is up to the LinkTo process to act properly when it receives an
+%% exit signal.
+
+start_restore(Tab, LinkTo) ->
+ start(restore, Tab, LinkTo).
+
+
+%% Optionally creates the Mnesia table Tab with suitable default values.
+%% Returns ok or EXIT's
+create_table(Tab) ->
+ Storage = mnesia:table_info(schema, storage_type),
+ create_table(Tab, [{Storage, [node()]}]).
+
+create_table(Tab, TabDef) ->
+ Attrs = record_info(fields, registry_entry),
+ case mnesia:create_table(Tab, [{attributes, Attrs} | TabDef]) of
+ {'atomic', ok} ->
+ ok;
+ {aborted, {already_exists, Tab}} ->
+ ok;
+ {aborted, Reason} ->
+ exit(Reason)
+ end.
+
+%%%----------------------------------------------------------------------
+%%% Server
+%%%----------------------------------------------------------------------
+
+init(Type, Starter, LinkTo, Tab) ->
+ if
+ LinkTo /= Starter ->
+ link(LinkTo),
+ unlink(Starter);
+ true ->
+ ignore
+ end,
+ case Type of
+ dump ->
+ Starter ! {ok, self()},
+ dump_loop(#state{table = Tab, link_to = LinkTo});
+ restore ->
+ restore_table(Tab, Starter, LinkTo)
+ end.
+
+%%%----------------------------------------------------------------------
+%%% Dump loop
+%%%----------------------------------------------------------------------
+
+dump_loop(S) ->
+ Tab = S#state.table,
+ Ops = S#state.ops,
+ receive
+ {write, Key, KeySize, ValType, ValSize, Val} ->
+ RE = #registry_entry{key = Key,
+ key_size = KeySize,
+ val_type = ValType,
+ val_size = ValSize,
+ val = Val},
+ dump_loop(S#state{ops = [{write, RE} | Ops]});
+ {delete, Key} ->
+ dump_loop(S#state{ops = [{delete, Key} | Ops]});
+ {commit, ReplyTo} ->
+ create_table(Tab),
+ RecName = mnesia:table_info(Tab, record_name),
+ %% The Ops are in reverse order, but there is no need
+ %% for reversing the list of accumulated operations
+ case mnesia:transaction(fun handle_ops/3, [Tab, RecName, Ops]) of
+ {'atomic', ok} ->
+ ReplyTo ! {ok, self()},
+ stop(S#state.link_to);
+ {aborted, Reason} ->
+ exit({aborted, Reason})
+ end;
+ abort ->
+ stop(S#state.link_to);
+ BadMsg ->
+ exit({bad_message, BadMsg})
+ end.
+
+stop(LinkTo) ->
+ unlink(LinkTo),
+ exit(normal).
+
+%% Grab a write lock for the entire table
+%% and iterate over all accumulated operations
+handle_ops(Tab, RecName, Ops) ->
+ mnesia:write_lock_table(Tab),
+ do_handle_ops(Tab, RecName, Ops).
+
+do_handle_ops(Tab, RecName, [{write, RegEntry} | Ops]) ->
+ Record = setelement(1, RegEntry, RecName),
+ mnesia:write(Tab, Record, write),
+ do_handle_ops(Tab, RecName, Ops);
+do_handle_ops(Tab, RecName, [{delete, Key} | Ops]) ->
+ mnesia:delete(Tab, Key, write),
+ do_handle_ops(Tab, RecName, Ops);
+do_handle_ops(_Tab, _RecName, []) ->
+ ok.
+
+%%%----------------------------------------------------------------------
+%%% Restore table
+%%%----------------------------------------------------------------------
+
+restore_table(Tab, Starter, LinkTo) ->
+ Pat = mnesia:table_info(Tab, wild_pattern),
+ Fun = fun() -> mnesia:match_object(Tab, Pat, read) end,
+ case mnesia:transaction(Fun) of
+ {'atomic', AllRecords} ->
+ Size = calc_size(AllRecords, #size{}),
+ Starter ! {ok, Size},
+ receive
+ {send_records, ReplyTo} ->
+ send_records(AllRecords, ReplyTo),
+ unlink(LinkTo),
+ exit(normal);
+ BadMsg ->
+ exit({bad_message, BadMsg})
+ end;
+ {aborted, Reason} ->
+ exit(Reason)
+ end.
+
+calc_size([H | T], S) ->
+ KeySize = max(element(#registry_entry.key_size, H), S#size.largest_key),
+ ValSize = max(element(#registry_entry.val_size, H), S#size.largest_val),
+ N = S#size.n_values + 1,
+ calc_size(T, S#size{n_values = N, largest_key = KeySize, largest_val = ValSize});
+calc_size([], Size) ->
+ Size.
+
+max(New, Old) when New > Old -> New;
+max(_New, Old) -> Old.
+
+send_records([H | T], ReplyTo) ->
+ KeySize = element(#registry_entry.key_size, H),
+ ValSize = element(#registry_entry.val_size, H),
+ ValType = element(#registry_entry.val_type, H),
+ Key = element(#registry_entry.key, H),
+ Val = element(#registry_entry.val, H),
+ ReplyTo ! {restore, KeySize, ValSize, ValType, Key, Val},
+ send_records(T, ReplyTo);
+send_records([], _ReplyTo) ->
+ ok.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl
new file mode 100644
index 0000000000..395532e91b
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl
@@ -0,0 +1,2898 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_schema.erl,v 1.2 2010/03/04 13:54:20 maria Exp $
+%%
+%% In this module we provide a number of explicit functions
+%% to manipulate the schema. All these functions are called
+%% within a special schema transaction.
+%%
+%% We also have an init/1 function defined here; it is
+%% used by mnesia:start() to initialize the entire schema.
+
+-module(mnesia_schema).
+
+-export([
+ add_snmp/2,
+ add_table_copy/3,
+ add_table_index/2,
+ arrange_restore/3,
+ attr_tab_to_pos/2,
+ attr_to_pos/2,
+ change_table_copy_type/3,
+ change_table_access_mode/2,
+ change_table_load_order/2,
+ change_table_frag/2,
+ clear_table/1,
+ create_table/1,
+ cs2list/1,
+ del_snmp/1,
+ del_table_copy/2,
+ del_table_index/2,
+ delete_cstruct/2,
+ delete_schema/1,
+ delete_schema2/0,
+ delete_table/1,
+ delete_table_property/2,
+ dump_tables/1,
+ ensure_no_schema/1,
+ get_create_list/1,
+ get_initial_schema/2,
+ get_table_properties/1,
+ info/0,
+ info/1,
+ init/1,
+ insert_cstruct/3,
+ is_remote_member/1,
+ list2cs/1,
+ lock_schema/0,
+ lock_del_table/4, % Spawned
+ merge_schema/0,
+ move_table/3,
+ opt_create_dir/2,
+ prepare_commit/3,
+ purge_dir/2,
+ purge_tmp_files/0,
+ ram_delete_table/2,
+% ram_delete_table/3,
+ read_cstructs_from_disc/0,
+ read_nodes/0,
+ remote_read_schema/0,
+ restore/1,
+ restore/2,
+ restore/3,
+ schema_coordinator/3,
+ set_where_to_read/3,
+ transform_table/4,
+ undo_prepare_commit/2,
+ unlock_schema/0,
+ version/0,
+ write_table_property/2
+ ]).
+
+%% Exports for mnesia_frag
+-export([
+ get_tid_ts_and_lock/2,
+ make_create_table/1,
+ ensure_active/1,
+ pick/4,
+ verify/3,
+ incr_version/1,
+ check_keys/3,
+ check_duplicates/2,
+ make_delete_table/2
+ ]).
+
+%% Needed outside to be able to use/set table_properties
+%% from user (not supported)
+-export([schema_transaction/1,
+ insert_schema_ops/2,
+ do_create_table/1,
+ do_delete_table/1,
+ do_delete_table_property/2,
+ do_write_table_property/2]).
+
+-include("mnesia.hrl").
+-include_lib("kernel/include/file.hrl").
+
+-import(mnesia_lib, [set/2, del/2, verbose/2, dbg_out/2]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Here comes the init function which also resides in
+%% this module, it is called upon by the trans server
+%% at startup of the system
+%%
+%% We have a meta table which looks like
+%% {table, schema,
+%% {type, set},
+%% {disc_copies, all},
+%% {arity, 2}
+%% {attributes, [key, val]}
+%%
+%% This means that we have a series of {schema, Name, Cs} tuples
+%% in a table called schema !!
+
+init(IgnoreFallback) ->
+ Res = read_schema(true, false, IgnoreFallback),
+ {ok, Source, _CreateList} = exit_on_error(Res),
+ verbose("Schema initiated from: ~p~n", [Source]),
+ set({schema, tables}, []),
+ set({schema, local_tables}, []),
+ Tabs = set_schema(?ets_first(schema)),
+ lists:foreach(fun(Tab) -> clear_whereabouts(Tab) end, Tabs),
+ set({schema, where_to_read}, node()),
+ set({schema, load_node}, node()),
+ set({schema, load_reason}, initial),
+ mnesia_controller:add_active_replica(schema, node()).
+
+exit_on_error({error, Reason}) ->
+ exit(Reason);
+exit_on_error(GoodRes) ->
+ GoodRes.
+
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason);
+ Value -> Value
+ end.
+
+%% This function traverses all cstructs in the schema and
+%% sets all values in mnesia_gvar accordingly for each table/cstruct
+
+set_schema('$end_of_table') ->
+ [];
+set_schema(Tab) ->
+ do_set_schema(Tab),
+ [Tab | set_schema(?ets_next(schema, Tab))].
+
+get_create_list(Tab) ->
+ ?ets_lookup_element(schema, Tab, 3).
+
+do_set_schema(Tab) ->
+ List = get_create_list(Tab),
+ Cs = list2cs(List),
+ do_set_schema(Tab, Cs).
+
+do_set_schema(Tab, Cs) ->
+ Type = Cs#cstruct.type,
+ set({Tab, setorbag}, Type),
+ set({Tab, local_content}, Cs#cstruct.local_content),
+ set({Tab, ram_copies}, Cs#cstruct.ram_copies),
+ set({Tab, disc_copies}, Cs#cstruct.disc_copies),
+ set({Tab, disc_only_copies}, Cs#cstruct.disc_only_copies),
+ set({Tab, load_order}, Cs#cstruct.load_order),
+ set({Tab, access_mode}, Cs#cstruct.access_mode),
+ set({Tab, snmp}, Cs#cstruct.snmp),
+ set({Tab, user_properties}, Cs#cstruct.user_properties),
+ [set({Tab, user_property, element(1, P)}, P) || P <- Cs#cstruct.user_properties],
+ set({Tab, frag_properties}, Cs#cstruct.frag_properties),
+ mnesia_frag:set_frag_hash(Tab, Cs#cstruct.frag_properties),
+ set({Tab, attributes}, Cs#cstruct.attributes),
+ Arity = length(Cs#cstruct.attributes) + 1,
+ set({Tab, arity}, Arity),
+ RecName = Cs#cstruct.record_name,
+ set({Tab, record_name}, RecName),
+ set({Tab, record_validation}, {RecName, Arity, Type}),
+ set({Tab, wild_pattern}, wild(RecName, Arity)),
+ set({Tab, index}, Cs#cstruct.index),
+ %% create actual index tabs later
+ set({Tab, cookie}, Cs#cstruct.cookie),
+ set({Tab, version}, Cs#cstruct.version),
+ set({Tab, cstruct}, Cs),
+ Storage = mnesia_lib:schema_cs_to_storage_type(node(), Cs),
+ set({Tab, storage_type}, Storage),
+ mnesia_lib:add({schema, tables}, Tab),
+ Ns = mnesia_lib:cs_to_nodes(Cs),
+ case lists:member(node(), Ns) of
+ true ->
+ mnesia_lib:add({schema, local_tables}, Tab);
+ false when Tab == schema ->
+ mnesia_lib:add({schema, local_tables}, Tab);
+ false ->
+ ignore
+ end.
+
+wild(RecName, Arity) ->
+ Wp0 = list_to_tuple(lists:duplicate(Arity, '_')),
+ setelement(1, Wp0, RecName).
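+
+%% Example (editorial comment): wild(person, 4) builds the wild pattern
+%% {person, '_', '_', '_'} that is stored as {Tab, wild_pattern}.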
+
+%% Temporarily read the local schema and return a list
+%% of all nodes mentioned in the schema.DAT file
+read_nodes() ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ case mnesia_lib:ensure_loaded(?APPLICATION) of
+ ok ->
+ case read_schema(false, false) of
+ {ok, _Source, CreateList} ->
+ Cs = list2cs(CreateList),
+ {ok, Cs#cstruct.disc_copies ++ Cs#cstruct.ram_copies};
+ {error, Reason} ->
+ {error, Reason}
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+%% Returns Version from the tuple {Version,MasterNodes}
+version() ->
+ case read_schema(false, false) of
+ {ok, Source, CreateList} when Source /= default ->
+ Cs = list2cs(CreateList),
+ {Version, _Details} = Cs#cstruct.version,
+ Version;
+ _ ->
+ case dir_exists(mnesia_lib:dir()) of
+ true -> {1,0};
+ false -> {0,0}
+ end
+ end.
+
+%% Calculate next table version from old cstruct
+incr_version(Cs) ->
+ {{Major, Minor}, _} = Cs#cstruct.version,
+ Nodes = mnesia_lib:intersect(val({schema, disc_copies}),
+ mnesia_lib:cs_to_nodes(Cs)),
+ V =
+ case Nodes -- val({Cs#cstruct.name, active_replicas}) of
+ [] -> {Major + 1, 0}; % All replicas are active
+ _ -> {Major, Minor + 1} % Some replicas are inactive
+ end,
+ Cs#cstruct{version = {V, {node(), now()}}}.
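+
+%% Illustration (invented values): with version {{3,1}, _} and all disc
+%% replicas active, the new table version becomes {4,0}; if some replica
+%% is inactive, only the minor part is bumped, giving {3,2}.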
+
+%% Returns table name
+insert_cstruct(Tid, Cs, KeepWhereabouts) ->
+ Tab = Cs#cstruct.name,
+ TabDef = cs2list(Cs),
+ Val = {schema, Tab, TabDef},
+ mnesia_checkpoint:tm_retain(Tid, schema, Tab, write),
+ mnesia_subscr:report_table_event(schema, Tid, Val, write),
+ Active = val({Tab, active_replicas}),
+
+ case KeepWhereabouts of
+ true ->
+ ignore;
+ false when Active == [] ->
+ clear_whereabouts(Tab);
+ false ->
+ %% Someone else has initiated the table
+ ignore
+ end,
+ set({Tab, cstruct}, Cs),
+ ?ets_insert(schema, Val),
+ do_set_schema(Tab, Cs),
+ Val.
+
+clear_whereabouts(Tab) ->
+ set({Tab, checkpoints}, []),
+ set({Tab, subscribers}, []),
+ set({Tab, where_to_read}, nowhere),
+ set({Tab, active_replicas}, []),
+ set({Tab, commit_work}, []),
+ set({Tab, where_to_write}, []),
+ set({Tab, where_to_commit}, []),
+ set({Tab, load_by_force}, false),
+ set({Tab, load_node}, unknown),
+ set({Tab, load_reason}, unknown).
+
+%% Returns table name
+delete_cstruct(Tid, Cs) ->
+ Tab = Cs#cstruct.name,
+ TabDef = cs2list(Cs),
+ Val = {schema, Tab, TabDef},
+ mnesia_checkpoint:tm_retain(Tid, schema, Tab, delete),
+ mnesia_subscr:report_table_event(schema, Tid, Val, delete),
+ ?ets_match_delete(mnesia_gvar, {{Tab, '_'}, '_'}),
+ ?ets_match_delete(mnesia_gvar, {{Tab, '_', '_'}, '_'}),
+ del({schema, local_tables}, Tab),
+ del({schema, tables}, Tab),
+ ?ets_delete(schema, Tab),
+ Val.
+
+%% Delete the Mnesia directory on all given nodes
+%% Requires that Mnesia is not running anywhere
+%% Returns ok | {error,Reason}
+delete_schema(Ns) when list(Ns), Ns /= [] ->
+ RunningNs = mnesia_lib:running_nodes(Ns),
+ Reason = "Cannot delete schema on all nodes",
+ if
+ RunningNs == [] ->
+ case rpc:multicall(Ns, ?MODULE, delete_schema2, []) of
+ {Replies, []} ->
+ case [R || R <- Replies, R /= ok] of
+ [] ->
+ ok;
+ BadReplies ->
+ verbose("~s: ~p~n", [Reason, BadReplies]),
+ {error, {"All nodes not running", BadReplies}}
+ end;
+ {_Replies, BadNs} ->
+ verbose("~s: ~p~n", [Reason, BadNs]),
+ {error, {"All nodes not running", BadNs}}
+ end;
+ true ->
+ verbose("~s: ~p~n", [Reason, RunningNs]),
+ {error, {"Mnesia is not stopped everywhere", RunningNs}}
+ end;
+delete_schema(Ns) ->
+ {error, {badarg, Ns}}.
+
+delete_schema2() ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ case mnesia_lib:ensure_loaded(?APPLICATION) of
+ ok ->
+ case mnesia_lib:is_running() of
+ no ->
+ Dir = mnesia_lib:dir(),
+ purge_dir(Dir, []),
+ ok;
+ _ ->
+ {error, {"Mnesia still running", node()}}
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+ensure_no_schema([H|T]) when atom(H) ->
+ case rpc:call(H, ?MODULE, remote_read_schema, []) of
+ {badrpc, Reason} ->
+ {H, {"All nodes not running", H, Reason}};
+ {ok,Source, _} when Source /= default ->
+ {H, {already_exists, H}};
+ _ ->
+ ensure_no_schema(T)
+ end;
+ensure_no_schema([H|_]) ->
+ {error,{badarg, H}};
+ensure_no_schema([]) ->
+ ok.
+
+remote_read_schema() ->
+ %% Ensure that we access the intended Mnesia
+ %% directory. This function may not be called
+ %% during startup since it will cause the
+ %% application_controller to get into deadlock
+ case mnesia_lib:ensure_loaded(?APPLICATION) of
+ ok ->
+ case mnesia_monitor:get_env(schema_location) of
+ opt_disc ->
+ read_schema(false, true);
+ _ ->
+ read_schema(false, false)
+ end;
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+dir_exists(Dir) ->
+ dir_exists(Dir, mnesia_monitor:use_dir()).
+dir_exists(Dir, true) ->
+ case file:read_file_info(Dir) of
+ {ok, _} -> true;
+ _ -> false
+ end;
+dir_exists(_Dir, false) ->
+ false.
+
+opt_create_dir(UseDir, Dir) when UseDir == true->
+ case dir_exists(Dir, UseDir) of
+ true ->
+ check_can_write(Dir);
+ false ->
+ case file:make_dir(Dir) of
+ ok ->
+ verbose("Create Directory ~p~n", [Dir]),
+ ok;
+ {error, Reason} ->
+ verbose("Cannot create mnesia dir ~p~n", [Reason]),
+ {error, {"Cannot create Mnesia dir", Dir, Reason}}
+ end
+ end;
+opt_create_dir(false, _) ->
+ {error, {has_no_disc, node()}}.
+
+check_can_write(Dir) ->
+ case file:read_file_info(Dir) of
+ {ok, FI} when FI#file_info.type == directory,
+ FI#file_info.access == read_write ->
+ ok;
+ {ok, _} ->
+ {error, "Not allowed to write in Mnesia dir", Dir};
+ _ ->
+ {error, "Non existent Mnesia dir", Dir}
+ end.
+
+lock_schema() ->
+ mnesia_lib:lock_table(schema).
+
+unlock_schema() ->
+ mnesia_lib:unlock_table(schema).
+
+read_schema(Keep, _UseDirAnyway) ->
+ read_schema(Keep, false, false).
+
+%% The schema may be read for several reasons.
+%% If Mnesia is not already started, we normally do not want
+%% the ets table named schema to be left around after the read.
+%% If Keep == true, the ets table schema is kept.
+%% If Keep == false, the ets table schema is removed.
+%%
+%% Returns {ok, Source, SchemaCstruct} or {error, Reason}
+%% Source may be: default | ram | disc | fallback
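+%%
+%% Illustration (shapes only, values are examples): on a disc node a
+%% successful read may yield {ok, disc, [{name, schema} | _]}, while a
+%% node with no schema on disc falls back to
+%% {ok, default, get_initial_schema(ram_copies, [])}.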
+
+read_schema(Keep, UseDirAnyway, IgnoreFallback) ->
+ lock_schema(),
+ Res =
+ case mnesia:system_info(is_running) of
+ yes ->
+ {ok, ram, get_create_list(schema)};
+ _IsRunning ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ read_disc_schema(Keep, IgnoreFallback);
+ false when UseDirAnyway == true ->
+ read_disc_schema(Keep, IgnoreFallback);
+ false when Keep == true ->
+ Args = [{keypos, 2}, public, named_table, set],
+ mnesia_monitor:mktab(schema, Args),
+ CreateList = get_initial_schema(ram_copies, []),
+ ?ets_insert(schema,{schema, schema, CreateList}),
+ {ok, default, CreateList};
+ false when Keep == false ->
+ CreateList = get_initial_schema(ram_copies, []),
+ {ok, default, CreateList}
+ end
+ end,
+ unlock_schema(),
+ Res.
+
+read_disc_schema(Keep, IgnoreFallback) ->
+ Running = mnesia:system_info(is_running),
+ case mnesia_bup:fallback_exists() of
+ true when IgnoreFallback == false, Running /= yes ->
+ mnesia_bup:fallback_to_schema();
+ _ ->
+ %% If we're running, we read the schema file even
+ %% if fallback exists
+ Dat = mnesia_lib:tab2dat(schema),
+ case mnesia_lib:exists(Dat) of
+ true ->
+ do_read_disc_schema(Dat, Keep);
+ false ->
+ Dmp = mnesia_lib:tab2dmp(schema),
+ case mnesia_lib:exists(Dmp) of
+ true ->
+ %% May only happen when toggling of
+ %% schema storage type has been
+ %% interrupted
+ do_read_disc_schema(Dmp, Keep);
+ false ->
+ {error, "No schema file exists"}
+ end
+ end
+ end.
+
+do_read_disc_schema(Fname, Keep) ->
+ T =
+ case Keep of
+ false ->
+ Args = [{keypos, 2}, public, set],
+ ?ets_new_table(schema, Args);
+ true ->
+ Args = [{keypos, 2}, public, named_table, set],
+ mnesia_monitor:mktab(schema, Args)
+ end,
+ Repair = mnesia_monitor:get_env(auto_repair),
+    Res =  % BUGBUG Fix up dcl!
+ case mnesia_lib:dets_to_ets(schema, T, Fname, set, Repair, no) of
+ loaded -> {ok, disc, ?ets_lookup_element(T, schema, 3)};
+ Other -> {error, {"Cannot read schema", Fname, Other}}
+ end,
+ case Keep of
+ true -> ignore;
+ false -> ?ets_delete_table(T)
+ end,
+ Res.
+
+get_initial_schema(SchemaStorage, Nodes) ->
+ Cs = #cstruct{name = schema,
+ record_name = schema,
+ attributes = [table, cstruct]},
+ Cs2 =
+ case SchemaStorage of
+ ram_copies -> Cs#cstruct{ram_copies = Nodes};
+ disc_copies -> Cs#cstruct{disc_copies = Nodes}
+ end,
+ cs2list(Cs2).
+
+read_cstructs_from_disc() ->
+ %% Assumptions:
+ %% - local schema lock in global
+ %% - use_dir is true
+ %% - Mnesia is not running
+ %% - Ignore fallback
+
+ Fname = mnesia_lib:tab2dat(schema),
+ case mnesia_lib:exists(Fname) of
+ true ->
+ Args = [{file, Fname},
+ {keypos, 2},
+ {repair, mnesia_monitor:get_env(auto_repair)},
+ {type, set}],
+ case dets:open_file(make_ref(), Args) of
+ {ok, Tab} ->
+ Fun = fun({_, _, List}) ->
+ {continue, list2cs(List)}
+ end,
+ Cstructs = dets:traverse(Tab, Fun),
+ dets:close(Tab),
+ {ok, Cstructs};
+ {error, Reason} ->
+ {error, Reason}
+ end;
+ false ->
+ {error, "No schema file exists"}
+ end.
+
+%% We run a very special type of transaction when we
+%% want to manipulate the schema.
+
+get_tid_ts_and_lock(Tab, Intent) ->
+ TidTs = get(mnesia_activity_state),
+ case TidTs of
+ {_Mod, Tid, Ts} when record(Ts, tidstore)->
+ Store = Ts#tidstore.store,
+ case Intent of
+ read -> mnesia_locker:rlock_table(Tid, Store, Tab);
+ write -> mnesia_locker:wlock_table(Tid, Store, Tab);
+ none -> ignore
+ end,
+ TidTs;
+ _ ->
+ mnesia:abort(no_transaction)
+ end.
+
+schema_transaction(Fun) ->
+ case get(mnesia_activity_state) of
+ undefined ->
+ Args = [self(), Fun, whereis(mnesia_controller)],
+ Pid = spawn_link(?MODULE, schema_coordinator, Args),
+ receive
+ {transaction_done, Res, Pid} -> Res;
+ {'EXIT', Pid, R} -> {aborted, {transaction_crashed, R}}
+ end;
+ _ ->
+ {aborted, nested_transaction}
+ end.
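+
+%% Illustrative use (hypothetical table name): every schema operation in
+%% this module is wrapped like
+%%   schema_transaction(fun() -> do_delete_table(my_tab) end)
+%% and the caller gets back {atomic, Res} or {aborted, Reason} from the
+%% coordinator process.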
+
+%% This process may dump the transaction log, and should
+%% therefore not be run in an application process
+%%
+schema_coordinator(Client, _Fun, undefined) ->
+ Res = {aborted, {node_not_running, node()}},
+ Client ! {transaction_done, Res, self()},
+ unlink(Client);
+
+schema_coordinator(Client, Fun, Controller) when pid(Controller) ->
+ %% Do not trap exit in order to automatically die
+ %% when the controller dies
+
+ link(Controller),
+ unlink(Client),
+
+    %% Fulfill the transaction even if the client dies
+ Res = mnesia:transaction(Fun),
+ Client ! {transaction_done, Res, self()},
+ unlink(Controller), % Avoids spurious exit message
+ unlink(whereis(mnesia_tm)), % Avoids spurious exit message
+ exit(normal).
+
+%% The make* routines return a list of ops; this function
+%% inserts them all into the Store and maintains the local
+%% order of the ops.
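+%% Shape of the ops, for illustration: make_create_table/1, for instance,
+%% produces [{op, create_table, [{name, Tab} | _]}], and each op is written
+%% into the transaction store verbatim.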
+
+insert_schema_ops({_Mod, _Tid, Ts}, SchemaIOps) ->
+ do_insert_schema_ops(Ts#tidstore.store, SchemaIOps).
+
+do_insert_schema_ops(Store, [Head | Tail]) ->
+ ?ets_insert(Store, Head),
+ do_insert_schema_ops(Store, Tail);
+do_insert_schema_ops(_Store, []) ->
+ ok.
+
+cs2list(Cs) when record(Cs, cstruct) ->
+ Tags = record_info(fields, cstruct),
+ rec2list(Tags, 2, Cs);
+cs2list(CreateList) when list(CreateList) ->
+ CreateList.
+
+rec2list([Tag | Tags], Pos, Rec) ->
+ Val = element(Pos, Rec),
+ [{Tag, Val} | rec2list(Tags, Pos + 1, Rec)];
+rec2list([], _Pos, _Rec) ->
+ [].
+
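+%% Illustration: cs2list/1 flattens a #cstruct{} into a keyword list with
+%% one {Tag, Value} pair per record field, in field order, e.g.
+%% [{name, foo} | _]; list2cs/1 below is the inverse and fills in defaults.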
+list2cs(List) when list(List) ->
+ Name = pick(unknown, name, List, must),
+ Type = pick(Name, type, List, set),
+ Rc0 = pick(Name, ram_copies, List, []),
+ Dc = pick(Name, disc_copies, List, []),
+ Doc = pick(Name, disc_only_copies, List, []),
+ Rc = case {Rc0, Dc, Doc} of
+ {[], [], []} -> [node()];
+ _ -> Rc0
+ end,
+ LC = pick(Name, local_content, List, false),
+ RecName = pick(Name, record_name, List, Name),
+ Attrs = pick(Name, attributes, List, [key, val]),
+ Snmp = pick(Name, snmp, List, []),
+ LoadOrder = pick(Name, load_order, List, 0),
+ AccessMode = pick(Name, access_mode, List, read_write),
+ UserProps = pick(Name, user_properties, List, []),
+ verify({alt, [nil, list]}, mnesia_lib:etype(UserProps),
+ {bad_type, Name, {user_properties, UserProps}}),
+ Cookie = pick(Name, cookie, List, ?unique_cookie),
+ Version = pick(Name, version, List, {{2, 0}, []}),
+ Ix = pick(Name, index, List, []),
+ verify({alt, [nil, list]}, mnesia_lib:etype(Ix),
+ {bad_type, Name, {index, [Ix]}}),
+ Ix2 = [attr_to_pos(I, Attrs) || I <- Ix],
+
+ Frag = pick(Name, frag_properties, List, []),
+ verify({alt, [nil, list]}, mnesia_lib:etype(Frag),
+ {badarg, Name, {frag_properties, Frag}}),
+
+ Keys = check_keys(Name, List, record_info(fields, cstruct)),
+ check_duplicates(Name, Keys),
+ #cstruct{name = Name,
+ ram_copies = Rc,
+ disc_copies = Dc,
+ disc_only_copies = Doc,
+ type = Type,
+ index = Ix2,
+ snmp = Snmp,
+ load_order = LoadOrder,
+ access_mode = AccessMode,
+ local_content = LC,
+ record_name = RecName,
+ attributes = Attrs,
+ user_properties = lists:sort(UserProps),
+ frag_properties = lists:sort(Frag),
+ cookie = Cookie,
+ version = Version};
+list2cs(Other) ->
+ mnesia:abort({badarg, Other}).
+
+pick(Tab, Key, List, Default) ->
+ case lists:keysearch(Key, 1, List) of
+ false when Default == must ->
+ mnesia:abort({badarg, Tab, "Missing key", Key, List});
+ false ->
+ Default;
+ {value, {Key, Value}} ->
+ Value;
+ {value, BadArg} ->
+ mnesia:abort({bad_type, Tab, BadArg})
+ end.
+
+%% Convert attribute name to integer if necessary
+attr_tab_to_pos(_Tab, Pos) when integer(Pos) ->
+ Pos;
+attr_tab_to_pos(Tab, Attr) ->
+ attr_to_pos(Attr, val({Tab, attributes})).
+
+%% Convert attribute name to integer if necessary
+attr_to_pos(Pos, _Attrs) when integer(Pos) ->
+ Pos;
+attr_to_pos(Attr, Attrs) when atom(Attr) ->
+ attr_to_pos(Attr, Attrs, 2);
+attr_to_pos(Attr, _) ->
+ mnesia:abort({bad_type, Attr}).
+
+attr_to_pos(Attr, [Attr | _Attrs], Pos) ->
+ Pos;
+attr_to_pos(Attr, [_ | Attrs], Pos) ->
+ attr_to_pos(Attr, Attrs, Pos + 1);
+attr_to_pos(Attr, _, _) ->
+ mnesia:abort({bad_type, Attr}).
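+
+%% Example: attr_to_pos(val, [key, val]) returns 3, since position 1 holds
+%% the record tag and attribute positions therefore start at 2.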
+
+check_keys(Tab, [{Key, _Val} | Tail], Items) ->
+ case lists:member(Key, Items) of
+ true -> [Key | check_keys(Tab, Tail, Items)];
+ false -> mnesia:abort({badarg, Tab, Key})
+ end;
+check_keys(_, [], _) ->
+ [];
+check_keys(Tab, Arg, _) ->
+ mnesia:abort({badarg, Tab, Arg}).
+
+check_duplicates(Tab, Keys) ->
+ case has_duplicates(Keys) of
+ false -> ok;
+ true -> mnesia:abort({badarg, Tab, "Duplicate keys", Keys})
+ end.
+
+has_duplicates([H | T]) ->
+ case lists:member(H, T) of
+ true -> true;
+ false -> has_duplicates(T)
+ end;
+has_duplicates([]) ->
+ false.
+
+%% This is the only place where we check the validity of data
+verify_cstruct(Cs) when record(Cs, cstruct) ->
+ verify_nodes(Cs),
+
+ Tab = Cs#cstruct.name,
+ verify(atom, mnesia_lib:etype(Tab), {bad_type, Tab}),
+ Type = Cs#cstruct.type,
+ verify(true, lists:member(Type, [set, bag, ordered_set]),
+ {bad_type, Tab, {type, Type}}),
+
+    %% Currently ordered_set is not supported for disc_only_copies.
+ if
+ Type == ordered_set, Cs#cstruct.disc_only_copies /= [] ->
+ mnesia:abort({bad_type, Tab, {not_supported, Type, disc_only_copies}});
+ true ->
+ ok
+ end,
+
+ RecName = Cs#cstruct.record_name,
+ verify(atom, mnesia_lib:etype(RecName),
+ {bad_type, Tab, {record_name, RecName}}),
+
+ Attrs = Cs#cstruct.attributes,
+ verify(list, mnesia_lib:etype(Attrs),
+ {bad_type, Tab, {attributes, Attrs}}),
+
+ Arity = length(Attrs) + 1,
+ verify(true, Arity > 2, {bad_type, Tab, {attributes, Attrs}}),
+
+ lists:foldl(fun(Attr,_Other) when Attr == snmp ->
+ mnesia:abort({bad_type, Tab, {attributes, [Attr]}});
+ (Attr,Other) ->
+ verify(atom, mnesia_lib:etype(Attr),
+ {bad_type, Tab, {attributes, [Attr]}}),
+ verify(false, lists:member(Attr, Other),
+ {combine_error, Tab, {attributes, [Attr | Other]}}),
+ [Attr | Other]
+ end,
+ [],
+ Attrs),
+
+ Index = Cs#cstruct.index,
+ verify({alt, [nil, list]}, mnesia_lib:etype(Index),
+ {bad_type, Tab, {index, Index}}),
+
+ IxFun =
+ fun(Pos) ->
+ verify(true, fun() ->
+ if
+ integer(Pos),
+ Pos > 2,
+ Pos =< Arity ->
+ true;
+ true -> false
+ end
+ end,
+ {bad_type, Tab, {index, [Pos]}})
+ end,
+ lists:foreach(IxFun, Index),
+
+ LC = Cs#cstruct.local_content,
+ verify({alt, [true, false]}, LC,
+ {bad_type, Tab, {local_content, LC}}),
+ Access = Cs#cstruct.access_mode,
+ verify({alt, [read_write, read_only]}, Access,
+ {bad_type, Tab, {access_mode, Access}}),
+
+ Snmp = Cs#cstruct.snmp,
+ verify(true, mnesia_snmp_hook:check_ustruct(Snmp),
+ {badarg, Tab, {snmp, Snmp}}),
+
+ CheckProp = fun(Prop) when tuple(Prop), size(Prop) >= 1 -> ok;
+ (Prop) -> mnesia:abort({bad_type, Tab, {user_properties, [Prop]}})
+ end,
+ lists:foreach(CheckProp, Cs#cstruct.user_properties),
+
+ case Cs#cstruct.cookie of
+	{{MegaSecs, Secs, MicroSecs}, Node}
+	    when integer(MegaSecs), integer(Secs),
+	    integer(MicroSecs), atom(Node) ->
+ ok;
+ Cookie ->
+ mnesia:abort({bad_type, Tab, {cookie, Cookie}})
+ end,
+ case Cs#cstruct.version of
+ {{Major, Minor}, _Detail}
+ when integer(Major), integer(Minor) ->
+ ok;
+ Version ->
+ mnesia:abort({bad_type, Tab, {version, Version}})
+ end.
+
+verify_nodes(Cs) ->
+ Tab = Cs#cstruct.name,
+ Ram = Cs#cstruct.ram_copies,
+ Disc = Cs#cstruct.disc_copies,
+ DiscOnly = Cs#cstruct.disc_only_copies,
+ LoadOrder = Cs#cstruct.load_order,
+
+ verify({alt, [nil, list]}, mnesia_lib:etype(Ram),
+ {bad_type, Tab, {ram_copies, Ram}}),
+ verify({alt, [nil, list]}, mnesia_lib:etype(Disc),
+ {bad_type, Tab, {disc_copies, Disc}}),
+ case Tab of
+ schema ->
+ verify([], DiscOnly, {bad_type, Tab, {disc_only_copies, DiscOnly}});
+ _ ->
+ verify({alt, [nil, list]},
+ mnesia_lib:etype(DiscOnly),
+ {bad_type, Tab, {disc_only_copies, DiscOnly}})
+ end,
+ verify(integer, mnesia_lib:etype(LoadOrder),
+ {bad_type, Tab, {load_order, LoadOrder}}),
+
+ Nodes = Ram ++ Disc ++ DiscOnly,
+ verify(list, mnesia_lib:etype(Nodes),
+ {combine_error, Tab,
+ [{ram_copies, []}, {disc_copies, []}, {disc_only_copies, []}]}),
+ verify(false, has_duplicates(Nodes), {combine_error, Tab, Nodes}),
+ AtomCheck = fun(N) -> verify(atom, mnesia_lib:etype(N), {bad_type, Tab, N}) end,
+ lists:foreach(AtomCheck, Nodes).
+
+verify(Expected, Fun, Error) when function(Fun) ->
+ do_verify(Expected, catch Fun(), Error);
+verify(Expected, Actual, Error) ->
+ do_verify(Expected, Actual, Error).
+
+do_verify({alt, Values}, Value, Error) ->
+ case lists:member(Value, Values) of
+ true -> ok;
+ false -> mnesia:abort(Error)
+ end;
+do_verify(Value, Value, _) ->
+ ok;
+do_verify(_Value, _, Error) ->
+ mnesia:abort(Error).
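+
+%% Example: verify({alt, [nil, list]}, mnesia_lib:etype(X), Err) aborts with
+%% Err unless X is [] or a list; when the second argument is a fun, verify/3
+%% evaluates it (catching exits) before comparing.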
+
+ensure_writable(Tab) ->
+ case val({Tab, where_to_write}) of
+ [] -> mnesia:abort({read_only, Tab});
+ _ -> ok
+ end.
+
+%% Ensure that all replicas on disc-full nodes are active
+ensure_active(Cs) ->
+ ensure_active(Cs, active_replicas).
+
+ensure_active(Cs, What) ->
+ Tab = Cs#cstruct.name,
+ case val({Tab, What}) of
+ [] -> mnesia:abort({no_exists, Tab});
+ _ -> ok
+ end,
+ Nodes = mnesia_lib:intersect(val({schema, disc_copies}),
+ mnesia_lib:cs_to_nodes(Cs)),
+ W = {Tab, What},
+ case Nodes -- val(W) of
+ [] ->
+ ok;
+ Ns ->
+ Expl = "All replicas on diskfull nodes are not active yet",
+ case val({Tab, local_content}) of
+ true ->
+ case rpc:multicall(Ns, ?MODULE, is_remote_member, [W]) of
+ {Replies, []} ->
+ check_active(Replies, Expl, Tab);
+ {_Replies, BadNs} ->
+ mnesia:abort({not_active, Expl, Tab, BadNs})
+ end;
+ false ->
+ mnesia:abort({not_active, Expl, Tab, Ns})
+ end
+ end.
+
+ensure_not_active(schema, Node) ->
+ case lists:member(Node, val({schema, active_replicas})) of
+ false ->
+ ok;
+ true ->
+ Expl = "Mnesia is running",
+ mnesia:abort({active, Expl, Node})
+ end.
+
+is_remote_member(Key) ->
+ IsActive = lists:member(node(), val(Key)),
+ {IsActive, node()}.
+
+check_active([{true, _Node} | Replies], Expl, Tab) ->
+ check_active(Replies, Expl, Tab);
+check_active([{false, Node} | _Replies], Expl, Tab) ->
+ mnesia:abort({not_active, Expl, Tab, [Node]});
+check_active([{badrpc, Reason} | _Replies], Expl, Tab) ->
+ mnesia:abort({not_active, Expl, Tab, Reason});
+check_active([], _Expl, _Tab) ->
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Here's the real interface function to create a table
+
+create_table(TabDef) ->
+ schema_transaction(fun() -> do_multi_create_table(TabDef) end).
+
+%% And the corresponding do routines ....
+
+do_multi_create_table(TabDef) ->
+ get_tid_ts_and_lock(schema, write),
+ ensure_writable(schema),
+ Cs = list2cs(TabDef),
+ case Cs#cstruct.frag_properties of
+ [] ->
+ do_create_table(Cs);
+ _Props ->
+ CsList = mnesia_frag:expand_cstruct(Cs),
+ lists:foreach(fun do_create_table/1, CsList)
+ end,
+ ok.
+
+do_create_table(Cs) ->
+ {_Mod, _Tid, Ts} = get_tid_ts_and_lock(schema, none),
+ Store = Ts#tidstore.store,
+ do_insert_schema_ops(Store, make_create_table(Cs)).
+
+make_create_table(Cs) ->
+ Tab = Cs#cstruct.name,
+ verify('EXIT', element(1, ?catch_val({Tab, cstruct})),
+ {already_exists, Tab}),
+ unsafe_make_create_table(Cs).
+
+% unsafe_do_create_table(Cs) ->
+% {_Mod, Tid, Ts} = get_tid_ts_and_lock(schema, none),
+% Store = Ts#tidstore.store,
+% do_insert_schema_ops(Store, unsafe_make_create_table(Cs)).
+
+unsafe_make_create_table(Cs) ->
+ {_Mod, Tid, Ts} = get_tid_ts_and_lock(schema, none),
+ verify_cstruct(Cs),
+ Tab = Cs#cstruct.name,
+
+ %% Check that we have all disc replica nodes running
+ DiscNodes = Cs#cstruct.disc_copies ++ Cs#cstruct.disc_only_copies,
+ RunningNodes = val({current, db_nodes}),
+ CheckDisc = fun(N) ->
+ verify(true, lists:member(N, RunningNodes),
+ {not_active, Tab, N})
+ end,
+ lists:foreach(CheckDisc, DiscNodes),
+
+ Nodes = mnesia_lib:intersect(mnesia_lib:cs_to_nodes(Cs), RunningNodes),
+ Store = Ts#tidstore.store,
+ mnesia_locker:wlock_no_exist(Tid, Store, Tab, Nodes),
+ [{op, create_table, cs2list(Cs)}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Delete a table entirely on all nodes.
+
+delete_table(Tab) ->
+ schema_transaction(fun() -> do_delete_table(Tab) end).
+
+do_delete_table(schema) ->
+ mnesia:abort({bad_type, schema});
+do_delete_table(Tab) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ ensure_writable(schema),
+ insert_schema_ops(TidTs, make_delete_table(Tab, whole_table)).
+
+make_delete_table(Tab, Mode) ->
+ case Mode of
+ whole_table ->
+ case val({Tab, frag_properties}) of
+ [] ->
+ [make_delete_table2(Tab)];
+ _Props ->
+ %% Check if it is a base table
+ mnesia_frag:lookup_frag_hash(Tab),
+
+ %% Check for foreigners
+ F = mnesia_frag:lookup_foreigners(Tab),
+ verify([], F, {combine_error, Tab, "Too many foreigners", F}),
+ [make_delete_table2(T) || T <- mnesia_frag:frag_names(Tab)]
+ end;
+ single_frag ->
+ [make_delete_table2(Tab)]
+ end.
+
+make_delete_table2(Tab) ->
+ get_tid_ts_and_lock(Tab, write),
+ Cs = val({Tab, cstruct}),
+ ensure_active(Cs),
+ ensure_writable(Tab),
+ {op, delete_table, cs2list(Cs)}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Change fragmentation of a table
+
+change_table_frag(Tab, Change) ->
+ schema_transaction(fun() -> do_change_table_frag(Tab, Change) end).
+
+do_change_table_frag(Tab, Change) when atom(Tab), Tab /= schema ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ Ops = mnesia_frag:change_table_frag(Tab, Change),
+ [insert_schema_ops(TidTs, Op) || Op <- Ops],
+ ok;
+do_change_table_frag(Tab, _Change) ->
+ mnesia:abort({bad_type, Tab}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Clear a table
+
+clear_table(Tab) ->
+ schema_transaction(fun() -> do_clear_table(Tab) end).
+
+do_clear_table(schema) ->
+ mnesia:abort({bad_type, schema});
+do_clear_table(Tab) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ get_tid_ts_and_lock(Tab, write),
+ insert_schema_ops(TidTs, make_clear_table(Tab)).
+
+make_clear_table(Tab) ->
+ ensure_writable(schema),
+ Cs = val({Tab, cstruct}),
+ ensure_active(Cs),
+ ensure_writable(Tab),
+ [{op, clear_table, cs2list(Cs)}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+add_table_copy(Tab, Node, Storage) ->
+ schema_transaction(fun() -> do_add_table_copy(Tab, Node, Storage) end).
+
+do_add_table_copy(Tab, Node, Storage) when atom(Tab), atom(Node) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ insert_schema_ops(TidTs, make_add_table_copy(Tab, Node, Storage));
+do_add_table_copy(Tab,Node,_) ->
+ mnesia:abort({badarg, Tab, Node}).
+
+make_add_table_copy(Tab, Node, Storage) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ Ns = mnesia_lib:cs_to_nodes(Cs),
+ verify(false, lists:member(Node, Ns), {already_exists, Tab, Node}),
+ Cs2 = new_cs(Cs, Node, Storage, add),
+ verify_cstruct(Cs2),
+
+ %% Check storage and if node is running
+ IsRunning = lists:member(Node, val({current, db_nodes})),
+ if
+ Storage == unknown ->
+ mnesia:abort({badarg, Tab, Storage});
+ Tab == schema ->
+ if
+ Storage /= ram_copies ->
+ mnesia:abort({badarg, Tab, Storage});
+ IsRunning == true ->
+ mnesia:abort({already_exists, Tab, Node});
+ true ->
+ ignore
+ end;
+ Storage == ram_copies ->
+ ignore;
+ IsRunning == true ->
+ ignore;
+ IsRunning == false ->
+ mnesia:abort({not_active, schema, Node})
+ end,
+ [{op, add_table_copy, Storage, Node, cs2list(Cs2)}].
+
+del_table_copy(Tab, Node) ->
+ schema_transaction(fun() -> do_del_table_copy(Tab, Node) end).
+
+do_del_table_copy(Tab, Node) when atom(Node) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+%% get_tid_ts_and_lock(Tab, write),
+ insert_schema_ops(TidTs, make_del_table_copy(Tab, Node));
+do_del_table_copy(Tab, Node) ->
+ mnesia:abort({badarg, Tab, Node}).
+
+make_del_table_copy(Tab, Node) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ Storage = mnesia_lib:schema_cs_to_storage_type(Node, Cs),
+ Cs2 = new_cs(Cs, Node, Storage, del),
+ case mnesia_lib:cs_to_nodes(Cs2) of
+ [] when Tab == schema ->
+ mnesia:abort({combine_error, Tab, "Last replica"});
+ [] ->
+ ensure_active(Cs),
+ dbg_out("Last replica deleted in table ~p~n", [Tab]),
+ make_delete_table(Tab, whole_table);
+ _ when Tab == schema ->
+ ensure_active(Cs2),
+ ensure_not_active(Tab, Node),
+ verify_cstruct(Cs2),
+ Ops = remove_node_from_tabs(val({schema, tables}), Node),
+ [{op, del_table_copy, ram_copies, Node, cs2list(Cs2)} | Ops];
+ _ ->
+ ensure_active(Cs),
+ verify_cstruct(Cs2),
+ [{op, del_table_copy, Storage, Node, cs2list(Cs2)}]
+ end.
+
+remove_node_from_tabs([], _Node) ->
+ [];
+remove_node_from_tabs([schema|Rest], Node) ->
+ remove_node_from_tabs(Rest, Node);
+remove_node_from_tabs([Tab|Rest], Node) ->
+ {Cs, IsFragModified} =
+ mnesia_frag:remove_node(Node, incr_version(val({Tab, cstruct}))),
+ case mnesia_lib:schema_cs_to_storage_type(Node, Cs) of
+ unknown ->
+ case IsFragModified of
+ true ->
+ [{op, change_table_frag, {del_node, Node}, cs2list(Cs)} |
+ remove_node_from_tabs(Rest, Node)];
+ false ->
+ remove_node_from_tabs(Rest, Node)
+ end;
+ Storage ->
+ Cs2 = new_cs(Cs, Node, Storage, del),
+ case mnesia_lib:cs_to_nodes(Cs2) of
+ [] ->
+ [{op, delete_table, cs2list(Cs)} |
+ remove_node_from_tabs(Rest, Node)];
+ _Ns ->
+ verify_cstruct(Cs2),
+ [{op, del_table_copy, ram_copies, Node, cs2list(Cs2)}|
+ remove_node_from_tabs(Rest, Node)]
+ end
+ end.
+
+new_cs(Cs, Node, ram_copies, add) ->
+ Cs#cstruct{ram_copies = opt_add(Node, Cs#cstruct.ram_copies)};
+new_cs(Cs, Node, disc_copies, add) ->
+ Cs#cstruct{disc_copies = opt_add(Node, Cs#cstruct.disc_copies)};
+new_cs(Cs, Node, disc_only_copies, add) ->
+ Cs#cstruct{disc_only_copies = opt_add(Node, Cs#cstruct.disc_only_copies)};
+new_cs(Cs, Node, ram_copies, del) ->
+ Cs#cstruct{ram_copies = lists:delete(Node , Cs#cstruct.ram_copies)};
+new_cs(Cs, Node, disc_copies, del) ->
+ Cs#cstruct{disc_copies = lists:delete(Node , Cs#cstruct.disc_copies)};
+new_cs(Cs, Node, disc_only_copies, del) ->
+ Cs#cstruct{disc_only_copies =
+ lists:delete(Node , Cs#cstruct.disc_only_copies)};
+new_cs(Cs, _Node, Storage, _Op) ->
+ mnesia:abort({badarg, Cs#cstruct.name, Storage}).
+
+
+opt_add(N, L) -> [N | lists:delete(N, L)].
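+%% e.g. opt_add(b, [a, b, c]) -> [b, a, c]: N ends up first and any earlier
+%% occurrence is dropped, so the list stays free of duplicates.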
+
+move_table(Tab, FromNode, ToNode) ->
+ schema_transaction(fun() -> do_move_table(Tab, FromNode, ToNode) end).
+
+do_move_table(schema, _FromNode, _ToNode) ->
+ mnesia:abort({bad_type, schema});
+do_move_table(Tab, FromNode, ToNode) when atom(FromNode), atom(ToNode) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ insert_schema_ops(TidTs, make_move_table(Tab, FromNode, ToNode));
+do_move_table(Tab, FromNode, ToNode) ->
+ mnesia:abort({badarg, Tab, FromNode, ToNode}).
+
+make_move_table(Tab, FromNode, ToNode) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ Ns = mnesia_lib:cs_to_nodes(Cs),
+ verify(false, lists:member(ToNode, Ns), {already_exists, Tab, ToNode}),
+ verify(true, lists:member(FromNode, val({Tab, where_to_write})),
+ {not_active, Tab, FromNode}),
+ verify(false, val({Tab,local_content}),
+ {"Cannot move table with local content", Tab}),
+ ensure_active(Cs),
+ Running = val({current, db_nodes}),
+ Storage = mnesia_lib:schema_cs_to_storage_type(FromNode, Cs),
+ verify(true, lists:member(ToNode, Running), {not_active, schema, ToNode}),
+
+ Cs2 = new_cs(Cs, ToNode, Storage, add),
+ Cs3 = new_cs(Cs2, FromNode, Storage, del),
+ verify_cstruct(Cs3),
+ [{op, add_table_copy, Storage, ToNode, cs2list(Cs2)},
+ {op, sync_trans},
+ {op, del_table_copy, Storage, FromNode, cs2list(Cs3)}].
+
+%% end of functions to add and delete table copies on nodes
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+
+change_table_copy_type(Tab, Node, ToS) ->
+ schema_transaction(fun() -> do_change_table_copy_type(Tab, Node, ToS) end).
+
+do_change_table_copy_type(Tab, Node, ToS) when atom(Node) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ get_tid_ts_and_lock(Tab, write), % ensure global sync
+ %% get_tid_ts_and_lock(Tab, read),
+ insert_schema_ops(TidTs, make_change_table_copy_type(Tab, Node, ToS));
+do_change_table_copy_type(Tab, Node, _ToS) ->
+ mnesia:abort({badarg, Tab, Node}).
+
+make_change_table_copy_type(Tab, Node, unknown) ->
+ make_del_table_copy(Tab, Node);
+make_change_table_copy_type(Tab, Node, ToS) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ FromS = mnesia_lib:storage_type_at_node(Node, Tab),
+
+ case compare_storage_type(false, FromS, ToS) of
+ {same, _} ->
+ mnesia:abort({already_exists, Tab, Node, ToS});
+ {diff, _} ->
+ ignore;
+ incompatible ->
+ ensure_active(Cs)
+ end,
+
+ Cs2 = new_cs(Cs, Node, FromS, del),
+ Cs3 = new_cs(Cs2, Node, ToS, add),
+ verify_cstruct(Cs3),
+
+ if
+ FromS == unknown ->
+ make_add_table_copy(Tab, Node, ToS);
+ true ->
+ ignore
+ end,
+
+ [{op, change_table_copy_type, Node, FromS, ToS, cs2list(Cs3)}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% change index functions ....
+%% Pos has already been incremented by 1 in both of these functions
+
+add_table_index(Tab, Pos) ->
+ schema_transaction(fun() -> do_add_table_index(Tab, Pos) end).
+
+do_add_table_index(schema, _Attr) ->
+ mnesia:abort({bad_type, schema});
+do_add_table_index(Tab, Attr) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ get_tid_ts_and_lock(Tab, read),
+ Pos = attr_tab_to_pos(Tab, Attr),
+ insert_schema_ops(TidTs, make_add_table_index(Tab, Pos)).
+
+make_add_table_index(Tab, Pos) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ ensure_active(Cs),
+ Ix = Cs#cstruct.index,
+ verify(false, lists:member(Pos, Ix), {already_exists, Tab, Pos}),
+ Ix2 = lists:sort([Pos | Ix]),
+ Cs2 = Cs#cstruct{index = Ix2},
+ verify_cstruct(Cs2),
+ [{op, add_index, Pos, cs2list(Cs2)}].
+
+del_table_index(Tab, Pos) ->
+ schema_transaction(fun() -> do_del_table_index(Tab, Pos) end).
+
+do_del_table_index(schema, _Attr) ->
+ mnesia:abort({bad_type, schema});
+do_del_table_index(Tab, Attr) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ get_tid_ts_and_lock(Tab, read),
+ Pos = attr_tab_to_pos(Tab, Attr),
+ insert_schema_ops(TidTs, make_del_table_index(Tab, Pos)).
+
+make_del_table_index(Tab, Pos) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ ensure_active(Cs),
+ Ix = Cs#cstruct.index,
+ verify(true, lists:member(Pos, Ix), {no_exists, Tab, Pos}),
+ Cs2 = Cs#cstruct{index = lists:delete(Pos, Ix)},
+ verify_cstruct(Cs2),
+ [{op, del_index, Pos, cs2list(Cs2)}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+add_snmp(Tab, Ustruct) ->
+ schema_transaction(fun() -> do_add_snmp(Tab, Ustruct) end).
+
+do_add_snmp(schema, _Ustruct) ->
+ mnesia:abort({bad_type, schema});
+do_add_snmp(Tab, Ustruct) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ get_tid_ts_and_lock(Tab, read),
+ insert_schema_ops(TidTs, make_add_snmp(Tab, Ustruct)).
+
+make_add_snmp(Tab, Ustruct) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ ensure_active(Cs),
+ verify([], Cs#cstruct.snmp, {already_exists, Tab, snmp}),
+ Error = {badarg, Tab, snmp, Ustruct},
+ verify(true, mnesia_snmp_hook:check_ustruct(Ustruct), Error),
+ Cs2 = Cs#cstruct{snmp = Ustruct},
+ verify_cstruct(Cs2),
+ [{op, add_snmp, Ustruct, cs2list(Cs2)}].
+
+del_snmp(Tab) ->
+ schema_transaction(fun() -> do_del_snmp(Tab) end).
+
+do_del_snmp(schema) ->
+ mnesia:abort({bad_type, schema});
+do_del_snmp(Tab) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ get_tid_ts_and_lock(Tab, read),
+ insert_schema_ops(TidTs, make_del_snmp(Tab)).
+
+make_del_snmp(Tab) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ ensure_active(Cs),
+ Cs2 = Cs#cstruct{snmp = []},
+ verify_cstruct(Cs2),
+ [{op, del_snmp, cs2list(Cs2)}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+
+transform_table(Tab, Fun, NewAttrs, NewRecName)
+ when function(Fun), list(NewAttrs), atom(NewRecName) ->
+ schema_transaction(fun() -> do_transform_table(Tab, Fun, NewAttrs, NewRecName) end);
+
+transform_table(Tab, ignore, NewAttrs, NewRecName)
+ when list(NewAttrs), atom(NewRecName) ->
+ schema_transaction(fun() -> do_transform_table(Tab, ignore, NewAttrs, NewRecName) end);
+
+transform_table(Tab, Fun, NewAttrs, NewRecName) ->
+ {aborted,{bad_type, Tab, Fun, NewAttrs, NewRecName}}.
+
+do_transform_table(schema, _Fun, _NewAttrs, _NewRecName) ->
+ mnesia:abort({bad_type, schema});
+do_transform_table(Tab, Fun, NewAttrs, NewRecName) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ get_tid_ts_and_lock(Tab, write),
+ insert_schema_ops(TidTs, make_transform(Tab, Fun, NewAttrs, NewRecName)).
+
+make_transform(Tab, Fun, NewAttrs, NewRecName) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ ensure_active(Cs),
+ ensure_writable(Tab),
+ case mnesia_lib:val({Tab, index}) of
+ [] ->
+ Cs2 = Cs#cstruct{attributes = NewAttrs, record_name = NewRecName},
+ verify_cstruct(Cs2),
+ [{op, transform, Fun, cs2list(Cs2)}];
+ PosList ->
+ DelIdx = fun(Pos, Ncs) ->
+ Ix = Ncs#cstruct.index,
+ Ncs1 = Ncs#cstruct{index = lists:delete(Pos, Ix)},
+ Op = {op, del_index, Pos, cs2list(Ncs1)},
+ {Op, Ncs1}
+ end,
+ AddIdx = fun(Pos, Ncs) ->
+ Ix = Ncs#cstruct.index,
+ Ix2 = lists:sort([Pos | Ix]),
+ Ncs1 = Ncs#cstruct{index = Ix2},
+ Op = {op, add_index, Pos, cs2list(Ncs1)},
+ {Op, Ncs1}
+ end,
+ {DelOps, Cs1} = lists:mapfoldl(DelIdx, Cs, PosList),
+ Cs2 = Cs1#cstruct{attributes = NewAttrs, record_name = NewRecName},
+ {AddOps, Cs3} = lists:mapfoldl(AddIdx, Cs2, PosList),
+ verify_cstruct(Cs3),
+ lists:flatten([DelOps, {op, transform, Fun, cs2list(Cs2)}, AddOps])
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+
+change_table_access_mode(Tab, Mode) ->
+ schema_transaction(fun() -> do_change_table_access_mode(Tab, Mode) end).
+
+do_change_table_access_mode(Tab, Mode) ->
+ {_Mod, Tid, Ts} = get_tid_ts_and_lock(schema, write),
+ Store = Ts#tidstore.store,
+ mnesia_locker:wlock_no_exist(Tid, Store, schema, val({schema, active_replicas})),
+ mnesia_locker:wlock_no_exist(Tid, Store, Tab, val({Tab, active_replicas})),
+ do_insert_schema_ops(Store, make_change_table_access_mode(Tab, Mode)).
+
+make_change_table_access_mode(Tab, Mode) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ ensure_active(Cs),
+ OldMode = Cs#cstruct.access_mode,
+ verify(false, OldMode == Mode, {already_exists, Tab, Mode}),
+ Cs2 = Cs#cstruct{access_mode = Mode},
+ verify_cstruct(Cs2),
+ [{op, change_table_access_mode, cs2list(Cs2), OldMode, Mode}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+change_table_load_order(Tab, LoadOrder) ->
+ schema_transaction(fun() -> do_change_table_load_order(Tab, LoadOrder) end).
+
+do_change_table_load_order(schema, _LoadOrder) ->
+ mnesia:abort({bad_type, schema});
+do_change_table_load_order(Tab, LoadOrder) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ get_tid_ts_and_lock(Tab, none),
+ insert_schema_ops(TidTs, make_change_table_load_order(Tab, LoadOrder)).
+
+make_change_table_load_order(Tab, LoadOrder) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ ensure_active(Cs),
+ OldLoadOrder = Cs#cstruct.load_order,
+ Cs2 = Cs#cstruct{load_order = LoadOrder},
+ verify_cstruct(Cs2),
+ [{op, change_table_load_order, cs2list(Cs2), OldLoadOrder, LoadOrder}].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+write_table_property(Tab, Prop) when tuple(Prop), size(Prop) >= 1 ->
+ schema_transaction(fun() -> do_write_table_property(Tab, Prop) end);
+write_table_property(Tab, Prop) ->
+ {aborted, {bad_type, Tab, Prop}}.
+do_write_table_property(Tab, Prop) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ {_, _, Ts} = TidTs,
+ Store = Ts#tidstore.store,
+ case change_prop_in_existing_op(Tab, Prop, write_property, Store) of
+ true ->
+ dbg_out("change_prop_in_existing_op"
+ "(~p,~p,write_property,Store) -> true~n",
+ [Tab,Prop]),
+ %% we have merged the table prop into the create_table op
+ ok;
+ false ->
+ dbg_out("change_prop_in_existing_op"
+ "(~p,~p,write_property,Store) -> false~n",
+ [Tab,Prop]),
+ %% this must be an existing table
+ get_tid_ts_and_lock(Tab, none),
+ insert_schema_ops(TidTs, make_write_table_properties(Tab, [Prop]))
+ end.
+
+make_write_table_properties(Tab, Props) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ ensure_active(Cs),
+ make_write_table_properties(Tab, Props, Cs).
+
+make_write_table_properties(Tab, [Prop | Props], Cs) ->
+ OldProps = Cs#cstruct.user_properties,
+ PropKey = element(1, Prop),
+ DelProps = lists:keydelete(PropKey, 1, OldProps),
+ MergedProps = lists:merge(DelProps, [Prop]),
+ Cs2 = Cs#cstruct{user_properties = MergedProps},
+ verify_cstruct(Cs2),
+ [{op, write_property, cs2list(Cs2), Prop} |
+ make_write_table_properties(Tab, Props, Cs2)];
+make_write_table_properties(_Tab, [], _Cs) ->
+ [].
+
+change_prop_in_existing_op(Tab, Prop, How, Store) ->
+ Ops = ets:match_object(Store, '_'),
+ case update_existing_op(Ops, Tab, Prop, How, []) of
+ {true, Ops1} ->
+ ets:match_delete(Store, '_'),
+ [ets:insert(Store, Op) || Op <- Ops1],
+ true;
+ false ->
+ false
+ end.
+
+update_existing_op([{op, Op, L = [{name,Tab}|_], _OldProp}|Ops],
+ Tab, Prop, How, Acc) when Op == write_property;
+ Op == delete_property ->
+ %% Apparently, mnesia_dumper doesn't care about OldProp here -- just L,
+    %% so we will throw away OldProp (not that it matters...) and merge
+    %% Prop into L (element 3) instead.
+ L1 = insert_prop(Prop, L, How),
+ NewOp = {op, How, L1, Prop},
+ {true, lists:reverse(Acc) ++ [NewOp|Ops]};
+update_existing_op([Op = {op, create_table, L}|Ops], Tab, Prop, How, Acc) ->
+ case lists:keysearch(name, 1, L) of
+ {value, {_, Tab}} ->
+ %% Tab is being created here -- insert Prop into L
+ L1 = insert_prop(Prop, L, How),
+ {true, lists:reverse(Acc) ++ [{op, create_table, L1}|Ops]};
+ _ ->
+ update_existing_op(Ops, Tab, Prop, How, [Op|Acc])
+ end;
+update_existing_op([Op|Ops], Tab, Prop, How, Acc) ->
+ update_existing_op(Ops, Tab, Prop, How, [Op|Acc]);
+update_existing_op([], _, _, _, _) ->
+ false.
+
+%% perhaps a misnomer. How could also be delete_property... never mind.
+%% Returns the modified L.
+insert_prop(Prop, L, How) ->
+ Prev = find_props(L),
+ MergedProps = merge_with_previous(How, Prop, Prev),
+ replace_props(L, MergedProps).
+
+
+find_props([{user_properties, P}|_]) -> P;
+find_props([_H|T]) -> find_props(T).
+%% we shouldn't reach []
+
+replace_props([{user_properties, _}|T], P) -> [{user_properties, P}|T];
+replace_props([H|T], P) -> [H|replace_props(T, P)].
+%% again, we shouldn't reach []
+
+merge_with_previous(write_property, Prop, Prev) ->
+ Key = element(1, Prop),
+ Prev1 = lists:keydelete(Key, 1, Prev),
+ lists:sort([Prop|Prev1]);
+merge_with_previous(delete_property, PropKey, Prev) ->
+ lists:keydelete(PropKey, 1, Prev).
+
+delete_table_property(Tab, PropKey) ->
+ schema_transaction(fun() -> do_delete_table_property(Tab, PropKey) end).
+
+do_delete_table_property(Tab, PropKey) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ {_, _, Ts} = TidTs,
+ Store = Ts#tidstore.store,
+ case change_prop_in_existing_op(Tab, PropKey, delete_property, Store) of
+ true ->
+ dbg_out("change_prop_in_existing_op"
+ "(~p,~p,delete_property,Store) -> true~n",
+ [Tab,PropKey]),
+ %% we have merged the table prop into the create_table op
+ ok;
+ false ->
+ dbg_out("change_prop_in_existing_op"
+ "(~p,~p,delete_property,Store) -> false~n",
+ [Tab,PropKey]),
+ %% this must be an existing table
+ get_tid_ts_and_lock(Tab, none),
+ insert_schema_ops(TidTs,
+ make_delete_table_properties(Tab, [PropKey]))
+ end.
+
+make_delete_table_properties(Tab, PropKeys) ->
+ ensure_writable(schema),
+ Cs = incr_version(val({Tab, cstruct})),
+ ensure_active(Cs),
+ make_delete_table_properties(Tab, PropKeys, Cs).
+
+make_delete_table_properties(Tab, [PropKey | PropKeys], Cs) ->
+ OldProps = Cs#cstruct.user_properties,
+ Props = lists:keydelete(PropKey, 1, OldProps),
+ Cs2 = Cs#cstruct{user_properties = Props},
+ verify_cstruct(Cs2),
+ [{op, delete_property, cs2list(Cs2), PropKey} |
+ make_delete_table_properties(Tab, PropKeys, Cs2)];
+make_delete_table_properties(_Tab, [], _Cs) ->
+ [].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% Ensure that the transaction can be committed even
+%% if the node crashes and Mnesia is restarted
+prepare_commit(Tid, Commit, WaitFor) ->
+ case Commit#commit.schema_ops of
+ [] ->
+ {false, Commit, optional};
+ OrigOps ->
+ {Modified, Ops, DumperMode} =
+ prepare_ops(Tid, OrigOps, WaitFor, false, [], optional),
+ InitBy = schema_prepare,
+ GoodRes = {Modified,
+ Commit#commit{schema_ops = lists:reverse(Ops)},
+ DumperMode},
+ case DumperMode of
+ optional ->
+ dbg_out("Transaction log dump skipped (~p): ~w~n",
+ [DumperMode, InitBy]);
+ mandatory ->
+ case mnesia_controller:sync_dump_log(InitBy) of
+ dumped ->
+ GoodRes;
+ {error, Reason} ->
+ mnesia:abort(Reason)
+ end
+ end,
+ case Ops of
+ [] ->
+ ignore;
+ _ ->
+                %% We need to grab a dumper lock here; the log must not
+                %% be dumped by others during the schema commit phase.
+ mnesia_controller:wait_for_schema_commit_lock()
+ end,
+ GoodRes
+ end.
+
+prepare_ops(Tid, [Op | Ops], WaitFor, Changed, Acc, DumperMode) ->
+ case prepare_op(Tid, Op, WaitFor) of
+ {true, mandatory} ->
+ prepare_ops(Tid, Ops, WaitFor, Changed, [Op | Acc], mandatory);
+ {true, optional} ->
+ prepare_ops(Tid, Ops, WaitFor, Changed, [Op | Acc], DumperMode);
+ {true, Ops2, mandatory} ->
+ prepare_ops(Tid, Ops, WaitFor, true, Ops2 ++ Acc, mandatory);
+ {true, Ops2, optional} ->
+ prepare_ops(Tid, Ops, WaitFor, true, Ops2 ++ Acc, DumperMode);
+ {false, mandatory} ->
+ prepare_ops(Tid, Ops, WaitFor, true, Acc, mandatory);
+ {false, optional} ->
+ prepare_ops(Tid, Ops, WaitFor, true, Acc, DumperMode)
+ end;
+prepare_ops(_Tid, [], _WaitFor, Changed, Acc, DumperMode) ->
+ {Changed, Acc, DumperMode}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Prepare for commit
+%% prepare_op/3 returns {true, DumperMode} if Op should be included unmodified,
+%%              {true, Ops, DumperMode} if Ops should replace Op (modified),
+%%              {false, DumperMode} if Op should NOT be included (modified away)
+%%
+prepare_op(_Tid, {op, rec, unknown, Rec}, _WaitFor) ->
+ {{Tab, Key}, Items, _Op} = Rec,
+ case val({Tab, storage_type}) of
+ unknown ->
+ {false, optional};
+ Storage ->
+ mnesia_tm:prepare_snmp(Tab, Key, Items), % May exit
+ {true, [{op, rec, Storage, Rec}], optional}
+ end;
+
+prepare_op(_Tid, {op, announce_im_running, _Node, SchemaDef, Running, RemoteRunning}, _WaitFor) ->
+ SchemaCs = list2cs(SchemaDef),
+ case lists:member(node(), Running) of
+ true ->
+ announce_im_running(RemoteRunning -- Running, SchemaCs);
+ false ->
+ announce_im_running(Running -- RemoteRunning, SchemaCs)
+ end,
+ {false, optional};
+
+prepare_op(_Tid, {op, sync_trans}, {part, CoordPid}) ->
+ CoordPid ! {sync_trans, self()},
+ receive
+ {sync_trans, CoordPid} ->
+ {false, optional};
+ Else ->
+ mnesia_lib:verbose("sync_op terminated due to ~p~n", [Else]),
+ mnesia:abort(Else)
+ end;
+
+prepare_op(_Tid, {op, sync_trans}, {coord, Nodes}) ->
+ case receive_sync(Nodes, []) of
+ {abort, Reason} ->
+ mnesia_lib:verbose("sync_op terminated due to ~p~n", [Reason]),
+ mnesia:abort(Reason);
+ Pids ->
+ [Pid ! {sync_trans, self()} || Pid <- Pids],
+ {false, optional}
+ end;
+prepare_op(Tid, {op, create_table, TabDef}, _WaitFor) ->
+ Cs = list2cs(TabDef),
+ Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
+ UseDir = mnesia_monitor:use_dir(),
+ Tab = Cs#cstruct.name,
+ case Storage of
+ disc_copies when UseDir == false ->
+ UseDirReason = {bad_type, Tab, Storage, node()},
+ mnesia:abort(UseDirReason);
+ disc_only_copies when UseDir == false ->
+ UseDirReason = {bad_type, Tab, Storage, node()},
+ mnesia:abort(UseDirReason);
+ ram_copies ->
+ create_ram_table(Tab, Cs#cstruct.type),
+ insert_cstruct(Tid, Cs, false),
+ {true, optional};
+ disc_copies ->
+ create_ram_table(Tab, Cs#cstruct.type),
+ create_disc_table(Tab),
+ insert_cstruct(Tid, Cs, false),
+ {true, optional};
+ disc_only_copies ->
+ create_disc_only_table(Tab,Cs#cstruct.type),
+ insert_cstruct(Tid, Cs, false),
+ {true, optional};
+ unknown -> %% No replica on this node
+ insert_cstruct(Tid, Cs, false),
+ {true, optional}
+ end;
+
+prepare_op(Tid, {op, add_table_copy, Storage, Node, TabDef}, _WaitFor) ->
+ Cs = list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+
+ if
+ Tab == schema ->
+ {true, optional}; % Nothing to prepare
+ Node == node() ->
+ case mnesia_lib:val({schema, storage_type}) of
+ ram_copies when Storage /= ram_copies ->
+ Error = {combine_error, Tab, "has no disc", Node},
+ mnesia:abort(Error);
+ _ ->
+ ok
+ end,
+ %% Tables are created by mnesia_loader get_network code
+ insert_cstruct(Tid, Cs, true),
+ case mnesia_controller:get_network_copy(Tab, Cs) of
+ {loaded, ok} ->
+ {true, optional};
+ {not_loaded, ErrReason} ->
+ Reason = {system_limit, Tab, {Node, ErrReason}},
+ mnesia:abort(Reason)
+ end;
+ Node /= node() ->
+	    %% Verify that the ram table has not been dumped to disc
+ if
+ Storage /= ram_copies ->
+ case mnesia_lib:schema_cs_to_storage_type(node(), Cs) of
+ ram_copies ->
+ Dat = mnesia_lib:tab2dcd(Tab),
+ case mnesia_lib:exists(Dat) of
+ true ->
+ mnesia:abort({combine_error, Tab, Storage,
+ "Table dumped to disc", node()});
+ false ->
+ ok
+ end;
+ _ ->
+ ok
+ end;
+ true ->
+ ok
+ end,
+ insert_cstruct(Tid, Cs, true),
+ {true, optional}
+ end;
+
+prepare_op(Tid, {op, del_table_copy, _Storage, Node, TabDef}, _WaitFor) ->
+ Cs = list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+
+ if
+ %% Schema table lock is always required to run a schema op.
+	%% No need to lock it.
+ node(Tid#tid.pid) == node(), Tab /= schema ->
+ Pid = spawn_link(?MODULE, lock_del_table, [Tab, Node, Cs, self()]),
+ receive
+ {Pid, updated} ->
+ {true, optional};
+ {Pid, FailReason} ->
+ mnesia:abort(FailReason);
+ {'EXIT', Pid, Reason} ->
+ mnesia:abort(Reason)
+ end;
+ true ->
+ {true, optional}
+ end;
+
+prepare_op(_Tid, {op, change_table_copy_type, N, FromS, ToS, TabDef}, _WaitFor)
+ when N == node() ->
+ Cs = list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+
+ NotActive = mnesia_lib:not_active_here(Tab),
+
+ if
+ NotActive == true ->
+ mnesia:abort({not_active, Tab, node()});
+
+ Tab == schema ->
+ case {FromS, ToS} of
+ {ram_copies, disc_copies} ->
+ case mnesia:system_info(schema_location) of
+ opt_disc ->
+ ignore;
+ _ ->
+ mnesia:abort({combine_error, Tab, node(),
+ "schema_location must be opt_disc"})
+ end,
+ Dir = mnesia_lib:dir(),
+ case opt_create_dir(true, Dir) of
+ ok ->
+ purge_dir(Dir, []),
+ mnesia_log:purge_all_logs(),
+ set(use_dir, true),
+ mnesia_log:init(),
+ Ns = val({current, db_nodes}), %mnesia_lib:running_nodes(),
+ F = fun(U) -> mnesia_recover:log_mnesia_up(U) end,
+ lists:foreach(F, Ns),
+
+ mnesia_dumper:raw_named_dump_table(Tab, dmp),
+ mnesia_checkpoint:tm_change_table_copy_type(Tab, FromS, ToS);
+ {error, Reason} ->
+ mnesia:abort(Reason)
+ end;
+ {disc_copies, ram_copies} ->
+ Ltabs = val({schema, local_tables}) -- [schema],
+ Dtabs = [L || L <- Ltabs,
+ val({L, storage_type}) /= ram_copies],
+ verify([], Dtabs, {"Disc resident tables", Dtabs, N});
+ _ ->
+ mnesia:abort({combine_error, Tab, ToS})
+ end;
+
+ FromS == ram_copies ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ Dat = mnesia_lib:tab2dcd(Tab),
+ case mnesia_lib:exists(Dat) of
+ true ->
+ mnesia:abort({combine_error, Tab, node(),
+ "Table dump exists"});
+ false ->
+ case ToS of
+ disc_copies ->
+ mnesia_log:ets2dcd(Tab, dmp);
+ disc_only_copies ->
+ mnesia_dumper:raw_named_dump_table(Tab, dmp)
+ end,
+ mnesia_checkpoint:tm_change_table_copy_type(Tab, FromS, ToS)
+ end;
+ false ->
+ mnesia:abort({has_no_disc, node()})
+ end;
+
+ FromS == disc_copies, ToS == disc_only_copies ->
+ mnesia_dumper:raw_named_dump_table(Tab, dmp);
+ FromS == disc_only_copies ->
+ Type = Cs#cstruct.type,
+ create_ram_table(Tab, Type),
+ Datname = mnesia_lib:tab2dat(Tab),
+ Repair = mnesia_monitor:get_env(auto_repair),
+ case mnesia_lib:dets_to_ets(Tab, Tab, Datname, Type, Repair, no) of
+ loaded -> ok;
+ Reason ->
+ Err = "Failed to copy disc data to ram",
+ mnesia:abort({system_limit, Tab, {Err,Reason}})
+ end;
+ true ->
+ ignore
+ end,
+ {true, mandatory};
+
+prepare_op(_Tid, {op, change_table_copy_type, N, _FromS, _ToS, _TabDef}, _WaitFor)
+ when N /= node() ->
+ {true, mandatory};
+
+prepare_op(_Tid, {op, delete_table, _TabDef}, _WaitFor) ->
+ {true, mandatory};
+
+prepare_op(_Tid, {op, dump_table, unknown, TabDef}, _WaitFor) ->
+ Cs = list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ case lists:member(node(), Cs#cstruct.ram_copies) of
+ true ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ mnesia_log:ets2dcd(Tab, dmp),
+ Size = mnesia:table_info(Tab, size),
+ {true, [{op, dump_table, Size, TabDef}], optional};
+ false ->
+ mnesia:abort({has_no_disc, node()})
+ end;
+ false ->
+ {false, optional}
+ end;
+
+prepare_op(_Tid, {op, add_snmp, Ustruct, TabDef}, _WaitFor) ->
+ Cs = list2cs(TabDef),
+ case mnesia_lib:cs_to_storage_type(node(), Cs) of
+ unknown ->
+ {true, optional};
+ Storage ->
+ Tab = Cs#cstruct.name,
+ Stab = mnesia_snmp_hook:create_table(Ustruct, Tab, Storage),
+ mnesia_lib:set({Tab, {index, snmp}}, Stab),
+ {true, optional}
+ end;
+
+prepare_op(_Tid, {op, transform, ignore, _TabDef}, _WaitFor) ->
+ {true, mandatory}; %% Apply schema changes only.
+prepare_op(_Tid, {op, transform, Fun, TabDef}, _WaitFor) ->
+ Cs = list2cs(TabDef),
+ case mnesia_lib:cs_to_storage_type(node(), Cs) of
+ unknown ->
+ {true, mandatory};
+ Storage ->
+ Tab = Cs#cstruct.name,
+ RecName = Cs#cstruct.record_name,
+ Type = Cs#cstruct.type,
+ NewArity = length(Cs#cstruct.attributes) + 1,
+ mnesia_lib:db_fixtable(Storage, Tab, true),
+ Key = mnesia_lib:db_first(Tab),
+ Op = {op, transform, Fun, TabDef},
+ case catch transform_objs(Fun, Tab, RecName,
+ Key, NewArity, Storage, Type, [Op]) of
+ {'EXIT', Reason} ->
+ mnesia_lib:db_fixtable(Storage, Tab, false),
+ exit({"Bad transform function", Tab, Fun, node(), Reason});
+ Objs ->
+ mnesia_lib:db_fixtable(Storage, Tab, false),
+ {true, Objs, mandatory}
+ end
+ end;
+
+prepare_op(_Tid, _Op, _WaitFor) ->
+ {true, optional}.
+
+
+create_ram_table(Tab, Type) ->
+ Args = [{keypos, 2}, public, named_table, Type],
+ case mnesia_monitor:unsafe_mktab(Tab, Args) of
+ Tab ->
+ ok;
+ {error,Reason} ->
+ Err = "Failed to create ets table",
+ mnesia:abort({system_limit, Tab, {Err,Reason}})
+ end.
+create_disc_table(Tab) ->
+ File = mnesia_lib:tab2dcd(Tab),
+ file:delete(File),
+ FArg = [{file, File}, {name, {mnesia,create}},
+ {repair, false}, {mode, read_write}],
+ case mnesia_monitor:open_log(FArg) of
+ {ok,Log} ->
+ mnesia_monitor:unsafe_close_log(Log),
+ ok;
+ {error,Reason} ->
+ Err = "Failed to create disc table",
+ mnesia:abort({system_limit, Tab, {Err,Reason}})
+ end.
+create_disc_only_table(Tab,Type) ->
+ File = mnesia_lib:tab2dat(Tab),
+ file:delete(File),
+ Args = [{file, mnesia_lib:tab2dat(Tab)},
+ {type, mnesia_lib:disk_type(Tab, Type)},
+ {keypos, 2},
+ {repair, mnesia_monitor:get_env(auto_repair)}],
+ case mnesia_monitor:unsafe_open_dets(Tab, Args) of
+ {ok, _} ->
+ ok;
+ {error,Reason} ->
+ Err = "Failed to create disc table",
+ mnesia:abort({system_limit, Tab, {Err,Reason}})
+ end.
+
+
+receive_sync([], Pids) ->
+ Pids;
+receive_sync(Nodes, Pids) ->
+ receive
+ {sync_trans, Pid} ->
+ Node = node(Pid),
+ receive_sync(lists:delete(Node, Nodes), [Pid | Pids]);
+ Else ->
+ {abort, Else}
+ end.
+
+lock_del_table(Tab, Node, Cs, Father) ->
+ Ns = val({schema, active_replicas}),
+ Lock = fun() ->
+ mnesia:write_lock_table(Tab),
+ {Res, []} = rpc:multicall(Ns, ?MODULE, set_where_to_read, [Tab, Node, Cs]),
+ Filter = fun(ok) ->
+ false;
+ ({badrpc, {'EXIT', {undef, _}}}) ->
+				  %% This will be the case when we talk to nodes
+				  %% older than 3.8.2; they set where_to_read
+				  %% without taking a lock.
+ false;
+ (_) ->
+ true
+ end,
+ [] = lists:filter(Filter, Res),
+ ok
+ end,
+ case mnesia:transaction(Lock) of
+ {'atomic', ok} ->
+ Father ! {self(), updated};
+ {aborted, R} ->
+ Father ! {self(), R}
+ end,
+ unlink(Father),
+ exit(normal).
+
+set_where_to_read(Tab, Node, Cs) ->
+ case mnesia_lib:val({Tab, where_to_read}) of
+ Node ->
+ case Cs#cstruct.local_content of
+ true ->
+ ok;
+ false ->
+ mnesia_lib:set_remote_where_to_read(Tab, [Node]),
+ ok
+ end;
+ _ ->
+ ok
+ end.
+
+%% Build up the list in reverse order.
+transform_objs(_Fun, _Tab, _RT, '$end_of_table', _NewArity, _Storage, _Type, Acc) ->
+ Acc;
+transform_objs(Fun, Tab, RecName, Key, A, Storage, Type, Acc) ->
+ Objs = mnesia_lib:db_get(Tab, Key),
+ NextKey = mnesia_lib:db_next_key(Tab, Key),
+ Oid = {Tab, Key},
+ NewObjs = {Ws, Ds} = transform_obj(Tab, RecName, Key, Fun, Objs, A, Type, [], []),
+ if
+ NewObjs == {[], []} ->
+ transform_objs(Fun, Tab, RecName, NextKey, A, Storage, Type, Acc);
+ Type == bag ->
+ transform_objs(Fun, Tab, RecName, NextKey, A, Storage, Type,
+ [{op, rec, Storage, {Oid, Ws, write}},
+ {op, rec, Storage, {Oid, [Oid], delete}} | Acc]);
+ Ds == [] ->
+ %% Type is set or ordered_set, no need to delete the record first
+ transform_objs(Fun, Tab, RecName, NextKey, A, Storage, Type,
+ [{op, rec, Storage, {Oid, Ws, write}} | Acc]);
+ Ws == [] ->
+ transform_objs(Fun, Tab, RecName, NextKey, A, Storage, Type,
+ [{op, rec, Storage, {Oid, Ds, write}} | Acc]);
+ true ->
+ transform_objs(Fun, Tab, RecName, NextKey, A, Storage, Type,
+ [{op, rec, Storage, {Oid, Ws, write}},
+ {op, rec, Storage, {Oid, Ds, delete}} | Acc])
+ end.
+
+transform_obj(Tab, RecName, Key, Fun, [Obj|Rest], NewArity, Type, Ws, Ds) ->
+ NewObj = Fun(Obj),
+ if
+ size(NewObj) /= NewArity ->
+ exit({"Bad arity", Obj, NewObj});
+ NewObj == Obj ->
+ transform_obj(Tab, RecName, Key, Fun, Rest, NewArity, Type, Ws, Ds);
+ RecName == element(1, NewObj), Key == element(2, NewObj) ->
+ transform_obj(Tab, RecName, Key, Fun, Rest, NewArity,
+ Type, [NewObj | Ws], Ds);
+ NewObj == delete ->
+ case Type of
+ bag -> %% Just don't write that object
+ transform_obj(Tab, RecName, Key, Fun, Rest,
+ NewArity, Type, Ws, Ds);
+ _ ->
+ transform_obj(Tab, RecName, Key, Fun, Rest, NewArity,
+ Type, Ws, [NewObj | Ds])
+ end;
+ true ->
+ exit({"Bad key or Record Name", Obj, NewObj})
+ end;
+transform_obj(_Tab, _RecName, _Key, _Fun, [], _NewArity, _Type, Ws, Ds) ->
+ {lists:reverse(Ws), lists:reverse(Ds)}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Undo prepare of commit
+undo_prepare_commit(Tid, Commit) ->
+ case Commit#commit.schema_ops of
+ [] ->
+ ignore;
+ Ops ->
+	    %% Catch to allow failure; mnesia_controller may not be started
+ catch mnesia_controller:release_schema_commit_lock(),
+ undo_prepare_ops(Tid, Ops)
+ end,
+ Commit.
+
+%% Undo in reverse order
+undo_prepare_ops(Tid, [Op | Ops]) ->
+ case element(1, Op) of
+ TheOp when TheOp /= op, TheOp /= restore_op ->
+ undo_prepare_ops(Tid, Ops);
+ _ ->
+ undo_prepare_ops(Tid, Ops),
+ undo_prepare_op(Tid, Op)
+ end;
+undo_prepare_ops(_Tid, []) ->
+ [].
+
+undo_prepare_op(_Tid, {op, announce_im_running, _, _, Running, RemoteRunning}) ->
+ case lists:member(node(), Running) of
+ true ->
+ unannounce_im_running(RemoteRunning -- Running);
+ false ->
+ unannounce_im_running(Running -- RemoteRunning)
+ end;
+
+undo_prepare_op(_Tid, {op, sync_trans}) ->
+ ok;
+
+undo_prepare_op(Tid, {op, create_table, TabDef}) ->
+ Cs = list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ mnesia_lib:unset({Tab, create_table}),
+ delete_cstruct(Tid, Cs),
+ case mnesia_lib:cs_to_storage_type(node(), Cs) of
+ unknown ->
+ ok;
+ ram_copies ->
+ ram_delete_table(Tab, ram_copies);
+ disc_copies ->
+ ram_delete_table(Tab, disc_copies),
+ DcdFile = mnesia_lib:tab2dcd(Tab),
+ %% disc_delete_table(Tab, Storage),
+ file:delete(DcdFile);
+ disc_only_copies ->
+ mnesia_monitor:unsafe_close_dets(Tab),
+ Dat = mnesia_lib:tab2dat(Tab),
+ %% disc_delete_table(Tab, Storage),
+ file:delete(Dat)
+ end;
+
+undo_prepare_op(Tid, {op, add_table_copy, Storage, Node, TabDef}) ->
+ Cs = list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ if
+ Tab == schema ->
+ true; % Nothing to prepare
+ Node == node() ->
+ mnesia_checkpoint:tm_del_copy(Tab, Node),
+ mnesia_controller:unannounce_add_table_copy(Tab, Node),
+ if
+ Storage == disc_only_copies; Tab == schema ->
+ mnesia_monitor:close_dets(Tab),
+ file:delete(mnesia_lib:tab2dat(Tab));
+ true ->
+ file:delete(mnesia_lib:tab2dcd(Tab))
+ end,
+ ram_delete_table(Tab, Storage),
+ Cs2 = new_cs(Cs, Node, Storage, del),
+ insert_cstruct(Tid, Cs2, true); % Don't care about the version
+ Node /= node() ->
+ mnesia_controller:unannounce_add_table_copy(Tab, Node),
+ Cs2 = new_cs(Cs, Node, Storage, del),
+ insert_cstruct(Tid, Cs2, true) % Don't care about the version
+ end;
+
+undo_prepare_op(_Tid, {op, del_table_copy, _, Node, TabDef})
+ when Node == node() ->
+ Cs = list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ mnesia_lib:set({Tab, where_to_read}, Node);
+
+
+undo_prepare_op(_Tid, {op, change_table_copy_type, N, FromS, ToS, TabDef})
+ when N == node() ->
+ Cs = list2cs(TabDef),
+ Tab = Cs#cstruct.name,
+ mnesia_checkpoint:tm_change_table_copy_type(Tab, ToS, FromS),
+ Dmp = mnesia_lib:tab2dmp(Tab),
+
+ case {FromS, ToS} of
+ {ram_copies, disc_copies} when Tab == schema ->
+ file:delete(Dmp),
+ mnesia_log:purge_some_logs(),
+ set(use_dir, false);
+ {ram_copies, disc_copies} ->
+ file:delete(Dmp);
+ {ram_copies, disc_only_copies} ->
+ file:delete(Dmp);
+ {disc_only_copies, _} ->
+ ram_delete_table(Tab, ram_copies);
+ _ ->
+ ignore
+ end;
+
+undo_prepare_op(_Tid, {op, dump_table, _Size, TabDef}) ->
+ Cs = list2cs(TabDef),
+ case lists:member(node(), Cs#cstruct.ram_copies) of
+ true ->
+ Tab = Cs#cstruct.name,
+ Dmp = mnesia_lib:tab2dmp(Tab),
+ file:delete(Dmp);
+ false ->
+ ignore
+ end;
+
+undo_prepare_op(_Tid, {op, add_snmp, _Ustruct, TabDef}) ->
+ Cs = list2cs(TabDef),
+ case mnesia_lib:cs_to_storage_type(node(), Cs) of
+ unknown ->
+ true;
+ _Storage ->
+ Tab = Cs#cstruct.name,
+ case ?catch_val({Tab, {index, snmp}}) of
+ {'EXIT',_} ->
+ ignore;
+ Stab ->
+ mnesia_snmp_hook:delete_table(Tab, Stab),
+ mnesia_lib:unset({Tab, {index, snmp}})
+ end
+ end;
+
+undo_prepare_op(_Tid, _Op) ->
+ ignore.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+ram_delete_table(Tab, Storage) ->
+ case Storage of
+ unknown ->
+ ignore;
+ disc_only_copies ->
+ ignore;
+ _Else ->
+	    %% Delete possible index files and data.
+	    %% We must catch this since it will crash if no info
+	    %% has been set in mnesia_gvar.
+ catch mnesia_index:del_transient(Tab, Storage),
+ case ?catch_val({Tab, {index, snmp}}) of
+ {'EXIT', _} ->
+ ignore;
+ Etab ->
+ catch mnesia_snmp_hook:delete_table(Tab, Etab)
+ end,
+ catch ?ets_delete_table(Tab)
+ end.
+
+purge_dir(Dir, KeepFiles) ->
+ Suffixes = known_suffixes(),
+ purge_dir(Dir, KeepFiles, Suffixes).
+
+purge_dir(Dir, KeepFiles, Suffixes) ->
+ case dir_exists(Dir) of
+ true ->
+ {ok, AllFiles} = file:list_dir(Dir),
+ purge_known_files(AllFiles, KeepFiles, Dir, Suffixes);
+ false ->
+ ok
+ end.
+
+purge_tmp_files() ->
+ case mnesia_monitor:use_dir() of
+ true ->
+ Dir = mnesia_lib:dir(),
+ KeepFiles = [],
+ Exists = mnesia_lib:exists(mnesia_lib:tab2dat(schema)),
+ case Exists of
+ true ->
+ Suffixes = tmp_suffixes(),
+ purge_dir(Dir, KeepFiles, Suffixes);
+ false ->
+ %% Interrupted change of storage type
+ %% for schema table
+ Suffixes = known_suffixes(),
+ purge_dir(Dir, KeepFiles, Suffixes),
+ mnesia_lib:set(use_dir, false)
+ end;
+
+ false ->
+ ok
+ end.
+
+purge_known_files([File | Tail], KeepFiles, Dir, Suffixes) ->
+ case lists:member(File, KeepFiles) of
+ true ->
+ ignore;
+ false ->
+ case has_known_suffix(File, Suffixes, false) of
+ false ->
+ ignore;
+ true ->
+ AbsFile = filename:join([Dir, File]),
+ file:delete(AbsFile)
+ end
+ end,
+ purge_known_files(Tail, KeepFiles, Dir, Suffixes);
+purge_known_files([], _KeepFiles, _Dir, _Suffixes) ->
+ ok.
+
+has_known_suffix(_File, _Suffixes, true) ->
+ true;
+has_known_suffix(File, [Suffix | Tail], false) ->
+ has_known_suffix(File, Tail, lists:suffix(Suffix, File));
+has_known_suffix(_File, [], Bool) ->
+ Bool.
+
+known_suffixes() -> real_suffixes() ++ tmp_suffixes().
+
+real_suffixes() -> [".DAT", ".LOG", ".BUP", ".DCL", ".DCD"].
+
+tmp_suffixes() -> [".TMP", ".BUPTMP", ".RET", ".DMP"].
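+
+%% Illustration: purging with tmp_suffixes() removes leftovers such as
+%% "schema.DMP" or "FOO.TMP", while files with real_suffixes(), e.g. "FOO.DCD",
+%% survive; known_suffixes() wipes both kinds.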
+
+info() ->
+ Tabs = lists:sort(val({schema, tables})),
+ lists:foreach(fun(T) -> info(T) end, Tabs),
+ ok.
+
+info(Tab) ->
+ Props = get_table_properties(Tab),
+ io:format("-- Properties for ~w table --- ~n",[Tab]),
+ info2(Tab, Props).
+info2(Tab, [{cstruct, _V} | Tail]) -> % Ignore cstruct
+ info2(Tab, Tail);
+info2(Tab, [{frag_hash, _V} | Tail]) -> % Ignore frag_hash
+ info2(Tab, Tail);
+info2(Tab, [{P, V} | Tail]) ->
+ io:format("~-20w -> ~p~n",[P,V]),
+ info2(Tab, Tail);
+info2(_, []) ->
+ io:format("~n", []).
+
+get_table_properties(Tab) ->
+ case catch mnesia_lib:db_match_object(ram_copies,
+ mnesia_gvar, {{Tab, '_'}, '_'}) of
+ {'EXIT', _} ->
+ mnesia:abort({no_exists, Tab, all});
+ RawGvar ->
+ case [{Item, Val} || {{_Tab, Item}, Val} <- RawGvar] of
+ [] ->
+ [];
+ Gvar ->
+ Size = {size, mnesia:table_info(Tab, size)},
+ Memory = {memory, mnesia:table_info(Tab, memory)},
+ Master = {master_nodes, mnesia:table_info(Tab, master_nodes)},
+ lists:sort([Size, Memory, Master | Gvar])
+ end
+ end.
+
+%%%%%%%%%%% RESTORE %%%%%%%%%%%
+
+-record(r, {iter = schema,
+ module,
+ table_options = [],
+ default_op = clear_tables,
+ tables = [],
+ opaque,
+ insert_op = error_fun,
+ recs = error_recs
+ }).
+
+restore(Opaque) ->
+ restore(Opaque, [], mnesia_monitor:get_env(backup_module)).
+restore(Opaque, Args) when list(Args) ->
+ restore(Opaque, Args, mnesia_monitor:get_env(backup_module));
+restore(_Opaque, BadArg) ->
+ {aborted, {badarg, BadArg}}.
+restore(Opaque, Args, Module) when list(Args), atom(Module) ->
+ InitR = #r{opaque = Opaque, module = Module},
+ case catch lists:foldl(fun check_restore_arg/2, InitR, Args) of
+ R when record(R, r) ->
+ case mnesia_bup:read_schema(Module, Opaque) of
+ {error, Reason} ->
+ {aborted, Reason};
+ BupSchema ->
+ schema_transaction(fun() -> do_restore(R, BupSchema) end)
+ end;
+ {'EXIT', Reason} ->
+ {aborted, Reason}
+ end;
+restore(_Opaque, Args, Module) ->
+ {aborted, {badarg, Args, Module}}.
+
+check_restore_arg({module, Mod}, R) when atom(Mod) ->
+ R#r{module = Mod};
+
+check_restore_arg({clear_tables, List}, R) when list(List) ->
+ case lists:member(schema, List) of
+ false ->
+ TableList = [{Tab, clear_tables} || Tab <- List],
+ R#r{table_options = R#r.table_options ++ TableList};
+ true ->
+ exit({badarg, {clear_tables, schema}})
+ end;
+check_restore_arg({recreate_tables, List}, R) when list(List) ->
+ case lists:member(schema, List) of
+ false ->
+ TableList = [{Tab, recreate_tables} || Tab <- List],
+ R#r{table_options = R#r.table_options ++ TableList};
+ true ->
+ exit({badarg, {recreate_tables, schema}})
+ end;
+check_restore_arg({keep_tables, List}, R) when list(List) ->
+ TableList = [{Tab, keep_tables} || Tab <- List],
+ R#r{table_options = R#r.table_options ++ TableList};
+check_restore_arg({skip_tables, List}, R) when list(List) ->
+ TableList = [{Tab, skip_tables} || Tab <- List],
+ R#r{table_options = R#r.table_options ++ TableList};
+check_restore_arg({default_op, Op}, R) ->
+ case Op of
+ clear_tables -> ok;
+ recreate_tables -> ok;
+ keep_tables -> ok;
+ skip_tables -> ok;
+ Else ->
+ exit({badarg, {bad_default_op, Else}})
+ end,
+ R#r{default_op = Op};
+
+check_restore_arg(BadArg,_) ->
+ exit({badarg, BadArg}).
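+
+%% Hedged usage sketch (illustrative only; the backup name and table names
+%% are invented): a restore that recreates table foo, clears bar and skips
+%% all other tables found in the backup could be requested as
+%%
+%%   restore("backup.BUP", [{recreate_tables, [foo]},
+%%                          {clear_tables,    [bar]},
+%%                          {default_op,      skip_tables}])
+%%
+%% check_restore_arg/2 folds these options into the #r{} record above and
+%% exits with {badarg, ...} for anything it does not recognize.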
+
+do_restore(R, BupSchema) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ R2 = restore_schema(BupSchema, R),
+ insert_schema_ops(TidTs, [{restore_op, R2}]),
+ [element(1, TabStruct) || TabStruct <- R2#r.tables].
+
+arrange_restore(R, Fun, Recs) ->
+ R2 = R#r{insert_op = Fun, recs = Recs},
+ case mnesia_bup:iterate(R#r.module, fun restore_items/4, R#r.opaque, R2) of
+ {ok, R3} -> R3#r.recs;
+ {error, Reason} -> mnesia:abort(Reason);
+ Reason -> mnesia:abort(Reason)
+ end.
+
+restore_items([Rec | Recs], Header, Schema, R) ->
+ Tab = element(1, Rec),
+ case lists:keysearch(Tab, 1, R#r.tables) of
+ {value, {Tab, Where, Snmp, RecName}} ->
+ {Rest, NRecs} =
+ restore_tab_items([Rec | Recs], Tab, RecName, Where, Snmp,
+ R#r.recs, R#r.insert_op),
+ restore_items(Rest, Header, Schema, R#r{recs = NRecs});
+ false ->
+ Rest = skip_tab_items(Recs, Tab),
+ restore_items(Rest, Header, Schema, R)
+ end;
+
+restore_items([], _Header, _Schema, R) ->
+ R.
+
+restore_func(Tab, R) ->
+ case lists:keysearch(Tab, 1, R#r.table_options) of
+ {value, {Tab, OP}} ->
+ OP;
+ false ->
+ R#r.default_op
+ end.
+
+where_to_commit(Tab, CsList) ->
+ Ram = [{N, ram_copies} || N <- pick(Tab, ram_copies, CsList, [])],
+ Disc = [{N, disc_copies} || N <- pick(Tab, disc_copies, CsList, [])],
+ DiscO = [{N, disc_only_copies} || N <- pick(Tab, disc_only_copies, CsList, [])],
+ Ram ++ Disc ++ DiscO.
+
+%% Changes to the meta info of the schema itself are not allowed
+restore_schema([{schema, schema, _List} | Schema], R) ->
+ restore_schema(Schema, R);
+restore_schema([{schema, Tab, List} | Schema], R) ->
+ case restore_func(Tab, R) of
+ clear_tables ->
+ do_clear_table(Tab),
+ Where = val({Tab, where_to_commit}),
+ Snmp = val({Tab, snmp}),
+ RecName = val({Tab, record_name}),
+ R2 = R#r{tables = [{Tab, Where, Snmp, RecName} | R#r.tables]},
+ restore_schema(Schema, R2);
+ recreate_tables ->
+ TidTs = get_tid_ts_and_lock(Tab, write),
+ NC = {cookie, ?unique_cookie},
+ List2 = lists:keyreplace(cookie, 1, List, NC),
+ Where = where_to_commit(Tab, List2),
+ Snmp = pick(Tab, snmp, List2, []),
+ RecName = pick(Tab, record_name, List2, Tab),
+% case ?catch_val({Tab, cstruct}) of
+% {'EXIT', _} ->
+% ignore;
+% OldCs when record(OldCs, cstruct) ->
+% do_delete_table(Tab)
+% end,
+% unsafe_do_create_table(list2cs(List2)),
+ insert_schema_ops(TidTs, [{op, restore_recreate, List2}]),
+ R2 = R#r{tables = [{Tab, Where, Snmp, RecName} | R#r.tables]},
+ restore_schema(Schema, R2);
+ keep_tables ->
+ get_tid_ts_and_lock(Tab, write),
+ Where = val({Tab, where_to_commit}),
+ Snmp = val({Tab, snmp}),
+ RecName = val({Tab, record_name}),
+ R2 = R#r{tables = [{Tab, Where, Snmp, RecName} | R#r.tables]},
+ restore_schema(Schema, R2);
+ skip_tables ->
+ restore_schema(Schema, R)
+ end;
+
+restore_schema([{schema, Tab} | Schema], R) ->
+ do_delete_table(Tab),
+ Tabs = lists:delete(Tab,R#r.tables),
+ restore_schema(Schema, R#r{tables = Tabs});
+restore_schema([], R) ->
+ R.
+
+restore_tab_items([Rec | Rest], Tab, RecName, Where, Snmp, Recs, Op)
+ when element(1, Rec) == Tab ->
+ NewRecs = Op(Rec, Recs, RecName, Where, Snmp),
+ restore_tab_items(Rest, Tab, RecName, Where, Snmp, NewRecs, Op);
+
+restore_tab_items(Rest, _Tab, _RecName, _Where, _Snmp, Recs, _Op) ->
+ {Rest, Recs}.
+
+skip_tab_items([Rec| Rest], Tab)
+ when element(1, Rec) == Tab ->
+ skip_tab_items(Rest, Tab);
+skip_tab_items(Recs, _) ->
+ Recs.
+
+%%%%%%%%% Dump tables %%%%%%%%%%%%%
+dump_tables(Tabs) when list(Tabs) ->
+ schema_transaction(fun() -> do_dump_tables(Tabs) end);
+dump_tables(Tabs) ->
+ {aborted, {bad_type, Tabs}}.
+
+do_dump_tables(Tabs) ->
+ TidTs = get_tid_ts_and_lock(schema, write),
+ insert_schema_ops(TidTs, make_dump_tables(Tabs)).
+
+make_dump_tables([schema | _Tabs]) ->
+ mnesia:abort({bad_type, schema});
+make_dump_tables([Tab | Tabs]) ->
+ get_tid_ts_and_lock(Tab, read),
+ TabDef = get_create_list(Tab),
+ DiscResident = val({Tab, disc_copies}) ++ val({Tab, disc_only_copies}),
+ verify([], DiscResident,
+ {"Only allowed on ram_copies", Tab, DiscResident}),
+ [{op, dump_table, unknown, TabDef} | make_dump_tables(Tabs)];
+make_dump_tables([]) ->
+ [].
+
+%% Merge the local schema with the schema on other nodes
+merge_schema() ->
+ schema_transaction(fun() -> do_merge_schema() end).
+
+do_merge_schema() ->
+ {_Mod, Tid, Ts} = get_tid_ts_and_lock(schema, write),
+ Connected = val(recover_nodes),
+ Running = val({current, db_nodes}),
+ Store = Ts#tidstore.store,
+ case Connected -- Running of
+ [Node | _] ->
+ %% Time for a schema merging party!
+ mnesia_locker:wlock_no_exist(Tid, Store, schema, [Node]),
+
+ case rpc:call(Node, mnesia_controller, get_cstructs, []) of
+ {cstructs, Cstructs, RemoteRunning1} ->
+ LockedAlready = Running ++ [Node],
+ {New, Old} = mnesia_recover:connect_nodes(RemoteRunning1),
+ RemoteRunning = mnesia_lib:intersect(New ++ Old, RemoteRunning1),
+ if
+ RemoteRunning /= RemoteRunning1 ->
+ mnesia_lib:error("Mnesia on ~p could not connect to node(s) ~p~n",
+ [node(), RemoteRunning1 -- RemoteRunning]);
+ true -> ok
+ end,
+ NeedsLock = RemoteRunning -- LockedAlready,
+ mnesia_locker:wlock_no_exist(Tid, Store, schema, NeedsLock),
+
+ {value, SchemaCs} =
+ lists:keysearch(schema, #cstruct.name, Cstructs),
+
+ %% Announce that Node is running
+ A = [{op, announce_im_running, node(),
+ cs2list(SchemaCs), Running, RemoteRunning}],
+ do_insert_schema_ops(Store, A),
+
+ %% Introduce remote tables to local node
+ do_insert_schema_ops(Store, make_merge_schema(Node, Cstructs)),
+
+ %% Introduce local tables to remote nodes
+ Tabs = val({schema, tables}),
+ Ops = [{op, merge_schema, get_create_list(T)}
+ || T <- Tabs,
+ not lists:keymember(T, #cstruct.name, Cstructs)],
+ do_insert_schema_ops(Store, Ops),
+
+ %% Ensure that the txn will be committed on all nodes
+ announce_im_running(RemoteRunning, SchemaCs),
+ {merged, Running, RemoteRunning};
+ {error, Reason} ->
+ {"Cannot get cstructs", Node, Reason};
+ {badrpc, Reason} ->
+ {"Cannot get cstructs", Node, {badrpc, Reason}}
+ end;
+ [] ->
+ %% No more nodes to merge schema with
+ not_merged
+ end.
+
+make_merge_schema(Node, [Cs | Cstructs]) ->
+ Ops = do_make_merge_schema(Node, Cs),
+ Ops ++ make_merge_schema(Node, Cstructs);
+make_merge_schema(_Node, []) ->
+ [].
+
+%% Merge definitions of schema table
+do_make_merge_schema(Node, RemoteCs)
+ when RemoteCs#cstruct.name == schema ->
+ Cs = val({schema, cstruct}),
+ Masters = mnesia_recover:get_master_nodes(schema),
+ HasRemoteMaster = lists:member(Node, Masters),
+ HasLocalMaster = lists:member(node(), Masters),
+ Force = HasLocalMaster or HasRemoteMaster,
+    %% What are the storage types' opinions?
+ StCsLocal = mnesia_lib:cs_to_storage_type(node(), Cs),
+ StRcsLocal = mnesia_lib:cs_to_storage_type(node(), RemoteCs),
+ StCsRemote = mnesia_lib:cs_to_storage_type(Node, Cs),
+ StRcsRemote = mnesia_lib:cs_to_storage_type(Node, RemoteCs),
+
+ if
+ Cs#cstruct.cookie == RemoteCs#cstruct.cookie,
+ Cs#cstruct.version == RemoteCs#cstruct.version ->
+ %% Great, we have the same cookie and version
+ %% and do not need to merge cstructs
+ [];
+
+ Cs#cstruct.cookie /= RemoteCs#cstruct.cookie,
+ Cs#cstruct.disc_copies /= [],
+ RemoteCs#cstruct.disc_copies /= [] ->
+ %% Both cstructs involves disc nodes
+ %% and we cannot merge them
+ if
+ HasLocalMaster == true,
+ HasRemoteMaster == false ->
+ %% Choose local cstruct,
+ %% since it's the master
+ [{op, merge_schema, cs2list(Cs)}];
+
+ HasRemoteMaster == true,
+ HasLocalMaster == false ->
+ %% Choose remote cstruct,
+ %% since it's the master
+ [{op, merge_schema, cs2list(RemoteCs)}];
+
+ true ->
+ Str = io_lib:format("Incompatible schema cookies. "
+                                "Please restart from an old backup. "
+ "~w = ~w, ~w = ~w~n",
+ [Node, cs2list(RemoteCs), node(), cs2list(Cs)]),
+ throw(Str)
+ end;
+
+ StCsLocal /= StRcsLocal, StRcsLocal /= unknown ->
+ Str = io_lib:format("Incompatible schema storage types. "
+ "on ~w storage ~w, on ~w storage ~w~n",
+ [node(), StCsLocal, Node, StRcsLocal]),
+ throw(Str);
+ StCsRemote /= StRcsRemote, StCsRemote /= unknown ->
+ Str = io_lib:format("Incompatible schema storage types. "
+ "on ~w storage ~w, on ~w storage ~w~n",
+ [node(), StCsRemote, Node, StRcsRemote]),
+ throw(Str);
+
+ Cs#cstruct.disc_copies /= [] ->
+ %% Choose local cstruct,
+ %% since it involves disc nodes
+ MergedCs = merge_cstructs(Cs, RemoteCs, Force),
+ [{op, merge_schema, cs2list(MergedCs)}];
+
+ RemoteCs#cstruct.disc_copies /= [] ->
+ %% Choose remote cstruct,
+ %% since it involves disc nodes
+ MergedCs = merge_cstructs(RemoteCs, Cs, Force),
+ [{op, merge_schema, cs2list(MergedCs)}];
+
+ Cs > RemoteCs ->
+ %% Choose remote cstruct
+ MergedCs = merge_cstructs(RemoteCs, Cs, Force),
+ [{op, merge_schema, cs2list(MergedCs)}];
+
+ true ->
+ %% Choose local cstruct
+ MergedCs = merge_cstructs(Cs, RemoteCs, Force),
+ [{op, merge_schema, cs2list(MergedCs)}]
+ end;
+
+%% Merge definitions of normal table
+do_make_merge_schema(Node, RemoteCs) ->
+ Tab = RemoteCs#cstruct.name,
+ Masters = mnesia_recover:get_master_nodes(schema),
+ HasRemoteMaster = lists:member(Node, Masters),
+ HasLocalMaster = lists:member(node(), Masters),
+ Force = HasLocalMaster or HasRemoteMaster,
+ case ?catch_val({Tab, cstruct}) of
+ {'EXIT', _} ->
+ %% A completely new table, created while Node was down
+ [{op, merge_schema, cs2list(RemoteCs)}];
+ Cs when Cs#cstruct.cookie == RemoteCs#cstruct.cookie ->
+ if
+ Cs#cstruct.version == RemoteCs#cstruct.version ->
+ %% We have exactly the same version of the
+ %% table def
+ [];
+
+ Cs#cstruct.version > RemoteCs#cstruct.version ->
+                    %% Oops, we have different versions
+                    %% of the table def, let's merge them.
+                    %% The only change that may have occurred
+                    %% is that new replicas may have been added.
+ MergedCs = merge_cstructs(Cs, RemoteCs, Force),
+ [{op, merge_schema, cs2list(MergedCs)}];
+
+ Cs#cstruct.version < RemoteCs#cstruct.version ->
+                    %% Oops, we have different versions
+                    %% of the table def, let's merge them
+ MergedCs = merge_cstructs(RemoteCs, Cs, Force),
+ [{op, merge_schema, cs2list(MergedCs)}]
+ end;
+ Cs ->
+ %% Different cookies, not possible to merge
+ if
+ HasLocalMaster == true,
+ HasRemoteMaster == false ->
+ %% Choose local cstruct,
+ %% since it's the master
+ [{op, merge_schema, cs2list(Cs)}];
+
+ HasRemoteMaster == true,
+ HasLocalMaster == false ->
+ %% Choose remote cstruct,
+ %% since it's the master
+ [{op, merge_schema, cs2list(RemoteCs)}];
+
+ true ->
+ Str = io_lib:format("Bad cookie in table definition"
+ " ~w: ~w = ~w, ~w = ~w~n",
+ [Tab, node(), Cs, Node, RemoteCs]),
+ throw(Str)
+ end
+ end.
+
+%% Change of table definitions (cstructs) requires all replicas
+%% of the table to be active. New replicas, db_nodes and tables
+%% may however be added even if some replica is inactive. These
+%% invariants must be enforced in order to allow merge of cstructs.
+%%
+%% Returns a new cstruct or issues a fatal error
+merge_cstructs(Cs, RemoteCs, Force) ->
+ verify_cstruct(Cs),
+ case catch do_merge_cstructs(Cs, RemoteCs, Force) of
+ {'EXIT', {aborted, _Reason}} when Force == true ->
+ Cs;
+ {'EXIT', Reason} ->
+ exit(Reason);
+ MergedCs when record(MergedCs, cstruct) ->
+ MergedCs;
+ Other ->
+ throw(Other)
+ end.
+
+do_merge_cstructs(Cs, RemoteCs, Force) ->
+ verify_cstruct(RemoteCs),
+ Ns = mnesia_lib:uniq(mnesia_lib:cs_to_nodes(Cs) ++
+ mnesia_lib:cs_to_nodes(RemoteCs)),
+ {AnythingNew, MergedCs} =
+ merge_storage_type(Ns, false, Cs, RemoteCs, Force),
+ MergedCs2 = merge_versions(AnythingNew, MergedCs, RemoteCs, Force),
+ verify_cstruct(MergedCs2),
+ MergedCs2.
+
+merge_storage_type([N | Ns], AnythingNew, Cs, RemoteCs, Force) ->
+ Local = mnesia_lib:cs_to_storage_type(N, Cs),
+ Remote = mnesia_lib:cs_to_storage_type(N, RemoteCs),
+ case compare_storage_type(true, Local, Remote) of
+ {same, _Storage} ->
+ merge_storage_type(Ns, AnythingNew, Cs, RemoteCs, Force);
+ {diff, Storage} ->
+ Cs2 = change_storage_type(N, Storage, Cs),
+ merge_storage_type(Ns, true, Cs2, RemoteCs, Force);
+ incompatible when Force == true ->
+ merge_storage_type(Ns, AnythingNew, Cs, RemoteCs, Force);
+ Other ->
+ Str = io_lib:format("Cannot merge storage type for node ~w "
+ "in cstruct ~w with remote cstruct ~w (~w)~n",
+ [N, Cs, RemoteCs, Other]),
+ throw(Str)
+ end;
+merge_storage_type([], AnythingNew, MergedCs, _RemoteCs, _Force) ->
+ {AnythingNew, MergedCs}.
+
+compare_storage_type(_Retry, Any, Any) ->
+ {same, Any};
+compare_storage_type(_Retry, unknown, Any) ->
+ {diff, Any};
+compare_storage_type(_Retry, ram_copies, disc_copies) ->
+ {diff, disc_copies};
+compare_storage_type(_Retry, disc_copies, disc_only_copies) ->
+ {diff, disc_only_copies};
+compare_storage_type(true, One, Another) ->
+ compare_storage_type(false, Another, One);
+compare_storage_type(false, _One, _Another) ->
+ incompatible.
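+
+%% Worked examples (illustrative, derived from the clauses above): the
+%% comparison is retried once with swapped arguments, which gives
+%%
+%%   compare_storage_type(true, ram_copies,  disc_copies)      -> {diff, disc_copies}
+%%   compare_storage_type(true, disc_copies, ram_copies)       -> {diff, disc_copies}
+%%   compare_storage_type(true, unknown,     disc_only_copies) -> {diff, disc_only_copies}
+%%   compare_storage_type(true, ram_copies,  disc_only_copies) -> incompatible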
+
+change_storage_type(N, ram_copies, Cs) ->
+ Nodes = [N | Cs#cstruct.ram_copies],
+ Cs#cstruct{ram_copies = mnesia_lib:uniq(Nodes)};
+change_storage_type(N, disc_copies, Cs) ->
+ Nodes = [N | Cs#cstruct.disc_copies],
+ Cs#cstruct{disc_copies = mnesia_lib:uniq(Nodes)};
+change_storage_type(N, disc_only_copies, Cs) ->
+ Nodes = [N | Cs#cstruct.disc_only_copies],
+ Cs#cstruct{disc_only_copies = mnesia_lib:uniq(Nodes)}.
+
+%% BUGBUG: Verify match of frag info; equality demanded for all but add_node
+
+merge_versions(AnythingNew, Cs, RemoteCs, Force) ->
+ if
+ Cs#cstruct.name == schema ->
+ ok;
+ Cs#cstruct.name /= schema,
+ Cs#cstruct.cookie == RemoteCs#cstruct.cookie ->
+ ok;
+ Force == true ->
+ ok;
+ true ->
+ Str = io_lib:format("Bad cookies. Cannot merge definitions of "
+ "table ~w. Local = ~w, Remote = ~w~n",
+ [Cs#cstruct.name, Cs, RemoteCs]),
+ throw(Str)
+ end,
+ if
+ Cs#cstruct.name == RemoteCs#cstruct.name,
+ Cs#cstruct.type == RemoteCs#cstruct.type,
+ Cs#cstruct.local_content == RemoteCs#cstruct.local_content,
+ Cs#cstruct.attributes == RemoteCs#cstruct.attributes,
+ Cs#cstruct.index == RemoteCs#cstruct.index,
+ Cs#cstruct.snmp == RemoteCs#cstruct.snmp,
+ Cs#cstruct.access_mode == RemoteCs#cstruct.access_mode,
+ Cs#cstruct.load_order == RemoteCs#cstruct.load_order,
+ Cs#cstruct.user_properties == RemoteCs#cstruct.user_properties ->
+ do_merge_versions(AnythingNew, Cs, RemoteCs);
+ Force == true ->
+ do_merge_versions(AnythingNew, Cs, RemoteCs);
+ true ->
+ Str1 = io_lib:format("Cannot merge definitions of "
+ "table ~w. Local = ~w, Remote = ~w~n",
+ [Cs#cstruct.name, Cs, RemoteCs]),
+ throw(Str1)
+ end.
+
+do_merge_versions(AnythingNew, MergedCs, RemoteCs) ->
+ {{Major1, Minor1}, _Detail1} = MergedCs#cstruct.version,
+ {{Major2, Minor2}, _Detail2} = RemoteCs#cstruct.version,
+ if
+ MergedCs#cstruct.version == RemoteCs#cstruct.version ->
+ MergedCs;
+ AnythingNew == false ->
+ MergedCs;
+ Major1 == Major2 ->
+ Minor = lists:max([Minor1, Minor2]),
+ V = {{Major1, Minor}, dummy},
+ incr_version(MergedCs#cstruct{version = V});
+ Major1 /= Major2 ->
+ Major = lists:max([Major1, Major2]),
+ V = {{Major, 0}, dummy},
+ incr_version(MergedCs#cstruct{version = V})
+ end.
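+
+%% Worked example (illustrative): with a local version {{4,1}, _}, a remote
+%% version {{4,3}, _} and AnythingNew == true, the majors agree, so the
+%% merged cstruct gets version {{4,3}, dummy} before incr_version/1 (not
+%% shown here) bumps it. Had the majors been 4 and 5, the merge would
+%% instead start from {{5,0}, dummy}.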
+
+announce_im_running([N | Ns], SchemaCs) ->
+ {L1, L2} = mnesia_recover:connect_nodes([N]),
+ case lists:member(N, L1) or lists:member(N, L2) of
+ true ->
+%% dbg_out("Adding ~p to {current db_nodes} ~n", [N]), %% qqqq
+ mnesia_lib:add({current, db_nodes}, N),
+ mnesia_controller:add_active_replica(schema, N, SchemaCs);
+ false ->
+ ignore
+ end,
+ announce_im_running(Ns, SchemaCs);
+announce_im_running([], _) ->
+ [].
+
+unannounce_im_running([N | Ns]) ->
+ mnesia_lib:del({current, db_nodes}, N),
+ mnesia_controller:del_active_replica(schema, N),
+ mnesia_recover:disconnect(N),
+ unannounce_im_running(Ns);
+unannounce_im_running([]) ->
+ [].
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_snmp_hook.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_snmp_hook.erl
new file mode 100644
index 0000000000..ad88bc6e6a
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_snmp_hook.erl
@@ -0,0 +1,271 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_snmp_hook.erl,v 1.1 2008/12/17 09:53:39 mikpe Exp $
+%%
+-module(mnesia_snmp_hook).
+
+%% Hooks (called from mnesia)
+-export([check_ustruct/1, create_table/3, delete_table/2,
+ key_to_oid/3, update/1, start/2,
+ get_row/2, get_next_index/2, get_mnesia_key/2]).
+
+%% sys callback functions
+-export([system_continue/3,
+ system_terminate/4,
+ system_code_change/4
+ ]).
+
+%% Internal exports
+-export([b_init/2]).
+
+check_ustruct([]) ->
+ true; %% default value, not SNMP'ified
+check_ustruct([{key, Types}]) ->
+ is_snmp_type(to_list(Types));
+check_ustruct(_) -> false.
+
+to_list(Tuple) when tuple(Tuple) -> tuple_to_list(Tuple);
+to_list(X) -> [X].
+
+is_snmp_type([integer | T]) -> is_snmp_type(T);
+is_snmp_type([string | T]) -> is_snmp_type(T);
+is_snmp_type([fix_string | T]) -> is_snmp_type(T);
+is_snmp_type([]) -> true;
+is_snmp_type(_) -> false.
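+
+%% Illustrative examples (derived from the clauses above):
+%%
+%%   check_ustruct([])                         -> true   (not SNMP'ified)
+%%   check_ustruct([{key, integer}])           -> true
+%%   check_ustruct([{key, {string, integer}}]) -> true
+%%   check_ustruct([{key, {float, integer}}])  -> false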
+
+create_table([], MnesiaTab, _Storage) ->
+ mnesia:abort({badarg, MnesiaTab, {snmp, empty_snmpstruct}});
+
+create_table([{key, Us}], MnesiaTab, Storage) ->
+ Tree = b_new(MnesiaTab, Us),
+ mnesia_lib:db_fixtable(Storage, MnesiaTab, true),
+ First = mnesia_lib:db_first(Storage, MnesiaTab),
+ build_table(First, MnesiaTab, Tree, Us, Storage),
+ mnesia_lib:db_fixtable(Storage, MnesiaTab, false),
+ Tree.
+
+build_table(MnesiaKey, MnesiaTab, Tree, Us, Storage)
+ when MnesiaKey /= '$end_of_table' ->
+%% SnmpKey = key_to_oid(MnesiaTab, MnesiaKey, Us),
+%% update(write, Tree, MnesiaKey, SnmpKey),
+ update(write, Tree, MnesiaKey, MnesiaKey),
+ Next = mnesia_lib:db_next_key(Storage, MnesiaTab, MnesiaKey),
+ build_table(Next, MnesiaTab, Tree, Us, Storage);
+build_table('$end_of_table', _MnesiaTab, _Tree, _Us, _Storage) ->
+ ok.
+
+delete_table(_MnesiaTab, Tree) ->
+ exit(Tree, shutdown),
+ ok.
+
+%%-----------------------------------------------------------------
+%% update({Op, MnesiaTab, MnesiaKey, SnmpKey})
+%%-----------------------------------------------------------------
+
+update({clear_table, MnesiaTab}) ->
+ Tree = mnesia_lib:val({MnesiaTab, {index, snmp}}),
+ b_clear(Tree);
+
+update({Op, MnesiaTab, MnesiaKey, SnmpKey}) ->
+ Tree = mnesia_lib:val({MnesiaTab, {index, snmp}}),
+ update(Op, Tree, MnesiaKey, SnmpKey).
+
+update(Op, Tree, MnesiaKey, _) ->
+ case Op of
+ write ->
+ b_insert(Tree, MnesiaKey, MnesiaKey);
+ update_counter ->
+ ignore;
+ delete ->
+ b_delete(Tree, MnesiaKey);
+ delete_object ->
+ b_delete(Tree, MnesiaKey)
+ end,
+ ok.
+
+%%-----------------------------------------------------------------
+%% Func: key_to_oid(Tab, Key, Ustruct)
+%% Args: Key ::= key()
+%% key() ::= int() | string() | {int() | string()}
+%% Type ::= {fix_string | term()}
+%% Make an OBJECT IDENTIFIER out of it.
+%% Variable length objects are prepended by their length.
+%% Ex. Key = {"pelle", 42} AND Type = {string, integer} =>
+%% OID [5, $p, $e, $l, $l, $e, 42]
+%% Key = {"pelle", 42} AND Type = {fix_string, integer} =>
+%% OID [$p, $e, $l, $l, $e, 42]
+%%-----------------------------------------------------------------
+key_to_oid(Tab, Key, [{key, Types}]) ->
+ MnesiaOid = {Tab, Key},
+ if
+ tuple(Key), tuple(Types) ->
+ case {size(Key), size(Types)} of
+ {Size, Size} ->
+ keys_to_oid(MnesiaOid, Size, Key, [], Types);
+ _ ->
+ exit({bad_snmp_key, MnesiaOid})
+ end;
+ true ->
+ key_to_oid_i(MnesiaOid, Key, Types)
+ end.
+
+key_to_oid_i(_MnesiaOid, Key, integer) when integer(Key) -> [Key];
+key_to_oid_i(_MnesiaOid, Key, fix_string) when list(Key) -> Key;
+key_to_oid_i(_MnesiaOid, Key, string) when list(Key) -> [length(Key) | Key];
+key_to_oid_i(MnesiaOid, Key, Type) ->
+ exit({bad_snmp_key, [MnesiaOid, Key, Type]}).
+
+keys_to_oid(_MnesiaOid, 0, _Key, Oid, _Types) -> Oid;
+keys_to_oid(MnesiaOid, N, Key, Oid, Types) ->
+ Type = element(N, Types),
+ KeyPart = element(N, Key),
+ Oid2 = key_to_oid_i(MnesiaOid, KeyPart, Type) ++ Oid,
+ keys_to_oid(MnesiaOid, N-1, Key, Oid2, Types).
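+
+%% Worked trace (illustrative, complementing the example above): for
+%% Key = {"ab", 7} and Types = {string, integer}, keys_to_oid/5 walks the
+%% tuple from the right:
+%%
+%%   N = 2: key_to_oid_i(_, 7, integer)   = [7]         -> Oid = [7]
+%%   N = 1: key_to_oid_i(_, "ab", string) = [2, $a, $b] -> Oid = [2, $a, $b, 7]
+%%
+%% so the resulting OID is [2, $a, $b, 7].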
+
+%%-----------------------------------------------------------------
+%% Func: get_row/2
+%% Args: Name is the name of the table (atom)
+%% RowIndex is an Oid
+%% Returns: {ok, Row} | undefined
+%% Note that the Row returned might contain columns that
+%%        are not visible via SNMP, e.g. the first column may be
+%%        ifIndex and the last MFA ({ifIndex, col1, col2, MFA}),
+%%        where ifIndex is used only as an index (not as a real column)
+%%        and MFA as extra info used by the application.
+%%-----------------------------------------------------------------
+get_row(Name, RowIndex) ->
+ Tree = mnesia_lib:val({Name, {index, snmp}}),
+ case b_lookup(Tree, RowIndex) of
+ {ok, {_RowIndex, Key}} ->
+ [Row] = mnesia:dirty_read({Name, Key}),
+ {ok, Row};
+ _ ->
+ undefined
+ end.
+
+%%-----------------------------------------------------------------
+%% Func: get_next_index/2
+%% Args: Name is the name of the table (atom)
+%% RowIndex is an Oid
+%% Returns: {ok, NextIndex} | endOfTable
+%%-----------------------------------------------------------------
+get_next_index(Name, RowIndex) ->
+ Tree = mnesia_lib:val({Name, {index, snmp}}),
+ case b_lookup_next(Tree, RowIndex) of
+ {ok, {NextIndex, _Key}} ->
+ {ok, NextIndex};
+ _ ->
+ endOfTable
+ end.
+
+%%-----------------------------------------------------------------
+%% Func: get_mnesia_key/2
+%% Purpose: Get the mnesia key corresponding to the RowIndex.
+%% Args: Name is the name of the table (atom)
+%% RowIndex is an Oid
+%% Returns: {ok, Key} | undefined
+%%-----------------------------------------------------------------
+get_mnesia_key(Name, RowIndex) ->
+ Tree = mnesia_lib:val({Name, {index, snmp}}),
+ case b_lookup(Tree, RowIndex) of
+ {ok, {_RowIndex, Key}} ->
+ {ok, Key};
+ _ ->
+ undefined
+ end.
+
+%%-----------------------------------------------------------------
+%% Encapsulate a bplus_tree in a process.
+%%-----------------------------------------------------------------
+
+b_new(MnesiaTab, Us) ->
+ case supervisor:start_child(mnesia_snmp_sup, [MnesiaTab, Us]) of
+ {ok, Tree} ->
+ Tree;
+ {error, Reason} ->
+ exit({badsnmp, MnesiaTab, Reason})
+ end.
+
+start(MnesiaTab, Us) ->
+ Name = {mnesia_snmp, MnesiaTab},
+ mnesia_monitor:start_proc(Name, ?MODULE, b_init, [self(), Us]).
+
+b_insert(Tree, Key, Val) -> Tree ! {insert, Key, Val}.
+b_delete(Tree, Key) -> Tree ! {delete, Key}.
+b_lookup(Tree, Key) ->
+ Tree ! {lookup, self(), Key},
+ receive
+ {bplus_res, Res} ->
+ Res
+ end.
+b_lookup_next(Tree, Key) ->
+ Tree ! {lookup_next, self(), Key},
+ receive
+ {bplus_res, Res} ->
+ Res
+ end.
+
+b_clear(Tree) ->
+ Tree ! clear,
+ ok.
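+
+%% Protocol sketch (illustrative): Tree is the pid returned by b_new/2, so
+%% a write followed by a lookup boils down to
+%%
+%%   Tree ! {insert, Key, Key},            % b_insert/3
+%%   Tree ! {lookup, self(), Key},         % b_lookup/2
+%%   receive {bplus_res, Res} -> Res end
+%%
+%% i.e. updates are asynchronous sends while lookups are plain
+%% request/reply messages tagged bplus_res, served by b_loop/3 below.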
+
+b_init(Parent, Us) ->
+ %% Do not trap exit
+ Tree = snmp_index:new(Us),
+ proc_lib:init_ack(Parent, {ok, self()}),
+ b_loop(Parent, Tree, Us).
+
+b_loop(Parent, Tree, Us) ->
+ receive
+ {insert, Key, Val} ->
+ NTree = snmp_index:insert(Tree, Key, Val),
+ b_loop(Parent, NTree, Us);
+ {delete, Key} ->
+ NTree = snmp_index:delete(Tree, Key),
+ b_loop(Parent, NTree, Us);
+ {lookup, From, Key} ->
+ Res = snmp_index:get(Tree, Key),
+ From ! {bplus_res, Res},
+ b_loop(Parent, Tree, Us);
+ {lookup_next, From, Key} ->
+ Res = snmp_index:get_next(Tree, Key),
+ From ! {bplus_res, Res},
+ b_loop(Parent, Tree, Us);
+ clear ->
+ catch snmp_index:delete(Tree), %% Catch because delete/1 is not
+ NewTree = snmp_index:new(Us), %% available in old snmp (before R5)
+ b_loop(Parent, NewTree, Us);
+
+ {'EXIT', Parent, Reason} ->
+ exit(Reason);
+
+ {system, From, Msg} ->
+ mnesia_lib:dbg_out("~p got {system, ~p, ~p}~n", [?MODULE, From, Msg]),
+ sys:handle_system_msg(Msg, From, Parent, ?MODULE, [], {Tree, Us})
+
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% System upgrade
+
+system_continue(Parent, _Debug, {Tree, Us}) ->
+ b_loop(Parent, Tree, Us).
+
+system_terminate(Reason, _Parent, _Debug, _Tree) ->
+ exit(Reason).
+
+system_code_change(State, _Module, _OldVsn, _Extra) ->
+ {ok, State}.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_snmp_sup.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_snmp_sup.erl
new file mode 100644
index 0000000000..227eec060f
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_snmp_sup.erl
@@ -0,0 +1,39 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_snmp_sup.erl,v 1.1 2008/12/17 09:53:39 mikpe Exp $
+%%
+-module(mnesia_snmp_sup).
+
+-behaviour(supervisor).
+
+-export([start/0, init/1]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% top supervisor callback functions
+
+start() ->
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% sub supervisor callback functions
+
+init([]) ->
+ Flags = {simple_one_for_one, 0, timer:hours(24)}, % Trust the top supervisor
+ MFA = {mnesia_snmp_hook, start, []},
+ Modules = [?MODULE, mnesia_snmp_hook, supervisor],
+ KillAfter = mnesia_kernel_sup:supervisor_timeout(timer:seconds(3)),
+ Workers = [{?MODULE, MFA, transient, KillAfter, worker, Modules}],
+ {ok, {Flags, Workers}}.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_sp.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_sp.erl
new file mode 100644
index 0000000000..bc52ad7f84
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_sp.erl
@@ -0,0 +1,35 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_sp.erl,v 1.1 2008/12/17 09:53:39 mikpe Exp $
+%%
+
+%% To be able to generate nice crash reports we need a catch at the highest level.
+%% This code can't be purged, so a code change is not possible.
+%% Hence this is a simple module.
+
+-module(mnesia_sp).
+
+-export([init_proc/4]).
+
+init_proc(Who, Mod, Fun, Args) ->
+ mnesia_lib:verbose("~p starting: ~p~n", [Who, self()]),
+ case catch apply(Mod, Fun, Args) of
+ {'EXIT', Reason} ->
+ mnesia_monitor:terminate_proc(Who, Reason, Args),
+ exit(Reason);
+ Other ->
+ Other
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_subscr.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_subscr.erl
new file mode 100644
index 0000000000..dc66451206
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_subscr.erl
@@ -0,0 +1,491 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_subscr.erl,v 1.1 2008/12/17 09:53:39 mikpe Exp $
+%%
+-module(mnesia_subscr).
+
+-behaviour(gen_server).
+
+-export([start/0,
+ set_debug_level/1,
+ subscribe/2,
+ unsubscribe/2,
+ unsubscribe_table/1,
+ subscribers/0,
+ report_table_event/4,
+ report_table_event/5,
+ report_table_event/6
+ ]).
+
+%% gen_server callbacks
+-export([init/1,
+ handle_call/3,
+ handle_cast/2,
+ handle_info/2,
+ terminate/2,
+ code_change/3
+ ]).
+
+-include("mnesia.hrl").
+
+-import(mnesia_lib, [error/2]).
+-record(state, {supervisor, pid_tab}).
+
+start() ->
+ gen_server:start_link({local, ?MODULE}, ?MODULE, [self()],
+ [{timeout, infinity}]).
+
+set_debug_level(Level) ->
+ OldEnv = application:get_env(mnesia, debug),
+ case mnesia_monitor:patch_env(debug, Level) of
+ {error, Reason} ->
+ {error, Reason};
+ NewLevel ->
+ set_debug_level(NewLevel, OldEnv)
+ end.
+
+set_debug_level(Level, OldEnv) ->
+ case mnesia:system_info(is_running) of
+ no when OldEnv == undefined ->
+ none;
+ no ->
+ {ok, E} = OldEnv,
+ E;
+ _ ->
+ Old = mnesia_lib:val(debug),
+ Local = mnesia:system_info(local_tables),
+ E = whereis(mnesia_event),
+ Sub = fun(Tab) -> subscribe(E, {table, Tab}) end,
+ UnSub = fun(Tab) -> unsubscribe(E, {table, Tab}) end,
+
+ case Level of
+ none ->
+ lists:foreach(UnSub, Local);
+ verbose ->
+ lists:foreach(UnSub, Local);
+ debug ->
+ lists:foreach(UnSub, Local -- [schema]),
+ Sub(schema);
+ trace ->
+ lists:foreach(Sub, Local)
+ end,
+ mnesia_lib:set(debug, Level),
+ Old
+ end.
+
+subscribe(ClientPid, system) ->
+ change_subscr(activate, ClientPid, system);
+subscribe(ClientPid, {table, Tab}) ->
+ change_subscr(activate, ClientPid, {table, Tab, simple});
+subscribe(ClientPid, {table, Tab, simple}) ->
+ change_subscr(activate, ClientPid, {table, Tab, simple});
+subscribe(ClientPid, {table, Tab, detailed}) ->
+ change_subscr(activate, ClientPid, {table, Tab, detailed});
+subscribe(_ClientPid, What) ->
+ {error, {badarg, What}}.
+
+unsubscribe(ClientPid, system) ->
+ change_subscr(deactivate, ClientPid, system);
+unsubscribe(ClientPid, {table, Tab}) ->
+ change_subscr(deactivate, ClientPid, {table, Tab, simple});
+unsubscribe(ClientPid, {table, Tab, simple}) ->
+ change_subscr(deactivate, ClientPid, {table, Tab, simple});
+unsubscribe(ClientPid, {table, Tab, detailed}) ->
+ change_subscr(deactivate, ClientPid, {table, Tab, detailed});
+unsubscribe(_ClientPid, What) ->
+ {error, {badarg, What}}.
+
+unsubscribe_table(Tab) ->
+ call({change, {deactivate_table, Tab}}).
+
+change_subscr(Kind, ClientPid, What) ->
+ call({change, {Kind, ClientPid, What}}).
+
+subscribers() ->
+ [whereis(mnesia_event) | mnesia_lib:val(subscribers)].
+
+report_table_event(Tab, Tid, Obj, Op) ->
+ case ?catch_val({Tab, commit_work}) of
+ {'EXIT', _} -> ok;
+ Commit ->
+ case lists:keysearch(subscribers, 1, Commit) of
+ false -> ok;
+ {value, Subs} ->
+ report_table_event(Subs, Tab, Tid, Obj, Op, undefined)
+ end
+ end.
+
+%% Backwards compatible for the moment, until mnesia_tm gets updated!
+report_table_event(Subscr, Tab, Tid, Obj, Op) ->
+ report_table_event(Subscr, Tab, Tid, Obj, Op, undefined).
+
+report_table_event({subscribers, S1, S2}, Tab, Tid, _Obj, clear_table, _Old) ->
+ What = {delete, {schema, Tab}, Tid},
+ deliver(S1, {mnesia_table_event, What}),
+ TabDef = mnesia_schema:cs2list(?catch_val({Tab, cstruct})),
+ What2 = {write, {schema, Tab, TabDef}, Tid},
+ deliver(S1, {mnesia_table_event, What2}),
+ What3 = {delete, schema, {schema, Tab}, [{schema, Tab, TabDef}], Tid},
+ deliver(S2, {mnesia_table_event, What3}),
+ What4 = {write, schema, {schema, Tab, TabDef}, [], Tid},
+ deliver(S2, {mnesia_table_event, What4});
+
+report_table_event({subscribers, Subscr, []}, Tab, Tid, Obj, Op, _Old) ->
+ What = {Op, patch_record(Tab, Obj), Tid},
+ deliver(Subscr, {mnesia_table_event, What});
+
+report_table_event({subscribers, S1, S2}, Tab, Tid, Obj, Op, Old) ->
+ Standard = {Op, patch_record(Tab, Obj), Tid},
+ deliver(S1, {mnesia_table_event, Standard}),
+ Extended = what(Tab, Tid, Obj, Op, Old),
+ deliver(S2, Extended);
+
+%% Backwards compatible for the moment, until mnesia_tm gets updated!
+report_table_event({subscribers, Subscr}, Tab, Tid, Obj, Op, Old) ->
+ report_table_event({subscribers, Subscr, []}, Tab, Tid, Obj, Op, Old).
+
+
+patch_record(Tab, Obj) ->
+ case Tab == element(1, Obj) of
+ true ->
+ Obj;
+ false ->
+ setelement(1, Obj, Tab)
+ end.
+
+what(Tab, Tid, {RecName, Key}, delete, undefined) ->
+ case catch mnesia_lib:db_get(Tab, Key) of
+ Old when list(Old) -> %% Op only allowed for set table.
+ {mnesia_table_event, {delete, Tab, {RecName, Key}, Old, Tid}};
+ _ ->
+ %% Record just deleted by a dirty_op or
+ %% the whole table has been deleted
+ ignore
+ end;
+what(Tab, Tid, Obj, delete, Old) ->
+ {mnesia_table_event, {delete, Tab, Obj, Old, Tid}};
+what(Tab, Tid, Obj, delete_object, _Old) ->
+ {mnesia_table_event, {delete, Tab, Obj, [Obj], Tid}};
+what(Tab, Tid, Obj, write, undefined) ->
+ case catch mnesia_lib:db_get(Tab, element(2, Obj)) of
+ Old when list(Old) ->
+ {mnesia_table_event, {write, Tab, Obj, Old, Tid}};
+ {'EXIT', _} ->
+ ignore
+ end.
+
+deliver(_, ignore) ->
+ ok;
+deliver([Pid | Pids], Msg) ->
+ Pid ! Msg,
+ deliver(Pids, Msg);
+deliver([], _Msg) ->
+ ok.
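+
+%% Hedged sketch of the two event formats (derived from the clauses above;
+%% the table name and record are invented): a process subscribed with
+%% subscribe(Pid, {table, foo, simple}) receives messages like
+%%
+%%   {mnesia_table_event, {write, {foo, Key, Val}, Tid}}
+%%
+%% while a detailed subscriber receives the extended form
+%%
+%%   {mnesia_table_event, {write, foo, {foo, Key, Val}, OldRecords, Tid}}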
+
+call(Msg) ->
+ Pid = whereis(?MODULE),
+ case Pid of
+ undefined ->
+ {error, {node_not_running, node()}};
+ Pid ->
+ Res = gen_server:call(Pid, Msg, infinity),
+ %% We get an exit signal if server dies
+ receive
+ {'EXIT', _Pid, _Reason} ->
+ {error, {node_not_running, node()}}
+ after 0 ->
+ ignore
+ end,
+ Res
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Callback functions from gen_server
+
+%%----------------------------------------------------------------------
+%% Func: init/1
+%% Returns: {ok, State} |
+%% {ok, State, Timeout} |
+%% {stop, Reason}
+%%----------------------------------------------------------------------
+init([Parent]) ->
+ process_flag(trap_exit, true),
+ ClientPid = whereis(mnesia_event),
+ link(ClientPid),
+ mnesia_lib:verbose("~p starting: ~p~n", [?MODULE, self()]),
+ Tab = ?ets_new_table(mnesia_subscr, [duplicate_bag, private]),
+ ?ets_insert(Tab, {ClientPid, system}),
+ {ok, #state{supervisor = Parent, pid_tab = Tab}}.
+
+%%----------------------------------------------------------------------
+%% Func: handle_call/3
+%% Returns: {reply, Reply, State} |
+%% {reply, Reply, State, Timeout} |
+%% {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, Reply, State} | (terminate/2 is called)
+%%----------------------------------------------------------------------
+handle_call({change, How}, _From, State) ->
+ Reply = do_change(How, State#state.pid_tab),
+ {reply, Reply, State};
+
+handle_call(Msg, _From, State) ->
+ error("~p got unexpected call: ~p~n", [?MODULE, Msg]),
+ {noreply, State}.
+
+%%----------------------------------------------------------------------
+%% Func: handle_cast/2
+%% Returns: {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State} (terminate/2 is called)
+%%----------------------------------------------------------------------
+handle_cast(Msg, State) ->
+ error("~p got unexpected cast: ~p~n", [?MODULE, Msg]),
+ {noreply, State}.
+
+%%----------------------------------------------------------------------
+%% Func: handle_info/2
+%% Returns: {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State} (terminate/2 is called)
+%%----------------------------------------------------------------------
+
+handle_info({'EXIT', Pid, _R}, State) when Pid == State#state.supervisor ->
+ {stop, shutdown, State};
+
+handle_info({'EXIT', Pid, _Reason}, State) ->
+ handle_exit(Pid, State#state.pid_tab),
+ {noreply, State};
+
+handle_info(Msg, State) ->
+ error("~p got unexpected info: ~p~n", [?MODULE, Msg]),
+ {noreply, State}.
+
+%%----------------------------------------------------------------------
+%% Func: terminate/2
+%% Purpose: Shutdown the server
+%% Returns: any (ignored by gen_server)
+%%----------------------------------------------------------------------
+terminate(Reason, State) ->
+ prepare_stop(State#state.pid_tab),
+ mnesia_monitor:terminate_proc(?MODULE, Reason, State).
+
+%%----------------------------------------------------------------------
+%% Func: code_change/3
+%% Purpose: Upgrade process when its code is to be changed
+%% Returns: {ok, NewState}
+%%----------------------------------------------------------------------
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+%%%----------------------------------------------------------------------
+%%% Internal functions
+%%%----------------------------------------------------------------------
+
+do_change({activate, ClientPid, system}, SubscrTab) when pid(ClientPid) ->
+ Var = subscribers,
+ activate(ClientPid, system, Var, subscribers(), SubscrTab);
+do_change({activate, ClientPid, {table, Tab, How}}, SubscrTab) when pid(ClientPid) ->
+ case ?catch_val({Tab, where_to_read}) of
+ Node when Node == node() ->
+ Var = {Tab, commit_work},
+ activate(ClientPid, {table, Tab, How}, Var, mnesia_lib:val(Var), SubscrTab);
+ {'EXIT', _} ->
+ {error, {no_exists, Tab}};
+ _Node ->
+ {error, {not_active_local, Tab}}
+ end;
+do_change({deactivate, ClientPid, system}, SubscrTab) ->
+ Var = subscribers,
+ deactivate(ClientPid, system, Var, SubscrTab);
+do_change({deactivate, ClientPid, {table, Tab, How}}, SubscrTab) ->
+ Var = {Tab, commit_work},
+ deactivate(ClientPid, {table, Tab, How}, Var, SubscrTab);
+do_change({deactivate_table, Tab}, SubscrTab) ->
+ Var = {Tab, commit_work},
+ case ?catch_val(Var) of
+ {'EXIT', _} ->
+ {error, {no_exists, Tab}};
+ CommitWork ->
+ case lists:keysearch(subscribers, 1, CommitWork) of
+ false ->
+ ok;
+ {value, Subs} ->
+ Simple = {table, Tab, simple},
+ Detailed = {table, Tab, detailed},
+ Fs = fun(C) -> deactivate(C, Simple, Var, SubscrTab) end,
+ Fd = fun(C) -> deactivate(C, Detailed, Var, SubscrTab) end,
+ case Subs of
+ {subscribers, L1, L2} ->
+ lists:foreach(Fs, L1),
+ lists:foreach(Fd, L2);
+ {subscribers, L1} ->
+ lists:foreach(Fs, L1)
+ end
+ end,
+ {ok, node()}
+ end;
+do_change(_, _) ->
+ {error, badarg}.
+
+activate(ClientPid, What, Var, OldSubscribers, SubscrTab) ->
+ Old =
+ if Var == subscribers ->
+ OldSubscribers;
+ true ->
+ case lists:keysearch(subscribers, 1, OldSubscribers) of
+ false -> [];
+ {value, Subs} ->
+ case Subs of
+ {subscribers, L1, L2} ->
+ L1 ++ L2;
+ {subscribers, L1} ->
+ L1
+ end
+ end
+ end,
+ case lists:member(ClientPid, Old) of
+ false ->
+ %% Don't care about checking old links
+ case catch link(ClientPid) of
+ true ->
+ ?ets_insert(SubscrTab, {ClientPid, What}),
+ add_subscr(Var, What, ClientPid),
+ {ok, node()};
+ {'EXIT', _Reason} ->
+ {error, {no_exists, ClientPid}}
+ end;
+ true ->
+ {error, {already_exists, What}}
+ end.
+
+%%-record(subscribers, {pids = []}). Old subscriber record removed
+%% To preserve backward compatibility, this code is a kludge.
+add_subscr(subscribers, _What, Pid) ->
+ mnesia_lib:add(subscribers, Pid),
+ {ok, node()};
+add_subscr({Tab, commit_work}, What, Pid) ->
+ Commit = mnesia_lib:val({Tab, commit_work}),
+ case lists:keysearch(subscribers, 1, Commit) of
+ false ->
+ Subscr =
+ case What of
+ {table, _, simple} ->
+ {subscribers, [Pid], []};
+ {table, _, detailed} ->
+ {subscribers, [], [Pid]}
+ end,
+ mnesia_lib:add({Tab, subscribers}, Pid),
+ mnesia_lib:set({Tab, commit_work},
+ mnesia_lib:sort_commit([Subscr | Commit]));
+ {value, Old} ->
+ {L1, L2} =
+ case Old of
+ {subscribers, L} -> %% Old Way
+ {L, []};
+ {subscribers, SL1, SL2} ->
+ {SL1, SL2}
+ end,
+ Subscr =
+ case What of
+ {table, _, simple} ->
+ {subscribers, [Pid | L1], L2};
+ {table, _, detailed} ->
+ {subscribers, L1, [Pid | L2]}
+ end,
+ NewC = lists:keyreplace(subscribers, 1, Commit, Subscr),
+ mnesia_lib:set({Tab, commit_work},
+ mnesia_lib:sort_commit(NewC)),
+ mnesia_lib:add({Tab, subscribers}, Pid)
+ end.
+
+deactivate(ClientPid, What, Var, SubscrTab) ->
+ ?ets_match_delete(SubscrTab, {ClientPid, What}),
+ case catch ?ets_lookup_element(SubscrTab, ClientPid, 1) of
+ List when list(List) ->
+ ignore;
+ {'EXIT', _} ->
+ unlink(ClientPid)
+ end,
+ del_subscr(Var, What, ClientPid),
+ {ok, node()}.
+
+del_subscr(subscribers, _What, Pid) ->
+ mnesia_lib:del(subscribers, Pid);
+del_subscr({Tab, commit_work}, What, Pid) ->
+ Commit = mnesia_lib:val({Tab, commit_work}),
+ case lists:keysearch(subscribers, 1, Commit) of
+ false ->
+ false;
+ {value, Old} ->
+ {L1, L2} =
+ case Old of
+ {subscribers, L} -> %% Old Way
+ {L, []};
+ {subscribers, SL1, SL2} ->
+ {SL1, SL2}
+ end,
+ Subscr =
+                case What of %% Ignore user error; delete the subscriber from any list
+ {table, _, simple} ->
+ NewL1 = lists:delete(Pid, L1),
+ NewL2 = lists:delete(Pid, L2),
+ {subscribers, NewL1, NewL2};
+ {table, _, detailed} ->
+ NewL1 = lists:delete(Pid, L1),
+ NewL2 = lists:delete(Pid, L2),
+ {subscribers, NewL1, NewL2}
+ end,
+ case Subscr of
+ {subscribers, [], []} ->
+ NewC = lists:keydelete(subscribers, 1, Commit),
+ mnesia_lib:del({Tab, subscribers}, Pid),
+ mnesia_lib:set({Tab, commit_work},
+ mnesia_lib:sort_commit(NewC));
+ _ ->
+ NewC = lists:keyreplace(subscribers, 1, Commit, Subscr),
+ mnesia_lib:del({Tab, subscribers}, Pid),
+ mnesia_lib:set({Tab, commit_work},
+ mnesia_lib:sort_commit(NewC))
+ end
+ end.
+
+handle_exit(ClientPid, SubscrTab) ->
+ do_handle_exit(?ets_lookup(SubscrTab, ClientPid)),
+ ?ets_delete(SubscrTab, ClientPid).
+
+do_handle_exit([{ClientPid, What} | Tail]) ->
+ case What of
+ system ->
+ del_subscr(subscribers, What, ClientPid);
+ {_, Tab, _Level} ->
+ del_subscr({Tab, commit_work}, What, ClientPid)
+ end,
+ do_handle_exit(Tail);
+do_handle_exit([]) ->
+ ok.
+
+prepare_stop(SubscrTab) ->
+ mnesia_lib:report_system_event({mnesia_down, node()}),
+ do_prepare_stop(?ets_first(SubscrTab), SubscrTab).
+
+do_prepare_stop('$end_of_table', _SubscrTab) ->
+ ok;
+do_prepare_stop(ClientPid, SubscrTab) ->
+ Next = ?ets_next(SubscrTab, ClientPid),
+ handle_exit(ClientPid, SubscrTab),
+ unlink(ClientPid),
+ do_prepare_stop(Next, SubscrTab).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_sup.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_sup.erl
new file mode 100644
index 0000000000..78609ffdde
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_sup.erl
@@ -0,0 +1,136 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_sup.erl,v 1.1 2008/12/17 09:53:39 mikpe Exp $
+%%
+%% Supervisor for the entire Mnesia application
+
+-module(mnesia_sup).
+
+-behaviour(application).
+-behaviour(supervisor).
+
+-export([start/0, start/2, init/1, stop/1, start_event/0, kill/0]).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% application and supervisor callback functions
+
+start(normal, Args) ->
+ SupName = {local,?MODULE},
+ case supervisor:start_link(SupName, ?MODULE, [Args]) of
+ {ok, Pid} ->
+ {ok, Pid, {normal, Args}};
+ Error ->
+ Error
+ end;
+start(_, _) ->
+ {error, badarg}.
+
+start() ->
+ SupName = {local,?MODULE},
+ supervisor:start_link(SupName, ?MODULE, []).
+
+stop(_StartArgs) ->
+ ok.
+
+init([]) -> % Supervisor
+ init();
+init([[]]) -> % Application
+ init();
+init(BadArg) ->
+ {error, {badarg, BadArg}}.
+
+init() ->
+ Flags = {one_for_all, 0, 3600}, % Should be rest_for_one policy
+
+ Event = event_procs(),
+ Kernel = kernel_procs(),
+ Mnemosyne = mnemosyne_procs(),
+
+ {ok, {Flags, Event ++ Kernel ++ Mnemosyne}}.
+
+event_procs() ->
+ KillAfter = timer:seconds(30),
+ KA = mnesia_kernel_sup:supervisor_timeout(KillAfter),
+ E = mnesia_event,
+ [{E, {?MODULE, start_event, []}, permanent, KA, worker, [E, gen_event]}].
+
+kernel_procs() ->
+ K = mnesia_kernel_sup,
+ KA = infinity,
+ [{K, {K, start, []}, permanent, KA, supervisor, [K, supervisor]}].
+
+mnemosyne_procs() ->
+ case mnesia_monitor:get_env(embedded_mnemosyne) of
+ true ->
+ Q = mnemosyne_sup,
+ KA = infinity,
+ [{Q, {Q, start, []}, permanent, KA, supervisor, [Q, supervisor]}];
+ false ->
+ []
+ end.
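+
+%% Hedged sketch of the resulting supervision layout (assuming Mnemosyne is
+%% not embedded): init/0 returns
+%%
+%%   {ok, {{one_for_all, 0, 3600},
+%%         [{mnesia_event,      {mnesia_sup, start_event, []}, permanent, ...},
+%%          {mnesia_kernel_sup, {mnesia_kernel_sup, start, []}, permanent, ...}]}}
+%%
+%% i.e. the event manager is started before the kernel supervisor.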
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% event handler
+
+start_event() ->
+ case gen_event:start_link({local, mnesia_event}) of
+ {ok, Pid} ->
+ case add_event_handler() of
+ ok ->
+ {ok, Pid};
+ Error ->
+ Error
+ end;
+ Error ->
+ Error
+ end.
+
+add_event_handler() ->
+ Handler = mnesia_monitor:get_env(event_module),
+ gen_event:add_handler(mnesia_event, Handler, []).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% debug functions
+
+kill() ->
+ Mnesia = [mnesia_fallback | mnesia:ms()],
+ Mnemosyne = mnemosyne_ms(),
+ Kill = fun(Name) -> catch exit(whereis(Name), kill) end,
+ lists:foreach(Kill, Mnemosyne),
+ lists:foreach(Kill, Mnesia),
+ lists:foreach(fun ensure_dead/1, Mnemosyne),
+ lists:foreach(fun ensure_dead/1, Mnesia),
+ timer:sleep(10),
+ case lists:keymember(mnesia, 1, application:which_applications()) of
+ true -> kill();
+ false -> ok
+ end.
+
+ensure_dead(Name) ->
+ case whereis(Name) of
+ undefined ->
+ ok;
+ Pid when pid(Pid) ->
+ exit(Pid, kill),
+ timer:sleep(10),
+ ensure_dead(Name)
+ end.
+
+mnemosyne_ms() ->
+ case mnesia_monitor:get_env(embedded_mnemosyne) of
+ true -> mnemosyne:ms();
+ false -> []
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_text.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_text.erl
new file mode 100644
index 0000000000..d74f3bf07b
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_text.erl
@@ -0,0 +1,189 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_text.erl,v 1.2 2010/03/04 13:54:20 maria Exp $
+%%
+-module(mnesia_text).
+
+-export([parse/1, file/1, load_textfile/1, dump_to_textfile/1]).
+
+load_textfile(File) ->
+ ensure_started(),
+ case parse(File) of
+ {ok, {Tabs, Data}} ->
+ Badtabs = make_tabs(lists:map(fun validate_tab/1, Tabs)),
+ load_data(del_data(Badtabs, Data, []));
+ Other ->
+ Other
+ end.
+
+dump_to_textfile(File) ->
+ dump_to_textfile(mnesia_lib:is_running(), file:open(File, [write])).
+dump_to_textfile(yes, {ok, F}) ->
+ Tabs = lists:delete(schema, mnesia_lib:local_active_tables()),
+ Defs = lists:map(fun(T) -> {T, [{record_name, mnesia_lib:val({T, record_name})},
+ {attributes, mnesia_lib:val({T, attributes})}]}
+ end,
+ Tabs),
+ io:format(F, "~p.~n", [{tables, Defs}]),
+ lists:foreach(fun(T) -> dump_tab(F, T) end, Tabs),
+ file:close(F);
+dump_to_textfile(_,_) -> error.
+
+
+dump_tab(F, T) ->
+ W = mnesia_lib:val({T, wild_pattern}),
+ {'atomic',All} = mnesia:transaction(fun() -> mnesia:match_object(T, W, read) end),
+ lists:foreach(fun(Term) -> io:format(F,"~p.~n", [setelement(1, Term, T)]) end, All).
+
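+%% Illustrative file format (table and record names invented): a text file
+%% accepted by load_textfile/1 and produced by dump_to_textfile/1 starts
+%% with a single {tables, Defs} term followed by one term per record, e.g.
+%%
+%%   {tables, [{person, [{record_name, person},
+%%                       {attributes, [name, age]}]}]}.
+%%   {person, "kalle", 42}.
+%%   {person, "pelle", 43}.
+%%
+%% collect/1 below rejects any record whose first element is not one of the
+%% declared table names.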
+
+ensure_started() ->
+ case mnesia_lib:is_running() of
+ yes ->
+ yes;
+ no ->
+ case mnesia_lib:exists(mnesia_lib:dir("schema.DAT")) of
+ true ->
+ mnesia:start();
+ false ->
+ mnesia:create_schema([node()]),
+ mnesia:start()
+ end
+ end.
+
+del_data(Bad, [H|T], Ack) ->
+ case lists:member(element(1, H), Bad) of
+ true -> del_data(Bad, T, Ack);
+ false -> del_data(Bad, T, [H|Ack])
+ end;
+del_data(_Bad, [], Ack) ->
+ lists:reverse(Ack).
+
+%% This is the place to call the validate func in mnesia_schema
+validate_tab({Tabname, List}) ->
+ {Tabname, List};
+validate_tab({Tabname, RecName, List}) ->
+ {Tabname, RecName, List};
+validate_tab(_) -> error(badtab).
+
+make_tabs([{Tab, Def} | Tail]) ->
+ case catch mnesia:table_info(Tab, where_to_read) of
+ {'EXIT', _} -> %% non-existing table
+ case mnesia:create_table(Tab, Def) of
+ {aborted, Reason} ->
+ io:format("** Failed to create table ~w ~n"
+ "** Reason = ~w, Args = ~p~n",
+ [Tab, Reason, Def]),
+ [Tab | make_tabs(Tail)];
+ _ ->
+ io:format("New table ~w~n", [Tab]),
+ make_tabs(Tail)
+ end;
+ Node ->
+ io:format("** Table ~w already exists on ~p, just entering data~n",
+ [Tab, Node]),
+ make_tabs(Tail)
+ end;
+
+make_tabs([]) ->
+ [].
+
+load_data(L) ->
+ mnesia:transaction(fun() ->
+ F = fun(X) ->
+ Tab = element(1, X),
+ RN = mnesia:table_info(Tab, record_name),
+ Rec = setelement(1, X, RN),
+ mnesia:write(Tab, Rec, write) end,
+ lists:foreach(F, L)
+ end).
+
+parse(File) ->
+ case file(File) of
+ {ok, Terms} ->
+ case catch collect(Terms) of
+ {error, X} ->
+ {error, X};
+ Other ->
+ {ok, Other}
+ end;
+ Other ->
+ Other
+ end.
+
+collect([{_, {tables, Tabs}}|L]) ->
+ {Tabs, collect_data(Tabs, L)};
+
+collect(_) ->
+ io:format("No tables found\n", []),
+ error(bad_header).
+
+collect_data(Tabs, [{Line, Term} | Tail]) when tuple(Term) ->
+ case lists:keysearch(element(1, Term), 1, Tabs) of
+ {value, _} ->
+ [Term | collect_data(Tabs, Tail)];
+ _Other ->
+ io:format("Object:~p at line ~w unknown\n", [Term,Line]),
+ error(undefined_object)
+ end;
+collect_data(_Tabs, []) -> [];
+collect_data(_Tabs, [H|_T]) ->
+ io:format("Object:~p unknown\n", [H]),
+ error(undefined_object).
+
+error(What) -> throw({error, What}).
+
+file(File) ->
+ case file:open(File, [read]) of
+ {ok, Stream} ->
+ Res = read_terms(Stream, File, 1, []),
+ file:close(Stream),
+ Res;
+ _Other ->
+ {error, open}
+ end.
+
+read_terms(Stream, File, Line, L) ->
+ case read_term_from_stream(Stream, File, Line) of
+ {ok, Term, NextLine} ->
+ read_terms(Stream, File, NextLine, [Term|L]);
+ error ->
+ {error, read};
+ eof ->
+ {ok, lists:reverse(L)}
+ end.
+
+read_term_from_stream(Stream, File, Line) ->
+ R = io:request(Stream, {get_until,'',erl_scan,tokens,[Line]}),
+ case R of
+ {ok,Toks,EndLine} ->
+ case erl_parse:parse_term(Toks) of
+ {ok, Term} ->
+ {ok, {Line, Term}, EndLine};
+ {error, {NewLine,Mod,What}} ->
+ Str = Mod:format_error(What),
+ io:format("Error in line:~p of:~p ~s\n",
+ [NewLine, File, Str]),
+ error;
+ T ->
+ io:format("Error2 **~p~n",[T]),
+ error
+ end;
+ {eof,_EndLine} ->
+ eof;
+ Other ->
+ io:format("Error1 **~p~n",[Other]),
+ error
+ end.
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl
new file mode 100644
index 0000000000..ac11087fa0
--- /dev/null
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl
@@ -0,0 +1,2173 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id: mnesia_tm.erl,v 1.2 2010/03/04 13:54:20 maria Exp $
+%%
+-module(mnesia_tm).
+
+-export([
+ start/0,
+ init/1,
+ non_transaction/5,
+ transaction/6,
+ commit_participant/5,
+ dirty/2,
+ display_info/2,
+ do_update_op/3,
+ get_info/1,
+ get_transactions/0,
+ info/1,
+ mnesia_down/1,
+ prepare_checkpoint/2,
+ prepare_checkpoint/1, % Internal
+ prepare_snmp/3,
+ do_snmp/2,
+ put_activity_id/1,
+ block_tab/1,
+ unblock_tab/1
+ ]).
+
+%% sys callback functions
+-export([system_continue/3,
+ system_terminate/4,
+ system_code_change/4
+ ]).
+
+-include("mnesia.hrl").
+-import(mnesia_lib, [set/2]).
+-import(mnesia_lib, [fatal/2, verbose/2, dbg_out/2]).
+
+-record(state, {coordinators = [], participants = [], supervisor,
+ blocked_tabs = [], dirty_queue = []}).
+%% Format of coordinators is [{Tid, EtsTabList}, ...]
+
+-record(prep, {protocol = sym_trans,
+ %% async_dirty | sync_dirty | sym_trans | sync_sym_trans | asym_trans
+ records = [],
+ prev_tab = [], % initialized to an invalid table name
+ prev_types,
+ prev_snmp,
+ types
+ }).
+
+-record(participant, {tid, pid, commit, disc_nodes = [],
+ ram_nodes = [], protocol = sym_trans}).
+
+start() ->
+ mnesia_monitor:start_proc(?MODULE, ?MODULE, init, [self()]).
+
+init(Parent) ->
+ register(?MODULE, self()),
+ process_flag(trap_exit, true),
+
+ %% Initialize the schema
+ IgnoreFallback = mnesia_monitor:get_env(ignore_fallback_at_startup),
+ mnesia_bup:tm_fallback_start(IgnoreFallback),
+ mnesia_schema:init(IgnoreFallback),
+
+ %% Handshake and initialize transaction recovery
+ mnesia_recover:init(),
+ Early = mnesia_monitor:init(),
+ AllOthers = mnesia_lib:uniq(Early ++ mnesia_lib:all_nodes()) -- [node()],
+ set(original_nodes, AllOthers),
+ mnesia_recover:connect_nodes(AllOthers),
+
+ %% Recover transactions, may wait for decision
+ case mnesia_monitor:use_dir() of
+ true ->
+ P = mnesia_dumper:opt_dump_log(startup), % previous log
+ L = mnesia_dumper:opt_dump_log(startup), % latest log
+ Msg = "Initial dump of log during startup: ~p~n",
+ mnesia_lib:verbose(Msg, [[P, L]]),
+ mnesia_log:init();
+ false ->
+ ignore
+ end,
+
+ mnesia_schema:purge_tmp_files(),
+ mnesia_recover:start_garb(),
+
+ ?eval_debug_fun({?MODULE, init}, [{nodes, AllOthers}]),
+
+ case val(debug) of
+ Debug when Debug /= debug, Debug /= trace ->
+ ignore;
+ _ ->
+ mnesia_subscr:subscribe(whereis(mnesia_event), {table, schema})
+ end,
+ proc_lib:init_ack(Parent, {ok, self()}),
+ doit_loop(#state{supervisor = Parent}).
+
+val(Var) ->
+ case ?catch_val(Var) of
+ {'EXIT', _ReASoN_} -> mnesia_lib:other_val(Var, _ReASoN_);
+ _VaLuE_ -> _VaLuE_
+ end.
+
+reply({From,Ref}, R) ->
+ From ! {?MODULE, Ref, R};
+reply(From, R) ->
+ From ! {?MODULE, node(), R}.
+
+reply(From, R, State) ->
+ reply(From, R),
+ doit_loop(State).
+
+req(R) ->
+ case whereis(?MODULE) of
+ undefined ->
+ {error, {node_not_running, node()}};
+ Pid ->
+ Ref = make_ref(),
+ Pid ! {{self(), Ref}, R},
+ rec(Pid, Ref)
+ end.
+
+rec() ->
+ rec(whereis(?MODULE)).
+
+rec(Pid) when pid(Pid) ->
+ receive
+ {?MODULE, _, Reply} ->
+ Reply;
+
+ {'EXIT', Pid, _} ->
+ {error, {node_not_running, node()}}
+ end;
+rec(undefined) ->
+ {error, {node_not_running, node()}}.
+
+rec(Pid, Ref) ->
+ receive
+ {?MODULE, Ref, Reply} ->
+ Reply;
+ {'EXIT', Pid, _} ->
+ {error, {node_not_running, node()}}
+ end.
+
+tmlink({From, Ref}) when reference(Ref) ->
+ link(From);
+tmlink(From) ->
+ link(From).
+tmpid({Pid, _Ref}) when pid(Pid) ->
+ Pid;
+tmpid(Pid) ->
+ Pid.
+
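+%% For illustration, the internal request protocol implemented by
+%% req/1, rec/2 and reply/2 above boils down to:
+%%
+%%   Ref = make_ref(),
+%%   whereis(?MODULE) ! {{self(), Ref}, Request},
+%%   receive {?MODULE, Ref, Reply} -> Reply end
+%%
+%% while replies addressed to a plain pid are tagged with the sending
+%% node instead of a reference: {?MODULE, node(), Reply}.
+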
+%% Returns a list of participant transaction Tid's
+mnesia_down(Node) ->
+ %% A synchronous call is needed in order to avoid
+ %% a race with mnesia_tm's coordinator processes
+ %% that may restart and acquire new locks.
+ %% mnesia_monitor takes care of the sync
+ case whereis(?MODULE) of
+ undefined ->
+ mnesia_monitor:mnesia_down(?MODULE, {Node, []});
+ Pid ->
+ Pid ! {mnesia_down, Node}
+ end.
+
+prepare_checkpoint(Nodes, Cp) ->
+ rpc:multicall(Nodes, ?MODULE, prepare_checkpoint, [Cp]).
+
+prepare_checkpoint(Cp) ->
+ req({prepare_checkpoint,Cp}).
+
+block_tab(Tab) ->
+ req({block_tab, Tab}).
+
+unblock_tab(Tab) ->
+ req({unblock_tab, Tab}).
+
+doit_loop(#state{coordinators = Coordinators, participants = Participants, supervisor = Sup}
+ = State) ->
+ receive
+ {_From, {async_dirty, Tid, Commit, Tab}} ->
+ case lists:member(Tab, State#state.blocked_tabs) of
+ false ->
+ do_async_dirty(Tid, Commit, Tab),
+ doit_loop(State);
+ true ->
+ Item = {async_dirty, Tid, Commit, Tab},
+ State2 = State#state{dirty_queue = [Item | State#state.dirty_queue]},
+ doit_loop(State2)
+ end;
+
+ {From, {sync_dirty, Tid, Commit, Tab}} ->
+ case lists:member(Tab, State#state.blocked_tabs) of
+ false ->
+ do_sync_dirty(From, Tid, Commit, Tab),
+ doit_loop(State);
+ true ->
+ Item = {sync_dirty, From, Tid, Commit, Tab},
+ State2 = State#state{dirty_queue = [Item | State#state.dirty_queue]},
+ doit_loop(State2)
+ end;
+
+ {From, start_outer} -> %% Create and associate ets_tab with Tid
+ case catch ?ets_new_table(mnesia_trans_store, [bag, public]) of
+ {'EXIT', Reason} -> %% system limit
+ Msg = "Cannot create an ets table for the "
+ "local transaction store",
+ reply(From, {error, {system_limit, Msg, Reason}}, State);
+ Etab ->
+ tmlink(From),
+ C = mnesia_recover:incr_trans_tid_serial(),
+ ?ets_insert(Etab, {nodes, node()}),
+ Tid = #tid{pid = tmpid(From), counter = C},
+ A2 = [{Tid , [Etab]} | Coordinators],
+ S2 = State#state{coordinators = A2},
+ reply(From, {new_tid, Tid, Etab}, S2)
+ end;
+
+ {From, {ask_commit, Protocol, Tid, Commit, DiscNs, RamNs}} ->
+ ?eval_debug_fun({?MODULE, doit_ask_commit},
+ [{tid, Tid}, {prot, Protocol}]),
+ mnesia_checkpoint:tm_enter_pending(Tid, DiscNs, RamNs),
+ Pid =
+ case Protocol of
+ asym_trans when node(Tid#tid.pid) /= node() ->
+ Args = [tmpid(From), Tid, Commit, DiscNs, RamNs],
+ spawn_link(?MODULE, commit_participant, Args);
+ _ when node(Tid#tid.pid) /= node() -> %% *_sym_trans
+ reply(From, {vote_yes, Tid}),
+ nopid
+ end,
+ P = #participant{tid = Tid,
+ pid = Pid,
+ commit = Commit,
+ disc_nodes = DiscNs,
+ ram_nodes = RamNs,
+ protocol = Protocol},
+ State2 = State#state{participants = [P | Participants]},
+ doit_loop(State2);
+
+ {Tid, do_commit} ->
+ case mnesia_lib:key_search_delete(Tid, #participant.tid, Participants) of
+ {none, _} ->
+ verbose("Tried to commit a non participant transaction ~p~n",
+ [Tid]),
+ doit_loop(State);
+ {P, Participants2} ->
+ ?eval_debug_fun({?MODULE, do_commit, pre},
+ [{tid, Tid}, {participant, P}]),
+ case P#participant.pid of
+ nopid ->
+ Commit = P#participant.commit,
+ Member = lists:member(node(), P#participant.disc_nodes),
+ if Member == false ->
+ ignore;
+ P#participant.protocol == sym_trans ->
+ mnesia_log:log(Commit);
+ P#participant.protocol == sync_sym_trans ->
+ mnesia_log:slog(Commit)
+ end,
+ mnesia_recover:note_decision(Tid, committed),
+ do_commit(Tid, Commit),
+ if
+ P#participant.protocol == sync_sym_trans ->
+ Tid#tid.pid ! {?MODULE, node(), {committed, Tid}};
+ true ->
+ ignore
+ end,
+ mnesia_locker:release_tid(Tid),
+ transaction_terminated(Tid),
+ ?eval_debug_fun({?MODULE, do_commit, post}, [{tid, Tid}, {pid, nopid}]),
+ doit_loop(State#state{participants = Participants2});
+ Pid when pid(Pid) ->
+ Pid ! {Tid, committed},
+ ?eval_debug_fun({?MODULE, do_commit, post}, [{tid, Tid}, {pid, Pid}]),
+ doit_loop(State)
+ end
+ end;
+
+ {Tid, simple_commit} ->
+ mnesia_recover:note_decision(Tid, committed),
+ mnesia_locker:release_tid(Tid),
+ transaction_terminated(Tid),
+ doit_loop(State);
+
+ {Tid, {do_abort, Reason}} ->
+ ?eval_debug_fun({?MODULE, do_abort, pre}, [{tid, Tid}]),
+ mnesia_locker:release_tid(Tid),
+ case mnesia_lib:key_search_delete(Tid, #participant.tid, Participants) of
+ {none, _} ->
+ verbose("Tried to abort a non participant transaction ~p: ~p~n",
+ [Tid, Reason]),
+ doit_loop(State);
+ {P, Participants2} ->
+ case P#participant.pid of
+ nopid ->
+ Commit = P#participant.commit,
+ mnesia_recover:note_decision(Tid, aborted),
+ do_abort(Tid, Commit),
+ if
+ P#participant.protocol == sync_sym_trans ->
+ Tid#tid.pid ! {?MODULE, node(), {aborted, Tid}};
+ true ->
+ ignore
+ end,
+ transaction_terminated(Tid),
+ ?eval_debug_fun({?MODULE, do_abort, post}, [{tid, Tid}, {pid, nopid}]),
+ doit_loop(State#state{participants = Participants2});
+ Pid when pid(Pid) ->
+ Pid ! {Tid, {do_abort, Reason}},
+ ?eval_debug_fun({?MODULE, do_abort, post},
+ [{tid, Tid}, {pid, Pid}]),
+ doit_loop(State)
+ end
+ end;
+
+ {From, {add_store, Tid}} -> %% new store for nested transaction
+ case catch ?ets_new_table(mnesia_trans_store, [bag, public]) of
+ {'EXIT', Reason} -> %% system limit
+ Msg = "Cannot create an ets table for a nested "
+ "local transaction store",
+ reply(From, {error, {system_limit, Msg, Reason}}, State);
+ Etab ->
+ A2 = add_coord_store(Coordinators, Tid, Etab),
+ reply(From, {new_store, Etab},
+ State#state{coordinators = A2})
+ end;
+
+ {From, {del_store, Tid, Current, Obsolete, PropagateStore}} ->
+ opt_propagate_store(Current, Obsolete, PropagateStore),
+ A2 = del_coord_store(Coordinators, Tid, Current, Obsolete),
+ reply(From, store_erased, State#state{coordinators = A2});
+
+ {'EXIT', Pid, Reason} ->
+ handle_exit(Pid, Reason, State);
+
+ {From, {restart, Tid, Store}} ->
+ A2 = restore_stores(Coordinators, Tid, Store),
+ ?ets_match_delete(Store, '_'),
+ ?ets_insert(Store, {nodes, node()}),
+ reply(From, {restarted, Tid}, State#state{coordinators = A2});
+
+ {delete_transaction, Tid} ->
+ %% used to clear transactions which are committed
+ %% in coordinator or participant processes
+ case mnesia_lib:key_search_delete(Tid, #participant.tid, Participants) of
+ {none, _} ->
+ case mnesia_lib:key_search_delete(Tid, 1, Coordinators) of
+ {none, _} ->
+ verbose("** ERROR ** Tried to delete a non transaction ~p~n",
+ [Tid]),
+ doit_loop(State);
+ {{_Tid, Etabs}, A2} ->
+ erase_ets_tabs(Etabs),
+ transaction_terminated(Tid),
+ doit_loop(State#state{coordinators = A2})
+ end;
+ {_P, Participants2} ->
+ transaction_terminated(Tid),
+ State2 = State#state{participants = Participants2},
+ doit_loop(State2)
+ end;
+
+ {sync_trans_serial, Tid} ->
+ %% Do the Lamport thing here
+ mnesia_recover:sync_trans_tid_serial(Tid),
+ doit_loop(State);
+
+ {From, info} ->
+ reply(From, {info, Participants, Coordinators}, State);
+
+ {mnesia_down, N} ->
+ verbose("Got mnesia_down from ~p, reconfiguring...~n", [N]),
+ reconfigure_coordinators(N, Coordinators),
+
+ Tids = [P#participant.tid || P <- Participants],
+ reconfigure_participants(N, Participants),
+ mnesia_monitor:mnesia_down(?MODULE, {N, Tids}),
+ doit_loop(State);
+
+ {From, {unblock_me, Tab}} ->
+ case lists:member(Tab, State#state.blocked_tabs) of
+ false ->
+ verbose("Wrong dirty Op blocked on ~p ~p ~p",
+ [node(), Tab, From]),
+ reply(From, unblocked),
+ doit_loop(State);
+ true ->
+ Item = {Tab, unblock_me, From},
+ State2 = State#state{dirty_queue = [Item | State#state.dirty_queue]},
+ doit_loop(State2)
+ end;
+
+ {From, {block_tab, Tab}} ->
+ State2 = State#state{blocked_tabs = [Tab | State#state.blocked_tabs]},
+ reply(From, ok, State2);
+
+ {From, {unblock_tab, Tab}} ->
+ BlockedTabs2 = State#state.blocked_tabs -- [Tab],
+ case lists:member(Tab, BlockedTabs2) of
+ false ->
+ mnesia_controller:unblock_table(Tab),
+ Queue = process_dirty_queue(Tab, State#state.dirty_queue),
+ State2 = State#state{blocked_tabs = BlockedTabs2,
+ dirty_queue = Queue},
+ reply(From, ok, State2);
+ true ->
+ State2 = State#state{blocked_tabs = BlockedTabs2},
+ reply(From, ok, State2)
+ end;
+
+ {From, {prepare_checkpoint, Cp}} ->
+ Res = mnesia_checkpoint:tm_prepare(Cp),
+ case Res of
+ {ok, _Name, IgnoreNew, _Node} ->
+ prepare_pending_coordinators(Coordinators, IgnoreNew),
+ prepare_pending_participants(Participants, IgnoreNew);
+ {error, _Reason} ->
+ ignore
+ end,
+ reply(From, Res, State);
+
+ {system, From, Msg} ->
+ dbg_out("~p got {system, ~p, ~p}~n", [?MODULE, From, Msg]),
+ sys:handle_system_msg(Msg, From, Sup, ?MODULE, [], State);
+
+ Msg ->
+ verbose("** ERROR ** ~p got unexpected message: ~p~n", [?MODULE, Msg]),
+ doit_loop(State)
+ end.
+
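+%% For illustration, the start_outer handshake that execute_outer/6
+%% performs further down amounts to:
+%%
+%%   {new_tid, Tid, Store} = req(start_outer),
+%%   %% Tid is a #tid{} carrying the caller's pid and a fresh serial;
+%%   %% Store is a public bag ets table used as the transaction's local
+%%   %% shadow store, pre-seeded with {nodes, node()}.
+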
+do_sync_dirty(From, Tid, Commit, _Tab) ->
+ ?eval_debug_fun({?MODULE, sync_dirty, pre}, [{tid, Tid}]),
+ Res = (catch do_dirty(Tid, Commit)),
+ ?eval_debug_fun({?MODULE, sync_dirty, post}, [{tid, Tid}]),
+ From ! {?MODULE, node(), {dirty_res, Res}}.
+
+do_async_dirty(Tid, Commit, _Tab) ->
+ ?eval_debug_fun({?MODULE, async_dirty, pre}, [{tid, Tid}]),
+ catch do_dirty(Tid, Commit),
+ ?eval_debug_fun({?MODULE, async_dirty, post}, [{tid, Tid}]).
+
+%% Process items in fifo order
+process_dirty_queue(Tab, [Item | Queue]) ->
+ Queue2 = process_dirty_queue(Tab, Queue),
+ case Item of
+ {async_dirty, Tid, Commit, Tab} ->
+ do_async_dirty(Tid, Commit, Tab),
+ Queue2;
+ {sync_dirty, From, Tid, Commit, Tab} ->
+ do_sync_dirty(From, Tid, Commit, Tab),
+ Queue2;
+ {Tab, unblock_me, From} ->
+ reply(From, unblocked),
+ Queue2;
+ _ ->
+ [Item | Queue2]
+ end;
+process_dirty_queue(_Tab, []) ->
+ [].
+
+prepare_pending_coordinators([{Tid, [Store | _Etabs]} | Coords], IgnoreNew) ->
+ case catch ?ets_lookup(Store, pending) of
+ [] ->
+ prepare_pending_coordinators(Coords, IgnoreNew);
+ [Pending] ->
+ case lists:member(Tid, IgnoreNew) of
+ false ->
+ mnesia_checkpoint:tm_enter_pending(Pending);
+ true ->
+ ignore
+ end,
+ prepare_pending_coordinators(Coords, IgnoreNew);
+ {'EXIT', _} ->
+ prepare_pending_coordinators(Coords, IgnoreNew)
+ end;
+prepare_pending_coordinators([], _IgnoreNew) ->
+ ok.
+
+prepare_pending_participants([Part | Parts], IgnoreNew) ->
+ Tid = Part#participant.tid,
+ D = Part#participant.disc_nodes,
+ R = Part#participant.ram_nodes,
+ case lists:member(Tid, IgnoreNew) of
+ false ->
+ mnesia_checkpoint:tm_enter_pending(Tid, D, R);
+ true ->
+ ignore
+ end,
+ prepare_pending_participants(Parts, IgnoreNew);
+prepare_pending_participants([], _IgnoreNew) ->
+ ok.
+
+handle_exit(Pid, Reason, State) when node(Pid) /= node() ->
+ %% We got exit from a remote fool
+ dbg_out("~p got remote EXIT from unknown ~p~n",
+ [?MODULE, {Pid, Reason}]),
+ doit_loop(State);
+
+handle_exit(Pid, _Reason, State) when Pid == State#state.supervisor ->
+ %% Our supervisor has died, time to stop
+ do_stop(State);
+
+handle_exit(Pid, Reason, State) ->
+ %% Check if it is a coordinator
+ case pid_search_delete(Pid, State#state.coordinators) of
+ {none, _} ->
+ %% Check if it is a participant
+ case mnesia_lib:key_search_delete(Pid, #participant.pid, State#state.participants) of
+ {none, _} ->
+ %% We got exit from a local fool
+ verbose("** ERROR ** ~p got local EXIT from unknown process: ~p~n",
+ [?MODULE, {Pid, Reason}]),
+ doit_loop(State);
+
+ {P, RestP} when record(P, participant) ->
+ fatal("Participant ~p in transaction ~p died ~p~n",
+ [P#participant.pid, P#participant.tid, Reason]),
+ doit_loop(State#state{participants = RestP})
+ end;
+
+ {{Tid, Etabs}, RestC} ->
+ %% A local coordinator has died and
+ %% we must determine the outcome of the
+ %% transaction and tell mnesia_tm on the
+ %% other nodes about it and then recover
+ %% locally.
+ recover_coordinator(Tid, Etabs),
+ doit_loop(State#state{coordinators = RestC})
+ end.
+
+recover_coordinator(Tid, Etabs) ->
+ verbose("Coordinator ~p in transaction ~p died.~n", [Tid#tid.pid, Tid]),
+
+ Store = hd(Etabs),
+ CheckNodes = get_nodes(Store),
+ TellNodes = CheckNodes -- [node()],
+ case catch arrange(Tid, Store, async) of
+ {'EXIT', Reason} ->
+ dbg_out("Recovery of coordinator ~p failed:~n", [Tid, Reason]),
+ Protocol = asym_trans,
+ tell_outcome(Tid, Protocol, node(), CheckNodes, TellNodes);
+ {_N, Prep} ->
+ %% Tell the participants about the outcome
+ Protocol = Prep#prep.protocol,
+ Outcome = tell_outcome(Tid, Protocol, node(), CheckNodes, TellNodes),
+
+ %% Recover locally
+ CR = Prep#prep.records,
+ {DiscNs, RamNs} = commit_nodes(CR, [], []),
+ {value, Local} = lists:keysearch(node(), #commit.node, CR),
+
+ ?eval_debug_fun({?MODULE, recover_coordinator, pre},
+ [{tid, Tid}, {outcome, Outcome}, {prot, Protocol}]),
+ recover_coordinator(Tid, Protocol, Outcome, Local, DiscNs, RamNs),
+ ?eval_debug_fun({?MODULE, recover_coordinator, post},
+ [{tid, Tid}, {outcome, Outcome}, {prot, Protocol}])
+
+ end,
+ erase_ets_tabs(Etabs),
+ transaction_terminated(Tid),
+ mnesia_locker:release_tid(Tid).
+
+recover_coordinator(Tid, sym_trans, committed, Local, _, _) ->
+ mnesia_recover:note_decision(Tid, committed),
+ do_dirty(Tid, Local);
+recover_coordinator(Tid, sym_trans, aborted, _Local, _, _) ->
+ mnesia_recover:note_decision(Tid, aborted);
+recover_coordinator(Tid, sync_sym_trans, committed, Local, _, _) ->
+ mnesia_recover:note_decision(Tid, committed),
+ do_dirty(Tid, Local);
+recover_coordinator(Tid, sync_sym_trans, aborted, _Local, _, _) ->
+ mnesia_recover:note_decision(Tid, aborted);
+
+recover_coordinator(Tid, asym_trans, committed, Local, DiscNs, RamNs) ->
+ D = #decision{tid = Tid, outcome = committed,
+ disc_nodes = DiscNs, ram_nodes = RamNs},
+ mnesia_recover:log_decision(D),
+ do_commit(Tid, Local);
+recover_coordinator(Tid, asym_trans, aborted, Local, DiscNs, RamNs) ->
+ D = #decision{tid = Tid, outcome = aborted,
+ disc_nodes = DiscNs, ram_nodes = RamNs},
+ mnesia_recover:log_decision(D),
+ do_abort(Tid, Local).
+
+restore_stores([{Tid, Etstabs} | Tail], Tid, Store) ->
+ Remaining = lists:delete(Store, Etstabs),
+ erase_ets_tabs(Remaining),
+ [{Tid, [Store]} | Tail];
+restore_stores([H | T], Tid, Store) ->
+ [H | restore_stores(T, Tid, Store)].
+%% No NIL case on purpose
+
+add_coord_store([{Tid, Stores} | Coordinators], Tid, Etab) ->
+ [{Tid, [Etab | Stores]} | Coordinators];
+add_coord_store([H | T], Tid, Etab) ->
+ [H | add_coord_store(T, Tid, Etab)].
+%% no NIL case on purpose
+
+del_coord_store([{Tid, Stores} | Coordinators], Tid, Current, Obsolete) ->
+ Rest =
+ case Stores of
+ [Obsolete, Current | Tail] -> Tail;
+ [Current, Obsolete | Tail] -> Tail
+ end,
+ ?ets_delete_table(Obsolete),
+ [{Tid, [Current | Rest]} | Coordinators];
+del_coord_store([H | T], Tid, Current, Obsolete) ->
+ [H | del_coord_store(T, Tid, Current, Obsolete)].
+%% no NIL case on purpose
+
+erase_ets_tabs([H | T]) ->
+ ?ets_delete_table(H),
+ erase_ets_tabs(T);
+erase_ets_tabs([]) ->
+ ok.
+
+%% Deletes a pid from a list of participants
+%% or from a list of coordinators and returns
+%% {none, All} or {Tr, Rest}
+pid_search_delete(Pid, Trs) ->
+ pid_search_delete(Pid, Trs, none, []).
+pid_search_delete(Pid, [Tr = {Tid, _Ts} | Trs], _Val, Ack) when Tid#tid.pid == Pid ->
+ pid_search_delete(Pid, Trs, Tr, Ack);
+pid_search_delete(Pid, [Tr | Trs], Val, Ack) ->
+ pid_search_delete(Pid, Trs, Val, [Tr | Ack]);
+
+pid_search_delete(_Pid, [], Val, Ack) ->
+ {Val, Ack}.
+
+%% When TM gets an EXIT sig, we must also check to see
+%% if the crashing transaction is in the Participant list
+%%
+%% search_participant_for_pid([Participant | Tail], Pid) ->
+%% Tid = Participant#participant.tid,
+%% if
+%% Tid#tid.pid == Pid ->
+%% {coordinator, Participant};
+%% Participant#participant.pid == Pid ->
+%% {participant, Participant};
+%% true ->
+%% search_participant_for_pid(Tail, Pid)
+%% end;
+%% search_participant_for_pid([], _) ->
+%% fool.
+
+transaction_terminated(Tid) ->
+ mnesia_checkpoint:tm_exit_pending(Tid),
+ Pid = Tid#tid.pid,
+ if
+ node(Pid) == node() ->
+ unlink(Pid);
+ true -> %% Do the Lamport thing here
+ mnesia_recover:sync_trans_tid_serial(Tid)
+ end.
+
+non_transaction(OldState, Fun, Args, ActivityKind, Mod) ->
+ Id = {ActivityKind, self()},
+ NewState = {Mod, Id, non_transaction},
+ put(mnesia_activity_state, NewState),
+ %% We want something unique here; references are expensive
+ Ref = mNeSia_nOn_TrAnSacTioN,
+ RefRes = (catch {Ref, apply(Fun, Args)}),
+ case OldState of
+ undefined -> erase(mnesia_activity_state);
+ _ -> put(mnesia_activity_state, OldState)
+ end,
+ case RefRes of
+ {Ref, Res} ->
+ case Res of
+ {'EXIT', Reason} -> exit(Reason);
+ {aborted, Reason} -> mnesia:abort(Reason);
+ _ -> Res
+ end;
+ {'EXIT', Reason} ->
+ exit(Reason);
+ Throw ->
+ throw(Throw)
+ end.
+
+transaction(OldTidTs, Fun, Args, Retries, Mod, Type) ->
+ Factor = 1,
+ case OldTidTs of
+ undefined -> % Outer
+ execute_outer(Mod, Fun, Args, Factor, Retries, Type);
+ {_OldMod, Tid, Ts} -> % Nested
+ execute_inner(Mod, Tid, Ts, Fun, Args, Factor, Retries, Type);
+ _ -> % Bad nesting
+ {aborted, nested_transaction}
+ end.
+
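+%% For illustration (a rough sketch of the call path, which lives in the
+%% mnesia front end and is not part of this module): a plain
+%%
+%%   mnesia:transaction(fun() -> mnesia:write(Rec) end)
+%%
+%% reaches transaction/6 above with Args = [], Retries = infinity,
+%% Mod set to the access module (normally mnesia) and Type = async,
+%% while mnesia:sync_transaction/1 uses Type = sync.
+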
+execute_outer(Mod, Fun, Args, Factor, Retries, Type) ->
+ case req(start_outer) of
+ {error, Reason} ->
+ {aborted, Reason};
+ {new_tid, Tid, Store} ->
+ Ts = #tidstore{store = Store},
+ NewTidTs = {Mod, Tid, Ts},
+ put(mnesia_activity_state, NewTidTs),
+ execute_transaction(Fun, Args, Factor, Retries, Type)
+ end.
+
+execute_inner(Mod, Tid, Ts, Fun, Args, Factor, Retries, Type) ->
+ case req({add_store, Tid}) of
+ {error, Reason} ->
+ {aborted, Reason};
+ {new_store, Ets} ->
+ copy_ets(Ts#tidstore.store, Ets),
+ Up = [Ts#tidstore.store | Ts#tidstore.up_stores],
+ NewTs = Ts#tidstore{level = 1 + Ts#tidstore.level,
+ store = Ets,
+ up_stores = Up},
+ NewTidTs = {Mod, Tid, NewTs},
+ put(mnesia_activity_state, NewTidTs),
+ execute_transaction(Fun, Args, Factor, Retries, Type)
+ end.
+
+copy_ets(From, To) ->
+ do_copy_ets(?ets_first(From), From, To).
+do_copy_ets('$end_of_table', _,_) ->
+ ok;
+do_copy_ets(K, From, To) ->
+ Objs = ?ets_lookup(From, K),
+ insert_objs(Objs, To),
+ do_copy_ets(?ets_next(From, K), From, To).
+
+insert_objs([H|T], Tab) ->
+ ?ets_insert(Tab, H),
+ insert_objs(T, Tab);
+insert_objs([], _Tab) ->
+ ok.
+
+execute_transaction(Fun, Args, Factor, Retries, Type) ->
+ case catch apply_fun(Fun, Args, Type) of
+ {'EXIT', Reason} ->
+ check_exit(Fun, Args, Factor, Retries, Reason, Type);
+ {'atomic', Value} ->
+ mnesia_lib:incr_counter(trans_commits),
+ erase(mnesia_activity_state),
+ %% no need to clear locks, already done by commit ...
+ %% Flush any unprocessed mnesia_down messages we might have
+ flush_downs(),
+ {'atomic', Value};
+ {nested_atomic, Value} ->
+ mnesia_lib:incr_counter(trans_commits),
+ {'atomic', Value};
+ Value -> %% User called throw
+ Reason = {aborted, {throw, Value}},
+ return_abort(Fun, Args, Reason)
+ end.
+
+apply_fun(Fun, Args, Type) ->
+ Result = apply(Fun, Args),
+ case t_commit(Type) of
+ do_commit ->
+ {'atomic', Result};
+ do_commit_nested ->
+ {nested_atomic, Result};
+ {do_abort, {aborted, Reason}} ->
+ {'EXIT', {aborted, Reason}};
+ {do_abort, Reason} ->
+ {'EXIT', {aborted, Reason}}
+ end.
+
+check_exit(Fun, Args, Factor, Retries, Reason, Type) ->
+ case Reason of
+ {aborted, C} when record(C, cyclic) ->
+ maybe_restart(Fun, Args, Factor, Retries, Type, C);
+ {aborted, {node_not_running, N}} ->
+ maybe_restart(Fun, Args, Factor, Retries, Type, {node_not_running, N});
+ {aborted, {bad_commit, N}} ->
+ maybe_restart(Fun, Args, Factor, Retries, Type, {bad_commit, N});
+ _ ->
+ return_abort(Fun, Args, Reason)
+ end.
+
+maybe_restart(Fun, Args, Factor, Retries, Type, Why) ->
+ {Mod, Tid, Ts} = get(mnesia_activity_state),
+ case try_again(Retries) of
+ yes when Ts#tidstore.level == 1 ->
+ restart(Mod, Tid, Ts, Fun, Args, Factor, Retries, Type, Why);
+ yes ->
+ return_abort(Fun, Args, Why);
+ no ->
+ return_abort(Fun, Args, {aborted, nomore})
+ end.
+
+try_again(infinity) -> yes;
+try_again(X) when number(X), X > 1 -> yes;
+try_again(_) -> no.
+
+%% We can only restart top-level transactions.
+%% If a deadlock situation occurs in a nested transaction,
+%% the whole thing, including all nested transactions, needs to be
+%% restarted. The stack is thus popped by a consecutive series of
+%% exit({aborted, #cyclic{}}) calls.
+
+restart(Mod, Tid, Ts, Fun, Args, Factor0, Retries0, Type, Why) ->
+ mnesia_lib:incr_counter(trans_restarts),
+ Retries = decr(Retries0),
+ case Why of
+ {bad_commit, _N} ->
+ return_abort(Fun, Args, Why),
+ Factor = 1,
+ SleepTime = mnesia_lib:random_time(Factor, Tid#tid.counter),
+ dbg_out("Restarting transaction ~w: in ~wms ~w~n", [Tid, SleepTime, Why]),
+ timer:sleep(SleepTime),
+ execute_outer(Mod, Fun, Args, Factor, Retries, Type);
+ {node_not_running, _N} -> %% Avoids hanging in receive_release_tid_ack
+ return_abort(Fun, Args, Why),
+ Factor = 1,
+ SleepTime = mnesia_lib:random_time(Factor, Tid#tid.counter),
+ dbg_out("Restarting transaction ~w: in ~wms ~w~n", [Tid, SleepTime, Why]),
+ timer:sleep(SleepTime),
+ execute_outer(Mod, Fun, Args, Factor, Retries, Type);
+ _ ->
+ SleepTime = mnesia_lib:random_time(Factor0, Tid#tid.counter),
+ dbg_out("Restarting transaction ~w: in ~wms ~w~n", [Tid, SleepTime, Why]),
+
+ if
+ Factor0 /= 10 ->
+ ignore;
+ true ->
+ %% Our serial may be much larger than those of the other nodes
+ AllNodes = val({current, db_nodes}),
+ verbose("Sync serial ~p~n", [Tid]),
+ rpc:abcast(AllNodes, ?MODULE, {sync_trans_serial, Tid})
+ end,
+ intercept_friends(Tid, Ts),
+ Store = Ts#tidstore.store,
+ Nodes = get_nodes(Store),
+ ?MODULE ! {self(), {restart, Tid, Store}},
+ mnesia_locker:send_release_tid(Nodes, Tid),
+ timer:sleep(SleepTime),
+ mnesia_locker:receive_release_tid_acc(Nodes, Tid),
+ case rec() of
+ {restarted, Tid} ->
+ execute_transaction(Fun, Args, Factor0 + 1,
+ Retries, Type);
+ {error, Reason} ->
+ mnesia:abort(Reason)
+ end
+ end.
+
+decr(infinity) -> infinity;
+decr(X) when integer(X), X > 1 -> X - 1;
+decr(_X) -> 0.
+
+return_abort(Fun, Args, Reason) ->
+ {Mod, Tid, Ts} = get(mnesia_activity_state),
+ OldStore = Ts#tidstore.store,
+ Nodes = get_nodes(OldStore),
+ intercept_friends(Tid, Ts),
+ catch mnesia_lib:incr_counter(trans_failures),
+ Level = Ts#tidstore.level,
+ if
+ Level == 1 ->
+ mnesia_locker:async_release_tid(Nodes, Tid),
+ ?MODULE ! {delete_transaction, Tid},
+ erase(mnesia_activity_state),
+ dbg_out("Transaction ~p calling ~p with ~p, failed ~p~n",
+ [Tid, Fun, Args, Reason]),
+ flush_downs(),
+ {aborted, mnesia_lib:fix_error(Reason)};
+ true ->
+ %% Nested transaction
+ [NewStore | Tail] = Ts#tidstore.up_stores,
+ req({del_store, Tid, NewStore, OldStore, true}),
+ Ts2 = Ts#tidstore{store = NewStore,
+ up_stores = Tail,
+ level = Level - 1},
+ NewTidTs = {Mod, Tid, Ts2},
+ put(mnesia_activity_state, NewTidTs),
+ case Reason of
+ #cyclic{} ->
+ exit({aborted, Reason});
+ {node_not_running, _N} ->
+ exit({aborted, Reason});
+ {bad_commit, _N}->
+ exit({aborted, Reason});
+ _ ->
+ {aborted, mnesia_lib:fix_error(Reason)}
+ end
+ end.
+
+flush_downs() ->
+ receive
+ {?MODULE, _, _} -> flush_downs(); % Votes
+ {mnesia_down, _} -> flush_downs()
+ after 0 -> flushed
+ end.
+
+put_activity_id(undefined) ->
+ erase_activity_id();
+put_activity_id({Mod, Tid, Ts}) when record(Tid, tid), record(Ts, tidstore) ->
+ flush_downs(),
+ Store = Ts#tidstore.store,
+ ?ets_insert(Store, {friends, self()}),
+ NewTidTs = {Mod, Tid, Ts},
+ put(mnesia_activity_state, NewTidTs);
+put_activity_id(SimpleState) ->
+ put(mnesia_activity_state, SimpleState).
+
+erase_activity_id() ->
+ flush_downs(),
+ erase(mnesia_activity_state).
+
+get_nodes(Store) ->
+ case catch ?ets_lookup_element(Store, nodes, 2) of
+ {'EXIT', _} -> [node()];
+ Nodes -> Nodes
+ end.
+
+get_friends(Store) ->
+ case catch ?ets_lookup_element(Store, friends, 2) of
+ {'EXIT', _} -> [];
+ Friends -> Friends
+ end.
+
+opt_propagate_store(_Current, _Obsolete, false) ->
+ ok;
+opt_propagate_store(Current, Obsolete, true) ->
+ propagate_store(Current, nodes, get_nodes(Obsolete)),
+ propagate_store(Current, friends, get_friends(Obsolete)).
+
+propagate_store(Store, Var, [Val | Vals]) ->
+ ?ets_insert(Store, {Var, Val}),
+ propagate_store(Store, Var, Vals);
+propagate_store(_Store, _Var, []) ->
+ ok.
+
+%% Tell all processes that are cooperating with the current transaction
+intercept_friends(_Tid, Ts) ->
+ Friends = get_friends(Ts#tidstore.store),
+ Message = {activity_ended, undefined, self()},
+ intercept_best_friend(Friends, Message).
+
+intercept_best_friend([], _Message) ->
+ ok;
+intercept_best_friend([Pid | _], Message) ->
+ Pid ! Message,
+ wait_for_best_friend(Pid, 0).
+
+wait_for_best_friend(Pid, Timeout) ->
+ receive
+ {'EXIT', Pid, _} -> ok;
+ {activity_ended, _, Pid} -> ok
+ after Timeout ->
+ case my_process_is_alive(Pid) of
+ true -> wait_for_best_friend(Pid, 1000);
+ false -> ok
+ end
+ end.
+
+my_process_is_alive(Pid) ->
+ case catch erlang:is_process_alive(Pid) of % New BIF in R5
+ true ->
+ true;
+ false ->
+ false;
+ {'EXIT', _} -> % Pre R5 backward compatibility
+ case process_info(Pid, message_queue_len) of
+ undefined -> false;
+ _ -> true
+ end
+ end.
+
+dirty(Protocol, Item) ->
+ {{Tab, Key}, _Val, _Op} = Item,
+ Tid = {dirty, self()},
+ Prep = prepare_items(Tid, Tab, Key, [Item], #prep{protocol= Protocol}),
+ CR = Prep#prep.records,
+ case Protocol of
+ async_dirty ->
+ %% Send commit records to the other involved nodes,
+ %% but only wait for one node to complete.
+ %% Preferably the local node, if possible.
+
+ ReadNode = val({Tab, where_to_read}),
+ {WaitFor, FirstRes} = async_send_dirty(Tid, CR, Tab, ReadNode),
+ rec_dirty(WaitFor, FirstRes);
+
+ sync_dirty ->
+ %% Send commit records to the other involved nodes,
+ %% and wait for all nodes to complete
+ {WaitFor, FirstRes} = sync_send_dirty(Tid, CR, Tab, []),
+ rec_dirty(WaitFor, FirstRes);
+ _ ->
+ mnesia:abort({bad_activity, Protocol})
+ end.
+
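+%% For illustration (a rough sketch): a top-level mnesia:dirty_write(Rec)
+%% ends up here as dirty(async_dirty, {{Tab, Key}, Rec, write}) with
+%% Key = element(2, Rec), whereas the sync_dirty protocol additionally
+%% waits for all involved nodes to complete, as described above.
+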
+%% This is the commit function. The first thing it does
+%% is to find out which nodes have been participating
+%% in this particular transaction; all of the mnesia_locker:lock*
+%% functions insert the names of the nodes where they acquire locks
+%% into the local shadow Store.
+%% This function executes in the context of the user process.
+t_commit(Type) ->
+ {Mod, Tid, Ts} = get(mnesia_activity_state),
+ Store = Ts#tidstore.store,
+ if
+ Ts#tidstore.level == 1 ->
+ intercept_friends(Tid, Ts),
+ %% N is number of updates
+ case arrange(Tid, Store, Type) of
+ {N, Prep} when N > 0 ->
+ multi_commit(Prep#prep.protocol,
+ Tid, Prep#prep.records, Store);
+ {0, Prep} ->
+ multi_commit(read_only, Tid, Prep#prep.records, Store)
+ end;
+ true ->
+ %% nested commit
+ Level = Ts#tidstore.level,
+ [Obsolete | Tail] = Ts#tidstore.up_stores,
+ req({del_store, Tid, Store, Obsolete, false}),
+ NewTs = Ts#tidstore{store = Store,
+ up_stores = Tail,
+ level = Level - 1},
+ NewTidTs = {Mod, Tid, NewTs},
+ put(mnesia_activity_state, NewTidTs),
+ do_commit_nested
+ end.
+
+%% This function arranges all objects we shall write in the Store
+%% into a list of {Node, CommitRecord}.
+%% Important function for the performance of mnesia.
+
+arrange(Tid, Store, Type) ->
+ %% The local node is always included
+ Nodes = get_nodes(Store),
+ Recs = prep_recs(Nodes, []),
+ Key = ?ets_first(Store),
+ N = 0,
+ Prep =
+ case Type of
+ async -> #prep{protocol = sym_trans, records = Recs};
+ sync -> #prep{protocol = sync_sym_trans, records = Recs}
+ end,
+ case catch do_arrange(Tid, Store, Key, Prep, N) of
+ {'EXIT', Reason} ->
+ dbg_out("do_arrange failed ~p ~p~n", [Reason, Tid]),
+ case Reason of
+ {aborted, R} ->
+ mnesia:abort(R);
+ _ ->
+ mnesia:abort(Reason)
+ end;
+ {New, Prepared} ->
+ {New, Prepared#prep{records = reverse(Prepared#prep.records)}}
+ end.
+
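+%% For illustration, a successful arrange/3 returns {N, Prep} where N is
+%% the number of update operations found in the store and
+%% Prep#prep.records holds one #commit{} record per involved node,
+%% roughly of the shape:
+%%
+%%   #commit{node = Node, decision = presume_commit,
+%%           ram_copies = [...], disc_copies = [...],
+%%           disc_only_copies = [...], snmp = [...], schema_ops = [...]}
+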
+reverse([]) ->
+ [];
+reverse([H|R]) when record(H, commit) ->
+ [
+ H#commit{
+ ram_copies = lists:reverse(H#commit.ram_copies),
+ disc_copies = lists:reverse(H#commit.disc_copies),
+ disc_only_copies = lists:reverse(H#commit.disc_only_copies),
+ snmp = lists:reverse(H#commit.snmp)
+ }
+ | reverse(R)].
+
+prep_recs([N | Nodes], Recs) ->
+ prep_recs(Nodes, [#commit{decision = presume_commit, node = N} | Recs]);
+prep_recs([], Recs) ->
+ Recs.
+
+%% storage_types is a list of {Node, Storage} tuples
+%% where each tuple represents an active replica
+do_arrange(Tid, Store, {Tab, Key}, Prep, N) ->
+ Oid = {Tab, Key},
+ Items = ?ets_lookup(Store, Oid), %% Store is a bag
+ P2 = prepare_items(Tid, Tab, Key, Items, Prep),
+ do_arrange(Tid, Store, ?ets_next(Store, Oid), P2, N + 1);
+do_arrange(Tid, Store, SchemaKey, Prep, N) when SchemaKey == op ->
+ Items = ?ets_lookup(Store, SchemaKey), %% Store is a bag
+ P2 = prepare_schema_items(Tid, Items, Prep),
+ do_arrange(Tid, Store, ?ets_next(Store, SchemaKey), P2, N + 1);
+do_arrange(Tid, Store, RestoreKey, Prep, N) when RestoreKey == restore_op ->
+ [{restore_op, R}] = ?ets_lookup(Store, RestoreKey),
+ Fun = fun({Tab, Key}, CommitRecs, _RecName, Where, Snmp) ->
+ Item = [{{Tab, Key}, {Tab, Key}, delete}],
+ do_prepare_items(Tid, Tab, Key, Where, Snmp, Item, CommitRecs);
+ (BupRec, CommitRecs, RecName, Where, Snmp) ->
+ Tab = element(1, BupRec),
+ Key = element(2, BupRec),
+ Item =
+ if
+ Tab == RecName ->
+ [{{Tab, Key}, BupRec, write}];
+ true ->
+ BupRec2 = setelement(1, BupRec, RecName),
+ [{{Tab, Key}, BupRec2, write}]
+ end,
+ do_prepare_items(Tid, Tab, Key, Where, Snmp, Item, CommitRecs)
+ end,
+ Recs2 = mnesia_schema:arrange_restore(R, Fun, Prep#prep.records),
+ P2 = Prep#prep{protocol = asym_trans, records = Recs2},
+ do_arrange(Tid, Store, ?ets_next(Store, RestoreKey), P2, N + 1);
+do_arrange(_Tid, _Store, '$end_of_table', Prep, N) ->
+ {N, Prep};
+do_arrange(Tid, Store, IgnoredKey, Prep, N) -> %% locks, nodes ... local atoms...
+ do_arrange(Tid, Store, ?ets_next(Store, IgnoredKey), Prep, N).
+
+%% Returns a prep record with all items in reverse order
+prepare_schema_items(Tid, Items, Prep) ->
+ Types = [{N, schema_ops} || N <- val({current, db_nodes})],
+ Recs = prepare_nodes(Tid, Types, Items, Prep#prep.records, schema),
+ Prep#prep{protocol = asym_trans, records = Recs}.
+
+%% Returns a prep record with all items in reverse order
+prepare_items(Tid, Tab, Key, Items, Prep) when Prep#prep.prev_tab == Tab ->
+ Types = Prep#prep.prev_types,
+ Snmp = Prep#prep.prev_snmp,
+ Recs = Prep#prep.records,
+ Recs2 = do_prepare_items(Tid, Tab, Key, Types, Snmp, Items, Recs),
+ Prep#prep{records = Recs2};
+
+prepare_items(Tid, Tab, Key, Items, Prep) ->
+ Types = val({Tab, where_to_commit}),
+ case Types of
+ [] -> mnesia:abort({no_exists, Tab});
+ {blocked, _} ->
+ unblocked = req({unblock_me, Tab}),
+ prepare_items(Tid, Tab, Key, Items, Prep);
+ _ ->
+ Snmp = val({Tab, snmp}),
+ Recs2 = do_prepare_items(Tid, Tab, Key, Types,
+ Snmp, Items, Prep#prep.records),
+ Prep2 = Prep#prep{records = Recs2, prev_tab = Tab,
+ prev_types = Types, prev_snmp = Snmp},
+ check_prep(Prep2, Types)
+ end.
+
+do_prepare_items(Tid, Tab, Key, Types, Snmp, Items, Recs) ->
+ Recs2 = prepare_snmp(Tid, Tab, Key, Types, Snmp, Items, Recs), % May exit
+ prepare_nodes(Tid, Types, Items, Recs2, normal).
+
+prepare_snmp(Tab, Key, Items) ->
+ case val({Tab, snmp}) of
+ [] ->
+ [];
+ Ustruct when Key /= '_' ->
+ {_Oid, _Val, Op} = hd(Items),
+ %% Still making snmp oid (not used) because we want to catch errors here
+ %% It also keeps backwards compatibility with old nodes.
+ SnmpOid = mnesia_snmp_hook:key_to_oid(Tab, Key, Ustruct), % May exit
+ [{Op, Tab, Key, SnmpOid}];
+ _ ->
+ [{clear_table, Tab}]
+ end.
+
+prepare_snmp(_Tid, _Tab, _Key, _Types, [], _Items, Recs) ->
+ Recs;
+
+prepare_snmp(Tid, Tab, Key, Types, Us, Items, Recs) ->
+ if Key /= '_' ->
+ {_Oid, _Val, Op} = hd(Items),
+ SnmpOid = mnesia_snmp_hook:key_to_oid(Tab, Key, Us), % May exit
+ prepare_nodes(Tid, Types, [{Op, Tab, Key, SnmpOid}], Recs, snmp);
+ Key == '_' ->
+ prepare_nodes(Tid, Types, [{clear_table, Tab}], Recs, snmp)
+ end.
+
+check_prep(Prep, Types) when Prep#prep.types == Types ->
+ Prep;
+check_prep(Prep, Types) when Prep#prep.types == undefined ->
+ Prep#prep{types = Types};
+check_prep(Prep, _Types) ->
+ Prep#prep{protocol = asym_trans}.
+
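+%% For illustration (hypothetical nodes): check_prep/2 keeps the cheap
+%% sym_trans protocol only as long as every table touched by the
+%% transaction commits to the same {Node, Storage} set, e.g.
+%%
+%%   [{a@host, ram_copies}, {b@host, ram_copies}]
+%%
+%% for every table. As soon as one table commits to a different set,
+%% e.g. [{a@host, ram_copies}, {b@host, disc_copies}], the protocol is
+%% escalated to asym_trans.
+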
+%% Returns a list of commit records
+prepare_nodes(Tid, [{Node, Storage} | Rest], Items, C, Kind) ->
+ {Rec, C2} = pick_node(Tid, Node, C, []),
+ Rec2 = prepare_node(Node, Storage, Items, Rec, Kind),
+ [Rec2 | prepare_nodes(Tid, Rest, Items, C2, Kind)];
+prepare_nodes(_Tid, [], _Items, CommitRecords, _Kind) ->
+ CommitRecords.
+
+pick_node(Tid, Node, [Rec | Rest], Done) ->
+ if
+ Rec#commit.node == Node ->
+ {Rec, Done ++ Rest};
+ true ->
+ pick_node(Tid, Node, Rest, [Rec | Done])
+ end;
+pick_node(_Tid, Node, [], Done) ->
+ {#commit{decision = presume_commit, node = Node}, Done}.
+
+prepare_node(Node, Storage, [Item | Items], Rec, Kind) when Kind == snmp ->
+ Rec2 = Rec#commit{snmp = [Item | Rec#commit.snmp]},
+ prepare_node(Node, Storage, Items, Rec2, Kind);
+prepare_node(Node, Storage, [Item | Items], Rec, Kind) when Kind /= schema ->
+ Rec2 =
+ case Storage of
+ ram_copies ->
+ Rec#commit{ram_copies = [Item | Rec#commit.ram_copies]};
+ disc_copies ->
+ Rec#commit{disc_copies = [Item | Rec#commit.disc_copies]};
+ disc_only_copies ->
+ Rec#commit{disc_only_copies =
+ [Item | Rec#commit.disc_only_copies]}
+ end,
+ prepare_node(Node, Storage, Items, Rec2, Kind);
+prepare_node(_Node, _Storage, Items, Rec, Kind)
+ when Kind == schema, Rec#commit.schema_ops == [] ->
+ Rec#commit{schema_ops = Items};
+prepare_node(_Node, _Storage, [], Rec, _Kind) ->
+ Rec.
+
+%% multi_commit(Protocol, Tid, CommitRecords, Store)
+%% Local work is always performed in users process
+multi_commit(read_only, Tid, CR, _Store) ->
+ %% This featherweight commit protocol is used when no
+ %% updates have been performed in the transaction.
+
+ {DiscNs, RamNs} = commit_nodes(CR, [], []),
+ Msg = {Tid, simple_commit},
+ rpc:abcast(DiscNs -- [node()], ?MODULE, Msg),
+ rpc:abcast(RamNs -- [node()], ?MODULE, Msg),
+ mnesia_recover:note_decision(Tid, committed),
+ mnesia_locker:release_tid(Tid),
+ ?MODULE ! {delete_transaction, Tid},
+ do_commit;
+
+multi_commit(sym_trans, Tid, CR, Store) ->
+ %% This lightweight commit protocol is used when all
+ %% the involved tables are replicated symmetrically.
+ %% Their storage types must match on each node.
+ %%
+ %% 1 Ask the other involved nodes if they want to commit
+ %% All involved nodes vote yes if they are up
+ %% 2a Somebody has voted no
+ %% Tell all yes voters to do_abort
+ %% 2b Everybody has voted yes
+ %% Tell everybody to do_commit. I.e. that they should
+ %% prepare the commit, log the commit record and
+ %% perform the updates.
+ %%
+ %% The outcome is kept 3 minutes in the transient decision table.
+ %%
+ %% Recovery:
+ %% If somebody dies before the coordinator has
+ %% broadcasted do_commit, the transaction is aborted.
+ %%
+ %% If a participant dies, the table load algorithm
+ %% ensures that the contents of the involved tables
+ %% are picked from another node.
+ %%
+ %% If the coordinator dies, each participant checks
+ %% the outcome with all the others. If all are uncertain
+ %% about the outcome, the transaction is aborted. If
+ %% somebody knows the outcome the others will follow.
+
+ {DiscNs, RamNs} = commit_nodes(CR, [], []),
+ Pending = mnesia_checkpoint:tm_enter_pending(Tid, DiscNs, RamNs),
+ ?ets_insert(Store, Pending),
+
+ {WaitFor, Local} = ask_commit(sym_trans, Tid, CR, DiscNs, RamNs),
+ {Outcome, []} = rec_all(WaitFor, Tid, do_commit, []),
+ ?eval_debug_fun({?MODULE, multi_commit_sym},
+ [{tid, Tid}, {outcome, Outcome}]),
+ rpc:abcast(DiscNs -- [node()], ?MODULE, {Tid, Outcome}),
+ rpc:abcast(RamNs -- [node()], ?MODULE, {Tid, Outcome}),
+ case Outcome of
+ do_commit ->
+ mnesia_recover:note_decision(Tid, committed),
+ do_dirty(Tid, Local),
+ mnesia_locker:release_tid(Tid),
+ ?MODULE ! {delete_transaction, Tid};
+ {do_abort, _Reason} ->
+ mnesia_recover:note_decision(Tid, aborted)
+ end,
+ ?eval_debug_fun({?MODULE, multi_commit_sym, post},
+ [{tid, Tid}, {outcome, Outcome}]),
+ Outcome;
+
+multi_commit(sync_sym_trans, Tid, CR, Store) ->
+ %% This protocol is the same as sym_trans except that it
+ %% uses synchronized calls to disk_log and synchronized commits
+ %% when several nodes are involved.
+
+ {DiscNs, RamNs} = commit_nodes(CR, [], []),
+ Pending = mnesia_checkpoint:tm_enter_pending(Tid, DiscNs, RamNs),
+ ?ets_insert(Store, Pending),
+
+ {WaitFor, Local} = ask_commit(sync_sym_trans, Tid, CR, DiscNs, RamNs),
+ {Outcome, []} = rec_all(WaitFor, Tid, do_commit, []),
+ ?eval_debug_fun({?MODULE, multi_commit_sym_sync},
+ [{tid, Tid}, {outcome, Outcome}]),
+ rpc:abcast(DiscNs -- [node()], ?MODULE, {Tid, Outcome}),
+ rpc:abcast(RamNs -- [node()], ?MODULE, {Tid, Outcome}),
+ case Outcome of
+ do_commit ->
+ mnesia_recover:note_decision(Tid, committed),
+ mnesia_log:slog(Local),
+ do_commit(Tid, Local),
+ %% Just wait for completion; the result is ignored.
+ rec_all(WaitFor, Tid, ignore, []),
+ mnesia_locker:release_tid(Tid),
+ ?MODULE ! {delete_transaction, Tid};
+ {do_abort, _Reason} ->
+ mnesia_recover:note_decision(Tid, aborted)
+ end,
+ ?eval_debug_fun({?MODULE, multi_commit_sym, post},
+ [{tid, Tid}, {outcome, Outcome}]),
+ Outcome;
+
+multi_commit(asym_trans, Tid, CR, Store) ->
+ %% This more expensive commit protocol is used when
+ %% table definitions are changed (schema transactions).
+ %% It is also used when the involved tables are
+ %% replicated asymmetrically. If the storage type differs
+ %% on at least one node, this protocol is used.
+ %%
+ %% 1 Ask the other involved nodes if they want to commit.
+ %% All involved nodes prepare the commit, log a presume_abort
+ %% commit record and vote yes or no depending on the
+ %% outcome of the prepare. The preparation is also performed
+ %% by the coordinator.
+ %%
+ %% 2a Somebody has died or voted no
+ %% Tell all yes voters to do_abort
+ %% 2b Everybody has voted yes
+ %% Put an unclear marker in the log.
+ %% Tell the others to pre_commit. I.e. that they should
+ %% put an unclear marker in the log and reply
+ %% acc_pre_commit when they are done.
+ %%
+ %% 3a Somebody died
+ %% Tell the remaining participants to do_abort
+ %% 3b Everybody has replied acc_pre_commit
+ %% Tell everybody committed, i.e. that they should
+ %% put a committed marker in the log, perform the updates
+ %% and reply done_commit when they are done. The coordinator
+ %% must wait to put its own committed marker into the log
+ %% until committed has been sent to all the others.
+ %% Then it performs the local commit before collecting replies.
+ %%
+ %% 4 Everybody has either died or replied done_commit
+ %% Return to the caller.
+ %%
+ %% Recovery:
+ %% If the coordinator dies, the participants (and
+ %% the coordinator when it starts again) must do
+ %% the following:
+ %%
+ %% If we have no unclear marker in the log we may
+ %% safely abort, since we know that nobody may have
+ %% decided to commit yet.
+ %%
+ %% If we have a committed marker in the log we may
+ %% safely commit since we know that everybody else
+ %% also will come to this conclusion.
+ %%
+ %% If we have an unclear marker but no committed marker
+ %% in the log, we are uncertain about the real outcome
+ %% of the transaction and must ask the others before
+ %% we can decide what to do. If someone knows the
+ %% outcome we will do the same. If nobody knows, we
+ %% will wait for the remaining involved nodes to come
+ %% up. When all involved nodes are up and uncertain,
+ %% we decide to commit (first put a committed marker
+ %% in the log, then do the updates).
+
+ D = #decision{tid = Tid, outcome = presume_abort},
+ {D2, CR2} = commit_decision(D, CR, [], []),
+ DiscNs = D2#decision.disc_nodes,
+ RamNs = D2#decision.ram_nodes,
+ Pending = mnesia_checkpoint:tm_enter_pending(Tid, DiscNs, RamNs),
+ ?ets_insert(Store, Pending),
+ {WaitFor, Local} = ask_commit(asym_trans, Tid, CR2, DiscNs, RamNs),
+ SchemaPrep = (catch mnesia_schema:prepare_commit(Tid, Local, {coord, WaitFor})),
+ {Votes, Pids} = rec_all(WaitFor, Tid, do_commit, []),
+
+ ?eval_debug_fun({?MODULE, multi_commit_asym_got_votes},
+ [{tid, Tid}, {votes, Votes}]),
+ case Votes of
+ do_commit ->
+ case SchemaPrep of
+ {_Modified, C, DumperMode} when record(C, commit) ->
+ mnesia_log:log(C), % C is not a binary
+ ?eval_debug_fun({?MODULE, multi_commit_asym_log_commit_rec},
+ [{tid, Tid}]),
+
+ D3 = C#commit.decision,
+ D4 = D3#decision{outcome = unclear},
+ mnesia_recover:log_decision(D4),
+ ?eval_debug_fun({?MODULE, multi_commit_asym_log_commit_dec},
+ [{tid, Tid}]),
+ tell_participants(Pids, {Tid, pre_commit}),
+ %% Now we are uncertain and we do not know
+ %% if all participants have logged that
+ %% they are uncertain or not
+ rec_acc_pre_commit(Pids, Tid, Store, C,
+ do_commit, DumperMode, [], []);
+ {'EXIT', Reason} ->
+ %% The others have logged the commit
+ %% record but they are not uncertain
+ mnesia_recover:note_decision(Tid, aborted),
+ ?eval_debug_fun({?MODULE, multi_commit_asym_prepare_exit},
+ [{tid, Tid}]),
+ tell_participants(Pids, {Tid, {do_abort, Reason}}),
+ do_abort(Tid, Local),
+ {do_abort, Reason}
+ end;
+
+ {do_abort, Reason} ->
+ %% The others have logged the commit
+ %% record but they are not uncertain
+ mnesia_recover:note_decision(Tid, aborted),
+ ?eval_debug_fun({?MODULE, multi_commit_asym_do_abort}, [{tid, Tid}]),
+ tell_participants(Pids, {Tid, {do_abort, Reason}}),
+ do_abort(Tid, Local),
+ {do_abort, Reason}
+ end.
+
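+%% For illustration, the asym_trans exchange above between the
+%% coordinator and each remote participant is roughly (participant
+%% replies are wrapped as {?MODULE, Node, Reply} by reply/2):
+%%
+%%   coordinator -> {From, {ask_commit, asym_trans, Tid, Commit, DiscNs, RamNs}}
+%%   participant -> {vote_yes, Tid, Pid} | {vote_no, Tid, Reason}
+%%   coordinator -> {Tid, pre_commit}
+%%   participant -> {acc_pre_commit, Tid, Pid, ExpectSchemaAck}
+%%   coordinator -> {Tid, committed} | {Tid, {do_abort, Reason}}
+%%   participant -> {schema_commit, Tid, Pid}   (only when schema ops are involved)
+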
+%% Returns do_commit or {do_abort, Reason}
+rec_acc_pre_commit([Pid | Tail], Tid, Store, Commit, Res, DumperMode,
+ GoodPids, SchemaAckPids) ->
+ receive
+ {?MODULE, _, {acc_pre_commit, Tid, Pid, true}} ->
+ rec_acc_pre_commit(Tail, Tid, Store, Commit, Res, DumperMode,
+ [Pid | GoodPids], [Pid | SchemaAckPids]);
+
+ {?MODULE, _, {acc_pre_commit, Tid, Pid, false}} ->
+ rec_acc_pre_commit(Tail, Tid, Store, Commit, Res, DumperMode,
+ [Pid | GoodPids], SchemaAckPids);
+
+ {?MODULE, _, {acc_pre_commit, Tid, Pid}} ->
+ %% Kept for backwards compatibility. Remove after Mnesia 4.x
+ rec_acc_pre_commit(Tail, Tid, Store, Commit, Res, DumperMode,
+ [Pid | GoodPids], [Pid | SchemaAckPids]);
+
+ {mnesia_down, Node} when Node == node(Pid) ->
+ AbortRes = {do_abort, {bad_commit, Node}},
+ rec_acc_pre_commit(Tail, Tid, Store, Commit, AbortRes, DumperMode,
+ GoodPids, SchemaAckPids)
+ end;
+rec_acc_pre_commit([], Tid, Store, Commit, Res, DumperMode, GoodPids, SchemaAckPids) ->
+ D = Commit#commit.decision,
+ case Res of
+ do_commit ->
+ %% Now everybody knows that the others
+ %% have voted yes. We also know that
+ %% everybody is uncertain.
+ prepare_sync_schema_commit(Store, SchemaAckPids),
+ tell_participants(GoodPids, {Tid, committed}),
+ D2 = D#decision{outcome = committed},
+ mnesia_recover:log_decision(D2),
+ ?eval_debug_fun({?MODULE, rec_acc_pre_commit_log_commit},
+ [{tid, Tid}]),
+
+ %% Now we have safely logged committed
+ %% and we can recover without asking others
+ do_commit(Tid, Commit, DumperMode),
+ ?eval_debug_fun({?MODULE, rec_acc_pre_commit_done_commit},
+ [{tid, Tid}]),
+ sync_schema_commit(Tid, Store, SchemaAckPids),
+ mnesia_locker:release_tid(Tid),
+ ?MODULE ! {delete_transaction, Tid};
+
+ {do_abort, Reason} ->
+ tell_participants(GoodPids, {Tid, {do_abort, Reason}}),
+ D2 = D#decision{outcome = aborted},
+ mnesia_recover:log_decision(D2),
+ ?eval_debug_fun({?MODULE, rec_acc_pre_commit_log_abort},
+ [{tid, Tid}]),
+ do_abort(Tid, Commit),
+ ?eval_debug_fun({?MODULE, rec_acc_pre_commit_done_abort},
+ [{tid, Tid}])
+ end,
+ Res.
+
+%% Note all nodes in case of mnesia_down mgt
+prepare_sync_schema_commit(_Store, []) ->
+ ok;
+prepare_sync_schema_commit(Store, [Pid | Pids]) ->
+ ?ets_insert(Store, {waiting_for_commit_ack, node(Pid)}),
+ prepare_sync_schema_commit(Store, Pids).
+
+sync_schema_commit(_Tid, _Store, []) ->
+ ok;
+sync_schema_commit(Tid, Store, [Pid | Tail]) ->
+ receive
+ {?MODULE, _, {schema_commit, Tid, Pid}} ->
+ ?ets_match_delete(Store, {waiting_for_commit_ack, node(Pid)}),
+ sync_schema_commit(Tid, Store, Tail);
+
+ {mnesia_down, Node} when Node == node(Pid) ->
+ ?ets_match_delete(Store, {waiting_for_commit_ack, Node}),
+ sync_schema_commit(Tid, Store, Tail)
+ end.
+
+tell_participants([Pid | Pids], Msg) ->
+ Pid ! Msg,
+ tell_participants(Pids, Msg);
+tell_participants([], _Msg) ->
+ ok.
+
+%% No need for trapping exits. We are only linked
+%% to mnesia_tm and if it dies we should also die.
+%% The same goes for disk_log and dets.
+commit_participant(Coord, Tid, Bin, DiscNs, RamNs) when binary(Bin) ->
+ Commit = binary_to_term(Bin),
+ commit_participant(Coord, Tid, Bin, Commit, DiscNs, RamNs);
+commit_participant(Coord, Tid, C, DiscNs, RamNs) when record(C, commit) ->
+ commit_participant(Coord, Tid, C, C, DiscNs, RamNs).
+
+commit_participant(Coord, Tid, Bin, C0, DiscNs, _RamNs) ->
+ ?eval_debug_fun({?MODULE, commit_participant, pre}, [{tid, Tid}]),
+ case catch mnesia_schema:prepare_commit(Tid, C0, {part, Coord}) of
+ {Modified, C, DumperMode} when record(C, commit) ->
+ %% If we cannot find any local unclear decision,
+ %% we should presume abort at startup recovery
+ case lists:member(node(), DiscNs) of
+ false ->
+ ignore;
+ true ->
+ case Modified of
+ false -> mnesia_log:log(Bin);
+ true -> mnesia_log:log(C)
+ end
+ end,
+ ?eval_debug_fun({?MODULE, commit_participant, vote_yes},
+ [{tid, Tid}]),
+ reply(Coord, {vote_yes, Tid, self()}),
+
+ receive
+ {Tid, pre_commit} ->
+ D = C#commit.decision,
+ mnesia_recover:log_decision(D#decision{outcome = unclear}),
+ ?eval_debug_fun({?MODULE, commit_participant, pre_commit},
+ [{tid, Tid}]),
+ Expect_schema_ack = C#commit.schema_ops /= [],
+ reply(Coord, {acc_pre_commit, Tid, self(), Expect_schema_ack}),
+
+ %% Now we are vulnerable to failures, since
+ %% we cannot decide without asking others
+ receive
+ {Tid, committed} ->
+ mnesia_recover:log_decision(D#decision{outcome = committed}),
+ ?eval_debug_fun({?MODULE, commit_participant, log_commit},
+ [{tid, Tid}]),
+ do_commit(Tid, C, DumperMode),
+ case Expect_schema_ack of
+ false -> ignore;
+ true -> reply(Coord, {schema_commit, Tid, self()})
+ end,
+ ?eval_debug_fun({?MODULE, commit_participant, do_commit},
+ [{tid, Tid}]);
+
+ {Tid, {do_abort, _Reason}} ->
+ mnesia_recover:log_decision(D#decision{outcome = aborted}),
+ ?eval_debug_fun({?MODULE, commit_participant, log_abort},
+ [{tid, Tid}]),
+ mnesia_schema:undo_prepare_commit(Tid, C),
+ ?eval_debug_fun({?MODULE, commit_participant, undo_prepare},
+ [{tid, Tid}]);
+
+ {'EXIT', _, _} ->
+ mnesia_recover:log_decision(D#decision{outcome = aborted}),
+ ?eval_debug_fun({?MODULE, commit_participant, exit_log_abort},
+ [{tid, Tid}]),
+ mnesia_schema:undo_prepare_commit(Tid, C),
+ ?eval_debug_fun({?MODULE, commit_participant, exit_undo_prepare},
+ [{tid, Tid}]);
+
+ Msg ->
+ verbose("** ERROR ** commit_participant ~p, got unexpected msg: ~p~n",
+ [Tid, Msg])
+ end;
+ {Tid, {do_abort, _Reason}} ->
+ mnesia_schema:undo_prepare_commit(Tid, C),
+ ?eval_debug_fun({?MODULE, commit_participant, pre_commit_undo_prepare},
+ [{tid, Tid}]);
+
+ {'EXIT', _, _} ->
+ mnesia_schema:undo_prepare_commit(Tid, C),
+ ?eval_debug_fun({?MODULE, commit_participant, pre_commit_undo_prepare}, [{tid, Tid}]);
+
+ Msg ->
+ verbose("** ERROR ** commit_participant ~p, got unexpected msg: ~p~n",
+ [Tid, Msg])
+ end;
+
+ {'EXIT', Reason} ->
+ ?eval_debug_fun({?MODULE, commit_participant, vote_no},
+ [{tid, Tid}]),
+ reply(Coord, {vote_no, Tid, Reason}),
+ mnesia_schema:undo_prepare_commit(Tid, C0)
+ end,
+ mnesia_locker:release_tid(Tid),
+ ?MODULE ! {delete_transaction, Tid},
+ unlink(whereis(?MODULE)),
+ exit(normal).
+
+do_abort(Tid, Bin) when binary(Bin) ->
+ %% Possible optimization:
+ %% If we wanted to, we could pass around a flag
+ %% that tells us whether the binary contains
+ %% schema ops or not. Only if the binary
+ %% contains schema ops is it meaningful to
+ %% unpack the binary and perform
+ %% mnesia_schema:undo_prepare_commit/1.
+ do_abort(Tid, binary_to_term(Bin));
+do_abort(Tid, Commit) ->
+ mnesia_schema:undo_prepare_commit(Tid, Commit),
+ Commit.
+
+do_dirty(Tid, Commit) when Commit#commit.schema_ops == [] ->
+ mnesia_log:log(Commit),
+ do_commit(Tid, Commit).
+
+%% do_commit(Tid, CommitRecord)
+do_commit(Tid, Bin) when binary(Bin) ->
+ do_commit(Tid, binary_to_term(Bin));
+do_commit(Tid, C) ->
+ do_commit(Tid, C, optional).
+do_commit(Tid, Bin, DumperMode) when binary(Bin) ->
+ do_commit(Tid, binary_to_term(Bin), DumperMode);
+do_commit(Tid, C, DumperMode) ->
+ mnesia_dumper:update(Tid, C#commit.schema_ops, DumperMode),
+ R = do_snmp(Tid, C#commit.snmp),
+ R2 = do_update(Tid, ram_copies, C#commit.ram_copies, R),
+ R3 = do_update(Tid, disc_copies, C#commit.disc_copies, R2),
+ do_update(Tid, disc_only_copies, C#commit.disc_only_copies, R3).
+
+%% Update the items
+do_update(Tid, Storage, [Op | Ops], OldRes) ->
+ case catch do_update_op(Tid, Storage, Op) of
+ ok ->
+ do_update(Tid, Storage, Ops, OldRes);
+ {'EXIT', Reason} ->
+ %% This may only happen when we have recently
+ %% deleted our local replica, changed the storage_type,
+ %% or transformed the table.
+ %% BUGBUG: Updates may be lost if storage_type is changed.
+ %% Determine actual storage type and try again.
+ %% BUGBUG: Updates may be lost if table is transformed.
+
+ verbose("do_update in ~w failed: ~p -> {'EXIT', ~p}~n",
+ [Tid, Op, Reason]),
+ do_update(Tid, Storage, Ops, OldRes);
+ NewRes ->
+ do_update(Tid, Storage, Ops, NewRes)
+ end;
+do_update(_Tid, _Storage, [], Res) ->
+ Res.
+
+do_update_op(Tid, Storage, {{Tab, K}, Obj, write}) ->
+ commit_write(?catch_val({Tab, commit_work}), Tid,
+ Tab, K, Obj, undefined),
+ mnesia_lib:db_put(Storage, Tab, Obj);
+
+do_update_op(Tid, Storage, {{Tab, K}, Val, delete}) ->
+ commit_delete(?catch_val({Tab, commit_work}), Tid, Tab, K, Val, undefined),
+ mnesia_lib:db_erase(Storage, Tab, K);
+
+do_update_op(Tid, Storage, {{Tab, K}, {RecName, Incr}, update_counter}) ->
+ {NewObj, OldObjs} =
+ case catch mnesia_lib:db_update_counter(Storage, Tab, K, Incr) of
+ NewVal when integer(NewVal), NewVal >= 0 ->
+ {{RecName, K, NewVal}, [{RecName, K, NewVal - Incr}]};
+ _ ->
+ Zero = {RecName, K, 0},
+ mnesia_lib:db_put(Storage, Tab, Zero),
+ {Zero, []}
+ end,
+ commit_update(?catch_val({Tab, commit_work}), Tid, Tab,
+ K, NewObj, OldObjs),
+ element(3, NewObj);
+
+do_update_op(Tid, Storage, {{Tab, Key}, Obj, delete_object}) ->
+ commit_del_object(?catch_val({Tab, commit_work}),
+ Tid, Tab, Key, Obj, undefined),
+ mnesia_lib:db_match_erase(Storage, Tab, Obj);
+
+do_update_op(Tid, Storage, {{Tab, Key}, Obj, clear_table}) ->
+ commit_clear(?catch_val({Tab, commit_work}), Tid, Tab, Key, Obj),
+ mnesia_lib:db_match_erase(Storage, Tab, Obj).
+
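+%% For illustration, the operations handled by do_update_op/3 above are
+%% {Oid, Value, Op} tuples as stored in the transaction store, e.g.
+%%
+%%   {{Tab, Key}, Record, write}
+%%   {{Tab, Key}, {Tab, Key}, delete}
+%%   {{Tab, Key}, Record, delete_object}
+%%   {{Tab, Key}, {RecName, Incr}, update_counter}
+%%
+%% while clear_table carries a match pattern that is handed to
+%% mnesia_lib:db_match_erase/3.
+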
+commit_write([], _, _, _, _, _) -> ok;
+commit_write([{checkpoints, CpList}|R], Tid, Tab, K, Obj, Old) ->
+ mnesia_checkpoint:tm_retain(Tid, Tab, K, write, CpList),
+ commit_write(R, Tid, Tab, K, Obj, Old);
+commit_write([H|R], Tid, Tab, K, Obj, Old)
+ when element(1, H) == subscribers ->
+ mnesia_subscr:report_table_event(H, Tab, Tid, Obj, write, Old),
+ commit_write(R, Tid, Tab, K, Obj, Old);
+commit_write([H|R], Tid, Tab, K, Obj, Old)
+ when element(1, H) == index ->
+ mnesia_index:add_index(H, Tab, K, Obj, Old),
+ commit_write(R, Tid, Tab, K, Obj, Old).
+
+commit_update([], _, _, _, _, _) -> ok;
+commit_update([{checkpoints, CpList}|R], Tid, Tab, K, Obj, _) ->
+ Old = mnesia_checkpoint:tm_retain(Tid, Tab, K, write, CpList),
+ commit_update(R, Tid, Tab, K, Obj, Old);
+commit_update([H|R], Tid, Tab, K, Obj, Old)
+ when element(1, H) == subscribers ->
+ mnesia_subscr:report_table_event(H, Tab, Tid, Obj, write, Old),
+ commit_update(R, Tid, Tab, K, Obj, Old);
+commit_update([H|R], Tid, Tab, K, Obj, Old)
+ when element(1, H) == index ->
+ mnesia_index:add_index(H, Tab, K, Obj, Old),
+ commit_update(R, Tid, Tab, K, Obj, Old).
+
+commit_delete([], _, _, _, _, _) -> ok;
+commit_delete([{checkpoints, CpList}|R], Tid, Tab, K, Obj, _) ->
+ Old = mnesia_checkpoint:tm_retain(Tid, Tab, K, delete, CpList),
+ commit_delete(R, Tid, Tab, K, Obj, Old);
+commit_delete([H|R], Tid, Tab, K, Obj, Old)
+ when element(1, H) == subscribers ->
+ mnesia_subscr:report_table_event(H, Tab, Tid, Obj, delete, Old),
+ commit_delete(R, Tid, Tab, K, Obj, Old);
+commit_delete([H|R], Tid, Tab, K, Obj, Old)
+ when element(1, H) == index ->
+ mnesia_index:delete_index(H, Tab, K),
+ commit_delete(R, Tid, Tab, K, Obj, Old).
+
+commit_del_object([], _, _, _, _, _) -> ok;
+commit_del_object([{checkpoints, CpList}|R], Tid, Tab, K, Obj, _) ->
+ Old = mnesia_checkpoint:tm_retain(Tid, Tab, K, delete_object, CpList),
+ commit_del_object(R, Tid, Tab, K, Obj, Old);
+commit_del_object([H|R], Tid, Tab, K, Obj, Old)
+ when element(1, H) == subscribers ->
+ mnesia_subscr:report_table_event(H, Tab, Tid, Obj, delete_object, Old),
+ commit_del_object(R, Tid, Tab, K, Obj, Old);
+commit_del_object([H|R], Tid, Tab, K, Obj, Old)
+ when element(1, H) == index ->
+ mnesia_index:del_object_index(H, Tab, K, Obj, Old),
+ commit_del_object(R, Tid, Tab, K, Obj, Old).
+
+commit_clear([], _, _, _, _) -> ok;
+commit_clear([{checkpoints, CpList}|R], Tid, Tab, K, Obj) ->
+ mnesia_checkpoint:tm_retain(Tid, Tab, K, clear_table, CpList),
+ commit_clear(R, Tid, Tab, K, Obj);
+commit_clear([H|R], Tid, Tab, K, Obj)
+ when element(1, H) == subscribers ->
+ mnesia_subscr:report_table_event(H, Tab, Tid, Obj, clear_table, undefined),
+ commit_clear(R, Tid, Tab, K, Obj);
+commit_clear([H|R], Tid, Tab, K, Obj)
+ when element(1, H) == index ->
+ mnesia_index:clear_index(H, Tab, K, Obj),
+ commit_clear(R, Tid, Tab, K, Obj).
+
+do_snmp(_, []) -> ok;
+do_snmp(Tid, [Head | Tail]) ->
+ case catch mnesia_snmp_hook:update(Head) of
+ {'EXIT', Reason} ->
+ %% This should only happen when we have recently
+ %% deleted our local replica or recently detached
+ %% the snmp table

+
+ verbose("do_snmp in ~w failed: ~p -> {'EXIT', ~p}~n",
+ [Tid, Head, Reason]);
+ ok ->
+ ignore
+ end,
+ do_snmp(Tid, Tail).
+
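+%% Partition the commit records into {DiscNodes, RamNodes}.  A node is
+%% classified as ram-only when its commit record carries no disc_copies,
+%% no disc_only_copies and no schema_ops.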
+commit_nodes([C | Tail], AccD, AccR)
+ when C#commit.disc_copies == [],
+ C#commit.disc_only_copies == [],
+ C#commit.schema_ops == [] ->
+ commit_nodes(Tail, AccD, [C#commit.node | AccR]);
+commit_nodes([C | Tail], AccD, AccR) ->
+ commit_nodes(Tail, [C#commit.node | AccD], AccR);
+commit_nodes([], AccD, AccR) ->
+ {AccD, AccR}.
+
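+%% Like commit_nodes/3, but the #decision{} is also filled in and every
+%% commit record is stamped with it; schema_ops that only concern ram
+%% copies (see ram_only_ops/2) still let the node count as a ram node.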
+commit_decision(D, [C | Tail], AccD, AccR) ->
+ N = C#commit.node,
+ {D2, Tail2} =
+ case C#commit.schema_ops of
+ [] when C#commit.disc_copies == [],
+ C#commit.disc_only_copies == [] ->
+ commit_decision(D, Tail, AccD, [N | AccR]);
+ [] ->
+ commit_decision(D, Tail, [N | AccD], AccR);
+ Ops ->
+ case ram_only_ops(N, Ops) of
+ true ->
+ commit_decision(D, Tail, AccD, [N | AccR]);
+ false ->
+ commit_decision(D, Tail, [N | AccD], AccR)
+ end
+ end,
+ {D2, [C#commit{decision = D2} | Tail2]};
+commit_decision(D, [], AccD, AccR) ->
+ {D#decision{disc_nodes = AccD, ram_nodes = AccR}, []}.
+
+ram_only_ops(N, [{op, change_table_copy_type, N, _FromS, _ToS, Cs} | _Ops ]) ->
+ case lists:member({name, schema}, Cs) of
+ true ->
+ %% We always use disk if we change the type of the schema
+ false;
+ false ->
+ not lists:member(N, val({schema, disc_copies}))
+ end;
+
+ram_only_ops(N, _Ops) ->
+ not lists:member(N, val({schema, disc_copies})).
+
+%% Returns {WaitFor, Res}
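+%% The commit record destined for the local node is executed directly
+%% with do_dirty/2, while each remote node is sent a
+%% {sync_dirty, Tid, Head, Tab} message and added to WaitFor, whose
+%% replies are presumably collected later by rec_dirty/2.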
+sync_send_dirty(Tid, [Head | Tail], Tab, WaitFor) ->
+ Node = Head#commit.node,
+ if
+ Node == node() ->
+ {WF, _} = sync_send_dirty(Tid, Tail, Tab, WaitFor),
+ Res = do_dirty(Tid, Head),
+ {WF, Res};
+ true ->
+ {?MODULE, Node} ! {self(), {sync_dirty, Tid, Head, Tab}},
+ sync_send_dirty(Tid, Tail, Tab, [Node | WaitFor])
+ end;
+sync_send_dirty(_Tid, [], _Tab, WaitFor) ->
+ {WaitFor, {'EXIT', {aborted, {node_not_running, WaitFor}}}}.
+
+%% Returns {WaitFor, Res}
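+%% Only the ReadNode (the replica chosen to perform the operation and
+%% report the result) is waited for; the other replicas just get an
+%% async_dirty message.  If no replica exists (ReadNode == nowhere)
+%% the call aborts with no_exists.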
+async_send_dirty(_Tid, _Nodes, Tab, nowhere) ->
+ {[], {'EXIT', {aborted, {no_exists, Tab}}}};
+async_send_dirty(Tid, Nodes, Tab, ReadNode) ->
+ async_send_dirty(Tid, Nodes, Tab, ReadNode, [], ok).
+
+async_send_dirty(Tid, [Head | Tail], Tab, ReadNode, WaitFor, Res) ->
+ Node = Head#commit.node,
+ if
+ ReadNode == Node, Node == node() ->
+ NewRes = do_dirty(Tid, Head),
+ async_send_dirty(Tid, Tail, Tab, ReadNode, WaitFor, NewRes);
+ ReadNode == Node ->
+ {?MODULE, Node} ! {self(), {sync_dirty, Tid, Head, Tab}},
+ NewRes = {'EXIT', {aborted, {node_not_running, Node}}},
+ async_send_dirty(Tid, Tail, Tab, ReadNode, [Node | WaitFor], NewRes);
+ true ->
+ {?MODULE, Node} ! {self(), {async_dirty, Tid, Head, Tab}},
+ async_send_dirty(Tid, Tail, Tab, ReadNode, WaitFor, Res)
+ end;
+async_send_dirty(_Tid, [], _Tab, _ReadNode, WaitFor, Res) ->
+ {WaitFor, Res}.
+
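+%% Collect one dirty reply per node in WaitFor.  get_dirty_reply/2
+%% polls in one-second rounds for as long as the node is still among
+%% the current db_nodes; an ok reply replaces the initial
+%% node_not_running placeholder, while any other ("bad") result is
+%% kept in favour of a plain ok.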
+rec_dirty([Node | Tail], Res) when Node /= node() ->
+ NewRes = get_dirty_reply(Node, Res),
+ rec_dirty(Tail, NewRes);
+rec_dirty([], Res) ->
+ Res.
+
+get_dirty_reply(Node, Res) ->
+ receive
+ {?MODULE, Node, {'EXIT', Reason}} ->
+ {'EXIT', {aborted, {badarg, Reason}}};
+ {?MODULE, Node, {dirty_res, ok}} ->
+ case Res of
+ {'EXIT', {aborted, {node_not_running, _Node}}} ->
+ ok;
+ _ ->
+ %% Prioritize bad results, except node_not_running
+ Res
+ end;
+ {?MODULE, Node, {dirty_res, Reply}} ->
+ Reply;
+ {mnesia_down, Node} ->
+ %% It is ok to ignore mnesia_down messages
+ %% since we will make the replicas
+ %% consistent again when Node is started
+ Res
+ after 1000 ->
+ case lists:member(Node, val({current, db_nodes})) of
+ true ->
+ get_dirty_reply(Node, Res);
+ false ->
+ Res
+ end
+ end.
+
+%% Assume that CommitRecord is not a binary
+%% Returns {WaitFor, Local}
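+%% The commit record for the local node is returned as Local rather
+%% than being sent; every remote node receives
+%% {ask_commit, Protocol, Tid, Bin, DiscNs, RamNs}, where Bin may have
+%% been converted to a binary by opt_term_to_binary/3.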
+ask_commit(Protocol, Tid, CR, DiscNs, RamNs) ->
+ ask_commit(Protocol, Tid, CR, DiscNs, RamNs, [], no_local).
+
+ask_commit(Protocol, Tid, [Head | Tail], DiscNs, RamNs, WaitFor, Local) ->
+ Node = Head#commit.node,
+ if
+ Node == node() ->
+ ask_commit(Protocol, Tid, Tail, DiscNs, RamNs, WaitFor, Head);
+ true ->
+ Bin = opt_term_to_binary(Protocol, Head, DiscNs++RamNs),
+ Msg = {ask_commit, Protocol, Tid, Bin, DiscNs, RamNs},
+ {?MODULE, Node} ! {self(), Msg},
+ ask_commit(Protocol, Tid, Tail, DiscNs, RamNs, [Node | WaitFor], Local)
+ end;
+ask_commit(_Protocol, _Tid, [], _DiscNs, _RamNs, WaitFor, Local) ->
+ {WaitFor, Local}.
+
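+%% For the asym_trans protocol the commit record is sent as a binary,
+%% unless some receiving node still needs protocol conversion
+%% (mnesia_monitor:needs_protocol_conversion/1), in which case the
+%% plain term is kept.  Other protocols always send the plain record.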
+opt_term_to_binary(asym_trans, Head, Nodes) ->
+ opt_term_to_binary(Nodes, Head);
+opt_term_to_binary(_Protocol, Head, _Nodes) ->
+ Head.
+
+opt_term_to_binary([], Head) ->
+ term_to_binary(Head);
+opt_term_to_binary([H|R], Head) ->
+ case mnesia_monitor:needs_protocol_conversion(H) of
+ true -> Head;
+ false ->
+ opt_term_to_binary(R, Head)
+ end.
+
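+%% Gather one vote or acknowledgement per node.  A vote_no turns the
+%% accumulated result into {do_abort, Reason} and a mnesia_down into
+%% {do_abort, {bad_commit, Node}}; extra worker pids reported in
+%% vote_yes messages are collected in Pids.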
+rec_all([Node | Tail], Tid, Res, Pids) ->
+ receive
+ {?MODULE, Node, {vote_yes, Tid}} ->
+ rec_all(Tail, Tid, Res, Pids);
+ {?MODULE, Node, {vote_yes, Tid, Pid}} ->
+ rec_all(Tail, Tid, Res, [Pid | Pids]);
+ {?MODULE, Node, {vote_no, Tid, Reason}} ->
+ rec_all(Tail, Tid, {do_abort, Reason}, Pids);
+ {?MODULE, Node, {committed, Tid}} ->
+ rec_all(Tail, Tid, Res, Pids);
+ {?MODULE, Node, {aborted, Tid}} ->
+ rec_all(Tail, Tid, Res, Pids);
+
+ {mnesia_down, Node} ->
+ rec_all(Tail, Tid, {do_abort, {bad_commit, Node}}, Pids)
+ end;
+rec_all([], _Tid, Res, Pids) ->
+ {Res, Pids}.
+
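+%% List the coordinated transactions as {Counter, Pid, Status} tuples;
+%% Status is 'participant' when the Tid also occurs in the participant
+%% list and 'coordinator' otherwise.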
+get_transactions() ->
+ {info, Participant, Coordinator} = req(info),
+ lists:map(fun({Tid, _Tabs}) ->
+ Status = tr_status(Tid,Participant),
+ {Tid#tid.counter, Tid#tid.pid, Status}
+ end,Coordinator).
+
+tr_status(Tid,Participant) ->
+ case lists:keymember(Tid, 1, Participant) of
+ true -> participant;
+ false -> coordinator
+ end.
+
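+%% Fetch the participant and coordinator lists from the local
+%% mnesia_tm process, returning {timeout, Timeout} if the process is
+%% not running or does not answer within Timeout.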
+get_info(Timeout) ->
+ case whereis(?MODULE) of
+ undefined ->
+ {timeout, Timeout};
+ Pid ->
+ Pid ! {self(), info},
+ receive
+ {?MODULE, _, {info, Part, Coord}} ->
+ {info, Part, Coord}
+ after Timeout ->
+ {timeout, Timeout}
+ end
+ end.
+
+display_info(Stream, {timeout, T}) ->
+ io:format(Stream, "---> No info about coordinator and participant transactions, "
+ "timeout ~p <--- ~n", [T]);
+
+display_info(Stream, {info, Part, Coord}) ->
+ io:format(Stream, "---> Participant transactions <--- ~n", []),
+ lists:foreach(fun(P) -> pr_participant(Stream, P) end, Part),
+ io:format(Stream, "---> Coordinator transactions <---~n", []),
+ lists:foreach(fun({Tid, _Tabs}) -> pr_tid(Stream, Tid) end, Coord).
+
+pr_participant(Stream, P) ->
+ Commit0 = P#participant.commit,
+ Commit =
+ if
+ binary(Commit0) -> binary_to_term(Commit0);
+ true -> Commit0
+ end,
+ pr_tid(Stream, P#participant.tid),
+ io:format(Stream, "with participant objects ~p~n", [Commit]).
+
+
+pr_tid(Stream, Tid) ->
+ io:format(Stream, "Tid: ~p (owned by ~p) ~n",
+ [Tid#tid.counter, Tid#tid.pid]).
+
+info(Serial) ->
+ io:format( "Info about transaction with serial == ~p~n", [Serial]),
+ {info, Participant, Trs} = req(info),
+ search_pr_participant(Serial, Participant),
+ search_pr_coordinator(Serial, Trs).
+
+
+search_pr_coordinator(_S, []) -> no;
+search_pr_coordinator(S, [{Tid, _Ts}|Tail]) ->
+ case Tid#tid.counter of
+ S ->
+ io:format( "Tid is coordinator, owner == \n", []),
+ display_pid_info(Tid#tid.pid),
+ search_pr_coordinator(S, Tail);
+ _ ->
+ search_pr_coordinator(S, Tail)
+ end.
+
+search_pr_participant(_S, []) ->
+ false;
+search_pr_participant(S, [ P | Tail]) ->
+ Tid = P#participant.tid,
+ Commit0 = P#participant.commit,
+ if
+ Tid#tid.counter == S ->
+ io:format( "Tid is participant to commit, owner == \n", []),
+ Pid = Tid#tid.pid,
+ display_pid_info(Pid),
+ io:format( "Tid wants to write objects \n",[]),
+ Commit =
+ if
+ binary(Commit0) -> binary_to_term(Commit0);
+ true -> Commit0
+ end,
+
+ io:format("~p~n", [Commit]),
+ search_pr_participant(S,Tail); %% !!!!!
+ true ->
+ search_pr_participant(S, Tail)
+ end.
+
+display_pid_info(Pid) ->
+ case rpc:pinfo(Pid) of
+ undefined ->
+ io:format( "Dead process \n");
+ Info ->
+ Call = fetch(initial_call, Info),
+ Curr = case fetch(current_function, Info) of
+ {Mod,F,Args} when list(Args) ->
+ {Mod,F,length(Args)};
+ Other ->
+ Other
+ end,
+ Reds = fetch(reductions, Info),
+ LM = length(fetch(messages, Info)),
+ pformat(io_lib:format("~p", [Pid]),
+ io_lib:format("~p", [Call]),
+ io_lib:format("~p", [Curr]), Reds, LM)
+ end.
+
+pformat(A1, A2, A3, A4, A5) ->
+ io:format( "~-12s ~-21s ~-21s ~9w ~4w~n", [A1,A2,A3,A4,A5]).
+
+fetch(Key, Info) ->
+ case lists:keysearch(Key, 1, Info) of
+ {value, {_, Val}} ->
+ Val;
+ _ ->
+ 0
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%
+%%%%%%%%%%%%%%%%%%%%% Reconfiguration handling
+%%%%%%%%%%%%%%%%%%%%%
+
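+%% Node N has gone down.  A coordinator whose transaction is known to
+%% have committed is only notified if it is still waiting for a commit
+%% acknowledgement from N; aborted transactions are left alone, and in
+%% all other cases the coordinator is told about the mnesia_down.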
+reconfigure_coordinators(N, [{Tid, [Store | _]} | Coordinators]) ->
+ case mnesia_recover:outcome(Tid, unknown) of
+ committed ->
+ WaitingNodes = ?ets_lookup(Store, waiting_for_commit_ack),
+ case lists:keymember(N, 2, WaitingNodes) of
+ false ->
+ ignore; % avoid spurious mnesia_down messages
+ true ->
+ send_mnesia_down(Tid, Store, N)
+ end;
+ aborted ->
+ ignore; % avoid spurious mnesia_down messages
+ _ ->
+ %% Tell the coordinator about the mnesia_down
+ send_mnesia_down(Tid, Store, N)
+ end,
+ reconfigure_coordinators(N, Coordinators);
+reconfigure_coordinators(_N, []) ->
+ ok.
+
+send_mnesia_down(Tid, Store, Node) ->
+ Msg = {mnesia_down, Node},
+ send_to_pids([Tid#tid.pid | get_friends(Store)], Msg).
+
+send_to_pids([Pid | Pids], Msg) ->
+ Pid ! Msg,
+ send_to_pids(Pids, Msg);
+send_to_pids([], _Msg) ->
+ ok.
+
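+%% For every transaction in which this node participates together with
+%% the failed node N: if N was just another participant the event is
+%% ignored, but if N hosted the coordinator the outcome is resolved and
+%% broadcast to the surviving nodes via tell_outcome/5.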
+reconfigure_participants(N, [P | Tail]) ->
+ case lists:member(N, P#participant.disc_nodes) or
+ lists:member(N, P#participant.ram_nodes) of
+ false ->
+ %% Ignore, since we are not a participant
+ %% in the transaction.
+ reconfigure_participants(N, Tail);
+
+ true ->
+ %% We are on a participant node; let's
+ %% check whether the dead node was a
+ %% participant or the coordinator.
+ Tid = P#participant.tid,
+ if
+ node(Tid#tid.pid) /= N ->
+ %% Another participant node died. Ignore.
+ reconfigure_participants(N, Tail);
+
+ true ->
+ %% The coordinator node has died and
+ %% we must determine the outcome of the
+ %% transaction and tell mnesia_tm on all
+ %% nodes (including the local node) about it
+ verbose("Coordinator ~p in transaction ~p died~n",
+ [Tid#tid.pid, Tid]),
+
+ Nodes = P#participant.disc_nodes ++
+ P#participant.ram_nodes,
+ AliveNodes = Nodes -- [N],
+ Protocol = P#participant.protocol,
+ tell_outcome(Tid, Protocol, N, AliveNodes, AliveNodes),
+ reconfigure_participants(N, Tail)
+ end
+ end;
+reconfigure_participants(_, []) ->
+ [].
+
+%% Determine the outcome of the transaction and tell mnesia_tm on all
+%% involved nodes (including the local node) about it.
+tell_outcome(Tid, Protocol, Node, CheckNodes, TellNodes) ->
+ Outcome = mnesia_recover:what_happened(Tid, Protocol, CheckNodes),
+ case Outcome of
+ aborted ->
+ rpc:abcast(TellNodes, ?MODULE, {Tid,{do_abort, {mnesia_down, Node}}});
+ committed ->
+ rpc:abcast(TellNodes, ?MODULE, {Tid, do_commit})
+ end,
+ Outcome.
+
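+%% Orderly shutdown: tell every local coordinator that this node is
+%% going down, stop the checkpoint and log subsystems, then exit.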
+do_stop(#state{coordinators = Coordinators}) ->
+ Msg = {mnesia_down, node()},
+ lists:foreach(fun({Tid, _}) -> Tid#tid.pid ! Msg end, Coordinators),
+ mnesia_checkpoint:stop(),
+ mnesia_log:stop(),
+ exit(shutdown).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% System upgrade
+
+system_continue(_Parent, _Debug, State) ->
+ doit_loop(State).
+
+system_terminate(_Reason, _Parent, _Debug, State) ->
+ do_stop(State).
+
+system_code_change(State, _Module, _OldVsn, _Extra) ->
+ {ok, State}.